commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
a4f49b988a10afc160c217d32da46ea854059e8c | Add migration file | rapidpro/ureport,Ilhasoft/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport,Ilhasoft/ureport,rapidpro/ureport | ureport/polls/migrations/0060_populate_category_displayed.py | ureport/polls/migrations/0060_populate_category_displayed.py | # Generated by Django 2.2.10 on 2020-05-05 15:01
from django.db import migrations
def noop(apps, schema_editor): # pragma: no cover
pass
def populate_category_displayed(apps, schema_editor): # pragma: no cover
PollResponseCategory = apps.get_model("polls", "PollResponseCategory")
updated = 0
for obj in PollResponseCategory.objects.all().exclude(category=None):
PollResponseCategory.objects.filter(id=obj.id).update(category_displayed=obj.category)
updated += 1
if updated > 0:
print(f"populated {updated} poll response categories")
class Migration(migrations.Migration):
dependencies = [
("polls", "0059_pollresponsecategory_category_displayed"),
]
operations = [migrations.RunPython(populate_category_displayed, noop)]
| agpl-3.0 | Python |
|
b7cd3081585c0a4695db4f85b7db8e346a525e23 | add to pypi | paroj/libraw.py | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="libraw.py",
version="1.0",
description="python bindings using ctypes for libraw",
url="https://github.com/paroj/libraw.py",
author="Pavel Rojtberg",
license="LGPLv2",
classifiers=[
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
py_modules=["libraw"]
)
| lgpl-2.1 | Python |
|
88cb2155d55100d9b00dca1ecf4f9a01dec7c3f5 | Add missing 'import os' for integrationtest/vm/basic/suite_setup.py | SoftwareKing/zstack-woodpecker,zstackorg/zstack-woodpecker,zstackio/zstack-woodpecker,quarkonics/zstack-woodpecker,zstackorg/zstack-woodpecker,quarkonics/zstack-woodpecker,zstackio/zstack-woodpecker,SoftwareKing/zstack-woodpecker,zstackio/zstack-woodpecker,zstackorg/zstack-woodpecker | integrationtest/vm/basic/suite_setup.py | integrationtest/vm/basic/suite_setup.py | '''
@author: Frank
'''
import os
import zstackwoodpecker.setup_actions as setup_actions
import zstackwoodpecker.operations.deploy_operations as deploy_operations
import zstackwoodpecker.operations.config_operations as config_operations
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_util as test_util
USER_PATH = os.path.expanduser('~')
EXTRA_SUITE_SETUP_SCRIPT = '%s/.zstackwoodpecker/extra_suite_setup_config.sh' % USER_PATH
def test():
setup = setup_actions.SetupAction()
setup.plan = test_lib.all_config
setup.run()
if os.path.exists(EXTRA_SUITE_SETUP_SCRIPT):
os.system("bash %s" % EXTRA_SUITE_SETUP_SCRIPT)
deploy_operations.deploy_initial_database(test_lib.deploy_config)
test_util.test_pass('Suite Setup Success')
| '''
@author: Frank
'''
import zstackwoodpecker.setup_actions as setup_actions
import zstackwoodpecker.operations.deploy_operations as deploy_operations
import zstackwoodpecker.operations.config_operations as config_operations
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_util as test_util
USER_PATH = os.path.expanduser('~')
EXTRA_SUITE_SETUP_SCRIPT = '%s/.zstackwoodpecker/extra_suite_setup_config.sh' % USER_PATH
def test():
setup = setup_actions.SetupAction()
setup.plan = test_lib.all_config
setup.run()
if os.path.exists(EXTRA_SUITE_SETUP_SCRIPT):
os.system("bash %s" % EXTRA_SUITE_SETUP_SCRIPT)
deploy_operations.deploy_initial_database(test_lib.deploy_config)
test_util.test_pass('Suite Setup Success')
| apache-2.0 | Python |
f0205534cd1c812db94921b29ebef4207039e56b | work in progress (kind of) | projecthamster/experiments | hamster_sun.py | hamster_sun.py | #!/usr/bin/env python
# - coding: utf-8 -
# Copyright (C) 2010 Toms Bauģis <toms.baugis at gmail.com>
"""Base template"""
import gtk
from lib import graphics
import math
import hamster.client
import datetime as dt
from collections import defaultdict
class Scene(graphics.Scene):
def __init__(self):
graphics.Scene.__init__(self)
storage = hamster.client.Storage()
self.facts = storage.get_facts(dt.date(2007,1,1), dt.date.today())
self.day_counts = {}
activities, categories = defaultdict(int), defaultdict(int)
print len(self.facts)
for fact in self.facts:
self.day_counts.setdefault(fact['start_time'].date(), defaultdict(list))
self.day_counts[fact['start_time'].date()][fact['category']].append(fact)
activities[fact['name']] += 1
categories[fact['category']] += 1
if fact['end_time'] and fact['start_time'].date() != fact['end_time'].date():
self.day_counts.setdefault(fact['end_time'].date(), defaultdict(list))
self.day_counts[fact['end_time'].date()][fact['category']].append(fact)
self.activities = [activity[0] for activity in sorted(activities.items(), key=lambda item:item[1], reverse=True)]
self.categories = categories.keys()
self.connect("on-enter-frame", self.on_enter_frame)
def on_enter_frame(self, scene, context):
g = graphics.Graphics(context)
step = (360.0 / 365) * math.pi / 180.0
g.set_color("#999")
g.set_line_style(width = 1)
"""
for i in range(365):
g.move_to(self.width / 2, self.height / 2)
g.rel_line_to(math.cos(step * i) * 300,
math.sin(step * i) * 300)
g.stroke()
"""
colors = ("#ff0000", "#00ff00", "#0000ff", "#aaa000")
for day in self.day_counts:
year_day = day.timetuple().tm_yday
angle = year_day * step
for j, category in enumerate(self.day_counts[day]):
distance = 20 * (day.year - 2005) + self.categories.index(category) * 60 + 30
color = colors[self.categories.index(category)]
delta = dt.timedelta()
for fact in self.day_counts[day][category]:
delta += fact['delta']
hours = delta.seconds / 60 / 60
height = hours / 16.0 * 20
g.set_color(color)
#bar per category
g.move_to(math.cos(angle) * distance + self.width / 2,
math.sin(angle) * distance + self.height / 2)
g.line_to(math.cos(angle) * (distance + height) + self.width / 2 ,
math.sin(angle) * (distance + height) + self.height / 2)
g.line_to(math.cos(angle+step) * (distance + height) + self.width / 2 ,
math.sin(angle+step) * (distance + height) + self.height / 2)
g.line_to(math.cos(angle+step) * distance + self.width / 2,
math.sin(angle+step) * distance + self.height / 2)
g.close_path()
#g.fill_preserve()
g.stroke()
g.fill("#aaa")
for i, color in enumerate(colors):
g.move_to(0, i * 20)
g.set_color(color)
g.show_text(self.categories[i])
class BasicWindow:
def __init__(self):
window = gtk.Window(gtk.WINDOW_TOPLEVEL)
window.set_size_request(600, 500)
window.connect("delete_event", lambda *args: gtk.main_quit())
window.add(Scene())
window.show_all()
example = BasicWindow()
gtk.main()
| mit | Python |
|
e3cbc79cc60e21978fe682b73413e9de19b71543 | add a print hello world function | ctsit/J.O.B-Training-Repo-1 | helloAlyssa.py | helloAlyssa.py | #This is my hello world program
print ('Hello World')
| apache-2.0 | Python |
|
9339307b6bd42ad014e528d337fc9f195c632245 | Add tick class | techbureau/zaifbot,techbureau/zaifbot | zaifbot/exchange/tick.py | zaifbot/exchange/tick.py | class Tick:
def __init__(self, currency_pair):
self.size = currency_pair.info['aux_unit_step']
self._decimal_digits = currency_pair.info['aux_unit_point']
def truncate_price(self, price):
remainder = price % self.size
truncated_price = price - remainder
if self._decimal_digits == 0:
return int(truncated_price)
return truncated_price
| mit | Python |
|
d9d84083a488ad1b4643298d7a75b54b4e0e34be | add OptionChainConsistencyRegressionAlgorithm | kaffeebrauer/Lean,jameschch/Lean,AlexCatarino/Lean,QuantConnect/Lean,StefanoRaggi/Lean,jameschch/Lean,JKarathiya/Lean,kaffeebrauer/Lean,JKarathiya/Lean,redmeros/Lean,StefanoRaggi/Lean,JKarathiya/Lean,AlexCatarino/Lean,kaffeebrauer/Lean,jameschch/Lean,kaffeebrauer/Lean,StefanoRaggi/Lean,StefanoRaggi/Lean,AlexCatarino/Lean,AnshulYADAV007/Lean,redmeros/Lean,AnshulYADAV007/Lean,andrewhart098/Lean,QuantConnect/Lean,StefanoRaggi/Lean,jameschch/Lean,AnshulYADAV007/Lean,QuantConnect/Lean,Jay-Jay-D/LeanSTP,jameschch/Lean,Jay-Jay-D/LeanSTP,JKarathiya/Lean,AnshulYADAV007/Lean,QuantConnect/Lean,redmeros/Lean,AlexCatarino/Lean,andrewhart098/Lean,Jay-Jay-D/LeanSTP,Jay-Jay-D/LeanSTP,kaffeebrauer/Lean,redmeros/Lean,andrewhart098/Lean,Jay-Jay-D/LeanSTP,AnshulYADAV007/Lean,andrewhart098/Lean | Algorithm.Python/OptionChainConsistencyRegressionAlgorithm.py | Algorithm.Python/OptionChainConsistencyRegressionAlgorithm.py | # QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Indicators")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from QuantConnect.Indicators import *
from datetime import datetime, timedelta
### <summary>
### This regression algorithm checks if all the option chain data coming to the algo is consistent with current securities manager state
### </summary>
### <meta name="tag" content="regression test" />
### <meta name="tag" content="options" />
### <meta name="tag" content="using data" />
### <meta name="tag" content="filter selection" />
class OptionChainConsistencyRegressionAlgorithm(QCAlgorithm):
def Initialize(self):
self.SetCash(10000)
self.SetStartDate(2015,12,24)
self.SetEndDate(2015,12,24)
equity = self.AddEquity("GOOG")
option = self.AddOption("GOOG")
# set our strike/expiry filter for this option chain
option.SetFilter(self.UniverseFunc)
self.SetBenchmark(equity.Symbol)
self.OptionSymbol = option.Symbol
equity.SetDataNormalizationMode(DataNormalizationMode.Raw)
def OnData(self, slice):
if self.Portfolio.Invested: return
for kvp in slice.OptionChains:
chain = kvp.Value
for o in chain:
if not self.Securities.ContainsKey(o.Symbol):
# inconsistency found: option chains contains contract information that is not available in securities manager and not available for trading
self.Log("inconsistency found: option chains contains contract {0} that is not available in securities manager and not available for trading".format(o.Symbol.Value))
contracts = filter(lambda x: x.Expiry.date() == self.Time.date() and
x.Strike < chain.Underlying.Price and
x.Right == OptionRight.Call, chain)
sorted_contracts = sorted(contracts, key = lambda x: x.Strike, reverse = True)
if len(sorted_contracts) > 2:
self.MarketOrder(sorted_contracts[2].Symbol, 1)
self.MarketOnCloseOrder(sorted_contracts[2].Symbol, -1)
# set our strike/expiry filter for this option chain
def UniverseFunc(self, universe):
return universe.IncludeWeeklys().Strikes(-2, 2).Expiration(timedelta(0), timedelta(10))
def OnOrderEvent(self, orderEvent):
self.Log(str(orderEvent)) | using System;
namespace QuantConnect.Algorithm.Python
{
public class OptionChainConsistencyRegressionAlgorithm
{
public OptionChainConsistencyRegressionAlgorithm()
{
}
}
}
| apache-2.0 | Python |
aafb77596ae0cb6c27b2564434367d2b4d5debd1 | Add tests | cheral/orange3,cheral/orange3,cheral/orange3,cheral/orange3,cheral/orange3,cheral/orange3 | Orange/widgets/visualize/tests/test_owscatterplot.py | Orange/widgets/visualize/tests/test_owscatterplot.py | import numpy as np
from Orange.data import Table
from Orange.widgets.tests.base import WidgetTest
from Orange.widgets.visualize.owscatterplot import OWScatterPlot
class TestOWScatterPlot(WidgetTest):
def setUp(self):
self.widget = self.create_widget(OWScatterPlot)
self.data = Table("iris")
def test_set_data(self):
self.widget.set_data(self.data)
self.assertEqual(self.widget.data, self.data)
self.assertEqual(self.widget.subset_data, None)
def test_subset_data(self):
self.widget.set_subset_data(self.data[:30])
self.assertEqual(len(self.widget.subset_data), 30)
self.assertEqual(self.widget.data, None)
np.testing.assert_array_equal(self.widget.subset_data, self.data[:30])
def test_set_data_none(self):
self.widget.set_data(None)
self.assertEqual(self.widget.data, None)
self.assertEqual(self.widget.subset_data, None)
def test_subset_data_none(self):
self.widget.set_subset_data(None)
self.assertEqual(self.widget.subset_data, None)
self.assertEqual(self.widget.data, None)
| bsd-2-clause | Python |
|
47ad7f4d3b69315e25ae96099fe73b4d9cd7666e | Use file extension to select config file parser | bchretien/dotbot,imattman/dotbot,imattman/dotbot,anishathalye/dotbot,bchretien/dotbot,bchretien/dotbot,anishathalye/dotbot,imattman/dotbot | dotbot/config.py | dotbot/config.py | import yaml
import json
import os.path
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
_, ext = os.path.splitext(config_file_path)
with open(config_file_path) as fin:
print ext
if ext == '.json':
data = json.load(fin)
else:
data = yaml.safe_load(fin)
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| import yaml
import json
from .util import string
class ConfigReader(object):
def __init__(self, config_file_path):
self._config = self._read(config_file_path)
def _read(self, config_file_path):
try:
with open(config_file_path) as fin:
try:
data = yaml.safe_load(fin)
except Exception as e:
# try falling back to JSON, but return original exception
# if that fails too
try:
fin.seek(0)
data = json.load(fin)
except Exception:
raise e
return data
except Exception as e:
msg = string.indent_lines(str(e))
raise ReadingError('Could not read config file:\n%s' % msg)
def get_config(self):
return self._config
class ReadingError(Exception):
pass
| mit | Python |
fec74a5401f925755484955a1b38dd3044824eb3 | Create npy2ckpt.py | nasatony/deeplab_resnet,ALISCIFP/tensorflow-resnet-segmentation,ALISCIFP/tensorflow-resnet-segmentation,DrSleep/tensorflow-deeplab-resnet | npy2ckpt.py | npy2ckpt.py | """Conversion of the .npy weights into the .ckpt ones.
This script converts the weights of the DeepLab-ResNet model
from the numpy format into the TensorFlow one.
"""
from __future__ import print_function
import argparse
import os
import tensorflow as tf
import numpy as np
from deeplab_resnet import DeepLabResNetModel
SAVE_DIR = './'
def get_arguments():
"""Parse all the arguments provided from the CLI.
Returns:
A list of parsed arguments.
"""
parser = argparse.ArgumentParser(description="NPY to CKPT converter.")
parser.add_argument("npy_path", type=str,
help="Path to the .npy file, which contains the weights.")
parser.add_argument("--save_dir", type=str, default=SAVE_DIR,
help="Where to save the converted .ckpt file.")
return parser.parse_args()
def save(saver, sess, logdir):
model_name = 'model.ckpt'
checkpoint_path = os.path.join(logdir, model_name)
if not os.path.exists(logdir):
os.makedirs(logdir)
saver.save(sess, checkpoint_path, write_meta_graph=False)
print('The weights have been converted to {}.'.format(checkpoint_path))
def main():
"""Create the model and start the training."""
args = get_arguments()
# Default image.
image_batch = tf.constant(0, tf.float32, shape=[1, 321, 321, 3])
# Create network.
net = DeepLabResNetModel({'data': image_batch})
var_list = tf.trainable_variables()
# Set up tf session and initialize variables.
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
with tf.Session(config=config) as sess:
init = tf.initialize_all_variables()
sess.run(init)
# Loading .npy weights.
net.load(args.npy_path, sess)
# Saver for converting the loaded weights into .ckpt.
saver = tf.train.Saver(var_list=var_list)
save(saver, sess, args.save_dir)
if __name__ == '__main__':
main()
| mit | Python |
|
8b5bf433b304895f04813c64d556316c48c046fe | add setup.py for distribute | pombredanne/python-v8,pombredanne/python-v8,damoti/python-v8,damoti/python-v8,pombredanne/python-v8,damoti/python-v8,damoti/python-v8,pombredanne/python-v8 | setup.py | setup.py | #!/usr/bin/env python
import os, os.path
from distutils.core import setup, Extension
import distutils.msvccompiler
source_files = ["Engine.cpp", "Wrapper.cpp", "PyV8.cpp"]
macros = [("BOOST_PYTHON_STATIC_LIB", None)]
third_party_libraries = ["python", "boost", "v8"]
include_dirs = os.environ["INCLUDE"].split(';') + [os.path.join("lib", lib, "inc") for lib in third_party_libraries]
library_dirs = os.environ["LIB"].split(';') + [os.path.join("lib", lib, "lib") for lib in third_party_libraries]
libraries = ["winmm"]
pyv8 = Extension(name = "_PyV8",
sources = [os.path.join("src", file) for file in source_files],
define_macros = macros,
include_dirs = include_dirs,
library_dirs = library_dirs,
libraries = libraries,
extra_compile_args = ["/O2", "/GL", "/MT", "/EHsc", "/Gy", "/Zi"],
extra_link_args = ["/DLL", "/OPT:REF", "/OPT:ICF", "/MACHINE:X86"],
)
setup(name='PyV8',
version='0.1',
description='Python Wrapper for Google V8 Engine',
author='Flier Lu',
author_email='[email protected]',
url='http://code.google.com/p/pyv8/',
license="Apache 2.0",
py_modules=['PyV8'],
ext_modules=[pyv8]
) | apache-2.0 | Python |
|
e24a354ae65db5874f51305b839a7ce553d44d78 | Build Sticks | Duke-NSOE/GeoHAT | GeoHat_V10/BuildSticks.py | GeoHat_V10/BuildSticks.py | #---------------------------------------------------------------------------------
# BuildSticks.py
#
# Description: Create sticks (lines between connected patches, with appropriate weights),
# from edge list csv file
#
# Requires: NetworkX to be stored in script folder (or installed)
# Create Edge List tool must be run first
#
# Inputs: <edge list> <Patch raster> <scratch directory>
# Output: <Patch connected attribute table (CSV format)>
#
# August 4, 2016
# Nathan Walker
# Building on code from John Fay
#
#---------------------------------------------------------------------------------
# Import modules
import sys, os, arcpy
import arcpy.sa as sa
##---FUNCTIONS---
# Message management
def msg(txt): print msg; arcpy.AddMessage(txt); return
# Input variables
edgeList = arcpy.GetParameterAsText(0)
patchRaster = arcpy.GetParameterAsText(1)
sticks = arcpy.GetParameterAsText(3)
# Output variables
outdir = arcpy.GetParameterAsText(2)
# set overwrite to true
arcpy.env.overwriteOutput = True
##---PROCESSES---
msg("Converting table to dbf")
# Convert csv to format that is editable and includes OID
edgeListDBF = arcpy.CopyRows_management(in_rows=edgeList, out_table=outdir + "/edgeList.dbf", config_keyword="")
# Add edge ID field
arcpy.AddField_management(in_table=edgeListDBF, field_name="EdgeID", field_type="LONG", field_precision="", field_scale="", field_length="", field_alias="", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="")
arcpy.CalculateField_management(edgeListDBF, "EdgeID", "!OID!", "PYTHON_9.3", "")
msg("Converting patch raster to polygon")
# Convert Raster to Polygon
patch_RtoP = arcpy.RasterToPolygon_conversion(patchRaster, "in_memory/Patch_RtoP", "NO_SIMPLIFY", "Value")
# Add X and Y fields to polygons, representing patch centroid locations
arcpy.AddField_management(patch_RtoP, "X", "FLOAT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
arcpy.AddField_management(patch_RtoP, "Y", "FLOAT", "", "", "", "", "NULLABLE", "NON_REQUIRED", "")
arcpy.CalculateField_management(patch_RtoP, "X", "!Shape.Centroid.X!", "PYTHON_9.3", "")
arcpy.CalculateField_management(patch_RtoP, "Y", "!Shape.Centroid.Y!", "PYTHON_9.3", "")
msg("Joining patch centroids to edge list")
# Join FromID to patch
arcpy.JoinField_management(edgeListDBF, "FromID", patch_RtoP, "GRIDCODE", "")
# Join ToID to patch
arcpy.JoinField_management(edgeListDBF, "ToID", patch_RtoP, "GRIDCODE", "")
msg("Convert X/Y start/end points to line")
# Create line from coordinates of From and To patches
arcpy.XYToLine_management(in_table=edgeListDBF, out_featureclass=sticks, startx_field="X", starty_field="Y", endx_field="X_1", endy_field="Y_1", line_type="GEODESIC", id_field="EdgeID", spatial_reference="PROJCS['WGS_1984_UTM_Zone_18S',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',500000.0],PARAMETER['False_Northing',10000000.0],PARAMETER['Central_Meridian',-75.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]];-5120900 1900 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision")
msg("Adding cost information to line")
# Join back cost information from edge list
arcpy.JoinField_management(sticks, "EdgeID", edgeListDBF, "EdgeID", "")
msg("Cleaning up")
# Delete extra fields
arcpy.DeleteField_management(in_table=sticks, drop_field="X;Y;X_1;Y_1;EdgeID_1;ID;GRIDCODE;X_12;Y_12;ID_1;GRIDCODE_1;X_12_13;Y_12_13")
# Delete temporary file
arcpy.Delete_management(in_data=outdir + "/edgeList.dbf", data_type="DbaseTable")
| cc0-1.0 | Python |
|
56915ed7d290fff6e37859181781687590a2e974 | Remove early_stopping.py from estimator/contrib in favor of estimator/python/estimator/early_stopping.py. And the test. | aam-at/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,renyi533/tensorflow,Intel-Corporation/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,alsrgv/tensorflow,renyi533/tensorflow,kevin-coder/tensorflow-fork,xzturn/tensorflow,xzturn/tensorflow,gunan/tensorflow,ageron/tensorflow,karllessard/tensorflow,annarev/tensorflow,kevin-coder/tensorflow-fork,aam-at/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,arborh/tensorflow,xzturn/tensorflow,theflofly/tensorflow,jhseu/tensorflow,theflofly/tensorflow,alsrgv/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kevin-coder/tensorflow-fork,kevin-coder/tensorflow-fork,ppwwyyxx/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,aam-at/tensorflow,xzturn/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,petewarden/tensorflow,DavidNorman/tensorflow,ppwwyyxx/tensorflow,davidzchen/tensorflow,paolodedios/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow,karllessard/tensorflow,ageron/tensorflow,frreiss/tensorflow-fred,alsrgv/tensorflow,arborh/tensorflow,cxxgtxy/tensorflow,paolodedios/tensorflow,ageron/tensorflow,DavidNorman/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,renyi533/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow,ghchinoy/tensorflow,kevin-coder/tensorflow-fork,annarev/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,chemelnucfin/tensorflow,freedomtan/tensorflow,jbedorf/tensorflow,arborh/tensorflow,xzturn/tensorflow,tensorflow/tensorflow,ghchinoy/tensorflow,petewarden/tensorflow,theflofly/tensorflow,jhseu/tensorflow,gunan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,petewarden/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,ghchinoy/tensorflow,chemelnucfin/tensorflow,annarev/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,sarvex/tensorflow,ghchinoy/tensorflow,alsrgv/tensorflow,freedomtan/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,sarvex/tensorflow,freedomtan/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,freedomtan/tensorflow,chemelnucfin/tensorflow,jbedorf/tensorflow,renyi533/tensorflow,theflofly/tensorflow,theflofly/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jhseu/tensorflow,jhseu/tensorflow,gautam1858/tensorflow,Intel-Corporation/tensorflow,adit-chandra/tensorflow,jhseu/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gunan/tensorflow,theflofly/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,theflofly/tensorflow,paolodedios/tensorflow,cxxgtxy/tensorflow,alsrgv/tensorflow,adit-chandra/tensorflow,ageron/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow,arborh/tensorflow,sarvex/tensorflow,ghchinoy/tensorflow,arborh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,sarvex/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ageron/tensorflow,freedomtan/tensorflow,paolodedios/tensorflow,ageron/tensorflow,theflofly/tensorflow,ageron/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,chemelnucfin/tensorflow,alsrgv/tensorflow,adit-chandra/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,DavidNorman/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,davidzchen/tensorflow,gunan/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,arborh/tensorflow,ghchinoy/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,aldian/tensorflow,ghchinoy/tensorflow,sarvex/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,alsrgv/tensorflow,renyi533/tensorflow,petewarden/tensorflow,tensorflow/tensorflow,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,aldian/tensorflow,ppwwyyxx/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,DavidNorman/tensorflow,kevin-coder/tensorflow-fork,arborh/tensorflow,renyi533/tensorflow,jbedorf/tensorflow,aam-at/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow,aam-at/tensorflow,aam-at/tensorflow,jhseu/tensorflow,tensorflow/tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,alsrgv/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,annarev/tensorflow,petewarden/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,gunan/tensorflow,alsrgv/tensorflow,karllessard/tensorflow,karllessard/tensorflow,aam-at/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,jbedorf/tensorflow,ghchinoy/tensorflow,jbedorf/tensorflow,jhseu/tensorflow,freedomtan/tensorflow,ppwwyyxx/tensorflow,yongtang/tensorflow,gunan/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,karllessard/tensorflow,ageron/tensorflow,ageron/tensorflow,ageron/tensorflow,gunan/tensorflow,frreiss/tensorflow-fred,theflofly/tensorflow,chemelnucfin/tensorflow,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,Intel-Corporation/tensorflow,sarvex/tensorflow,xzturn/tensorflow,aam-at/tensorflow,jbedorf/tensorflow,chemelnucfin/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,tensorflow/tensorflow-pywrap_saved_model,chemelnucfin/tensorflow,yongtang/tensorflow,renyi533/tensorflow,tensorflow/tensorflow,aldian/tensorflow,renyi533/tensorflow,jbedorf/tensorflow,ppwwyyxx/tensorflow,paolodedios/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,arborh/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,chemelnucfin/tensorflow,aldian/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,ppwwyyxx/tensorflow,annarev/tensorflow,aam-at/tensorflow,aldian/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,jbedorf/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,chemelnucfin/tensorflow,karllessard/tensorflow,annarev/tensorflow,jhseu/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,renyi533/tensorflow,arborh/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,jbedorf/tensorflow,alsrgv/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,petewarden/tensorflow,DavidNorman/tensorflow,ageron/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,cxxgtxy/tensorflow,cxxgtxy/tensorflow,kevin-coder/tensorflow-fork,tensorflow/tensorflow,aam-at/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,DavidNorman/tensorflow,cxxgtxy/tensorflow,theflofly/tensorflow,renyi533/tensorflow,yongtang/tensorflow,petewarden/tensorflow,alsrgv/tensorflow,gunan/tensorflow,gunan/tensorflow,gunan/tensorflow,aam-at/tensorflow,sarvex/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,ageron/tensorflow,jhseu/tensorflow,ppwwyyxx/tensorflow,ppwwyyxx/tensorflow,ppwwyyxx/tensorflow,davidzchen/tensorflow,jhseu/tensorflow,davidzchen/tensorflow,jhseu/tensorflow,jhseu/tensorflow,Intel-tensorflow/tensorflow,renyi533/tensorflow,frreiss/tensorflow-fred,chemelnucfin/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,gautam1858/tensorflow,DavidNorman/tensorflow,chemelnucfin/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,jbedorf/tensorflow,freedomtan/tensorflow,aam-at/tensorflow,xzturn/tensorflow,sarvex/tensorflow,petewarden/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,DavidNorman/tensorflow,ppwwyyxx/tensorflow,arborh/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,ghchinoy/tensorflow,theflofly/tensorflow,annarev/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,kevin-coder/tensorflow-fork,yongtang/tensorflow,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,gautam1858/tensorflow,gunan/tensorflow,gunan/tensorflow,aldian/tensorflow | tensorflow/contrib/estimator/python/estimator/early_stopping.py | tensorflow/contrib/estimator/python/estimator/early_stopping.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""early_stopping python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.python.estimator import early_stopping
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
early_stopping.__all__ = [
s for s in dir(early_stopping) if not s.startswith('__')
]
from tensorflow_estimator.python.estimator.early_stopping import *
| # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""early_stopping python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.contrib.estimator.python.estimator import early_stopping
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
early_stopping.__all__ = [
s for s in dir(early_stopping) if not s.startswith('__')
]
from tensorflow_estimator.contrib.estimator.python.estimator.early_stopping import *
| apache-2.0 | Python |
1ee1d0daab4b8e123bc04996019fb12cc65b8888 | Add tISM SDB module (#36957) | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/sdb/tism.py | salt/sdb/tism.py | # -*- coding: utf-8 -*-
'''
tISM - the Immutalbe Secrets Manager SDB Module
:maintainer: tISM
:maturity: New
:platform: all
.. versionadded:: TBD
This module will decrypt PGP encrypted secrets against a tISM server.
.. code::
sdb://<profile>/<encrypted secret>
sdb://tism/hQEMAzJ+GfdAB3KqAQf9E3cyvrPEWR1sf1tMvH0nrJ0bZa9kDFLPxvtwAOqlRiNp0F7IpiiVRF+h+sW5Mb4ffB1TElMzQ+/G5ptd6CjmgBfBsuGeajWmvLEi4lC6/9v1rYGjjLeOCCcN4Dl5AHlxUUaSrxB8akTDvSAnPvGhtRTZqDlltl5UEHsyYXM8RaeCrBw5Or1yvC9Ctx2saVp3xmALQvyhzkUv5pTb1mH0I9Z7E0ian07ZUOD+pVacDAf1oQcPpqkeNVTQQ15EP0fDuvnW+a0vxeLhkbFLfnwqhqEsvFxVFLHVLcs2ffE5cceeOMtVo7DS9fCtkdZr5hR7a+86n4hdKfwDMFXiBwSIPMkmY980N/H30L/r50+CBkuI/u4M2pXDcMYsvvt4ajCbJn91qaQ7BDI=
A profile must be setup in the minion configuration or pillar. If you want to use sdb in a runner or pillar you must also place a profile in the master configuration.
.. code-block:: yaml
tism:
driver: tism
url: https://my.tismd:8080/decrypt
token: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhZG1pbiI6MSwiZXhwIjoxNTg1MTExNDYwLCJqdGkiOiI3NnA5cWNiMWdtdmw4Iiwia2V5cyI6WyJBTEwiXX0.RtAhG6Uorf5xnSf4Ya_GwJnoHkCsql4r1_hiOeDSLzo
'''
import logging
import json
import salt.utils.http as http
from salt.exceptions import SaltConfigurationError
log = logging.getLogger(__name__)
__virtualname__ = "tism"
def __virtual__():
'''
This module has no other system dependencies
'''
return __virtualname__
def get(key, service=None, profile=None): # pylint: disable=W0613
'''
Get a decrypted secret from the tISMd API
'''
if not profile.get('url') or not profile.get('token'):
raise SaltConfigurationError("url and/or token missing from the tism sdb profile")
request = {"token": profile['token'], "encsecret": key}
result = http.query(
profile['url'],
method='POST',
data=json.dumps(request),
)
decrypted = result.get('body')
if not decrypted:
log.warning('tism.get sdb decryption request failed with error {0}'.format(result.get('error', 'unknown')))
return "ERROR"+str(result.get('status', 'unknown'))
return decrypted
| apache-2.0 | Python |
|
8dad8cf8c83eba037b29d3243b29b985dc4004a1 | add setup.py | epage/telepathy-python,PabloCastellano/telepathy-python,epage/telepathy-python,max-posedon/telepathy-python,max-posedon/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,PabloCastellano/telepathy-python,detrout/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,detrout/telepathy-python | setup.py | setup.py | #!/usr/bin/python
from distutils.core import setup
setup(
name='telepathy-python',
version='0.0.1',
packages=['telepathy'],
)
| lgpl-2.1 | Python |
|
b5c2986ccf3c70b9cb52d0374c53bc8232719554 | Add dbm_metrics.py script where the AIS method will be stored | woozzu/pylearn2,kastnerkyle/pylearn2,chrish42/pylearn,jeremyfix/pylearn2,goodfeli/pylearn2,lisa-lab/pylearn2,lamblin/pylearn2,daemonmaker/pylearn2,Refefer/pylearn2,cosmoharrigan/pylearn2,ashhher3/pylearn2,fulmicoton/pylearn2,pkainz/pylearn2,fyffyt/pylearn2,lunyang/pylearn2,w1kke/pylearn2,cosmoharrigan/pylearn2,lamblin/pylearn2,CIFASIS/pylearn2,ashhher3/pylearn2,sandeepkbhat/pylearn2,KennethPierce/pylearnk,cosmoharrigan/pylearn2,kose-y/pylearn2,daemonmaker/pylearn2,se4u/pylearn2,aalmah/pylearn2,jamessergeant/pylearn2,shiquanwang/pylearn2,TNick/pylearn2,skearnes/pylearn2,pombredanne/pylearn2,ddboline/pylearn2,pombredanne/pylearn2,junbochen/pylearn2,JesseLivezey/pylearn2,shiquanwang/pylearn2,skearnes/pylearn2,sandeepkbhat/pylearn2,shiquanwang/pylearn2,w1kke/pylearn2,hyqneuron/pylearn2-maxsom,nouiz/pylearn2,daemonmaker/pylearn2,matrogers/pylearn2,fishcorn/pylearn2,hantek/pylearn2,aalmah/pylearn2,kose-y/pylearn2,mclaughlin6464/pylearn2,bartvm/pylearn2,fyffyt/pylearn2,JesseLivezey/pylearn2,TNick/pylearn2,caidongyun/pylearn2,fishcorn/pylearn2,w1kke/pylearn2,hantek/pylearn2,fyffyt/pylearn2,skearnes/pylearn2,kastnerkyle/pylearn2,lamblin/pylearn2,lunyang/pylearn2,fyffyt/pylearn2,CIFASIS/pylearn2,junbochen/pylearn2,nouiz/pylearn2,woozzu/pylearn2,hyqneuron/pylearn2-maxsom,matrogers/pylearn2,se4u/pylearn2,caidongyun/pylearn2,KennethPierce/pylearnk,woozzu/pylearn2,JesseLivezey/pylearn2,lancezlin/pylearn2,abergeron/pylearn2,kastnerkyle/pylearn2,lunyang/pylearn2,nouiz/pylearn2,abergeron/pylearn2,mclaughlin6464/pylearn2,pombredanne/pylearn2,hantek/pylearn2,se4u/pylearn2,pkainz/pylearn2,aalmah/pylearn2,Refefer/pylearn2,caidongyun/pylearn2,pkainz/pylearn2,JesseLivezey/plankton,msingh172/pylearn2,mclaughlin6464/pylearn2,TNick/pylearn2,chrish42/pylearn,goodfeli/pylearn2,kose-y/pylearn2,jeremyfix/pylearn2,KennethPierce/pylearnk,TNick/pylearn2,mkraemer67/pylearn2,shiquanwang/pylearn2,ddboline/pylearn2,lisa-lab/pylearn2,Refefer/pylearn2,alexjc/pylearn2,msingh172/pylearn2,matrogers/pylearn2,junbochen/pylearn2,bartvm/pylearn2,abergeron/pylearn2,jamessergeant/pylearn2,lunyang/pylearn2,pombredanne/pylearn2,junbochen/pylearn2,fishcorn/pylearn2,kastnerkyle/pylearn2,goodfeli/pylearn2,skearnes/pylearn2,bartvm/pylearn2,jamessergeant/pylearn2,fulmicoton/pylearn2,sandeepkbhat/pylearn2,lancezlin/pylearn2,mkraemer67/pylearn2,chrish42/pylearn,jeremyfix/pylearn2,theoryno3/pylearn2,alexjc/pylearn2,lisa-lab/pylearn2,abergeron/pylearn2,msingh172/pylearn2,hyqneuron/pylearn2-maxsom,JesseLivezey/plankton,mkraemer67/pylearn2,ashhher3/pylearn2,msingh172/pylearn2,hyqneuron/pylearn2-maxsom,lamblin/pylearn2,cosmoharrigan/pylearn2,matrogers/pylearn2,ddboline/pylearn2,ashhher3/pylearn2,theoryno3/pylearn2,chrish42/pylearn,woozzu/pylearn2,JesseLivezey/pylearn2,jamessergeant/pylearn2,ddboline/pylearn2,Refefer/pylearn2,fishcorn/pylearn2,hantek/pylearn2,lisa-lab/pylearn2,JesseLivezey/plankton,theoryno3/pylearn2,mkraemer67/pylearn2,nouiz/pylearn2,aalmah/pylearn2,w1kke/pylearn2,jeremyfix/pylearn2,theoryno3/pylearn2,lancezlin/pylearn2,fulmicoton/pylearn2,caidongyun/pylearn2,CIFASIS/pylearn2,kose-y/pylearn2,alexjc/pylearn2,sandeepkbhat/pylearn2,CIFASIS/pylearn2,daemonmaker/pylearn2,pkainz/pylearn2,alexjc/pylearn2,fulmicoton/pylearn2,lancezlin/pylearn2,KennethPierce/pylearnk,goodfeli/pylearn2,mclaughlin6464/pylearn2,bartvm/pylearn2,se4u/pylearn2,JesseLivezey/plankton | pylearn2/scripts/dbm/dbm_metrics.py | pylearn2/scripts/dbm/dbm_metrics.py | #!/usr/bin/env python
import argparse
if __name__ == '__main__':
# Argument parsing
parser = argparse.ArgumentParser()
parser.add_argument("metric", help="the desired metric",
choices=["ais"])
parser.add_argument("model_path", help="path to the pickled DBM model")
args = parser.parse_args()
metric = args.metric
model_path = args.model_path
| bsd-3-clause | Python |
|
a8b079b8be1e9559770dd0f701385b2361158e24 | Add tests_require to setup.py | mpenkov/smart_open,piskvorky/smart_open,RaRe-Technologies/smart_open,RaRe-Technologies/smart_open,mpenkov/smart_open | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Radim Rehurek <[email protected]>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
import io
import os
import sys
if sys.version_info < (2, 6):
raise ImportError("smart_open requires python >= 2.6")
# TODO add ez_setup?
from setuptools import setup, find_packages
def read(fname):
return io.open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8').read()
setup(
name = 'smart_open',
version = '1.3.4',
description = 'Utils for streaming large files (S3, HDFS, gzip, bz2...)',
long_description = read('README.rst'),
packages=find_packages(),
author = u'Radim Řehůřek',
author_email = '[email protected]',
maintainer = u'Radim Řehůřek',
maintainer_email = '[email protected]',
url = 'https://github.com/piskvorky/smart_open',
download_url = 'http://pypi.python.org/pypi/smart_open',
keywords = 'file streaming, s3, hdfs',
license = 'MIT',
platforms = 'any',
install_requires=[
'boto >= 2.32',
'bz2file',
'requests',
],
tests_require=[
'mock',
'moto',
'responses',
],
test_suite="smart_open.tests",
classifiers = [ # from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: System :: Distributed Computing',
'Topic :: Database :: Front-Ends',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Radim Rehurek <[email protected]>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
import io
import os
import sys
if sys.version_info < (2, 6):
raise ImportError("smart_open requires python >= 2.6")
# TODO add ez_setup?
from setuptools import setup, find_packages
def read(fname):
return io.open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8').read()
setup(
name = 'smart_open',
version = '1.3.4',
description = 'Utils for streaming large files (S3, HDFS, gzip, bz2...)',
long_description = read('README.rst'),
packages=find_packages(),
author = u'Radim Řehůřek',
author_email = '[email protected]',
maintainer = u'Radim Řehůřek',
maintainer_email = '[email protected]',
url = 'https://github.com/piskvorky/smart_open',
download_url = 'http://pypi.python.org/pypi/smart_open',
keywords = 'file streaming, s3, hdfs',
license = 'MIT',
platforms = 'any',
install_requires=[
'boto >= 2.32',
'bz2file',
'requests',
],
test_suite="smart_open.tests",
classifiers = [ # from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: System :: Distributed Computing',
'Topic :: Database :: Front-Ends',
],
)
| mit | Python |
c230fc69e2509c79190e53589457f161accd1626 | Change long_description in setup.py. | MnO2/rediscli,oguzy/mycli,evook/mycli,suzukaze/mycli,oguzy/mycli,j-bennet/mycli,evook/mycli,D-e-e-m-o/mycli,shoma/mycli,MnO2/rediscli,thanatoskira/mycli,jinstrive/mycli,brewneaux/mycli,danieljwest/mycli,thanatoskira/mycli,webwlsong/mycli,D-e-e-m-o/mycli,danieljwest/mycli,brewneaux/mycli,chenpingzhao/mycli,jinstrive/mycli,mdsrosa/mycli,martijnengler/mycli,ZuoGuocai/mycli,webwlsong/mycli,shoma/mycli,ZuoGuocai/mycli,chenpingzhao/mycli,suzukaze/mycli,martijnengler/mycli,j-bennet/mycli,mdsrosa/mycli | setup.py | setup.py | import re
import ast
from setuptools import setup, find_packages
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('mycli/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
description = 'CLI for MySQL Database. With auto-completion and syntax highlighting.'
setup(
name='mycli',
author='Amjith Ramanujam',
author_email='amjith[dot]r[at]gmail.com',
version=version,
license='LICENSE.txt',
url='http://mycli.net',
packages=find_packages(),
package_data={'mycli': ['myclirc', '../AUTHORS', '../SPONSORS']},
description=description,
long_description=description,
install_requires=[
'click >= 4.1',
'Pygments >= 2.0', # Pygments has to be Capitalcased. WTF?
'prompt_toolkit==0.45',
'PyMySQL >= 0.6.6',
'sqlparse == 0.1.14',
'configobj >= 5.0.6',
],
entry_points='''
[console_scripts]
mycli=mycli.main:cli
''',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: SQL',
'Topic :: Database',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| import re
import ast
from setuptools import setup, find_packages
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('mycli/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
description = 'CLI for MySQL Database. With auto-completion and syntax highlighting.'
setup(
name='mycli',
author='Amjith Ramanujam',
author_email='amjith[dot]r[at]gmail.com',
version=version,
license='LICENSE.txt',
url='http://mycli.net',
packages=find_packages(),
package_data={'mycli': ['myclirc', '../AUTHORS', '../SPONSORS']},
description=description,
long_description=open('README.md').read(),
install_requires=[
'click >= 4.1',
'Pygments >= 2.0', # Pygments has to be Capitalcased. WTF?
'prompt_toolkit==0.45',
'PyMySQL >= 0.6.6',
'sqlparse == 0.1.14',
'configobj >= 5.0.6',
],
entry_points='''
[console_scripts]
mycli=mycli.main:cli
''',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: SQL',
'Topic :: Database',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| bsd-3-clause | Python |
2a331f0165b2e3874243fcfecc3e3deab2760ff4 | Add python setup filie | bitevery/client-api-python-v1 | setup.py | setup.py | from setuptools import setup
setup(name='bitevery',
version='0.0.1.b2',
description='BitEvery Python API',
url='https://www.bitevery.com',
author='BitEvery',
author_email='[email protected]',
license='MIT',
packages=['bitevery'],
zip_safe=False) | mit | Python |
|
57bfd23957bdd535b5ae21ed1df3ff25dd75a8bd | Add setup.py | piotrekw/pirx | setup.py | setup.py | from setuptools import setup
setup(
name='pirx',
version='0.1',
author='Piotr Wasilewski',
author_email='[email protected]',
description='Django settings builder',
license='MIT',
keywords='django settings build builder',
url='https://github.com/piotrekw/pirx',
scripts=['scripts/pirx-build.py'],
packages=['pirx']
)
| mit | Python |
|
76a8834243cc70f3065b686dd09004f1dc3ffdb0 | Create rapideye_remover_bordas_catalogo.py | leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing,leandromet/Geoprocessamento---Geoprocessing | rapideye_remover_bordas_catalogo.py | rapideye_remover_bordas_catalogo.py | from osgeo import ogr
import os
from osgeo import osr
from qgis.core import *
shapefile = "C:/Users/pedro.mendes/Desktop/Brasil_00_2016.shp"
driver = ogr.GetDriverByName("ESRI Shapefile")
dataSource = driver.Open(shapefile, 0)
layer = dataSource.GetLayer()
proj=layer.GetSpatialRef()
outputMergefn = "C:/Users/pedro.mendes/Desktop/Brasil_01_2016.shp"
driverName = 'ESRI Shapefile'
geometryType = ogr.wkbPolygon
out_driver = ogr.GetDriverByName( driverName )
if os.path.exists(outputMergefn):
out_driver.DeleteDataSource(outputMergefn)
out_ds = out_driver.CreateDataSource(outputMergefn)
out_layer = out_ds.CreateLayer(outputMergefn, geom_type=geometryType, srs=proj)
juntaDefn=layer.GetLayerDefn()
juntaFeat=ogr.Geometry(3)
c=0
for feature in layer:
geom = feature.GetGeometryRef()
geom2 = geom.Difference(juntaFeat)
juntaFeat= juntaFeat.Union(geom)
out_feat = ogr.Feature(out_layer.GetLayerDefn())
out_feat.SetGeometry(geom2)
out_layer.CreateFeature(out_feat)
out_layer.SyncToDisk()
c+=1
#break
layer = None
dataSource=None
print "total de feicoes: %i " %( c)
| mit | Python |
|
737dadd2e447c9f03de80ea808e137dcc1206c9b | Create Nvidia_GPU_Temperature.py | jpsingleton/BlinkyTape_Python,Blinkinlabs/BlinkyTape_Python,railsagainstignorance/blinkytape | Nvidia_GPU_Temperature.py | Nvidia_GPU_Temperature.py | import time
from BlinkyTape import BlinkyTape
import subprocess
import os
import re
#bb = BlinkyTape('/dev/tty.usbmodemfa131')
bb = BlinkyTape('COM8')
while True:
output = subprocess.check_output(["C:\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe", "-a"], shell=True)
#os.popen('C:\\Program Files\NVIDIA Corporation\NVSMI\nvidia-smi.exe')
#output=os.popen("C:\\Program Files\\NVIDIA Corporation\\NVSMI\\nvidia-smi.exe").read()
#print("====" + str(output) + "=====")
temp = re.search("GPU Current.*",output).group()[30:33]
temp_baseline = 60
temp_multiplier = 5
color_temp = (int(temp) - temp_baseline ) * temp_multiplier
green = 100 - color_temp
red = 0 + color_temp
blue = 0
print "Current GPU Temp: %s RGB: %s %s %s" % (temp, red, green, blue)
for x in range(60):
bb.sendPixel(red, green, blue)
bb.show()
#time.sleep(1)
#for x in range(60):
# bb.sendPixel(100, 0, 0)
#bb.show()
time.sleep(1)
| mit | Python |
|
b39af3af2104875919577f769701e7bde73967fd | clean file initialized | schiob/MusGen | genetic_music.py | genetic_music.py | print('hola chio') | mit | Python |
|
fbc780c7beb94d73b2a4ea110e733f8c87763741 | Add location name lookup for ajax_select. | umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator,umitproject/openmonitor-aggregator | geoip/lookups.py | geoip/lookups.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##
## Author: Orcun Avsar <[email protected]>
##
## Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License as
## published by the Free Software Foundation, either version 3 of the
## License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
"""Module for ajax autocomplete lookups for locations.
"""
from ajax_select import LookupChannel
from geoip.models import Location
from geoip.models import LocationNamesAggregation
class LocationLookup(LookupChannel):
model = Location
def get_query(self,q,request):
words = q.replace(',',' ').replace('-', ' ').split()
query = Location.objects.all()
queries = []
for word in words:
query = Location.objects.filter(name__icontains=word)[:20]
queries.append(query)
entities = []
for query in queries:
for entity in query:
entities.append(entity)
return entities
def format_match(self,obj):
obj.name | agpl-3.0 | Python |
|
af3ba846a8074132c64568c420ecb9b6ade9c6ea | Work on defining RegEx to find and format molecular geometries in Gaussian output files. | thompcinnamon/QM-calc-scripts | geomRegexTest.py | geomRegexTest.py | __author__ = 'Thomas Heavey'
import re
filename = "testg.out"
def findgeoms(filename):
"""A function that takes a file name and returns a list of
geometries."""
relevantelem = [1,3,4,5]
xyzformat = '{:>2} {: f} {: f} {: f}'
geomregex = re.compile(
r'(?:Standard orientation)' # non-capturing (nc) start of geometry
r'(?:.+?)' # nc geometry header
r'((?:(?:\s+\d+\s+)' # nc atom number
r'(\d+\s+)' # (capturing) atomic number
r'(?:\d+\s+)' # nc atomic type
r'(-?\d+\.\d+\s*){3,3}' # 3 cartesian coordinates (x,y,z)
r')+)' # repeat for at least one atom
r'(?:-)' # nc end at line of dashes
, re.DOTALL)
with open(filename, 'r') as file:
geoms = geomregex.search(file.read())
print(geoms.group(1))
mlgeoms = geoms.group(1)
for line in mlgeoms.split('\n'):
# Ignore blank lines:
if len(line) < 2:
continue
xyzelemstring = [line.split()[i] for i in relevantelem]
xyzelemnum = [float(i) for i in xyzelemstring]
xyzelemnum[0] = int(xyzelemstring[0])
print(xyzformat.format(*xyzelemnum))
findgeoms(filename) | apache-2.0 | Python |
|
df9a6ab91eedfe91343ceb103156fe08cd965614 | test script form new Keras 2x API model config | SummaLabs/DLS,SummaLabs/DLS,SummaLabs/DLS,SummaLabs/DLS | app/backend-test/keras_2x_api/run01_print_keras_model_json.py | app/backend-test/keras_2x_api/run01_print_keras_model_json.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'ar'
if __name__ == '__main__':
pass | mit | Python |
|
1498e786201c1c1e2127da7d23db142559ad68a8 | Add support for Assembla | foauth/oauth-proxy,foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | services/assembla.py | services/assembla.py | import foauth.providers
class Assembla(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://www.assembla.com/'
docs_url = 'http://api-doc.assembla.com/content/api_reference.html'
category = 'Code'
# URLs to interact with the API
authorize_url = 'https://api.assembla.com/authorization'
access_token_url = 'https://api.assembla.com/token'
api_domain = 'api.assembla.com'
available_permissions = [
(None, 'read, write and manage your projects'),
]
def __init__(self, *args, **kwargs):
super(Assembla, self).__init__(*args, **kwargs)
self.auth = (self.client_id, self.client_secret)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/v1/user')
return unicode(r.json()[u'id'])
| bsd-3-clause | Python |
|
10c7e718488a6daad5bcea97e00aece24179168e | Add regression test for bug #1937084 | openstack/nova,mahak/nova,openstack/nova,openstack/nova,mahak/nova,mahak/nova | nova/tests/functional/regressions/test_bug_1937084.py | nova/tests/functional/regressions/test_bug_1937084.py | # Copyright 2021, Red Hat, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import context
from nova import exception
from nova import objects
from nova.tests.functional.api import client
from nova.tests.functional import integrated_helpers
class TestDetachAttachmentNotFound(integrated_helpers._IntegratedTestBase):
"""Regression test for the Nova portion of bug 1937084
This regression test asserts the behaviour of Nova when Cinder raises a 404
during a DELETE request against an attachment.
In the context of bug 1937084 this could happen if a caller attempted to
DELETE a volume attachment through Nova's os-volume_attachments API and
then made a separate DELETE request against the underlying volume in Cinder
when it was marked as available.
"""
microversion = 'latest'
def test_delete_attachment_volume_not_found(self):
# Create a server and attach a single volume
server = self._create_server(networks='none')
server_id = server['id']
self.api.post_server_volume(
server_id,
{
'volumeAttachment': {
'volumeId': self.cinder.IMAGE_BACKED_VOL
}
}
)
self._wait_for_volume_attach(server_id, self.cinder.IMAGE_BACKED_VOL)
# Assert that we have an active bdm for the attachment before we detach
bdm = objects.BlockDeviceMapping.get_by_volume_and_instance(
context.get_admin_context(),
self.cinder.IMAGE_BACKED_VOL,
server_id)
with mock.patch(
'nova.volume.cinder.API.attachment_delete',
side_effect=exception.VolumeAttachmentNotFound(
attachment_id=bdm.attachment_id)
) as (
mock_attachment_delete
):
# DELETE /servers/{server_id}/os-volume_attachments/{volume_id} is
# async but as we are using CastAsCall it's sync in our func tests
ex = self.assertRaises(
client.OpenStackApiException,
self.api.delete_server_volume,
server_id,
self.cinder.IMAGE_BACKED_VOL)
self.assertEqual(500, ex.response.status_code)
mock_attachment_delete.assert_called_once()
# FIXME(lyarwood): This is the Nova portion of bug #1937084 where
# the original caller hasn't polled os-volume_attachments and sent
# a seperate DELETE request to c-api for the volume as soon as it
# has become available but before n-cpu has finished the original
# call. This leads to the sync request to c-api to delete the
# attachment returning a 404 that Nova translates into
# VolumeAttachmentNotFound.
#
# Replace this with the following once the exception is ignored:
#
# self.assertRaises(
# exception.VolumeBDMNotFound,
# objects.BlockDeviceMapping.get_by_volume_and_instance,
# context.get_admin_context(),
# self.cinder.IMAGE_BACKED_VOL,
# server_id)
#
bdm = objects.BlockDeviceMapping.get_by_volume_and_instance(
context.get_admin_context(),
self.cinder.IMAGE_BACKED_VOL,
server_id)
| apache-2.0 | Python |
|
0fc46c92f8682879591d9fc473be34116c9106be | add migration | dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq | custom/ilsgateway/migrations/0010_auto_20160830_1923.py | custom/ilsgateway/migrations/0010_auto_20160830_1923.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('ilsgateway', '0009_auto_20160413_1311'),
]
operations = [
migrations.AlterField(
model_name='deliverygroupreport',
name='report_date',
field=models.DateTimeField(default=datetime.datetime.utcnow),
preserve_default=True,
),
migrations.AlterField(
model_name='slabconfig',
name='sql_location',
field=models.OneToOneField(to='locations.SQLLocation'),
preserve_default=True,
),
migrations.AlterField(
model_name='supplypointstatus',
name='status_type',
field=models.CharField(max_length=50, choices=[(b'rr_fac', b'rr_fac'), (b'trans_fac', b'trans_fac'), (b'soh_fac', b'soh_fac'), (b'super_fac', b'super_fac'), (b'rr_dist', b'rr_dist'), (b'del_del', b'del_del'), (b'la_fac', b'la_fac'), (b'del_dist', b'del_dist'), (b'del_fac', b'del_fac')]),
preserve_default=True,
),
]
| bsd-3-clause | Python |
|
3dfa8bb2d428f86c5156a974e84e0756cc6d792f | Create headache.py | scdiniz/headache | headache.py | headache.py | # This is... Headache! One more very simple Brainfuck interpreter! #
# by Sidnei Diniz - [email protected] - http://bitworm.com.br #
# GitHub: http://github.com/scdiniz/headache
# Date: 29-12-2015 #
import sys
# Interpreter kernel
class Headache():
# Constructor
def __init__(self):
self.cells = bytearray([0] * 30000)
self.commands = []
# Load code file
def load(self, file):
code = open(file, "r")
for line in code:
for c in line:
if c in ("<", ">", "+", "-", ".", ",", "[", "]"):
self.commands.append(c)
code.close()
# Verify loop for errors
def validateLoop(self):
countStart = 0
countEnd = 0
for cmd in self.commands:
if cmd == "[":
countStart += 1
if cmd == "]":
countEnd += 1
return countStart == countEnd
# Make loop dictionary
def setLoopDict(self):
if self.validateLoop():
self.loopDict = {}
tmp = []
i = 0
while i < len(self.commands):
if self.commands[i] == "[":
tmp.append(i)
if self.commands[i] == "]":
if len(tmp) > 0:
value = tmp.pop()
self.loopDict[value] = i
self.loopDict[i] = value
else:
return False
i += 1
return True
else:
return False
# Run interpreter
def run(self, file):
self.load(file)
# Make loop dictionary
if self.setLoopDict():
cell = 0
i = 0
# Execute command by command
while i < len(self.commands):
if self.commands[i] == "<":
cell -= 1
if self.commands[i] == ">":
cell += 1
if self.commands[i] == "+":
if self.cells[cell] < 255:
self.cells[cell] += 1
else:
self.cells[cell] = 0
if self.commands[i] == "-":
if self.cells[cell] > 0:
self.cells[cell] -= 1
else:
self.cells[cell] = 255
if self.commands[i] == "]":
if self.cells[cell] > 0:
i = self.loopDict[i]
if self.commands[i] == "[":
if self.cells[cell] == 0:
i = self.loopDict[i]
if self.commands[i] == ",":
self.cells[cell] = ord(input()[0])#ord(input()[0])
if self.commands[i] == ".":
try:
print(chr(self.cells[cell]), end = "", flush = True)
except:
None
i += 1
else:
# Error on loop dictionary
print("My head hurts! Verify your loop instructions '[' ']'")
# Start
count = 0
file = ""
# Reading sys arguments
for arg in sys.argv:
count += 1
if count == 2:
file = arg
break
# Verify if file name was insert
if count < 2:
print("My head hurts! Come on, tell me brainfuck file name!")
else:
# Launch interpreter
Headache().run(file)
| mit | Python |
|
c2b69a51faac56689edc88e747a00b60cf08cc04 | Add default ordering of progress outcome groups | uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers | dthm4kaiako/poet/migrations/0003_auto_20190731_1912.py | dthm4kaiako/poet/migrations/0003_auto_20190731_1912.py | # Generated by Django 2.1.5 on 2019-07-31 07:12
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('poet', '0002_progressoutcomegroup'),
]
operations = [
migrations.AlterModelOptions(
name='progressoutcomegroup',
options={'ordering': ['name']},
),
]
| mit | Python |
|
32a79573b38c6d2ea7f5b81363610a5d9332ed4e | Add python script to parse JSON output | leo27lijiang/app-monitor,leo27lijiang/app-monitor | src/main/resources/jsonformat.py | src/main/resources/jsonformat.py | #!/usr/bin/python2.7
import json
import socket
import sys
def readOutput(host, port):
data = None
s = None
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, int(port)))
except socket.error as msg:
s = None
print msg
if s is None:
return None
try:
data = s.recv(1024)
except socket.error as msg:
print msg
if s is not None:
s.close
return data
def parseData(jsonData, metric, key):
data = json.loads(jsonData)
for x in data:
if not 'name' in x:
continue
if x['name'] == metric:
if not 'datapoint' in x:
continue
monitorData = x['datapoint']
for k in monitorData:
if k == key:
return monitorData[k]
return 'Metric [%s:%s] not found'%(metric,key)
if __name__ == '__main__':
if len(sys.argv) < 4:
print 'Usage python jsonformat.py host port metric:key ...'
print 'The output like:'
print '[value1,value2,...]'
else:
jsonData = readOutput(sys.argv[1], sys.argv[2])
if jsonData is None:
print 'Read JSON data error'
else:
l = []
for x in sys.argv[3:]:
args = x.split(':')
if len(args) != 2:
continue
value = parseData(jsonData, args[0], args[1])
l.append(value)
print l
| apache-2.0 | Python |
|
699469342179fdc4319b5f39ea201015860ef09d | Add migration for CI fix | Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data | infrastructure/migrations/0020_auto_20210922_0929.py | infrastructure/migrations/0020_auto_20210922_0929.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-09-22 07:29
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('infrastructure', '0019_project_latest_implementation_year'),
]
operations = [
migrations.AlterField(
model_name='project',
name='latest_implementation_year',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear'),
),
]
| mit | Python |
|
09d815c6b53c74ae9a2f3831a2eec9c2b266eca7 | add the prototype. | Tynox/SimpleXLS2XML | simple_xls_to_xml.py | simple_xls_to_xml.py | # encoding:utf-8
import codecs
import xlrd
import xml.dom.minidom
filter_words = None
def xlsRead():
global filter_words
data = xlrd.open_workbook("filter.xlsx")
table = data.sheets()[0] # 获取第一个sheet
filter_words = table.col_values(0)
def createXML():
if filter_words is None:
return
impl = xml.dom.minidom.getDOMImplementation()
dom = impl.createDocument(None, "filters", None)
root = dom.documentElement
for f in filter_words:
filter = dom.createElement("filter")
filter.setAttribute("word", f)
root.appendChild(filter)
out = codecs.open("filters.xml", "w", "utf-8")
dom.writexml(out, addindent=" ", newl="\n", encoding="utf-8")
out.close()
if __name__ == "__main__":
xlsRead()
createXML() | unlicense | Python |
|
6a268c69fced2a5b9e97086fa2a9089837376db4 | add subfolder | IIS-DIRL/Python-Tools | keras/metrics/empty.py | keras/metrics/empty.py | #
| apache-2.0 | Python |
|
9524b824e5edb6e88c776d3420b618b6a2d1b7fa | Add files via upload | fsxfreak/esys-pbi,fsxfreak/esys-pbi,fsxfreak/esys-pbi | src/graph_realtimeEdit.py | src/graph_realtimeEdit.py | from pylsl import StreamInlet, resolve_byprop, local_clock, TimeoutError
from pylsl import StreamInfo,StreamOutlet
from random import random as rand
import collections
import numpy as np
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui,QtCore
import time
import signal, sys, os, time, csv
import serial
import threading
win = pg.GraphicsWindow()
graph = None
class Graph(object):
def __init__(self,size=(600,350)):
streams = resolve_byprop('name', 'bci', timeout=2.5)
try:
self.inlet = StreamInlet(streams[0])
except IndexError:
raise ValueError('Make sure stream name=bci is opened first.')
self.running = True
self.frequency = 250.0
self.sampleinterval = (1/self.frequency)
self.timewindow = 10
self._bufsize = int(self.timewindow/self.sampleinterval)
self.dataBuffer = collections.deque([0.0]*self._bufsize,self._bufsize)
self.timeBuffer = collections.deque([0.0]*self._bufsize,self._bufsize)
self.x = np.zeros(self._bufsize)
self.y = np.zeros(self._bufsize)
self.app = QtGui.QApplication([])
self.plt = pg.plot(title='Dynamic Plotting with PyQtGraph')
self.plt.resize(*size)
self.plt.showGrid(x=True,y=True)
self.plt.setLabel('left','amplitude','V')
self.plt.setLabel('bottom','time','s')
self.curve = self.plt.plot(self.x,self.y,pen=(255,0,0))
def _graph_lsl(self):
while self.running:
sample, timestamp = self.inlet.pull_sample(timeout=5)
# time correction to sync to local_clock()
try:
if timestamp is not None and sample is not None:
timestamp = timestamp + self.inlet.time_correction(timeout=5)
# TODO Place graphing stuff here
self.dataBuffer.append(sample[0])
self.y[:] = self.dataBuffer
self.timeBuffer.append(timestamp)
self.x[:] = self.timeBuffer
# added
self.sampleNum = self.x
self.timestampIndex = self.y
self.sampleNum = np.roll(self.sampleNum, 1) # scroll data
self.timestampIndex = np.roll(self.timestampIndex, 1)
self.curve.setData(self.sampleNum, self.timestampIndex) # re-plot
self.app.processEvents()
print(sample, timestamp)
except TimeoutError:
pass
print('closing graphing utility')
self.inlet.close_stream()
def start(self):
self.lsl_thread = threading.Thread(target=self._graph_lsl)
self.lsl_thread.start()
def stop(self):
self.running = False
self.lsl_thread.join(5)
# Place any graphing termination or cleanup here
def load(queue):
global graph
graph = Graph()
print('init graph')
def randomData():
info = StreamInfo('bci','randomData',1,150)
outlet = StreamOutlet(info)
print ('now sending data')
while True:
sample = [rand()]
outlet.push_sample(sample)
time.sleep(1)
def start():
graph.start()
graph.app.exec_()
def stop():
graph.stop()
print('Stopping graphing.')
os._exit(0) # dirty, but it's ok because everything is already cleaned up
def sigint_handler(signal, frame):
stop()
def sigterm_handler(signal, frame):
stop()
def main():
signal.signal(signal.SIGINT, sigint_handler)
signal.signal(signal.SIGTERM, sigterm_handler)
data_stream = threading.Thread(target=randomData)
data_stream.start()
load(queue=None)
start()
try:
signal.pause()
except AttributeError:
while True:
time.sleep(1)
stop()
def begin(queue, event=None):
signal.signal(signal.SIGINT, sigint_handler)
signal.signal(signal.SIGTERM, sigterm_handler)
load(queue)
start()
try:
while True:
signal.pause()
except AttributeError:
# signal.pause() not implemented on windows
while not event.is_set():
time.sleep(1)
print('event was set, stopping')
stop()
if __name__ == '__main__':
main()
| mit | Python |
|
1d1712259a1e6e23b7a6a5541f70573b05619e99 | Create stock.py | glovebx/web_tree_image_order_line,OdooCommunityWidgets/web_tree_image_order_line,OdooCommunityWidgets/web_tree_image_order_line,AlejandroCatalina/web_tree_image_order_line,AlejandroCatalina/web_tree_image_order_line,glovebx/web_tree_image_order_line | stock.py | stock.py | from openerp.osv import fields, osv
class stock_move(osv.Model):
_name = 'stock.move'
_inherit = 'stock.move'
def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False,
loc_dest_id=False, partner_id=False):
res_prod = super(stock_move, self).onchange_product_id(cr, uid, ids, prod_id, loc_id,loc_dest_id, partner_id)
prod_obj = self.pool.get('product.product')
obj = prod_obj.browse(cr, uid, prod_id)
res_prod['value'].update({'image_small': obj.image_small})
return res_prod
_columns = {
'image_small' : fields.binary('Product Image'),
}
stock_move()
class sale_order_line(osv.Model):
_name = 'sale.order.line'
_inherit = 'sale.order.line'
_columns = {
'image_small' : fields.binary('Product Image'),
}
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False,image_small=False, context=None):
context = context or {}
res = super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty=qty,
uom=uom, qty_uos=qty_uos, uos=uos, name=name, partner_id=partner_id,
lang=lang, update_tax=update_tax, date_order=date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
product_obj = self.pool.get('product.product')
product_obj = product_obj.browse(cr, uid, product, context=context)
res['value'].update({'image_small': product_obj.image_small or False})
return res
sale_order_line()
class sale_order(osv.Model):
_name = 'sale.order'
_inherit = 'sale.order'
def _prepare_order_line_move(self, cr, uid, order, line, picking_id, date_planned, context=None):
res = super(sale_order, self)._prepare_order_line_move(cr, uid, order=order, line=line, picking_id=picking_id, date_planned=date_planned, context=context)
res['image_small'] = line.image_small
return res
sale_order()
| mit | Python |
|
ed09ca11fc3586c9782103269b12240ed6b27911 | complete and tested juliaset, HW4 | naslash/juliasets | juliaset.py | juliaset.py | class JuliaSet(object):
def set_plane(self, _d):
self._d=_d
self._complexplane=[]
x=-2
y=-2
while x<=2:
while y<=2:
self._complexplane.append(complex(x,y))
y+=_d
x+=_d
y=-2
return self._complexplane
def __init__(self, c, n=100):
self.c = c
self.n = n
self._d=0.001
self._complexplane=[]#self.set_plane(self._d)
def juliamap(self, z):
return ((z**2)+self.c)
def iterate(self, z):
m = 0
while True:
m+=1
z=self.juliamap(z)
if abs(z)>2:
return m
elif m>=self.n:
return 0
def set_spacing(self, d):
self._d = d
self._complexplane=self.set_plane(self._d)
def generate(self):
self.set = [self.iterate(z) for z in self._complexplane]
return self.set
| mit | Python |
|
8282cca05b784bb0966ba8246900627286c5d98c | Use invoke as build tool | davidbgk/udata,davidbgk/udata,opendatateam/udata,etalab/udata,jphnoel/udata,etalab/udata,grouan/udata,etalab/udata,grouan/udata,davidbgk/udata,jphnoel/udata,opendatateam/udata,jphnoel/udata,grouan/udata,opendatateam/udata | tasks.py | tasks.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from os.path import join, abspath, dirname, exists
from invoke import run, task
ROOT = abspath(join(dirname(__file__)))
I18N_DOMAIN = 'udata-admin'
def green(text):
return '\033[1;32m{0}\033[0;m'.format(text)
def red(text):
return '\033[1;31m{0}\033[0;m'.format(text)
def cyan(text):
return '\033[1;36m{0}\033[0;m'.format(text)
def lrun(command, *args, **kwargs):
run('cd {0} && {1}'.format(ROOT, command), *args, **kwargs)
def nrun(command, *args, **kwargs):
lrun('node_modules/.bin/{0}'.format(command), *args, **kwargs)
@task
def clean(bower=False, node=False):
'''Cleanup all build artifacts'''
patterns = [
'build', 'dist', 'cover', 'docs/_build',
'**/*.pyc', '*.egg-info', '.tox'
]
if bower:
patterns.append('udata/static/bower')
if node:
patterns.append('node_modules')
for pattern in patterns:
print('Removing {0}'.format(pattern))
run('cd {0} && rm -rf {1}'.format(ROOT, pattern))
@task
def test():
'''Run tests suite'''
run('cd {0} && nosetests --rednose --force-color udata'.format(ROOT), pty=True)
@task
def cover():
'''Run tests suite with coverage'''
run('cd {0} && nosetests --rednose --force-color \
--with-coverage --cover-html --cover-package=udata'.format(ROOT), pty=True)
@task
def doc():
'''Build the documentation'''
run('cd {0}/doc && make html'.format(ROOT), pty=True)
@task
def qa():
'''Run a quality report'''
run('flake8 {0}/udata'.format(ROOT))
@task
def serve():
run('cd {0} && python manage.py serve -d -r'.format(ROOT), pty=True)
@task
def work(loglevel='info'):
run('celery -A udata.worker worker --purge --autoreload -l %s' % loglevel)
@task
def beat(loglevel='info'):
run('celery -A udata.worker beat -l %s' % loglevel)
@task
def i18n():
run('python setup.py extract_messages')
run('python setup.py update_catalog')
run('udata i18njs -d udata udata/static')
@task
def i18nc():
run('cd {0} && python setup.py compile_catalog'.format(ROOT))
@task
def build():
print(cyan('Compiling translations'))
lrun('python setup.py compile_catalog')
@task(build)
def dist():
'''Package for distribution'''
print(cyan('Building a distribuable package'))
lrun('python setup.py bdist_wheel', pty=True)
| agpl-3.0 | Python |
|
c63144242d9cf2ecf02d58eb9a93cfe426acc6dc | Add script to send unregister user emails | laurenrevere/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,leb2dg/osf.io,crcresearch/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,chennan47/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,sloria/osf.io,baylee-d/osf.io,cslzchen/osf.io,hmoco/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,hmoco/osf.io,erinspace/osf.io,icereval/osf.io,cwisecarver/osf.io,icereval/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,erinspace/osf.io,chrisseto/osf.io,sloria/osf.io,brianjgeiger/osf.io,chennan47/osf.io,adlius/osf.io,crcresearch/osf.io,mattclark/osf.io,leb2dg/osf.io,pattisdr/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,felliott/osf.io,caneruguz/osf.io,hmoco/osf.io,cslzchen/osf.io,felliott/osf.io,baylee-d/osf.io,adlius/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,aaxelb/osf.io,mattclark/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,cslzchen/osf.io,aaxelb/osf.io,adlius/osf.io,caseyrollins/osf.io,chrisseto/osf.io,pattisdr/osf.io,Nesiehr/osf.io,binoculars/osf.io,erinspace/osf.io,caseyrollins/osf.io,laurenrevere/osf.io,mfraezz/osf.io,chennan47/osf.io,TomBaxter/osf.io,aaxelb/osf.io,caneruguz/osf.io,baylee-d/osf.io,saradbowman/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,adlius/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,sloria/osf.io,pattisdr/osf.io,TomBaxter/osf.io,felliott/osf.io,binoculars/osf.io,crcresearch/osf.io,mattclark/osf.io | scripts/send_preprint_unreg_contributor_emails.py | scripts/send_preprint_unreg_contributor_emails.py | # -*- coding: utf-8 -*-
"""Sends an unregistered user claim email for preprints created after 2017-03-14. A hotfix was made on that
date which caused unregistered user claim emails to not be sent. The regression was fixed on 2017-05-05. This
sends the emails that should have been sent during that time period.
NOTE: This script should only be run ONCE.
"""
import sys
import logging
import datetime as dt
import pytz
from framework.auth import Auth
from website.app import init_app
init_app(routes=False)
from website.project import signals as project_signals
from scripts import utils as script_utils
from website.project.views import contributor # flake8: noqa (set up listeners)
from osf.models import PreprintService
logger = logging.getLogger(__name__)
logging.getLogger('website.mails.mails').setLevel(logging.CRITICAL)
# datetime at which https://github.com/CenterForOpenScience/osf.io/commit/568413a77cc51511a0f7afe081a218676a36ebb6 was committed
START_DATETIME = dt.datetime(2017, 3, 14, 19, 10, tzinfo=pytz.utc)
# datetime at which https://github.com/CenterForOpenScience/osf.io/commit/38513916bb9584eb723c46e35553dc6d2c267e1a was deployed
END_DATETIME = dt.datetime(2017, 5, 5, 5, 48, tzinfo=pytz.utc)
def main():
dry_run = '--dry' in sys.argv
if not dry:
# If we're not running in dry mode log everything to a file
script_utils.add_file_logger(logger, __file__)
count = 0
preprints = PreprintService.objects.filter(
is_published=True,
date_published__gte=START_DATETIME,
date_published__lte=END_DATETIME
).order_by('date_published').select_related('node', 'node__creator')
for preprint in preprints:
auth = Auth(preprint.node.creator)
for author in preprint.node.contributors.filter(is_active=False):
assert not author.is_registered
logger.info('Sending email to unregistered User {} on PreprintService {}'.format(author._id, preprint._id))
if not dry_run:
project_signals.contributor_added.send(
preprint.node,
contributor=author,
auth=auth,
email_template='preprint'
)
count += 1
logger.info('Sent an email to {} unregistered users'.format(count))
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
fcac525d3f974c7d4a1e90c1adc444c6d6e72018 | Add sed executor #123 | DMOJ/judge,DMOJ/judge,DMOJ/judge | executors/SED.py | executors/SED.py |
from .base_executor import ScriptExecutor
from judgeenv import env
class Executor(ScriptExecutor):
ext = '.sed'
name = 'SED'
command = env['runtime'].get('sed')
test_program = '''s/.*/echo: Hello, World!/
q'''
fs = ['.*\.(so|sed)', '/dev/urandom$', '/proc/self/maps$', '/proc/filesystems$', '/+lib/charset.alias$']
syscalls = ['getgroups32', 'statfs64']
def get_cmdline(self):
return [self.get_command(), '-f', self._code]
initialize = Executor.initialize
| agpl-3.0 | Python |
|
1f52ef331a3529fe0f8b1ad5528d4d5cdd5d0b7a | add mnist deep auto like hinton's | jasonwbw/EffictiveRBM | rbm/autoencoder/mnist_deep_auto.py | rbm/autoencoder/mnist_deep_auto.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Author : @Jason_wbw
"""
This program pertrains a deep autoencoder for MNIST dataset
You cat set the maxinum number of epochs for pertraining each layer
and you can set the architectrue of the multiplayer nets.
"""
from converter import Converter
from rbm import RBM, RBMLinear
import pickle
class MNISTDeepAuto(object):
def __init__(self, batch_num = 100):
self._load_data()
def _load_data(self):
print "begin converting data into memory"
self.converter = Converter()
print "converting end\n"
def train(self):
print "train rbm level 1"
rbm = RBM(self.converter.dimensionality, 1000)
rbm.train(self.converter.train_images, max_epochs = 10, batch = 100)
hidden_probs1 = rbm.hidden_probs
self.pickle_dumps(rbm.weights, 'l1_w.pkl')
self.pickle_dumps(rbm.hidden_bias, 'l1_hb.pkl')
self.pickle_dumps(rbm.visible_bias, 'l1_vb.pkl')
del rbm
print "train rbm level 1 end\n"
print "train rbm level 2"
rbm_l2 = RBM(1000, 500)
rbm_l2.train(hidden_probs1, max_epochs = 10, batch = 100)
hidden_probs2 = rbm_l2.hidden_probs
self.pickle_dumps(rbm_l2.weights, 'l2_w.pkl')
self.pickle_dumps(rbm_l2.hidden_bias, 'l2_hb.pkl')
self.pickle_dumps(rbm_l2.visible_bias, 'l2_vb.pkl')
del rbm_l2
print "train rbm level 2 end\n"
print "train rbm level 3"
rbm_l3 = RBM(500, 250)
rbm_l3.train(hidden_probs2, max_epochs = 10, batch = 100)
hidden_probs3 = rbm_l3.hidden_probs
self.pickle_dumps(rbm_l3.weights, 'l3_w.pkl')
self.pickle_dumps(rbm_l3.hidden_bias, 'l3_hb.pkl')
self.pickle_dumps(rbm_l3.visible_bias, 'l3_vb.pkl')
del rbm_l3
print "train rbm level 3 end\n"
print "train rbm level 4"
rbm_l4 = RBMLinear(250, 30)
rbm_l4.train(hidden_probs3, max_epochs = 10, batch = 100)
hidden_top = rbm_l4.hidden_probs
self.pickle_dumps(rbm_l4.weights, 'l4_w.pkl')
self.pickle_dumps(rbm_l4.hidden_bias, 'l4_hb.pkl')
self.pickle_dumps(rbm_l4.visible_bias, 'l4_vb.pkl')
del rbm_l4
print "train rbm level 4 end\n"
def pickle_dumps(self, obj, filename):
f = open(filename, 'w')
pickle.dump(obj, f)
f.close()
if __name__ == '__main__':
auto = MNISTDeepAuto()
auto.train() | mit | Python |
|
84cf95cde942d91f53959fea4151847902a69d14 | Add a cleanup script. | harvitronix/rl-rc-car | rl-rc-car/cleanup.py | rl-rc-car/cleanup.py | from rccar import RCCar
car = RCCar()
car.cleanup_gpio()
| mit | Python |
|
b2741a8316ea1ffbf9e88a9fb883ef9e2507be42 | Upgrade libchromiuncontent to 3245ef8 | greyhwndz/electron,bbondy/electron,pandoraui/electron,ervinb/electron,ervinb/electron,jcblw/electron,shaundunne/electron,shockone/electron,matiasinsaurralde/electron,rhencke/electron,aaron-goshine/electron,Jonekee/electron,wan-qy/electron,carsonmcdonald/electron,howmuchcomputer/electron,Rokt33r/electron,stevemao/electron,mhkeller/electron,faizalpribadi/electron,sshiting/electron,nicholasess/electron,trigrass2/electron,felixrieseberg/electron,twolfson/electron,fffej/electron,deed02392/electron,iftekeriba/electron,shennushi/electron,gerhardberger/electron,yan-foto/electron,greyhwndz/electron,brave/electron,trigrass2/electron,mattotodd/electron,stevekinney/electron,thomsonreuters/electron,sshiting/electron,kcrt/electron,smczk/electron,gabrielPeart/electron,jhen0409/electron,simongregory/electron,the-ress/electron,egoist/electron,eric-seekas/electron,vaginessa/electron,benweissmann/electron,simongregory/electron,miniak/electron,jonatasfreitasv/electron,kikong/electron,timruffles/electron,Jacobichou/electron,jsutcodes/electron,davazp/electron,Gerhut/electron,minggo/electron,Evercoder/electron,faizalpribadi/electron,brave/electron,thompsonemerson/electron,darwin/electron,jiaz/electron,sshiting/electron,RobertJGabriel/electron,shaundunne/electron,mirrh/electron,gamedevsam/electron,subblue/electron,beni55/electron,vipulroxx/electron,gstack/infinium-shell,MaxWhere/electron,mrwizard82d1/electron,xiruibing/electron,ankitaggarwal011/electron,sky7sea/electron,saronwei/electron,oiledCode/electron,bruce/electron,micalan/electron,Evercoder/electron,Rokt33r/electron,GoooIce/electron,trankmichael/electron,rreimann/electron,gbn972/electron,xiruibing/electron,joneit/electron,gabrielPeart/electron,miniak/electron,Neron-X5/electron,beni55/electron,IonicaBizauKitchen/electron,preco21/electron,pandoraui/electron,coderhaoxin/electron,matiasinsaurralde/electron,RIAEvangelist/electron,thomsonreuters/electron,stevekinney/electron,electron/electron,vHanda/electron,jsutcodes/electron,iftekeriba/electron,lrlna/electron,synaptek/electron,zhakui/electron,biblerule/UMCTelnetHub,tylergibson/electron,jacksondc/electron,SufianHassan/electron,cos2004/electron,cqqccqc/electron,jannishuebl/electron,renaesop/electron,pirafrank/electron,neutrous/electron,vaginessa/electron,soulteary/electron,noikiy/electron,thingsinjars/electron,icattlecoder/electron,JussMee15/electron,nicobot/electron,synaptek/electron,shiftkey/electron,dongjoon-hyun/electron,miniak/electron,jhen0409/electron,robinvandernoord/electron,trigrass2/electron,DivyaKMenon/electron,Zagorakiss/electron,bbondy/electron,gamedevsam/electron,bitemyapp/electron,LadyNaggaga/electron,arturts/electron,soulteary/electron,ianscrivener/electron,gamedevsam/electron,JussMee15/electron,thompsonemerson/electron,aichingm/electron,MaxWhere/electron,ianscrivener/electron,stevemao/electron,medixdev/electron,bwiggs/electron,shiftkey/electron,fireball-x/atom-shell,chrisswk/electron,natgolov/electron,simongregory/electron,vaginessa/electron,MaxWhere/electron,meowlab/electron,John-Lin/electron,dkfiresky/electron,mjaniszew/electron,ervinb/electron,tomashanacek/electron,aecca/electron,posix4e/electron,jjz/electron,nekuz0r/electron,systembugtj/electron,kikong/electron,BionicClick/electron,webmechanicx/electron,chriskdon/electron,digideskio/electron,RIAEvangelist/electron,aliib/electron,voidbridge/electron,cqqccqc/electron,aliib/electron,evgenyzinoviev/electron,jiaz/electron,gerhardberger/electron,astoilkov/electron,nagyistoce/electron-atom-shell,nekuz0r/electron,adcentury/electron,shennushi/electron,etiktin/electron,tincan24/electron,cqqccqc/electron,kazupon/electron,ankitaggarwal011/electron,ervinb/electron,smczk/electron,simonfork/electron,smczk/electron,JesselJohn/electron,fabien-d/electron,renaesop/electron,vipulroxx/electron,mhkeller/electron,webmechanicx/electron,bright-sparks/electron,pombredanne/electron,webmechanicx/electron,tonyganch/electron,preco21/electron,Floato/electron,evgenyzinoviev/electron,adcentury/electron,saronwei/electron,jacksondc/electron,neutrous/electron,astoilkov/electron,systembugtj/electron,wolfflow/electron,jonatasfreitasv/electron,matiasinsaurralde/electron,Faiz7412/electron,aichingm/electron,abhishekgahlot/electron,jannishuebl/electron,coderhaoxin/electron,aecca/electron,posix4e/electron,vaginessa/electron,the-ress/electron,darwin/electron,pandoraui/electron,anko/electron,rreimann/electron,bpasero/electron,digideskio/electron,egoist/electron,dongjoon-hyun/electron,stevekinney/electron,jlhbaseball15/electron,tinydew4/electron,posix4e/electron,aaron-goshine/electron,kostia/electron,davazp/electron,saronwei/electron,adamjgray/electron,kostia/electron,nicobot/electron,kikong/electron,Zagorakiss/electron,wan-qy/electron,kostia/electron,etiktin/electron,fritx/electron,howmuchcomputer/electron,RobertJGabriel/electron,JesselJohn/electron,Zagorakiss/electron,Faiz7412/electron,kcrt/electron,joaomoreno/atom-shell,beni55/electron,joneit/electron,setzer777/electron,shennushi/electron,tomashanacek/electron,eric-seekas/electron,tincan24/electron,mjaniszew/electron,gstack/infinium-shell,Jacobichou/electron,preco21/electron,jonatasfreitasv/electron,cos2004/electron,bbondy/electron,deepak1556/atom-shell,simongregory/electron,kokdemo/electron,the-ress/electron,abhishekgahlot/electron,wolfflow/electron,Rokt33r/electron,thingsinjars/electron,digideskio/electron,Ivshti/electron,leftstick/electron,brave/muon,nekuz0r/electron,jcblw/electron,tinydew4/electron,minggo/electron,adcentury/electron,leethomas/electron,zhakui/electron,fffej/electron,brave/electron,ankitaggarwal011/electron,egoist/electron,zhakui/electron,setzer777/electron,JussMee15/electron,farmisen/electron,jlhbaseball15/electron,subblue/electron,bobwol/electron,JussMee15/electron,nicobot/electron,thomsonreuters/electron,meowlab/electron,Rokt33r/electron,smczk/electron,nagyistoce/electron-atom-shell,benweissmann/electron,eriser/electron,fireball-x/atom-shell,twolfson/electron,adcentury/electron,maxogden/atom-shell,eriser/electron,etiktin/electron,voidbridge/electron,anko/electron,mattdesl/electron,MaxGraey/electron,eric-seekas/electron,tylergibson/electron,jiaz/electron,LadyNaggaga/electron,tincan24/electron,anko/electron,BionicClick/electron,kokdemo/electron,GoooIce/electron,bpasero/electron,aichingm/electron,lzpfmh/electron,tinydew4/electron,IonicaBizauKitchen/electron,mjaniszew/electron,seanchas116/electron,Evercoder/electron,beni55/electron,leethomas/electron,IonicaBizauKitchen/electron,RobertJGabriel/electron,miniak/electron,hokein/atom-shell,christian-bromann/electron,lrlna/electron,gamedevsam/electron,bwiggs/electron,soulteary/electron,fffej/electron,Ivshti/electron,farmisen/electron,chriskdon/electron,biblerule/UMCTelnetHub,maxogden/atom-shell,robinvandernoord/electron,leftstick/electron,fomojola/electron,natgolov/electron,RIAEvangelist/electron,jonatasfreitasv/electron,renaesop/electron,howmuchcomputer/electron,tonyganch/electron,nicholasess/electron,arusakov/electron,voidbridge/electron,voidbridge/electron,pirafrank/electron,mjaniszew/electron,Rokt33r/electron,xfstudio/electron,noikiy/electron,tylergibson/electron,jhen0409/electron,minggo/electron,dkfiresky/electron,MaxGraey/electron,roadev/electron,nicobot/electron,farmisen/electron,sshiting/electron,jonatasfreitasv/electron,tylergibson/electron,brenca/electron,jlord/electron,michaelchiche/electron,seanchas116/electron,ervinb/electron,benweissmann/electron,LadyNaggaga/electron,wan-qy/electron,xfstudio/electron,electron/electron,eric-seekas/electron,systembugtj/electron,jhen0409/electron,coderhaoxin/electron,bruce/electron,jannishuebl/electron,jaanus/electron,setzer777/electron,JussMee15/electron,preco21/electron,eriser/electron,aecca/electron,mattdesl/electron,arturts/electron,mubassirhayat/electron,Faiz7412/electron,DivyaKMenon/electron,thompsonemerson/electron,medixdev/electron,dkfiresky/electron,bitemyapp/electron,joaomoreno/atom-shell,darwin/electron,nagyistoce/electron-atom-shell,jannishuebl/electron,shiftkey/electron,kenmozi/electron,baiwyc119/electron,baiwyc119/electron,pombredanne/electron,bitemyapp/electron,gabriel/electron,shiftkey/electron,gbn972/electron,jjz/electron,yalexx/electron,John-Lin/electron,xiruibing/electron,xiruibing/electron,bbondy/electron,icattlecoder/electron,jlhbaseball15/electron,kokdemo/electron,leolujuyi/electron,ankitaggarwal011/electron,dahal/electron,bitemyapp/electron,timruffles/electron,gabriel/electron,adamjgray/electron,soulteary/electron,mubassirhayat/electron,RobertJGabriel/electron,rsvip/electron,natgolov/electron,jtburke/electron,takashi/electron,Faiz7412/electron,jsutcodes/electron,bbondy/electron,RIAEvangelist/electron,hokein/atom-shell,chrisswk/electron,wan-qy/electron,natgolov/electron,BionicClick/electron,oiledCode/electron,lzpfmh/electron,fireball-x/atom-shell,xfstudio/electron,systembugtj/electron,fabien-d/electron,zhakui/electron,roadev/electron,howmuchcomputer/electron,miniak/electron,noikiy/electron,kenmozi/electron,mirrh/electron,xfstudio/electron,mattotodd/electron,bwiggs/electron,IonicaBizauKitchen/electron,electron/electron,carsonmcdonald/electron,vaginessa/electron,mjaniszew/electron,saronwei/electron,howmuchcomputer/electron,gbn972/electron,aecca/electron,systembugtj/electron,noikiy/electron,fritx/electron,gerhardberger/electron,gerhardberger/electron,coderhaoxin/electron,takashi/electron,biblerule/UMCTelnetHub,astoilkov/electron,fabien-d/electron,gabriel/electron,sshiting/electron,bitemyapp/electron,hokein/atom-shell,abhishekgahlot/electron,gamedevsam/electron,Neron-X5/electron,Neron-X5/electron,joneit/electron,SufianHassan/electron,fomojola/electron,vipulroxx/electron,RobertJGabriel/electron,leftstick/electron,jsutcodes/electron,arturts/electron,minggo/electron,SufianHassan/electron,smczk/electron,wan-qy/electron,webmechanicx/electron,the-ress/electron,GoooIce/electron,ianscrivener/electron,SufianHassan/electron,deepak1556/atom-shell,neutrous/electron,brave/muon,eriser/electron,mrwizard82d1/electron,leolujuyi/electron,kcrt/electron,destan/electron,brave/muon,Jonekee/electron,gabriel/electron,rreimann/electron,Neron-X5/electron,jcblw/electron,pirafrank/electron,edulan/electron,twolfson/electron,yalexx/electron,saronwei/electron,tonyganch/electron,tincan24/electron,shiftkey/electron,baiwyc119/electron,felixrieseberg/electron,robinvandernoord/electron,John-Lin/electron,RIAEvangelist/electron,meowlab/electron,egoist/electron,edulan/electron,tinydew4/electron,arusakov/electron,vaginessa/electron,bwiggs/electron,electron/electron,mattdesl/electron,deed02392/electron,fffej/electron,matiasinsaurralde/electron,Ivshti/electron,thompsonemerson/electron,dongjoon-hyun/electron,gabriel/electron,MaxGraey/electron,anko/electron,stevemao/electron,jaanus/electron,mirrh/electron,Zagorakiss/electron,DivyaKMenon/electron,fritx/electron,aichingm/electron,cos2004/electron,shockone/electron,jacksondc/electron,jlhbaseball15/electron,jcblw/electron,vipulroxx/electron,pombredanne/electron,leolujuyi/electron,sircharleswatson/electron,pirafrank/electron,Andrey-Pavlov/electron,dkfiresky/electron,bright-sparks/electron,rreimann/electron,gabrielPeart/electron,benweissmann/electron,rhencke/electron,biblerule/UMCTelnetHub,IonicaBizauKitchen/electron,jtburke/electron,brave/electron,aichingm/electron,rprichard/electron,pandoraui/electron,icattlecoder/electron,evgenyzinoviev/electron,yan-foto/electron,jacksondc/electron,brave/muon,setzer777/electron,Gerhut/electron,rsvip/electron,jtburke/electron,mhkeller/electron,timruffles/electron,Neron-X5/electron,d-salas/electron,jannishuebl/electron,renaesop/electron,rreimann/electron,mirrh/electron,arturts/electron,dahal/electron,MaxGraey/electron,davazp/electron,nekuz0r/electron,faizalpribadi/electron,SufianHassan/electron,RobertJGabriel/electron,joaomoreno/atom-shell,MaxWhere/electron,jaanus/electron,gabrielPeart/electron,renaesop/electron,gamedevsam/electron,sshiting/electron,tomashanacek/electron,jiaz/electron,mattdesl/electron,rsvip/electron,destan/electron,kokdemo/electron,lzpfmh/electron,jhen0409/electron,stevemao/electron,michaelchiche/electron,evgenyzinoviev/electron,thomsonreuters/electron,adamjgray/electron,gbn972/electron,jiaz/electron,chriskdon/electron,nicobot/electron,LadyNaggaga/electron,adamjgray/electron,vHanda/electron,Andrey-Pavlov/electron,shockone/electron,adcentury/electron,simonfork/electron,noikiy/electron,carsonmcdonald/electron,deepak1556/atom-shell,greyhwndz/electron,hokein/atom-shell,bright-sparks/electron,jaanus/electron,greyhwndz/electron,robinvandernoord/electron,sircharleswatson/electron,kenmozi/electron,maxogden/atom-shell,systembugtj/electron,tonyganch/electron,sircharleswatson/electron,renaesop/electron,medixdev/electron,biblerule/UMCTelnetHub,jlord/electron,edulan/electron,digideskio/electron,kazupon/electron,yan-foto/electron,bright-sparks/electron,jaanus/electron,rajatsingla28/electron,preco21/electron,Neron-X5/electron,Ivshti/electron,eric-seekas/electron,natgolov/electron,mattdesl/electron,jlhbaseball15/electron,sky7sea/electron,medixdev/electron,aaron-goshine/electron,joneit/electron,minggo/electron,tomashanacek/electron,simonfork/electron,ianscrivener/electron,rajatsingla28/electron,aaron-goshine/electron,dongjoon-hyun/electron,ankitaggarwal011/electron,fabien-d/electron,dkfiresky/electron,coderhaoxin/electron,christian-bromann/electron,soulteary/electron,etiktin/electron,subblue/electron,JesselJohn/electron,pandoraui/electron,carsonmcdonald/electron,roadev/electron,jlhbaseball15/electron,matiasinsaurralde/electron,saronwei/electron,dongjoon-hyun/electron,kostia/electron,Jacobichou/electron,arturts/electron,fffej/electron,trankmichael/electron,thompsonemerson/electron,leethomas/electron,felixrieseberg/electron,darwin/electron,mirrh/electron,stevekinney/electron,deed02392/electron,lrlna/electron,mrwizard82d1/electron,d-salas/electron,bright-sparks/electron,deed02392/electron,astoilkov/electron,GoooIce/electron,meowlab/electron,roadev/electron,kokdemo/electron,benweissmann/electron,Floato/electron,posix4e/electron,brave/electron,Floato/electron,gabrielPeart/electron,Floato/electron,carsonmcdonald/electron,fireball-x/atom-shell,Jonekee/electron,jlord/electron,brave/muon,bpasero/electron,MaxGraey/electron,medixdev/electron,smczk/electron,wan-qy/electron,sircharleswatson/electron,bwiggs/electron,rhencke/electron,aecca/electron,nicholasess/electron,posix4e/electron,tinydew4/electron,michaelchiche/electron,neutrous/electron,shennushi/electron,trankmichael/electron,John-Lin/electron,kazupon/electron,jlord/electron,nagyistoce/electron-atom-shell,subblue/electron,pombredanne/electron,bright-sparks/electron,synaptek/electron,trankmichael/electron,benweissmann/electron,stevekinney/electron,JesselJohn/electron,setzer777/electron,thomsonreuters/electron,zhakui/electron,vipulroxx/electron,gerhardberger/electron,pombredanne/electron,BionicClick/electron,thingsinjars/electron,lrlna/electron,brenca/electron,robinvandernoord/electron,voidbridge/electron,gerhardberger/electron,felixrieseberg/electron,iftekeriba/electron,thingsinjars/electron,SufianHassan/electron,yan-foto/electron,kikong/electron,Faiz7412/electron,wolfflow/electron,eric-seekas/electron,micalan/electron,aliib/electron,Zagorakiss/electron,fomojola/electron,kcrt/electron,mrwizard82d1/electron,tonyganch/electron,jjz/electron,DivyaKMenon/electron,howmuchcomputer/electron,fireball-x/atom-shell,christian-bromann/electron,Evercoder/electron,lrlna/electron,takashi/electron,Andrey-Pavlov/electron,mubassirhayat/electron,baiwyc119/electron,ianscrivener/electron,kenmozi/electron,micalan/electron,joneit/electron,brenca/electron,d-salas/electron,jjz/electron,medixdev/electron,vipulroxx/electron,rhencke/electron,bobwol/electron,Andrey-Pavlov/electron,joaomoreno/atom-shell,Gerhut/electron,anko/electron,Gerhut/electron,icattlecoder/electron,shennushi/electron,simonfork/electron,brave/muon,mattotodd/electron,lrlna/electron,Evercoder/electron,leftstick/electron,mhkeller/electron,jannishuebl/electron,jsutcodes/electron,dahal/electron,farmisen/electron,jaanus/electron,nicholasess/electron,dahal/electron,nicobot/electron,icattlecoder/electron,christian-bromann/electron,leolujuyi/electron,webmechanicx/electron,kenmozi/electron,LadyNaggaga/electron,fomojola/electron,astoilkov/electron,Rokt33r/electron,gerhardberger/electron,beni55/electron,xiruibing/electron,rajatsingla28/electron,kazupon/electron,faizalpribadi/electron,eriser/electron,cqqccqc/electron,shockone/electron,coderhaoxin/electron,bitemyapp/electron,MaxWhere/electron,adcentury/electron,seanchas116/electron,JesselJohn/electron,fritx/electron,eriser/electron,wolfflow/electron,Jonekee/electron,webmechanicx/electron,shennushi/electron,adamjgray/electron,faizalpribadi/electron,synaptek/electron,timruffles/electron,stevemao/electron,cos2004/electron,ianscrivener/electron,leftstick/electron,nicholasess/electron,stevemao/electron,maxogden/atom-shell,synaptek/electron,evgenyzinoviev/electron,trigrass2/electron,chrisswk/electron,leolujuyi/electron,twolfson/electron,Jacobichou/electron,JussMee15/electron,gbn972/electron,voidbridge/electron,kcrt/electron,davazp/electron,brave/electron,yalexx/electron,the-ress/electron,noikiy/electron,mubassirhayat/electron,oiledCode/electron,etiktin/electron,vHanda/electron,roadev/electron,cos2004/electron,BionicClick/electron,leolujuyi/electron,brenca/electron,micalan/electron,edulan/electron,matiasinsaurralde/electron,the-ress/electron,jsutcodes/electron,leethomas/electron,kenmozi/electron,aaron-goshine/electron,tomashanacek/electron,darwin/electron,dkfiresky/electron,jjz/electron,iftekeriba/electron,destan/electron,nekuz0r/electron,trankmichael/electron,kikong/electron,jacksondc/electron,cos2004/electron,brenca/electron,electron/electron,vHanda/electron,mhkeller/electron,cqqccqc/electron,yalexx/electron,rsvip/electron,yalexx/electron,oiledCode/electron,jhen0409/electron,thingsinjars/electron,evgenyzinoviev/electron,chriskdon/electron,gabrielPeart/electron,neutrous/electron,subblue/electron,michaelchiche/electron,jonatasfreitasv/electron,rsvip/electron,edulan/electron,JesselJohn/electron,jcblw/electron,icattlecoder/electron,bbondy/electron,sky7sea/electron,fomojola/electron,dahal/electron,twolfson/electron,etiktin/electron,ervinb/electron,Jacobichou/electron,bpasero/electron,vHanda/electron,oiledCode/electron,meowlab/electron,synaptek/electron,seanchas116/electron,rprichard/electron,pandoraui/electron,setzer777/electron,jlord/electron,shiftkey/electron,bpasero/electron,gstack/infinium-shell,mattotodd/electron,aichingm/electron,destan/electron,takashi/electron,DivyaKMenon/electron,mhkeller/electron,simonfork/electron,Gerhut/electron,bruce/electron,trigrass2/electron,bobwol/electron,trankmichael/electron,greyhwndz/electron,abhishekgahlot/electron,dongjoon-hyun/electron,mattotodd/electron,arusakov/electron,bpasero/electron,miniak/electron,gbn972/electron,seanchas116/electron,simongregory/electron,wolfflow/electron,soulteary/electron,seanchas116/electron,xfstudio/electron,kazupon/electron,pombredanne/electron,aaron-goshine/electron,aecca/electron,rajatsingla28/electron,posix4e/electron,bruce/electron,John-Lin/electron,mattotodd/electron,tincan24/electron,shaundunne/electron,jtburke/electron,mrwizard82d1/electron,shockone/electron,joaomoreno/atom-shell,gabriel/electron,pirafrank/electron,Jonekee/electron,kostia/electron,leethomas/electron,roadev/electron,fomojola/electron,fffej/electron,yan-foto/electron,rajatsingla28/electron,fritx/electron,deepak1556/atom-shell,oiledCode/electron,carsonmcdonald/electron,lzpfmh/electron,shaundunne/electron,aliib/electron,kcrt/electron,joaomoreno/atom-shell,christian-bromann/electron,nagyistoce/electron-atom-shell,greyhwndz/electron,Andrey-Pavlov/electron,rhencke/electron,d-salas/electron,sircharleswatson/electron,takashi/electron,RIAEvangelist/electron,pirafrank/electron,adamjgray/electron,shaundunne/electron,d-salas/electron,abhishekgahlot/electron,gstack/infinium-shell,bruce/electron,GoooIce/electron,digideskio/electron,jjz/electron,LadyNaggaga/electron,chriskdon/electron,jcblw/electron,lzpfmh/electron,deed02392/electron,gstack/infinium-shell,takashi/electron,electron/electron,trigrass2/electron,mrwizard82d1/electron,minggo/electron,Floato/electron,thomsonreuters/electron,farmisen/electron,brenca/electron,rprichard/electron,christian-bromann/electron,abhishekgahlot/electron,mjaniszew/electron,Evercoder/electron,chrisswk/electron,jtburke/electron,arusakov/electron,bruce/electron,John-Lin/electron,Andrey-Pavlov/electron,lzpfmh/electron,deed02392/electron,electron/electron,kazupon/electron,tincan24/electron,felixrieseberg/electron,stevekinney/electron,Floato/electron,jtburke/electron,destan/electron,beni55/electron,leethomas/electron,vHanda/electron,farmisen/electron,tomashanacek/electron,davazp/electron,destan/electron,fabien-d/electron,natgolov/electron,MaxWhere/electron,fritx/electron,rreimann/electron,sky7sea/electron,kokdemo/electron,aliib/electron,faizalpribadi/electron,twolfson/electron,dahal/electron,astoilkov/electron,thingsinjars/electron,Jonekee/electron,jacksondc/electron,micalan/electron,timruffles/electron,leftstick/electron,aliib/electron,digideskio/electron,xfstudio/electron,rprichard/electron,mubassirhayat/electron,kostia/electron,the-ress/electron,d-salas/electron,chriskdon/electron,arusakov/electron,zhakui/electron,wolfflow/electron,anko/electron,meowlab/electron,robinvandernoord/electron,egoist/electron,preco21/electron,jiaz/electron,edulan/electron,bobwol/electron,shaundunne/electron,rhencke/electron,IonicaBizauKitchen/electron,sircharleswatson/electron,hokein/atom-shell,nicholasess/electron,micalan/electron,Jacobichou/electron,shockone/electron,michaelchiche/electron,neutrous/electron,mirrh/electron,biblerule/UMCTelnetHub,joneit/electron,Ivshti/electron,baiwyc119/electron,bobwol/electron,yalexx/electron,iftekeriba/electron,DivyaKMenon/electron,michaelchiche/electron,tylergibson/electron,cqqccqc/electron,felixrieseberg/electron,simongregory/electron,mattdesl/electron,egoist/electron,baiwyc119/electron,maxogden/atom-shell,Gerhut/electron,deepak1556/atom-shell,tylergibson/electron,nekuz0r/electron,iftekeriba/electron,arusakov/electron,simonfork/electron,GoooIce/electron,thompsonemerson/electron,tonyganch/electron,xiruibing/electron,BionicClick/electron,rajatsingla28/electron,ankitaggarwal011/electron,yan-foto/electron,Zagorakiss/electron,sky7sea/electron,bpasero/electron,bwiggs/electron,chrisswk/electron,sky7sea/electron,arturts/electron,bobwol/electron,subblue/electron,tinydew4/electron,davazp/electron | script/lib/config.py | script/lib/config.py | #!/usr/bin/env python
import platform
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '3245ef802fbf546f1a1d206990aa9d18be6bfbfe'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| #!/usr/bin/env python
import platform
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'f0c3a4546d8e75689c16b9aee1052a72951e58de'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
| mit | Python |
a03eaddd3e950f628320d1b5b007d87b11906844 | add saveload.py (with error) | PhysicsEngine/SoundLine-server,PhysicsEngine/SoundLine-server | converter/saveload.py | converter/saveload.py | #!/usr/local/bin/python
# -*- encoding:utf-8
import sys
import subprocess as sp
import numpy
def load_mp3(filename):
command = [ 'ffmpeg',
'-i', sys.argv[1],
'-f', 's16le',
'-acodec', 'pcm_s16le',
'-ar', '44100', # ouput will have 44100 Hz
'-ac', '2', # stereo (set to '1' for mono)
'-']
pipe = sp.Popen(command, stdout=sp.PIPE, bufsize=10**8)
raw_audio = pipe.proc.stdout.read(128000*6)
# Reorganize raw_audio as a Numpy array with two-columns (1 per channel)
audio_array = numpy.fromstring(raw_audio, dtype="int16")
audio_array = audio_array.reshape(len(audio_array))
return audio_array
def save_mp3(filename,audio_array):
pipe2 = sp.Popen([ 'ffmpeg',
'-y', # (optional) means overwrite the output file if it already exists.
"-f", 's16le', # means 16bit input
"-acodec", "pcm_s16le", # means raw 16bit input
'-ar', "44100", # the input will have 44100 Hz
'-ac','2', # the input will have 2 channels (stereo)
'-i', '-', # means that the input will arrive from the pipe
'-vn', # means "don't expect any video input"
'-acodec', "libmp3lame", # output audio codec
# '-b', "3000k", # output bitrate (=quality). Here, 3000kb/second
filename],
stdin=sp.PIPE,stdout=sp.PIPE, stderr=sp.PIPE)
audio_array.astype("int16").tofile(self.proc.stdin)
def main():
ary = load_mp3(sys.argv[1])
# ary = ary.reshape((ary.shape[0]*2))
save_mp3(sys.argv[2],ary)
if __name__ == '__main__':
main()
| mit | Python |
|
4ab3e59b7e9fe339c96042107c3f59bdf1afc46a | add instagram compliance fix | requests/requests-oauthlib,singingwolfboy/requests-oauthlib | requests_oauthlib/compliance_fixes/instagram.py | requests_oauthlib/compliance_fixes/instagram.py | try:
from urlparse import urlparse, parse_qs
except ImportError:
from urllib.parse import urlparse, parse_qs
from oauthlib.common import add_params_to_uri
def instagram_compliance_fix(session):
def _non_compliant_param_name(url, headers, data):
# If the user has already specified the token, either in the URL
# or in a data dictionary, then there's nothing to do.
# If the specified token is different from ``session.access_token``,
# we assume the user intends to override the access token.
url_query = dict(parse_qs(urlparse(url).query))
token = url_query.get("token")
if not token and isinstance(data, dict):
token = data.get("token")
if token:
# Nothing to do, just return.
return url, headers, data
token = [('access_token', session.access_token)]
url = add_params_to_uri(url, token)
return url, headers, data
session.register_compliance_hook(
'protected_request', _non_compliant_param_name)
return session
| isc | Python |
|
b9b34eb2bca76e76ba4f7399b12daa27ed2ab7f4 | Create uvSetTgl.py | aaronfang/personal_scripts | af_scripts/uv/uvSetTgl.py | af_scripts/uv/uvSetTgl.py | # This script will switch UV Set between "map1" and "atlasmap".
# Useage:
# Select meshes and run this script
import maya.cmds as cmds
def uvsetTgl():
shape_node = cmds.ls(sl=True, fl=True, dag=True, type='shape')
current_uvset = cmds.polyUVSet(shape_node[0],q=True, currentUVSet=True)
for shape in shape_node:
uvsets = cmds.polyUVSet(shape,q=True,auv=True)
if "map1" and "atlasUV" in uvsets:
if current_uvset[0] == 'map1':
cmds.polyUVSet(shape, currentUVSet=True, uvSet="atlasUV")
elif current_uvset[0] == 'atlasUV':
cmds.polyUVSet(shape, currentUVSet=True, uvSet="map1")
else:
cmds.polyUVSet(shape, currentUVSet=True, uvSet="map1")
elif "map1" in uvsets and "atlasUV" not in uvsets:
cmds.polyUVSet(shape, currentUVSet=True, uvSet="map1")
uvsetTgl()
| mit | Python |
|
a9b45bf50dae68c9a801ec7942c4f4cc38fa08f5 | Create GenerateUnifiedReports.py | npburg/EventSalesReport | GenerateUnifiedReports.py | GenerateUnifiedReports.py | import argparse
# Read options on which PayPal records to process (year / month) or run on discovery to find the files or discover new files and generate new unified files but preserve the old ones (default)
# load all the Website records based on discovery
# load the PayPal monthly report(s)
# reconsile each record in PayPal records to identify the event and standardize the fields
# save to file the unified records
# GenerateUnifiedReports.py [no options] - this will discover which PayPay files exist wihtout corrisponsiding unified record files and generate the missing unified record files.
# GenerateUnifiedReports.py -f - this will force the generation of all unfied record files even if they already exist
# GenerateUnifiedReports.py -start 2012 01 -end 2013 07 - this will generate the unified record files for the range specified. (start year, start month, end year, end month)
parser = argparse.ArgumentParser( description='Process options for generating unified reports')
parser.add_argument( '-force', metavar='force generate')
parser.add_argument( '-start', metavar='start year / month', nargs='2')
parser.add_argument( '-end', metavar='end year / month', nargs='2')
#GenerateUnifiedRecord( paypal_filename, unified_filename )
| mit | Python |
|
24f665e02912a3f79eec9776c86863a9e172d94a | Create HR_pythonPrintFunction.py | bluewitch/Code-Blue-Python | HR_pythonPrintFunction.py | HR_pythonPrintFunction.py | import sys
if __name__ == '__main__':
n = int(input())
# imported sys for a elegant solution, Python 3
# * before range means taking everything 0 or more
print(*range(1,n+1), sep='',end='\n', file= sys.stdout)
| mit | Python |
|
ce552a70f77934d4b76b5710b76b22967484d17e | Create folderwatcher.py | bengjerstad/miscellaneous,bengjerstad/miscellaneous | folderwatcher.py | folderwatcher.py | import os
import time
import datetime
outold = []
try:
while True:
out = os.listdir()
if outold != out:
ldate= datetime.datetime.now().strftime('%I:%M:%S')
for x in outold:
if x not in out:
print ('Moved: '+ldate+' '+x)
for x in out:
if x not in outold:
print ('New: '+ldate+' '+x)
outold = out
time.sleep(1)
except KeyboardInterrupt:
pass
| mit | Python |
|
41fc87e402aa2864c22adb5c09a713c2b0eacb72 | Add replace test that shutdowns a node and replaces a pod (#806) | mesosphere/dcos-commons,vishnu2kmohan/dcos-commons,mesosphere/dcos-commons,vishnu2kmohan/dcos-commons,mesosphere/dcos-commons,mesosphere/dcos-commons,vishnu2kmohan/dcos-commons,vishnu2kmohan/dcos-commons,vishnu2kmohan/dcos-commons,mesosphere/dcos-commons | frameworks/cassandra/tests/test_recovery_shutdown.py | frameworks/cassandra/tests/test_recovery_shutdown.py | import pytest
from tests.config import *
import sdk_install as install
import sdk_tasks as tasks
import sdk_utils as utils
import json
import shakedown
import time
import sdk_cmd as cmd
def setup_module(module):
install.uninstall(PACKAGE_NAME)
utils.gc_frameworks()
# check_suppression=False due to https://jira.mesosphere.com/browse/CASSANDRA-568
install.install(PACKAGE_NAME, DEFAULT_TASK_COUNT, check_suppression=False)
def setup_function(function):
tasks.check_running(PACKAGE_NAME, DEFAULT_TASK_COUNT)
def teardown_module(module):
install.uninstall(PACKAGE_NAME)
@pytest.mark.sanity
@pytest.mark.recovery
@pytest.mark.shutdown_node
def test_shutdown_host_test():
service_ip = shakedown.get_service_ips(PACKAGE_NAME).pop()
print('marathon ip = {}'.format(service_ip))
node_ip = 0
for pod_id in range(0, DEFAULT_TASK_COUNT):
node_ip = get_pod_host(pod_id)
if node_ip != service_ip:
break
if node_ip is None:
assert Fail, 'could not find a node to shutdown'
old_agent = get_pod_agent(pod_id)
print('pod id = {}, node_ip = {}, agent = {}'.format(pod_id, node_ip, old_agent))
task_ids = tasks.get_task_ids(PACKAGE_NAME, 'node-{}'.format(pod_id))
# instead of partition/reconnect, we shutdown host permanently
status, stdout = shakedown.run_command_on_agent(node_ip, 'sudo shutdown -h +1')
print('shutdown agent {}: [{}] {}'.format(node_ip, status, stdout))
assert status is True
time.sleep(100)
cmd.run_cli('cassandra pods replace node-{}'.format(pod_id))
tasks.check_tasks_updated(PACKAGE_NAME, 'node', task_ids)
#double check all tasks are running
tasks.check_running(PACKAGE_NAME, DEFAULT_TASK_COUNT)
new_agent = get_pod_agent(pod_id)
assert old_agent != new_agent
def get_pod_agent(id):
stdout = cmd.run_cli('cassandra pods info node-{}'.format(id))
return json.loads(stdout)[0]['info']['slaveId']['value']
def get_pod_label(id):
stdout = cmd.run_cli('cassandra pods info node-{}'.format(id))
return json.loads(stdout)[0]['info']['labels']['labels']
def get_pod_host(id):
labels = get_pod_label(id)
for i in range(0, len(labels)):
if labels[i]['key'] == 'offer_hostname':
return labels[i]['value']
return None
| apache-2.0 | Python |
|
c11e74d4210c6de8917dfde6cb33d75f6b1b835a | add migration that solves BigAutoField problem | adamcharnock/django-hordak,adamcharnock/django-hordak | hordak/migrations/0032_check_account_type_big_int.py | hordak/migrations/0032_check_account_type_big_int.py | # Generated by Django 4.0.7 on 2022-09-18 10:33
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("hordak", "0031_alter_account_currencies"),
]
operations = [
migrations.RunSQL(
"""
CREATE OR REPLACE FUNCTION check_account_type()
RETURNS TRIGGER AS
$$
BEGIN
IF NEW.parent_id::INT::BOOL THEN
NEW.type = (SELECT type FROM hordak_account WHERE id = NEW.parent_id);
END IF;
RETURN NEW;
END;
$$
LANGUAGE plpgsql;
""",
"DROP FUNCTION check_account_type()",
),
]
| mit | Python |
|
d3a11021f8be8e93c5c067b5fcf59bc4f9f92cea | add computation of sts for ISUSM | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/dbutil/compute_isusm_sts.py | scripts/dbutil/compute_isusm_sts.py | """
Figure out when the ISUSM data started...
"""
import psycopg2
import network
import sys
import datetime
import pytz
basets = datetime.datetime.now()
basets = basets.replace(tzinfo=pytz.timezone("America/Chicago"))
isuag = psycopg2.connect(database='isuag', host='iemdb')
icursor = isuag.cursor()
mesosite = psycopg2.connect(database='mesosite', host='iemdb')
mcursor = mesosite.cursor()
table = network.Table("ISUSM")
icursor.execute("""SELECT station, min(valid), max(valid) from sm_hourly
GROUP by station ORDER by min ASC""")
for row in icursor:
station = row[0]
if not table.sts.has_key(station):
print 'Whoa station: %s does not exist in metadatabase?' % (station,)
continue
if table.sts[station]['archive_begin'] != row[1]:
print 'Updated %s STS WAS: %s NOW: %s' % (station,
table.sts[station]['archive_begin'], row[1])
mcursor.execute("""UPDATE stations SET archive_begin = %s
WHERE id = %s and network = %s""" , (row[1], station, 'ISUSM') )
if mcursor.rowcount == 0:
print 'ERROR: No rows updated'
mcursor.close()
mesosite.commit()
mesosite.close()
| mit | Python |
|
133a4311fdb3c96edeb927250e549fcaf4080696 | add silly module | desaster/uusipuu | modules/silly.py | modules/silly.py | # -*- coding: ISO-8859-15 -*-
from core.Uusipuu import UusipuuModule
import random, time
from core.tdiff import *
class Module(UusipuuModule):
def cmd_noppa(self, user, target, params):
self.log('ok noppaa heitetn!!')
self.chanmsg('%s!' % random.choice((
'ykknen',
'kakkonen',
'kolmonen',
'nelonen',
'vitonen',
'kutonen')))
# vim: set et sw=4:
| bsd-2-clause | Python |
|
f340bde6e047d86171385b90a023ac01e8914d0c | Add simple neural network (#6452) | TheAlgorithms/Python | neural_network/simple_neural_network.py | neural_network/simple_neural_network.py | """
Forward propagation explanation:
https://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250
"""
import math
import random
# Sigmoid
def sigmoid_function(value: float, deriv: bool = False) -> float:
"""Return the sigmoid function of a float.
>>> sigmoid_function(3.5)
0.9706877692486436
>>> sigmoid_function(3.5, True)
-8.75
"""
if deriv:
return value * (1 - value)
return 1 / (1 + math.exp(-value))
# Initial Value
INITIAL_VALUE = 0.02
def forward_propagation(expected: int, number_propagations: int) -> float:
"""Return the value found after the forward propagation training.
>>> res = forward_propagation(32, 10000000)
>>> res > 31 and res < 33
True
>>> res = forward_propagation(32, 1000)
>>> res > 31 and res < 33
False
"""
# Random weight
weight = float(2 * (random.randint(1, 100)) - 1)
for _ in range(number_propagations):
# Forward propagation
layer_1 = sigmoid_function(INITIAL_VALUE * weight)
# How much did we miss?
layer_1_error = (expected / 100) - layer_1
# Error delta
layer_1_delta = layer_1_error * sigmoid_function(layer_1, True)
# Update weight
weight += INITIAL_VALUE * layer_1_delta
return layer_1 * 100
if __name__ == "__main__":
import doctest
doctest.testmod()
expected = int(input("Expected value: "))
number_propagations = int(input("Number of propagations: "))
print(forward_propagation(expected, number_propagations))
| mit | Python |
|
4b07d7cdd791a03ef4c7ec7e6e4188b625ffb8dc | Add migration | Clarity-89/clarityv2,Clarity-89/clarityv2,Clarity-89/clarityv2,Clarity-89/clarityv2 | src/clarityv2/portfolio/migrations/0002_auto_20180228_2055.py | src/clarityv2/portfolio/migrations/0002_auto_20180228_2055.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-02-28 18:55
from __future__ import unicode_literals
import ckeditor.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('portfolio', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='entry',
name='description',
field=ckeditor.fields.RichTextField(blank=True),
),
]
| mit | Python |
|
0bc48c7131e0589e7f2980e16bce6c2dfcdbafda | Fix usage message from tag:file to tag=file | MeteorAdminz/phantomjs,DocuSignDev/phantomjs,peakji/phantomjs,bettiolo/phantomjs,ChrisAntaki/phantomjs,fxtentacle/phantomjs,wuxianghou/phantomjs,JingZhou0404/phantomjs,xsyntrex/phantomjs,Lochlan/phantomjs,jjyycchh/phantomjs,fxtentacle/phantomjs,ChrisAntaki/phantomjs,bukalov/phantomjs,AladdinSonni/phantomjs,jefleponot/phantomjs,thomasrogers03/phantomjs,wuxianghou/phantomjs,brandingbrand/phantomjs,kyroskoh/phantomjs,paulfitz/phantomjs,liorvh/phantomjs,vietch2612/phantomjs,smasala/phantomjs,tinfoil/phantomjs,attilahorvath/phantomjs,joomel1/phantomjs,bjko/phantomjs,cirrusone/phantom2,woodpecker1/phantomjs,MaDKaTZe/phantomjs,wxkdesky/phantomjs,hexid/phantomjs,dongritengfei/phantomjs,bmotlaghFLT/FLT_PhantomJS,rishilification/phantomjs,smasala/phantomjs,lattwood/phantomjs,zhulin2609/phantomjs,pcarrier-packaging/deb-phantomjs,Andrey-Pavlov/phantomjs,grevutiu-gabriel/phantomjs,saisai/phantomjs,mattvick/phantomjs,sporttech/phantomjs,cirrusone/phantom2,Medium/phantomjs-1,tianzhihen/phantomjs,rishilification/phantomjs,jdar/phantomjs-modified,peakji/phantomjs,OCForks/phantomjs,MaDKaTZe/phantomjs,RobertoMalatesta/phantomjs,revolutionaryG/phantomjs,nin042/phantomjs,chauhanmohit/phantomjs,wxkdesky/phantomjs,cirrusone/phantom2,danigonza/phantomjs,viewdy/phantomjs2,smasala/phantomjs,gskachkov/phantomjs,joomel1/phantomjs,forzi/phantomjs_stradivari_fork,jillesme/phantomjs,sharma1nitish/phantomjs,VinceZK/phantomjs,farhi-naz/phantomjs,ixiom/phantomjs,bjko/phantomjs,Vitallium/phantomjs,chauhanmohit/phantomjs,likaiwalkman/phantomjs,vietch2612/phantomjs,raff/phantomjs,fentas/phantomjs,etiennekruger/phantomjs-qt5,mark-ignacio/phantomjs,pigshell/nhnick,Andrey-Pavlov/phantomjs,wuxianghou/phantomjs,zhulin2609/phantomjs,wxkdesky/phantomjs,likaiwalkman/phantomjs,fxtentacle/phantomjs,PeterWangPo/phantomjs,linjeffrey/phantomjs,hexid/phantomjs,jkburges/phantomjs,smasala/phantomjs,cirrusone/phantom2,kyroskoh/phantomjs,joomel1/phantomjs,klickagent/phantomjs,sxhao/phantomjs,bettiolo/phantomjs,dparshin/phantomjs,Vitallium/phantomjs,gitromand/phantomjs,dparshin/phantomjs,bjko/phantomjs,AladdinSonni/phantomjs,neraliu/tpjs,raff/phantomjs,Klaudit/phantomjs,zackw/phantomjs,astefanutti/phantomjs,ezoic/phantomjs,dongritengfei/phantomjs,webmull/phantomjs,bukalov/phantomjs,pigshell/nhnick,Deepakpatle/phantomjs,fxtentacle/phantomjs,you21979/phantomjs,viewdy/phantomjs2,Andrey-Pavlov/phantomjs,NickelMedia/phantomjs,you21979/phantomjs,chirilo/phantomjs,NickelMedia/phantomjs,houzhenggang/phantomjs,Lkhagvadelger/phantomjs,chirilo/phantomjs,viewdy/phantomjs2,brandingbrand/phantomjs,ezoic/phantomjs,sporttech/phantomjs,Observer-Wu/phantomjs,jjyycchh/phantomjs,aljscott/phantomjs,chylli/phantomjs,eceglov/phantomjs,jdar/phantomjs-modified,Dinamize/phantomjs,rishilification/phantomjs,jguyomard/phantomjs,Tomtomgo/phantomjs,Andrey-Pavlov/phantomjs,AladdinSonni/phantomjs,unb-libraries/phantomjs,skyeckstrom/phantomjs,linjeffrey/phantomjs,gitromand/phantomjs,MeteorAdminz/phantomjs,raff/phantomjs,martonw/phantomjs,Lkhagvadelger/phantomjs,MeteorAdminz/phantomjs,nicksay/phantomjs,webmull/phantomjs,saisai/phantomjs,Lkhagvadelger/phantomjs,cesarmarinhorj/phantomjs,VinceZK/phantomjs,cloudflare/phantomjs,Medium/phantomjs-1,neraliu/tainted-phantomjs,neraliu/tainted-phantomjs,jkburges/phantomjs,webmull/phantomjs,jillesme/phantomjs,ye11ow/phantomjs,neraliu/tainted-phantomjs,viewdy/phantomjs2,webmull/phantomjs,asrie/phantomjs,revolutionaryG/phantomjs,klickagent/phantomjs,MaDKaTZe/phantomjs,ixiom/phantomjs,delighted/phantomjs,unb-libraries/phantomjs,smasala/phantomjs,jguyomard/phantomjs,kinwahlai/phantomjs-ghostdriver,farhi-naz/phantomjs,webmull/phantomjs,saisai/phantomjs,eugene1g/phantomjs,Andrey-Pavlov/phantomjs,bkrukowski/phantomjs,Dinamize/phantomjs,lattwood/phantomjs,fentas/phantomjs,admetricks/phantomjs,tmuelle2/phantomjs,iradul/phantomjs-clone,angelman/phantomjs,matepeter90/phantomjs,pcarrier-packaging/deb-phantomjs,ramanajee/phantomjs,dongritengfei/phantomjs,DocuSignDev/phantomjs,jkburges/phantomjs,klim-iv/phantomjs-qt5,cloudflare/phantomjs,ChrisAntaki/phantomjs,houzhenggang/phantomjs,danigonza/phantomjs,etiennekruger/phantomjs-qt5,nicksay/phantomjs,iradul/phantomjs-clone,yoki/phantomjs,neraliu/tpjs,klickagent/phantomjs,lseyesl/phantomjs,lattwood/phantomjs,bukalov/phantomjs,djmaze/phantomjs,yoki/phantomjs,avinashkunuje/phantomjs,you21979/phantomjs,iradul/phantomjs,christoph-buente/phantomjs,zhengyongbo/phantomjs,Dinamize/phantomjs,toanalien/phantomjs,fentas/phantomjs,fentas/phantomjs,eceglov/phantomjs,bkrukowski/phantomjs,likaiwalkman/phantomjs,iradul/phantomjs-clone,skyeckstrom/phantomjs,attilahorvath/phantomjs,pbrazdil/phantomjs,r3b/phantomjs,petermat/phantomjs,ariya/phantomjs,PeterWangPo/phantomjs,markhu/phantomjs,ezoic/phantomjs,thomasrogers03/phantomjs,tinfoil/phantomjs,forzi/phantomjs_stradivari_fork,NickelMedia/phantomjs,Andrey-Pavlov/phantomjs,vietch2612/phantomjs,iradul/phantomjs-clone,iver333/phantomjs,pcarrier-packaging/deb-phantomjs,mattvick/phantomjs,iradul/phantomjs-clone,iradul/phantomjs,xsyntrex/phantomjs,pigshell/nhnick,iradul/phantomjs,nin042/phantomjs,sharma1nitish/phantomjs,bprodoehl/phantomjs,linjeffrey/phantomjs,Medium/phantomjs-1,youprofit/phantomjs,lseyesl/phantomjs,raff/phantomjs,aljscott/phantomjs,S11001001/phantomjs,asrie/phantomjs,MeteorAdminz/phantomjs,Tomtomgo/phantomjs,Andrey-Pavlov/phantomjs,nin042/phantomjs,apanda/phantomjs-intercept,youprofit/phantomjs,markhu/phantomjs,mattvick/phantomjs,neraliu/tainted-phantomjs,neraliu/tpjs,pataquets/phantomjs,cesarmarinhorj/phantomjs,nicksay/phantomjs,hexid/phantomjs,matepeter90/phantomjs,fentas/phantomjs,christoph-buente/phantomjs,brandingbrand/phantomjs,lattwood/phantomjs,kyroskoh/phantomjs,toanalien/phantomjs,dparshin/phantomjs,attilahorvath/phantomjs,PeterWangPo/phantomjs,eceglov/phantomjs,JingZhou0404/phantomjs,shinate/phantomjs,yoki/phantomjs,sxhao/phantomjs,MaDKaTZe/phantomjs,woodpecker1/phantomjs,Klaudit/phantomjs,asrie/phantomjs,hexid/phantomjs,tinfoil/phantomjs,neraliu/tpjs,jkenn99/phantomjs,youprofit/phantomjs,delighted/phantomjs,mapbased/phantomjs,iver333/phantomjs,petermat/phantomjs,dongritengfei/phantomjs,woodpecker1/phantomjs,farhi-naz/phantomjs,lattwood/phantomjs,matepeter90/phantomjs,houzhenggang/phantomjs,martonw/phantomjs,delighted/phantomjs,MaDKaTZe/phantomjs,peakji/phantomjs,houzhenggang/phantomjs,martonw/phantomjs,AladdinSonni/phantomjs,webmull/phantomjs,hexid/phantomjs,martonw/phantomjs,youprofit/phantomjs,jdar/phantomjs-modified,skyeckstrom/phantomjs,mattvick/phantomjs,you21979/phantomjs,petermat/phantomjs,DocuSignDev/phantomjs,OCForks/phantomjs,pigshell/nhnick,vietch2612/phantomjs,OCForks/phantomjs,Lochlan/phantomjs,tmuelle2/phantomjs,joomel1/phantomjs,eceglov/phantomjs,vietch2612/phantomjs,delighted/phantomjs,AladdinSonni/phantomjs,farhi-naz/phantomjs,AladdinSonni/phantomjs,dongritengfei/phantomjs,DocuSignDev/phantomjs,fentas/phantomjs,cirrusone/phantom2,RobertoMalatesta/phantomjs,petermat/phantomjs,Vitallium/phantomjs,grevutiu-gabriel/phantomjs,avinashkunuje/phantomjs,houzhenggang/phantomjs,hexid/phantomjs,Lkhagvadelger/phantomjs,dhendo/phantomjs,bukalov/phantomjs,mapbased/phantomjs,saisai/phantomjs,avinashkunuje/phantomjs,JamesMGreene/phantomjs,tinfoil/phantomjs,JamesMGreene/phantomjs,matepeter90/phantomjs,fxtentacle/phantomjs,rishilification/phantomjs,dhendo/phantomjs,OCForks/phantomjs,AladdinSonni/phantomjs,ye11ow/phantomjs,zackw/phantomjs,thomasrogers03/phantomjs,wuxianghou/phantomjs,fentas/phantomjs,MaDKaTZe/phantomjs,PeterWangPo/phantomjs,MeteorAdminz/phantomjs,JingZhou0404/phantomjs,chirilo/phantomjs,bukalov/phantomjs,mark-ignacio/phantomjs,RobertoMalatesta/phantomjs,skyeckstrom/phantomjs,rishilification/phantomjs,jkburges/phantomjs,viewdy/phantomjs2,jorik041/phantomjs,brandingbrand/phantomjs,delighted/phantomjs,sharma1nitish/phantomjs,fxtentacle/phantomjs,attilahorvath/phantomjs,revolutionaryG/phantomjs,neraliu/tpjs,Tomtomgo/phantomjs,joomel1/phantomjs,jkburges/phantomjs,apanda/phantomjs-intercept,lattwood/phantomjs,kinwahlai/phantomjs-ghostdriver,nin042/phantomjs,MaDKaTZe/phantomjs,jjyycchh/phantomjs,dhendo/phantomjs,lseyesl/phantomjs,ramanajee/phantomjs,iradul/phantomjs-clone,Tomtomgo/phantomjs,unb-libraries/phantomjs,danigonza/phantomjs,paulfitz/phantomjs,toanalien/phantomjs,zhengyongbo/phantomjs,Deepakpatle/phantomjs,pigshell/nhnick,woodpecker1/phantomjs,PeterWangPo/phantomjs,bettiolo/phantomjs,eceglov/phantomjs,pcarrier-packaging/deb-phantomjs,Tomtomgo/phantomjs,neraliu/tpjs,iver333/phantomjs,pcarrier-packaging/deb-phantomjs,tmuelle2/phantomjs,JamesMGreene/phantomjs,smasala/phantomjs,kyroskoh/phantomjs,Deepakpatle/phantomjs,jkburges/phantomjs,jdar/phantomjs-modified,fentas/phantomjs,VinceZK/phantomjs,cesarmarinhorj/phantomjs,dongritengfei/phantomjs,avinashkunuje/phantomjs,klickagent/phantomjs,NickelMedia/phantomjs,dparshin/phantomjs,StevenBlack/phantomjs,zhulin2609/phantomjs,ramanajee/phantomjs,iradul/phantomjs,nicksay/phantomjs,brandingbrand/phantomjs,Andrey-Pavlov/phantomjs,pbrazdil/phantomjs,DocuSignDev/phantomjs,klim-iv/phantomjs-qt5,NickelMedia/phantomjs,Klaudit/phantomjs,bettiolo/phantomjs,eceglov/phantomjs,zhulin2609/phantomjs,asrie/phantomjs,Lkhagvadelger/phantomjs,r3b/phantomjs,astefanutti/phantomjs,cesarmarinhorj/phantomjs,unb-libraries/phantomjs,Lochlan/phantomjs,jorik041/phantomjs,tinfoil/phantomjs,chauhanmohit/phantomjs,dparshin/phantomjs,zackw/phantomjs,zhulin2609/phantomjs,OCForks/phantomjs,iver333/phantomjs,lseyesl/phantomjs,xsyntrex/phantomjs,apanda/phantomjs-intercept,ramanajee/phantomjs,pataquets/phantomjs,sharma1nitish/phantomjs,liorvh/phantomjs,iver333/phantomjs,OCForks/phantomjs,bjko/phantomjs,revolutionaryG/phantomjs,iradul/phantomjs-clone,dhendo/phantomjs,bmotlaghFLT/FLT_PhantomJS,avinashkunuje/phantomjs,admetricks/phantomjs,r3b/phantomjs,iradul/phantomjs-clone,VinceZK/phantomjs,lattwood/phantomjs,liorvh/phantomjs,djmaze/phantomjs,jorik041/phantomjs,Klaudit/phantomjs,woodpecker1/phantomjs,webmull/phantomjs,gskachkov/phantomjs,ariya/phantomjs,liorvh/phantomjs,forzi/phantomjs_stradivari_fork,houzhenggang/phantomjs,pigshell/nhnick,tianzhihen/phantomjs,christoph-buente/phantomjs,toanalien/phantomjs,bjko/phantomjs,MaDKaTZe/phantomjs,saisai/phantomjs,eugene1g/phantomjs,bkrukowski/phantomjs,forzi/phantomjs_stradivari_fork,kyroskoh/phantomjs,likaiwalkman/phantomjs,tianzhihen/phantomjs,cloudflare/phantomjs,liorvh/phantomjs,Deepakpatle/phantomjs,saisai/phantomjs,etiennekruger/phantomjs-qt5,pataquets/phantomjs,Klaudit/phantomjs,bkrukowski/phantomjs,raff/phantomjs,grevutiu-gabriel/phantomjs,liorvh/phantomjs,kyroskoh/phantomjs,wxkdesky/phantomjs,ramanajee/phantomjs,djmaze/phantomjs,ixiom/phantomjs,christoph-buente/phantomjs,chylli/phantomjs,wxkdesky/phantomjs,Dinamize/phantomjs,sharma1nitish/phantomjs,djmaze/phantomjs,StevenBlack/phantomjs,admetricks/phantomjs,chirilo/phantomjs,nicksay/phantomjs,webmull/phantomjs,S11001001/phantomjs,mark-ignacio/phantomjs,vegetableman/phantomjs,revolutionaryG/phantomjs,OCForks/phantomjs,unb-libraries/phantomjs,mapbased/phantomjs,petermat/phantomjs,zhengyongbo/phantomjs,r3b/phantomjs,admetricks/phantomjs,raff/phantomjs,bmotlaghFLT/FLT_PhantomJS,apanda/phantomjs-intercept,pataquets/phantomjs,Klaudit/phantomjs,bmotlaghFLT/FLT_PhantomJS,vegetableman/phantomjs,tmuelle2/phantomjs,woodpecker1/phantomjs,peakji/phantomjs,sporttech/phantomjs,peakji/phantomjs,yoki/phantomjs,StevenBlack/phantomjs,angelman/phantomjs,bjko/phantomjs,chylli/phantomjs,tinfoil/phantomjs,OCForks/phantomjs,sxhao/phantomjs,jkenn99/phantomjs,attilahorvath/phantomjs,chauhanmohit/phantomjs,chirilo/phantomjs,cirrusone/phantom2,bmotlaghFLT/FLT_PhantomJS,Observer-Wu/phantomjs,VinceZK/phantomjs,jdar/phantomjs-modified,markhu/phantomjs,yoki/phantomjs,joomel1/phantomjs,ezoic/phantomjs,bjko/phantomjs,chylli/phantomjs,zhulin2609/phantomjs,matepeter90/phantomjs,vietch2612/phantomjs,ramanajee/phantomjs,eceglov/phantomjs,gskachkov/phantomjs,etiennekruger/phantomjs-qt5,eugene1g/phantomjs,Dinamize/phantomjs,astefanutti/phantomjs,kyroskoh/phantomjs,zackw/phantomjs,nicksay/phantomjs,Lkhagvadelger/phantomjs,gitromand/phantomjs,nicksay/phantomjs,dhendo/phantomjs,brandingbrand/phantomjs,nin042/phantomjs,jkburges/phantomjs,attilahorvath/phantomjs,raff/phantomjs,VinceZK/phantomjs,delighted/phantomjs,JamesMGreene/phantomjs,Vitallium/phantomjs,jkenn99/phantomjs,Tomtomgo/phantomjs,thomasrogers03/phantomjs,farhi-naz/phantomjs,ye11ow/phantomjs,mapbased/phantomjs,shinate/phantomjs,jillesme/phantomjs,paulfitz/phantomjs,angelman/phantomjs,shinate/phantomjs,gskachkov/phantomjs,asrie/phantomjs,sxhao/phantomjs,jkenn99/phantomjs,iradul/phantomjs,pigshell/nhnick,zhengyongbo/phantomjs,raff/phantomjs,jjyycchh/phantomjs,bprodoehl/phantomjs,gitromand/phantomjs,neraliu/tainted-phantomjs,klickagent/phantomjs,jguyomard/phantomjs,iradul/phantomjs,tinfoil/phantomjs,jkburges/phantomjs,forzi/phantomjs_stradivari_fork,saisai/phantomjs,etiennekruger/phantomjs-qt5,smasala/phantomjs,nicksay/phantomjs,xsyntrex/phantomjs,peakji/phantomjs,PeterWangPo/phantomjs,gitromand/phantomjs,chylli/phantomjs,matepeter90/phantomjs,shinate/phantomjs,bprodoehl/phantomjs,bukalov/phantomjs,jdar/phantomjs-modified,forzi/phantomjs_stradivari_fork,bettiolo/phantomjs,eugene1g/phantomjs,Tomtomgo/phantomjs,eugene1g/phantomjs,mattvick/phantomjs,RobertoMalatesta/phantomjs,youprofit/phantomjs,jjyycchh/phantomjs,VinceZK/phantomjs,gitromand/phantomjs,jjyycchh/phantomjs,jkenn99/phantomjs,nin042/phantomjs,pbrazdil/phantomjs,attilahorvath/phantomjs,jorik041/phantomjs,aljscott/phantomjs,Medium/phantomjs-1,klickagent/phantomjs,cloudflare/phantomjs,sporttech/phantomjs,admetricks/phantomjs,tmuelle2/phantomjs,wxkdesky/phantomjs,sxhao/phantomjs,zhulin2609/phantomjs,gskachkov/phantomjs,shinate/phantomjs,aljscott/phantomjs,jguyomard/phantomjs,danigonza/phantomjs,avinashkunuje/phantomjs,JamesMGreene/phantomjs,liorvh/phantomjs,ixiom/phantomjs,NickelMedia/phantomjs,PeterWangPo/phantomjs,aljscott/phantomjs,jjyycchh/phantomjs,cesarmarinhorj/phantomjs,JamesMGreene/phantomjs,jefleponot/phantomjs,jefleponot/phantomjs,markhu/phantomjs,klickagent/phantomjs,linjeffrey/phantomjs,ramanajee/phantomjs,chauhanmohit/phantomjs,chylli/phantomjs,pigshell/nhnick,christoph-buente/phantomjs,Klaudit/phantomjs,lseyesl/phantomjs,grevutiu-gabriel/phantomjs,NickelMedia/phantomjs,astefanutti/phantomjs,pcarrier-packaging/deb-phantomjs,VinceZK/phantomjs,revolutionaryG/phantomjs,bettiolo/phantomjs,aljscott/phantomjs,mattvick/phantomjs,mapbased/phantomjs,peakji/phantomjs,vietch2612/phantomjs,ye11ow/phantomjs,pbrazdil/phantomjs,gitromand/phantomjs,ezoic/phantomjs,VinceZK/phantomjs,martonw/phantomjs,ChrisAntaki/phantomjs,r3b/phantomjs,wuxianghou/phantomjs,angelman/phantomjs,farhi-naz/phantomjs,JingZhou0404/phantomjs,rishilification/phantomjs,Lochlan/phantomjs,paulfitz/phantomjs,Deepakpatle/phantomjs,skyeckstrom/phantomjs,attilahorvath/phantomjs,StevenBlack/phantomjs,linjeffrey/phantomjs,iver333/phantomjs,jdar/phantomjs-modified,thomasrogers03/phantomjs,woodpecker1/phantomjs,thomasrogers03/phantomjs,chauhanmohit/phantomjs,sporttech/phantomjs,liorvh/phantomjs,christoph-buente/phantomjs,admetricks/phantomjs,chylli/phantomjs,skyeckstrom/phantomjs,Medium/phantomjs-1,Vitallium/phantomjs,yoki/phantomjs,aljscott/phantomjs,bkrukowski/phantomjs,vegetableman/phantomjs,zhengyongbo/phantomjs,fxtentacle/phantomjs,ixiom/phantomjs,xsyntrex/phantomjs,tmuelle2/phantomjs,bukalov/phantomjs,revolutionaryG/phantomjs,JingZhou0404/phantomjs,forzi/phantomjs_stradivari_fork,raff/phantomjs,iradul/phantomjs-clone,shinate/phantomjs,tianzhihen/phantomjs,smasala/phantomjs,r3b/phantomjs,bettiolo/phantomjs,wuxianghou/phantomjs,petermat/phantomjs,S11001001/phantomjs,NickelMedia/phantomjs,jkenn99/phantomjs,etiennekruger/phantomjs-qt5,JingZhou0404/phantomjs,christoph-buente/phantomjs,StevenBlack/phantomjs,dparshin/phantomjs,Deepakpatle/phantomjs,jorik041/phantomjs,paulfitz/phantomjs,iver333/phantomjs,JingZhou0404/phantomjs,DocuSignDev/phantomjs,bukalov/phantomjs,ramanajee/phantomjs,joomel1/phantomjs,markhu/phantomjs,bettiolo/phantomjs,dhendo/phantomjs,iver333/phantomjs,bprodoehl/phantomjs,Andrey-Pavlov/phantomjs,tmuelle2/phantomjs,neraliu/tpjs,admetricks/phantomjs,dparshin/phantomjs,klim-iv/phantomjs-qt5,bettiolo/phantomjs,likaiwalkman/phantomjs,neraliu/tainted-phantomjs,saisai/phantomjs,jillesme/phantomjs,mapbased/phantomjs,farhi-naz/phantomjs,ixiom/phantomjs,lseyesl/phantomjs,neraliu/tainted-phantomjs,ariya/phantomjs,pbrazdil/phantomjs,grevutiu-gabriel/phantomjs,aljscott/phantomjs,AladdinSonni/phantomjs,lattwood/phantomjs,bkrukowski/phantomjs,S11001001/phantomjs,thomasrogers03/phantomjs,dhendo/phantomjs,vegetableman/phantomjs,vietch2612/phantomjs,kyroskoh/phantomjs,RobertoMalatesta/phantomjs,pbrazdil/phantomjs,Deepakpatle/phantomjs,liorvh/phantomjs,Klaudit/phantomjs,lseyesl/phantomjs,Vitallium/phantomjs,jefleponot/phantomjs,zhengyongbo/phantomjs,mark-ignacio/phantomjs,vegetableman/phantomjs,mark-ignacio/phantomjs,rishilification/phantomjs,mark-ignacio/phantomjs,bmotlaghFLT/FLT_PhantomJS,dhendo/phantomjs,Deepakpatle/phantomjs,MeteorAdminz/phantomjs,apanda/phantomjs-intercept,fxtentacle/phantomjs,sxhao/phantomjs,chylli/phantomjs,Klaudit/phantomjs,JamesMGreene/phantomjs,Observer-Wu/phantomjs,cesarmarinhorj/phantomjs,zhengyongbo/phantomjs,brandingbrand/phantomjs,dparshin/phantomjs,mark-ignacio/phantomjs,neraliu/tpjs,dongritengfei/phantomjs,ariya/phantomjs,linjeffrey/phantomjs,jefleponot/phantomjs,VinceZK/phantomjs,viewdy/phantomjs2,StevenBlack/phantomjs,gskachkov/phantomjs,danigonza/phantomjs,pigshell/nhnick,sxhao/phantomjs,delighted/phantomjs,Vitallium/phantomjs,grevutiu-gabriel/phantomjs,jkenn99/phantomjs,jkenn99/phantomjs,thomasrogers03/phantomjs,grevutiu-gabriel/phantomjs,jdar/phantomjs-modified,jkburges/phantomjs,toanalien/phantomjs,you21979/phantomjs,jguyomard/phantomjs,jorik041/phantomjs,klim-iv/phantomjs-qt5,danigonza/phantomjs,ChrisAntaki/phantomjs,VinceZK/phantomjs,StevenBlack/phantomjs,cesarmarinhorj/phantomjs,asrie/phantomjs,ChrisAntaki/phantomjs,fentas/phantomjs,sharma1nitish/phantomjs,eugene1g/phantomjs,mapbased/phantomjs,jillesme/phantomjs,apanda/phantomjs-intercept,viewdy/phantomjs2,woodpecker1/phantomjs,neraliu/tainted-phantomjs,viewdy/phantomjs2,avinashkunuje/phantomjs,xsyntrex/phantomjs,wuxianghou/phantomjs,youprofit/phantomjs,jillesme/phantomjs,klim-iv/phantomjs-qt5,bprodoehl/phantomjs,neraliu/tainted-phantomjs,webmull/phantomjs,astefanutti/phantomjs,shinate/phantomjs,martonw/phantomjs,klim-iv/phantomjs-qt5,wxkdesky/phantomjs,yoki/phantomjs,paulfitz/phantomjs,RobertoMalatesta/phantomjs,chauhanmohit/phantomjs,thomasrogers03/phantomjs,tmuelle2/phantomjs,JamesMGreene/phantomjs,gitromand/phantomjs,gitromand/phantomjs,delighted/phantomjs,mark-ignacio/phantomjs,klickagent/phantomjs,Lkhagvadelger/phantomjs,tinfoil/phantomjs,StevenBlack/phantomjs,ye11ow/phantomjs,eugene1g/phantomjs,jguyomard/phantomjs,jorik041/phantomjs,pbrazdil/phantomjs,smasala/phantomjs,Observer-Wu/phantomjs,JamesMGreene/phantomjs,gskachkov/phantomjs,Lochlan/phantomjs,lseyesl/phantomjs,farhi-naz/phantomjs,eugene1g/phantomjs,jkenn99/phantomjs,likaiwalkman/phantomjs,attilahorvath/phantomjs,youprofit/phantomjs,hexid/phantomjs,unb-libraries/phantomjs,StevenBlack/phantomjs,StevenBlack/phantomjs,nin042/phantomjs,apanda/phantomjs-intercept,r3b/phantomjs,iradul/phantomjs,ezoic/phantomjs,JamesMGreene/phantomjs,vietch2612/phantomjs,admetricks/phantomjs,hexid/phantomjs,lattwood/phantomjs,jdar/phantomjs-modified,you21979/phantomjs,likaiwalkman/phantomjs,jillesme/phantomjs,petermat/phantomjs,gskachkov/phantomjs,vietch2612/phantomjs,shinate/phantomjs,jefleponot/phantomjs,revolutionaryG/phantomjs,jguyomard/phantomjs,saisai/phantomjs,Lochlan/phantomjs,chirilo/phantomjs,ramanajee/phantomjs,PeterWangPo/phantomjs,pigshell/nhnick,S11001001/phantomjs,klickagent/phantomjs,ramanajee/phantomjs,ixiom/phantomjs,Observer-Wu/phantomjs,PeterWangPo/phantomjs,Dinamize/phantomjs,rishilification/phantomjs,saisai/phantomjs,ChrisAntaki/phantomjs,toanalien/phantomjs,jguyomard/phantomjs,kinwahlai/phantomjs-ghostdriver,toanalien/phantomjs,mapbased/phantomjs,zackw/phantomjs,Dinamize/phantomjs,joomel1/phantomjs,skyeckstrom/phantomjs,asrie/phantomjs,sporttech/phantomjs,linjeffrey/phantomjs,zackw/phantomjs,eceglov/phantomjs,smasala/phantomjs,chirilo/phantomjs,you21979/phantomjs,Lkhagvadelger/phantomjs,AladdinSonni/phantomjs,Deepakpatle/phantomjs,likaiwalkman/phantomjs,farhi-naz/phantomjs,likaiwalkman/phantomjs,Dinamize/phantomjs,bettiolo/phantomjs,mattvick/phantomjs,tmuelle2/phantomjs,eceglov/phantomjs,yoki/phantomjs,martonw/phantomjs,Medium/phantomjs-1,bprodoehl/phantomjs,markhu/phantomjs,dparshin/phantomjs,chylli/phantomjs,jdar/phantomjs-modified,woodpecker1/phantomjs,chauhanmohit/phantomjs,delighted/phantomjs,bprodoehl/phantomjs,unb-libraries/phantomjs,MeteorAdminz/phantomjs,peakji/phantomjs,nicksay/phantomjs,shinate/phantomjs,sxhao/phantomjs,thomasrogers03/phantomjs,djmaze/phantomjs,rishilification/phantomjs,chylli/phantomjs,paulfitz/phantomjs,farhi-naz/phantomjs,dongritengfei/phantomjs,skyeckstrom/phantomjs,cloudflare/phantomjs,ezoic/phantomjs,revolutionaryG/phantomjs,djmaze/phantomjs,ariya/phantomjs,mapbased/phantomjs,OCForks/phantomjs,Lkhagvadelger/phantomjs,aljscott/phantomjs,chirilo/phantomjs,JamesMGreene/phantomjs,webmull/phantomjs,angelman/phantomjs,petermat/phantomjs,angelman/phantomjs,ye11ow/phantomjs,jguyomard/phantomjs,dparshin/phantomjs,avinashkunuje/phantomjs,jjyycchh/phantomjs,jkburges/phantomjs,yoki/phantomjs,tianzhihen/phantomjs,bmotlaghFLT/FLT_PhantomJS,Dinamize/phantomjs,JingZhou0404/phantomjs,Lochlan/phantomjs,cloudflare/phantomjs,MeteorAdminz/phantomjs,nin042/phantomjs,avinashkunuje/phantomjs,cirrusone/phantom2,farhi-naz/phantomjs,jorik041/phantomjs,skyeckstrom/phantomjs,bettiolo/phantomjs,Deepakpatle/phantomjs,Andrey-Pavlov/phantomjs,bprodoehl/phantomjs,vegetableman/phantomjs,youprofit/phantomjs,asrie/phantomjs,toanalien/phantomjs,admetricks/phantomjs,klim-iv/phantomjs-qt5,tinfoil/phantomjs,tianzhihen/phantomjs,bjko/phantomjs,kinwahlai/phantomjs-ghostdriver,kinwahlai/phantomjs-ghostdriver,OCForks/phantomjs,viewdy/phantomjs2,you21979/phantomjs,mattvick/phantomjs,pataquets/phantomjs,xsyntrex/phantomjs,DocuSignDev/phantomjs,likaiwalkman/phantomjs,cesarmarinhorj/phantomjs,zhulin2609/phantomjs,sxhao/phantomjs,mapbased/phantomjs,xsyntrex/phantomjs,mark-ignacio/phantomjs,linjeffrey/phantomjs,delighted/phantomjs,Lkhagvadelger/phantomjs,pataquets/phantomjs,bjko/phantomjs,kinwahlai/phantomjs-ghostdriver,bukalov/phantomjs,Observer-Wu/phantomjs,apanda/phantomjs-intercept,peakji/phantomjs,bjko/phantomjs,mark-ignacio/phantomjs,toanalien/phantomjs,tianzhihen/phantomjs,iradul/phantomjs,nin042/phantomjs,neraliu/tainted-phantomjs,toanalien/phantomjs,Medium/phantomjs-1,you21979/phantomjs,lattwood/phantomjs,jjyycchh/phantomjs,zhengyongbo/phantomjs,jorik041/phantomjs,you21979/phantomjs,MaDKaTZe/phantomjs,tianzhihen/phantomjs,pigshell/nhnick,chirilo/phantomjs,unb-libraries/phantomjs,aljscott/phantomjs,jefleponot/phantomjs,jillesme/phantomjs,RobertoMalatesta/phantomjs,tmuelle2/phantomjs,iver333/phantomjs,yoki/phantomjs,paulfitz/phantomjs,sharma1nitish/phantomjs,bmotlaghFLT/FLT_PhantomJS,klickagent/phantomjs,admetricks/phantomjs,petermat/phantomjs,danigonza/phantomjs,joomel1/phantomjs,martonw/phantomjs,kyroskoh/phantomjs,bkrukowski/phantomjs,DocuSignDev/phantomjs,cloudflare/phantomjs,NickelMedia/phantomjs,iradul/phantomjs,RobertoMalatesta/phantomjs,rishilification/phantomjs,chauhanmohit/phantomjs,grevutiu-gabriel/phantomjs,r3b/phantomjs,pbrazdil/phantomjs,fxtentacle/phantomjs,fxtentacle/phantomjs,astefanutti/phantomjs,bkrukowski/phantomjs,wuxianghou/phantomjs,fentas/phantomjs,AladdinSonni/phantomjs,djmaze/phantomjs,zhulin2609/phantomjs,houzhenggang/phantomjs,petermat/phantomjs,liorvh/phantomjs,pataquets/phantomjs,Vitallium/phantomjs,mattvick/phantomjs,sharma1nitish/phantomjs,xsyntrex/phantomjs,tinfoil/phantomjs,kyroskoh/phantomjs,JingZhou0404/phantomjs,matepeter90/phantomjs,OCForks/phantomjs,NickelMedia/phantomjs,Klaudit/phantomjs,martonw/phantomjs,viewdy/phantomjs2,pbrazdil/phantomjs,JingZhou0404/phantomjs,unb-libraries/phantomjs,smasala/phantomjs,avinashkunuje/phantomjs,jillesme/phantomjs,S11001001/phantomjs,attilahorvath/phantomjs,apanda/phantomjs-intercept,eceglov/phantomjs,zackw/phantomjs,zackw/phantomjs,yoki/phantomjs,likaiwalkman/phantomjs,bukalov/phantomjs,jorik041/phantomjs,klim-iv/phantomjs-qt5,ixiom/phantomjs,bukalov/phantomjs,tinfoil/phantomjs,grevutiu-gabriel/phantomjs,kinwahlai/phantomjs-ghostdriver,lseyesl/phantomjs,cesarmarinhorj/phantomjs,joomel1/phantomjs,grevutiu-gabriel/phantomjs,jorik041/phantomjs,Observer-Wu/phantomjs,vegetableman/phantomjs,bkrukowski/phantomjs,brandingbrand/phantomjs,dongritengfei/phantomjs,paulfitz/phantomjs,pataquets/phantomjs,bmotlaghFLT/FLT_PhantomJS,pataquets/phantomjs,asrie/phantomjs,jkburges/phantomjs,christoph-buente/phantomjs,thomasrogers03/phantomjs,pataquets/phantomjs,wuxianghou/phantomjs,Observer-Wu/phantomjs,jefleponot/phantomjs,djmaze/phantomjs,mattvick/phantomjs,pbrazdil/phantomjs,ramanajee/phantomjs,Tomtomgo/phantomjs,kyroskoh/phantomjs,zhengyongbo/phantomjs,MaDKaTZe/phantomjs,chirilo/phantomjs,sxhao/phantomjs,youprofit/phantomjs,christoph-buente/phantomjs,jjyycchh/phantomjs,astefanutti/phantomjs,iradul/phantomjs-clone,lseyesl/phantomjs,pcarrier-packaging/deb-phantomjs,dongritengfei/phantomjs,Medium/phantomjs-1,jkenn99/phantomjs,houzhenggang/phantomjs,ariya/phantomjs,angelman/phantomjs,wuxianghou/phantomjs,youprofit/phantomjs,sporttech/phantomjs,youprofit/phantomjs,ixiom/phantomjs,RobertoMalatesta/phantomjs,paulfitz/phantomjs,delighted/phantomjs,ye11ow/phantomjs,chylli/phantomjs,nin042/phantomjs,NickelMedia/phantomjs,Tomtomgo/phantomjs,wxkdesky/phantomjs,Observer-Wu/phantomjs,Lochlan/phantomjs,bkrukowski/phantomjs,jillesme/phantomjs,ChrisAntaki/phantomjs,vegetableman/phantomjs,shinate/phantomjs,Vitallium/phantomjs,tianzhihen/phantomjs,tianzhihen/phantomjs,MaDKaTZe/phantomjs,matepeter90/phantomjs,JingZhou0404/phantomjs,avinashkunuje/phantomjs,gitromand/phantomjs,Lochlan/phantomjs,attilahorvath/phantomjs,shinate/phantomjs,astefanutti/phantomjs,you21979/phantomjs,zhengyongbo/phantomjs,liorvh/phantomjs,Klaudit/phantomjs,markhu/phantomjs,pbrazdil/phantomjs,ChrisAntaki/phantomjs,cesarmarinhorj/phantomjs,markhu/phantomjs,Andrey-Pavlov/phantomjs,djmaze/phantomjs,bprodoehl/phantomjs,ixiom/phantomjs,angelman/phantomjs,MeteorAdminz/phantomjs,Lkhagvadelger/phantomjs,linjeffrey/phantomjs,RobertoMalatesta/phantomjs,webmull/phantomjs,pataquets/phantomjs,ezoic/phantomjs,bjko/phantomjs,grevutiu-gabriel/phantomjs,chauhanmohit/phantomjs,neraliu/tpjs,wxkdesky/phantomjs,PeterWangPo/phantomjs,S11001001/phantomjs,RobertoMalatesta/phantomjs,Observer-Wu/phantomjs,Tomtomgo/phantomjs,bprodoehl/phantomjs,fentas/phantomjs,AladdinSonni/phantomjs,linjeffrey/phantomjs,mapbased/phantomjs,sharma1nitish/phantomjs,eugene1g/phantomjs,christoph-buente/phantomjs,Lochlan/phantomjs,Deepakpatle/phantomjs,wxkdesky/phantomjs,aljscott/phantomjs,angelman/phantomjs,DocuSignDev/phantomjs,chirilo/phantomjs,jdar/phantomjs-modified,wuxianghou/phantomjs,zhengyongbo/phantomjs,rishilification/phantomjs,zackw/phantomjs,astefanutti/phantomjs,bprodoehl/phantomjs,zhulin2609/phantomjs,cirrusone/phantom2,saisai/phantomjs,cirrusone/phantom2,Tomtomgo/phantomjs,Lochlan/phantomjs,asrie/phantomjs,jillesme/phantomjs,matepeter90/phantomjs,dongritengfei/phantomjs,joomel1/phantomjs,gskachkov/phantomjs,pcarrier-packaging/deb-phantomjs,martonw/phantomjs,apanda/phantomjs-intercept,jjyycchh/phantomjs,eugene1g/phantomjs,petermat/phantomjs,Observer-Wu/phantomjs,tmuelle2/phantomjs,bmotlaghFLT/FLT_PhantomJS,etiennekruger/phantomjs-qt5,dhendo/phantomjs,gitromand/phantomjs,markhu/phantomjs,brandingbrand/phantomjs,toanalien/phantomjs,houzhenggang/phantomjs,mattvick/phantomjs,forzi/phantomjs_stradivari_fork,sharma1nitish/phantomjs,wxkdesky/phantomjs,jefleponot/phantomjs,sxhao/phantomjs,linjeffrey/phantomjs,fxtentacle/phantomjs,cloudflare/phantomjs,iver333/phantomjs,paulfitz/phantomjs,ezoic/phantomjs,dparshin/phantomjs,StevenBlack/phantomjs,angelman/phantomjs,chauhanmohit/phantomjs,woodpecker1/phantomjs,eceglov/phantomjs,klickagent/phantomjs,cesarmarinhorj/phantomjs,iradul/phantomjs-clone,djmaze/phantomjs,r3b/phantomjs,klim-iv/phantomjs-qt5,woodpecker1/phantomjs,neraliu/tpjs,danigonza/phantomjs,iver333/phantomjs,Medium/phantomjs-1,sporttech/phantomjs,ChrisAntaki/phantomjs,jkenn99/phantomjs,christoph-buente/phantomjs,angelman/phantomjs,vegetableman/phantomjs,revolutionaryG/phantomjs,asrie/phantomjs,viewdy/phantomjs2,S11001001/phantomjs,martonw/phantomjs,mark-ignacio/phantomjs,ixiom/phantomjs,revolutionaryG/phantomjs,admetricks/phantomjs,lseyesl/phantomjs,ChrisAntaki/phantomjs,ye11ow/phantomjs,zhulin2609/phantomjs,vietch2612/phantomjs,bmotlaghFLT/FLT_PhantomJS,lattwood/phantomjs,PeterWangPo/phantomjs,djmaze/phantomjs,nin042/phantomjs,apanda/phantomjs-intercept,sharma1nitish/phantomjs,bkrukowski/phantomjs | python/utils.py | python/utils.py | '''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <[email protected]>
Copyright (C) 2011 Ariya Hidayat <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import argparse
version_major = 1
version_minor = 1
version_patch = 0
version = '%d.%d.%d' % (version_major, version_minor, version_patch)
license = '''
PyPhantomJS Version %s
Copyright (C) 2011 James Roe <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
''' % version
def argParser():
parser = argparse.ArgumentParser(
description='Minimalistic headless WebKit-based JavaScript-driven tool',
usage='%(prog)s [options] script.[js|coffee] [script argument [script argument ...]]',
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument('--load-images', default='yes',
choices=['yes', 'no'],
help='Load all inlined images (default: %(default)s)'
)
parser.add_argument('--load-plugins', default='no',
choices=['yes', 'no'],
help='Load all plugins (i.e. Flash, Silverlight, ...)\n(default: %(default)s)'
)
parser.add_argument('--proxy', metavar='address:port',
help='Set the network proxy'
)
parser.add_argument('--upload-file', nargs='*',
metavar='tag=file', help='Upload 1 or more files'
)
parser.add_argument('script', metavar='script.[js|coffee]', nargs='*',
help='The script to execute, and any args to pass to it'
)
parser.add_argument('--version',
action='version', version=license,
help='show this program\'s version and license'
)
return parser
| '''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <[email protected]>
Copyright (C) 2011 Ariya Hidayat <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import argparse
version_major = 1
version_minor = 1
version_patch = 0
version = '%d.%d.%d' % (version_major, version_minor, version_patch)
license = '''
PyPhantomJS Version %s
Copyright (C) 2011 James Roe <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
''' % version
def argParser():
parser = argparse.ArgumentParser(
description='Minimalistic headless WebKit-based JavaScript-driven tool',
usage='%(prog)s [options] script.[js|coffee] [script argument [script argument ...]]',
formatter_class=argparse.RawTextHelpFormatter
)
parser.add_argument('--load-images', default='yes',
choices=['yes', 'no'],
help='Load all inlined images (default: %(default)s)'
)
parser.add_argument('--load-plugins', default='no',
choices=['yes', 'no'],
help='Load all plugins (i.e. Flash, Silverlight, ...)\n(default: %(default)s)'
)
parser.add_argument('--proxy', metavar='address:port',
help='Set the network proxy'
)
parser.add_argument('--upload-file', nargs='*',
metavar='tag:file', help='Upload 1 or more files'
)
parser.add_argument('script', metavar='script.[js|coffee]', nargs='*',
help='The script to execute, and any args to pass to it'
)
parser.add_argument('--version',
action='version', version=license,
help='show this program\'s version and license'
)
return parser
| bsd-3-clause | Python |
4065a08ea401e0d95e8d40d9d735edf92edda861 | Add unit tests on cache handler | openstack/oslo.policy | oslo_policy/tests/test_cache_handler.py | oslo_policy/tests/test_cache_handler.py | # Copyright (c) 2020 OpenStack Foundation.
# All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test the cache handler module"""
import os
import fixtures
from oslotest import base as test_base
from oslo_policy import _cache_handler as _ch
class CacheHandlerTest(test_base.BaseTestCase):
def setUp(self):
super().setUp()
self.tmpdir = self.useFixture(fixtures.TempDir())
def test_read_cached_file(self):
file_cache = {}
path = os.path.join(self.tmpdir.path, 'tmpfile')
with open(path, 'w+') as fp:
fp.write('test')
reloaded, data = _ch.read_cached_file(file_cache, path)
self.assertEqual('test', data)
self.assertTrue(reloaded)
reloaded, data = _ch.read_cached_file(file_cache, path)
self.assertEqual('test', data)
self.assertFalse(reloaded)
reloaded, data = _ch.read_cached_file(
file_cache, path, force_reload=True)
self.assertEqual('test', data)
self.assertTrue(reloaded)
def test_read_cached_file_with_updates(self):
file_cache = {}
path = os.path.join(self.tmpdir.path, 'tmpfile')
with open(path, 'w+') as fp:
fp.write('test')
reloaded, data = _ch.read_cached_file(file_cache, path)
# update the timestamps
times = (os.stat(path).st_atime + 1, os.stat(path).st_mtime + 1)
os.utime(path, times)
reloaded, data = _ch.read_cached_file(file_cache, path)
self.assertTrue(reloaded)
| apache-2.0 | Python |
|
bcc7dc9bc014dfd17db6fced18de47535a003b60 | add import JSON script for propostions #2 | dpausp/arguments,dpausp/arguments,dpausp/arguments,dpausp/arguments | scripts/import_proposition_json.py | scripts/import_proposition_json.py | import argparse
import json
import logging
import sqlalchemy.orm
import transaction
from eliot import log_call, start_task
logging.basicConfig(level=logging.INFO)
from ekklesia_portal.app import make_wsgi_app
@log_call
def load_proposition_json_file(filepath):
with open(filepath) as f:
json_data = json.load(f)
# TODO: Slugify tag_names!
required_fields = {
'title',
'author',
'abstract',
'content'
}
optional_fields = {
'motivation',
'tags',
'external_discussion_url'
}
imported = {}
# TODO: Use Sets to find out which keys are missing!
for key in required_fields:
try:
value = json_data[key]
except KeyError:
raise KeyError(f"malformed wiki json_data JSON, key '{key}' not found!")
imported[key] = value
for key in optional_fields:
imported[key] = json_data.get(key)
if "type" in json_data:
imported["tags"].append(json_data["type"])
if "group" in json_data:
imported["tags"].append(json_data["group"])
imported["tags"] = list(set(imported["tags"]))
return imported
@log_call
def insert_proposition(department_name, title, abstract, content, motivation, author, tags, external_discussion_url):
department = session.query(Department).filter_by(name=department_name).one()
maybe_subject_area = [area for area in department.areas if area.name == "Allgemein"]
if not maybe_subject_area:
raise ValueError("Subject area 'Allgemein' not found! Please create it!")
subject_area = maybe_subject_area[0]
user = session.query(User).filter_by(name=author).scalar()
if user is None:
user = User(name=author, auth_type="import")
ballot = Ballot(area=subject_area)
proposition = Proposition(title=title, abstract=abstract, content=content, motivation=motivation,
external_discussion_url=external_discussion_url, ballot=ballot)
for tag_name in tags:
tag = session.query(Tag).filter_by(name=tag_name).scalar()
if tag is None:
tag = Tag(name=tag_name)
proposition.tags.append(tag)
supporter = Supporter(member=user, proposition=proposition, submitter=True)
session.add(supporter)
parser = argparse.ArgumentParser("Ekklesia Portal import_proposition_json.py")
parser.add_argument("-c", "--config-file", help=f"path to config file in YAML / JSON format")
parser.add_argument("-d", "--department", help=f"Choose the department to import to.")
parser.add_argument('filenames', nargs='+')
if __name__ == "__main__":
logg = logging.getLogger(__name__)
args = parser.parse_args()
app = make_wsgi_app(args.config_file)
from ekklesia_portal.database.datamodel import Ballot, Department, Proposition, User, Supporter, Tag
from ekklesia_portal.database import Session
session = Session()
sqlalchemy.orm.configure_mappers()
for fp in args.filenames:
with start_task(action_type="import_proposition"):
imported_data = load_proposition_json_file(fp)
insert_proposition(args.department, **imported_data)
transaction.commit()
| agpl-3.0 | Python |
|
3661ca3947763656165f8fc68ea42358ad37285a | Add stub for qiprofile update test. | ohsu-qin/qipipe | test/unit/helpers/test_qiprofile.py | test/unit/helpers/test_qiprofile.py | import os
import glob
import shutil
from nose.tools import (assert_equal, assert_is_not_none)
import qixnat
from ... import (project, ROOT)
from ...helpers.logging import logger
from qipipe.helpers import qiprofile
COLLECTION = 'Sarcoma'
"""The test collection."""
SUBJECT = 'Sarcoma001'
"""The test subjects."""
SESSION = 'Session01'
"""The test session."""
class TestQIProfile(object):
"""qiprofile update tests."""
def setUp(self):
self._clean()
self._seed()
def tearDown(self):
self._clean()
def test_sync_session(self):
logger(__name__).debug("Testing qiprofile sync on %s %s..." %
(SUBJECT, SESSION))
def _clean(self):
"""Deletes the test XNAT session."""
with qixnat.connect() as xnat:
# Delete the test subject, if it exists.
xnat.delete_subjects(project, subject)
def _seed(self):
"""Populates the test XNAT session."""
with qixnat.connect() as xnat:
# Delete the test subject, if it exists.
xnat.delete_subjects(project(), subject)
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
| bsd-2-clause | Python |
|
36af45d88f01723204d9b65d4081e74a80f0776b | Add test for layers module. | chrinide/theanets,devdoer/theanets,lmjohns3/theanets | test/layers_test.py | test/layers_test.py | import theanets
import numpy as np
class TestLayer:
def test_build(self):
layer = theanets.layers.build('feedforward', nin=2, nout=4)
assert isinstance(layer, theanets.layers.Layer)
class TestFeedforward:
def test_create(self):
l = theanets.layers.Feedforward(nin=2, nout=4)
assert l.reset() == 12
class TestTied:
def test_create(self):
l0 = theanets.layers.Feedforward(nin=2, nout=4)
l = theanets.layers.Tied(partner=l0)
assert l.reset() == 2
class TestClassifier:
def test_create(self):
l = theanets.layers.Classifier(nin=2, nout=4)
assert l.reset() == 12
class TestRecurrent:
def test_create(self):
l = theanets.layers.Recurrent(nin=2, nout=4)
assert l.reset() == 28
class TestMRNN:
def test_create(self):
l = theanets.layers.MRNN(nin=2, nout=4, factors=3)
assert l.reset() == 42
class TestLSTM:
def test_create(self):
l = theanets.layers.LSTM(nin=2, nout=4)
assert l.reset() == 124
| mit | Python |
|
3dcf251276060b43ac888e0239f26a0cf2531832 | Add tests for proxy drop executable | gdestuynder/MozDef,mozilla/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,mozilla/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,mozilla/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,mozilla/MozDef | tests/test_proxy_drop_executable.py | tests/test_proxy_drop_executable.py | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from positive_alert_test_case import PositiveAlertTestCase
from negative_alert_test_case import NegativeAlertTestCase
from alert_test_suite import AlertTestSuite
class TestAlertSSHPasswordAuthViolation(AlertTestSuite):
alert_filename = "proxy_drop_non_standard_port"
# This event is the default positive event that will cause the
# alert to trigger
default_event = {
"_type": "event",
"_source": {
"category": "squid",
"tags": ["squid"],
"details": {
"details.sourceipaddress": "1.2.3.4",
"details.destination": "http://evil.com/evil.exe",
"details.proxyaction": "TCP_DENIED/-",
}
}
}
# This alert is the expected result from running this task
default_alert = {
"category": "squid",
"tags": ['squid', 'proxy'],
"severity": "WARNING",
"summary": 'Multiple Proxy DROP events detected from 1.2.3.4 to the following executable file destinations: http://evil.com/evil.exe',
}
test_cases = []
test_cases.append(
PositiveAlertTestCase(
description="Positive test with default events and default alert expected",
events=AlertTestSuite.create_events(default_event, 1),
expected_alert=default_alert
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['category'] = 'bad'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with events with incorrect category",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['tags'] = 'bad tag example'
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with events with incorrect tags",
events=events,
)
)
events = AlertTestSuite.create_events(default_event, 10)
for event in events:
event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({
'minutes': 241})
event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({
'minutes': 241})
test_cases.append(
NegativeAlertTestCase(
description="Negative test case with old timestamp",
events=events,
)
)
| mpl-2.0 | Python |
|
30d4301a04081f3d7a4fdba835a56aa0adac1375 | fix latent slaves started serially with monkey patch instead | ethereum/ethereum-buildbot,ethereum/ethereum-buildbot,ethereum/ethereum-buildbot,vaporry/ethereum-buildbot,vaporry/ethereum-buildbot,vaporry/ethereum-buildbot | monkeypatch.py | monkeypatch.py | from twisted.python import log
from twisted.internet import reactor
def botmaster_maybeStartBuildsForSlave(self, slave_name):
"""
We delay this for 10 seconds, so that if multiple slaves start at the same
time, builds will be distributed between them.
"""
def do_start():
log.msg(format="Really starting builds on %(slave_name)s",
slave_name=slave_name)
builders = self.getBuildersForSlave(slave_name)
self.brd.maybeStartBuildsOn([b.name for b in builders])
log.msg(format="Waiting to start builds on %(slave_name)s",
slave_name=slave_name)
reactor.callLater(10, do_start)
from buildbot.process.slavebuilder import AbstractSlaveBuilder
def slavebuilder_buildStarted(self):
AbstractSlaveBuilder.buildStarted(self)
if self.slave and hasattr(self.slave, 'buildStarted'):
self.slave.buildStarted(self)
from buildbot.process.buildrequestdistributor import BasicBuildChooser
class NoFallBackBuildChooser(BasicBuildChooser):
"""
BuildChooser that doesn't fall back to rejected slaves.
In particular, builds with locks won't be assigned before a lock is ready.
"""
def __init__(self, bldr, master):
BasicBuildChooser.__init__(self, bldr, master)
self.rejectedSlaves = None
def apply_patches():
log.msg("Apply flocker_bb.monkeypatch.")
from buildbot.process.botmaster import BotMaster
BotMaster.maybeStartBuildsForSlave = botmaster_maybeStartBuildsForSlave
from buildbot.process.slavebuilder import SlaveBuilder
SlaveBuilder.buildStarted = slavebuilder_buildStarted
from buildbot.steps.master import MasterShellCommand
MasterShellCommand.renderables += ['path']
from buildbot.process.buildrequestdistributor import (
BuildRequestDistributor)
BuildRequestDistributor.BuildChooser = NoFallBackBuildChooser
| mit | Python |
|
379aef7e3aebc05352cacd274b43b156e32de18b | Add script to run tests | lamarmeigs/django-clean-fields | runtests.py | runtests.py | #!/usr/bin/env python
import argparse
import sys
import django
from django.conf import settings
from django.test.utils import get_runner
def runtests(test_labels):
settings.configure(INSTALLED_APPS=['tests'])
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner()
failures = test_runner.run_tests(test_labels)
sys.exit(failures)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('test_labels', nargs='*', default=['tests'])
args = parser.parse_args()
runtests(test_labels=args.test_labels)
| mit | Python |
|
abf39931331f54aff5f10345939420041bd2039d | Add test for APS2 instruction merging. | Plourde-Research-Lab/PyQLab,BBN-Q/PyQLab,rmcgurrin/PyQLab,calebjordan/PyQLab | tests/test_APS2Pattern.py | tests/test_APS2Pattern.py | import h5py
import unittest
import numpy as np
from copy import copy
from QGL import *
from instruments.drivers import APS2Pattern
class APSPatternUtils(unittest.TestCase):
def setUp(self):
self.q1gate = Channels.LogicalMarkerChannel(label='q1-gate')
self.q1 = Qubit(label='q1', gateChan=self.q1gate)
self.q1 = Qubit(label='q1')
self.q1.pulseParams['length'] = 30e-9
Compiler.channelLib = {'q1': self.q1, 'q1-gate': self.q1gate}
def test_synchronize_control_flow(self):
q1 = self.q1
pulse = Compiler.Waveform()
pulse.length = 24
pulse.key = 12345
delay = Compiler.Waveform()
delay.length = 100
delay.isTimeAmp = True
blank = Compiler.Waveform( BLANK(q1, pulse.length) )
seq_1 = [qwait(), delay, copy(pulse), qwait(), copy(pulse)]
seq_2 = [qwait(), copy(blank), qwait(), copy(blank)]
offsets = { APS2Pattern.wf_sig(pulse) : 0 }
instructions = APS2Pattern.create_seq_instructions([seq_1, seq_2, [], [], []], offsets)
instr_types = [
APS2Pattern.SYNC,
APS2Pattern.WAIT,
APS2Pattern.WFM,
APS2Pattern.MARKER,
APS2Pattern.WFM,
APS2Pattern.WAIT,
APS2Pattern.WFM,
APS2Pattern.MARKER
]
for actual, expected in zip(instructions, instr_types):
instrOpCode = (actual.header >> 4) & 0xf
assert(instrOpCode == expected)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | Python |
|
df05088b5a6233cb262017b8489723c23000eb17 | Add variable | robotframework/RIDE,HelioGuilherme66/RIDE,caio2k/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE,fingeronthebutton/RIDE,fingeronthebutton/RIDE,HelioGuilherme66/RIDE,caio2k/RIDE,HelioGuilherme66/RIDE,caio2k/RIDE,fingeronthebutton/RIDE,robotframework/RIDE | src/robotide/ui/images.py | src/robotide/ui/images.py | # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import wx
from robot.parsing.model import Variable
_SIZE = (16, 16)
_BASE = os.path.dirname(__file__)
class TreeImageList(wx.ImageList):
def __init__(self):
wx.ImageList.__init__(self, *_SIZE)
self._images = {
'TestDataDirectory': _TreeImage(self, wx.ART_FOLDER, wx.ART_FOLDER_OPEN),
'TestCaseFile': _TreeImage(self, wx.ART_NORMAL_FILE),
'TestCase': _TreeImage(self, 'robot.png'),
'UserKeyword': _TreeImage(self, 'process.png'),
'ResourceFile': _TreeImage(self, wx.ART_NORMAL_FILE),
'Variable': _TreeImage(self, 'process.png')
}
def __getitem__(self, key):
return self._images[key]
class _TreeImage(object):
def __init__(self, image_list, normal, expanded=None):
self.normal = self._get_image(image_list, normal)
self.expanded = self._get_image(image_list, expanded) if expanded else self.normal
def _get_image(self, image_list, source):
if source.startswith('wx'):
img = wx.ArtProvider_GetBitmap(source, wx.ART_OTHER, _SIZE)
else:
path = os.path.join(_BASE, source)
img = wx.Image(path, wx.BITMAP_TYPE_PNG).ConvertToBitmap()
return image_list.Add(img)
| # Copyright 2008-2009 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import wx
_SIZE = (16, 16)
_BASE = os.path.dirname(__file__)
class TreeImageList(wx.ImageList):
def __init__(self):
wx.ImageList.__init__(self, *_SIZE)
self._images = {
'TestDataDirectory': _TreeImage(self, wx.ART_FOLDER, wx.ART_FOLDER_OPEN),
'TestCaseFile': _TreeImage(self, wx.ART_NORMAL_FILE),
'TestCase': _TreeImage(self, 'robot.png'),
'UserKeyword': _TreeImage(self, 'process.png'),
'ResourceFile': _TreeImage(self, wx.ART_NORMAL_FILE)
}
def __getitem__(self, key):
return self._images[key]
class _TreeImage(object):
def __init__(self, image_list, normal, expanded=None):
self.normal = self._get_image(image_list, normal)
self.expanded = self._get_image(image_list, expanded) if expanded else self.normal
def _get_image(self, image_list, source):
if source.startswith('wx'):
img = wx.ArtProvider_GetBitmap(source, wx.ART_OTHER, _SIZE)
else:
path = os.path.join(_BASE, source)
img = wx.Image(path, wx.BITMAP_TYPE_PNG).ConvertToBitmap()
return image_list.Add(img)
| apache-2.0 | Python |
aeaf2e1a1207f2094ea4298b1ecff015f5996b5a | Add test cases for gabor filter | blink1073/scikit-image,dpshelio/scikit-image,emon10005/scikit-image,ofgulban/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,keflavich/scikit-image,Hiyorimi/scikit-image,keflavich/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,vighneshbirodkar/scikit-image,ClinicalGraphics/scikit-image,newville/scikit-image,WarrenWeckesser/scikits-image,youprofit/scikit-image,pratapvardhan/scikit-image,ofgulban/scikit-image,WarrenWeckesser/scikits-image,ClinicalGraphics/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,Britefury/scikit-image,GaZ3ll3/scikit-image,SamHames/scikit-image,jwiggins/scikit-image,rjeli/scikit-image,chintak/scikit-image,paalge/scikit-image,vighneshbirodkar/scikit-image,bennlich/scikit-image,jwiggins/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,SamHames/scikit-image,chintak/scikit-image,rjeli/scikit-image,bennlich/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,juliusbierk/scikit-image,ajaybhat/scikit-image,michaelaye/scikit-image,paalge/scikit-image,bsipocz/scikit-image,juliusbierk/scikit-image,Midafi/scikit-image,pratapvardhan/scikit-image,youprofit/scikit-image,rjeli/scikit-image,robintw/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,michaelaye/scikit-image,chriscrosscutler/scikit-image,ajaybhat/scikit-image,almarklein/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,chintak/scikit-image,Midafi/scikit-image,almarklein/scikit-image,emon10005/scikit-image,oew1v07/scikit-image,robintw/scikit-image,ofgulban/scikit-image,newville/scikit-image,GaZ3ll3/scikit-image,Hiyorimi/scikit-image | skimage/filter/tests/test_gabor.py | skimage/filter/tests/test_gabor.py | import numpy as np
from numpy.testing import assert_almost_equal, assert_array_almost_equal
from skimage.filter import gabor_kernel, gabor_filter
def test_gabor_kernel_sum():
for sigmax in range(1, 10, 2):
for sigmay in range(1, 10, 2):
for frequency in range(0, 10, 2):
kernel = gabor_kernel(sigmax, sigmay, frequency+0.1, 0)
# make sure gaussian distribution is covered nearly 100%
assert_almost_equal(np.abs(kernel).sum(), 1, 2)
def test_gabor_kernel_theta():
for sigmax in range(1, 10, 2):
for sigmay in range(1, 10, 2):
for frequency in range(0, 10, 2):
for theta in range(0, 10, 2):
kernel0 = gabor_kernel(sigmax, sigmay, frequency+0.1, theta)
kernel180 = gabor_kernel(sigmax, sigmay, frequency,
theta+np.pi)
assert_array_almost_equal(np.abs(kernel0),
np.abs(kernel180))
def test_gabor_filter():
real, imag = gabor_filter(np.random.random((100, 100)), 1, 1, 1, 1)
if __name__ == "__main__":
from numpy import testing
testing.run_module_suite()
| bsd-3-clause | Python |
|
a70f46aac52be5b38b869cfbe18c0421a0032aee | Add script to count parameters of PyTorch model | sauhaardac/training,sauhaardac/training | count_params.py | count_params.py | import sys
import numpy as np
import torch
model = torch.load(sys.argv[1])
params = 0
for key in model:
params += np.multiply.reduce(model[key].shape)
print('Total number of parameters: ' + str(params))
| mit | Python |
|
fd4398b1e811aaa2b876c120f99ca7fff08618ca | install on windows via gohlke wheels | maurov/xraysloth,maurov/xraysloth | scripts/install_on_windows.py | scripts/install_on_windows.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script for installing on Microsoft Windows
Wheels from [GOHLKE WINDOWS REPOSITORY](https://www.lfd.uci.edu/~gohlke/pythonlibs/)
"""
try:
from gohlkegrabber import GohlkeGrabber
except ImportError:
print("gohlkegrabber not installed -> 'pip install gohlkegrabber")
pass
import subprocess
import tempfile
import shutil
PACKAGES = ('numpy')
def install_packages(packages, remove_tmpdir=True):
"""main script"""
_py = '3.7'
_platform = 'win_amd64'
_tmpdir = tempfile.mkdtemp(prefix='py37w')
print(f"Temporary directory is: {_tmpdir}")
gg = GohlkeGrabber()
for pkg in packages:
print(f"retreiving {pkg}...")
pkwhl = gg.retrieve(_tmpdir, pkg, python=_py, platform=_platform)
subprocess.call(f"pip install {pkwhl[0]}")
if remove_tmpdir:
shutil.rmtree(_tmpdir)
print("temporary directory removed")
if __name__ == "__main__":
pass
| bsd-3-clause | Python |
|
495c937d39da1902948065a38502f9d582fa2b3b | Add darkobject tests. | LegionXI/pydarkstar,AdamGagorik/pydarkstar | tests/darkobject.py | tests/darkobject.py | """
.. moduleauthor:: Adam Gagorik <[email protected]>
"""
import unittest
import pydarkstar.logutils
import pydarkstar.darkobject
pydarkstar.logutils.setDebug()
class TestDarkObject(unittest.TestCase):
def test_init(self):
pydarkstar.darkobject.DarkObject()
if __name__ == '__main__':
unittest.main() | mit | Python |
|
6f8699288f79ff856ed58595169cb08956cd210d | Create toeplitz-matrix.py | tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015 | Python/toeplitz-matrix.py | Python/toeplitz-matrix.py | # Time: O(m * n)
# Space: O(1)
class Solution(object):
def isToeplitzMatrix(self, matrix):
"""
:type matrix: List[List[int]]
:rtype: bool
"""
return all(i == 0 or j == 0 or matrix[i-1][j-1] == val
for i, row in enumerate(matrix)
for j, val in enumerate(row))
| mit | Python |
|
d2a78f252bf9a584569e372ca9474863e4496c7a | Add one test | manu3618/legendary-potato | tests/test_utils.py | tests/test_utils.py | import os
from contextlib import suppress
import numpy as np
import pandas as pd
import pytest
import legendary_potato.kernel
import legendary_potato.utils
TEST_PATH = os.path.join(os.path.abspath(os.path.curdir))
SAMPLE_PATH = os.path.join(TEST_PATH, 'sample')
GRAMMATRIX_PATH = os.path.join(TEST_PATH, 'gram_matrix')
def kernel_sample_iterator():
"""Return an iterator over (kernels, samples).
"""
for kern in kernel_iterator():
kern_name = kern.__name__
yield kern, kernel_samples(kern_name)
def kernel_iterator():
"""Return an iterator over kernels to be tested.
"""
for kern_name in os.listdir(SAMPLE_PATH):
yield legendary_potato.kernel.__dict__.get(kern_name)
def kernel_samples(kernel_name):
"""Return iterator over samples for a kernel from specific file(s).
The iterator generate (sample_name, sample)
A kernel_path is generated. If it is a file, each line is considered
to be a sample, and its name is the line number. If it is a direcctory,
each file is considered to be a string sample and its name is the
file name.
"""
kernel_sample_path = os.path.join(SAMPLE_PATH, kernel_name)
sep = ','
if os.path.isfile(kernel_sample_path):
# One sample per line
with open(kernel_sample_path, 'r') as sample_file:
line = sample_file.readline()
try:
if len(np.fromstring(line, sep=sep)) > 0:
# line composed of numbers
is_string = False
else:
# line composed of stings
is_string = True
except ValueError:
# line composed of mix of strings and numbers. should be
# treated as strings
is_string = True
sample_file.seek(0)
for nu, line in enumerate(sample_file):
if is_string:
yield (nu, [row.strip for row in line.split(sep)])
else:
yield (nu, np.fromstring(line, sep=sep))
else:
# kernel_sample_path is a directory
for sample_file in os.listdir(kernel_sample_path):
file_path = os.path.join(kernel_sample_path, sample_file)
with open(file_path, 'r') as pot:
yield sample_file, pot.read()
@pytest.mark.parametrize(('kernel', 'sample'), kernel_sample_iterator())
def test_matrix(kernel, sample, tol=1e-15):
"""Regression test on gram matrix.
Construct the Gram matrix for the kernel and the samples and compare it
to the previously calculated one.
kernel -- the potato kernel to test
sample -- the sample to construct the Gram matrix
tol -- tolerance for pointing float errors
"""
kernel_name = kernel.__name__ # TODO: find a more feng shui way
matrix_path = os.path.join(GRAMMATRIX_PATH, kernel_name + '.csv')
potato_util = legendary_potato.utils.PotatoUtils(kernel)
cur_matrix = potato_util.matrix(tr_s for _, tr_s in sample)
if os.path.exists(matrix_path):
test_matrix = pd.DataFrame().from_csv(matrix_path,
header=None,
index_col=False)
results = np.array(test_matrix, dtype=cur_matrix.dtype) - cur_matrix
assert (np.abs(results) < tol).all()
else:
with suppress(FileExistsError):
os.makedirs(GRAMMATRIX_PATH)
pd.DataFrame(cur_matrix).to_csv(matrix_path, header=None, index=None)
| mit | Python |
|
76c040e9da5d94dfcb68d3e9a8003b894c1cf1dc | test file for vimba.py | morefigs/pymba | tests/test_vimba.py | tests/test_vimba.py | import pytest
from pymba import Vimba, VimbaException
def test_version():
version = Vimba().version.split('.')
assert int(version[0]) >= 1
assert int(version[1]) >= 7
assert int(version[2]) >= 0
def test_startup_shutdown():
with pytest.raises(VimbaException) as e:
Vimba().system().feature_names()
assert e.value.error_code == VimbaException.ERR_STARTUP_NOT_CALLED
# manual
Vimba().startup()
Vimba().system().feature_names()
Vimba().shutdown()
# context manager
with Vimba() as vmb:
vmb.system().feature_names()
@pytest.fixture
def vmb() -> Vimba:
with Vimba() as v:
yield v
# works best with camera(s) attached
def test_interface_camera_ids(vmb: Vimba):
# for ethernet camera discovery
if vmb.system().GeVTLIsPresent:
vmb.system().run_feature_command("GeVDiscoveryAllOnce")
for func in (vmb.interface_ids, vmb.camera_ids):
ids = func()
assert isinstance(ids, list)
for x in ids:
assert isinstance(x, str)
| mit | Python |
|
295b83d466b90ea812e8c0bda56b4d38a31c956a | Create reversedArrayNum.py | NendoTaka/CodeForReference,NendoTaka/CodeForReference,NendoTaka/CodeForReference | CodeWars/8kyu/reversedArrayNum.py | CodeWars/8kyu/reversedArrayNum.py | def digitize(n):
return [int(i) for i in str(n)][::-1]
| mit | Python |
|
7b279117da06af5cf21b61ad810a9c3177de8e3e | Update fabfile.py | frasern/ADL_LRS,varunasingh/ustadmobile-tincanlrs,ljwolford/ADL_LRS,ELSUru/ADL_LRS,ljwolford/ADL_LRS,adlnet/ADL_LRS,Nuevosmedios/ADL_LRS,creighton/ADL_LRS,diagonalwalnut/Experience,frasern/ADL_LRS,daafgo/Server_LRS,adlnet/ADL_LRS,varunasingh/ADL_LRS,creighton/ADL_LRS,daafgo/Server_LRS,adlnet/ADL_LRS,varunasingh/ADL_LRS,ljwolford/ADL_LRS,adlnet/ADL_LRS,diagonalwalnut/Experience,creighton/ADL_LRS,Nuevosmedios/ADL_LRS,ELSUru/ADL_LRS,frasern/ADL_LRS | fabfile.py | fabfile.py | from fabric.api import local,run
import os
from os import path
#Add settings module so fab file can see it
os.environ['DJANGO_SETTINGS_MODULE'] = "adl_lrs.settings"
from django.conf import settings
adldir = settings.MEDIA_ROOT
actor_profile = 'actor_profile'
activity_profile = 'activity_profile'
activity_state = 'activity_state'
INSTALL_STEPS = ['yes | sudo apt-get install python-setuptools libmysqlclient-dev python-dev python-mysqldb python-libxml2 python-libxslt1 libxml2-dev libxslt1-dev',
'sudo easy_install pip',
'sudo pip install virtualenv',
'virtualenv env;. env/bin/activate;pip install -r requirements.txt;deactivate']
def deps_local():
for step in INSTALL_STEPS:
local(step)
#Create media directories and give them open permissions
if not os.path.exists(path.join(adldir,activity_profile)):
os.makedirs(path.join(adldir,activity_profile))
os.chmod(path.join(adldir,activity_profile), 0777)
if not os.path.exists(path.join(adldir,activity_state)):
os.makedirs(path.join(adldir,activity_state))
os.chmod(path.join(adldir,activity_state), 0777)
if not os.path.exists(path.join(adldir,actor_profile)):
os.makedirs(path.join(adldir,actor_profile))
os.chmod(path.join(adldir,actor_profile), 0777)
def deps_remote():
for step in INSTALL_STEPS:
run(step)
| from fabric.api import local,run
import os
from os import path
#Add settings module so fab file can see it
os.environ['DJANGO_SETTINGS_MODULE'] = "adl_lrs.settings"
from django.conf import settings
adldir = settings.MEDIA_ROOT
actor_profile = 'actor_profile'
activity_profile = 'activity_profile'
activity_state = 'activity_state'
INSTALL_STEPS = ['yes | sudo apt-get install python-setuptools libmysqlclient-dev python-dev python-mysqldb python-libxml2 python-libxslt1 libxml2-dev libxslt1-dev',
'sudo easy_install pip',
'pip install -r requirements.txt']
def deps_local():
for step in INSTALL_STEPS:
local(step)
def create_dirs():
#Create media directories and give them open permissions
if not os.path.exists(path.join(adldir,activity_profile)):
os.makedirs(path.join(adldir,activity_profile))
os.chmod(path.join(adldir,activity_profile), 0777)
if not os.path.exists(path.join(adldir,activity_state)):
os.makedirs(path.join(adldir,activity_state))
os.chmod(path.join(adldir,activity_state), 0777)
if not os.path.exists(path.join(adldir,actor_profile)):
os.makedirs(path.join(adldir,actor_profile))
os.chmod(path.join(adldir,actor_profile), 0777)
def deps_remote():
for step in INSTALL_STEPS:
run(step)
| apache-2.0 | Python |
86418c4f3ea786c6eb1aad6579dadfb286dec0a3 | Create InMoov2.minimal.py | sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | toSort/InMoov2.minimal.py | toSort/InMoov2.minimal.py | # a very minimal script for InMoov
# although this script is very short you can still
# do voice control of a right hand or finger box
# for any command which you say - you will be required to say a confirmation
# e.g. you say -> open hand, InMoov will ask -> "Did you say open hand?", you will need to
# respond with a confirmation ("yes","correct","yeah","ya")
rightPort = "COM8"
i01 = Runtime.createAndStart("i01", "InMoov")
# starting parts
i01.startEar()
i01.startMouth()
#to tweak the default voice
i01.mouth.setGoogleURI("http://thehackettfamily.org/Voice_api/api2.php?voice=Ryan&txt=")
##############
i01.startRightHand(rightPort)
# tweaking defaults settings of right hand
#i01.rightHand.thumb.setMinMax(55,135)
#i01.rightHand.index.setMinMax(0,160)
#i01.rightHand.majeure.setMinMax(0,140)
#i01.rightHand.ringFinger.setMinMax(48,145)
#i01.rightHand.pinky.setMinMax(45,146)
#i01.rightHand.thumb.map(0,180,55,135)
#i01.rightHand.index.map(0,180,0,160)
#i01.rightHand.majeure.map(0,180,0,140)
#i01.rightHand.ringFinger.map(0,180,48,145)
#i01.rightHand.pinky.map(0,180,45,146)
#################
# verbal commands
ear = i01.ear
ear.addCommand("attach right hand", "i01.rightHand", "attach")
ear.addCommand("disconnect right hand", "i01.rightHand", "detach")
ear.addCommand("rest", i01.getName(), "rest")
ear.addCommand("open hand", "python", "handopen")
ear.addCommand("close hand", "python", "handclose")
ear.addCommand("capture gesture", ear.getName(), "captureGesture")
ear.addCommand("manual", ear.getName(), "lockOutAllGrammarExcept", "voice control")
ear.addCommand("voice control", ear.getName(), "clearLock")
ear.addComfirmations("yes","correct","yeah","ya")
ear.addNegations("no","wrong","nope","nah")
ear.startListening()
def handopen():
i01.moveHand("left",0,0,0,0,0)
i01.moveHand("right",0,0,0,0,0)
def handclose():
i01.moveHand("left",180,180,180,180,180)
i01.moveHand("right",180,180,180,180,180)
| apache-2.0 | Python |
|
35e76ec99a3710a20b17a5afddaa14389af65098 | Add some simple MediaWiki importer. | ludovicchabant/Wikked,ludovicchabant/Wikked,ludovicchabant/Wikked | tools/import_mediawiki.py | tools/import_mediawiki.py | import os
import os.path
import argparse
from sqlalchemy import create_engine
def main():
parser = argparse.ArgumentParser()
parser.add_argument('url')
parser.add_argument('-o', '--out', default='wikked_import')
parser.add_argument('--prefix', default='wiki')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('--ext', default='.md')
args = parser.parse_args()
prefix = args.prefix
out_dir = args.out
ext = '.' + args.ext.lstrip('.')
if not out_dir:
parser.print_help()
return 1
if os.path.isdir(out_dir):
print("The output directory already exists!")
return 1
engine = create_engine(args.url, echo=args.verbose)
conn = engine.connect()
query = (
'SELECT '
'p.page_id,p.page_title,p.page_latest,'
'r.rev_id,r.rev_text_id,t.old_id,t.old_text '
'from %(prefix)s_page p '
'INNER JOIN %(prefix)s_revision r ON p.page_latest = r.rev_id '
'INNER JOIN %(prefix)s_text t ON r.rev_text_id = t.old_id;' %
{'prefix': prefix})
q = conn.execute(query)
for p in q:
title = p['page_title'].decode('utf8')
text = p['old_text'].decode('utf8')
path_noext = os.path.join(out_dir, title)
path = path_noext + ext
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
if os.path.exists(path):
suffnum = 2
while True:
new_path = '%s_%d%s' % (path_noext, suffnum, ext)
if not os.path.exists(new_path):
break
suffnum += 1
if suffnum > 100:
raise Exception("Can't find available path for: " %
path)
print("WARNING: %s exists" % path)
print("WARNING: creating %s instead" % new_path)
path = new_path
print(p['page_id'], title)
with open(path, 'w', encoding='utf8') as fp:
fp.write(text)
conn.close()
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
237041aff9d99ac840572742467772edf1f4d5ef | Add image download example | dtroyer/python-openstacksdk,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk | examples/image/download.py | examples/image/download.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
"""
Download an image with the Image service.
For a full guide see
http://developer.openstack.org/sdks/python/openstacksdk/users/guides/image.html
"""
def download_image_stream(conn):
print("Download Image via streaming:")
# Find the image you would like to download.
image = conn.image.find_image("myimage")
# As the actual download now takes place outside of the library
# and in your own code, you are now responsible for checking
# the integrity of the data. Create an MD5 has to be computed
# after all of the data has been consumed.
md5 = hashlib.md5()
with open("myimage.qcow2", "wb") as local_image:
response = conn.image.download_image(image, stream=True)
# Read only 1024 bytes of memory at a time until
# all of the image data has been consumed.
for chunk in response.iter_content(chunk_size=1024):
# With each chunk, add it to the hash to be computed.
md5.update(chunk)
local_image.write(chunk)
# Now that you've consumed all of the data the response gave you,
# ensure that the checksums of what the server offered and
# what you downloaded are the same.
if response.headers["Content-MD5"] != md5.hexdigest():
raise Exception("Checksum mismatch in downloaded content")
def download_image(conn):
print("Download Image:")
# Find the image you would like to download.
image = conn.image.find_image("myimage")
with open("myimage.qcow2", "w") as local_image:
response = conn.image.download_image(image)
# Response will contain the entire contents of the Image.
local_image.write(response)
| apache-2.0 | Python |
|
5ea296703596306ea9895e37db9412f80731543a | Add a protein-plotting example, to show how to visualize nicely a graph | alexandreleroux/mayavi,dmsurti/mayavi,liulion/mayavi,alexandreleroux/mayavi,dmsurti/mayavi,liulion/mayavi | examples/mayavi/protein.py | examples/mayavi/protein.py | """
Visualize a protein graph structure downloaded from the protein database in
standard pdb format.
We parse the pdb file, but extract only a very small amount of
information: the type of atoms, their positions, and the links between them.
We assign a scalar value for the atoms to differenciate the different
types of atoms, but it does not correspond to the atomic mass. The size
and the color of the atom on the visualization is therefore not
chemicaly-significant.
The atoms are plotted using mlab.points3d, and connections between atoms
are added to the dataset, and visualized using a surface module.
To visualize the local atomic density, we use a gaussian splatter filter
that builds a kernel density estimation of the continuous density field:
each point is convoluted by a Gaussian kernel, and the sum of these
Gaussians form the resulting density field. We visualize this field using
volume rendering.
Reference for the pdb file standard:
http://mmcif.pdb.org/dictionaries/pdb-correspondence/pdb2mmcif.html
"""
# Author: Gael Varoquaux <[email protected]>
# Copyright (c) 2008, Enthought, Inc.
# License: BSD Style.
# The pdb code for the protein.
protein_code = '2q09'
# Retrieve the file from the protein database ##################################
import os
if not os.path.exists('pdb%s.ent.gz' % protein_code):
# Download the data
import urllib
print 'Downloading protein data, please wait'
opener = urllib.urlopen(
'ftp://ftp.wwpdb.org/pub/pdb/data/structures/divided/pdb/q0/pdb%s.ent.gz'
% protein_code)
open('pdb%s.ent.gz' % protein_code, 'w').write(opener.read())
# Parse the pdb file ###########################################################
import gzip
infile = gzip.GzipFile('pdb%s.ent.gz' % protein_code, 'rb')
nodes = dict()
edges = list()
atoms = set()
last_atom_label = None
last_chain_label = None
for line in infile:
line = line.split()
if line[0] in ('ATOM', 'HETATM'):
nodes[line[1]] = (line[2], line[6], line[7], line[8])
atoms.add(line[2])
chain_label = line[5]
if chain_label == last_chain_label:
edges.append((line[1], last_atom_label))
last_atom_label = line[1]
last_chain_label = chain_label
elif line[0] == 'CONECT':
for start, stop in zip(line[1:-1], line[2:]):
edges.append((start, stop))
atoms = list(atoms)
atoms.sort()
atoms = dict(zip(atoms, range(len(atoms))))
labels = dict()
x = list()
y = list()
z = list()
scalars = list()
for index, label in enumerate(nodes):
labels[label] = index
this_scalar, this_x, this_y, this_z= nodes[label]
scalars.append(atoms[this_scalar])
x.append(float(this_x))
y.append(float(this_y))
z.append(float(this_z))
connections = list()
for start, stop in edges:
connections.append((labels[start], labels[stop]))
import numpy as np
x = np.array(x)
y = np.array(y)
z = np.array(z)
scalars = np.array(scalars)
# Visualize the data ###########################################################
from enthought.mayavi import mlab
mlab.figure(1, bgcolor=(0, 0, 0))
mlab.clf()
pts = mlab.points3d(x, y, z, 1.5*scalars.max() - scalars,
scale_factor=0.015, resolution=10)
pts.mlab_source.dataset.lines = np.array(connections)
# Turn of clamping: the size of the glyph becomes absolute
pts.glyph.glyph.clamping = False
# Use a tube fiter to plot tubes on the link, varying the radius with the
# scalar value
tube = mlab.pipeline.tube(pts, tube_radius=0.15)
tube.filter.radius_factor = 1.
tube.filter.vary_radius = 'vary_radius_by_scalar'
mlab.pipeline.surface(tube, color=(0.8, 0.8, 0))
# Visualize the local atomic density
mlab.pipeline.volume(mlab.pipeline.gaussian_splatter(pts))
mlab.view(49, 31.5, 52.8, (4.2, 37.3, 20.6))
mlab.show()
| bsd-3-clause | Python |
|
ab60bd4f31a185884e0c05fa1a5f70c39a9d903a | add 52 | ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler,ericdahl/project-euler | python/p052.py | python/p052.py | def same(a, b):
return sorted(str(a)) == sorted(str(b))
for i in xrange(1, 1000000):
if same(i, 2 * i) and same(3 * i, 4 * i) and same(5 * i, 6 * i) and same(i, 6 * i):
print i
break | bsd-3-clause | Python |
|
fe6ece236e684d76441280ba700565f7fbce40cc | Create masked version based on pbcov cutogg | e-koch/VLA_Lband,e-koch/VLA_Lband | 14B-088/HI/analysis/pbcov_masking.py | 14B-088/HI/analysis/pbcov_masking.py |
'''
Cut out noisy regions by imposing a mask of the primary beam coverage.
'''
from astropy.io import fits
from spectral_cube import SpectralCube
from spectral_cube.cube_utils import beams_to_bintable
from astropy.utils.console import ProgressBar
import os
from analysis.paths import fourteenB_HI_data_path
# execfile(os.path.expanduser("~/Dropbox/code_development/ewky_scripts/write_huge_fits.py"))
pbcov = fits.open(fourteenB_HI_data_path("M33_14B-088_pbcov.fits"))[0]
cube = SpectralCube.read(fourteenB_HI_data_path("M33_14B-088_HI.clean.image.fits"))
# Apply the mask, using a cut-off of 0.3. This retains all of the regions with
# emission.
pblim = 0.3
masked_cube = cube.with_mask(pbcov.data > pblim)
masked_cube = masked_cube.minimal_subcube()
new_fitsname = fourteenB_HI_data_path("M33_14B-088_HI.clean.image.pbcov_gt_0.3_masked.fits",
no_check=True)
masked_cube.write(new_fitsname)
# create_huge_fits(new_fitsname, cube.header)
# save_hdu = fits.open(new_fitsname, mode='update')
# Save per channel
# for chan in ProgressBar(cube.shape[0]):
# save_hdu[0].data[chan] = cube[chan].value
# if chan % 50 == 0:
# save_hdu.flush()
# Save the beam table!
# save_hdu.append(beams_to_bintable(cube.beams))
# save_hdu.flush()
# save_hdu.close()
| mit | Python |
|
d8ddd6a843000c8b4125f166645a41443b6c06ba | Add kms_decrypt module | WeAreCloudar/ansible_modules | kms_decrypt.py | kms_decrypt.py | #!/usr/bin/python
import base64
DOCUMENTATION = '''
short_description: Decrypt a secret that was generated by KMS
description:
- This module decrypts the given secret using AWS KMS, and returns it as the Plaintext property
version_added: null
author: Ben Bridts
notes:
- Make sure you read http://docs.aws.amazon.com/kms/latest/developerguide/control-access.html to learn how to restrict
access to your keys
requirements:
- the boto3 python package
options:
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
version_added: "1.5"
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
version_added: "1.5"
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
version_added: "1.5"
secret:
description:
- The encrypted string you want to decode
required: false
default: CAT
'''
EXAMPLES = '''
- name: Decrypt secret
kms_decrypt:
secret: "{{ secret }}"
register: result
delegate_to: 127.0.0.1
- name: Show plaintext
debug: var=result.plaintext
delegate_to: 127.0.0.1
'''
import sys
try:
import boto3
except ImportError:
print "failed=True msg='boto3 required for this module'"
sys.exit(1)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
secret=dict(required=True),
))
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
secret = module.params.get('secret')
secret = base64.decodestring(secret)
client = boto3.client('kms')
response = client.decrypt(
CiphertextBlob=secret
)
status_code = response['ResponseMetadata']['HTTPStatusCode']
if status_code != 200:
module.fail_json(msg='Failed with http status code %s' % status_code)
module.exit_json(changed=True, plaintext=response['Plaintext'], key_id=response['KeyId'])
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| mit | Python |
|
4af087e4920124eddb0342d0f22978872f9ba5dc | add landuse_sql.py which convert the .csv files from ArcMap to a SQL database | chengdai/spraw_tax | landuse_sql.py | landuse_sql.py | import sqlite3
import glob
import pandas
#Name of SQL database
sql_schema = 'LandUse_Approx.db'
files = [f for f in glob.glob("*.csv") if "LandUseApprox_" in f]
#Create table names for the SQL database.
#Table names will have 'landuse_' as prefix and the year and length as the ending in the format 'YYYY_Length'
#Store table names in a dictonary (table_names) with the .csv file name as key and SQL table name as value
table_names = {}
for f in files:
table_names[f] = 'landuse_' + f[-13:-4]
conn = sqlite3.connect(sql_schema)
c = conn.cursor()
#Convert each .csv file into a SQL database
#Iterate through all .csv file, convert each file into a Pandas DataFrame and then insert into SQL schema
for f in [files[0]]:
print f
raw_dataset = pandas.read_csv(f, index_col = 0)
print raw_dataset
raw_dataset.to_sql(table_names[f],conn)
#TODO: Apply Primary Key constraint on OBJECTID | lgpl-2.1 | Python |
|
e23ccb850a6aef017ae91e35f672e6c6b5184e23 | Add image preprocessing functions | jni/skan | skan/pre.py | skan/pre.py | import numpy as np
from scipy import spatial, ndimage as ndi
from skimage import filters, img_as_ubyte
def hyperball(ndim, radius):
"""Return a binary morphological filter containing pixels within `radius`.
Parameters
----------
ndim : int
The number of dimensions of the filter.
radius : int
The radius of the filter.
Returns
-------
ball : array of bool, shape [2 * radius + 1,] * ndim
The required structural element
"""
size = 2 * radius + 1
center = [(radius,) * ndim]
coords = np.mgrid[[slice(None, size),] * ndim].reshape(ndim, -1).T
distances = np.ravel(spatial.distance_matrix(coords, center))
selector = distances <= radius
ball = np.zeros((size,) * ndim, dtype=bool)
ball.ravel()[selector] = True
return ball
def threshold(image, *, sigma=0., radius=0, offset=0.):
"""Use scikit-image filters to "intelligently" threshold an image.
Parameters
----------
image : array, shape (M, N, ...[, 3])
Input image, conformant with scikit-image data type
specification [1]_.
sigma : float, optional
If positive, use Gaussian filtering to smooth the image before
thresholding.
radius : int, optional
If given, use local median thresholding instead of global.
offset : float, optional
If given, reduce the threshold by this amount. Higher values
result in more pixels above the threshold.
Returns
-------
thersholded : image of bool, same shape as `image`
The thresholded image.
References
----------
.. [1] http://scikit-image.org/docs/dev/user_guide/data_types.html
"""
if sigma > 0:
image = filters.gaussian(image, sigma=sigma)
image = img_as_ubyte(image)
if radius > 0:
footprint = hyperball(image.ndim, radius=radius)
t = ndi.median_filter(image, footprint=footprint) - offset
else:
t = filters.threshold_otsu(image) - offset
thresholded = image > t
return thresholded
| bsd-3-clause | Python |
|
f8823429d1bc548e4a91fe8ea64086d35dd66676 | Add race migration. | allcaps/tvdordrecht.nl,allcaps/tvdordrecht.nl,allcaps/tvdordrecht.nl,allcaps/tvdordrecht.nl,allcaps/tvdordrecht.nl | tvdordrecht/race/migrations/0003_auto_20150730_2250.py | tvdordrecht/race/migrations/0003_auto_20150730_2250.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('race', '0002_auto_20150729_1906'),
]
operations = [
migrations.AlterModelOptions(
name='distance',
options={'ordering': ['order'], 'verbose_name': 'Afstand', 'verbose_name_plural': 'Afstanden'},
),
migrations.AlterModelOptions(
name='result',
options={'ordering': ['date', 'event', 'distance', 'time'], 'verbose_name': 'Wie wat waar / Uitslag', 'verbose_name_plural': 'Wie wat waars / Uitslagen'},
),
migrations.RemoveField(
model_name='distance',
name='default',
),
migrations.AddField(
model_name='distance',
name='last_modified',
field=models.DateTimeField(auto_now=True, verbose_name=b'laatst bewerkt', null=True),
),
migrations.AddField(
model_name='distance',
name='last_modified_by',
field=models.ForeignKey(related_name='distance_last_modified_by', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name=b'Laatst bewerkt door'),
),
migrations.AddField(
model_name='distance',
name='owner',
field=models.ForeignKey(related_name='distance_owner', blank=True, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name=b'Eigenaar'),
),
migrations.AddField(
model_name='distance',
name='pub_date',
field=models.DateTimeField(null=True, verbose_name=b'publicatie datum', blank=True),
),
]
| mit | Python |
|
564851a1a7f1378c9ef0e936640b690300a112fb | Add synthtool scripts (#3765) | googleapis/google-cloud-java,googleapis/google-cloud-java,googleapis/google-cloud-java | java-containeranalysis/google-cloud-containeranalysis/synth.py | java-containeranalysis/google-cloud-containeranalysis/synth.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
gapic = gcp.GAPICGenerator()
common_templates = gcp.CommonTemplates()
library = gapic.java_library(
service='container',
version='v1beta1',
config_path='/google/devtools/containeranalysis/artman_containeranalysis_v1beta1.yaml',
artman_output_name='')
s.copy(library / 'gapic-google-cloud-containeranalysis-v1beta1/src', 'src')
s.copy(library / 'grpc-google-cloud-containeranalysis-v1beta1/src', '../../google-api-grpc/grpc-google-cloud-containeranalysis-v1beta1/src')
s.copy(library / 'proto-google-cloud-containeranalysis-v1beta1/src', '../../google-api-grpc/proto-google-cloud-containeranalysis-v1beta1/src')
| apache-2.0 | Python |
|
e2124aef9cb91dac3a597d353cd217ed328221e5 | Add gyp file to build cpu_features static library. | yongjhih/android_tools,yongjhih/android_tools,yongjhih/android_tools,yongjhih/android_tools,yongjhih/android_tools,yongjhih/android_tools,yongjhih/android_tools,yongjhih/android_tools | ndk/sources/android/cpufeatures/cpu_features.gyp | ndk/sources/android/cpufeatures/cpu_features.gyp | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'cpu_features',
'type': 'static_library',
'direct_dependent_settings': {
'include_dirs': [
'.',
],
},
'sources': [
'cpu-features.c',
],
},
],
}
| apache-2.0 | Python |
|
f4aad329c445415f1306882d386abe43969ba6a9 | Add test for API ticket basics. | Bitergia/allura,apache/allura,Bitergia/allura,apache/allura,apache/allura,leotrubach/sourceforge-allura,lym/allura-git,leotrubach/sourceforge-allura,heiths/allura,Bitergia/allura,heiths/allura,apache/allura,apache/incubator-allura,Bitergia/allura,leotrubach/sourceforge-allura,leotrubach/sourceforge-allura,lym/allura-git,apache/incubator-allura,heiths/allura,apache/incubator-allura,heiths/allura,lym/allura-git,lym/allura-git,heiths/allura,Bitergia/allura,apache/incubator-allura,apache/allura,lym/allura-git | Allura/allura/tests/functional/test_rest_api_tickets.py | Allura/allura/tests/functional/test_rest_api_tickets.py | from pprint import pprint
from datetime import datetime, timedelta
import json
from pylons import c
from ming.orm import session
from allura import model as M
from allura.lib import helpers as h
from alluratest.controller import TestController, TestRestApiBase
class TestApiTicket(TestRestApiBase):
def set_api_ticket(self, expire=None):
if not expire:
expire = timedelta(days=1)
api_ticket = M.ApiTicket(user_id=self.user._id, capabilities={'import': 'test'},
expires=datetime.utcnow() + expire)
session(api_ticket).flush()
self.set_api_token(api_ticket)
def test_bad_signature(self):
self.set_api_ticket()
r = self.api_post('/rest/p/test/home/', api_signature='foo')
assert r.status_int == 403
def test_bad_token(self):
self.set_api_ticket()
r = self.api_post('/rest/p/test/home/', api_key='foo')
assert r.status_int == 403
def test_bad_timestamp(self):
self.set_api_ticket()
r = self.api_post('/rest/p/test/home/', api_timestamp=(datetime.utcnow() + timedelta(days=1)).isoformat())
assert r.status_int == 403
def test_bad_path(self):
self.set_api_ticket()
r = self.api_post('/rest/1/test/home/')
assert r.status_int == 404
r = self.api_post('/rest/p/1223/home/')
assert r.status_int == 404
r = self.api_post('/rest/p/test/12home/')
assert r.status_int == 404
def test_no_api(self):
self.set_api_ticket()
r = self.api_post('/rest/p/test/admin/')
assert r.status_int == 404
def test_project_ping(self):
self.set_api_ticket()
r = self.api_post('/rest/p/test/home/')
assert r.status_int == 200
assert r.json['shortname'] == 'test'
def test_project_ping_expired_ticket(self):
self.set_api_ticket(timedelta(seconds=-1))
r = self.api_post('/rest/p/test/home/')
assert r.status_int == 403
def test_subproject_ping(self):
self.set_api_ticket()
r = self.api_post('/rest/p/test/sub1/home/')
assert r.status_int == 200
assert r.json['shortname'] == 'test/sub1'
| apache-2.0 | Python |
|
3eb8e73faf56bf3e3e3eb7cc8209c780d0f71b62 | create nanoparticle class | pewen/ten | nanoparticle.py | nanoparticle.py | from scipy.constants import pi
import numpy as np
from math import cos, sin
class NanoParticle(object):
def __init__(self, r, n_acceptors, tau_D, R_Forster):
"""
Create a nanoparticle object
Parameters
----------
R : float
Radio of nanoparticule
n_acceptors : float
Number of acceptors in the nanoparticle
tau_D : float
Lifetime of the donor
R_Forster : float
Radio de Forster
"""
self.R = r
self.n_acceptors = n_acceptors
self.acceptors_positions = np.zeros((n_acceptors,3))
self.tau_D = tau_D
self.R_Forster = R_Forster
def deposit_superficial_acceptors(self):
"""
Generate random number of acceptors (n_acceptors) on the surface of the nanoparticle.
"""
for i in range(self.n_acceptors):
#Generate in spheric
theta = np.random.uniform(low=0, high=2*pi)
phi = np.random.uniform(low=0, high=pi)
#Transform to cartesian
self.acceptors_positions[i][0] = sin(phi)*cos(theta)*self.R
self.acceptors_positions[i][1] = sin(phi)*sin(theta)*self.R
self.acceptors_positions[i][2] = cos(phi)*self.R
def deposit_volumetrically_acceptors(self):
"""
Generate random number of acceptors (n_acceptors) anywhere in the nanoparticle.
Is not easy generate random point usin spherical coordinates.
For now, we generate random point in cartesian coordinates.
Reference link to implement in sphereic: http://mathworld.wolfram.com/SpherePointPicking.html
"""
for i in range(self.n_acceptors):
self.acceptors_positions[i][0] = np.random.uniform(low=-self.R, high=self.R)
self.acceptors_positions[i][1] = np.random.uniform(low=-self.R, high=self.R)
self.acceptors_positions[i][2] = np.random.uniform(low=-self.R, high=self.R)
def photon(self):
"""
Generate random position of a photon in the nanoparticle.
"""
x = np.random.uniform(low=-self.R, high=self.R)
y = np.random.uniform(low=-self.R, high=self.R)
z = np.random.uniform(low=-self.R, high=self.R)
self.photon = np.array([x, y, z])
def walk(self):
pass
def distance(self):
"""
Calculate, for all acceptor, 1/(r**6), where r are the distace bewteen the photon and acceptors
"""
self.dist = np.zeros(self.n_acceptors)
for i in range(self.n_acceptors):
self.dist[i] = (sum((self.photon - self.acceptors_positions[i])**2))**3
| mit | Python |
|
b6356e4b7a88e1b2034f37aa135794b08e79c70b | Test POC script | zhrif/script-cli | tests/chk.py | tests/chk.py | import paramiko,sys,re,time,subprocess,getpass,os
def main(argv):
os.system('cls') #on windows
mins = 0
print ("\n[info] "+time.strftime("%d/%m/%Y %H:%M:%S") +"\n")
print ("""
_ _
______ _____ ___ ___ _ __(_)_ __ | |_
|_ /\ \/ / __| / __|/ __| '__| | '_ \| __|
/ / > < (__ \__ \ (__| | | | |_) | |_
/___|/_/\_\___| |___/\___|_| |_| .__/ \__|
|_|
""")
user = raw_input('Username : ')
passwd = getpass.getpass(prompt='Password : ')
while mins != -1:
q = raw_input('script #>')
if "quit" in q:
mins = -1
if "exit" in q:
mins = -1
else:
case(q)
os.system('cls') #on windows
def sshto(host,command):
output = ''
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host, username=user, password=passwd)
print "\n[info] executing : \""+ command + "\" on " + nslookup(host)
stdin, stdout, stderr = ssh.exec_command(command)
stdin.flush()
for line in stdout:
# print line.strip('\n')
output+=str(line)
ssh.close()
return output
def sshtoEnter(host,command):
output = ''
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(host, username=user, password=passwd)
print "\n[info] executing : \""+ command + "\" on " + nslookup(host)
chan = ssh.invoke_shell()
chan.send(command + '\n')
time.sleep(5)
chan.send('\r')
ssh.close()
print "[info] completed : \""+ command + "\" on " + nslookup(host)
return output
def checkBadlink(interface,output):
for line in output.splitlines():
if interface in line:
if 'down' in line:
print "[Response] interface is down"
elif 'up' in line:
print "[Response] interface is up"
print('\n')
def nslookup(ip):
output = ""
ns = subprocess.Popen(["nslookup",ip], stdout=subprocess.PIPE)
outns, err = ns.communicate()
for line in outns.splitlines():
if 'Non-existent' in line:
output = ip
break
elif 'Name' in line:
for ls in line.split():
if 'Name' not in ls:
output = ls
return output
def checkNotRespon(ip):
ping = subprocess.Popen(["ping",ip], stdout=subprocess.PIPE)
print "\n[info] Pinging... "+nslookup(ip)
outping, err = ping.communicate()
fail = outping.count('Request timed out')+outping.count('Destination net unreachable')
if fail==4:
print "[Response] Device is not reachable"
elif fail>=1:
print "[Response] Possibly link is intermittent"
else:
print "[Response] Device is reachable"
for line in outping.splitlines():
if "Minimum" in line:
print "[Response] "+line
print('\n')
def checkBGP(peer,output):
for line in output.splitlines():
if peer in line:
if 'Active' or 'Idle' in line:
print "[Response] BGP is down"
elif 'Admin' in line:
print "[Response] BGP is administratively down"
else:
print "[Response] BGP is up"
print('\n')
def checkErr(output):
for line in output.splitlines():
if "counters" in line:
if "never" in line:
print "[Response] Link has never been cleared"
else:
print "[Response] It has been " + line.split()[-1] + " since last counter cleared"
if "input" in line:
print "[Response] Link is seeing :-\n[Response] " + line
if "output" in line:
print "[Response] " + line
pass
################################################################################
# options and selections.
################################################################################
def ping(expres):
# ping 128.58.XXX.XXX
checkNotRespon(expres.split()[-1])
def stats(expres):
#stats se0/0/0 128.58.XXX.XXX
checkErr(sshto(expres.split()[-1],'sh int '+expres.split()[1]+' | i inte|err'))
def clearcounter(expres):
#clear counter se0/0 128.58.XXX.XXX
sshtoEnter(expres.split()[-1],'clear counter ' + expres.split()[-2])
def case(semantic):
if "stats" in semantic:
stats(semantic)
elif "clear counter" in semantic:
clearcounter(semantic)
elif "ping" in semantic:
ping(semantic)
elif "\r" in semantic:
pass
else:
print "[info] sorry but feature "+semantic+" has not been implemented"
user = ' '
passwd = ' '
ippat = re.compile('[0-9]+(?:\.[0-9]+){3}')
if __name__ == "__main__":
main(sys.argv)
# case("stats se0/0/0 128.58.246.214")
# case("clear counter se0/0/0 128.58.246.214")
# case("stats se0/0/0 128.58.246.214")
# print sshto('128.58.246.214','sh log')
# case ("ping 128.58.246.214")
#time.sleep(60) #60 Second Sleep | apache-2.0 | Python |
|
b3e6855489eba5d59507ef6fb4c92f8284526ec1 | Check consecutive elements in an array | prathamtandon/g4gproblems | Arrays/check_consecutive_elements.py | Arrays/check_consecutive_elements.py | import unittest
"""
Given an unsorted array of numbers, return true if the array only contains consecutive elements.
Input: 5 2 3 1 4
Ouput: True (consecutive elements from 1 through 5)
Input: 83 78 80 81 79 82
Output: True (consecutive elements from 78 through 83)
Input: 34 23 52 12 3
Output: False
"""
"""
Approach:
1. First check that there are (max - min + 1) elements in the array.
2. Second, check that all elements are unique.
3. If all elements are consecutive, we can use arr[i]-min as an index into the array.
4. If element is positive, make it negative, else if its negative, there is repetition.
NOTE: This only works if all numbers are positive, otherwise use a hashmap to check for dupes.
O(n) time complexity and O(1) space complexity.
"""
def check_consecutive_only(list_of_numbers):
min_val = min(list_of_numbers)
max_val = max(list_of_numbers)
if len(list_of_numbers) != (max_val - min_val + 1):
return False
for num in list_of_numbers:
index = abs(num) - min_val
if list_of_numbers[index] < 0:
return False
list_of_numbers[index] = -list_of_numbers[index]
return True
class TestConsecutiveElements(unittest.TestCase):
def test_consecutive_true(self):
list_of_numbers = [83, 78, 80, 81, 79, 82]
self.assertTrue(check_consecutive_only(list_of_numbers))
def test_consecutive_false(self):
list_of_numbers = [7, 6, 5, 5, 3, 4]
self.assertFalse(check_consecutive_only(list_of_numbers))
list_of_numbers = [34, 23, 52, 12, 3]
self.assertFalse(check_consecutive_only(list_of_numbers))
| mit | Python |
|
f325937df3f1f1f972c7a0780d702f7fea5d03f5 | Test `__eq__`, `__ne__`, and `__hash__` | jwodder/permutation | test/test_eq.py | test/test_eq.py | import pytest
from permutation import Permutation
EQUIV_CLASSES = [
[
Permutation(),
Permutation(1),
Permutation(1,2),
Permutation(1,2,3,4,5),
Permutation.transposition(2,2),
Permutation.cycle(),
Permutation.from_cycles(),
Permutation.from_cycles(()),
],
[
Permutation(2,1),
Permutation(2,1,3,4,5),
Permutation.transposition(1,2),
Permutation.transposition(2,1),
Permutation.cycle(1,2),
Permutation.cycle(2,1),
Permutation.from_cycles((1,2)),
],
[
Permutation(2,3,1),
Permutation(2,3,1,4,5),
Permutation.cycle(1,2,3),
Permutation.cycle(2,3,1),
Permutation.cycle(3,1,2),
Permutation.from_cycles((1,2,3)),
Permutation.from_cycles((2,3,1)),
Permutation.from_cycles((3,1,2)),
],
[
Permutation(3,1,2),
Permutation(3,1,2,4,5),
Permutation.cycle(1,3,2),
Permutation.cycle(2,1,3),
Permutation.cycle(3,2,1),
Permutation.from_cycles((1,3,2)),
Permutation.from_cycles((2,1,3)),
Permutation.from_cycles((3,2,1)),
],
[
Permutation(3,2,1),
Permutation(3,2,1,4,5),
Permutation.transposition(1,3),
Permutation.transposition(3,1),
Permutation.cycle(1,3),
Permutation.cycle(3,1),
Permutation.from_cycles((1,3)),
Permutation.from_cycles((3,1)),
],
[
Permutation(2,3,1,5,4),
Permutation.from_cycles((1,2,3), (4,5)),
Permutation.from_cycles((1,2,3), (5,4)),
Permutation.from_cycles((3,1,2), (4,5)),
Permutation.from_cycles((4,5), (3,1,2)),
Permutation.from_cycles((4,5), (1,2,3)),
Permutation.from_cycles((5,4), (1,2,3)),
],
]
@pytest.mark.parametrize('p,q',
[(p,q) for eqcls in EQUIV_CLASSES for p in eqcls for q in eqcls]
)
def test_eq(p,q):
assert p == q
assert not (p != q)
assert hash(p) == hash(q)
@pytest.mark.parametrize('p,q', [
(p,q) for i, ps in enumerate(EQUIV_CLASSES)
for qs in EQUIV_CLASSES[:i] + EQUIV_CLASSES[i+1:]
for p in ps
for q in qs
])
def test_neq(p,q):
assert p != q
assert not (p == q)
@pytest.mark.parametrize('p', [p for eqcls in EQUIV_CLASSES for p in eqcls])
@pytest.mark.parametrize('x', [None, 0, 1, True, False, '(1 2)', (1,2), [1,2]])
def test_neq_other_types(p,x):
assert p != x
assert not (p == x)
| mit | Python |
|
55b33bff9856cc91943f0a5ae492db1fdc7d8d5a | Add missing python 3 only file. | cpcloud/numba,stonebig/numba,gmarkall/numba,numba/numba,seibert/numba,seibert/numba,stonebig/numba,cpcloud/numba,stuartarchibald/numba,stuartarchibald/numba,IntelLabs/numba,sklam/numba,IntelLabs/numba,sklam/numba,numba/numba,numba/numba,seibert/numba,jriehl/numba,cpcloud/numba,jriehl/numba,IntelLabs/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,seibert/numba,stuartarchibald/numba,sklam/numba,sklam/numba,stonebig/numba,gmarkall/numba,jriehl/numba,sklam/numba,IntelLabs/numba,numba/numba,stuartarchibald/numba,jriehl/numba,cpcloud/numba,cpcloud/numba,gmarkall/numba,IntelLabs/numba,gmarkall/numba,gmarkall/numba,numba/numba,jriehl/numba,stonebig/numba | numba/tests/jitclass_usecases.py | numba/tests/jitclass_usecases.py | """
Usecases with Python 3 syntax in the signatures. This is a separate module
in order to avoid syntax errors with Python 2.
"""
class TestClass1(object):
def __init__(self, x, y, z=1, *, a=5):
self.x = x
self.y = y
self.z = z
self.a = a
class TestClass2(object):
def __init__(self, x, y, z=1, *args, a=5):
self.x = x
self.y = y
self.z = z
self.args = args
self.a = a
| bsd-2-clause | Python |
|
e251aff9a232a66b2d24324f394da2ad9345ce79 | Add migration script for changing users with None as email_verifications to {} | rdhyee/osf.io,samanehsan/osf.io,barbour-em/osf.io,saradbowman/osf.io,haoyuchen1992/osf.io,brandonPurvis/osf.io,jolene-esposito/osf.io,emetsger/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,haoyuchen1992/osf.io,CenterForOpenScience/osf.io,samchrisinger/osf.io,jnayak1/osf.io,caneruguz/osf.io,reinaH/osf.io,binoculars/osf.io,GageGaskins/osf.io,adlius/osf.io,barbour-em/osf.io,brianjgeiger/osf.io,sloria/osf.io,adlius/osf.io,acshi/osf.io,rdhyee/osf.io,MerlinZhang/osf.io,Nesiehr/osf.io,wearpants/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,TomHeatwole/osf.io,wearpants/osf.io,RomanZWang/osf.io,bdyetton/prettychart,zachjanicki/osf.io,jolene-esposito/osf.io,Ghalko/osf.io,asanfilippo7/osf.io,ckc6cz/osf.io,CenterForOpenScience/osf.io,jinluyuan/osf.io,leb2dg/osf.io,zachjanicki/osf.io,fabianvf/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,rdhyee/osf.io,RomanZWang/osf.io,mluke93/osf.io,njantrania/osf.io,Johnetordoff/osf.io,icereval/osf.io,fabianvf/osf.io,petermalcolm/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,cosenal/osf.io,zamattiac/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,zachjanicki/osf.io,cslzchen/osf.io,reinaH/osf.io,mluo613/osf.io,danielneis/osf.io,jeffreyliu3230/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,aaxelb/osf.io,kch8qx/osf.io,SSJohns/osf.io,kwierman/osf.io,dplorimer/osf,leb2dg/osf.io,sbt9uc/osf.io,kwierman/osf.io,felliott/osf.io,caseyrygt/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,mluke93/osf.io,mluo613/osf.io,zamattiac/osf.io,barbour-em/osf.io,hmoco/osf.io,haoyuchen1992/osf.io,Ghalko/osf.io,reinaH/osf.io,crcresearch/osf.io,chrisseto/osf.io,KAsante95/osf.io,chrisseto/osf.io,ZobairAlijan/osf.io,caseyrollins/osf.io,dplorimer/osf,jnayak1/osf.io,doublebits/osf.io,jeffreyliu3230/osf.io,dplorimer/osf,Ghalko/osf.io,cldershem/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,amyshi188/osf.io,cosenal/osf.io,mfraezz/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,abought/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,jinluyuan/osf.io,jmcarp/osf.io,jeffreyliu3230/osf.io,aaxelb/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,kwierman/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,doublebits/osf.io,mattclark/osf.io,caseyrygt/osf.io,emetsger/osf.io,amyshi188/osf.io,leb2dg/osf.io,arpitar/osf.io,mluo613/osf.io,samanehsan/osf.io,felliott/osf.io,danielneis/osf.io,kch8qx/osf.io,cldershem/osf.io,KAsante95/osf.io,samchrisinger/osf.io,mluo613/osf.io,fabianvf/osf.io,DanielSBrown/osf.io,jinluyuan/osf.io,acshi/osf.io,sbt9uc/osf.io,crcresearch/osf.io,adlius/osf.io,mfraezz/osf.io,binoculars/osf.io,felliott/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,jnayak1/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,caseyrygt/osf.io,petermalcolm/osf.io,asanfilippo7/osf.io,arpitar/osf.io,kch8qx/osf.io,cslzchen/osf.io,felliott/osf.io,Johnetordoff/osf.io,ticklemepierce/osf.io,ZobairAlijan/osf.io,petermalcolm/osf.io,cwisecarver/osf.io,danielneis/osf.io,mluo613/osf.io,hmoco/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,ckc6cz/osf.io,abought/osf.io,amyshi188/osf.io,ZobairAlijan/osf.io,chrisseto/osf.io,arpitar/osf.io,adlius/osf.io,haoyuchen1992/osf.io,reinaH/osf.io,sloria/osf.io,brianjgeiger/osf.io,wearpants/osf.io,erinspace/osf.io,ticklemepierce/osf.io,erinspace/osf.io,njantrania/osf.io,lyndsysimon/osf.io,chennan47/osf.io,TomBaxter/osf.io,acshi/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,caseyrygt/osf.io,jmcarp/osf.io,abought/osf.io,doublebits/osf.io,njantrania/osf.io,ckc6cz/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,TomBaxter/osf.io,acshi/osf.io,wearpants/osf.io,mfraezz/osf.io,cosenal/osf.io,asanfilippo7/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,HarryRybacki/osf.io,amyshi188/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,petermalcolm/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,cldershem/osf.io,jmcarp/osf.io,jinluyuan/osf.io,aaxelb/osf.io,bdyetton/prettychart,danielneis/osf.io,jmcarp/osf.io,caseyrollins/osf.io,zamattiac/osf.io,fabianvf/osf.io,njantrania/osf.io,mattclark/osf.io,KAsante95/osf.io,alexschiller/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,mluke93/osf.io,alexschiller/osf.io,emetsger/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,MerlinZhang/osf.io,billyhunt/osf.io,alexschiller/osf.io,acshi/osf.io,cldershem/osf.io,zachjanicki/osf.io,aaxelb/osf.io,pattisdr/osf.io,SSJohns/osf.io,jnayak1/osf.io,jeffreyliu3230/osf.io,dplorimer/osf,alexschiller/osf.io,ckc6cz/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,pattisdr/osf.io,erinspace/osf.io,hmoco/osf.io,cslzchen/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,lyndsysimon/osf.io,ZobairAlijan/osf.io,baylee-d/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,SSJohns/osf.io,bdyetton/prettychart,emetsger/osf.io,RomanZWang/osf.io,caneruguz/osf.io,kch8qx/osf.io,mfraezz/osf.io,cosenal/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,chennan47/osf.io,crcresearch/osf.io,barbour-em/osf.io,kwierman/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,jolene-esposito/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,lyndsysimon/osf.io,doublebits/osf.io,mluke93/osf.io,bdyetton/prettychart,KAsante95/osf.io,abought/osf.io,baylee-d/osf.io,Ghalko/osf.io,doublebits/osf.io,baylee-d/osf.io,samchrisinger/osf.io,sloria/osf.io,arpitar/osf.io,billyhunt/osf.io,kch8qx/osf.io,HarryRybacki/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,mattclark/osf.io,DanielSBrown/osf.io,MerlinZhang/osf.io,icereval/osf.io,billyhunt/osf.io,icereval/osf.io,cwisecarver/osf.io,MerlinZhang/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,samanehsan/osf.io | scripts/migration/migrate_none_as_email_verification.py | scripts/migration/migrate_none_as_email_verification.py | """ Ensure that users with User.email_verifications == None now have {} instead
"""
import logging
import sys
from tests.base import OsfTestCase
from tests.factories import UserFactory
from modularodm import Q
from nose.tools import *
from website import models
from website.app import init_app
from scripts import utils as scripts_utils
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
count = 0
if not dry_run:
scripts_utils.add_file_logger(logger, __file__)
logger.info("Iterating users with None as their email_verification")
for user in get_users_with_none_in_email_verifications():
user.email_verifications = {}
count += 1
logger.info(repr(user))
if not dry_run:
user.save()
print('{} users migrated'.format(count))
def get_users_with_none_in_email_verifications():
return models.User.find( Q('email_verifications', 'eq', None))
class TestMigrateDates(OsfTestCase):
def setUp(self):
super(TestMigrateDates, self).setUp()
self.user1 = UserFactory(email_verfications=None)
self.user2 = UserFactory(email_verfications={})
def test_migrate_none_as_email(self):
main()
assert_equal(self.user1.email_verifications, {})
assert_not_equal(self.user2.email_verifications, None)
if __name__ == '__main__':
main()
| apache-2.0 | Python |
|
97bf6ba36b27822a9bd73cb9a27d9878e48945e2 | add a decorator to ignore signals from fixture loading | MadeInHaus/django-template,MadeInHaus/django-template,MadeInHaus/django-template,MadeInHaus/django-template | project/apps/utils/signal_decorators.py | project/apps/utils/signal_decorators.py |
from functools import wraps
def disable_for_loaddata(signal_handler):
"""
Decorator that turns off signal handlers when loading fixture data.
based on http://stackoverflow.com/a/15625121
"""
@wraps(signal_handler)
def wrapper(*args, **kwargs):
if kwargs.get('raw'):
return
signal_handler(*args, **kwargs)
return wrapper
| mit | Python |
|
f8b5e413b46350f25bd7d231a8102c706fbf34f8 | Add new package: py-devlib (#16982) | iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-devlib/package.py | var/spack/repos/builtin/packages/py-devlib/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyDevlib(PythonPackage):
"""Library for interaction with and instrumentation of remote devices."""
homepage = "https://github.com/ARM-software/devlib"
url = "https://github.com/ARM-software/devlib/archive/v1.2.tar.gz"
version('1.2', sha256='4cdb6767a9430b49eecffe34e2b9fcbcfc7e65328122d909aa71c3d11a86503d')
version('1.1.2', sha256='c900420cb97239b4642f5e333e43884fb09507b530edb55466e7b82103b4deaa')
version('1.1.1', sha256='eceb7a2721197a6023bbc2bbf346663fc117e4f54e1eb8334a3085dead9c8036')
version('1.1.0', sha256='317e9be2303ebb6aebac9a2ec398c622ea16d6e46079dc9e37253b37d739ca9d')
version('1.0.0', sha256='2f78278bdc9731a4fa13c41c74f08e0b8c5143de5fa1e1bdb2302673aec45862')
version('0.0.4', sha256='0f55e684d43fab759d0e74bd8f0d0260d9546a8b8d853d286acfe5e00c86da05')
version('0.0.3', sha256='29ec5f1de481783ab0b9efc111dfeb67c890187d56fca8592b25ee756ff32902')
version('0.0.2', sha256='972f33be16a06572a19b67d909ee0ed6cb6f21f9a9da3c43fd0ff5851421051d')
depends_on('py-setuptools', type='build')
depends_on('py-python-dateutil', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-pyserial', type=('build', 'run'))
depends_on('py-wrapt', type=('build', 'run'))
depends_on('py-future', type=('build', 'run'))
depends_on('py-pandas', type=('build', 'run'))
depends_on('py-enum34', type=('build', 'run'), when='^python@:3.3')
depends_on('py-contextlib2', type=('build', 'run'), when='^python@:2.999')
depends_on('py-numpy@:1.16.4', type=('build', 'run'), when='^python@:2.999')
depends_on('py-numpy', type=('build', 'run'), when='^python@:3.0')
| lgpl-2.1 | Python |
|
de39aa257d845ecb6e1c2e7c4c4911497d00cdcf | add sample, non working, test_wsgi | openstack-infra/os-loganalyze,openstack-infra/os-loganalyze,openstack-infra/os-loganalyze | os_loganalyze/tests/test_wsgi.py | os_loganalyze/tests/test_wsgi.py | #!/usr/bin/python
#
# Copyright (c) 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test the ability to convert files into wsgi generators
"""
from os_loganalyze.tests import base
import os_loganalyze.wsgi as log_wsgi
def _start_response(*args):
return
class TestWsgi(base.TestCase):
def test_nofile(self):
gen = log_wsgi.application(None, _start_response)
self.assertTrue(False)
self.assertEqual(gen, ['Invalid file url'])
environ = {
'path': '/htmlify/foo.txt'
}
gen = log_wsgi.application(environ, _start_response)
self.assertEqual(gen, ['Invalid file url1'])
| apache-2.0 | Python |
|
4c8ea40eeec6df07cf8721c256ad8cc3d35fb23e | Add intial unit test file | masterkoppa/whatodo | src/test_main.py | src/test_main.py | import pytest
from main import *
test_files = [ "examples/C/filenames/script", "examples/Clojure/index.cljs.hl",
"examples/Chapel/lulesh.chpl", "examples/Forth/core.fth",
"examples/GAP/Magic.gd", "examples/JavaScript/steelseries-min.js",
"examples/Matlab/FTLE_reg.m", "examples/Perl6/for.t",
"examples/VimL/solarized.vim", "examples/C/cpu.c",
"examples/CSS/bootstrap.css", "examples/D/mpq.d",
"examples/Go/api.pb.go", "examples/HTML+ERB/index.html.erb"]
number_of_comments = [
423,# examples/C/filenames/script
13, # examples/Clojure/index.cljs.hl
609,# examples/Chapel/lulesh.chpl
0, # examples/Forth/core.fth
3, # examples/GAP/Magic.gd
2, # examples/JavaScript/steelseries-min.js
6, # examples/Matlab/FTLE_reg.m
586,# examples/Perl6/for.t
20, # examples/VimL/solarized.vim
39, # examples/C/cpu.c
680,# examples/CSS/bootstrap.css
167,# examples/D/mpq.d
0, # examples/Go/api.pb.go
10 # examples/HTML+ERB/index.html.erb
]
def test_get_comment_tokens():
from pygments.lexers.c_cpp import CLexer
file_text_test = "int main(int argc, char[] argv){\n//This is a comment\n}\n"
c_lexer = CLexer()
results = []
for comment in get_comment_tokens(file_text_test, c_lexer):
results.append(comment)
assert len(results) == 1
assert results[0] == "//This is a comment\n"
def test_get_tokens_from_file():
for index,file in enumerate(test_files, 0):
result = get_tokens_from_file("../" + file)
#print(index)
print(file)
assert number_of_comments[index] == len(result.keys()) | mit | Python |
|
33a3e4a8adc6b3284de18fe02c67eafa3a391226 | Create tinycrypt.py | cuhsat/tinycrypt | tinycrypt.py | tinycrypt.py | mit | Python |
||
6cda3951d27e819cb452233f514c953c923d9a53 | Add Python script to check links (#872) | jlopp/lopp.net,jlopp/lopp.net,jlopp/lopp.net,jlopp/lopp.net | check_links.py | check_links.py | import os
from fnmatch import fnmatch
import bs4
from requests import get
from tqdm import tqdm
import webbrowser
import pyinputplus as pyip
from fake_headers import Headers
from random import shuffle
import validators
# Create a list of all the HTML files in lopp.net
all_html_files = []
website_directory = pyip.inputFilepath(
prompt="Enter the path to the website directory: "
)
all_html_files = []
os.chdir(website_directory)
all_html_files = []
for root, dirs, files in os.walk(os.getcwd()):
for file in files:
if fnmatch(file, "*.html"):
all_html_files.append(os.path.join(root, file))
# Parse each HTML and create a list of links associated with each HTML file
all_links = []
for html_file in all_html_files:
with open(html_file, "r") as f:
soup = bs4.BeautifulSoup(f, "html.parser")
for link in soup.find_all("a"):
all_links.append(link.get("href"))
# Remove all duplicate links and those pointing to other pages in lopp.net
print(f"Total number of links: {len(all_links)}")
all_links = list(set(all_links)) # Removes duplicate links
shuffle(
all_links
) # We don't want to visit the same page twice in a row, so shuffle the list
for link in all_links:
if validators.url(link) == False:
# If the link is not a valid URL, remove it
all_links.remove(link)
elif link.find("lopp.net") != -1:
# Ignores the link if it points to one of the other pages in lopp.net or blog.lopp.net
all_links.remove(link)
elif link[0] == "#" or link[0] == "/":
# Ignores the link if it is a link to a specific section of the page
all_links.remove(link)
print(f"Total number of links: {len(all_links)}")
# Iterate over each link and download the page with requests
failed_links = []
headers = Headers(headers=True).generate()
# For this first iteration, the timeout is set to 3 seconds
for link in tqdm(all_links):
try:
r = get(link, timeout=3, headers=headers)
if r.status_code != 200:
failed_links.append(link)
except:
failed_links.append(link)
print("Finished checking links with a timeout of 3 seconds")
print(f"Number of failed links: {len(failed_links)}")
print("Retrying the failed links with a timeout of 10 seconds")
# Retries the failed links with a longer timeout
for link in tqdm(failed_links):
try:
r = get(link, timeout=10, headers=headers)
if r.status_code == 200:
failed_links.remove(link)
except:
pass
print("Finished checking links with a timeout of 10 seconds")
print(f"Number of failed links: {len(failed_links)}")
print(failed_links)
really_failed_links = []
for link in failed_links:
webbrowser.open_new_tab(link)
if pyip.inputYesNo("Is this link working? ") == "no":
really_failed_links.append(link)
# Search all the HTML files for the failed links and print them out
files_with_failed_links = []
for html_file in all_html_files:
with open(html_file, "r") as f:
soup = bs4.BeautifulSoup(f, "html.parser")
for link in soup.find_all("a"):
if link.get("href") in really_failed_links:
files_with_failed_links.append(f"{html_file} - {link.get('href')}")
break
# Finally, output a list of the really broken links and their associated HTML files to a text file
os.chdir("..")
try:
f = open("broken_links.txt", "x")
except:
f = open("broken_links.txt", "w")
for link in files_with_failed_links:
f.write(link + "\n")
f.close()
| unlicense | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.