commit (string, 40 chars) | subject (string, 4-1.73k chars) | repos (string, 5-127k chars) | old_file (string, 2-751 chars) | new_file (string, 2-751 chars) | new_contents (string, 1-8.98k chars) | old_contents (string, 0-6.59k chars) | license (13 classes) | lang (23 classes)
---|---|---|---|---|---|---|---|---|
83cdd840979dc452f444914a0c40d077e6917c38 | Add DB connector class. | idcodeoverflow/SocialNetworkAnalyzer | DBConnection.py | DBConnection.py | __author__ = 'David'
| mit | Python |
|
4ff0e6a4d190d8c1f60903d18dcdaac1edeace8a | Create test.py | bhagirathbhard/redbot,agitatedgenius/redbot | test.py | test.py | import unittest
from mock import patch
import RedDefineBot
class TestBot(unittest.TestCase):
    # Each test takes a `mock` argument, so a @patch decorator is required for
    # unittest to supply it; 'RedDefineBot.authenticate' is an assumed patch
    # target -- adjust it to the bot's real auth entry point.
    @patch('RedDefineBot.authenticate')
    def test_auth_called(self, mock):
        RedDefineBot.authenticate()  # exercise the patched call
        self.assertTrue(mock.called)
    @patch('RedDefineBot.authenticate')
    def test_auth_notcalled(self, mock):
        self.assertFalse(mock.called)
if __name__ == '__main__':
unittest.main()
| mit | Python |
|
00b04f773b9e2018b08776c5d53ff3dad7ed00d1 | Create test.py | chapman-cpsc-230/hw-0-mckennasheridan,chapman-cpsc-230/hw-0-benav115 | test.py | test.py | """test.py
"""
print "Hello world"
| mit | Python |
|
b4b2b80cb1d0c0729e8e98085c2cfc3bc55ddda3 | Solve the Longest Lines challenge using Python3 | TommyN94/CodeEvalSolutions,TommyN94/CodeEvalSolutions | LongestLines.py | LongestLines.py | # Longest Lines
#
# https://www.codeeval.com/open_challenges/2/
#
# Challenge Description: Write a program which reads a file and prints to
# stdout the specified number of the longest lines that are sorted based on
# their length in descending order.
import sys
input_file = sys.argv[1]
with open(input_file, 'r') as test_cases:
number_of_lines_to_print = int(test_cases.readline().rstrip())
lines = []
for case in test_cases:
lines.append(case.rstrip())
lines.sort(key=len, reverse=True)
for l in lines[:number_of_lines_to_print]:
print(l)
| mit | Python |
|
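The sort-everything approach above is fine for CodeEval-sized inputs; when only the top N lines matter, `heapq.nlargest` does the same job without a full sort. A minimal alternative sketch, assuming the same input format (first line is the count, the rest are data lines):

```python
import heapq
import sys

with open(sys.argv[1], 'r') as test_cases:
    n = int(test_cases.readline().rstrip())
    lines = (line.rstrip() for line in test_cases)
    # O(total * log n) instead of O(total * log total); only n lines are held at once
    for line in heapq.nlargest(n, lines, key=len):
        print(line)
```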
37e674f05547c7b6b93f447477443644865975d1 | Bring back the Root URL config | ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark | urls.py | urls.py | __author__ = 'ankesh'
from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/basic/plus/')),
url(r'^upload/', include('fileupload.urls')),
url(r'^plots/', include('plots.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
) | bsd-2-clause | Python |
|
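A note on the row above: the `patterns()` helper it relies on was deprecated in Django 1.8 and removed in 1.10. A sketch of the equivalent plain-list configuration for the 1.10/1.11 era (same routes; app names taken from the snippet):

```python
import os
from django.conf.urls import include, url
from django.contrib import admin
from django.http import HttpResponseRedirect
from django.views.static import serve

admin.autodiscover()

urlpatterns = [
    url(r'^$', lambda request: HttpResponseRedirect('/upload/basic/plus/')),
    url(r'^upload/', include('fileupload.urls')),
    url(r'^plots/', include('plots.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^media/(.*)$', serve,
        {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
]
```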
b733f433d797b302c46cb71cf0230b986f630d26 | Create w3_1.py | s40523244/2016fallcp_hw,s40523244/2016fallcp_hw,s40523244/2016fallcp_hw | w3_1.py | w3_1.py | print("你教得真好")
| agpl-3.0 | Python |
|
73afce309f0e73b441c0ade49849397cba0fb0c2 | update spec runner to work with invoke's boolean flags to run specs untranslated | kachick/topaz,babelsberg/babelsberg-r,topazproject/topaz,babelsberg/babelsberg-r,kachick/topaz,babelsberg/babelsberg-r,topazproject/topaz,kachick/topaz,topazproject/topaz,babelsberg/babelsberg-r,babelsberg/babelsberg-r,topazproject/topaz | tasks/specs.py | tasks/specs.py | from invoke import task, run as run_
from .base import BaseTest
class Rubyspecs(BaseTest):
def __init__(self, files, options, untranslated=False):
super(Rubyspecs, self).__init__()
self.exe = "`pwd`/bin/%s" % ("topaz_untranslated.py" if untranslated else "topaz")
self.files = files
self.options = options
self.download_mspec()
self.download_rubyspec()
def mspec(self, args):
run_("../mspec/bin/mspec %s -t %s --config=topaz.mspec %s" % (args, self.exe, self.files))
def run(self):
self.mspec("run -G fails %s" % self.options)
def tag(self):
self.mspec("tag --add fails -G fails -f spec %s" % self.options)
def untag(self):
self.mspec("tag --del fails -g fails -f spec %s" % self.options)
def generate_spectask(taskname):
def spectask(files="", options="", untranslated=False):
runner = Rubyspecs(files, options, untranslated=untranslated)
getattr(runner, taskname)()
spectask.__name__ = taskname
return task(spectask)
run = generate_spectask("run")
tag = generate_spectask("tag")
untag = generate_spectask("untag")
| from invoke import task, run as run_
from .base import BaseTest
class Rubyspecs(BaseTest):
def __init__(self, files, options, translated=True):
super(Rubyspecs, self).__init__()
self.exe = "`pwd`/bin/%s" % ("topaz" if translated else "topaz_untranslated.py")
self.files = files
self.options = options
self.download_mspec()
self.download_rubyspec()
def mspec(self, args):
run_("../mspec/bin/mspec %s -t %s --config=topaz.mspec %s" % (args, self.exe, self.files))
def run(self):
self.mspec("run -G fails %s" % self.options)
def tag(self):
self.mspec("tag --add fails -G fails -f spec %s" % self.options)
def untag(self):
self.mspec("tag --del fails -g fails -f spec %s" % self.options)
def generate_spectask(taskname):
def spectask(files="", options="", translated=True):
runner = Rubyspecs(files, options, translated=(translated != "False"))
getattr(runner, taskname)()
spectask.__name__ = taskname
return task(spectask)
run = generate_spectask("run")
tag = generate_spectask("tag")
untag = generate_spectask("untag")
| bsd-3-clause | Python |
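In the diff above, the older `translated=True` parameter plus the `translated != "False"` string comparison is replaced by a plain `untranslated=False` boolean, relying on invoke parsing the flag as a real boolean. A minimal illustration of why the old workaround existed, assuming the same older invoke API with no explicit context argument:

```python
from invoke import task

@task
def demo(translated=True):
    # On older invoke versions a command-line value could arrive as the
    # string "False", which is truthy; the `!= "False"` comparison in the
    # old code normalized that back to a real boolean.
    print(repr(translated != "False"))
```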
90399f50a3f50d9193ae1e6b2042215fb388230f | Create Video Stream program for webcam | SentientCNC/Sentient-CNC | VideoStream.py | VideoStream.py | import cv2
import numpy as np
cap = cv2.VideoCapture(0)
print('Beginning Capture Device opening...\n')
print('Capture device opened?', cap.isOpened())
while True:
    ret, frame = cap.read()
    if not ret:  # guard: on a failed read, frame is None and cvtColor would crash
        break
    gray_image = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
cv2.imshow('frame', gray_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# Release the capture
cap.release()
cv2.destroyAllWindows()
| apache-2.0 | Python |
|
1437bb868844731d3fdb13c6dd52dfd706df6f63 | Add a new script to clean up a habitica user given user email | sunil07t/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server | bin/ext_service/clean_habitica_user.py | bin/ext_service/clean_habitica_user.py | import argparse
import sys
import logging
import emission.core.get_database as edb
import emission.net.ext_service.habitica.proxy as proxy
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser()
parser.add_argument("user_email",
help="the email address of the user whose habitica account you want to clean up")
args = parser.parse_args()
del_uuid = edb.get_uuid_db().find_one({'user_email': args.user_email})['uuid']
logging.debug("Found uuid %s" % del_uuid)
del_habitica_creds = edb.get_habitica_db().find_one({'user_id': del_uuid})
logging.debug("del_habitica_creds = %s" % del_habitica_creds)
del_result = proxy.habiticaProxy(del_uuid, "DELETE",
"/api/v3/user",
{'password': del_habitica_creds['habitica_password']})
logging.debug("delete result = %s" % del_result)
| bsd-3-clause | Python |
|
b9820246c62733e9e47103d41a07a9a4253be15a | Create weather-script.py | RoryCrispin/epaper-weather-display,RoryCrispin/epaper-weather-display,RoryCrispin/epaper-weather-display | weather-script.py | weather-script.py | #!/usr/bin/python2
#Rory Crispin -- rozzles.com -- 2015
from xml.dom import minidom
import datetime
import codecs
import pywapi
result = pywapi.get_weather_from_yahoo('UKXX3856', 'metric')
iconCodes = ["056", "073", "073", "01e", "01e", "064", "01c", "064", "01c", "01c", "015", "019", "019", "064", "064",
"064", "064", "015", "0b6", "063", "014", "014", "062", "050", "050", "076", "013", "013", "013", "002",
"002", "02e", "00d", "02e", "00d", "017", "072", "01e", "01e", "01e", "01a", "064", "064", "064", "013",
"01d", "064", "01d", "00d"]
day_one = datetime.datetime.now()
highs = [None] * 4
lows = [None] * 4
icons = [None] * 4
for i in range(0, 4):
icons[i] = str(iconCodes[int(result['forecasts'][i]['code'])])
highs[i] = str(result['forecasts'][i]['high'])
lows[i] = str(result['forecasts'][i]['low'])
sunsetTime = result['astronomy']['sunset']
#
# Preprocess SVG
#
# Open SVG to process
output = codecs.open('weather-script-preprocess.svg', 'r', encoding='utf-8').read()
# Insert icons and temperatures
output = output.replace('ICON_ONE', icons[0])
output = output.replace('ICON_TWO', icons[1])
output = output.replace('ICON_THREE', icons[2])
output = output.replace('ICON_FOUR', icons[3])
output = output.replace('HIGH_ONE', str(highs[0])).replace('HIGH_TWO', str(highs[1])).replace('HIGH_THREE',
str(highs[2])).replace(
'HIGH_FOUR', str(highs[3]))
output = output.replace('LOW_ONE', str(lows[0])).replace('LOW_TWO', str(lows[1])).replace('LOW_THREE',
str(lows[2])).replace(
'LOW_FOUR', str(lows[3]))
# # Insert days of week
one_day = datetime.timedelta(days=1)
days_of_week = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
output = output.replace('DAY_THREE', days_of_week[(day_one + 2 * one_day).weekday()]).replace('DAY_FOUR', days_of_week[
(day_one + 3 * one_day).weekday()])
output = output.replace('SUNSET', sunsetTime)
# Write output
codecs.open('weather-script-output.svg', 'w', encoding='utf-8').write(output)
| mit | Python |
|
a9dd25c825bacd03ae358cc153c94ce3960ec0cf | Add serializers | agfor/chipy.org,brianray/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,tanyaschlusser/chipy.org,brianray/chipy.org,tanyaschlusser/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org,brianray/chipy.org,bharathelangovan/chipy.org,chicagopython/chipy.org,agfor/chipy.org,chicagopython/chipy.org,agfor/chipy.org | chipy_org/apps/meetings/serializers.py | chipy_org/apps/meetings/serializers.py | from rest_framework import serializers
from .models import Meeting, Topic, Presentor
class PresentorSerializer(serializers.ModelSerializer):
class Meta:
model = Presentor
fields = ('name', 'release')
class TopicSerializer(serializers.ModelSerializer):
presentor = PresentorSerializer()
class Meta:
model = Topic
fields = (
'title',
'presentor',
'length',
'description',
'embed_video',
'slides_link',
'start_time',
'approved'
)
depth = 1
class MeetingSerializer(serializers.ModelSerializer):
topics = TopicSerializer(many=True)
class Meta:
model = Meeting
fields = ('when', 'where', 'live_stream', 'topics')
depth = 2
| mit | Python |
|
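A sketch of how the serializers in the row above are consumed, assuming a saved Meeting with related topics:

```python
from .serializers import MeetingSerializer

def meeting_payload(meeting):
    # Nested topics (and each topic's presentor) are rendered automatically
    # because TopicSerializer is declared on MeetingSerializer.
    return MeetingSerializer(meeting).data
```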
55185a7a7402c9d0ce2677b00a329aa4197556c3 | add mediator | xuwei0455/design_patterns | Mediator.py | Mediator.py | # -*- coding: utf-8 -*-
"""
Mediator pattern
"""
class AbstractColleague(object):
"""
AbstractColleague
"""
def __init__(self, mediator):
self.mediator = mediator
class ConcreteColleague(AbstractColleague):
"""
ConcreteColleague
"""
def __init__(self, name, mediator):
self.name = name
AbstractColleague.__init__(self, mediator)
def send(self, message, receiver=None):
self.mediator.send(message, self, receiver)
    @staticmethod
    def notify(name, message, receiver):
        # `name` is the sender's name; `receiver` is the colleague being notified
        print u'From: {} To: {} -- {}'.format(name, receiver.name, message)
class AbstractMediator(object):
"""
AbstractMediator
"""
    def send(self, message, colleague, receiver=None):
        pass
class ConcreteMediator(AbstractMediator):
def __init__(self, name):
self.name = name
self.colleagues = []
def register(self, colleague):
self.colleagues.append(colleague)
def send(self, message, colleague, receiver=None):
if receiver:
receiver.notify(colleague.name, message, receiver)
else:
for _ in self.colleagues:
if _ != colleague:
_.notify(colleague.name, message, _)
if __name__ == '__main__':
mediator = ConcreteMediator(u'UN')
USA = ConcreteColleague(u'USA', mediator)
mediator.register(USA)
Japan = ConcreteColleague(u'Japan', mediator)
mediator.register(Japan)
Iraq = ConcreteColleague(u'Iraq', mediator)
mediator.register(Iraq)
UK = ConcreteColleague(u'UK', mediator)
mediator.register(UK)
USA.send(u"I'm the boss, bitch!")
Japan.send(u'Emm...', receiver=USA)
Iraq.send(u'A ha!', receiver=USA)
UK.send(u"Reversed?")
UK.send(u"My litter brother send that, boss...Trust me!", receiver=USA)
| mit | Python |
|
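For reference, the first lines the demo in this row prints (derived from the notify format string and the registration order):

```
From: USA To: Japan -- I'm the boss, bitch!
From: USA To: Iraq -- I'm the boss, bitch!
From: USA To: UK -- I'm the boss, bitch!
From: Japan To: USA -- Emm...
```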
290f990e31a5f732fb054846caea9346946778df | enable import as module | sao-eht/lmtscripts,sao-eht/lmtscripts,sao-eht/lmtscripts,sao-eht/lmtscripts | __init__.py | __init__.py | """
.. module:: lmtscripts
:platform: Unix
:synopsis: useful scripts for EHT observations at LMT
.. moduleauthor:: Lindy Blackburn <[email protected]>
.. moduleauthor:: Katie Bouman <[email protected]>
"""
| mit | Python |
|
19a4c4364d1629cd6bfd7ca27ae4e6441f13747e | Make mygmm a module | khrapovs/mygmm | __init__.py | __init__.py | from .mygmm.mygmm import * | mit | Python |
|
a7f4d96becfd1a58794a4dbedb9e9c8f6ac8c1a6 | Create acceptor.py | hgfeaon/simple-paxos | acceptor.py | acceptor.py | #! /usr/bin/env python
import message
import logging
class Acceptor(message.MessageListener):
def __init__(self, config, network):
message.MessageListener.__init__(self,
name = 'AcceptorListenser',
mapping = {
message.MSG_PROPOSAL_REQ : self.on_proposal_request,
message.MSG_ACCEPT_REQ : self.on_accept_request
})
self.network = network
self.config = config
self.promised_id = 0
self.accepted_id = 0
self.accepted_values= []
def on_proposal_request(self, pkg, msg):
logging.debug('process proposal request')
return False
def on_accept_request(self, pkg, msg):
logging.debug('process accept request')
return False
| mit | Python |
|
1265e6ce2e6f8423e13f5fb5d54328369cfaa3ec | add geojsonloader tester | codefortokyo/data-processing-tools,codefortokyo/data | tests/geojson/geojsonloader.py | tests/geojson/geojsonloader.py | # -*- coding: utf-8 -*-
import sys
import os
import unittest
import uuid
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
sys.path.insert(0, PROJECT_ROOT)
class GeoJSONLoaderTester(unittest.TestCase):
def setUp(self):
pass
def test__init__(self):
from cftt.geojson.geojsonloader import GeoJSONLoader
test = GeoJSONLoader()
s = test()
self.assertEqual(len(s), 0)
self.assertEqual(s.attributes, {})
test.attr('note', 'あ')
s = test()
self.assertEqual(s.attributes, {u'note': 'あ'.decode('utf-8')})
test = GeoJSONLoader(one=1, two=2)
s = test({'features': [
{'type': 'feature',
'properties': {},
'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}}],
'three': 3})
self.assertEqual(s.attributes,
{u'one': 1, u'two': 2, u'three': 3})
self.assertEqual(len(s), 1)
def test__call__(self):
from cftt.geojson.geojsonloader import GeoJSONLoader
id = uuid.uuid4()
test = GeoJSONLoader(id=id)
s = test()
self.assertEqual(s.attributes['id'], id)
self.assertEqual(len(s), 0)
self.assertEqual(len(s.attributes), 1)
s = test({'features': [
{'type': 'feature',
'properties': {},
'geometry': {'type': 'Point', 'coordinates': [0.0, 0.0]}}],
'three': 3})
self.assertEqual(s.attributes['id'], id)
self.assertEqual(s.attributes,
{u'three': 3, u'id': id})
self.assertEqual(len(s), 1)
test.attr('two', 2)
t = test(s)
self.assertEqual(s.attributes['id'], id)
self.assertEqual(s.attributes,
{u'three': 3, u'id': id})
self.assertEqual(len(s), 1)
self.assertEqual(t.attributes,
{u'three': 3, u'id': id, u'two': 2})
test_data_dir = os.path.join(PROJECT_ROOT, 'test_data')
gj = os.path.join(test_data_dir, 'geojson', 'japan.geojson')
s = test(gj)
self.assertEqual(len(s), 47)
t = test._load_from_json_file(gj)
self.assertEqual(s.attributes, t.attributes)
self.assertEqual(len(s), len(t))
zip = os.path.join(test_data_dir, 'geojson', 'japan.zip')
s = test(zip)
self.assertEqual(len(s), 47)
t = test._load_from_zip_file(zip)
self.assertEqual(s.attributes, t.attributes)
self.assertEqual(len(s), len(t))
from cftt.common.asyncfileserver import AsyncFileServer
with AsyncFileServer(port=8001):
url = 'http://localhost:8001/test_data/geojson/japan.zip'
s = test(url)
self.assertEqual(len(s), 47)
t = test._load_from_url(url)
self.assertEqual(s.attributes, t.attributes)
self.assertEqual(len(s), len(t))
if __name__ == '__main__':
unittest.main()
| mit | Python |
|
13b94129947cbfab4b7870e130a2efbbf41bfbb7 | Add missing file | kislyuk/rehash | rehash/__init__.py | rehash/__init__.py | from __future__ import absolute_import, division, print_function, unicode_literals
import os, sys, hashlib
from ctypes import cast, c_void_p, POINTER, Structure, c_int, c_ulong, c_char, c_size_t, c_ssize_t, py_object, memmove
from ssl import OPENSSL_VERSION
PyObject_HEAD = [
('ob_refcnt', c_size_t),
('ob_type', c_void_p)
]
# OpenSSL 1.0.2 and earlier:
# https://github.com/openssl/openssl/blob/OpenSSL_1_0_2-stable/crypto/evp/evp.h#L159-L181
# OpenSSL 1.1.0 and later:
# https://github.com/openssl/openssl/blob/master/crypto/include/internal/evp_int.h#L99-L113
class EVP_MD(Structure):
_fields_ = [
('type', c_int),
('pkey_type', c_int),
('md_size', c_int),
('flags', c_ulong),
('init', c_void_p),
('update', c_void_p),
('final', c_void_p),
('copy', c_void_p),
('cleanup', c_void_p),
]
if OPENSSL_VERSION < "OpenSSL 1.1.0":
_fields_ += [
('sign', c_void_p),
('verify', c_void_p),
('required_pkey_type', c_int * 5),
]
_fields_ += [
('block_size', c_int),
('ctx_size', c_int),
]
# https://github.com/openssl/openssl/blob/master/crypto/evp/evp_locl.h#L12-L22
class EVP_MD_CTX(Structure):
_fields_ = [
('digest', POINTER(EVP_MD)),
('engine', c_void_p),
('flags', c_ulong),
('md_data', POINTER(c_char)),
]
class EVPWrapper(Structure):
_fields_ = PyObject_HEAD + [
("name", POINTER(py_object)),
("ctx", POINTER(EVP_MD_CTX))
]
class ResumableHasher(object):
name = None
def __init__(self, name=None, data=None):
if self.name is not None:
data = name
else:
self.name = name
hasher_args = [] if data is None else [data]
self._hasher = self._get_hashlib_hasher(self.name)(*hasher_args)
def _get_hashlib_hasher(self, name):
if name.startswith("blake2"):
raise Exception("blake2 algorithms are not OpenSSL-based and not supported by rehash")
if name.startswith("sha3"):
raise Exception("sha3 algorithms are not supported by rehash")
if name.startswith("shake"):
raise Exception("shake algorithms are not supported by rehash")
if name in hashlib.algorithms_guaranteed:
return getattr(hashlib, name)
        else:
            # hashlib.new(name) returns a hash object, not a constructor,
            # so wrap it in a callable that matches the guaranteed-algorithm path
            return lambda *args: hashlib.new(name, *args)
def _get_evp_md_ctx(self):
c_evp_obj = cast(c_void_p(id(self._hasher)), POINTER(EVPWrapper))
return c_evp_obj.contents.ctx.contents
def __getstate__(self):
ctx = self._get_evp_md_ctx()
ctx_size = ctx.digest.contents.ctx_size
hasher_state = ctx.md_data[:ctx_size]
return dict(name=self.name, md_data=hasher_state)
def __setstate__(self, state):
self.name = state["name"]
self._hasher = self._get_hashlib_hasher(self.name)()
ctx = self._get_evp_md_ctx()
ctx_size = ctx.digest.contents.ctx_size
memmove(ctx.md_data, state["md_data"], ctx_size)
def __getattr__(self, a):
return getattr(self._hasher, a)
def _initialize():
module = sys.modules[__name__]
for name in hashlib.algorithms_guaranteed:
if name.startswith("blake2"):
continue
setattr(module, name, type(name, (ResumableHasher,), dict(name=name)))
_initialize()
| apache-2.0 | Python |
|
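What the ctypes plumbing in the row above buys: OpenSSL-backed hash objects whose state survives pickling, so a long hashing job can be checkpointed and resumed. A minimal usage sketch, assuming the file is importable as the `rehash` package:

```python
import pickle
import rehash

h = rehash.sha256(b"hello ")
frozen = pickle.dumps(h)      # snapshots the raw EVP_MD_CTX digest state
h2 = pickle.loads(frozen)
h2.update(b"world")
print(h2.hexdigest())         # equals hashlib.sha256(b"hello world").hexdigest()
```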
72d19081bea1dba061c7bf1f57c305f427be1e28 | Implement the SQUIT server command | Heufneutje/txircd,ElementalAlchemist/txircd | txircd/modules/server/squit.py | txircd/modules/server/squit.py | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class ServerQuit(ModuleData):
implements(IModuleData)
name = "ServerQuit"
core = True
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("serverquit", 1, self.sendSQuit),
("commandpermission-SQUIT", 1, self.restrictSQuit) ]
def userCommands(self):
return [ ("SQUIT", 1, UserSQuit(self.ircd)) ]
def serverCommands(self):
return [ ("SQUIT", 1, ServerSQuit(self.ircd)),
("RSQUIT", 1, RemoteSQuit(self.ircd)) ]
def sendSQuit(self, server, reason):
closestHop = server
while closestHop.nextClosest != self.ircd.serverID:
closestHop = self.ircd.servers[closestHop.nextClosest]
for otherServer in self.ircd.servers.itervalues():
if closestHop == otherServer:
continue
otherServer.sendMessage("SQUIT", server.serverID, ":{}".format(reason), prefix=server.nextClosest)
def restrictSQuit(self, user, command, data):
if not self.ircd.runActionUntilValue("userhasoperpermission", user, "command-squit"):
return False
return None
class UserSQuit(Command):
implements(ICommand)
def __init__(self, ircd):
self.ircd = ircd
def parseParams(self, user, params, prefix, tags):
if len(params) < 2:
user.sendSingleError("SQuitParams", irc.ERR_NEEDMOREPARAMS, "SQUIT", ":Not enough parameters")
return None
source = self.ircd.serverID
if params[0] not in self.ircd.serverNames:
if ircLower(params[0]) == ircLower(self.ircd.name):
user.sendSingleError("SQuitTarget", irc.ERR_NOSUCHSERVER, self.ircd.name, ":You can't unlink this server from itself")
return None
user.sendSingleError("SQuitTarget", irc.ERR_NOSUCHSERVER, params[0], ":No such server")
return None
return {
"source": source,
"target": self.ircd.servers[self.ircd.serverNames[params[0]]],
"reason": params[1]
}
def execute(self, user, data):
targetServer = data["target"]
reason = data["reason"]
if targetServer.nextClosest == self.ircd.serverID:
targetServer.disconnect(reason)
user.sendMessage("NOTICE", ":*** Disconnected {}".format(targetServer.name))
else:
targetServer.sendMessage("RSQUIT", targetServer.serverID, ":{}".format(reason), prefix=self.ircd.serverID)
user.sendMessage("NOTICE", ":*** Sent remote SQUIT for {}".format(targetServer.name))
return True
class ServerSQuit(Command):
implements(ICommand)
def __init__(self, ircd):
self.ircd = ircd
def parseParams(self, server, params, prefix, tags):
if len(params) != 2:
return None
if params[0] not in self.ircd.servers:
return None
return {
"target": self.ircd.servers[params[0]],
"reason": params[1]
}
def execute(self, server, data):
data["target"].disconnect(data["reason"])
return True
class RemoteSQuit(Command):
implements(ICommand)
def __init__(self, ircd):
self.ircd = ircd
def parseParams(self, server, params, prefix, tags):
if len(params) != 2:
return None
if params[0] not in self.ircd.servers:
return None
return {
"target": self.ircd.servers[params[0]],
"reason": params[1]
}
def execute(self, server, data):
targetServer = data["target"]
if targetServer.nextClosest == self.ircd.serverID:
targetServer.disconnect(data["reason"])
return True
targetServer.sendMessage("RSQUIT", targetServer.serverID, ":{}".format(data["reason"]), prefix=targetServer.nextClosest)
return True
squit = ServerQuit() | bsd-3-clause | Python |
|
5bb387947ac13bcd3949c6b17839033231c05e2d | Add unittests for cupy.testing.array | truongdq/chainer,benob/chainer,wkentaro/chainer,jnishi/chainer,okuta/chainer,hvy/chainer,cupy/cupy,ktnyt/chainer,cupy/cupy,niboshi/chainer,ktnyt/chainer,sinhrks/chainer,chainer/chainer,tkerola/chainer,jnishi/chainer,aonotas/chainer,okuta/chainer,hvy/chainer,ronekko/chainer,AlpacaDB/chainer,kiyukuta/chainer,chainer/chainer,sinhrks/chainer,chainer/chainer,niboshi/chainer,kikusu/chainer,keisuke-umezawa/chainer,jnishi/chainer,hvy/chainer,ktnyt/chainer,kashif/chainer,okuta/chainer,okuta/chainer,kikusu/chainer,ysekky/chainer,keisuke-umezawa/chainer,AlpacaDB/chainer,cupy/cupy,ktnyt/chainer,rezoo/chainer,pfnet/chainer,cupy/cupy,niboshi/chainer,benob/chainer,chainer/chainer,wkentaro/chainer,jnishi/chainer,truongdq/chainer,keisuke-umezawa/chainer,anaruse/chainer,wkentaro/chainer,wkentaro/chainer,niboshi/chainer,keisuke-umezawa/chainer,cemoody/chainer,hvy/chainer,delta2323/chainer | tests/cupy_tests/testing_tests/test_array.py | tests/cupy_tests/testing_tests/test_array.py | import copy
import unittest
import numpy
import six
import cupy
from cupy import testing
@testing.parameterize(
*testing.product({
'assertion': ['assert_allclose', 'assert_array_almost_equal',
'assert_array_almost_equal_nulp',
'assert_array_max_ulp', 'assert_array_equal'],
'array_module_x': [numpy, cupy],
'array_module_y': [numpy, cupy]
})
)
@testing.gpu
class TestEqualityAssertion(unittest.TestCase):
def setUp(self):
self.assertion = getattr(testing, self.assertion)
val = numpy.random.uniform(-1, 1, (2, 3))
self.x = self.array_module_x.array(val, val.dtype, copy=True)
self.y = self.array_module_y.array(val, val.dtype, copy=True)
def test_equality(self):
self.assertion(self.x, self.y)
def test_inequality(self):
self.y += 1
with self.assertRaises(AssertionError):
self.assertion(self.x, self.y)
def _convert_array(xs, array_module):
if array_module == 'all_numpy':
return xs
elif array_module == 'all_cupy':
return cupy.asarray(xs)
else:
return [cupy.asarray(x) if numpy.random.random_integers(0, 1)
else x for x in xs]
@testing.parameterize(
*testing.product({
'array_module_x': ['all_numpy', 'all_cupy', 'random'],
'array_module_y': ['all_numpy', 'all_cupy', 'random']
})
)
@testing.gpu
class TestListEqualityAssertion(unittest.TestCase):
def setUp(self):
xs = [numpy.random.uniform(-1, 1, (2, 3)) for _ in six.moves.range(10)]
ys = copy.deepcopy(xs)
self.xs = _convert_array(xs, self.array_module_x)
self.ys = _convert_array(ys, self.array_module_y)
def test_equality_numpy(self):
testing.assert_array_list_equal(self.xs, self.ys)
def test_inequality_numpy(self):
self.xs[0] += 1
with self.assertRaises(AssertionError):
testing.assert_array_list_equal(self.xs, self.ys)
@testing.parameterize(
*testing.product({
'array_module_x': [numpy, cupy],
'array_module_y': [numpy, cupy]
})
)
@testing.gpu
class TestLessAssertion(unittest.TestCase):
def setUp(self):
val = numpy.random.uniform(-1, 1, (2, 3))
self.x = self.array_module_x.array(val, val.dtype, copy=True)
self.y = self.array_module_y.array(val + 1, val.dtype, copy=True)
def test_equality_numpy(self):
testing.assert_array_less(self.x, self.y)
def test_inequality_numpy(self):
self.x[0] += 100
with self.assertRaises(AssertionError):
testing.assert_array_less(self.x, self.y)
| mit | Python |
|
6891c9e635cbe9ba663ac7f72bdff653bb8c8220 | make sure we can call commit | bank-netforce/netforce,anastue/netforce,anastue/netforce,anastue/netforce,anastue/netforce,anastue/netforce,bank-netforce/netforce,bank-netforce/netforce,bank-netforce/netforce,bank-netforce/netforce,anastue/netforce,bank-netforce/netforce | netforce_general/netforce_general/controllers/root.py | netforce_general/netforce_general/controllers/root.py | # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.controller import Controller
from netforce import config
from netforce.database import get_connection
from netforce import access
from netforce.model import get_model
class Root(Controller):
_path="/"
def get(self):
url=None
db=get_connection()
try:
if db:
res=db.get("SELECT root_url FROM settings WHERE id=1")
url=res.root_url
if url:
self.redirect(url)
return
user_id=access.get_active_user()
action=None
if user_id:
user=get_model("base.user").browse(user_id)
profile=user.profile_id
action=profile.home_action
if action:
self.redirect("/ui#name=%s"%action)
return
self.redirect("/ui#name=login")
finally:
if db:
db.commit()
Root.register()
| # Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.controller import Controller
from netforce import config
from netforce.database import get_connection
from netforce import access
from netforce.model import get_model
class Root(Controller):
_path="/"
def get(self):
url=None
db=get_connection()
try:
if db:
res=db.get("SELECT root_url FROM settings WHERE id=1")
url=res.root_url
if url:
self.redirect(url)
return
user_id=access.get_active_user()
action=None
if user_id:
user=get_model("base.user").browse(user_id)
profile=user.profile_id
action=profile.home_action
if action:
self.redirect("/ui#name=%s"%action)
return
self.redirect("/ui#name=login")
finally:
db.commit()
Root.register()
| mit | Python |
21d7e6f83f34e66167d7452998f2c7622a90e46c | Create test_parser.py | zeffii/small_csv_onetouch_parser | test_parser.py | test_parser.py | import os
import csv
import json
import collections
from collections import defaultdict
filename = "C:/Users/zeffi/Documents/Export_482016.csv"
some_dict = defaultdict(list)
def sanedate(date):
MM, DD, YYYY = date.split('/')
return '/'.join([DD, MM, YYYY])
def formatted_time(gtime):
HH, MM, SS = gtime.split(':')
return ':'.join([HH, MM])
def open_csv_test(filename):
#csvfile = open(filename, 'r', encoding='ISO-8859-15', newline='')
csvfile = open(filename, 'r', newline='')
ofile = csv.reader(csvfile, delimiter=',')
    # skip the first 6 header lines (OneTouch uses an odd csv format)
for i in range(6):
next(ofile)
for row in ofile:
try:
print(row)
date, gtime, gvalue = row[1:4]
date = date + '__' + sanedate(date)
gtime = formatted_time(gtime)
some_dict[date].append({'time': gtime, 'value': float(gvalue)})
except:
print("failed at")
print(row)
with open('C:/Users/zeffi/Documents/some_constructed.json', 'w') as wfile:
wfile.write(json.dumps(some_dict, sort_keys=True, indent=4))
open_csv_test(filename)
| mit | Python |
|
fabf4e8bd93155101d459716b35c10b32a3dfd16 | add tests/utils.py | nirs/yappi,nirs/yappi | tests/utils.py | tests/utils.py | import sys
import yappi
import unittest
class YappiUnitTestCase(unittest.TestCase):
def setUp(self):
if yappi.is_running():
yappi.stop()
yappi.clear_stats()
yappi.set_clock_type('cpu') # reset to default clock type
def tearDown(self):
fstats = yappi.get_func_stats()
if not fstats._debug_check_sanity():
sys.stdout.write("ERR: Duplicates found in Func stats\r\n")
fstats.debug_print()
for fstat in fstats:
if not fstat.children._debug_check_sanity():
sys.stdout.write("ERR: Duplicates found in ChildFunc stats\r\n")
fstat.children.print_all()
        tstats = yappi.get_thread_stats()
if not tstats._debug_check_sanity():
sys.stdout.write("ERR: Duplicates found in Thread stats\r\n")
tstats.print_all()
def assert_raises_exception(func):
    try:
        func()
    except:
        pass
    else:
        assert 0 == 1  # reached only if func did not raise
def run_with_yappi(func, *args, **kwargs):
yappi.start()
func(*args, **kwargs)
yappi.stop()
def run_and_get_func_stats(func, *args, **kwargs):
run_with_yappi(func, *args, **kwargs)
return yappi.get_func_stats()
def run_and_get_thread_stats(func, *args, **kwargs):
run_with_yappi(func, *args, **kwargs)
return yappi.get_thread_stats()
def is_py3x():
return sys.version_info > (3, 0)
def find_stat_by_name(stats, name):
for stat in stats:
if stat.name == name:
return stat
| mit | Python |
|
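A sketch of how the helpers above compose in an actual test body, assuming the test module can import this file as `utils` (the profiled function is illustrative):

```python
from utils import run_and_get_func_stats, find_stat_by_name

def burn_cpu():
    return sum(i * i for i in range(100000))

stats = run_and_get_func_stats(burn_cpu)
stat = find_stat_by_name(stats, 'burn_cpu')
print(stat.ncall if stat else 'stat not found')
```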
21a9ca4487d0d3ef9f2aa2ba5909b37c735c18e6 | Fix linter errors in test_tftrt.py | dongjoon-hyun/tensorflow,gunan/tensorflow,hfp/tensorflow-xsmm,ppwwyyxx/tensorflow,caisq/tensorflow,theflofly/tensorflow,dendisuhubdy/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,Xeralux/tensorflow,ghchinoy/tensorflow,apark263/tensorflow,xzturn/tensorflow,zasdfgbnm/tensorflow,allenlavoie/tensorflow,gautam1858/tensorflow,paolodedios/tensorflow,allenlavoie/tensorflow,aldian/tensorflow,xzturn/tensorflow,davidzchen/tensorflow,renyi533/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,davidzchen/tensorflow,jart/tensorflow,ZhangXinNan/tensorflow,xzturn/tensorflow,dongjoon-hyun/tensorflow,AnishShah/tensorflow,ageron/tensorflow,yanchen036/tensorflow,arborh/tensorflow,Intel-Corporation/tensorflow,snnn/tensorflow,freedomtan/tensorflow,brchiu/tensorflow,Xeralux/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aldian/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xodus7/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow,adit-chandra/tensorflow,Intel-tensorflow/tensorflow,ppwwyyxx/tensorflow,jbedorf/tensorflow,nburn42/tensorflow,dendisuhubdy/tensorflow,Bismarrck/tensorflow,xodus7/tensorflow,frreiss/tensorflow-fred,theflofly/tensorflow,yanchen036/tensorflow,ageron/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,hfp/tensorflow-xsmm,aselle/tensorflow,AnishShah/tensorflow,manipopopo/tensorflow,dancingdan/tensorflow,hfp/tensorflow-xsmm,manipopopo/tensorflow,jalexvig/tensorflow,jalexvig/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,kobejean/tensorflow,jalexvig/tensorflow,ZhangXinNan/tensorflow,DavidNorman/tensorflow,eaplatanios/tensorflow,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,jart/tensorflow,ghchinoy/tensorflow,ppwwyyxx/tensorflow,xodus7/tensorflow,ghchinoy/tensorflow,Xeralux/tensorflow,Xeralux/tensorflow,snnn/tensorflow,brchiu/tensorflow,hfp/tensorflow-xsmm,jbedorf/tensorflow,ageron/tensorflow,annarev/tensorflow,girving/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,hfp/tensorflow-xsmm,aam-at/tensorflow,ZhangXinNan/tensorflow,snnn/tensorflow,karllessard/tensorflow,snnn/tensorflow,aldian/tensorflow,renyi533/tensorflow,drpngx/tensorflow,dongjoon-hyun/tensorflow,zasdfgbnm/tensorflow,aselle/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,chemelnucfin/tensorflow,freedomtan/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow-pywrap_saved_model,xzturn/tensorflow,jbedorf/tensorflow,dongjoon-hyun/tensorflow,gunan/tensorflow,Intel-Corporation/tensorflow,aldian/tensorflow,jart/tensorflow,gojira/tensorflow,gautam1858/tensorflow,nburn42/tensorflow,Xeralux/tensorflow,caisq/tensorflow,kevin-coder/tensorflow-fork,hehongliang/tensorflow,yanchen036/tensorflow,jbedorf/tensorflow,dendisuhubdy/tensorflow,sarvex/tensorflow,Xeralux/tensorflow,paolodedios/tensorflow,gojira/tensorflow,gunan/tensorflow,petewarden/tensorflow,kobejean/tensorflow,alsrgv/tensorflow,gunan/tensorflow,jhseu/tensorflow,benoitsteiner/tensorflow-xsmm,AnishShah/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,girving/tensorflow,ageron/tensorflow,xodus7/tensorflow,ghchinoy/tensorflow,frreiss/tensorflow-fred,nburn42/tensorflow,dendisuhubdy/tensorflow,nburn42/tensorflow,gunan/tensorflow,Xeralux/tensorflow,dancingdan/tensorflow,seanli9jan/tensorflow,manipopopo/tensorflow,alsrgv/tensorflow,aam-at/tensorflow,hfp/tensorflow-xsmm,AnishShah/tensorflow,jendap/tensorflow,arborh/tensorflow,xodus7/tensorflow,theflofly/tensorflow,xodus7/tensorflow,x
zturn/tensorflow,jhseu/tensorflow,davidzchen/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_saved_model,asimshankar/tensorflow,sarvex/tensorflow,Intel-Corporation/tensorflow,aselle/tensorflow,freedomtan/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,lukeiwanski/tensorflow,seanli9jan/tensorflow,annarev/tensorflow,karllessard/tensorflow,dancingdan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,alsrgv/tensorflow,ghchinoy/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,kevin-coder/tensorflow-fork,Intel-tensorflow/tensorflow,tensorflow/tensorflow,eaplatanios/tensorflow,benoitsteiner/tensorflow-xsmm,meteorcloudy/tensorflow,lukeiwanski/tensorflow,chemelnucfin/tensorflow,alsrgv/tensorflow,drpngx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,jalexvig/tensorflow,kobejean/tensorflow,chemelnucfin/tensorflow,jhseu/tensorflow,dancingdan/tensorflow,jart/tensorflow,davidzchen/tensorflow,ppwwyyxx/tensorflow,caisq/tensorflow,apark263/tensorflow,alshedivat/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,seanli9jan/tensorflow,Intel-tensorflow/tensorflow,seanli9jan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,apark263/tensorflow,aam-at/tensorflow,ghchinoy/tensorflow,DavidNorman/tensorflow,gojira/tensorflow,benoitsteiner/tensorflow-xsmm,ageron/tensorflow,jalexvig/tensorflow,ppwwyyxx/tensorflow,brchiu/tensorflow,frreiss/tensorflow-fred,alsrgv/tensorflow,dancingdan/tensorflow,zasdfgbnm/tensorflow,jbedorf/tensorflow,lukeiwanski/tensorflow,meteorcloudy/tensorflow,zasdfgbnm/tensorflow,arborh/tensorflow,nburn42/tensorflow,Bismarrck/tensorflow,renyi533/tensorflow,zasdfgbnm/tensorflow,allenlavoie/tensorflow,jalexvig/tensorflow,jalexvig/tensorflow,nburn42/tensorflow,hfp/tensorflow-xsmm,yongtang/tensorflow,adit-chandra/tensorflow,hehongliang/tensorflow,Xeralux/tensorflow,ageron/tensorflow,manipopopo/tensorflow,DavidNorman/tensorflow,jalexvig/tensorflow,aam-at/tensorflow,aldian/tensorflow,karllessard/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_saved_model,lukeiwanski/tensorflow,brchiu/tensorflow,meteorcloudy/tensorflow,kobejean/tensorflow,asimshankar/tensorflow,caisq/tensorflow,Xeralux/tensorflow,gautam1858/tensorflow,nburn42/tensorflow,hehongliang/tensorflow,benoitsteiner/tensorflow-xsmm,ppwwyyxx/tensorflow,AnishShah/tensorflow,snnn/tensorflow,meteorcloudy/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,aam-at/tensorflow,DavidNorman/tensorflow,aam-at/tensorflow,renyi533/tensorflow,Bismarrck/tensorflow,ZhangXinNan/tensorflow,dancingdan/tensorflow,ghchinoy/tensorflow,lukeiwanski/tensorflow,nburn42/tensorflow,frreiss/tensorflow-fred,AnishShah/tensorflow,petewarden/tensorflow,jart/tensorflow,chemelnucfin/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,dongjoon-hyun/tensorflow,brchiu/tensorflow,aam-at/tensorflow,karllessard/tensorflow,karllessard/tensorflow,renyi533/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yanchen036/tensorflow,adit-chandra/tensorflow,girving/tensorflow,renyi533/tensorflow,Intel-Corporation/tensorflow,theflofly/tensorflow,alsrgv/tensorflow,ghchinoy/tensorflow,AnishShah/tensorflow,gojira/tensorflow,annarev/tensorflow,sarvex/tensorflow,Intel-Corporation/tensorflow,xzturn/tensorflow,jbedorf/tensorflow,dendisuhubdy/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,jhseu/tensorflow,jart/
tensorflow,zasdfgbnm/tensorflow,renyi533/tensorflow,caisq/tensorflow,aselle/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,cxxgtxy/tensorflow,hehongliang/tensorflow,dendisuhubdy/tensorflow,adit-chandra/tensorflow,jart/tensorflow,freedomtan/tensorflow,xzturn/tensorflow,asimshankar/tensorflow,asimshankar/tensorflow,manipopopo/tensorflow,girving/tensorflow,eaplatanios/tensorflow,aam-at/tensorflow,allenlavoie/tensorflow,aam-at/tensorflow,dancingdan/tensorflow,zasdfgbnm/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,Xeralux/tensorflow,hehongliang/tensorflow,aselle/tensorflow,aselle/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,manipopopo/tensorflow,yanchen036/tensorflow,tensorflow/tensorflow-pywrap_saved_model,snnn/tensorflow,snnn/tensorflow,karllessard/tensorflow,brchiu/tensorflow,snnn/tensorflow,adit-chandra/tensorflow,sarvex/tensorflow,arborh/tensorflow,xodus7/tensorflow,freedomtan/tensorflow,DavidNorman/tensorflow,renyi533/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,hfp/tensorflow-xsmm,renyi533/tensorflow,aselle/tensorflow,nburn42/tensorflow,benoitsteiner/tensorflow-xsmm,dancingdan/tensorflow,girving/tensorflow,yongtang/tensorflow,jhseu/tensorflow,dendisuhubdy/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,gunan/tensorflow,davidzchen/tensorflow,kevin-coder/tensorflow-fork,meteorcloudy/tensorflow,allenlavoie/tensorflow,Bismarrck/tensorflow,drpngx/tensorflow,girving/tensorflow,aselle/tensorflow,ageron/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,arborh/tensorflow,cxxgtxy/tensorflow,dancingdan/tensorflow,benoitsteiner/tensorflow-xsmm,aldian/tensorflow,girving/tensorflow,frreiss/tensorflow-fred,arborh/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,meteorcloudy/tensorflow,xodus7/tensorflow,dancingdan/tensorflow,xodus7/tensorflow,alshedivat/tensorflow,jalexvig/tensorflow,caisq/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,allenlavoie/tensorflow,girving/tensorflow,alshedivat/tensorflow,eaplatanios/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,meteorcloudy/tensorflow,apark263/tensorflow,gautam1858/tensorflow,seanli9jan/tensorflow,alsrgv/tensorflow,dongjoon-hyun/tensorflow,Bismarrck/tensorflow,sarvex/tensorflow,allenlavoie/tensorflow,xzturn/tensorflow,ppwwyyxx/tensorflow,jalexvig/tensorflow,snnn/tensorflow,aselle/tensorflow,dendisuhubdy/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,zasdfgbnm/tensorflow,dendisuhubdy/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,DavidNorman/tensorflow,apark263/tensorflow,zasdfgbnm/tensorflow,petewarden/tensorflow,kobejean/tensorflow,kobejean/tensorflow,drpngx/tensorflow,hehongliang/tensorflow,annarev/tensorflow,chemelnucfin/tensorflow,tensorflow/tensorflow,lukeiwanski/tensorflow,kevin-coder/tensorflow-fork,theflofly/tensorflow,theflofly/tensorflow,seanli9jan/tensorflow,AnishShah/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,asimshankar/tensorflow,adit-chandra/tensorflow,manipopopo/tensorflow,jendap/tensorflow,chemelnucfin/tensorflow,allenlavoie/tensorflow,ghchinoy/tensorflow,aselle/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,chemelnucfin/tensorflow,frreiss/tensorflow-fred,alsrgv/tensorflow,cxxgtxy/tensorflow,manipopopo/tensorflow,brchiu/tensorflow,ageron/tensorflow,yongtang/tensorflow,gunan/tensorflow,brchiu/tensorflow,asimshankar/tensorflow,asimshankar/tensorflow,gunan/tensorflow,nburn42/tensorflow,sarve
x/tensorflow,ZhangXinNan/tensorflow,theflofly/tensorflow,annarev/tensorflow,adit-chandra/tensorflow,alsrgv/tensorflow,ZhangXinNan/tensorflow,paolodedios/tensorflow,DavidNorman/tensorflow,jhseu/tensorflow,girving/tensorflow,jendap/tensorflow,hfp/tensorflow-xsmm,paolodedios/tensorflow,kevin-coder/tensorflow-fork,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,apark263/tensorflow,ageron/tensorflow,lukeiwanski/tensorflow,caisq/tensorflow,jendap/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,eaplatanios/tensorflow,alshedivat/tensorflow,eaplatanios/tensorflow,chemelnucfin/tensorflow,petewarden/tensorflow,aam-at/tensorflow,Bismarrck/tensorflow,jart/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,arborh/tensorflow,eaplatanios/tensorflow,gunan/tensorflow,benoitsteiner/tensorflow-xsmm,AnishShah/tensorflow,arborh/tensorflow,DavidNorman/tensorflow,jbedorf/tensorflow,aldian/tensorflow,brchiu/tensorflow,seanli9jan/tensorflow,kobejean/tensorflow,gunan/tensorflow,Bismarrck/tensorflow,yanchen036/tensorflow,paolodedios/tensorflow,seanli9jan/tensorflow,petewarden/tensorflow,ageron/tensorflow,jhseu/tensorflow,paolodedios/tensorflow,gojira/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jart/tensorflow,frreiss/tensorflow-fred,jendap/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,chemelnucfin/tensorflow,jbedorf/tensorflow,gojira/tensorflow,meteorcloudy/tensorflow,DavidNorman/tensorflow,apark263/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,benoitsteiner/tensorflow-xsmm,jbedorf/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yanchen036/tensorflow,paolodedios/tensorflow,drpngx/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,yongtang/tensorflow,annarev/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,kobejean/tensorflow,zasdfgbnm/tensorflow,cxxgtxy/tensorflow,caisq/tensorflow,gojira/tensorflow,manipopopo/tensorflow,arborh/tensorflow,sarvex/tensorflow,ZhangXinNan/tensorflow,gojira/tensorflow,snnn/tensorflow,chemelnucfin/tensorflow,benoitsteiner/tensorflow-xsmm,karllessard/tensorflow,asimshankar/tensorflow,apark263/tensorflow,tensorflow/tensorflow,ZhangXinNan/tensorflow,eaplatanios/tensorflow,eaplatanios/tensorflow,meteorcloudy/tensorflow,Intel-tensorflow/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ageron/tensorflow,jhseu/tensorflow,lukeiwanski/tensorflow,AnishShah/tensorflow,ghchinoy/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,kevin-coder/tensorflow-fork,drpngx/tensorflow,renyi533/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,alsrgv/tensorflow,yongtang/tensorflow,dongjoon-hyun/tensorflow,gautam1858/tensorflow,snnn/tensorflow,jendap/tensorflow,zasdfgbnm/tensorflow,DavidNorman/tensorflow,dongjoon-hyun/tensorflow,kevin-coder/tensorflow-fork,alshedivat/tensorflow,yongtang/tensorflow,caisq/tensorflow,kevin-coder/tensorflow-fork,tensorflow/tensorflow-experimental_link_static_libraries_once,jart/tensorflow,drpngx/tensorflow,lukeiwanski/tensorflow,ppwwyyxx/tensorflow,seanli9jan/tensorflow,DavidNorman/tensorflow,gojira/tensorflow,drpngx/tensorflow,alshedivat/tensorflow,eaplatanios/tensorflow,girving/tensorflow,dancingdan/tensorflow,Xeralux/tensorflow,theflofly/tensorflow,jbedorf/tensorflow,frreiss/tensorflow-fred,kobejean/tensorflow,caisq/tensorflow,apark263/tensorflow,annarev/tensorflow,als
rgv/tensorflow,petewarden/tensorflow,arborh/tensorflow,eaplatanios/tensorflow,jendap/tensorflow,xzturn/tensorflow,ageron/tensorflow,arborh/tensorflow,allenlavoie/tensorflow,chemelnucfin/tensorflow,hfp/tensorflow-xsmm,tensorflow/tensorflow-experimental_link_static_libraries_once,yanchen036/tensorflow,girving/tensorflow,cxxgtxy/tensorflow,ZhangXinNan/tensorflow,drpngx/tensorflow,Intel-tensorflow/tensorflow,dongjoon-hyun/tensorflow,jendap/tensorflow,manipopopo/tensorflow,gunan/tensorflow,nburn42/tensorflow,freedomtan/tensorflow,dongjoon-hyun/tensorflow,hfp/tensorflow-xsmm,Intel-Corporation/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,allenlavoie/tensorflow,tensorflow/tensorflow-pywrap_saved_model,allenlavoie/tensorflow,Intel-tensorflow/tensorflow,alshedivat/tensorflow,sarvex/tensorflow,Bismarrck/tensorflow,ZhangXinNan/tensorflow,dendisuhubdy/tensorflow,tensorflow/tensorflow,AnishShah/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ppwwyyxx/tensorflow,jendap/tensorflow,alshedivat/tensorflow,freedomtan/tensorflow,jendap/tensorflow,gojira/tensorflow,apark263/tensorflow,alsrgv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,manipopopo/tensorflow,apark263/tensorflow,freedomtan/tensorflow,alshedivat/tensorflow,renyi533/tensorflow,Intel-tensorflow/tensorflow,ZhangXinNan/tensorflow,alshedivat/tensorflow,aam-at/tensorflow,drpngx/tensorflow,benoitsteiner/tensorflow-xsmm,jhseu/tensorflow,theflofly/tensorflow,alshedivat/tensorflow,karllessard/tensorflow,brchiu/tensorflow,asimshankar/tensorflow,karllessard/tensorflow,jhseu/tensorflow,xodus7/tensorflow,benoitsteiner/tensorflow-xsmm,Bismarrck/tensorflow,aldian/tensorflow,annarev/tensorflow,dongjoon-hyun/tensorflow,annarev/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,arborh/tensorflow,frreiss/tensorflow-fred,jalexvig/tensorflow,frreiss/tensorflow-fred,gojira/tensorflow,aselle/tensorflow,jendap/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,kobejean/tensorflow,meteorcloudy/tensorflow,kevin-coder/tensorflow-fork,chemelnucfin/tensorflow,xodus7/tensorflow,freedomtan/tensorflow,hehongliang/tensorflow,annarev/tensorflow,gautam1858/tensorflow,lukeiwanski/tensorflow | tensorflow/contrib/tensorrt/test/test_tftrt.py | tensorflow/contrib/tensorrt/test/test_tftrt.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
import tensorflow.contrib.tensorrt as trt
def get_simple_graph_def():
"""Create a simple graph and return its graph_def"""
g = tf.Graph()
with g.as_default():
a = tf.placeholder(dtype=tf.float32, shape=(None, 24, 24, 2), name="input")
e = tf.constant(
[[[[1., 0.5, 4., 6., 0.5, 1.], [1., 0.5, 1., 1., 0.5, 1.]]]],
name="weights",
dtype=tf.float32)
conv = tf.nn.conv2d(
input=a, filter=e, strides=[1, 2, 2, 1], padding="SAME", name="conv")
b = tf.constant([4., 1.5, 2., 3., 5., 7.], name="bias", dtype=tf.float32)
t = tf.nn.bias_add(conv, b, name="biasAdd")
relu = tf.nn.relu(t, "relu")
idty = tf.identity(relu, "ID")
v = tf.nn.max_pool(
idty, [1, 2, 2, 1], [1, 2, 2, 1], "VALID", name="max_pool")
tf.squeeze(v, name="output")
return g.as_graph_def()
def run_graph(gdef, dumm_inp):
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.50)
tf.reset_default_graph()
g = tf.Graph()
with g.as_default():
inp, out = tf.import_graph_def(
graph_def=gdef, return_elements=["input", "output"])
inp = inp.outputs[0]
out = out.outputs[0]
with tf.Session(
config=tf.ConfigProto(gpu_options=gpu_options), graph=g) as sess:
val = sess.run(out, {inp: dumm_inp})
return val
if "__main__" in __name__:
inp_dims = (100, 24, 24, 2)
dummy_input = np.random.random_sample(inp_dims)
gdef = get_simple_graph_def()
# Get optimized graph
trt_graph = trt.create_inference_graph(gdef, ["output"], inp_dims[0])
o1 = run_graph(gdef, dummy_input)
o2 = run_graph(trt_graph, dummy_input)
assert np.array_equal(o1, o2)
print("Pass")
| # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TensorRT integration."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tensorflow.contrib.tensorrt as trt
import numpy as np
def getSimpleGraphDef():
"""Create a simple graph and return its graph_def"""
g = tf.Graph()
with g.as_default():
A = tf.placeholder(dtype=tf.float32, shape=(None, 24, 24, 2), name="input")
e = tf.constant(
[[[[1., 0.5, 4., 6., 0.5, 1.], [1., 0.5, 1., 1., 0.5, 1.]]]],
name="weights",
dtype=tf.float32)
conv = tf.nn.conv2d(
input=A, filter=e, strides=[1, 2, 2, 1], padding="SAME", name="conv")
b = tf.constant([4., 1.5, 2., 3., 5., 7.], name="bias", dtype=tf.float32)
t = tf.nn.bias_add(conv, b, name="biasAdd")
relu = tf.nn.relu(t, "relu")
idty = tf.identity(relu, "ID")
v = tf.nn.max_pool(
idty, [1, 2, 2, 1], [1, 2, 2, 1], "VALID", name="max_pool")
out = tf.squeeze(v, name="output")
return g.as_graph_def()
def runGraph(gdef, dumm_inp):
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.50)
tf.reset_default_graph()
g = tf.Graph()
with g.as_default():
inp, out = tf.import_graph_def(
graph_def=gdef, return_elements=["input", "output"])
inp = inp.outputs[0]
out = out.outputs[0]
with tf.Session(
config=tf.ConfigProto(gpu_options=gpu_options), graph=g) as sess:
val = sess.run(out, {inp: dumm_inp})
return val
if "__main__" in __name__:
inpDims = (100, 24, 24, 2)
dummy_input = np.random.random_sample(inpDims)
gdef = getSimpleGraphDef()
trt_graph = trt.create_inference_graph(gdef, ["output"],
inpDims[0]) # Get optimized graph
o1 = runGraph(gdef, dummy_input)
o2 = runGraph(trt_graph, dummy_input)
assert (np.array_equal(o1, o2))
print("Pass")
| apache-2.0 | Python |
a333a5c15ffd2b775ad4d854c7accd32b898d2fb | Add encryptor_python3.py compatible with Python 3 | Caleydo/caleydo_security_flask,Caleydo/caleydo_security_flask,Caleydo/caleydo_security_flask | encryptor_python3.py | encryptor_python3.py | from __future__ import print_function
__author__ = 'Samuel Gratzl'
if __name__ == '__main__':
import uuid
import hashlib
password = input('enter password: ').encode('utf-8')
salt = uuid.uuid4().hex.encode('utf-8')
hashed_password = hashlib.sha512(password + salt).hexdigest()
print(password)
print(salt)
print(hashed_password)
| bsd-3-clause | Python |
|
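A design note on the row above: a single salted SHA-512 is cheap to brute-force, so where this pattern guards real credentials, a key-derivation function is the usual replacement. A standard-library sketch (iteration count illustrative):

```python
import hashlib
import uuid

password = input('enter password: ').encode('utf-8')
salt = uuid.uuid4().hex.encode('utf-8')
hashed = hashlib.pbkdf2_hmac('sha512', password, salt, 100000)
print(salt.decode(), hashed.hex())
```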
5a634b9b837726a595a4450c8b1f46dd24b282a0 | Add generic Python context template | AgalmicVentures/Environment,AgalmicVentures/Environment,AgalmicVentures/Environment | scripts/Context.py | scripts/Context.py |
# Copyright (c) 2015-2019 Agalmic Ventures LLC (www.agalmicventures.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import atexit
import datetime
import inspect
try:
import ujson as json
except ImportError:
import json
import os
import subprocess
import threading
import traceback
def _updateRunEnd(context):
"""
Exit handler for clean shutdowns.
"""
endTime = datetime.datetime.now()
startTime = context.startTime()
print('Run duration: %s (%s - %s)' % (endTime - startTime, startTime, endTime))
class _Context(object):
def __init__(self):
self._startTime = datetime.datetime.now()
#Check the version
currentFile = os.path.abspath(inspect.getfile(inspect.currentframe()))
currentDir = os.path.dirname(currentFile)
parentDir = os.path.dirname(currentDir)
workingDirectory = os.getcwd()
os.chdir(parentDir)
try:
self._gitVersion = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('utf8').strip()
print('Version: %s' % self._gitVersion)
except subprocess.CalledProcessError:
self._gitVersion = None
print('WARNING: Could not retrieve git version!')
os.chdir(workingDirectory)
#Prepare for the end of the run
atexit.register(_updateRunEnd, self)
##### Accessors #####
def gitVersion(self):
"""
Returns the git version of the running code.
:return: str
"""
return self._gitVersion
def startTime(self):
"""
Returns the start time of the running code.
:return: datetime.datetime
"""
return self._startTime
##### Helpers #####
def handleException(self, exception, details=None):
"""
Handles an exception by logging it, inserting it into the database, etc.
:param exception: The exception
"""
backtrace = traceback.format_exc()
print(' EXCEPTION THROWN '.center(60, '*'))
print(backtrace)
#The global context is truly a singleton because the hardware it is managing can have only
#one thing using it at a time.
_context = None
_contextLock = threading.Lock()
class Context(object):
"""
The global singleton that holds all other object that are instantiated only
once (e.g. config, logging, DB connections, etc.).
"""
def __new__(cls):
global _context
if _context is None:
if _contextLock.acquire():
try:
if _context is None:
_context = _Context()
finally:
_contextLock.release()
return _context
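# Usage sketch (hypothetical): every caller shares the one instance.
#   ctx = Context()
#   assert ctx is Context()
#   print(ctx.gitVersion(), ctx.startTime())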
| mit | Python |
|
a7b25e343623f41b0466c8cea852ecc07ffab359 | Create marsLanderLevelTwo.py | NendoTaka/CodeForReference,NendoTaka/CodeForReference,NendoTaka/CodeForReference | Codingame/Python/Medium/marsLanderLevelTwo.py | Codingame/Python/Medium/marsLanderLevelTwo.py | import sys
import math
# Auto-generated code below aims at helping you parse
# the standard input according to the problem statement.
surface_n = int(input()) # the number of points used to draw the surface of Mars.
surface = []
for i in range(surface_n):
# land_x: X coordinate of a surface point. (0 to 6999)
# land_y: Y coordinate of a surface point. By linking all the points together in a sequential fashion, you form the surface of Mars.
land_x, land_y = [int(j) for j in input().split()]
surface.append([land_x,land_y])
minx = 0
maxx = 0
last = 0
for x in range(1, len(surface)):
if surface[x][1] == surface[last][1]:
minx = last
maxx = x
last = x
ly = surface[minx][1]
minx = surface[minx][0]
maxx = surface[maxx][0]
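# Strategy: the scan above located the flat landing segment [minx, maxx] at
# height ly; the loop below first kills excessive horizontal speed, then
# steers toward the segment while limiting the projected touchdown speed.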
# game loop
while 1:
# h_speed: the horizontal speed (in m/s), can be negative.
# v_speed: the vertical speed (in m/s), can be negative.
# fuel: the quantity of remaining fuel in liters.
# rotate: the rotation angle in degrees (-90 to 90).
# power: the thrust power (0 to 4).
x, y, h_speed, v_speed, fuel, rotate, power = [int(i) for i in input().split()]
if h_speed < -60 or h_speed > 60:
d = (-45,45)[h_speed > 60]
p = 4
elif x < maxx and x > minx:
if h_speed < -20:
d = -60
p = 4
elif h_speed > 20:
d = 60
p = 4
else:
if maxx - x < 200 and h_speed > 0:
d = 15
elif minx - x > -200 and h_speed < 0:
d = -15
else:
d = 0
p = (3,4)[math.sqrt(v_speed**2+((y-ly)*2*(4-3.711))) > 38] # sqrt() is never negative, so the crash-speed check must compare against +38
else:
d = (30,-30)[x < minx]
p = 4
# Write an action using print
# To debug: print("Debug messages...", file=sys.stderr)
# rotate power. rotate is the desired rotation angle. power is the desired thrust power.
print(d,"4")
| mit | Python |
|
ac2f517f15816277dd808ac473c4581212b8e841 | add migration for meta | WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder | Seeder/www/migrations/0004_auto_20170223_1457.py | Seeder/www/migrations/0004_auto_20170223_1457.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-23 14:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('www', '0003_auto_20170216_2204'),
]
operations = [
migrations.AlterModelOptions(
name='topiccollection',
options={'ordering': ['id'], 'verbose_name': 'Topic collection', 'verbose_name_plural': 'Topic collections'},
),
]
| mit | Python |
|
0a8af4a4f5e9fa711e9e4b1b14cc639d5ff166a0 | Create beta_dog_recommendation_system.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/beta/beta_dog_recommendation_system.py | Solutions/beta/beta_dog_recommendation_system.py | from itertools import takewhile
def find_similar_dogs(breed):
compare = dogs[breed]
scores = sorted((
[
dog,
sum(1 if q in compare else 0 for q in dogs[dog])
]
for dog in dogs if dog != breed
), key = lambda x: x[1], reverse=True)
max_score = scores[0][1]
return {s[0] for s in takewhile(lambda x: x[1]==max_score, scores)}
| mit | Python |
|
8a293ddc633730a6c2323392b1ac9083e5a45ad4 | Create lora_test_recv.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | device/src/test/lora_test_recv.py | device/src/test/lora_test_recv.py | # lora_test_recv.py
#Communication module: LoRa.
#Communication method with gateway via LoRa.
#The UART port drives the LoRa module.
#Parse JSON between device and gateway via LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(X3)(OUT) #mode setting, must not be left floating
#M1(IN) <--> GPIO(X4)(OUT) #mode setting, must not be left floating
#RXD(IN) <--> X1(TX)(OUT) #UART4
#TXD(OUT) <--> X2(RX)(IN) #UART4
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
#JSON data format:
#{ID:123,CMD:heartbeat,DATA:hello,SEQUENCE:123}
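#Note: the frame above is not strict JSON (keys are unquoted), so a simple
#split-based parser is assumed on both ends, e.g.:
#  dict(kv.split(':', 1) for kv in frame.strip('{}').split(','))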
from pyb import Pin
from pyb import UART
from pyb import Timer
import time
import pyb # pyb.LED is used in the timer callback below
#Blink an LED regularly (via a timer) to show the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
M0 = Pin('X3', Pin.OUT_PP)
M1 = Pin('X4', Pin.OUT_PP)
M0.low()
M1.low()
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
u4.write('{ID:1,CMD:OnLine,DATA:TYPBoard1,SEQ:0}')
if __name__=='__main__':
while True:
n = u4.any() # number of bytes waiting in the UART buffer
if n > 0:
print(u4.read())
| mit | Python |
|
d19a36fda0bfc9d221d65bde1612ff6181fca66d | add proposed setup.py file | shimpe/pyvectortween,shimpe/pyvectortween | setup.py | setup.py | from distutils.core import setup
setup(
name='vectortween',
version='0.0.1',
packages=['vectortween'],
url='',
license='MIT',
author='stefaan himpe',
author_email='[email protected]',
description='some tweening for use with libraries like gizeh and moviepy'
)
| mit | Python |
|
f96686735db03abdc2470c27ff8d7a04643c7727 | Add Exercise 9.8. | skidzo/pydy,skidzo/pydy,Shekharrajak/pydy,jcrist/pydy,jcrist/pydy,jcrist/pydy,Shekharrajak/pydy,jcrist/pydy,Shekharrajak/pydy,oliverlee/pydy,skidzo/pydy,oliverlee/pydy,jcrist/pydy,jcrist/pydy,skidzo/pydy,jcrist/pydy,Shekharrajak/pydy,oliverlee/pydy | Kane1985/Chapter5/Ex9.8.py | Kane1985/Chapter5/Ex9.8.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.8 from Kane 1985."""
from __future__ import division
from sympy import simplify, solve, symbols, Matrix
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import inertia, RigidBody
from sympy.physics.mechanics import cross, dot, dynamicsymbols
from util import msprint, subs, partial_velocities
from util import generalized_active_forces, potential_energy
from util import kde_matrix, vc_matrix
q1, q2, q3, q4, q5 = dynamicsymbols('q1:6')
q1d, q2d, q3d, q4d, q5d = dynamicsymbols('q1:6', level=1)
u1, u2, u3, u4, u5 = dynamicsymbols('u1:6')
u_prime, R, M, g, e, f, theta = symbols('u\' R, M, g, e, f, theta')
a, b, mA, mB, IA, J, K, t = symbols('a b mA mB IA J K t')
IA22, IA23, IA33 = symbols('IA22 IA23 IA33')
Q1, Q2, Q3 = symbols('Q1, Q2 Q3')
TB, TC = symbols('TB TC')
# reference frames
F = ReferenceFrame('F')
P = F.orientnew('P', 'axis', [-theta, F.y])
A = P.orientnew('A', 'axis', [q1, P.x])
# define frames for wheels
B = A.orientnew('B', 'axis', [q4, A.z])
C = A.orientnew('C', 'axis', [q5, A.z])
# define points
pO = Point('O')
pO.set_vel(F, 0)
pD = pO.locatenew('D', q2*P.y + q3*P.z)
pD.set_vel(A, 0)
pD.set_vel(F, pD.pos_from(pO).dt(F))
pS_star = pD.locatenew('S*', e*A.y)
pQ = pD.locatenew('Q', f*A.y - R*A.x)
for p in [pS_star, pQ]:
p.set_vel(A, 0)
p.v2pt_theory(pD, F, A)
# masscenters of bodies A, B, C
pA_star = pD.locatenew('A*', a*A.y)
pB_star = pD.locatenew('B*', -b*A.z)
pC_star = pD.locatenew('C*', +b*A.z)
for p in [pA_star, pB_star, pC_star]:
p.set_vel(A, 0)
p.v2pt_theory(pD, F, A)
# points of B, C touching the plane P
pB_hat = pB_star.locatenew('B^', -R*A.x)
pC_hat = pC_star.locatenew('C^', -R*A.x)
pB_hat.set_vel(B, 0)
pC_hat.set_vel(C, 0)
pB_hat.v2pt_theory(pB_star, F, B)
pC_hat.v2pt_theory(pC_star, F, C)
# kinematic differential equations and velocity constraints
kde = [u1 - dot(A.ang_vel_in(F), A.x),
u2 - dot(pD.vel(F), A.y),
u3 - q3d,
u4 - q4d,
u5 - q5d]
kde_map = solve(kde, [q1d, q2d, q3d, q4d, q5d])
vc = [dot(p.vel(F), A.y) for p in [pB_hat, pC_hat]] + [dot(pD.vel(F), A.z)]
vc_map = solve(subs(vc, kde_map), [u3, u4, u5])
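# The constraints above encode rolling without slip for wheels B and C
# (contact points have no velocity along A.y) and confinement of D to the
# plane (no velocity along A.z), eliminating u3, u4, u5 as independent speeds.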
forces = [(pS_star, -M*g*F.x), (pQ, Q1*A.x)] # no friction at point Q
torques = [(A, -TB*A.z), (A, -TC*A.z), (B, TB*A.z), (C, TC*A.z)]
partials = partial_velocities(zip(*forces + torques)[0], [u1, u2],
F, kde_map, vc_map, express_frame=A)
Fr, _ = generalized_active_forces(partials, forces + torques)
q = [q1, q2, q3, q4, q5]
u = [u1, u2]
n = len(q)
p = len(u)
m = n - p
if vc_map is not None:
u += sorted(vc_map.keys())
dV_dq = symbols('∂V/∂q1:{0}'.format(n + 1))
dV_eq = Matrix(Fr).T
W_sr, _ = kde_matrix(u, kde_map)
if vc_map is not None:
A_kr, _ = vc_matrix(u, vc_map)
else:
A_kr = Matrix.zeros(m, p)
for s in range(W_sr.shape[0]):
dV_eq += dV_dq[s] * (W_sr[s, :p] + W_sr[s, p:]*A_kr[:, :p])
print('Eq 5.1.18:')
for elem in dV_eq:
print(msprint(elem))
| bsd-3-clause | Python |
|
a57d39e7f63e6c034644a158aabb5ff6e6f04ae9 | add response test to testing module | karec/oct,TheGhouls/oct,TheGhouls/oct,karec/oct | oct/testing/response.py | oct/testing/response.py | # This file is meant to contain basic response status checks
# All functions must take a response object as a parameter
def check_response_status(resp, status):
"""
This will check if the response code is equal to the expected status
:param resp: a response object
:param status: the expected status
:type status: int
:return: None
:raise: AssertionError
"""
assert(resp.code == status), "Bad Response: HTTP %s, expected %s, URL : %s" % (resp.code, status, resp.geturl()) | mit | Python |
|
2f1b12a6f173c01f9631d0ad5a4d3c3f411983cb | add file notification platform | maddox/home-assistant,kennedyshead/home-assistant,jawilson/home-assistant,vitorespindola/home-assistant,tchellomello/home-assistant,pschmitt/home-assistant,kennedyshead/home-assistant,shaftoe/home-assistant,theolind/home-assistant,tmm1/home-assistant,DavidLP/home-assistant,shaftoe/home-assistant,Duoxilian/home-assistant,ewandor/home-assistant,jabesq/home-assistant,pottzer/home-assistant,jaharkes/home-assistant,mahendra-r/home-assistant,srcLurker/home-assistant,PetePriority/home-assistant,sander76/home-assistant,dmeulen/home-assistant,leoc/home-assistant,home-assistant/home-assistant,vitorespindola/home-assistant,Zac-HD/home-assistant,titilambert/home-assistant,JshWright/home-assistant,HydrelioxGitHub/home-assistant,ct-23/home-assistant,w1ll1am23/home-assistant,eagleamon/home-assistant,jnewland/home-assistant,michaelarnauts/home-assistant,open-homeautomation/home-assistant,tomduijf/home-assistant,deisi/home-assistant,Cinntax/home-assistant,sffjunkie/home-assistant,ma314smith/home-assistant,alanbowman/home-assistant,toddeye/home-assistant,open-homeautomation/home-assistant,Nzaga/home-assistant,molobrakos/home-assistant,alanbowman/home-assistant,leoc/home-assistant,hmronline/home-assistant,eagleamon/home-assistant,tboyce021/home-assistant,persandstrom/home-assistant,MartinHjelmare/home-assistant,theolind/home-assistant,betrisey/home-assistant,Smart-Torvy/torvy-home-assistant,oandrew/home-assistant,Theb-1/home-assistant,sanmiguel/home-assistant,jaharkes/home-assistant,MartinHjelmare/home-assistant,ErykB2000/home-assistant,caiuspb/home-assistant,morphis/home-assistant,Julian/home-assistant,hmronline/home-assistant,aronsky/home-assistant,nkgilley/home-assistant,teodoc/home-assistant,persandstrom/home-assistant,teodoc/home-assistant,partofthething/home-assistant,CCOSTAN/home-assistant,EricRho/home-assistant,robjohnson189/home-assistant,aoakeson/home-assistant,PetePriority/home-assistant,dorant/home-assistant,coteyr/home-assistant,hmronline/home-assistant,postlund/home-assistant,qedi-r/home-assistant,Theb-1/home-assistant,alexmogavero/home-assistant,sffjunkie/home-assistant,LinuxChristian/home-assistant,robbiet480/home-assistant,jnewland/home-assistant,JshWright/home-assistant,toddeye/home-assistant,persandstrom/home-assistant,ErykB2000/home-assistant,mahendra-r/home-assistant,tchellomello/home-assistant,xifle/home-assistant,sanmiguel/home-assistant,justyns/home-assistant,florianholzapfel/home-assistant,shaftoe/home-assistant,happyleavesaoc/home-assistant,michaelarnauts/home-assistant,varunr047/homefile,HydrelioxGitHub/home-assistant,sdague/home-assistant,qedi-r/home-assistant,pottzer/home-assistant,FreekingDean/home-assistant,sfam/home-assistant,deisi/home-assistant,Nzaga/home-assistant,devdelay/home-assistant,luxus/home-assistant,adrienbrault/home-assistant,dmeulen/home-assistant,keerts/home-assistant,Smart-Torvy/torvy-home-assistant,tboyce1/home-assistant,CCOSTAN/home-assistant,auduny/home-assistant,ma314smith/home-assistant,CCOSTAN/home-assistant,hexxter/home-assistant,partofthething/home-assistant,stefan-jonasson/home-assistant,fbradyirl/home-assistant,florianholzapfel/home-assistant,aoakeson/home-assistant,xifle/home-assistant,open-homeautomation/home-assistant,sffjunkie/home-assistant,ewandor/home-assistant,Theb-1/home-assistant,nkgilley/home-assistant,miniconfig/home-assistant,JshWright/home-assistant,coteyr/home-assistant,leoc/home-assistant,MungoRae/home-assistant,tinloaf/home-assistant,alexkolar/home-assistant,sr
cLurker/home-assistant,Duoxilian/home-assistant,nnic/home-assistant,keerts/home-assistant,Zyell/home-assistant,hmronline/home-assistant,deisi/home-assistant,mikaelboman/home-assistant,theolind/home-assistant,MungoRae/home-assistant,emilhetty/home-assistant,bencmbrook/home-assistant,HydrelioxGitHub/home-assistant,rohitranjan1991/home-assistant,robjohnson189/home-assistant,FreekingDean/home-assistant,PetePriority/home-assistant,auduny/home-assistant,Zac-HD/home-assistant,open-homeautomation/home-assistant,dmeulen/home-assistant,dorant/home-assistant,devdelay/home-assistant,varunr047/homefile,luxus/home-assistant,varunr047/homefile,bdfoster/blumate,nnic/home-assistant,maddox/home-assistant,bencmbrook/home-assistant,oandrew/home-assistant,emilhetty/home-assistant,LinuxChristian/home-assistant,adrienbrault/home-assistant,nevercast/home-assistant,shaftoe/home-assistant,alexmogavero/home-assistant,Smart-Torvy/torvy-home-assistant,badele/home-assistant,dmeulen/home-assistant,aequitas/home-assistant,tomduijf/home-assistant,balloob/home-assistant,alexkolar/home-assistant,DavidLP/home-assistant,tboyce1/home-assistant,mahendra-r/home-assistant,hexxter/home-assistant,Teagan42/home-assistant,florianholzapfel/home-assistant,sfam/home-assistant,jabesq/home-assistant,emilhetty/home-assistant,tboyce021/home-assistant,sanmiguel/home-assistant,jaharkes/home-assistant,morphis/home-assistant,w1ll1am23/home-assistant,stefan-jonasson/home-assistant,soldag/home-assistant,postlund/home-assistant,alexmogavero/home-assistant,balloob/home-assistant,jamespcole/home-assistant,molobrakos/home-assistant,mikaelboman/home-assistant,mikaelboman/home-assistant,dorant/home-assistant,LinuxChristian/home-assistant,kyvinh/home-assistant,instantchow/home-assistant,pschmitt/home-assistant,keerts/home-assistant,Julian/home-assistant,leoc/home-assistant,tmm1/home-assistant,EricRho/home-assistant,Danielhiversen/home-assistant,ct-23/home-assistant,tinloaf/home-assistant,mezz64/home-assistant,eagleamon/home-assistant,turbokongen/home-assistant,bencmbrook/home-assistant,betrisey/home-assistant,molobrakos/home-assistant,bdfoster/blumate,nugget/home-assistant,jaharkes/home-assistant,deisi/home-assistant,alexmogavero/home-assistant,caiuspb/home-assistant,LinuxChristian/home-assistant,oandrew/home-assistant,happyleavesaoc/home-assistant,xifle/home-assistant,vitorespindola/home-assistant,badele/home-assistant,turbokongen/home-assistant,home-assistant/home-assistant,joopert/home-assistant,florianholzapfel/home-assistant,betrisey/home-assistant,kyvinh/home-assistant,MartinHjelmare/home-assistant,stefan-jonasson/home-assistant,Julian/home-assistant,Danielhiversen/home-assistant,miniconfig/home-assistant,nugget/home-assistant,mKeRix/home-assistant,keerts/home-assistant,leppa/home-assistant,joopert/home-assistant,ma314smith/home-assistant,aequitas/home-assistant,bdfoster/blumate,aronsky/home-assistant,Teagan42/home-assistant,devdelay/home-assistant,g12mcgov/home-assistant,nugget/home-assistant,robjohnson189/home-assistant,robbiet480/home-assistant,Duoxilian/home-assistant,ct-23/home-assistant,philipbl/home-assistant,mikaelboman/home-assistant,oandrew/home-assistant,betrisey/home-assistant,pottzer/home-assistant,alanbowman/home-assistant,luxus/home-assistant,varunr047/homefile,rohitranjan1991/home-assistant,Cinntax/home-assistant,jamespcole/home-assistant,ErykB2000/home-assistant,ma314smith/home-assistant,SEJeff/home-assistant,srcLurker/home-assistant,hexxter/home-assistant,instantchow/home-assistant,mKeRix/home-assistant,ewandor/home-assistant,inst
antchow/home-assistant,auduny/home-assistant,morphis/home-assistant,EricRho/home-assistant,deisi/home-assistant,GenericStudent/home-assistant,Nzaga/home-assistant,nnic/home-assistant,balloob/home-assistant,xifle/home-assistant,nevercast/home-assistant,LinuxChristian/home-assistant,aoakeson/home-assistant,lukas-hetzenecker/home-assistant,Duoxilian/home-assistant,robjohnson189/home-assistant,MungoRae/home-assistant,nevercast/home-assistant,tomduijf/home-assistant,sdague/home-assistant,tboyce1/home-assistant,sffjunkie/home-assistant,maddox/home-assistant,g12mcgov/home-assistant,jnewland/home-assistant,rohitranjan1991/home-assistant,emilhetty/home-assistant,mikaelboman/home-assistant,justyns/home-assistant,MungoRae/home-assistant,emilhetty/home-assistant,miniconfig/home-assistant,Smart-Torvy/torvy-home-assistant,mKeRix/home-assistant,tboyce1/home-assistant,philipbl/home-assistant,Zyell/home-assistant,miniconfig/home-assistant,hexxter/home-assistant,aequitas/home-assistant,lukas-hetzenecker/home-assistant,leppa/home-assistant,srcLurker/home-assistant,sffjunkie/home-assistant,bdfoster/blumate,ct-23/home-assistant,caiuspb/home-assistant,bdfoster/blumate,jabesq/home-assistant,happyleavesaoc/home-assistant,soldag/home-assistant,MungoRae/home-assistant,michaelarnauts/home-assistant,fbradyirl/home-assistant,devdelay/home-assistant,ct-23/home-assistant,tmm1/home-assistant,hmronline/home-assistant,DavidLP/home-assistant,alexkolar/home-assistant,sfam/home-assistant,jawilson/home-assistant,titilambert/home-assistant,mKeRix/home-assistant,SEJeff/home-assistant,Zac-HD/home-assistant,happyleavesaoc/home-assistant,tinloaf/home-assistant,teodoc/home-assistant,sander76/home-assistant,SEJeff/home-assistant,Zac-HD/home-assistant,Julian/home-assistant,fbradyirl/home-assistant,stefan-jonasson/home-assistant,g12mcgov/home-assistant,Zyell/home-assistant,morphis/home-assistant,badele/home-assistant,kyvinh/home-assistant,justyns/home-assistant,mezz64/home-assistant,eagleamon/home-assistant,coteyr/home-assistant,philipbl/home-assistant,philipbl/home-assistant,jamespcole/home-assistant,varunr047/homefile,JshWright/home-assistant,GenericStudent/home-assistant,kyvinh/home-assistant | homeassistant/components/notify/file.py | homeassistant/components/notify/file.py | """
homeassistant.components.notify.file
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
File notification service.
Configuration:
To use the File notifier you will need to add something like the following
to your config/configuration.yaml
notify:
platform: file
path: PATH_TO_FILE
filename: FILENAME
timestamp: 1 or 0
Variables:
path
*Required
Path to the directory that contains your file. You need to have write
permission for that directory. The directory will be created if it doesn't
exist.
filename
*Required
Name of the file to use. The file will be created if it doesn't exist.
timestamp
*Required
Add a timestamp to the entry, valid entries are 1 or 0.
"""
import logging
from pathlib import (Path, PurePath)
import homeassistant.util.dt as dt_util
from homeassistant.helpers import validate_config
from homeassistant.components.notify import (
DOMAIN, ATTR_TITLE, BaseNotificationService)
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config):
""" Get the file notification service. """
if not validate_config(config,
{DOMAIN: ['path',
'filename',
'timestamp']},
_LOGGER):
return None
path = config[DOMAIN]['path']
filename = config[DOMAIN]['filename']
filepath = Path(path, filename)
# pylint: disable=no-member
if not filepath.parent.exists():
try:
filepath.parent.mkdir(parents=True)
filepath.touch(mode=0o644, exist_ok=True)
except OSError:
_LOGGER.exception("No write permission to given location.")
# raise PermissionError('') from None
# raise FileNotFoundError('') from None
return None
return FileNotificationService(filepath, config[DOMAIN]['timestamp'])
# pylint: disable=too-few-public-methods
class FileNotificationService(BaseNotificationService):
""" Implements notification service for the File service. """
# pylint: disable=no-member
def __init__(self, filepath, add_timestamp):
self._filepath = str(PurePath(filepath))
self._add_timestamp = add_timestamp
def send_message(self, message="", **kwargs):
""" Send a message to a file. """
file = open(self._filepath, 'a')
if not Path(self._filepath).stat().st_size:
title = '{} notifications (Log started: {})\n{}\n'.format(
kwargs.get(ATTR_TITLE),
dt_util.strip_microseconds(dt_util.utcnow()),
'-'*80)
file.write(title)
if self._add_timestamp == 1:
text = '{} {}\n'.format(dt_util.utcnow(), message)
file.write(text)
else:
text = '{}\n'.format(message)
file.write(text)
file.close()
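# Example (sketch): with the YAML above and timestamp set to 1, each notify
# call appends one line such as "2015-06-01 12:00:00+00:00 hello".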
| apache-2.0 | Python |
|
a9893fc562c9131fdaebaa842f587f415b7fdfda | Add second test. | fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python | oommfmif/test_basics.py | oommfmif/test_basics.py | import oommfmif as o
def test_get_oommf_version_return_type():
assert isinstance(o.get_version(), str)
def test_get_oommf_version():
assert o.get_version()[0:4] == "1.2."
| import oommfmif as o
def test_get_oommf_version():
assert isinstance(o.get_version(), str)
| bsd-2-clause | Python |
99389c1f863592c8c56c8dca415155536abbd0fd | Create new.py | wulidexixilian/iotprototype,wulidexixilian/iotprototype | simple_mqtt/new.py | simple_mqtt/new.py | mit | Python |
||
0ec30eb8bcf0e7688182f827bea24fd0ceb33501 | add models | fwilson42/dchacks2015,fwilson42/dchacks2015,fwilson42/dchacks2015 | models.py | models.py | from peewee import *
from config import db
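# Assumption: config.db exposes an already-configured peewee Database instance.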
class BaseModel(Model):
class Meta:
database = db
class HistoricalTrainPosition(BaseModel):
cars = IntegerField()
line_code = CharField()
next_station = CharField()
dest_station = CharField()
time = IntegerField()
timestamp = DateTimeField()
| mit | Python |
|
5c02d902753327b3413e994d6edc089b8ca72749 | Add create_flipper step | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/workflow/steps/create_flipper.py | dbaas/workflow/steps/create_flipper.py | # -*- coding: utf-8 -*-
import logging
from base import BaseStep
from dbaas_flipper.provider import FlipperProvider
LOG = logging.getLogger(__name__)
class CreateFlipper(BaseStep):
def __unicode__(self):
return "Creating Flipper"
def do(self, workflow_dict):
try:
if workflow_dict['qt']==1:
return True
flipper = FlipperProvider()
LOG.info("Creating Flipper...")
flipper.create_flipper_dependencies(
masterpairname=workflow_dict['names']['infra'],
hostname1=workflow_dict[
'hosts'][0].address,
writeip=workflow_dict[
'databaseinfraattr'][0].ip,
readip=workflow_dict[
'databaseinfraattr'][1].ip,
hostname2=workflow_dict[
'hosts'][1].address,
environment=workflow_dict['environment'])
return True
except Exception, e:
LOG.error(e)
return False
def undo(self, workflow_dict):
try:
if workflow_dict['qt']==1:
return True
LOG.info("Destroying Flipper...")
FlipperProvider(
).destroy_flipper_dependencies(masterpairname=workflow_dict['databaseinfra'].name,
environment=workflow_dict['environment'])
return True
except Exception, e:
LOG.error(e)
return False
| bsd-3-clause | Python |
|
cf99929c923cb31782a192f108c735bfcc9cde2f | Add render module, this will be the interface to manage rendering state files into high state data | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/render.py | salt/render.py | '''
Render is a module used to parse the render files into high salt state data
structures.
The render system uses render modules which are pluggable interfaces under the
render directory.
'''
# Import salt modules
import salt.loader
class Render(object):
'''
Render state files.
'''
def __init__(self, opts):
pass
| apache-2.0 | Python |
|
773efcb6aec427034263d550c600da0654031fa4 | Add simpleTestCondition.py script to test condition notification framework w/o using full Django unit test infrastucture | xgds/xgds_basalt,xgds/xgds_basalt,xgds/xgds_basalt,xgds/xgds_basalt | apps/basaltApp/scripts/simpleTestCondition.py | apps/basaltApp/scripts/simpleTestCondition.py | #! /usr/bin/env python
#__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
import sys
import requests
import datetime
import json
import pytz
HTTP_PREFIX = 'https'
URL_PREFIX = 'localhost'
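# Assumes an xGDS instance is reachable at the prefixes above; the requests
# calls below pass verify=False, presumably to tolerate self-signed test certs.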
def test_set_condition():
url = "%s://%s%s" % (HTTP_PREFIX, URL_PREFIX, '/xgds_core/condition/set/')
nowtime = datetime.datetime.now(pytz.utc)
isonow = nowtime.isoformat()
nested_data_dict = {'start_time': isonow,
'status': 'Started',
'timezone': 'US/Hawaii',
'name': 'test_set_condition',
'extra': 'Start time should be set',
'assignment': 'EV2',
'group_name': '20170426B',
'xgds_id': 'HIL13_A_WAY0_0_PXO'
}
data = {'time': isonow,
'source': 'playbook',
'id': 'PB1',
'data': json.dumps(nested_data_dict)
}
response = requests.post(url, data=data, verify=False)
json_response = response.json()
return json_response
def test_update_condition():
url = "%s://%s%s" % (HTTP_PREFIX, URL_PREFIX, '/xgds_core/condition/set/')
nowtime = datetime.datetime.now(pytz.utc)
isonow = nowtime.isoformat()
nested_data_dict = {'status': 'in_progress',
'extra': 'In progress for this',
'assignment': 'EV2',
'group_name': '20170426B',
'xgds_id': 'HIL13_A_WAY0_0_PXO'
}
data = {'time': isonow,
'source': 'playbook',
'id': 'PB1',
'data': json.dumps(nested_data_dict)
}
response = requests.post(url, data=data, verify=False)
json_response = response.json()
return json_response
def test_end_condition():
url = "%s://%s%s" % (HTTP_PREFIX, URL_PREFIX, '/xgds_core/condition/set/')
nowtime = datetime.datetime.now(pytz.utc)
isonow = nowtime.isoformat()
nested_data_dict = {'end_time': isonow,
'status': 'completed',
'extra': 'Done done done',
'assignment': 'EV2',
'group_name': '20170426B',
'xgds_id': 'HIL13_A_WAY0_0_PXO'
}
data = {'time': isonow,
'source': 'playbook',
'id': 'PB1',
'data': json.dumps(nested_data_dict)
}
response = requests.post(url, data=data, verify=False)
json_response = response.json()
return json_response
mode = sys.argv[1]
print "Running %s condition check..." % mode
if mode == 'set':
resp = test_set_condition()
if mode == 'update':
resp = test_update_condition()
if mode == 'end':
resp = test_end_condition()
print "response:", resp
| apache-2.0 | Python |
|
c735935c983cc7ccd72b2c71733e6f785a8a3ae3 | Create urls.py | IEEEDTU/CMS | Assessment/urls.py | Assessment/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^getAssignmentByCode', views.getAssignmentByCode, name='getAssignmentByCode'),
url(r'^retrieveAssignments', views.retrieveAssignments, name='retrieveAssignments'),
]
| mit | Python |
|
1bfbd397e3b3c805aa29f407915e1d10ca7eb179 | Create rbgLib.py | RoryCrispin/Rpi-RGB-LED-Library | rbgLib.py | rbgLib.py | from __future__ import division
import time
import RPi.GPIO as GPIO
# noinspection PyPep8Naming
class rgbColour(object):
red = 0
green = 0
blue = 0
def __init__(self, red, green, blue):
self.red = red
self.green = green
self.blue = blue
def hexToColour(r,g,b):
hex_constant = 0.3921568627 # = 100/255: scales 8-bit channel values to a 0-100 duty cycle
return rgbColour(hex_constant*r, hex_constant*g, hex_constant*b)
red = rgbColour(100,0,0)
green = rgbColour(0,100,0)
blue = rgbColour(0,0,100)
orange = hexToColour(255, 127, 0)
yellow = hexToColour(255,255,0)
indigo = hexToColour(75,0,130)
purple = rgbColour(100,0,100)
aqua = rgbColour(0,100,100)
turquoise = rgbColour(0,100,30)
# noinspection PyPep8Naming
class rbgLed(object):
def __init__(self, RED, GREEN, BLUE):
self.R_led = RED
self.G_led = GREEN
self.B_led = BLUE
def set_colour(self, colour):
self.R_led.set_ds(colour.red)
self.G_led.set_ds(colour.green)
self.B_led.set_ds(colour.blue)
def verbose_get_colour(self):
print (
"COLOUR : " + str(self.R_led.duty_cycle) + " " + str(self.G_led.duty_cycle) + " " + str(
self.B_led.duty_cycle))
return rgbColour(self.R_led.duty_cycle, self.G_led.duty_cycle, self.B_led.duty_cycle)
def blink(self, colour, hold_time):
self.set_colour(colour)
time.sleep(hold_time)
self.turn_off()
def get_colour(self):
return rgbColour(self.R_led.duty_cycle, self.G_led.duty_cycle, self.B_led.duty_cycle)
def turn_off(self):
self.R_led.turn_off()
self.G_led.turn_off()
self.B_led.turn_off()
def fade_to(self, destColour, length):
startColour = self.get_colour()
redDelta = find_delta(startColour.red, destColour.red, length)
print("RedDelta = " + str(redDelta))
greenDelta = find_delta(startColour.green, destColour.green, length)
blueDelta = find_delta(startColour.blue, destColour.blue, length)
for i in range(0, length + 1, 1):
time.sleep(0.004)
to = rgbColour(startColour.red + (i * redDelta),
startColour.green + (i * greenDelta),
startColour.blue + (i * blueDelta))
self.set_colour(to)
print(to.red)
class LED(object):
def __init__(self, pin, bool_pwm, freq):
self.pin = pin
GPIO.setup(self.pin, GPIO.OUT)
self.duty_cycle = 0
if bool_pwm:
self.pwm = GPIO.PWM(pin, freq)
self.pwm.start(self.duty_cycle)
def set_ds(self, duty_cycle):
self.duty_cycle = duty_cycle
self.pwm.ChangeDutyCycle(duty_cycle)
def turn_off(self):
self.set_ds(0)
def find_delta(start_val, finish_val, steps):
return (finish_val - start_val) / steps
def ledAlert(destColour, rgbLEDy, length):
startColour = rgbLEDy.get_colour()
rgbLEDy.fade_to(destColour, length)
time.sleep(0.5)
rgbLEDy.fade_to(startColour, length)
def init():
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
def exit(LED):
LED.turn_off()
GPIO.cleanup()
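# Usage sketch (BCM pin numbers below are hypothetical):
#   init()
#   led = rbgLed(LED(17, True, 100), LED(27, True, 100), LED(22, True, 100))
#   led.fade_to(purple, 50)
#   ledAlert(red, led, 50)
#   exit(led)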
| mit | Python |
|
83d4ac6c3565044727c9b3fcbada9966d529a80e | Add forgotten font loader lib | Nadeflore/dakara-player-vlc | lib/font_loader.py | lib/font_loader.py | import os
import sys
import logging
FONT_FILE_NAME_LIST = (
"fontawesome-webfont.ttf",
)
FONT_DIRECTORY = "share"
FONT_DIRECTORY_SYSTEM = "/usr/share/fonts"
FONT_DIRECTORY_USER = os.path.join(os.environ['HOME'], ".local/share/fonts")
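# Lookup order: the font must ship in the project share/ directory; it is
# only symlinked into the user font directory when neither the system nor
# the user directory already provides it.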
class FontLoader:
def __init__(self):
self.fonts_loaded = []
self.logger = logging.getLogger('FontLoader')
def load(self):
for font_file_name in FONT_FILE_NAME_LIST:
# check if font is in the project font directory
font_source_path = os.path.join(FONT_DIRECTORY, font_file_name)
if not os.path.isfile(font_source_path):
raise IOError("Font '{}' not found in project directories".format(
font_file_name
))
# check if the font is installed at system level
if os.path.isfile(os.path.join(FONT_DIRECTORY_SYSTEM, font_file_name)):
self.logger.debug("Font '{}' found in system directory".format(
font_file_name
))
continue
# check if the font is installed at user level
if os.path.isfile(os.path.join(FONT_DIRECTORY_USER, font_file_name)):
self.logger.debug("Font '{}' found in user directory".format(
font_file_name
))
continue
# if the font is not installed
font_target_path = os.path.join(FONT_DIRECTORY_USER, font_file_name)
os.symlink(
os.path.join(os.getcwd(), font_source_path),
font_target_path
)
self.fonts_loaded.append(font_target_path)
self.logger.debug("Font '{}' loaded in user directory: '{}'".format(
font_file_name,
font_target_path
))
def unload(self):
for font_path in self.fonts_loaded:
os.unlink(font_path)
self.logger.debug("Font '{}' unloaded".format(
font_path
))
self.fonts_loaded = []
| mit | Python |
|
d8f7cb58e7f760ccbb839aafeda4dbf7204d7d82 | Add r_latestagecapitalism | Fillll/reddit2telegram,Fillll/reddit2telegram | channels/r_latestagecapitalism/app.py | channels/r_latestagecapitalism/app.py | #encoding:utf-8
subreddit = 'latestagecapitalism'
t_channel = '@r_latestagecapitalism'
def send_post(submission, r2t):
return r2t.send_simple(submission)
| mit | Python |
|
151e8fc71e5ef2e31db13730bff57bc8fd915c30 | Add test case for list invoice | andela-sjames/paystack-python | paystackapi/tests/test_invoice.py | paystackapi/tests/test_invoice.py | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.invoice import Invoice
class TestInvoice(BaseTestCase):
@httpretty.activate
def test_create_invoice(self):
"""Method defined to test create Invoice."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice created"}',
status=201,
)
response = Invoice.create(
customer="CUS_je02lbimlqixzax",
amount=42000,
due_date="2019-05-08T00:00:00.000Z"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_list_invoice(self):
"""Method defined to test list Invoice."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/paymentrequest"),
content_type='text/json',
body='{"status": true, "message": "Invoice retrieved"}',
status=201,
)
response = Invoice.list(
customer="CUS_je02lbimlqixzax",
status="pending",
currency="NGN",
paid="false",
include_archive="true"
)
self.assertTrue(response['status'])
| mit | Python |
|
dcd1d962feec4f3cd914677545f74924ad9e6351 | Add test for file creation of low level library | mindriot101/fitsio-cffi | testing/test_direct_wrapper.py | testing/test_direct_wrapper.py | import os
from cffitsio._cfitsio import ffi, lib
def test_create_file(tmpdir):
filename = str(tmpdir.join('test.fits'))
f = ffi.new('fitsfile **')
status = ffi.new('int *')
lib.fits_create_file(f, filename, status)
assert status[0] == 0
assert os.path.isfile(filename)
| mit | Python |
|
2ccd94f9fb6f4a64976124ca82ac4c5ef585d64b | add serializer field | silverlogic/dj-bitcoin | djbitcoin/serializers.py | djbitcoin/serializers.py | from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from .utils import is_bitcoin_address_valid
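# CharField subclass: validating inside to_internal_value lets the field be
# dropped into any DRF serializer without wiring up extra validators.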
class BitcoinAddressField(serializers.CharField):
default_error_messages = {
'invalid': _('Invalid bitcoin address.')
}
def to_internal_value(self, data):
data = super().to_internal_value(data)
if not is_bitcoin_address_valid(data):
self.fail('invalid')
return data
| mit | Python |
|
dddc76173a5150939535b2c506aa967fe17ee000 | Fix #12 : env implementation | oleiade/Elevator | elevator/env.py | elevator/env.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from ConfigParser import ConfigParser
from utils.patterns import Singleton
from utils.decorators import lru_cache
from utils.snippets import items_to_dict
class Environment(object):
"""
Unix-shell-like environment class. Implements add, get, load and
flush methods, handles lists of values, and basically acts as a
simple key/value store.
"""
__metaclass__ = Singleton
SEQ_DELIMITER = ','
def __init__(self, env_file=''):
self.attributes = set() # Stores manually added attributes
if env_file:
self.load(env_file=env_file) # Has to be called last!
def add(self, name, value):
"""Adds a key/value to env"""
setattr(self, name, value)
self.attributes.add(name)
@lru_cache(maxsize=1024)
def get(self, name):
"""Cached env key fetch"""
var = getattr(self, name)
if ',' in var:
return var.split(',')
return var
def append(self, var, value):
"""
`value` can either be a (name, value) tuple/list pair,
or a value string. If a pair is given, the method
will consider that the var to append to is a dict
and will try to add the name/value to it.
If it is a String, it will try to automatically transform
the pointed var to a sequence and add the value to it.
"""
env_var = getattr(self, var)
env_var_type = type(env_var)
if ((isinstance(value, tuple) or isinstance(value, list)) and \
len(value) == 2):
key, value = value
env_var.update({key: value})
elif isinstance(value, str):
if env_var_type != list:
env_var = [env_var]
env_var.append(value)
setattr(self, var, env_var)
else:
err_msg = "Env value has to either be an iterable sequence or a str"
raise TypeError(err_msg)
self.attributes.add(var)
def load(self, env_file):
"""Loads an ini file containing the env description : key/value"""
config = ConfigParser()
config.read(env_file)
for section in config.sections():
setattr(self, section, items_to_dict(config.items(section)))
self.attributes.add(section)
for k, v in getattr(self, section).iteritems():
if self.SEQ_DELIMITER in v:
splitted = [e for e in v.split(self.SEQ_DELIMITER) if e]
getattr(self, section)[k] = splitted
def reload(self, env_file=''):
self.flush()
self.load(env_file)
def flush(self):
"""
Flushes the environment from its manually
set attributes.
"""
for attr in self.attributes:
delattr(self, attr)
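# Usage sketch (hypothetical INI file):
#   env = Environment('elevator.conf')  # each [section] becomes a dict attribute
#   env.add('debug', 'true')
#   env.get('debug')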
| mit | Python |
|
01b42c531f7ab0ca81768b6e9833062f9e31ba95 | Update train_tagger script | banglakit/spaCy,raphael0202/spaCy,raphael0202/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,Gregory-Howard/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,aikramer2/spaCy,banglakit/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,raphael0202/spaCy,explosion/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,explosion/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,banglakit/spaCy,raphael0202/spaCy,honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,aikramer2/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,aikramer2/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,aikramer2/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy | examples/training/train_tagger.py | examples/training/train_tagger.py | """A quick example for training a part-of-speech tagger, without worrying
about the tokenization, or other language-specific customizations."""
from __future__ import unicode_literals
from __future__ import print_function
import plac
from pathlib import Path
from spacy.vocab import Vocab
from spacy.tagger import Tagger
from spacy.tokens import Doc
import random
# You need to define a mapping from your data's part-of-speech tag names to the
# Universal Part-of-Speech tag set, as spaCy includes an enum of these tags.
# See here for the Universal Tag Set:
# http://universaldependencies.github.io/docs/u/pos/index.html
# You may also specify morphological features for your tags, from the universal
# scheme.
TAG_MAP = {
'N': {"pos": "NOUN"},
'V': {"pos": "VERB"},
'J': {"pos": "ADJ"}
}
# Usually you'll read this in, of course. Data formats vary.
# Ensure your strings are unicode.
DATA = [
(
["I", "like", "green", "eggs"],
["N", "V", "J", "N"]
),
(
["Eat", "blue", "ham"],
["V", "J", "N"]
)
]
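# Each training pair is (tokens, gold tags); every tag must be a key of
# TAG_MAP so the vocab can map it onto a Universal POS value.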
def ensure_dir(path):
if not path.exists():
path.mkdir()
def main(output_dir=None):
if output_dir is not None:
output_dir = Path(output_dir)
ensure_dir(output_dir)
ensure_dir(output_dir / "pos")
ensure_dir(output_dir / "vocab")
vocab = Vocab(tag_map=TAG_MAP)
# The default_templates argument is where features are specified. See
# spacy/tagger.pyx for the defaults.
tagger = Tagger.blank(vocab, Tagger.default_templates())
for i in range(5):
for words, tags in DATA:
doc = Doc(vocab, orths_and_spaces=zip(words, [True] * len(words)))
tagger.update(doc, tags)
random.shuffle(DATA)
tagger.model.end_training()
doc = Doc(vocab, orths_and_spaces=zip(["I", "like", "blue", "eggs"], [True]*4))
tagger(doc)
for word in doc:
print(word.text, word.tag_, word.pos_)
if output_dir is not None:
tagger.model.dump(str(output_dir / 'pos' / 'model'))
with (output_dir / 'vocab' / 'strings.json').open('wb') as file_:
tagger.vocab.strings.dump(file_)
if __name__ == '__main__':
plac.call(main)
# I V VERB
# like V VERB
# blue N NOUN
# eggs N NOUN
| mit | Python |
|
8fc4fdc96c07432f87b49676b4ba9ca92a0f3385 | Add tool.parser module | liorvh/grab,giserh/grab,DDShadoww/grab,SpaceAppsXploration/grab,istinspring/grab,huiyi1990/grab,subeax/grab,pombredanne/grab-1,kevinlondon/grab,liorvh/grab,codevlabs/grab,alihalabyah/grab,huiyi1990/grab,giserh/grab,subeax/grab,lorien/grab,DDShadoww/grab,raybuhr/grab,raybuhr/grab,maurobaraldi/grab,lorien/grab,SpaceAppsXploration/grab,pombredanne/grab-1,shaunstanislaus/grab,alihalabyah/grab,maurobaraldi/grab,shaunstanislaus/grab,codevlabs/grab,subeax/grab,kevinlondon/grab,istinspring/grab | grab/tools/parser.py | grab/tools/parser.py | def parse_int(val):
if val is None:
return None
else:
return int(val)
| mit | Python |
|
e426afbe9ccbc72a1aa0d00032144e8b9b2b8cdc | Implement utility for colored, tabular output using fabric's color controls. | locationlabs/gusset | gusset/colortable.py | gusset/colortable.py | """
Pretty table generation.
"""
from itertools import cycle
from string import capwords
from fabric.colors import red, green, blue, magenta, white, yellow
class ColorRow(dict):
"""
Ordered collection of column values.
"""
def __init__(self, table, **kwargs):
super(ColorRow, self).__init__()
self.table = table
for column in self.table.columns:
self[column] = kwargs.get(column)
def __str__(self):
"""
Generate a formatted and colored string for this row.
"""
def format_cell(color, item):
column, value = item
return color(" {}".format(value).ljust(1 + self.table.column_widths[column]))
# get items in column order
items = [(column, self[column]) for column in self.table.columns]
# format cells with color and length
cells = [format_cell(color, item) for color, item in zip(cycle(self.table.colors), items)]
return " ".join(cells)
class ColorTable(object):
"""
Simple row/column table.
"""
def __init__(self, *columns, **kwargs):
"""
Create a table with fixed columns.
:param columns: *args style list of column names
:param kwargs: additional options, including `sort_key` and `colors`
"""
self.columns = columns
self.sort_key = kwargs.get("sort_key")
self.colors = kwargs.get("colors", [red, green, blue, magenta, white, yellow])
self.header = ColorRow(self, **dict([(column, capwords(column)) for column in self.columns]))
# initialize column widths based on header
self.column_widths = dict([(column, len(self.header[column])) for column in self.columns])
self.rows = []
@property
def separator(self):
"""
Generate a separator row using current column widths.
"""
cells = dict([(column, "-" * self.column_widths[column]) for column in self.columns])
return ColorRow(self, **cells)
def add(self, **kwargs):
row = ColorRow(self, **kwargs)
# update column widths
for column in self.columns:
self.column_widths[column] = max(self.column_widths[column], len(row[column]))
self.rows.append(row)
def __str__(self):
"""
Generate a colored table.
"""
rows = sorted(self.rows, key=self.sort_key) if self.sort_key else self.rows
return "\n".join(map(str, [self.header, self.separator] + rows))
if __name__ == '__main__':
table = ColorTable("first", "last", sort_key=lambda row: (row["last"], row["first"]))
table.add(first="George", last="Washington")
table.add(first="John", last="Adams")
table.add(first="Thomas", last="Jefferson")
print table
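# Rough expected output (ANSI colours omitted):
#   First   Last
#   ------  ----------
#   John    Adams
#   Thomas  Jefferson
#   George  Washington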
| apache-2.0 | Python |
|
af61c9a44871b1da8a939470492c18a45ab373e1 | Create lineValueDisp.py | task123/AutoTT,task123/AutoTT,task123/AutoTT | lineValueDisp.py | lineValueDisp.py |
import TCP
import Motor
import Steering
import Status
import time
import Cameras
import Lights
import Modes
import os
try:
trip_meter = Motor.TripMeter()
motors = Motor.Motor(trip_meter)
follow_line = Steering.FollowLine(motors, start_speed = 0)
while True:
time.sleep(10)
except: # broad catch: also stops the motors on KeyboardInterrupt
motors.turn_off()
follow_line.stop()
| mit | Python |
|
3660c183ba1ddec8033ceae21b1b06fd0ab9a8b7 | Add Signal class | sonusz/PhasorToolBox | phasortoolbox/signal.py | phasortoolbox/signal.py | class Signal(object):
run = False | mit | Python |
|
913bb348938c2b54ab7a76c7e16ce9b3fb999dbe | Copy fail. | DMOJ/site,Phoenix1369/site,DMOJ/site,monouno/site,monouno/site,Phoenix1369/site,Minkov/site,Phoenix1369/site,Phoenix1369/site,DMOJ/site,monouno/site,monouno/site,monouno/site,Minkov/site,Minkov/site,Minkov/site,DMOJ/site | judge/management/commands/render_pdf.py | judge/management/commands/render_pdf.py | import os
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from django.utils import translation
from judge.models import Problem, ProblemTranslation
from judge.pdf_problems import WebKitPdfMaker
class Command(BaseCommand):
help = 'renders a PDF file of a problem'
def add_arguments(self, parser):
parser.add_argument('code', help='code of problem to render')
parser.add_argument('directory', nargs='?', help='directory to store temporaries')
parser.add_argument('-l', '--language', default=settings.LANGUAGE_CODE,
help='language to render PDF in')
def handle(self, *args, **options):
try:
problem = Problem.objects.get(code=options['code'])
except Problem.DoesNotExist:
print 'Bad problem code'
return
try:
trans = problem.translations.get(language=options['language'])
except ProblemTranslation.DoesNotExist:
trans = None
directory = options['directory']
with WebKitPdfMaker(directory, clean_up=directory is None) as maker, \
translation.override(options['language']):
maker.html = get_template('problem/raw.jade').render(Context({
'problem': problem,
'problem_name': problem.name if trans is None else trans.name,
'description': problem.description if trans is None else trans.description,
})).replace('"//', '"http://').replace("'//", "'http://")
for file in ('style.css', 'pygment-github.css'):
maker.load(file, os.path.join(settings.DMOJ_RESOURCES, file))
maker.make(debug=True)
if not maker.success:
print>>sys.stderr, maker.log
elif directory is None:
os.rename(maker.pdffile, problem.code + '.pdf')
| import os
import sys
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Context
from django.template.loader import get_template
from django.utils import translation
from judge.models import Problem, ProblemTranslation
from judge.pdf_problems import WebKitPdfMaker
class Command(BaseCommand):
help = 'renders a PDF file of a problem'
def add_arguments(self, parser):
parser.add_argument('code', help='code of problem to render')
parser.add_argument('directory', nargs='?', help='directory to store temporaries')
parser.add_argument('-l', '--language', default=settings.LANGUAGE_CODE,
help='language to render PDF in')
def handle(self, *args, **options):
try:
problem = Problem.objects.get(code=options['code'])
except Problem.DoesNotExist:
print 'Bad problem code'
return
problem = self.get_object()
try:
trans = problem.translations.get(language=options['language'])
except ProblemTranslation.DoesNotExist:
trans = None
directory = options['directory']
with WebKitPdfMaker(directory, clean_up=directory is None) as maker, \
translation.override(options['language']):
maker.html = get_template('problem/raw.jade').render(Context({
'problem': problem,
'problem_name': problem.name if trans is None else trans.name,
'description': problem.description if trans is None else trans.description,
})).replace('"//', '"http://').replace("'//", "'http://")
for file in ('style.css', 'pygment-github.css'):
maker.load(file, os.path.join(settings.DMOJ_RESOURCES, file))
maker.make(debug=True)
if not maker.success:
print>>sys.stderr, maker.log
elif directory is None:
os.rename(maker.pdffile, problem.code + '.pdf')
| agpl-3.0 | Python |
b31e15d12dbff8eaab71ec523ec16d5f1afe908b | add sharpen pic tool | congminghaoxue/learn_python | sharpen_pic.py | sharpen_pic.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#function: sharpen an image
import os
import os.path
import sys, getopt, argparse
from PIL import Image, ImageEnhance
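# PIL's ImageEnhance.Sharpness: a factor of 1.0 returns the original image;
# the 2.0 used below sharpens it.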
def sharpenPic(filein,fileout):
im02 = Image.open(filein)
im_30 = ImageEnhance.Sharpness(im02).enhance(2.0)
im_30.save(fileout)
def main():
argc = len(sys.argv)
cmdargs = str(sys.argv)
parser = argparse.ArgumentParser(description="Tool for sharpening images")
parser.add_argument('-f', '--fromdir', required=True, help='the directory path of the input file')
parser.add_argument('-d', '--outdir', required=True, help='the directory of the output file')
args = parser.parse_args()
fromdir = args.fromdir
outdir = args.outdir
for file in os.listdir(fromdir):
if file == "desktop.ini":
continue
filein = os.path.join(fromdir, file)
fileout = os.path.join(outdir, file)
try:
sharpenPic(filein, fileout)
except Exception as e:
print(e)
continue
if __name__ == '__main__':
main() | apache-2.0 | Python |
|
ca53fcbba66dd4999f68f3523367c20a6b5e1e47 | Create script.py | McKay1717/Matrix-polygon-modification | script.py | script.py | import math
def getMatrix():
L = [None] * 3
for j in range(3):
print "Ligne "+str(j)+ "\n"
L[j] = [None] * 3
for i in range(3):
L[j][i] = input("Terme "+str(i)+"\n")
return L
def getPoint():
L = [None] * 3
for j in range(2):
L[j] = input("Terme "+str(j)+"\n")
L[2] = 1
return L
def PrintPoint(L):
s ="["+str(L[0])+","+str(L[1])+","+str(L[2])+"]"
print s
def PrintMatrix(L):
s ="["+str(L[0][0])+","+str(L[0][1])+","+str(L[0][2])+"]"
print s
s ="["+str(L[1][0])+","+str(L[1][1])+","+str(L[1][2])+"]"
print s
s ="["+str(L[2][0])+","+str(L[2][1])+","+str(L[2][2])+"]"
print s
def MatrixProduct(L,M):
result = [[0,0,0],[0,0,0],[0,0,0]]
for i in range(len(L)):
for j in range(len(M[0])):
for k in range(len(M)):
result[i][j] += L[i][k] * M[k][j]
return result
def MatrixPointProduct(L,P):
result = [0,0,0]
for i in range(len(L)):
for j in range(len(P)):
result[i] += L[i][j] * P[j]
return result
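# All transforms below are 3x3 matrices acting on homogeneous 2D points
# [x, y, 1], so translation composes by matrix product like rotation/scaling.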
def GenRotationMatrix(teta):
L = [[math.cos(teta),-math.sin(teta),0],
[math.sin(teta),math.cos(teta),0],
[0,0,1]]
return L
def GenHomothetieMatrix(k):
L = [[k,0,0],
[0,k,0],
[0,0,1]]
return L
def GenTranslationMatrix(a,b):
L = [[1,0,a],
[0,1,b],
[0,0,1]]
return L
def GenCentredRotationMatrix(teta,a,b):
# rotation about (a,b): shift the centre to the origin, rotate, shift back
return MatrixProduct(GenTranslationMatrix(a,b),MatrixProduct(GenRotationMatrix(teta),GenTranslationMatrix(-a,-b)))
def GenCentredHomothetieMatrix(k,a,b):
# scaling centred on (a,b), built by the same conjugation
return MatrixProduct(GenTranslationMatrix(a,b),MatrixProduct(GenHomothetieMatrix(k),GenTranslationMatrix(-a,-b)))
def GetPolygon():
nb = input("Combien de point voulez vous ?")
L = [None] * nb
for i in range(nb):
L[i] = getPoint()
return L
GetPolygon()
| apache-2.0 | Python |
|
98e086696ea36d6050de9f23d2380b704fee305d | Create chatbot3.py | wannaphongcom/code-python3-blog | ai/chatbot3.py | ai/chatbot3.py | # A simple chat bot in Python
# Written by Wannaphong Phatthiyaphaibun
# https://python3.wannaphong.com/2015/07/ทำ-chat-bot-ง่าย-ๆ-ในภาษา-python.html
from tinydb import TinyDB, where # import the tinydb module
import random
db = TinyDB('db.json') # open the database stored in db.json
def addword():
print("ไม่พบประโยคนี้ในระบบ คุณต้องการสอนไหม")
addif = str(input("Y or N : "))
if addif == "Y":
q = input("คำถาม : ")
ans = input("คำตอบ : ")
db.insert({q:ans}) # store the new question/answer pair in the database
else:
print("Ok")
while True:
text = input("> ")
a = db.search(where(text)) # look for a matching phrase in the database
if a == []:
addword()
else:
a = random.choice([a for a in db.search(where(text))]) # several answers may have been stored separately, so pick one at random
print(a[text])
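# Flow: unknown input triggers addword() to teach a new question/answer
# pair; known input replays one stored answer, chosen at random.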
| mit | Python |
|
0882c8885b88618ea55b97ace256cdf833a1547d | Add tests for pylama isort | PyCQA/isort,PyCQA/isort | tests/test_pylama_isort.py | tests/test_pylama_isort.py | import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "isort.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
| mit | Python |
|
8a7ea0e8d29d443676c8893790625cbeb9d973ad | Test addByUniqueID Survey model | uzh/msregistry | tests/test_survey_model.py | tests/test_survey_model.py | # Copyright (C) 2016 University of Zurich. All rights reserved.
#
# This file is part of MSRegistry Backend.
#
# MSRegistry Backend is free software: you can redistribute it and/or
# modify it under the terms of the version 3 of the GNU Affero General
# Public License as published by the Free Software Foundation, or any
# other later version.
#
# MSRegistry Backend is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version
# 3 of the GNU Affero General Public License for more details.
#
# You should have received a copy of the version 3 of the GNU Affero
# General Public License along with MSRegistry Backend. If not, see
# <http://www.gnu.org/licenses/>.
__author__ = "Filippo Panessa <[email protected]>"
__copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik,"
" University of Zurich")
import unittest
from app import create_app
from app.models import Survey
class SurveyModelTestCase(unittest.TestCase):
uniqueID = 'd4c74594d841139328695756648b6bd6'
def setUp(self):
self.app = create_app('TESTING')
self.app_context = self.app.app_context()
self.app_context.push()
def tearDown(self):
self.app_context.pop()
def test_addByUniqueID(self):
u = Survey()
self.assertTrue(u.addByUniqueID(self.uniqueID, {}))
| agpl-3.0 | Python |
|
280e72331d99a8c49783196951287627a933a659 | Add py solution for 459. Repeated Substring Pattern | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | py/repeated-substring-pattern.py | py/repeated-substring-pattern.py | class Solution(object):
def repeatedSubstringPattern(self, s):
"""
:type s: str
:rtype: bool
"""
for i in xrange(1, len(s) / 2 + 1):
if len(s) % i == 0 and len(set(s[j:j+i] for j in xrange(0, len(s), i))) == 1:
return True
return False
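# Idea: s repeats a unit of length i iff len(s) % i == 0 and every i-sized
# chunk equals the first; a repeating unit is at most half the string, so
# trying i up to len(s)/2 suffices. O(n^2) in the worst case.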
| apache-2.0 | Python |
|
75dc32ef71fd32c7728269b01a74faf840690473 | Add a slow bot to test timeout feature | Dentosal/python-sc2 | examples/too_slow_bot.py | examples/too_slow_bot.py | import random
import asyncio
import sc2
from sc2 import Race, Difficulty
from sc2.constants import *
from sc2.player import Bot, Computer
from proxy_rax import ProxyRaxBot
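# Sleeping up to one second per step is meant to exceed the 0.2 s
# step_time_limit set below, exercising the timeout handling under test.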
class SlowBot(ProxyRaxBot):
async def on_step(self, state, iteration):
await asyncio.sleep(random.random())
await super().on_step(state, iteration)
def main():
sc2.run_game(sc2.maps.get("Abyssal Reef LE"), [
Bot(Race.Terran, SlowBot()),
Computer(Race.Protoss, Difficulty.Medium)
], realtime=False, step_time_limit=0.2)
if __name__ == '__main__':
main()
| mit | Python |
|
7c865c63d5debcf7463ad1b81470d2f044ec4738 | Add lab result models | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/patients/lab_results/models.py | radar/patients/lab_results/models.py | from sqlalchemy import Column, Integer, String, ForeignKey, Numeric, Date, Boolean
from sqlalchemy.orm import relationship
from radar.database import db
from radar.models import PatientMixin, UnitMixin, CreatedModifiedMixin, DataSource
class LabOrderDefinition(db.Model):
__tablename__ = 'lab_order_definitions'
id = Column(Integer, primary_key=True)
code = Column(String, nullable=False)
description = Column(String, nullable=False)
pre_post = Column(Boolean, nullable=False)
class LabResultDefinition(db.Model):
__tablename__ = 'lab_result_definitions'
id = Column(Integer, primary_key=True)
lab_order_definition_id = Column(Integer, ForeignKey('lab_order_definitions.id'), nullable=False)
code = Column(String, nullable=False)
description = Column(String, nullable=False)
class LabOrder(DataSource, PatientMixin, UnitMixin, CreatedModifiedMixin):
__tablename__ = 'lab_orders'
id = Column(Integer, ForeignKey('lab_orders.id'), primary_key=True)
lab_order_definition_id = Column(Integer, ForeignKey('lab_order_definitions.id'), nullable=False)
lab_order_definition = relationship('LabOrderDefinition')
date = Column(Date, nullable=False)
pre_post = Column(String)
lab_results = relationship('LabResult', cascade='all, delete-orphan')
__mapper_args__ = {
        'polymorphic_identity': 'lab_orders',
}
class LabResult(db.Model):
__tablename__ = 'lab_results'
id = Column(Integer, primary_key=True)
lab_order_id = Column(Integer, ForeignKey('lab_orders.id', ondelete='CASCADE', onupdate='CASCADE'), nullable=False)
lab_order = relationship('LabOrder')
lab_result_definition_id = Column(Integer, ForeignKey('lab_result_definitions.id'), nullable=False)
lab_result_definition = relationship('LabResultDefinition')
value = Column(Numeric, nullable=False) | agpl-3.0 | Python |
|
36d8a0e091ec1dd4ff451031810c75cd0431ac44 | add admins.py file | skitoo/aligot,aligot-project/aligot,aligot-project/aligot,aligot-project/aligot | aligot/admin.py | aligot/admin.py | # coding: utf-8
from django.contrib import admin
from .models import Note, NoteBook, NoteRevision, User
admin.site.register(User)
admin.site.register(NoteBook)
admin.site.register(Note)
admin.site.register(NoteRevision)
| mit | Python |
|
4e4b23ebae9274511fa3fad438b198c19b38c98d | Add a breakpad / content_shell integration test | littlstar/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,M4sse/chromium.src,littlstar/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,Chilledheart/chromium,dushu1203/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Chilledheart/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,dednal/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,Just-D/chromium-1,M4sse/chromium.src,dednal/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,ltilve/chromium,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-e
fl,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,dushu1203/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Just-D/chromium-1,littlstar/chromium.src,littlstar/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,jaruba/chromium.src,dednal/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,ondra-novak/chromium.src,littlstar/chromium.src,M4sse/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,littlstar/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,dednal/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,ondra-novak/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,ondra-novak/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,dednal/chromium.src | content/shell/tools/breakpad_integration_test.py | content/shell/tools/breakpad_integration_test.py | #!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Integration test for breakpad in content shell.
This test checks that content shell and breakpad are correctly hooked up, as
well as that the tools can symbolize a stack trace."""
import glob
import optparse
import os
import shutil
import subprocess
import sys
import tempfile
CONCURRENT_TASKS=4
def main():
parser = optparse.OptionParser()
parser.add_option('', '--build-dir', default='',
help='The build output directory.')
parser.add_option('', '--binary', default='',
help='The path of the binary to generate symbols for.')
parser.add_option('', '--no-symbols', default=False, action='store_true',
help='Symbols are not expected to work.')
parser.add_option('-j', '--jobs', default=CONCURRENT_TASKS, action='store',
type='int', help='Number of parallel tasks to run.')
parser.add_option('-v', '--verbose', action='store_true',
help='Print verbose status output.')
(options, _) = parser.parse_args()
if not options.build_dir:
print "Required option --build-dir missing."
return 1
if not options.binary:
print "Required option --binary missing."
return 1
if not os.access(options.binary, os.X_OK):
print "Cannot find %s." % options.binary
return 1
failure = ''
# Create a temporary directory to store the crash dumps and symbols in.
crash_dir = tempfile.mkdtemp()
try:
print "# Generate symbols."
breakpad_tools_dir = os.path.join(
os.path.dirname(__file__), '..', '..', '..',
'components', 'breakpad', 'tools')
generate_symbols = os.path.join(
breakpad_tools_dir, 'generate_breakpad_symbols.py')
symbols_dir = os.path.join(crash_dir, 'symbols')
cmd = [generate_symbols,
'--build-dir=%s' % options.build_dir,
'--binary=%s' % options.binary,
'--symbols-dir=%s' % symbols_dir,
'--jobs=%d' % options.jobs]
if options.verbose:
cmd.append('--verbose')
print ' '.join(cmd)
failure = 'Failed to run generate_breakpad_symbols.py.'
subprocess.check_call(cmd)
print "# Run content_shell and make it crash."
cmd = [options.binary,
'--dump-render-tree',
'chrome://crash',
'--enable-crash-reporter',
'--crash-dumps-dir=%s' % crash_dir]
if options.verbose:
print ' '.join(cmd)
failure = 'Failed to run content_shell.'
if options.verbose:
subprocess.check_call(cmd)
else:
with open(os.devnull, 'w') as devnull:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
print "# Retrieve crash dump."
dmp_files = glob.glob(os.path.join(crash_dir, '*.dmp'))
failure = 'Expected 1 crash dump, found %d.' % len(dmp_files)
if len(dmp_files) != 1:
raise Exception(failure)
dmp_file = dmp_files[0]
minidump = os.path.join(crash_dir, 'minidump')
dmp_to_minidump = os.path.join(breakpad_tools_dir, 'dmp2minidump.py')
cmd = [dmp_to_minidump, dmp_file, minidump]
if options.verbose:
print ' '.join(cmd)
failure = 'Failed to run dmp_to_minidump.'
subprocess.check_call(cmd)
print "# Symbolize crash dump."
minidump_stackwalk = os.path.join(options.build_dir, 'minidump_stackwalk')
cmd = [minidump_stackwalk, minidump, symbols_dir]
if options.verbose:
print ' '.join(cmd)
failure = 'Failed to run minidump_stackwalk.'
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stack = proc.communicate()[0]
# Check whether the stack contains a CrashIntentionally symbol.
found_symbol = 'CrashIntentionally' in stack
if options.no_symbols:
if found_symbol:
if options.verbose:
print stack
failure = 'Found unexpected reference to CrashIntentionally in stack'
raise Exception(failure)
else:
if not found_symbol:
if options.verbose:
print stack
failure = 'Could not find reference to CrashIntentionally in stack.'
raise Exception(failure)
except:
print "FAIL: %s" % failure
return 1
else:
print "PASS: Breakpad integration test ran successfully."
return 0
finally:
try:
shutil.rmtree(crash_dir)
except:
print 'Failed to delete temp directory "%s".' % crash_dir
if '__main__' == __name__:
sys.exit(main())
| bsd-3-clause | Python |
|
f85a5954d337eca9b577664b1ba04e580fdf9b5c | Add slice01.py | devlights/try-python | trypython/basic/slice01.py | trypython/basic/slice01.py | # coding: utf-8
"""
Sample code for the slice() function.
"""
import itertools
from trypython.common.commoncls import SampleBase
class Sample(SampleBase):
def exec(self):
#############################################################
        # The slice() function
        # - https://docs.python.jp/3/library/functions.html#slice
        #
        # slice() creates and returns an object representing a set of indices.
        # The resulting slice object can be used as a subscript on any sequence.
#############################################################
l = list(range(10))
# インデックス2番目から4番目までの要素を取得
print(l[2:4])
# slice オブジェクト取得
# 生成した slice オブジェクトはシーケンスの添字として利用可能
# 名前が付けられるので、sequence[x:y:z]とするよりわかりやすい
from_two_to_four = slice(2, 4)
print(type(from_two_to_four))
print(l[from_two_to_four])
print('helloworld'[from_two_to_four])
        # The arguments to slice() are, in order: start, stop, step.
step_two = slice(None, None, 2)
print('helloworld'[step_two])
        # For iterators, use itertools.islice instead.
it = iter(l)
iter_from_two_to_end_step_two = itertools.islice(it, 0, None, 2)
print([x for x in iter_from_two_to_end_step_two])
def go():
obj = Sample()
obj.exec()
if __name__ == '__main__':
go()
| mit | Python |
|
cf93f84dd794b63dd373cf59d802000799e32232 | Create main.py | subpath/TelegramBot | example/main.py | example/main.py | mit | Python |
||
4efc45499d1736933691b9de39090b86526ea4e1 | Create 217_contain_duplicates.py | jsingh41/algos | 217_contain_duplicates.py | 217_contain_duplicates.py | """
https://leetcode.com/problems/contains-duplicate/description/
Given an array of integers, find if the array contains any duplicates.
Your function should return true if any value appears at least twice in the array, and it should return false if every element
is distinct.
"""
class Solution(object):
def containsDuplicate(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
        dictionary = {}
        for num in nums:
            if num in dictionary:
                return True
            dictionary[num] = 1
        return False
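        # Equivalent one-liner: return len(set(nums)) != len(nums)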
| mit | Python |
|
7ab2298f22de79cd14fae9f3add1417a76bcbcd0 | Add package file. | lucasb/iris-machine-learning | app/__init__.py | app/__init__.py | #!/usr/bin/env python
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__all__ = [
'data_visualization',
'knn_prediction',
'load_dataset',
'model_visualization',
'select_model',
'svm_prediction',
'validate_dataset',
]
| mit | Python |
|
f622255dc2c6695b785213c8d69cb57ae5d8a5e9 | Add pebble sdk version for detecting sdk features | jiangege/pebblejs-project,youtux/PebbleShows,bkbilly/Tvheadend-EPG,bkbilly/Tvheadend-EPG,pebble/pebblejs,fletchto99/pebblejs,sunshineyyy/CatchOneBus,fletchto99/pebblejs,pebble/pebblejs,carlo-colombo/dublin-bus-pebble,youtux/PebbleShows,jsfi/pebblejs,daduke/LMSController,jiangege/pebblejs-project,sunshineyyy/CatchOneBus,carlo-colombo/dublin-bus-pebble,bkbilly/Tvheadend-EPG,carlo-colombo/dublin-bus-pebble,pebble/pebblejs,fletchto99/pebblejs,jsfi/pebblejs,sunshineyyy/CatchOneBus,sunshineyyy/CatchOneBus,youtux/PebbleShows,jiangege/pebblejs-project,daduke/LMSController,daduke/LMSController,sunshineyyy/CatchOneBus,pebble/pebblejs,youtux/PebbleShows,fletchto99/pebblejs,daduke/LMSController,bkbilly/Tvheadend-EPG,bkbilly/Tvheadend-EPG,jsfi/pebblejs,jiangege/pebblejs-project,jsfi/pebblejs,carlo-colombo/dublin-bus-pebble,daduke/LMSController,jiangege/pebblejs-project,pebble/pebblejs,carlo-colombo/dublin-bus-pebble,fletchto99/pebblejs,jsfi/pebblejs | waftools/pebble_sdk_version.py | waftools/pebble_sdk_version.py | from waflib.Configure import conf
@conf
def compare_sdk_version(ctx, platform, version):
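    # Packs (major, minor) into a single integer (major * 0xff + minor) for both
    # versions and returns -1, 0 or 1, cmp()-style.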
target_env = ctx.all_envs[platform] if platform in ctx.all_envs else ctx.env
target_version = (int(target_env.SDK_VERSION_MAJOR or 0x5) * 0xff +
int(target_env.SDK_VERSION_MINOR or 0x19))
other_version = int(version[0]) * 0xff + int(version[1])
diff_version = target_version - other_version
return 0 if diff_version == 0 else diff_version / abs(diff_version)
@conf
def supports_bitmap_resource(ctx):
return (ctx.compare_sdk_version('aplite', [0x5, 0x48]) >= 0)
| mit | Python |
|
79fdfaceee84321bb802f9f99ee500f400f38780 | Add admin to credentials | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/integrations/credentials/admin/__init__.py | dbaas/integrations/credentials/admin/__init__.py | # -*- coding:utf-8 -*-
from django.contrib import admin
from .. import models
admin.site.register(models.IntegrationType, )
admin.site.register(models.IntegrationCredential, )
| bsd-3-clause | Python |
|
acf0ab67db2856c71440093d0b686650e70e58e1 | Create network_class.py | joelrieger/SmithChartPy | network_class.py | network_class.py | """
Author: Joel Rieger
October 29, 2016
Description: Classes and functions to perform basic network abstraction and plotting.
"""
from numpy import pi as pi
class network(object):
"""Class for one dimension network (i.e. a matching network)."""
element_array=[]
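    # NOTE: declared at class level, so every network instance shares this one
    # list; assign self.element_array = [] in __init__ for per-instance state.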
def __init__(self,*args):
pass
def compute_node_impedances(self,Zp2,freq):
"""Calculate impedances at each node walking back from the output impedance, Zp2"""
pass
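        # Stub. A minimal sketch (assuming purely series elements) could walk
        # the chain back from the load:
        #   Z = Zp2
        #   nodes = [Z]
        #   for el in reversed(self.element_array):
        #       Z = Z + el.Z(freq)
        #       nodes.append(Z)
        #   return nodes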
def move_element(self,n_a,n_b):
"""
Moves element to new index shifting other elements accordingly.
Simplies drag-drop action of components
"""
self.element_array.insert(n_b,self.element_array.pop(n_a))
class element(object):
"""Class for a single impedance/admittance element (i.e. capacitor, indcutor, etc.)."""
name=''
icon=''
orientation=0
val={'C':1e-12}
Zfunc=lambda self,x: 1e-14 #function to define series impedance
Yfunc=lambda self,x: 1e14 #function to define admittance
    def __init__(self,*args):
        # Copy the class-level defaults so instances don't share one mutable dict.
        self.val = dict(self.val)
def Z(self,freq):
return self.Zfunc(freq)
def Y(self,freq):
return self.Yfunc(freq)
class cap(element):
"""Modification of element class to model an ideal capacitor"""
    Zfunc=lambda self,freq: 1.0/(2j*pi*freq*self.val['C']) #series impedance Z = 1/(j*w*C)
    Yfunc=lambda self,freq: 2j*pi*freq*self.val['C'] #admittance Y = j*w*C
class ind(element):
"""Modification of element class to model an ideal capacitor"""
Zfunc=lambda self,freq: 2*pi*freq*self.val['L']*1j #function to define series impedance
    Yfunc=lambda self,freq: 1.0/(2j*pi*freq*self.val['L']) #admittance Y = 1/(j*w*L)
class indQ(element):
"""Modification of element class to model an capacitor with a fixed Q"""
Zfunc=lambda self,freq: 2*pi*freq*self.val['L']/self.val['Q']+2*pi*freq*self.val['L']*1j #function to define series impedance
    Yfunc=lambda self,freq: 1.0/self.Zfunc(freq) #admittance Y = 1/Z
class capQ(element):
"""Modification of element class to model an capacitor with a fixed L"""
Zfunc=lambda self,freq: 1e-12 #function to define series impedance
Yfunc=lambda self,freq: 1.0/self.Zfunc(self,x) #function to define admittance
if __name__=='__main__':
net=network()
L1=ind()
L1.val['L']=1.9e-9
print L1.Z(2.0e9)
L2=indQ()
L2.val={'L':1.9e-9,'Q':30.0}
print L2.Z(2.0e9)
| mit | Python |
|
58627bd4cbe100a7cbd526be38cd69e8605984cd | Add json-encoder example | nullism/pycnic,nullism/pycnic | examples/json-encoder.py | examples/json-encoder.py | #!/usr/bin/env python3
from pycnic.core import WSGI, Handler
import datetime
import json
class DateTimeEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, datetime.datetime):
return o.isoformat()
return json.JSONEncoder.default(self, o)
class Hello(Handler):
def get(self, name="World"):
return {
"message": "Hello, {name}!".format(name=name),
"date": datetime.datetime.now()
}
class app(WSGI):
debug = True
json_cls = DateTimeEncoder
routes = [
("/", Hello()),
("/([\w]+)", Hello())
]
if __name__ == "__main__":
from wsgiref.simple_server import make_server
try:
print("Serving on 0.0.0.0:8080...")
make_server('0.0.0.0', 8080, app).serve_forever()
except KeyboardInterrupt:
pass
print("Done")
| mit | Python |
|
ebfe2faa5fcf66f3f1ece597922d4a72b59c3e43 | Create B_Averages_ocean.py | Herpinemmanuel/Oceanography | Cas_1/B_Averages_ocean.py | Cas_1/B_Averages_ocean.py | #Averages of U,V,W,T,S and ETA
import numpy as np
import matplotlib.pyplot as plt
from xmitgcm import open_mdsdataset
dir0 = '/homedata/bderembl/runmit/test_southatlgyre' #Case 1 : 38 iterations
ds0 = open_mdsdataset(dir0,prefix=['Eta','U','V','W','T','S'])
print(ds0)
Average_ETA = ds0['Eta'].mean().values
print('Average of Ocean Surface Height Anomaly ')
print(Average_ETA,'m')
#Average_ETA_mask = ds0.Eta.where(ds0.hFacC>0).mean().values
#print('Average of Ocean Surface Height Anomaly without continents')
#print(Average_ETA_mask,'m')
Average_T = ds0['T'].mean().values
print('Average of Ocean Temperature')
print(Average_T,'°C')
#Average_T_mask = ds0['T'].where(ds0.hFacC>0).mean().values
#print('Average of Ocean Temperature without continents')
#print(Average_T_mask,'°C')
Average_S = ds0['S'].mean().values
print('Average of Ocean Salinity')
print(Average_S,'psu')
#Average_S_mask = ds0.S.where(ds0.hFacC>0).mean().values
#print('Average of Ocean Salinity without continents')
#print(Average_S_mask,'psu')
Average_U = ds0['U'].mean().values
print('Average of Meridional component of Ocean Velocity')
print(Average_U,'m/s')
#Average_U_mask = ds0.U.where(ds0.hFacW>0).mean().values
#print('Average of Meridional component of Ocean Velocity without continents')
#print(Average_U_mask,'m/s')
Average_V = ds0['V'].mean().values
print('Average of Zonal component of Ocean Velocity')
print(Average_V,'m/s')
#Average_V_mask = ds0.V.where(ds0.hFacS>0).mean().values
#print('Average of Meridional component of Ocean Velocity without continents')
#print(Average_V_mask,'m/s')
Average_W = ds0['W'].mean().values
print('Average of Vertical component of Ocean Velocity')
print(Average_W,'m/s')
#Average_W_mask = ds0.W.where(ds0.hFacS>0).mean().values
#print('Average of Vertical component of Ocean Velocity without continents')
#print(Average_W_mask,'m/s')
| mit | Python |
|
9afe19676cbb87985939bd0099301a7003a38b7f | check for monitoring file and directory count | site24x7/plugins,site24x7/plugins,site24x7/plugins | samples/folder_check.py | samples/folder_check.py | #!/usr/bin/env python
import json,os,time
PLUGIN_VERSION="1"
HEARTBEAT="true"
#set this value to 1 if the file count needs to be recursive
INCLUDE_RECURSIVE_FILES=None
FOLDER_NAME="/"
THRESHOLD_COUNT=10
def get_data():
folder_checks_data = {}
folder_checks_data['plugin_version'] = PLUGIN_VERSION
folder_checks_data['heartbeat_required'] = HEARTBEAT
try:
if INCLUDE_RECURSIVE_FILES:
file_count = sum([len(files) for r, d, files in os.walk(FOLDER_NAME)])
directory_count = sum([len(d) for r, d, files in os.walk(FOLDER_NAME)])
else:
path, dirs, files = next(os.walk(FOLDER_NAME))
file_count = len(files)
directory_count = len(dirs)
folder_checks_data['file_count'] = file_count
folder_checks_data['directory_count'] = directory_count
        #logical conditions (combined case first so it is reachable)
        if file_count > THRESHOLD_COUNT and directory_count > THRESHOLD_COUNT:
            folder_checks_data['status']=0
            folder_checks_data['msg']='Folder / Directory Counts Exceeded the threshold'
        elif file_count > THRESHOLD_COUNT:
            folder_checks_data['status']=0
            folder_checks_data['msg']='File Count Exceeds the threshold'
        elif directory_count > THRESHOLD_COUNT:
            folder_checks_data['status']=0
            folder_checks_data['msg']='Directory Count Exceeds the threshold'
except Exception as e:
folder_checks_data['status']=0
folder_checks_data['msg']=str(e)
return folder_checks_data
if __name__ == "__main__":
data = get_data()
print(json.dumps(data,indent=4)) | bsd-2-clause | Python |
|
d881ee2866bb422a266871c1b426d76c669025da | Test for CASSANDRA-8741 | iamaleksey/cassandra-dtest,snazy/cassandra-dtest,carlyeks/cassandra-dtest,beobal/cassandra-dtest,thobbs/cassandra-dtest,krummas/cassandra-dtest,bdeggleston/cassandra-dtest,mambocab/cassandra-dtest,iamaleksey/cassandra-dtest,aweisberg/cassandra-dtest,beobal/cassandra-dtest,stef1927/cassandra-dtest,snazy/cassandra-dtest,bdeggleston/cassandra-dtest,blerer/cassandra-dtest,aweisberg/cassandra-dtest,thobbs/cassandra-dtest,spodkowinski/cassandra-dtest,spodkowinski/cassandra-dtest,krummas/cassandra-dtest,pauloricardomg/cassandra-dtest,pauloricardomg/cassandra-dtest,stef1927/cassandra-dtest,riptano/cassandra-dtest,blerer/cassandra-dtest,riptano/cassandra-dtest,carlyeks/cassandra-dtest,pcmanus/cassandra-dtest,mambocab/cassandra-dtest | nodetool_test.py | nodetool_test.py | from ccmlib.node import NodetoolError
from dtest import Tester
from tools import require
class TestNodetool(Tester):
@require("8741")
def test_decommission_after_drain_is_invalid(self):
"""
@jira_ticket CASSANDRA-8741
Running a decommission after a drain should generate
an unsupported operation message and exit with an error
code (which we receive as a NodetoolError exception).
"""
cluster = self.cluster
cluster.populate([3]).start()
version = cluster.version()
node = cluster.nodelist()[0]
node.drain(block_on_log=True)
try:
node.decommission()
self.assertFalse("Expected nodetool error")
except NodetoolError as e:
if version >= "2.1":
self.assertEqual('', e.stderr)
self.assertTrue('Unsupported operation' in e.stdout)
else:
self.assertEqual('', e.stdout)
self.assertTrue('Unsupported operation' in e.stderr)
| apache-2.0 | Python |
|
72cbdd0c1cf804eecb8f503f86e6be237719bf99 | add echo client for testing | ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study,ASMlover/study | network/echo-server/echo-client/main.py | network/echo-server/echo-client/main.py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2016 ASMlover. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
#    the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import socket
def main():
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect(('127.0.0.1', 5555))
client_socket.send('This is a echo test')
data = client_socket.recv(4096)
if data:
print 'got data:', data
client_socket.close()
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
|
be96a2f7e3aeb59727ba88913cc6fda97bf8a423 | Add some unit tests | SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree | InvenTree/company/test_views.py | InvenTree/company/test_views.py | """ Unit tests for Company views (see views.py) """
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
from django.urls import reverse
from django.contrib.auth import get_user_model
from .models import SupplierPart
class CompanyViewTest(TestCase):
fixtures = [
'category',
'part',
'location',
'company',
'supplier_part',
]
def setUp(self):
super().setUp()
# Create a user
User = get_user_model()
User.objects.create_user('username', '[email protected]', 'password')
self.client.login(username='username', password='password')
def test_supplier_part_delete(self):
""" Test the SupplierPartDelete view """
url = reverse('supplier-part-delete')
# Get form using 'part' argument
response = self.client.get(url, {'part': '1'}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 200)
# Get form using 'parts' argument
response = self.client.get(url + '?parts[]=1&parts[]=2', HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 200)
# POST to delete two parts
n = SupplierPart.objects.count()
response = self.client.post(
url,
{
'supplier-part-2': 'supplier-part-2',
'supplier-part-3': 'supplier-part-3',
'confirm_delete': True
},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(response.status_code, 200)
self.assertEqual(n - 2, SupplierPart.objects.count())
| mit | Python |
|
725832be85b7b0455cb735ce8a054007209d9645 | test scan scraper | BilalDev/HolyScrap | src/hsimage.py | src/hsimage.py | import sys
from PIL import Image
img = Image.open(sys.argv[1])
width, height = img.size
xblock = 5
yblock = 5
w_width = width / xblock
w_height = height / yblock
blockmap = [(xb*w_width, yb*w_height, (xb+1)*w_width, (yb+1)*w_height)
for xb in xrange(xblock) for yb in xrange(yblock)]
newblockmap = list(blockmap)
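# The assignments below apply a fixed permutation to the 5x5 grid of tiles,
# mapping each scrambled block back to its original position.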
newblockmap[0] = blockmap[14]
newblockmap[1] = blockmap[13]
newblockmap[2] = blockmap[12]
newblockmap[3] = blockmap[11]
newblockmap[4] = blockmap[10]
newblockmap[5] = blockmap[24]
newblockmap[6] = blockmap[23]
newblockmap[7] = blockmap[22]
newblockmap[8] = blockmap[21]
newblockmap[9] = blockmap[20]
newblockmap[10] = blockmap[4]
newblockmap[11] = blockmap[3]
newblockmap[12] = blockmap[2]
newblockmap[13] = blockmap[1]
newblockmap[14] = blockmap[0]
newblockmap[15] = blockmap[19]
newblockmap[16] = blockmap[18]
newblockmap[17] = blockmap[17]
newblockmap[18] = blockmap[16]
newblockmap[19] = blockmap[15]
newblockmap[20] = blockmap[9]
newblockmap[21] = blockmap[8]
newblockmap[22] = blockmap[7]
newblockmap[23] = blockmap[6]
newblockmap[24] = blockmap[5]
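# The same mapping in compact form:
# PERM = [14, 13, 12, 11, 10, 24, 23, 22, 21, 20,
#         4, 3, 2, 1, 0, 19, 18, 17, 16, 15,
#         9, 8, 7, 6, 5]
# newblockmap = [blockmap[i] for i in PERM]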
result = Image.new(img.mode, (width, height))
for box, sbox in zip(blockmap, newblockmap):
c = img.crop(sbox)
result.paste(c, box)
result.save(sys.argv[1])
| apache-2.0 | Python |
|
dede46a2d5ad1504991b05b8edab4d1ffd781f46 | fix out of range error in tracker remover plugin | jcherqui/searx,jcherqui/searx,dalf/searx,asciimoo/searx,jcherqui/searx,dalf/searx,asciimoo/searx,asciimoo/searx,jcherqui/searx,asciimoo/searx,dalf/searx,dalf/searx | searx/plugins/tracker_url_remover.py | searx/plugins/tracker_url_remover.py | '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <[email protected]>
'''
from flask_babel import gettext
import re
from searx.url_utils import urlunparse, parse_qsl, urlencode
regexes = {re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
description = gettext('Remove trackers arguments from the returned URL')
default_on = True
preference_section = 'privacy'
def on_result(request, search, result):
if 'parsed_url' not in result:
return True
query = result['parsed_url'].query
if query == "":
return True
parsed_query = parse_qsl(query)
changes = 0
for i, (param_name, _) in enumerate(list(parsed_query)):
for reg in regexes:
if reg.match(param_name):
parsed_query.pop(i - changes)
changes += 1
result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
result['url'] = urlunparse(result['parsed_url'])
break
return True
| '''
searx is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
searx is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with searx. If not, see < http://www.gnu.org/licenses/ >.
(C) 2015 by Adam Tauber, <[email protected]>
'''
from flask_babel import gettext
import re
from searx.url_utils import urlunparse, parse_qsl, urlencode
regexes = {re.compile(r'utm_[^&]+'),
re.compile(r'(wkey|wemail)[^&]*'),
re.compile(r'&$')}
name = gettext('Tracker URL remover')
description = gettext('Remove trackers arguments from the returned URL')
default_on = True
preference_section = 'privacy'
def on_result(request, search, result):
if 'parsed_url' not in result:
return True
query = result['parsed_url'].query
if query == "":
return True
parsed_query = parse_qsl(query)
changed = False
for i, (param_name, _) in enumerate(list(parsed_query)):
for reg in regexes:
if reg.match(param_name):
parsed_query.pop(i)
changed = True
break
if changed:
result['parsed_url'] = result['parsed_url']._replace(query=urlencode(parsed_query))
result['url'] = urlunparse(result['parsed_url'])
return True
| agpl-3.0 | Python |
a6fb8c86e14722527ff004ca1378458df252f8c0 | add doxygen module | dozymoe/fireh_runner | modules/doxygen.py | modules/doxygen.py | """Doxygen module.
Create project's documentation.
Website: http://www.doxygen.org
"""
import os
import shlex
def doxygen(loader, variant=None, *args):
if len(args) == 1:
args = shlex.split(args[0])
if variant is None:
variant = os.environ.get('PROJECT_VARIANT',
loader.config.get('default_variant'))
config = loader.config.get('configuration', {})
config = config.get(variant, {})
binargs = ['doxygen', config['doxygen']['config_file']]
os.execvp(binargs[0], binargs)
commands = (doxygen,)
| mit | Python |
|
7a74f85fc76af2df62bb92ff2997ab1b84caa3a0 | Test dummy IRC bot | homeworkprod/syslog2irc | tests/test_irc_bot_dummy.py | tests/test_irc_bot_dummy.py | """
:Copyright: 2007-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
import pytest
from syslog2irc.irc import create_bot, IrcChannel, IrcConfig
from syslog2irc.signals import irc_channel_joined
@pytest.fixture
def config():
channels = {IrcChannel('#one'), IrcChannel('#two')}
return IrcConfig(
server=None,
nickname='nick',
realname='Nick',
channels=channels,
)
@pytest.fixture
def bot(config):
bot = create_bot(config)
yield bot
bot.disconnect('Done.')
def test_fake_channel_joins(bot):
received_signal_data = []
@irc_channel_joined.connect
def handle_irc_channel_joined(sender, **data):
received_signal_data.append(data)
bot.start()
assert received_signal_data == [
{'channel_name': '#one'},
{'channel_name': '#two'},
]
| mit | Python |
|
3be6fadffbce4cdf5d45f4b34035b55db6abe2fc | add script for creating otu-tree table | kcranston/ottreeindex,OpenTreeOfLife/otindex,OpenTreeOfLife/ottreeindex,OpenTreeOfLife/ottreeindex,OpenTreeOfLife/otindex | ottreeindex/scripts/create_otu_table.py | ottreeindex/scripts/create_otu_table.py | # Collects the OTU - tree relationships across phylesystem
# Prints to file which is then inserted into postgres with COPY
# This is much faster than many inserts
from peyotl.api.phylesystem_api import PhylesystemAPI
from peyotl.phylesystem.phylesystem_umbrella import Phylesystem
from peyotl import gen_otu_dict, iter_node
from peyotl.manip import iter_trees
import setup_db
import psycopg2 as psy
import argparse
import yaml
def create_phylesystem_obj():
# create connection to local phylesystem
phylesystem_api_wrapper = PhylesystemAPI(get_from='local')
phylesystem = phylesystem_api_wrapper.phylesystem_obj
return phylesystem
def getTreeID(cursor,study_id,tree_label):
sqlstring = ('SELECT id FROM {tablename} '
'WHERE study_id=%s and tree_label=%s;'
.format(tablename='tree')
)
data = (study_id,tree_label)
print ' SQL: ',cursor.mogrify(sqlstring,data)
cursor.execute(sqlstring,data)
    row = cursor.fetchone()
    return row[0] if row else None
def print_otu_file(connection,cursor,phy,nstudies=None):
filename = "tree_otu.csv"
with open (filename,'w') as f:
# datafile format is 'ottid'\t'treeid' where treeid is not
# the treeid (string) in the nexson, but the treeid (int) from
# the database for faster indexing
counter = 0
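        # counter tracks how many studies have been written so far (for -n)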
for study_id, n in phy.iter_study_objs():
print study_id
otu_dict = gen_otu_dict(n)
mapped_otus = {}
# iterate over the OTUs in the study, collecting
# the mapped ones
for oid, o in otu_dict.items():
label = o['^ot:originalLabel']
ottname = o.get('^ot:ottTaxonName')
if ottname is not None:
ottID = o.get('^ot:ottId')
otu_props = [ottname,ottID]
mapped_otus[oid]=otu_props
print oid,ottID,label,ottname
# now iterate over trees and collect OTUs used in
# each tree
for trees_group_id, tree_label, tree in iter_trees(n):
tree_id = getTreeID(cursor,study_id,tree_label)
if (tree_id is None):
raise LookupError('tree_id for study {s}, tree {t}'
' not found'.format(s=study_id,t=tree_label))
for node_id, node in iter_node(tree):
oid = node.get('@otu')
# no @otu property on internal nodes
if oid is not None:
otu_props = mapped_otus.get(oid)
if otu_props is not None:
ottname = otu_props[0]
ottID = otu_props[1]
print tree_label,oid,ottID,ottname
f.write('{t},{o}\n'.format(t=tree_id,o=ottID))
counter+=1
if (nstudies and counter>=nstudies):
f.close()
break
return filename
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='create otu-tree table')
parser.add_argument('configfile',
help='path to the config file'
)
parser.add_argument('-n',
dest='nstudies',
type=int,
help='load only n studies; if absent, load all studies'
)
args = parser.parse_args()
# read config variables
config_dict={}
with open(args.configfile,'r') as f:
config_dict = yaml.safe_load(f)
connection, cursor = setup_db.connect(config_dict)
phy = create_phylesystem_obj()
print_otu_file(connection,cursor,phy,args.nstudies)
| bsd-2-clause | Python |
|
a8ec11719ccc158fd457ed02f2b8459d1b452975 | Create tweets.py | lvmb/Dissent.in | tweets.py | tweets.py | import sqlite3
def main(cursor):
cursor.execute("select * from tweets")
for tweet in cursor.fetchall():
tid = tweet[0]
tdate = tweet[1]
text = tweet[2]
geo = tweet[3]
print '-----tweet: %s ' % text
print '------date: %s' % tdate
print '-------geo: %s' % geo
print '----length: %s' % len(text)
print '\n'
if __name__ == '__main__':
conn = sqlite3.connect('tweets.db')
conn.text_factory = str
cur = conn.cursor()
main(cur)
| unlicense | Python |
|
0fa30986e1f97331f96444e0b3b0f86cbe20c68a | Add tests for JsonBackend __init__ and commit methods | jeffkinnison/shadho,jeffkinnison/shadho | shadho/backend/json/tests/test_db.py | shadho/backend/json/tests/test_db.py | import pytest
from shadho.backend.base.tests.test_db import TestBaseBackend
from shadho.backend.json.db import JsonBackend
import json
import os
import shutil
class TestJsonBackend(object):
def test_init(self):
"""Ensure that initialization sets up the db and filepath."""
# Test default initialization
b = JsonBackend()
assert b.path == os.path.join(os.getcwd(), 'shadho.json')
assert b.db == {'models': {},
'domains': {},
'results': {},
'values': {}}
assert b.commit_frequency == 10
assert b.update_frequency == 10
# Test custom initialization
b = JsonBackend(path='foo.bar',
commit_frequency=42,
update_frequency=42)
assert b.path == os.path.join(os.getcwd(), 'foo.bar')
assert b.db == {'models': {},
'domains': {},
'results': {},
'values': {}}
assert b.commit_frequency == 42
assert b.update_frequency == 42
# Test without specifying a file name
b = JsonBackend(path='/tmp')
assert b.path == os.path.join('/tmp', 'shadho.json')
assert b.db == {'models': {},
'domains': {},
'results': {},
'values': {}}
assert b.commit_frequency == 10
assert b.update_frequency == 10
def test_commit(self):
"""Ensure that commit writes to file and the file is loadable."""
temp = shutil.mkdtemp()
        fpath = os.path.join(temp, 'shadho.json')
# Test saving and loading
b = JsonBackend(path=temp)
assert os.path.isfile(fpath)
with open(fpath, 'r') as f:
db = json.load(f)
assert db == {'models': {},
'domains': {},
'results': {},
'values': {}}
shutil.rmtree(temp)
def test_count(self):
"""Ensure that the correct counts are returned for object classes"""
| mit | Python |
|
871f79a0b2bd235df457e3a1dc502d5c18bd934a | Add some generic python utilities as a basis for scripts | Oletus/gameutils.js,Oletus/gameutils.js | tools/build/common_utils.py | tools/build/common_utils.py | from __future__ import print_function
import os
def game_root_path():
file_path = os.path.dirname(os.path.abspath(__file__))
return os.path.abspath(os.path.join(file_path, '..', '..'))
def files_with_type(root, type):
all_files = [os.path.join(root, filename) for filename in os.listdir(root)]
typed_files = [path for path in all_files if path.endswith('.' + type)]
return typed_files
def sha1_of_file(filepath):
import hashlib
if not os.path.exists(filepath):
return ''
with open(filepath, 'rb') as f:
return hashlib.sha1(f.read()).hexdigest()
def fetch_file(url, target_path, sha1):
if sha1_of_file(target_path) == sha1:
return True # Already downloaded
import urllib
if hasattr(urllib, 'urlretrieve'):
# Python 2
urllib.urlretrieve(url, target_path)
else:
# Python 3
import urllib.request
urllib.request.urlretrieve(url, target_path)
if sha1 == None:
print('sha1 of ' + target_path + ': ' + sha1_of_file(target_path))
elif sha1_of_file(target_path) != sha1:
if os.path.exists(target_path):
os.remove(target_path)
return False
return True
def python27_path():
import sys
exe = ''
if sys.version_info.minor == 7 and sys.version_info.major == 2:
exe = sys.executable
elif sys.platform.startswith("linux"):
exe = '/usr/local/bin/python2.7'
elif sys.platform == "darwin":
exe = '/usr/local/bin/python2.7'
elif sys.platform == "win32":
        exe = r'C:\Python27\python.exe'
return exe
if __name__ == '__main__':
print('Game root path: ' + game_root_path())
| mit | Python |
|
7d22c38348ccd411871942ef0dd43ed57794de16 | include benchmark code | bndl/cyheapq | bench.py | bench.py | from statistics import mean
import heapq
import importlib
import time
import numpy as np
r = np.random.random(1000*1000)
mergers = {
'heapq': ('merge', 'nlargest', 'nsmallest'),
'cyheapq': ('merge', 'nlargest', 'nsmallest'),
'cytoolz': ('merge_sorted', 'topk', None),
}
mods = list(mergers.keys())
name_max_len = max(map(len, mods))
def test(runs, loops, f, *args):
times = []
for _ in range(runs):
start = time.monotonic()
for _ in range(loops):
f(*args)
stop = time.monotonic()
times.append(stop-start)
times.sort()
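    # Trimmed mean: ignore the single fastest and two slowest runs to damp outliers.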
return mean(times[1:-2])
for t in ('merge', 'nlargest', 'nsmallest'):
print('---', t, '---')
for mod, (merge, nlargest, nsmallest) in sorted(mergers.items()):
module = importlib.import_module(mod)
merge = getattr(module, merge)
nlargest = getattr(module, nlargest)
nsmallest = getattr(module, nsmallest) if nsmallest else None
a = list(r)
b = list(r)
if t == 'merge':
print(mod.rjust(name_max_len), 'merge', test(5, 100000, merge, a, a, b, b))
elif t == 'nlargest':
print(mod.rjust(name_max_len), 'nlargest', test(5, 5, nlargest, 10, a))
elif t == 'nsmallest' and nsmallest:
print(mod.rjust(name_max_len), 'nsmallest', test(5, 5, nsmallest, 10, a))
| apache-2.0 | Python |
|
5b2aebb9b9f9fafe291f0890f03c44abd661ca68 | add celery_work | 8cbx/OnlineJudge_Web,8cbx/OnlineJudge_Web,8cbx/OnlineJudge_Web,8cbx/OnlineJudge_Web | celery_work.py | celery_work.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from celery import Celery, platforms
from app import create_app
def make_celery(app):
"""Create the celery process."""
# Init the celery object via app's configuration.
celery = Celery(
app.import_name,
backend=app.config['CELERY_RESULT_BACKEND'],
broker=app.config['CELERY_BROKER_URL'])
# Flask-Celery-Helper to auto-setup the config.
celery.conf.update(app.config)
TaskBase = celery.Task
platforms.C_FORCE_ROOT = True
class ContextTask(TaskBase):
abstract = True
def __call__(self, *args, **kwargs):
"""Will be execute when create the instance object of ContextTesk."""
            # Wrap the task in the Flask app context so that app extensions
            # set up on the producer side are available in the celery worker.
with app.app_context():
return TaskBase.__call__(self, *args, **kwargs)
# Include the app_context into celery.Task.
# Let other Flask extensions can be normal calls.
celery.Task = ContextTask
return celery
flask_app = create_app(os.getenv('FLASK_CONFIG') or 'default')
# 1. Each celery process needs to create an instance of the Flask application.
# 2. Register the celery object into the app object.
celery = make_celery(flask_app) | agpl-3.0 | Python |
|
9c53e59ee0c4e5418b54d47c932454b7b907dc03 | Revert escape nickname, desc, etc in user profile | madflow/seahub,Chilledheart/seahub,cloudcopy/seahub,madflow/seahub,Chilledheart/seahub,cloudcopy/seahub,miurahr/seahub,miurahr/seahub,cloudcopy/seahub,Chilledheart/seahub,miurahr/seahub,madflow/seahub,madflow/seahub,madflow/seahub,Chilledheart/seahub,cloudcopy/seahub,miurahr/seahub,Chilledheart/seahub | seahub/profile/forms.py | seahub/profile/forms.py | # encoding: utf-8
from django import forms
from seahub.profile.models import Profile, DetailedProfile
class ProfileForm(forms.Form):
nickname = forms.CharField(max_length=64, required=False)
intro = forms.CharField(max_length=256, required=False)
def save(self, username):
nickname = self.cleaned_data['nickname']
intro = self.cleaned_data['intro']
Profile.objects.add_or_update(username, nickname, intro)
class DetailedProfileForm(ProfileForm):
department = forms.CharField(max_length=512, required=False)
telephone = forms.CharField(max_length=100, required=False)
def save(self, username):
super(DetailedProfileForm, self).save(username)
department = self.cleaned_data['department']
telephone = self.cleaned_data['telephone']
DetailedProfile.objects.add_or_update(username, department, telephone)
| # encoding: utf-8
from django import forms
from django.utils.html import escape
from seahub.profile.models import Profile, DetailedProfile
class ProfileForm(forms.Form):
nickname = forms.CharField(max_length=64, required=False)
intro = forms.CharField(max_length=256, required=False)
def save(self, username):
nickname = escape(self.cleaned_data['nickname'])
intro = escape(self.cleaned_data['intro'])
Profile.objects.add_or_update(username, nickname, intro)
class DetailedProfileForm(ProfileForm):
department = forms.CharField(max_length=512, required=False)
telephone = forms.CharField(max_length=100, required=False)
def save(self, username):
super(DetailedProfileForm, self).save(username)
department = escape(self.cleaned_data['department'])
telephone = escape(self.cleaned_data['telephone'])
DetailedProfile.objects.add_or_update(username, department, telephone)
| apache-2.0 | Python |
b23ec502b89ab70b9e8edd1868f4e9717392b7b2 | Add missing migrations | pinax/django-user-accounts,pinax/django-user-accounts,GeoNode/geonode-user-accounts,GeoNode/geonode-user-accounts | account/migrations/0004_auto_20170416_1821.py | account/migrations/0004_auto_20170416_1821.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-16 18:21
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('account', '0003_passwordexpiry_passwordhistory'),
]
operations = [
migrations.AlterModelOptions(
name='passwordhistory',
options={'verbose_name': 'password history', 'verbose_name_plural': 'password histories'},
),
]
| mit | Python |
|
9a691ae746c5b501ed37792383600da1ba381b20 | Add exitcode.py | pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus | bin/exitcode.py | bin/exitcode.py | #!/usr/bin/env python
#
# Copyright 2010 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# exitcode.py
#
# This program parses kickstart invocation records looking for failures.
# If failures are found, it prints a message and exits with a non-zero
# exit code. If no failures are found, it exits with 0.
#
# This program also renames the .out and .err file to .out.XXX and .err.XXX
# where XXX is a sequence number. This sequence number is incremented each
# time the program is run with the same kickstart.out argument.
#
# Since XML parsers are slow, this program doesn't parse the full invocation
# XML, but rather looks for the <status> tag in the XML and extracts the raw
# exitcode using simple string manipulations. This turns out to be much
# faster than using an XML parser. On .out files with 1000 invocation
# records this program runs in about 30 milliseconds and uses less than
# 4 MB of physical memory.
#
import sys
import re
import os
from optparse import OptionParser
__author__ = "Gideon Juve <[email protected]>"
def fail(message=None):
if message: print "fail: %s" % message
sys.exit(1)
def rename(outfile):
"""Rename .out and .err files to .out.XXX and .err.XXX where XXX
is the next sequence number. Returns the new name, or fails with
an error message and a non-zero exit code."""
# This is just to prevent the file from being accidentally renamed
# again in testing.
if re.search("\.out\.[0-9]{3}$", outfile):
return outfile
# Must end in .out
if not outfile.endswith(".out"):
fail("%s does not look like a kickstart .out file" % outfile)
# Find next file in sequence
retry = None
for i in range(0,1000):
candidate = "%s.%03d" % (outfile,i)
if not os.path.isfile(candidate):
retry = i
break
# unlikely to occur
if retry is None:
fail("%s has been renamed too many times!" % (outfile))
basename = outfile[:-4]
# rename .out to .out.000
newout = "%s.out.%03d" % (basename,retry)
os.rename(outfile,newout)
# rename .err to .err.000 if it exists
errfile = "%s.err" % (basename)
if os.path.isfile(errfile):
newerr = "%s.err.%03d" % (basename,retry)
os.rename(errfile,newerr)
return newout
def exitcode(outfile):
"""Parse invocation records looking for status codes. Returns
the number of successful invocations, or fails with an error
message and a non-zero exit code."""
# Read the file first
f = open(outfile)
txt = f.read()
f.close()
# Verify the length
if len(txt) == 0:
fail("kickstart produced no output")
# Check the exitcode of all tasks
regex = re.compile(r'raw="(-?[0-9]+)"')
succeeded = 0
e = 0
while True:
b = txt.find("<status", e)
if b < 0: break
e = txt.find("</status>", b)
if e < 0: fail("mismatched <status>")
e = e + len("</status>")
m = regex.search(txt[b:e])
if m: raw = int(m.group(1))
else: fail("<status> was missing valid 'raw' attribute")
if raw != 0:
fail("task exited with raw status %d" % raw)
succeeded = succeeded + 1
# Require at least one task to succeed
if succeeded == 0:
fail("no tasks succeeded")
return succeeded
def main():
usage = "Usage: %prog [options] kickstart.out"
parser = OptionParser(usage)
parser.add_option("-t", "--tasks", action="store", type="int",
dest="tasks", metavar="N",
help="Number of tasks expected. If less than N tasks succeeded, then exitcode will fail.")
parser.add_option("-r", "--return", action="store", type="int",
dest="exitcode", default=0, metavar="R",
help="Return code reported by DAGMan. This can be specified in a DAG using the $RETURN variable.")
parser.add_option("-n", "--no-rename", action="store_false",
dest="rename", default=True,
help="Don't rename kickstart.out and .err to .out.XXX and .err.XXX. Useful for testing.")
(options, args) = parser.parse_args()
if len(args) != 1:
parser.error("please specify kickstart.out")
outfile = args[0]
if not os.path.isfile(outfile):
fail("%s does not exist" % outfile)
# if we are renaming, then rename
if options.rename:
outfile = rename(outfile)
# check supplied exitcode first
if options.exitcode != 0:
fail("dagman reported non-zero exitcode: %d" % options.exitcode)
# check exitcodes of all tasks
succeeded = exitcode(outfile)
# if we know how many tasks to expect, check that they all succeeded
if options.tasks and options.tasks>=0 and succeeded != options.tasks:
fail("wrong number of successful tasks: wanted %d got %d" % \
(options.tasks,succeeded))
# If we reach this, then it was OK
sys.exit(0)
if __name__ == "__main__":
main() | apache-2.0 | Python |
|
ae94990bc8b790b5307ccaee992f09fefc045692 | add Tester lockedNormal | sol-ansano-kim/medic,sol-ansano-kim/medic,sol-ansano-kim/medic | python/medic/plugins/Tester/lockedNormal.py | python/medic/plugins/Tester/lockedNormal.py | from medic.core import testerBase
from maya import OpenMaya
class LockedNormal(testerBase.TesterBase):
Name = "LockedNormal"
Description = "vertex(s) which has locked normal"
Fixable = True
def __init__(self):
super(LockedNormal, self).__init__()
def Match(self, node):
return node.object().hasFn(OpenMaya.MFn.kMesh)
def Test(self, node):
it = None
mesh = None
try:
it = OpenMaya.MItMeshVertex(node.object())
mesh = OpenMaya.MFnMesh(node.object())
except:
return (False, None)
result = False
comp = OpenMaya.MFnSingleIndexedComponent()
comp_obj = comp.create(OpenMaya.MFn.kMeshVertComponent)
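        # Gather every vertex with at least one locked normal into the component.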
while (not it.isDone()):
normal_indices = OpenMaya.MIntArray()
it.getNormalIndices(normal_indices)
for i in range(normal_indices.length()):
if mesh.isNormalLocked(normal_indices[i]):
result = True
comp.addElement(it.index())
break
it.next()
return (result, comp_obj if result else None)
def Fix(self, node, component, parameterParser):
if node.dg().isFromReferencedFile():
return False
target_normal_indices = OpenMaya.MIntArray()
mesh = OpenMaya.MFnMesh(node.object())
it = OpenMaya.MItMeshVertex(node.getPath(), component)
while (not it.isDone()):
normal_indices = OpenMaya.MIntArray()
it.getNormalIndices(normal_indices)
for i in range(normal_indices.length()):
target_normal_indices.append(normal_indices[i])
it.next()
mesh.unlockVertexNormals(target_normal_indices)
return True
Tester = LockedNormal
| mit | Python |
|
088ec16cf33d4be4b396976d9e9ab1a5f17045fc | make contrib an app | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | adhocracy4/contrib/apps.py | adhocracy4/contrib/apps.py | from django.apps import AppConfig
class ContribConfig(AppConfig):
name = 'adhocracy4.contrib'
label = 'a4contrib'
| agpl-3.0 | Python |
|
e020f81593268899a04cce726823c512b8b54762 | copy over the PlotContainerEditor to the more appropriately named and located ComponentEditor. | tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable | enthought/enable2/component_editor.py | enthought/enable2/component_editor.py | """ Defines a Traits editor for displaying an Enable component.
"""
#-------------------------------------------------------------------------------
# Written by: David C. Morrill
# Date: 01/26/2007
# (c) Copyright 2007 by Enthought, Inc.
#----------------------------------------------------------------------------

from enthought.enable2.api import ColorTrait
from enthought.etsconfig.api import ETSConfig
from enthought.traits.ui.api import BasicEditorFactory

if ETSConfig.toolkit == 'wx':
    from enthought.traits.ui.wx.editor import Editor
    from enthought.enable2.wx_backend.api import Window
elif ETSConfig.toolkit == 'qt4':
    from enthought.traits.ui.qt4.editor import Editor
    from enthought.enable2.qt4_backend.api import Window
else:
    Editor = object
    Window = None


class _ComponentEditor( Editor ):

    #---------------------------------------------------------------------------
    # Trait definitions:
    #---------------------------------------------------------------------------

    # The plot editor is scrollable (overrides Traits UI Editor).
    scrollable = True

    #---------------------------------------------------------------------------
    # Finishes initializing the editor by creating the underlying toolkit
    # widget:
    #---------------------------------------------------------------------------

    def init( self, parent ):
        """ Finishes initializing the editor by creating the underlying toolkit
            widget.
        """
        self._window = Window( parent, component=self.value )
        self.control = self._window.control
        self._window.bg_color = self.factory.bgcolor

    #---------------------------------------------------------------------------
    # Updates the editor when the object trait changes externally to the editor:
    #---------------------------------------------------------------------------

    def update_editor( self ):
        """ Updates the editor when the object trait changes externally to the
            editor.
        """
        pass


class ComponentEditor( BasicEditorFactory ):
    """ wxPython editor factory for Enable components.
    """

    #---------------------------------------------------------------------------
    # Trait definitions:
    #---------------------------------------------------------------------------

    # The class used to create all editor styles (overrides BasicEditorFactory).
    klass = _ComponentEditor

    # The background color for the window
    bgcolor = ColorTrait('sys_window')
| bsd-3-clause | Python |
|
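A sketch of how the factory might be used from a Traits UI view, assuming the Enthought stack of the same era (the enthought.* namespace) is installed and that enable2's api module exports Component:

from enthought.traits.api import HasTraits, Instance
from enthought.traits.ui.api import Item, View

from enthought.enable2.api import Component
from enthought.enable2.component_editor import ComponentEditor


class Demo(HasTraits):
    canvas = Instance(Component, ())

    traits_view = View(
        Item('canvas', editor=ComponentEditor(), show_label=False),
        width=400, height=300, resizable=True,
    )

# Demo().configure_traits() would open a window that renders 'canvas'
# with the toolkit selected by ETSConfig (wx or qt4).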
06164dbeb1ec113b24ca25a41e624793d878875f | implement a transferrable voting algorithm | julzhk/codekata | instant_runoff_voting.py | instant_runoff_voting.py | from collections import Counter
def runoff(voters):
    """
    Calculate an election winner from a list of voter selections using an
    Instant Runoff Voting algorithm. https://en.wikipedia.org/wiki/Instant-runoff_voting

    Each voter selects several candidates in order of preference.
    The votes are tallied from each voter's first choice.
    If the first-place candidate has more than half the total votes, they win.
    Otherwise, find the candidate who got the least votes and remove them from
    each person's voting list.
    In case of a tie for least, remove all of the tying candidates.
    In case of a complete tie between every candidate, return None.
    Continue until somebody has more than half the votes; they are the winner.

    Takes a list of voter ballots; each ballot is a list of candidates in
    descending order of preference. Returns the symbol corresponding to the
    winning candidate, or None.
    """
    candidates = set(voters[0])
    removed = set()
    while len(removed) < len(candidates):
        # Tally each voter's highest-ranked candidate still in the race;
        # each ballot ranks every candidate, so next() always finds one.
        tally = Counter(
            next(choice for choice in ballot if choice not in removed)
            for ballot in voters
        )
        total_votes = sum(tally.values())
        leader = max(tally, key=tally.get)
        if tally[leader] * 2 > total_votes:
            return leader
        # Knock out every candidate tied for the fewest votes; if that is
        # everyone left, the election is a complete tie.
        fewest = min(tally.values())
        losers = set(c for c in tally if tally[c] == fewest)
        if losers == set(tally):
            return None
        removed |= losers
    return None


voters = [
    ['c', 'a', 'b', 'd', 'e'],
    ['b', 'e', 'd', 'c', 'a'],
    ['b', 'e', 'c', 'a', 'd'],
    ['d', 'b', 'c', 'a', 'e'],
    ['c', 'b', 'd', 'a', 'e']
]
assert(runoff(voters) == "b")
| mit | Python |
|
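The sample ballots exercise the majority path; the complete-tie branch can be checked the same way:

# Every candidate ties in the first round, so there is no winner.
tied_voters = [
    ['a', 'b'],
    ['b', 'a'],
]
assert runoff(tied_voters) is None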
5d6ef1cf969bac9fb53db0224eebdeb4a1bb6ff0 | Update app/exceptions/__init__.py | apipanda/openssl,apipanda/openssl,apipanda/openssl,apipanda/openssl | app/exceptions/__init__.py | app/exceptions/__init__.py |
class BadConfigurationError(Exception):
pass
class ClientUnavailableError(Exception):
pass
class ClusterNotConfiguredError(Exception):
pass
| mit | Python |
|
0efb59e8d1bef5a1d8e5e3eb7ffddf09f5b8943a | Add tests to LoadCommand | datasciencebr/jarbas,datasciencebr/jarbas,datasciencebr/jarbas,datasciencebr/serenata-de-amor,Guilhermeslucas/jarbas,Guilhermeslucas/jarbas,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,rogeriochaves/jarbas,marcusrehm/serenata-de-amor,Guilhermeslucas/jarbas,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,Guilhermeslucas/jarbas,rogeriochaves/jarbas,rogeriochaves/jarbas,rogeriochaves/jarbas | jarbas/core/tests/test_load_command.py | jarbas/core/tests/test_load_command.py | from unittest.mock import Mock, patch
from django.test import TestCase

from jarbas.core.management.commands import LoadCommand
from jarbas.core.models import Activity
from jarbas.core.tests import sample_activity_data


class TestStaticMethods(TestCase):

    def setUp(self):
        self.cmd = LoadCommand()

    def test_get_file_name(self):
        expected = '1970-01-01-ahoy.xz'
        with self.settings(AMAZON_S3_DATASET_DATE='1970-01-01'):
            self.assertEqual(expected, self.cmd.get_file_name('ahoy'))

    def test_get_model_name(self):
        self.assertEqual('Activity', self.cmd.get_model_name(Activity))


class TestPrintCount(TestCase):

    def setUp(self):
        self.cmd = LoadCommand()

    @patch('jarbas.core.management.commands.print')
    def test_print_no_records(self, mock_print):
        self.cmd.print_count(Activity)
        arg = 'Current count: 0 Activitys '
        kwargs = {'end': '\r'}
        mock_print.assert_called_with(arg, **kwargs)

    @patch('jarbas.core.management.commands.print')
    def test_print_with_records(self, mock_print):
        Activity.objects.create(**sample_activity_data)
        self.cmd.print_count(Activity)
        arg = 'Current count: 1 Activitys '
        kwargs = {'end': '\r'}
        mock_print.assert_called_with(arg, **kwargs)

    @patch('jarbas.core.management.commands.print')
    def test_print_with_permanent_keyword_arg(self, mock_print):
        self.cmd.print_count(Activity, permanent=True)
        arg = 'Current count: 0 Activitys '
        kwargs = {'end': '\n'}
        mock_print.assert_called_with(arg, **kwargs)


class TestDropAll(TestCase):

    def test_drop_all(self):
        self.assertEqual(0, Activity.objects.count())
        Activity.objects.create(**sample_activity_data)
        self.assertEqual(1, Activity.objects.count())
        LoadCommand().drop_all(Activity)
        self.assertEqual(0, Activity.objects.count())


class TestLocalMethods(TestCase):

    def setUp(self):
        self.cmd = LoadCommand()
        self.source = '/whatever/works'
        self.name = 'ahoy'

    def test_get_path(self):
        expected = '/whatever/works/1970-01-01-ahoy.xz'
        with self.settings(AMAZON_S3_DATASET_DATE='1970-01-01'):
            result = self.cmd.get_path(self.source, self.name)
            self.assertEqual(expected, result)

    @patch('jarbas.core.management.commands.print')
    @patch('jarbas.core.management.commands.os.path.exists')
    def test_load_local_exists(self, mock_exists, mock_print):
        mock_exists.return_value = True
        self.assertIsInstance(self.cmd.load_local(self.source, self.name), str)

    @patch('jarbas.core.management.commands.print')
    @patch('jarbas.core.management.commands.os.path.exists')
    def test_load_local_fail(self, mock_exists, mock_print):
        mock_exists.return_value = False
        self.assertFalse(self.cmd.load_local(self.source, self.name))


class TestRemoteMethods(TestCase):

    def setUp(self):
        self.cmd = LoadCommand()
        self.name = 'ahoy'
        self.url = 'https://south.amazonaws.com/jarbas/1970-01-01-ahoy.xz'
        self.custom_settings = {
            'AMAZON_S3_DATASET_DATE': '1970-01-01',
            'AMAZON_S3_REGION': 'south',
            'AMAZON_S3_BUCKET': 'jarbas'
        }

    def test_get_url(self):
        with self.settings(**self.custom_settings):
            result = self.cmd.get_url(self.name)
            self.assertEqual(self.url, result)

    @patch('jarbas.core.management.commands.print')
    @patch('jarbas.core.management.commands.urlretrieve')
    def test_load_remote(self, mock_urlretrieve, mock_print):
        with self.settings(**self.custom_settings):
            result = self.cmd.load_remote(self.name)
            self.assertEqual(self.url, mock_urlretrieve.call_args[0][0])
            self.assertIsInstance(result, str)


class TestAddArguments(TestCase):

    def test_add_arguments(self):
        mock = Mock()
        LoadCommand().add_arguments(mock)
        self.assertEqual(2, mock.add_argument.call_count) | mit | Python |
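These tests pin down the interface they expect from LoadCommand. A minimal sketch consistent with the assertions above (the real implementation lives in jarbas.core.management.commands and may differ):

import os

from django.conf import settings


class LoadCommandSketch:
    def get_file_name(self, name):
        # '1970-01-01' + 'ahoy' -> '1970-01-01-ahoy.xz', as the tests assert.
        return '{}-{}.xz'.format(settings.AMAZON_S3_DATASET_DATE, name)

    def get_path(self, source, name):
        return os.path.join(source, self.get_file_name(name))

    def get_url(self, name):
        # -> 'https://south.amazonaws.com/jarbas/1970-01-01-ahoy.xz'
        return 'https://{}.amazonaws.com/{}/{}'.format(
            settings.AMAZON_S3_REGION,
            settings.AMAZON_S3_BUCKET,
            self.get_file_name(name),
        )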
|
8affeda715b1facf12de1dab1d445bbe54616306 | Fix JSON serialisation problem with AJAX basket | jmt4/django-oscar,jmt4/django-oscar,dongguangming/django-oscar,lijoantony/django-oscar,kapt/django-oscar,okfish/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,Bogh/django-oscar,sasha0/django-oscar,ahmetdaglarbas/e-commerce,solarissmoke/django-oscar,bnprk/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,elliotthill/django-oscar,sasha0/django-oscar,ka7eh/django-oscar,ademuk/django-oscar,josesanch/django-oscar,Bogh/django-oscar,jinnykoo/wuyisj,solarissmoke/django-oscar,pasqualguerrero/django-oscar,ka7eh/django-oscar,jlmadurga/django-oscar,michaelkuty/django-oscar,saadatqadri/django-oscar,pdonadeo/django-oscar,jlmadurga/django-oscar,binarydud/django-oscar,anentropic/django-oscar,rocopartners/django-oscar,manevant/django-oscar,mexeniz/django-oscar,adamend/django-oscar,django-oscar/django-oscar,monikasulik/django-oscar,okfish/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,WadeYuChen/django-oscar,dongguangming/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,DrOctogon/unwash_ecom,saadatqadri/django-oscar,ka7eh/django-oscar,vovanbo/django-oscar,nickpack/django-oscar,WillisXChen/django-oscar,WillisXChen/django-oscar,sasha0/django-oscar,faratro/django-oscar,adamend/django-oscar,saadatqadri/django-oscar,binarydud/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,faratro/django-oscar,jinnykoo/wuyisj.com,WillisXChen/django-oscar,eddiep1101/django-oscar,okfish/django-oscar,Bogh/django-oscar,thechampanurag/django-oscar,faratro/django-oscar,kapari/django-oscar,amirrpp/django-oscar,ademuk/django-oscar,marcoantoniooliveira/labweb,DrOctogon/unwash_ecom,mexeniz/django-oscar,manevant/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,sasha0/django-oscar,marcoantoniooliveira/labweb,machtfit/django-oscar,pdonadeo/django-oscar,ademuk/django-oscar,manevant/django-oscar,binarydud/django-oscar,bschuon/django-oscar,marcoantoniooliveira/labweb,saadatqadri/django-oscar,jlmadurga/django-oscar,kapt/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,eddiep1101/django-oscar,nickpack/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj.com,jinnykoo/wuyisj.com,marcoantoniooliveira/labweb,MatthewWilkes/django-oscar,django-oscar/django-oscar,WadeYuChen/django-oscar,django-oscar/django-oscar,pasqualguerrero/django-oscar,itbabu/django-oscar,jmt4/django-oscar,pasqualguerrero/django-oscar,elliotthill/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,monikasulik/django-oscar,WadeYuChen/django-oscar,amirrpp/django-oscar,MatthewWilkes/django-oscar,vovanbo/django-oscar,QLGu/django-oscar,bschuon/django-oscar,nfletton/django-oscar,jinnykoo/wuyisj,kapt/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj,vovanbo/django-oscar,adamend/django-oscar,machtfit/django-oscar,monikasulik/django-oscar,rocopartners/django-oscar,faratro/django-oscar,nfletton/django-oscar,QLGu/django-oscar,kapari/django-oscar,monikasulik/django-oscar,spartonia/django-oscar,itbabu/django-oscar,itbabu/django-oscar,Jannes123/django-oscar,rocopartners/django-oscar,binarydud/django-oscar,taedori81/django-oscar,jinnykoo/christmas,pdonadeo/django-oscar,elliotthill/django-oscar,amirrpp/django-oscar,sonofatailor/django-oscar,nfletton/django-oscar,solarissmoke/django-oscar,manevant/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,thechampanurag/django-oscar,sonofatailor/django-oscar,nickpack/django-oscar,michaelkuty/django-oscar
,lijoantony/django-oscar,django-oscar/django-oscar,ka7eh/django-oscar,ahmetdaglarbas/e-commerce,jinnykoo/wuyisj,anentropic/django-oscar,ahmetdaglarbas/e-commerce,thechampanurag/django-oscar,Bogh/django-oscar,QLGu/django-oscar,john-parton/django-oscar,michaelkuty/django-oscar,pdonadeo/django-oscar,eddiep1101/django-oscar,ahmetdaglarbas/e-commerce,thechampanurag/django-oscar,jmt4/django-oscar,spartonia/django-oscar,jlmadurga/django-oscar,michaelkuty/django-oscar,bschuon/django-oscar,mexeniz/django-oscar,ademuk/django-oscar,sonofatailor/django-oscar,dongguangming/django-oscar,nickpack/django-oscar,taedori81/django-oscar,adamend/django-oscar,jinnykoo/christmas,amirrpp/django-oscar,Jannes123/django-oscar,bschuon/django-oscar,QLGu/django-oscar,okfish/django-oscar,MatthewWilkes/django-oscar,nfletton/django-oscar,kapari/django-oscar,machtfit/django-oscar,lijoantony/django-oscar,john-parton/django-oscar,jinnykoo/christmas,MatthewWilkes/django-oscar,eddiep1101/django-oscar,josesanch/django-oscar,bnprk/django-oscar,john-parton/django-oscar,WadeYuChen/django-oscar,rocopartners/django-oscar,lijoantony/django-oscar,bnprk/django-oscar,john-parton/django-oscar | oscar/core/ajax.py | oscar/core/ajax.py | import six
from django.contrib import messages
from six.moves import map


class FlashMessages(object):
    """
    Intermediate container for flash messages.

    This is useful as, at the time of creating the message, we don't know
    whether the response is an AJAX response or not.
    """

    def __init__(self):
        self.msgs = {}

    def add_message(self, level, message):
        self.msgs.setdefault(level, []).append(message)

    def add_messages(self, level, messages):
        for msg in messages:
            self.add_message(level, msg)

    def info(self, message):
        self.add_message(messages.INFO, message)

    def warning(self, message):
        self.add_message(messages.WARNING, message)

    def error(self, message):
        self.add_message(messages.ERROR, message)

    def success(self, message):
        self.add_message(messages.SUCCESS, message)

    def to_json(self):
        payload = {}
        for level, msgs in self.msgs.items():
            tag = messages.DEFAULT_TAGS.get(level, 'info')
            payload[tag] = [six.text_type(msg) for msg in msgs]
        return payload

    def apply_to_request(self, request):
        for level, msgs in self.msgs.items():
            for msg in msgs:
                messages.add_message(request, level, msg)
| import six
from django.contrib import messages
from six.moves import map


class FlashMessages(object):
    """
    Intermediate container for flash messages.

    This is useful as, at the time of creating the message, we don't know
    whether the response is an AJAX response or not.
    """

    def __init__(self):
        self.msgs = {}

    def add_message(self, level, message):
        self.msgs.setdefault(level, []).append(message)

    def add_messages(self, level, messages):
        for msg in messages:
            self.add_message(level, msg)

    def info(self, message):
        self.add_message(messages.INFO, message)

    def warning(self, message):
        self.add_message(messages.WARNING, message)

    def error(self, message):
        self.add_message(messages.ERROR, message)

    def success(self, message):
        self.add_message(messages.SUCCESS, message)

    def to_json(self):
        payload = {}
        for level, msgs in self.msgs.items():
            tag = messages.DEFAULT_TAGS.get(level, 'info')
            payload[tag] = map(six.text_type, msgs)
        return payload

    def apply_to_request(self, request):
        for level, msgs in self.msgs.items():
            for msg in msgs:
                messages.add_message(request, level, msg)
| bsd-3-clause | Python |
63d22058d15a11fad7232683630976d472997c33 | Add planetary time recipe | flatangle/flatlib | recipes/planetarytime.py | recipes/planetarytime.py | """
    Author: João Ventura <[email protected]>

    This recipe shows sample code for handling
    planetary times.

"""

from flatlib.datetime import Datetime
from flatlib.geopos import GeoPos
from flatlib.tools import planetarytime


# Build a date and location
date = Datetime('2015/03/13', '17:00', '+00:00')
pos = GeoPos('38n32', '8w54')

# Get the planetary hour table
hourTable = planetarytime.getHourTable(date, pos)
print(hourTable.dayRuler())    # Venus
print(hourTable.nightRuler())  # Mars
print(hourTable.hourRuler())   # Saturn

# Use the info Dict to print hour number information
info = hourTable.currInfo()
print(info['hourNumber'])  # 11
print(info['start'])       # <2015/03/13 16:42:10 00:00:00>
print(info['end'])         # <2015/03/13 17:41:20 00:00:00> | mit | Python |
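As a follow-up, the hour's length in clock minutes can be derived from the same info dict, assuming flatlib's Datetime objects expose their Julian day via a .jd attribute:

# 16:42:10 -> 17:41:20 is roughly 59 minutes.
length_days = info['end'].jd - info['start'].jd
print(round(length_days * 24 * 60))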
|
92c8afbb5131374611fb21b4da0b0af1a2f37a45 | add dummy test | pyannote/pyannote-database | tests/dummy.py | tests/dummy.py | import pytest
from pyannote.database import get_databases


def test_dummy():
    assert isinstance(get_databases(), list)
| mit | Python |
|
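The same check can be run interactively, assuming pyannote.database is installed:

from pyannote.database import get_databases

# Returns the names of registered database plugins; an empty list is
# still a list, so the dummy test passes either way.
print(get_databases())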
0c8b7fa865df535f5baa33025c184bbf4234b7b1 | Create script to transform shapefile into csv distance matrix | tayden-hakai/Island_MST | shp_to_csv_distances.py | shp_to_csv_distances.py | """Create a csv matrix of distances between shapefile geometry objects.
Requirements: fiona, shapely

Written by: Taylor Denouden
Date: November 25, 2015
"""

import random
import fiona
from shapely.geometry import shape
from scripts.printer import print_progress


def main():
    """Main script execution."""
    outfile = open("out.csv", "w")
    ids = extract_ids("data/high_polys.shp")

    # Write header
    print "Writing Header"
    outfile.write("NODE")
    for i in ids:
        outfile.write("," + i)
    outfile.write("\n")

    # Write rows
    print "Writing Rows"
    for i, j in enumerate(ids):
        print_progress(float(i) / len(ids))  # float() avoids Python 2 integer division
        outfile.write(j)
        write_row_distances(j, ids, "data/high_polys.shp", outfile)
        outfile.write("\n")
    print_progress(1)
    print


def extract_ids(input_file):
    """Extract all polygon ids from input shapefile."""
    with fiona.open(input_file, 'r') as source:
        return [shp['id'] for shp in source]


def write_row_distances(i, ids, input_file, outfile):
    """Write distances between shape with id i and all other shapes in ids."""
    with fiona.open(input_file, 'r') as source:
        source = list(source)
        i_shp = shape(source[int(i)]['geometry'])
        for j in ids:
            j_shp = shape(source[int(j)]['geometry'])
            if i_shp.is_valid and j_shp.is_valid:
                dist = i_shp.distance(j_shp)
            else:
                dist = -1
            outfile.write("," + str(dist))


if __name__ == "__main__":
    main()
| mit | Python |
|
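Reading the generated matrix back mirrors the writing side; a sketch that assumes the header row and per-row NODE ids produced by the script above:

import csv

with open("out.csv") as f:
    reader = csv.reader(f)
    header = next(reader)[1:]  # shape ids from the header row
    matrix = {row[0]: dict(zip(header, map(float, row[1:])))
              for row in reader}

# matrix['3']['7'] -> distance between shapes '3' and '7'; -1.0 marks
# pairs where either geometry was invalid.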
9a4dd1c0c51cf2732b50d5594b2a4bf661b8262f | Add geoip_lookup.py | aaaaalbert/repy-doodles | geoip_lookup.py | geoip_lookup.py | import sys
if len(sys.argv) < 2:
    print """geoip_lookup.py ---
"resolve" IP addresses to approximate geo-information
Usage:
python geoip_lookup.py IP [ GEOIP_SERVER ]
where IP is the address to resolve, and
GEOIP_SERVER is an optional GeoIP server to contact.
(The Seattle network testbed provides two GeoIP servers,
http://geoipserver.poly.edu:12679 and http://geoipserver2.poly.edu:12679 )
"""
    sys.exit(0)

from repyportability import *
add_dy_support(locals())

geoip_client = dy_import_module("geoip_client.r2py")

try:
    geoipserver = sys.argv[2]
    geoip_client.geoip_init_client(url=geoipserver)
except IndexError:
    geoip_client.geoip_init_client()

ip = sys.argv[1]
print "Address", ip, "is located in", geoip_client.geoip_record_by_addr(ip)
| unlicense | Python |
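For reference, a session against the default server looks roughly like this (hypothetical address and output; the fields of the returned record depend on the GeoIP deployment):

$ python geoip_lookup.py 128.238.0.1
Address 128.238.0.1 is located in {'city': ..., 'country_name': ..., ...}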