commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
f5bbdea74c0f8a0cc8ac4331ea8adc45c3f266c8
|
converter.py
|
converter.py
|
#
# MIT License
# Copyright (c) 2017 Hampus Tågerud
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
def main():
command = ""
while command != "exit":
command = input("> ").strip()
if __name__ == "__main__":
main()
|
#
# MIT License
# Copyright (c) 2017 Hampus Tågerud
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import sys
def main():
if len(sys.argv) > 1:
for i in range(1, len(sys.argv)):
with open(sys.argv[i]) as f:
lines = f.read().splitlines()
for line in lines:
evaluate(line);
command = ""
while command != "exit":
command = input("> ").strip()
def evaluate(string):
print(string)
if __name__ == "__main__":
main()
|
Read files defined as argument
|
Read files defined as argument
|
Python
|
mit
|
hampustagerud/colorconverter
|
33e88c063fedb11211e3786a9d722a9d12f72ce8
|
contrib/dn42_whoisd.py
|
contrib/dn42_whoisd.py
|
#!/bin/python
# coding: utf-8
import argparse
import asyncio
import lglass.dn42
import lglass.whois.engine
import lglass.whois.server
def create_database(db_path):
return lglass.dn42.DN42Database(db_path)
if __name__ == "__main__":
argparser = argparse.ArgumentParser(description="DN42 Whois server")
argparser.add_argument("--port", "-p", default=4343)
argparser.add_argument("--address", "-a", default="::1,127.0.0.1")
argparser.add_argument("database")
args = argparser.parse_args()
db = create_database(args.database)
engine = lglass.whois.engine.WhoisEngine(db)
server = lglass.whois.server.SimpleWhoisServer(engine)
loop = asyncio.get_event_loop()
coro = asyncio.start_server(server.handle, args.address.split(","),
args.port, loop=loop)
s = loop.run_until_complete(coro)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
s.close()
loop.run_until_complete(s.wait_closed())
loop.close()
|
#!/bin/python
# coding: utf-8
import argparse
import asyncio
import lglass.dn42
import lglass.whois.engine
import lglass.whois.server
def create_database(db_path):
return lglass.dn42.DN42Database(db_path)
if __name__ == "__main__":
argparser = argparse.ArgumentParser(description="DN42 Whois server")
argparser.add_argument("--port", "-p", default=4343)
argparser.add_argument("--address", "-a", default="::1,127.0.0.1")
argparser.add_argument("database")
args = argparser.parse_args()
db = create_database(args.database)
engine = lglass.whois.engine.WhoisEngine(db)
engine.type_hints[r"[0-9A-Za-z]+-DN42$"] = {"role", "person"}
server = lglass.whois.server.SimpleWhoisServer(engine)
loop = asyncio.get_event_loop()
coro = asyncio.start_server(server.handle, args.address.split(","),
args.port, loop=loop)
s = loop.run_until_complete(coro)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
finally:
s.close()
loop.run_until_complete(s.wait_closed())
loop.close()
|
Add type hint for -DN42
|
Add type hint for -DN42
|
Python
|
mit
|
fritz0705/lglass
|
fdb3801a513357c4f5ba655a6216ba59ee1e3df9
|
overlay/Chart.py
|
overlay/Chart.py
|
from SVGGenerator import SVGGenerator
from Label import Label
class Chart(SVGGenerator):
def __init__(self, name, title, data):
SVGGenerator.__init__(self, './chart.svg.mustache')
self.name = name
self.title = title
self.x = 10
self.y = 10
self.width = 110
self.height = 110
self.padding = 5
self.background_color = "white"
self.background_opacity = 0.4
self.path_data = data
self.path_color = "rgb(0,192,0)"
def to_svg(self, data=None):
label = Label(self.width * 0.5, self.height - self.padding, self.title)
label.alignment = "middle"
self.label_svg = label.to_svg(label)
return SVGGenerator.to_svg(self, self)
|
from SVGGenerator import SVGGenerator
from Label import Label
class Chart(SVGGenerator):
def __init__(self, name, title, data):
SVGGenerator.__init__(self, './chart.svg.mustache')
self.name = name
self.title = title
self.x = 10
self.y = 10
self.width = 110
self.height = 110
self.padding = 5
self.background_color = "white"
self.background_opacity = 0.6
self.path_data = data
self.path_color = "rgb(0,192,0)"
def to_svg(self, data=None):
label = Label(self.width * 0.5, self.height - self.padding, self.title)
label.alignment = "middle"
self.label_svg = label.to_svg(label)
return SVGGenerator.to_svg(self, self)
|
Increase opacity on chart backgrounds
|
Increase opacity on chart backgrounds
|
Python
|
mit
|
gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,thelonious/g2x,gizmo-cda/g2x
|
56327baa67d5f05551bc52a1c0466e8d8b905797
|
metrics.py
|
metrics.py
|
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
x = np.concatenate([x, array([0.0, 1.0])])
y = np.concatenate([y, array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
for ix in range(len(x)-1):
area += 0.5*(sx[ix+1]-sx[ix])*(sy[ix+1]+sy[ix])
return area
|
"""The metrics module implements functions assessing prediction error for specific purposes."""
import numpy as np
def trapz(x, y):
"""Trapezoidal rule for integrating
the curve defined by x-y pairs.
Assume x and y are in the range [0,1]
"""
assert len(x) == len(y), 'x and y need to be of same length'
x = np.concatenate([x, np.array([0.0, 1.0])])
y = np.concatenate([y, np.array([0.0, 1.0])])
sort_idx = np.argsort(x)
sx = x[sort_idx]
sy = y[sort_idx]
area = 0.0
for ix in range(len(x) - 1):
area += 0.5 * (sx[ix + 1] - sx[ix]) * (sy[ix + 1] + sy[ix])
return area
|
Add the missing 'np.' before 'array'
|
Add the missing 'np.' before 'array'
|
Python
|
mit
|
ceshine/isml15-wed
|
222628c6747bdc3574bcb7cf6257c785ffa6451d
|
inventory_control/database/sql.py
|
inventory_control/database/sql.py
|
"""
So this is where all the SQL commands live
"""
CREATE_SQL = """
CREATE TABLE component_type (
id INT PRIMARY KEY AUTO_INCREMENT,
type VARCHAR(255) UNIQUE
);
CREATE TABLE components (
id INT PRIMARY KEY AUTO_INCREMENT,
sku TEXT,
type INT,
status INT,
FOREIGN KEY (type) REFERENCES component_type(id)
);
CREATE TABLE projects (
id INT PRIMARY KEY AUTO_INCREMENT,
motherboard INT,
power_supply INT,
cpu INT,
hard_drive INT,
proj_case INT,
memory INT,
FOREIGN KEY (motherboard) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (cpu) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (power_supply) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (hard_drive) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (proj_case) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (memory) REFERENCES components(id) ON DELETE CASCADE
);
"""
ADD_COMPONENT_TYPE = """INSERT IGNORE INTO component_type (type) VALUES ('{text}')
"""
GET_COMPONENT_TYPE="""SELECT * FROM component_type WHERE type='{text}'"""
DELETE_COMPONENT_TYPE = """DELETE FROM component_type WHERE type='{text}'
"""
SELECT_ALL_COMPONENTS = """
SELECT * FROM components INNER JOIN component_type
ON components.type = component_type.id;
"""
DROP_SQL = """
DROP TABLE projects;
DROP TABLE components;
DROP TABLE component_type;
"""
|
"""
So this is where all the SQL commands live
"""
CREATE_SQL = """
CREATE TABLE component_type (
id INT PRIMARY KEY AUTO_INCREMENT,
type VARCHAR(255) UNIQUE
);
CREATE TABLE components (
id INT PRIMARY KEY AUTO_INCREMENT,
serial_number VARCHAR(255),
sku TEXT,
type INT,
status INT,
FOREIGN KEY (type) REFERENCES component_type(id)
);
CREATE TABLE projects (
id INT PRIMARY KEY AUTO_INCREMENT,
product_number INT,
motherboard INT,
power_supply INT,
cpu INT,
hard_drive INT,
proj_case INT,
memory INT,
FOREIGN KEY (motherboard) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (cpu) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (power_supply) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (hard_drive) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (proj_case) REFERENCES components(id) ON DELETE CASCADE,
FOREIGN KEY (memory) REFERENCES components(id) ON DELETE CASCADE
);
"""
ADD_COMPONENT_TYPE = """INSERT IGNORE INTO component_type (type) VALUES ('{text}')
"""
GET_COMPONENT_TYPE="""SELECT * FROM component_type WHERE type='{text}'"""
DELETE_COMPONENT_TYPE = """DELETE FROM component_type WHERE type='{text}'
"""
SELECT_ALL_COMPONENTS = """
SELECT * FROM components INNER JOIN component_type
ON components.type = component_type.id;
"""
DROP_SQL = """
DROP TABLE projects;
DROP TABLE components;
DROP TABLE component_type;
"""
|
Add product_number and serial_number identifiers
|
Add product_number and serial_number identifiers
|
Python
|
mit
|
worldcomputerxchange/inventory-control,codeforsanjose/inventory-control
|
4a4a3eed7b959e342e3ff00dfc28f116158839d6
|
tests/test_result.py
|
tests/test_result.py
|
import unittest
from performance_testing.result import Result, File
import os
import shutil
class ResultTestCase(unittest.TestCase):
def setUp(self):
self.current_directory = os.path.dirname(os.path.abspath(__file__))
self.result_directory = os.path.join(self.current_directory, 'assets/test_result')
def test_result_init(self):
if os.path.exists(self.result_directory):
shutil.rmtree(self.result_directory)
self.assertFalse(os.path.exists(self.result_directory))
result = Result(directory=self.result_directory)
self.assertTrue(os.path.exists(self.result_directory))
self.assertTrue(os.path.exists(result.file.path))
|
import unittest
from performance_testing.result import Result, File
import os
import shutil
class ResultTestCase(unittest.TestCase):
def setUp(self):
self.current_directory = os.path.dirname(os.path.abspath(__file__))
self.result_directory = os.path.join(self.current_directory, 'assets/test_result')
def clear_result_dir(self):
if os.path.exists(self.result_directory):
shutil.rmtree(self.result_directory)
def test_result_init(self):
self.clear_result_dir()
self.assertFalse(os.path.exists(self.result_directory))
result = Result(directory=self.result_directory)
self.assertTrue(result.file.path)
def test_file_init(self):
self.clear_result_dir()
file_name = 'foo_bar'
self.assertFalse(os.path.exists(self.result_directory))
file = File(directory=self.result_directory, name=file_name)
self.assertTrue(os.path.exists(self.result_directory))
self.assertTrue(os.path.exists(os.path.join(self.result_directory, file_name)))
def tear_down(self):
self.clear_result_dir()
|
Split up tests for Result and File
|
Split up tests for Result and File
|
Python
|
mit
|
BakeCode/performance-testing,BakeCode/performance-testing
|
a2997b5f76c658ba8ddd933275aa6f37c1bedc50
|
promgen/util.py
|
promgen/util.py
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, **kwargs)
def get(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, **kwargs)
def delete(url, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.delete(url, **kwargs)
|
# Copyright (c) 2017 LINE Corporation
# These sources are released under the terms of the MIT license: see LICENSE
import requests.sessions
from promgen.version import __version__
def post(url, *args, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.post(url, *args, **kwargs)
def get(url, *args, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.get(url, *args, **kwargs)
def delete(url, *args, **kwargs):
with requests.sessions.Session() as session:
session.headers['User-Agent'] = 'promgen/{}'.format(__version__)
return session.delete(url, *args, **kwargs)
|
Make sure we pass *args to the requests session object
|
Make sure we pass *args to the requests session object
|
Python
|
mit
|
kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen
|
aaf0d25cae834222f14303f33ab126be7ae29142
|
pambox/intelligibility_models/__init__.py
|
pambox/intelligibility_models/__init__.py
|
import sepsm
import sii
|
"""
The :mod:`pambox.intelligibility_modesl` module gather speech intelligibility
models.
"""
from .mrsepsm import MrSepsm
from .sepsm import Sepsm
from .sii import Sii
__all__ = ['Sepsm',
'MrSepsm',
'Sii']
|
Define __all__ in intelligibility_models package
|
Define __all__ in intelligibility_models package
Define to import all intelligibility models available if one imports
with *. Also, allows to import the models directly from
intelligibility_models, rather than having to import the modules.
|
Python
|
bsd-3-clause
|
achabotl/pambox
|
e90cc22226189b8950957cbf8637e49ee7798c4b
|
django_token/middleware.py
|
django_token/middleware.py
|
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'').split()
if not auth_header or auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if len(auth_header) != 2:
return HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[1])
if user:
request.user = user
|
from django.http import HttpResponseBadRequest
from django.contrib import auth
class TokenMiddleware(object):
"""
Middleware that authenticates against a token in the http authorization header.
"""
def process_request(self, request):
auth_header = request.META.get('HTTP_AUTHORIZATION', b'').partition(' ')
if auth_header[0].lower() != b'token':
return None
# If they specified an invalid token, let them know.
if not auth_header[2]:
return HttpResponseBadRequest("Improperly formatted token")
user = auth.authenticate(token=auth_header[2])
if user:
request.user = user
|
Use partition instead of split.
|
Use partition instead of split.
|
Python
|
mit
|
jasonbeverage/django-token
|
721be562a175227fa496789e9ce642416c5993b7
|
enactiveagents/controller/controller.py
|
enactiveagents/controller/controller.py
|
"""
Main world controller.
"""
from appstate import AppState
import pygame
import events
class Controller(events.EventListener):
"""
Controller class.
"""
def __init__(self):
pass
def _quit(self):
"""
Gracefully quit the simulator.
"""
quitEvent = events.QuitEvent()
AppState.get_state().get_event_manager().post_event(quitEvent)
def process_input(self):
"""
Process user input.
"""
for event in pygame.event.get():
if event.type == pygame.QUIT:
self._quit()
return
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
self._quit()
return
pygame.M
def notify(self, event):
if isinstance(event, events.TickEvent):
self.process_input()
|
"""
Main world controller.
"""
from appstate import AppState
import pygame
import events
class Controller(events.EventListener):
"""
Controller class.
"""
def __init__(self):
pass
def _quit(self):
"""
Gracefully quit the simulator.
"""
quitEvent = events.QuitEvent()
AppState.get_state().get_event_manager().post_event(quitEvent)
def process_input(self):
"""
Process user input.
"""
for event in pygame.event.get():
if event.type == pygame.QUIT:
self._quit()
return
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
self._quit()
return
def notify(self, event):
if isinstance(event, events.TickEvent):
self.process_input()
|
Fix bug crashing the experiment on a non-recognized keypress.
|
Fix bug crashing the experiment on a non-recognized keypress.
|
Python
|
mit
|
Beskhue/enactive-agents,Beskhue/enactive-agents,Beskhue/enactive-agents
|
53acdb65defa43db67f11a5c5a41c1353e9498f7
|
tests/test__utils.py
|
tests/test__utils.py
|
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask.array as da
import dask.array.utils as dau
import dask_ndfourier._utils
@pytest.mark.parametrize(
"a, s, n, axis", [
(da.ones((3, 4), chunks=(3, 4)), da.ones((2,), chunks=(2,)), -1, -1),
]
)
def test_norm_args(a, s, n, axis):
s2, n2, axis2 = dask_ndfourier._utils._norm_args(a, s, n=n, axis=axis)
assert isinstance(s2, da.Array)
|
Test that `s` as a Dask Array is preserved
|
Test that `s` as a Dask Array is preserved
Ensure that if `s` is a Dask Array, it will still be a Dask Array after
type normalization is done on the input arguments of the Fourier
filters. This allows Fourier filters to construct a computation around
an unknown `s` in addition to an unknown input array.
|
Python
|
bsd-3-clause
|
dask-image/dask-ndfourier
|
a13829a0c2b95773832e68e6f0a1dc661a288ec4
|
tests/test_action.py
|
tests/test_action.py
|
import unittest
from unittest import mock
from action import PrintAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
|
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
|
Add EmailActionTest class and as well as a test to check if email is sent to the right server.
|
Add EmailActionTest class and as well as a test to check if email is sent to the right server.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
ef27b615702d9ce84db9087898c1f66286e66cf2
|
PRESUBMIT.py
|
PRESUBMIT.py
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def CheckChangeOnUpload(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return report
def CheckChangeOnCommit(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
'http://gyp-status.appspot.com/status',
'http://gyp-status.appspot.com/current'))
return report
def GetPreferredTrySlaves():
return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac']
|
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def CheckChangeOnUpload(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return report
def CheckChangeOnCommit(input_api, output_api):
report = []
license = (
r'.*? Copyright \(c\) %(year)s Google Inc\. All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
r'.*? found in the LICENSE file\.\n'
) % {
'year': input_api.time.strftime('%Y'),
}
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, license_header=license))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
'http://gyp-status.appspot.com/status',
'http://gyp-status.appspot.com/current'))
return report
def GetPreferredTrySlaves():
return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac']
|
Fix the license header regex.
|
Fix the license header regex.
Most of the files are attributed to Google Inc so I used this instead of
Chromium Authors.
[email protected]
BUG=
TEST=
Review URL: http://codereview.chromium.org/7108074
|
Python
|
bsd-3-clause
|
csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp
|
0819957eda318205e17591dccd81482701eab25c
|
tests/test_sqlite.py
|
tests/test_sqlite.py
|
# This is an example test settings file for use with the Django test suite.
#
# The 'sqlite3' backend requires only the ENGINE setting (an in-
# memory database will be used). All other backends will require a
# NAME and potentially authentication information. See the
# following section in the docs for more information:
#
# https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/
#
# The different databases that Django supports behave differently in certain
# situations, so it is recommended to run the test suite against as many
# database backends as possible. You may want to create a separate settings
# file for each of the backends you test against.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3'
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
|
# This is an example test settings file for use with the Django test suite.
#
# The 'sqlite3' backend requires only the ENGINE setting (an in-
# memory database will be used). All other backends will require a
# NAME and potentially authentication information. See the
# following section in the docs for more information:
#
# https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/unit-tests/
#
# The different databases that Django supports behave differently in certain
# situations, so it is recommended to run the test suite against as many
# database backends as possible. You may want to create a separate settings
# file for each of the backends you test against.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3'
},
'other': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
SECRET_KEY = "django_tests_secret_key"
# To speed up tests under SQLite we use the MD5 hasher as the default one.
# This should not be needed under other databases, as the relative speedup
# is only marginal there.
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
|
Use faster password hasher in sqlite tests
|
Use faster password hasher in sqlite tests
Fixed #18163
|
Python
|
bsd-3-clause
|
dbaxa/django,gunchleoc/django,varunnaganathan/django,yamila-moreno/django,dracos/django,savoirfairelinux/django,crazy-canux/django,mattseymour/django,charettes/django,MatthewWilkes/django,dsanders11/django,ccn-2m/django,beck/django,dgladkov/django,litchfield/django,MounirMesselmeni/django,jyotsna1820/django,ojake/django,hottwaj/django,irwinlove/django,oscaro/django,aroche/django,neiudemo1/django,ericfc/django,indevgr/django,SoftwareMaven/django,zanderle/django,frishberg/django,delinhabit/django,postrational/django,zulip/django,Beauhurst/django,RevelSystems/django,dracos/django,henryfjordan/django,blueyed/django,WillGuan105/django,wweiradio/django,Vixionar/django,denis-pitul/django,hunter007/django,frdb194/django,poiati/django,deployed/django,extremewaysback/django,leekchan/django_test,kswiat/django,MoritzS/django,Proggie02/TestRepo,lwiecek/django,shacker/django,epandurski/django,deployed/django,alexallah/django,aroche/django,jgoclawski/django,andela-ooladayo/django,syaiful6/django,rockneurotiko/django,wkschwartz/django,django/django,KokareIITP/django,Sonicbids/django,mshafiq9/django,waytai/django,TridevGuha/django,freakboy3742/django,adamchainz/django,wetneb/django,beckastar/django,JavML/django,ar45/django,dursk/django,willhardy/django,rsalmaso/django,YangSongzhou/django,arun6582/django,koniiiik/django,druuu/django,BMJHayward/django,liavkoren/djangoDev,takis/django,spisneha25/django,tbeadle/django,spisneha25/django,blindroot/django,marcelocure/django,jscn/django,pjdelport/django,hobarrera/django,treyhunner/django,dfdx2/django,arun6582/django,archen/django,rsalmaso/django,mcardillo55/django,djbaldey/django,wkschwartz/django,salamer/django,frePPLe/django,mammique/django,leeon/annotated-django,marckuz/django,jhg/django,jenalgit/django,tayfun/django,RevelSystems/django,solarissmoke/django,curtisstpierre/django,dhruvagarwal/django,daniponi/django,ulope/django,hunter007/django,Adnn/django,1013553207/django,divio/django,ivandevp/django,kaedroho/django,ulope/django,Vixionar
/django,rapilabs/django,willharris/django,blaze33/django,frdb194/django,zanderle/django,darjeeling/django,benjaminjkraft/django,Y3K/django,spisneha25/django,redhat-openstack/django,willhardy/django,katrid/django,SebasSBM/django,andreif/django,erikr/django,wsmith323/django,kaedroho/django,lsqtongxin/django,jnovinger/django,tysonclugg/django,GhostThrone/django,jaywreddy/django,ivandevp/django,jarshwah/django,aidanlister/django,krisys/django,piquadrat/django,jenalgit/django,zhoulingjun/django,ABaldwinHunter/django-clone-classic,bak1an/django,rynomster/django,tomchristie/django,mojeto/django,sarthakmeh03/django,x111ong/django,xwolf12/django,Adnn/django,tuhangdi/django,elkingtonmcb/django,SebasSBM/django,kangfend/django,petecummings/django,takis/django,savoirfairelinux/django,uranusjr/django,carljm/django,rajsadho/django,avanov/django,rmboggs/django,ojengwa/django-1,RevelSystems/django,pquentin/django,haxoza/django,fenginx/django,archen/django,gannetson/django,frankvdp/django,davidharrigan/django,timgraham/django,apollo13/django,Nepherhotep/django,peterlauri/django,pipermerriam/django,shtouff/django,synasius/django,rockneurotiko/django,NullSoldier/django,kevintaw/django,yakky/django,hackerbot/DjangoDev,shownomercy/django,ebar0n/django,dsanders11/django,IRI-Research/django,JorgeCoock/django,saydulk/django,mitchelljkotler/django,shownomercy/django,cainmatt/django,shacker/django,alexallah/django,matiasb/django,mlavin/django,jallohm/django,megaumi/django,andela-ifageyinbo/django,Argon-Zhou/django,simone/django-gb,MounirMesselmeni/django,sbellem/django,drjeep/django,frankvdp/django,pquentin/django,elijah513/django,zulip/django,RossBrunton/django,payeldillip/django,alilotfi/django,jvkops/django,ericholscher/django,yask123/django,chrisfranzen/django,dbaxa/django,yewang15215/django,b-me/django,h4r5h1t/django-hauthy,marissazhou/django,riteshshrv/django,bitcity/django,rizumu/django,frdb194/django,areski/django,ABaldwinHunter/django-clone,delinhabit/django,Vixionar/django,varunnaga
nathan/django,mitchelljkotler/django,zhaodelong/django,rmboggs/django,lwiecek/django,DONIKAN/django,runekaagaard/django-contrib-locking,ticosax/django,hcsturix74/django,sjlehtin/django,theo-l/django,etos/django,knifenomad/django,sopier/django,jarshwah/django,kangfend/django,aidanlister/django,sam-tsai/django,aroche/django,indevgr/django,riteshshrv/django,andresgz/django,adrianholovaty/django,b-me/django,ojengwa/django-1,aleida/django,kholidfu/django,kutenai/django,adambrenecki/django,barbuza/django,solarissmoke/django,sopier/django,sarthakmeh03/django,deployed/django,MarcJoan/django,stevenewey/django,asser/django,ccn-2m/django,koniiiik/django,errx/django,pasqualguerrero/django,Argon-Zhou/django,yewang15215/django,mattseymour/django,wsmith323/django,craynot/django,Adnn/django,blighj/django,aleida/django,tysonclugg/django,zanderle/django,jylaxp/django,ArnossArnossi/django,ironbox360/django,vmarkovtsev/django,beni55/django,quamilek/django,kutenai/django,blighj/django,MikeAmy/django,akintoey/django,zedr/django,ryangallen/django,JorgeCoock/django,GitAngel/django,ivandevp/django,Leila20/django,mcrowson/django,weiawe/django,zerc/django,runekaagaard/django-contrib-locking,AlexHill/django,jpic/django,mcardillo55/django,waytai/django,alimony/django,areski/django,oinopion/django,ericholscher/django,varunnaganathan/django,myang321/django,h4r5h1t/django-hauthy,monetate/django,Beeblio/django,BlindHunter/django,takeshineshiro/django,jyotsna1820/django,mjtamlyn/django,eugena/django,darjeeling/django,vitaly4uk/django,z0by/django,ifduyue/django,yograterol/django,mmardini/django,mitar/django,jhg/django,poiati/django,shaib/django,hunter007/django,ericfc/django,stevenewey/django,jallohm/django,MoritzS/django,felixxm/django,avneesh91/django,schinckel/django,blueyed/django,mttr/django,stewartpark/django,gannetson/django,barbuza/django,taaviteska/django,mitya57/django,tayfun/django,sjlehtin/django,indevgr/django,monetate/django,litchfield/django,denys-duchier/django,katrid/django,koordinat
es/django,djbaldey/django,beni55/django,SoftwareMaven/django,akintoey/django,denis-pitul/django,nealtodd/django,phalt/django,aerophile/django,zsiciarz/django,doismellburning/django,darkryder/django,hnakamur/django,cainmatt/django,hnakamur/django,imtapps/django-imt-fork,twz915/django,elena/django,blindroot/django,vincepandolfo/django,Matt-Deacalion/django,hassanabidpk/django,gengue/django,aerophile/django,abomyi/django,sarvex/django,baylee/django,oscaro/django,sephii/django,zhoulingjun/django,tysonclugg/django,sarvex/django,manhhomienbienthuy/django,piquadrat/django,adamchainz/django,rsalmaso/django,Vixionar/django,cainmatt/django,archen/django,memtoko/django,rrrene/django,kcpawan/django,daniponi/django,jhg/django,scorphus/django,lsqtongxin/django,petecummings/django,myang321/django,neiudemo1/django,mlavin/django,delinhabit/django,erikr/django,huang4fstudio/django,jrrembert/django,ryanahall/django,AlexHill/django,andresgz/django,x111ong/django,HonzaKral/django,WSDC-NITWarangal/django,oscaro/django,chyeh727/django,guettli/django,jylaxp/django,marissazhou/django,dfunckt/django,Argon-Zhou/django,Anonymous-X6/django,tragiclifestories/django,Mixser/django,ebar0n/django,timgraham/django,tuhangdi/django,gdub/django,liavkoren/djangoDev,BlindHunter/django,areski/django,RaoUmer/django,mcella/django,django/django,vsajip/django,ataylor32/django,knifenomad/django,henryfjordan/django,irwinlove/django,hassanabidpk/django,gengue/django,wweiradio/django,rajsadho/django,dwightgunning/django,kangfend/django,labcodes/django,Proggie02/TestRepo,druuu/django,ASCrookes/django,coldmind/django,delhivery/django,dhruvagarwal/django,akintoey/django,tanmaythakur/django,adelton/django,anant-dev/django,gchp/django,nielsvanoch/django,YangSongzhou/django,jejimenez/django,asser/django,ytjiang/django,jvkops/django,xrmx/django,camilonova/django,eugena/django,Sonicbids/django,ASCrookes/django,irwinlove/django,AndrewGrossman/django,mitar/django,henryfjordan/django,ABaldwinHunter/django-clone,NullSoldier/d
jango,extremewaysback/django,jrrembert/django,maxsocl/django,rsvip/Django,andreif/django,DrMeers/django,himleyb85/django,jyotsna1820/django,PolicyStat/django,andela-ifageyinbo/django,knifenomad/django,aroche/django,erikr/django,RevelSystems/django,github-account-because-they-want-it/django,frePPLe/django,robhudson/django,lmorchard/django,mattrobenolt/django,jnovinger/django,liuliwork/django,TimYi/django,hcsturix74/django,ataylor32/django,rsalmaso/django,z0by/django,gohin/django,gunchleoc/django,mmardini/django,MarcJoan/django,liu602348184/django,mcella/django,gcd0318/django,postrational/django,tuhangdi/django,pasqualguerrero/django,zulip/django,jasonwzhy/django,jenalgit/django,timgraham/django,ghickman/django,bobcyw/django,ecederstrand/django,bliti/django-nonrel-1.5,beck/django,apocquet/django,IRI-Research/django,jhoos/django,rmboggs/django,dursk/django,vitaly4uk/django,SebasSBM/django,fenginx/django,payeldillip/django,GitAngel/django,hybrideagle/django,jejimenez/django,ghedsouza/django,ataylor32/django,curtisstpierre/django,mattrobenolt/django,imtapps/django-imt-fork,ghickman/django,benspaulding/django,kevintaw/django,EmadMokhtar/Django,aspidites/django,NullSoldier/django,peterlauri/django,szopu/django,auvipy/django,jn7163/django,dgladkov/django,waytai/django,jscn/django,charettes/django,nielsvanoch/django,haxoza/django,ifduyue/django,TridevGuha/django,jpic/django,theo-l/django,redhat-openstack/django,Matt-Deacalion/django,camilonova/django,ghickman/django,jsoref/django,edmorley/django,salamer/django,fpy171/django,mitchelljkotler/django,chrisfranzen/django,supriyantomaftuh/django,feroda/django,frankvdp/django,denis-pitul/django,felixxm/django,felixjimenez/django,bobcyw/django,gannetson/django,rogerhu/django,edevil/django,edmorley/django,lunafeng/django,MikeAmy/django,github-account-because-they-want-it/django,mcrowson/django,kennethlove/django,BrotherPhil/django,lunafeng/django,EliotBerriot/django,huang4fstudio/django,EliotBerriot/django,yakky/django,asser/django,s
corphus/django,stevenewey/django,mitya57/django,sergei-maertens/django,leeon/annotated-django,django-nonrel/django,yask123/django,dfunckt/django,adambrenecki/django,jvkops/django,wsmith323/django,willharris/django,Nepherhotep/django,ryanahall/django,harisibrahimkv/django,mjtamlyn/django,bliti/django-nonrel-1.5,pauloxnet/django,avneesh91/django,beck/django,seanwestfall/django,dwightgunning/django,indevgr/django,mcardillo55/django,jyotsna1820/django,vitan/django,mewtaylor/django,programadorjc/django,apollo13/django,yceruto/django,yewang15215/django,fafaman/django,adelton/django,bobcyw/django,anant-dev/django,liuliwork/django,andresgz/django,neiudemo1/django,divio/django,yograterol/django,vitaly4uk/django,alexallah/django,rajsadho/django,jscn/django,rhertzog/django,ziima/django,bikong2/django,feroda/django,denisenkom/django,gengue/django,djbaldey/django,mitya57/django,ericfc/django,koniiiik/django,sgzsh269/django,Leila20/django,digimarc/django,Beauhurst/django,curtisstpierre/django,taaviteska/django,jeezybrick/django,b-me/django,rmboggs/django,dudepare/django,claudep/django,marctc/django,sam-tsai/django,GaussDing/django,BlindHunter/django,EmadMokhtar/Django,liavkoren/djangoDev,kisna72/django,saydulk/django,yamila-moreno/django,mrfuxi/django,bspink/django,delhivery/django,alimony/django,saydulk/django,pauloxnet/django,rizumu/django,denisenkom/django,coldmind/django,iambibhas/django,petecummings/django,andreif/django,nealtodd/django,filias/django,maxsocl/django,anant-dev/django,matiasb/django,marckuz/django,piquadrat/django,pauloxnet/django,rynomster/django,baylee/django,felixjimenez/django,errx/django,elena/django,ziima/django,AndrewGrossman/django,solarissmoke/django,rrrene/django,MoritzS/django,SebasSBM/django,vsajip/django,jsoref/django,andrewsmedina/django,ptoraskar/django,syaiful6/django,mdj2/django,pauloxnet/django,liu602348184/django,sgzsh269/django,crazy-canux/django,aspidites/django,blueyed/django,kennethlove/django,roselleebarle04/django,dbaxa/django,gdi2290/d
jango,alrifqi/django,mcardillo55/django,mattseymour/django,mitar/django,rockneurotiko/django,liuliwork/django,ghedsouza/django,etos/django,rtindru/django,ojake/django,henryfjordan/django,BrotherPhil/django,syphar/django,WSDC-NITWarangal/django,programadorjc/django,adambrenecki/django,vincepandolfo/django,schinckel/django,caotianwei/django,ghickman/django,phalt/django,jmcarp/django,ajoaoff/django,bspink/django,gdub/django,rizumu/django,tragiclifestories/django,Balachan27/django,denys-duchier/django,BrotherPhil/django,etos/django,solarissmoke/django,rapilabs/django,shownomercy/django,marctc/django,github-account-because-they-want-it/django,ericfc/django,roselleebarle04/django,mojeto/django,duqiao/django,digimarc/django,ccn-2m/django,evansd/django,adamchainz/django,risicle/django,dursk/django,fpy171/django,sbellem/django,gcd0318/django,sephii/django,ghedsouza/django,Matt-Deacalion/django,nju520/django,fafaman/django,craynot/django,zsiciarz/django,aspidites/django,Argon-Zhou/django,rsvip/Django,bitcity/django,gitaarik/django,quamilek/django,ebar0n/django,erikr/django,rwillmer/django,rhertzog/django,intgr/django,gengue/django,frishberg/django,alrifqi/django,pquentin/django,mshafiq9/django,zedr/django,Proggie02/TestRepo,ryangallen/django,alx-eu/django,AltSchool/django,programadorjc/django,twz915/django,postrational/django,Balachan27/django,cobalys/django,pipermerriam/django,rockneurotiko/django,elkingtonmcb/django,synasius/django,mrfuxi/django,z0by/django,adamchainz/django,memtoko/django,rynomster/django,aisipos/django,hybrideagle/django,Yong-Lee/django,TimBuckley/effective_django,divio/django,shaib/django,jnovinger/django,jn7163/django,shaib/django,kutenai/django,MikeAmy/django,frePPLe/django,github-account-because-they-want-it/django,salamer/django,benjaminjkraft/django,duqiao/django,daniponi/django,xrmx/django,vincepandolfo/django,hasadna/django,hottwaj/django,bak1an/django,auready/django,jmcarp/django,krisys/django,alimony/django,lwiecek/django,WillGuan105/django,phal
t/django,doismellburning/django,ajaali/django,stewartpark/django,simonw/django,darkryder/django,eyohansa/django,hcsturix74/django,WSDC-NITWarangal/django,joequery/django,TimYi/django,elena/django,akaariai/django,ecederstrand/django,fafaman/django,follow99/django,sadaf2605/django,marissazhou/django,salamer/django,rlugojr/django,jgeskens/django,zhoulingjun/django,willharris/django,jgeskens/django,supriyantomaftuh/django,edevil/django,reinout/django,Yong-Lee/django,Korkki/django,litchfield/django,vmarkovtsev/django,MounirMesselmeni/django,webgeodatavore/django,akaihola/django,loic/django,TridevGuha/django,yceruto/django,peterlauri/django,chrisfranzen/django,scorphus/django,takis/django,tcwicklund/django,ojake/django,matiasb/django,adelton/django,roselleebarle04/django,AndrewGrossman/django,oberlin/django,dgladkov/django,rtindru/django,jaywreddy/django,andela-ifageyinbo/django,ABaldwinHunter/django-clone-classic,jejimenez/django,bak1an/django,joequery/django,andyzsf/django,takis/django,sbellem/django,alx-eu/django,django-nonrel/django,mdj2/django,treyhunner/django,dracos/django,googleinterns/django,karyon/django,kholidfu/django,claudep/django,willhardy/django,dbaxa/django,claudep/django,chyeh727/django,ajaali/django,caotianwei/django,hkchenhongyi/django,abomyi/django,GhostThrone/django,knifenomad/django,blindroot/django,dsanders11/django,dsanders11/django,jgoclawski/django,techdragon/django,alexallah/django,mshafiq9/django,marissazhou/django,double-y/django,koordinates/django,jgoclawski/django,zedr/django,wetneb/django,pelme/django,xwolf12/django,ytjiang/django,extremewaysback/django,digimarc/django,mammique/django,sergei-maertens/django,MarkusH/django,jallohm/django,bitcity/django,dudepare/django,supriyantomaftuh/django,mlavin/django,BlindHunter/django,joakim-hove/django,Endika/django,divio/django,RaoUmer/django,pipermerriam/django,AltSchool/django,raphaelmerx/django,myang321/django,avanov/django,chrisfranzen/django,SujaySKumar/django,mewtaylor/django,gitaarik/django,f
elixxm/django,RossBrunton/django,fenginx/django,Beeblio/django,hackerbot/DjangoDev,GaussDing/django,WillGuan105/django,jrrembert/django,DrMeers/django,hynekcer/django,donkirkby/django,shtouff/django,elkingtonmcb/django,Sonicbids/django,Leila20/django,auvipy/django,yograterol/django,blighj/django,jdelight/django,ojengwa/django-1,vsajip/django,akintoey/django,rlugojr/django,mattrobenolt/django,gdi2290/django,sam-tsai/django,GaussDing/django,hnakamur/django,taaviteska/django,bliti/django-nonrel-1.5,mitya57/django,guettli/django,nju520/django,shaistaansari/django,apocquet/django,aisipos/django,dydek/django,DasIch/django,robhudson/django,Yong-Lee/django,shaistaansari/django,wweiradio/django,lzw120/django,nhippenmeyer/django,ziima/django,sadaf2605/django,maxsocl/django,gcd0318/django,risicle/django,blighj/django,frdb194/django,benspaulding/django,abomyi/django,codepantry/django,uranusjr/django,lsqtongxin/django,dfunckt/django,follow99/django,t0in4/django,rrrene/django,fpy171/django,evansd/django,HousekeepLtd/django,hobarrera/django,krishna-pandey-git/django,tbeadle/django,jsoref/django,TimYi/django,zerc/django,akshatharaj/django,GitAngel/django,denis-pitul/django,akaariai/django,jhoos/django,rtindru/django,Matt-Deacalion/django,sam-tsai/django,gchp/django,mttr/django,jarshwah/django,mbox/django,akaariai/django,elky/django,haxoza/django,simonw/django,druuu/django,takeshineshiro/django,delinhabit/django,KokareIITP/django,jylaxp/django,mbox/django,ckirby/django,gcd0318/django,katrid/django,eugena/django,hackerbot/DjangoDev,zulip/django,marqueedev/django,kevintaw/django,theo-l/django,weiawe/django,mdj2/django,ajoaoff/django,jasonwzhy/django,rogerhu/django,MounirMesselmeni/django,fpy171/django,tbeadle/django,bak1an/django,ironbox360/django,georgemarshall/django,mlavin/django,loic/django,donkirkby/django,georgemarshall/django,ArnossArnossi/django,jn7163/django,schinckel/django,scorphus/django,feroda/django,bobcyw/django,googleinterns/django,tcwicklund/django,kisna72/django,dex4
er/django,EliotBerriot/django,gdub/django,raphaelmerx/django,jnovinger/django,gunchleoc/django,delhivery/django,IRI-Research/django,donkirkby/django,simone/django-gb,quxiaolong1504/django,guettli/django,ar45/django,sdcooke/django,joakim-hove/django,elijah513/django,filias/django,rhertzog/django,filias/django,aidanlister/django,hellhovnd/django,ghedsouza/django,sgzsh269/django,ojengwa/django-1,davgibbs/django,payeldillip/django,redhat-openstack/django,dydek/django,koordinates/django,yigitguler/django,BrotherPhil/django,atul-bhouraskar/django,makinacorpus/django,1013553207/django,denys-duchier/django,frePPLe/django,wetneb/django,kamyu104/django,kosz85/django,follow99/django,whs/django,joakim-hove/django,hynekcer/django,syphar/django,vmarkovtsev/django,dwightgunning/django,akshatharaj/django,hottwaj/django,googleinterns/django,charettes/django,zhaodelong/django,double-y/django,daniponi/django,evansd/django,ticosax/django,dfdx2/django,denisenkom/django,helenst/django,mathspace/django,hybrideagle/django,hkchenhongyi/django,georgemarshall/django,ryanahall/django,avanov/django,tayfun/django,ifduyue/django,ckirby/django,shownomercy/django,gchp/django,Y3K/django,aspidites/django,BMJHayward/django,mathspace/django,dudepare/django,YYWen0o0/python-frame-django,mmardini/django,himleyb85/django,RaoUmer/django,simone/django-gb,joequery/django,mcella/django,claudep/django,SujaySKumar/django,codepantry/django,avanov/django,yamila-moreno/django,seanwestfall/django,ar45/django,mmardini/django,Y3K/django,hkchenhongyi/django,megaumi/django,davgibbs/django,stevenewey/django,ArnossArnossi/django,seocam/django,mrfuxi/django,lsqtongxin/django,extremewaysback/django,EliotBerriot/django,kangfend/django,MatthewWilkes/django,manhhomienbienthuy/django,elijah513/django,webgeodatavore/django,quamilek/django,nju520/django,davidharrigan/django,neiudemo1/django,sephii/django,kcpawan/django,WSDC-NITWarangal/django,sjlehtin/django,megaumi/django,mttr/django,Beauhurst/django,evansd/django,whs/django,kcp
awan/django,whs/django,drjeep/django,nealtodd/django,hasadna/django,doismellburning/django,ajoaoff/django,pelme/django,mattseymour/django,tysonclugg/django,hassanabidpk/django,EmadMokhtar/Django,intgr/django,unaizalakain/django,taaviteska/django,TimBuckley/effective_django,dhruvagarwal/django,webgeodatavore/django,jasonbot/django,pasqualguerrero/django,chyeh727/django,ptoraskar/django,Yong-Lee/django,ironbox360/django,tbeadle/django,sdcooke/django,t0in4/django,twz915/django,iambibhas/django,aisipos/django,DONIKAN/django,bikong2/django,yewang15215/django,davgibbs/django,MarkusH/django,pipermerriam/django,ajaali/django,joequery/django,koordinates/django,t0in4/django,django/django,ataylor32/django,rizumu/django,Mixser/django,dydek/django,HonzaKral/django,seocam/django,fenginx/django,digimarc/django,szopu/django,zsiciarz/django,ptoraskar/django,ticosax/django,maxsocl/django,alexmorozov/django,kcpawan/django,atul-bhouraskar/django,benjaminjkraft/django,darkryder/django,georgemarshall/django,drjeep/django,tcwicklund/django,sarvex/django,beck/django,lmorchard/django,akaihola/django,t0in4/django,AndrewGrossman/django,GhostThrone/django,atul-bhouraskar/django,x111ong/django,shaib/django,shaistaansari/django,ticosax/django,liuliwork/django,b-me/django,follow99/django,ckirby/django,x111ong/django,moreati/django,iambibhas/django,kosz85/django,risicle/django,rhertzog/django,dwightgunning/django,oinopion/django,felixxm/django,mjtamlyn/django,hkchenhongyi/django,apocquet/django,poiati/django,hackerbot/DjangoDev,ajoaoff/django,freakboy3742/django,darjeeling/django,labcodes/django,dfdx2/django,ryangallen/django,mrbox/django,krishna-pandey-git/django,ericholscher/django,beckastar/django,denys-duchier/django,tanmaythakur/django,kswiat/django,liu602348184/django,alrifqi/django,dex4er/django,rrrene/django,YangSongzhou/django,mrbox/django,savoirfairelinux/django,sergei-maertens/django,himleyb85/django,megaumi/django,1013553207/django,curtisstpierre/django,ryanahall/django,webgeodatavore/
django,alilotfi/django,tcwicklund/django,yigitguler/django,Korkki/django,gunchleoc/django,h4r5h1t/django-hauthy,gchp/django,hobarrera/django,JavML/django,myang321/django,risicle/django,oinopion/django,dfunckt/django,django-nonrel/django,rogerhu/django,imtapps/django-imt-fork,camilonova/django,phalt/django,jasonwzhy/django,apocquet/django,haxoza/django,Anonymous-X6/django,hnakamur/django,tomchristie/django,andela-ifageyinbo/django,lunafeng/django,Beauhurst/django,treyhunner/django,simonw/django,himleyb85/django,marqueedev/django,rwillmer/django,mcella/django,riteshshrv/django,MarcJoan/django,beckastar/django,arun6582/django,baylee/django,ivandevp/django,alexmorozov/django,timgraham/django,alexmorozov/django,bspink/django,hynekcer/django,hellhovnd/django,apollo13/django,jvkops/django,dpetzold/django,YYWen0o0/python-frame-django,TimYi/django,leeon/annotated-django,adrianholovaty/django,peterlauri/django,moreati/django,reinout/django,techdragon/django,apollo13/django,PolicyStat/django,sgzsh269/django,quxiaolong1504/django,zsiciarz/django,mcrowson/django,DasIch/django,guettli/django,sarvex/django,delhivery/django,oberlin/django,ASCrookes/django,robhudson/django,nhippenmeyer/django,kosz85/django,vitan/django,jaywreddy/django,shaistaansari/django,andela-ooladayo/django,DasIch/django,szopu/django,koniiiik/django,jrrembert/django,dursk/django,gohin/django,sjlehtin/django,jpic/django,yask123/django,kutenai/django,quxiaolong1504/django,Nepherhotep/django,huang4fstudio/django,areski/django,bitcity/django,jpic/django,mrbox/django,beckastar/django,loic/django,frishberg/django,rtindru/django,pjdelport/django,redhat-openstack/django,xadahiya/django,jejimenez/django,alilotfi/django,makinacorpus/django,asser/django,hassanabidpk/django,mttr/django,SujaySKumar/django,carljm/django,TimBuckley/effective_django,synasius/django,zhaodelong/django,kamyu104/django,dhruvagarwal/django,cainmatt/django,syaiful6/django,wetneb/django,drjeep/django,kosz85/django,barbuza/django,elky/django,MarkusH/d
jango,mshafiq9/django,huang4fstudio/django,WillGuan105/django,devops2014/djangosite,tomchristie/django,devops2014/djangosite,RossBrunton/django,mrbox/django,marckuz/django,raphaelmerx/django,Endika/django,marcelocure/django,anant-dev/django,spisneha25/django,gohin/django,Korkki/django,lzw120/django,alx-eu/django,moreati/django,zerc/django,chyeh727/django,kennethlove/django,elky/django,mewtaylor/django,ziima/django,oberlin/django,zerc/django,wsmith323/django,nealtodd/django,aidanlister/django,GitAngel/django,druuu/django,benjaminjkraft/django,seocam/django,ptoraskar/django,double-y/django,JorgeCoock/django,crazy-canux/django,andrewsmedina/django,jdelight/django,mjtamlyn/django,rlugojr/django,SujaySKumar/django,alx-eu/django,bspink/django,stewartpark/django,unaizalakain/django,avneesh91/django,tanmaythakur/django,yamila-moreno/django,harisibrahimkv/django,akshatharaj/django,ABaldwinHunter/django-clone-classic,mammique/django,epandurski/django,vmarkovtsev/django,ulope/django,irwinlove/django,karyon/django,xrmx/django,Endika/django,jhoos/django,ebar0n/django,zanderle/django,mathspace/django,ojake/django,AltSchool/django,rapilabs/django,pjdelport/django,sergei-maertens/django,ccn-2m/django,mattrobenolt/django,blaze33/django,DONIKAN/django,pasqualguerrero/django,devops2014/djangosite,rlugojr/django,kisna72/django,DasIch/django,xadahiya/django,JorgeCoock/django,craynot/django,mathspace/django,programadorjc/django,eugena/django,takeshineshiro/django,aerophile/django,arun6582/django,coldmind/django,runekaagaard/django-contrib-locking,weiawe/django,twz915/django,akshatharaj/django,xrmx/django,SoftwareMaven/django,dgladkov/django,intgr/django,Beeblio/django,labcodes/django,AltSchool/django,harisibrahimkv/django,hottwaj/django,jgeskens/django,harisibrahimkv/django,jylaxp/django,techdragon/django,labcodes/django,baylee/django,alilotfi/django,alrifqi/django,simonw/django,darjeeling/django,marqueedev/django,ABaldwinHunter/django-clone-classic,syaiful6/django,BMJHayward/django,marc
elocure/django,HonzaKral/django,freakboy3742/django,MatthewWilkes/django,dpetzold/django,reinout/django,lunafeng/django,krisys/django,MarcJoan/django,Balachan27/django,rwillmer/django,mbox/django,synasius/django,nhippenmeyer/django,leekchan/django_test,auready/django,yask123/django,yakky/django,PetrDlouhy/django,PetrDlouhy/django,carljm/django,PolicyStat/django,karyon/django,takeshineshiro/django,dex4er/django,tragiclifestories/django,DrMeers/django,sarthakmeh03/django,davgibbs/django,yceruto/django,willharris/django,varunnaganathan/django,feroda/django,vitan/django,YangSongzhou/django,Endika/django,olasitarska/django,rwillmer/django,aleida/django,karyon/django,manhhomienbienthuy/django,vitaly4uk/django,mewtaylor/django,matiasb/django,jallohm/django,oinopion/django,sdcooke/django,jenalgit/django,krishna-pandey-git/django,etos/django,kholidfu/django,jsoref/django,epandurski/django,schinckel/django,ifduyue/django,Y3K/django,double-y/django,RaoUmer/django,moreati/django,caotianwei/django,saydulk/django,elena/django,frishberg/django,HousekeepLtd/django,monetate/django,elkingtonmcb/django,duqiao/django,shtouff/django,lmorchard/django,hynekcer/django,zhaodelong/django,blaze33/django,sbellem/django,monetate/django,syphar/django,Korkki/django,xwolf12/django,kaedroho/django,mrfuxi/django,cobalys/django,kevintaw/django,vincepandolfo/django,andresgz/django,jgoclawski/django,yograterol/django,marqueedev/django,andela-ooladayo/django,jasonbot/django,shacker/django,dpetzold/django,SoftwareMaven/django,whs/django,gitaarik/django,coldmind/django,hcsturix74/django,alexmorozov/django,kisna72/django,YYWen0o0/python-frame-django,z0by/django,payeldillip/django,tragiclifestories/django,1013553207/django,wweiradio/django,ryangallen/django,nemesisdesign/django,ar45/django,xwolf12/django,benspaulding/django,sadaf2605/django,jasonbot/django,xadahiya/django,zhoulingjun/django,edmorley/django,mitchelljkotler/django,hellhovnd/django,ABaldwinHunter/django-clone,akaihola/django,jasonwzhy/django,M
ikeAmy/django,mojeto/django,h4r5h1t/django-hauthy,mcrowson/django,tanmaythakur/django,adrianholovaty/django,treyhunner/django,uranusjr/django,andela-ooladayo/django,errx/django,kamyu104/django,beni55/django,KokareIITP/django,GaussDing/django,auvipy/django,nielsvanoch/django,Anonymous-X6/django,wkschwartz/django,sarthakmeh03/django,fafaman/django,googleinterns/django,riteshshrv/django,willhardy/django,rapilabs/django,nemesisdesign/django,jasonbot/django,seanwestfall/django,unaizalakain/django,dydek/django,hasadna/django,piquadrat/django,dudepare/django,lzw120/django,shacker/django,MatthewWilkes/django,ytjiang/django,RossBrunton/django,ecederstrand/django,craynot/django,andreif/django,helenst/django,vitan/django,reinout/django,techdragon/django,gdub/django,Nepherhotep/django,yigitguler/django,oscaro/django,PetrDlouhy/django,Mixser/django,ironbox360/django,roselleebarle04/django,ASCrookes/django,jeezybrick/django,rsvip/Django,krishna-pandey-git/django,rajsadho/django,petecummings/django,HonzaKral/django,lmorchard/django,olasitarska/django,memtoko/django,bikong2/django,crazy-canux/django,leekchan/django_test,abomyi/django,seanwestfall/django,jmcarp/django,felixjimenez/django,dpetzold/django,duqiao/django,nemesisdesign/django,marctc/django,HousekeepLtd/django,Proggie02/TestRepo,sdcooke/django,PetrDlouhy/django,blindroot/django,ytjiang/django,intgr/django,katrid/django,jhg/django,weiawe/django,raphaelmerx/django,nhippenmeyer/django,felixjimenez/django,filias/django,blueyed/django,xadahiya/django,hybrideagle/django,djbaldey/django,ArnossArnossi/django,loic/django,ABaldwinHunter/django-clone,AlexHill/django,tomchristie/django,kswiat/django,gannetson/django,kamyu104/django,waytai/django,carljm/django,edevil/django,marckuz/django,andyzsf/django,tuhangdi/django,gdi2290/django,andyzsf/django,ajaali/django,aerophile/django,sopier/django,elijah513/django,django-nonrel/django,edmorley/django,auvipy/django,JavML/django,camilonova/django,unaizalakain/django,codepantry/django,auready
/django,manhhomienbienthuy/django,jeezybrick/django,frankvdp/django,Anonymous-X6/django,bikong2/django,donkirkby/django,wkschwartz/django,robhudson/django,oberlin/django,quxiaolong1504/django,auready/django,ckirby/django,Leila20/django,hunter007/django,cobalys/django,dfdx2/django,poiati/django,codepantry/django,alimony/django,andrewsmedina/django,davidharrigan/django,joakim-hove/django,uranusjr/django,MoritzS/django,tayfun/django,jhoos/django,jn7163/django,ecederstrand/django,savoirfairelinux/django,jscn/django,jmcarp/django,davidharrigan/django,yakky/django,akaariai/django,caotianwei/django,jeezybrick/django,olasitarska/django,helenst/django,jaywreddy/django,MarkusH/django,hobarrera/django,Balachan27/django,Mixser/django,theo-l/django,mojeto/django,darkryder/django,rsvip/Django,sopier/django,eyohansa/django,Beeblio/django,litchfield/django,gitaarik/django,supriyantomaftuh/django,TridevGuha/django,krisys/django,BMJHayward/django,lwiecek/django,dracos/django,barbuza/django,eyohansa/django,NullSoldier/django,aisipos/django,rynomster/django,HousekeepLtd/django,adelton/django,gohin/django,pelme/django,epandurski/django,liu602348184/django,KokareIITP/django,quamilek/django,sadaf2605/django,avneesh91/django,elky/django,hellhovnd/django,eyohansa/django,shtouff/django,beni55/django,DONIKAN/django,nemesisdesign/django,stewartpark/django,Adnn/django,jarshwah/django,makinacorpus/django,nju520/django,jdelight/django,kholidfu/django,jdelight/django,syphar/django,GhostThrone/django,seocam/django,marcelocure/django,atul-bhouraskar/django,JavML/django,marctc/django,charettes/django,django/django
|
43b8e4de31d0659561ffedfeb0ab4a42f035eade
|
dev/test_all.py
|
dev/test_all.py
|
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs various tests in the repository."""
import argparse
import subprocess
import repo_util
def main(argv):
root = "//third_party/java_src/j2cl/"
cmd = ["blaze", "test", "--keep_going"]
cmd += [root + t + "/..." for t in argv.test_pattern] or [root + "..."]
cmd += repo_util.create_test_filter(argv.platforms)
subprocess.call(cmd)
def add_arguments(parser):
parser.add_argument(
"test_pattern",
metavar="<root>",
nargs="*",
help="test root(s). e.g. transpiler jre")
def run_for_presubmit():
argv = argparse.Namespace(test_pattern=[])
main(argv)
|
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs various tests in the repository."""
import argparse
import subprocess
import repo_util
def main(argv):
root = "//third_party/java_src/j2cl/"
cmd = ["blaze", "test", "--test_tag_filters=-chamber", "--keep_going"]
cmd += [root + t + "/..." for t in argv.test_pattern] or [root + "..."]
cmd += repo_util.create_test_filter(argv.platforms)
subprocess.call(cmd)
def add_arguments(parser):
parser.add_argument(
"test_pattern",
metavar="<root>",
nargs="*",
help="test root(s). e.g. transpiler jre")
def run_for_presubmit():
argv = argparse.Namespace(test_pattern=[])
main(argv)
|
Exclude chamber workflow from targets tested by j2 testall
|
Exclude chamber workflow from targets tested by j2 testall
PiperOrigin-RevId: 384424406
|
Python
|
apache-2.0
|
google/j2cl,google/j2cl,google/j2cl,google/j2cl,google/j2cl
|
a3c3c5f4cbbea80aada1358ca52c698cf13136cc
|
unittests/test_xmp.py
|
unittests/test_xmp.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from .hamcrest import evaluates_to
from blister.xmp import XMP
class XMPTest (unittest.TestCase):
def test_degenerate (self):
xmp = XMP()
assert_that(xmp, evaluates_to(False))
assert_that(xmp, has_length(0))
assert_that(list(xmp), is_(equal_to([])))
def test_not_all_attrs_exist (self):
xmp = XMP()
assert_that(calling(getattr).with_args(xmp, "fake_namespace"),
raises(AttributeError))
assert_that(calling(getattr).with_args(xmp, "also_fake"),
raises(AttributeError))
def test_default_xmp_namespaces_exist (self):
xmp = XMP()
no_error = xmp.stRef
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from .hamcrest import evaluates_to
from blister.xmp import XMP
class XMPTest (unittest.TestCase):
def setUp (self):
self.xmp = XMP()
def test_degenerate (self):
assert_that(self.xmp, evaluates_to(False))
assert_that(self.xmp, has_length(0))
assert_that(list(self.xmp), is_(equal_to([])))
def test_not_all_attrs_exist (self):
assert_that(calling(getattr).with_args(self.xmp,
"fake_namespace"),
raises(AttributeError))
assert_that(calling(getattr).with_args(self.xmp, "also_fake"),
raises(AttributeError))
def test_default_xmp_namespaces_exist (self):
no_error = self.xmp.stRef
|
Move XMP init into setUp()
|
Move XMP init into setUp()
|
Python
|
bsd-3-clause
|
daaang/blister
|
c5cf8df78106e15a81f976f99d26d361b036318a
|
indra/tools/reading/run_drum_reading.py
|
indra/tools/reading/run_drum_reading.py
|
import sys
import json
from indra.sources.trips.drum_reader import DrumReader
from indra.sources.trips import process_xml
def read_content(content):
sentences = []
for k, v in content.items():
sentences += v
dr = DrumReader(to_read=sentences)
try:
dr.start()
except SystemExit:
pass
statements = []
for extraction in dr.extractions:
statements += process_xml(extraction).statements
return statements
if __name__ == '__main__':
file_name = sys.argv[1]
with open(file_name, 'rt') as fh:
content = json.load(fh)
statements = read_content(content)
print(statements)
|
import sys
import json
import time
import pickle
from indra.sources.trips import process_xml
from indra.sources.trips.drum_reader import DrumReader
def set_pmid(statements, pmid):
for stmt in statements:
for evidence in stmt.evidence:
evidence.pmid = pmid
def read_content(content, host):
all_statements = []
for pmid, sentences in content.items():
print('================================')
print('Processing %d sentences for %s' % (len(sentences), pmid))
ts = time.time()
dr = DrumReader(to_read=sentences, host=host)
try:
dr.start()
except SystemExit:
pass
statements = []
for extraction in dr.extractions:
tp = process_xml(extraction)
statements += tp.statements
set_pmid(statements, pmid)
te = time.time()
print('Reading took %d seconds and produced %d Statements.' %
(te-ts, len(statements)))
all_statements += statements
return all_statements
def save_results(statements, out_fname):
with open(out_fname, 'wb') as fh:
pickle.dump(statements, fh)
if __name__ == '__main__':
host = sys.argv[1]
file_name = sys.argv[2]
with open(file_name, 'rt') as fh:
content = json.load(fh)
statements = read_content(content, host)
save_results(statements, 'results.pkl')
|
Improve batch Drum reading implementation
|
Improve batch Drum reading implementation
|
Python
|
bsd-2-clause
|
bgyori/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra
|
b3f516b91d118824bb90f834184aa25a5a5f1c68
|
train.py
|
train.py
|
# -*- coding: utf-8 -*-
import logging
from gensim.models import word2vec
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
sentences = word2vec.LineSentence("wiki_seg.txt")
model = word2vec.Word2Vec(sentences, size=250)
#保存模型,供日後使用
model.save("word2vec.model")
#模型讀取方式
# model = word2vec.Word2Vec.load("your_model_name")
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
import logging
from gensim.models import word2vec
def main():
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
sentences = word2vec.LineSentence("wiki_seg.txt")
model = word2vec.Word2Vec(sentences, vector_size=250)
#保存模型,供日後使用
model.save("word2vec.model")
#模型讀取方式
# model = word2vec.Word2Vec.load("your_model_name")
if __name__ == "__main__":
main()
|
Fix the argument name to adapt gensim 4.0.
|
[U] Fix the argument name to adapt gensim 4.0.
|
Python
|
mit
|
zake7749/word2vec-tutorial
|
afd496ccdde07502e6f42ae1b4127d130aed050c
|
docs/conf.py
|
docs/conf.py
|
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from gravity import __version__ # noqa: E402
project = 'Gravity'
copyright = '2022, The Galaxy Project'
author = 'The Galaxy Project'
release = __version__
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
master_doc = 'index'
extensions = []
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = 'alabaster'
html_static_path = ['_static']
|
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from gravity import __version__ # noqa: E402
project = 'Gravity'
copyright = '2022, The Galaxy Project'
author = 'The Galaxy Project'
release = __version__
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
master_doc = 'index'
extensions = []
templates_path = ['_templates']
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = 'default'
html_static_path = ['_static']
|
Reset docs theme, RTD should override
|
Reset docs theme, RTD should override
|
Python
|
mit
|
galaxyproject/gravity
|
9942b7b6e550ec6f76def44a7470f747c47b13a8
|
utils/00-cinspect.py
|
utils/00-cinspect.py
|
""" A startup script for IPython to patch it to 'inspect' using cinspect. """
# Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to
# use cinspect for the code inspection.
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
OI.find_file = patch_find_file
|
""" A startup script for IPython to patch it to 'inspect' using cinspect. """
# Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to
# use cinspect for the code inspection.
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
OI.find_file = patch_find_file
ipy = get_ipython()
old_format = ipy.inspector.format
def c_format(raw, *args, **kwargs):
return raw
def my_format(raw, out = None, scheme = ''):
try:
output = old_format(raw, out, scheme)
except:
output = raw
return output
ipy.inspector.format = my_format
|
Patch the colorized formatter to not break for C modules.
|
Patch the colorized formatter to not break for C modules.
|
Python
|
bsd-3-clause
|
punchagan/cinspect,punchagan/cinspect
|
886539f4bd3d67938f90b6500ee625db470284a2
|
UM/View/CompositePass.py
|
UM/View/CompositePass.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Resources import Resources
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, name, width, height):
super().__init__(name, width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._shader.setUniformValue("u_layer_count", texture_unit + 1)
self._shader.setUniformValueArray("u_layers", [range(0, texture_unit)], texture_unit + 1)
self.renderQuad()
self._shader.release()
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Application import Application
from UM.Resources import Resources
from UM.Math.Matrix import Matrix
from UM.View.RenderPass import RenderPass
from UM.View.GL.OpenGL import OpenGL
class CompositePass(RenderPass):
def __init__(self, width, height):
super().__init__("composite", width, height)
self._shader = OpenGL.getInstance().createShaderProgram(Resources.getPath(Resources.Shaders, "composite.shader"))
self._gl = OpenGL.getInstance().getBindingsObject()
self._renderer = Application.getInstance().getRenderer()
def setCompositeShader(self, shader):
self._shader = shader
def renderContents(self):
pass
def renderOutput(self):
self._shader.bind()
texture_unit = 0
for render_pass in self._renderer.getRenderPasses():
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, render_pass.getTextureId())
texture_unit += 1
self._renderer.renderQuad(self._shader)
for i in range(texture_unit):
self._gl.glActiveTexture(texture_unit)
self._gl.glBindTexture(self._gl.GL_TEXTURE_2D, 0)
self._shader.release()
|
Make basic composite pass work
|
Make basic composite pass work
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
a2e4e8593ec4c09d504b74544b134d27d1428ce3
|
emma2/msm/flux/__init__.py
|
emma2/msm/flux/__init__.py
|
from .api import *
|
r"""
===================================================================
flux - Reactive flux an transition pathways (:mod:`emma2.msm.flux`)
===================================================================
.. currentmodule:: emma2.msm.flux
This module contains functions to compute reactive flux networks and
find dominant reaction pathways in such networks.
TPT-object
==========
.. autosummary::
:toctree: generated/
tpt - ReactiveFlux object
Reactive flux
=============
.. autosummary::
:toctree: generated/
flux_matrix - TPT flux network
to_netflux - Netflux from gross flux
flux_production - Net flux-production for all states
flux_producers
flux_consumers
coarsegrain
Reaction rates and fluxes
=========================
.. autosummary::
:toctree: generated/
total_flux
rate
mfpt
Pathway decomposition
=====================
.. autosummary::
:toctree: generated/
pathways
"""
from .api import *
|
Include flux package in doc
|
[msm/flux] Include flux package in doc
|
Python
|
bsd-2-clause
|
arokem/PyEMMA,trendelkampschroer/PyEMMA,trendelkampschroer/PyEMMA,arokem/PyEMMA
|
5ae5c27f69cdfb1c53ada0a2aa90d76c4d3ce421
|
memcached.py
|
memcached.py
|
#!/usr/bin/env python
#
# igcollect - Memcached
#
# Copyright (c) 2016, InnoGames GmbH
#
import telnetlib
import sys
import socket
import time
def main(host='127.0.0.1', port='11211'):
hostname = socket.gethostname().replace('.', '_')
ts = str(int(time.time()))
template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts
for line in command(host, port, 'stats').splitlines():
if line.startswith('STAT '):
header, key, value = line.split()
if key.replace('_', '').isalpha() and is_float(value):
print(template.format(hostname, key, value))
def command(host, port, cmd):
"""Write a command to telnet and return the response"""
client = telnetlib.Telnet(host, port)
client.write(cmd + '\n')
return client.read_until('END')
def is_float(value):
try:
float(value)
except ValueError:
return False
else:
return True
if __name__ == '__main__':
main(*sys.argv[1:])
|
#!/usr/bin/env python
#
# igcollect - Memcached
#
# Copyright (c) 2016, InnoGames GmbH
#
import telnetlib
import sys
import socket
import time
import re
def main(host='127.0.0.1', port='11211'):
hostname = socket.gethostname().replace('.', '_')
ts = str(int(time.time()))
template = 'servers.' + hostname + '.software.memcached.{1} {2} ' + ts
pattern = re.compile('STAT \w+ \d+(.\d+)?$')
for line in command(host, port, 'stats').splitlines():
if pattern.match(line):
header, key, value = line.split()
print(template.format(hostname, key, value))
def command(host, port, cmd):
"""Write a command to telnet and return the response"""
client = telnetlib.Telnet(host, port)
client.write(cmd + '\n')
return client.read_until('END')
def is_float(value):
try:
float(value)
except ValueError:
return False
else:
return True
if __name__ == '__main__':
main(*sys.argv[1:])
|
Use regexp for checking the line
|
Use regexp for checking the line
|
Python
|
mit
|
innogames/igcollect
|
3863bda6af40f62e49f4883468f4947d46f0cccc
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.6.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.6'
|
Update dsub version to 0.3.6
|
Update dsub version to 0.3.6
PiperOrigin-RevId: 281980915
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
9a791b9c5e79011edaa2a9d2f25bf92e0bf17543
|
client/libsinan/version_check_handler.py
|
client/libsinan/version_check_handler.py
|
import libsinan
from libsinan import handler, output, jsax
class VersionCheckTaskHandler(output.SimpleTaskHandler):
def object_end(self):
""" We only get one object per right now so
lets print it out when we get it """
if self.task == "version":
if self.event_type == 'info':
self.version = self.desc
return True
else:
return output.SimpleTaskHandler.object_end(self)
class VersionCheckHandler(handler.Handler):
def handles(self, task):
return task == "version"
def handle(self, largs):
self.do_request(largs, handle)
version = None
def handle(task, conn):
global version
if conn.status == 200:
try:
task_handler = VersionCheckTaskHandler()
jsax.parse(conn, task_handler)
version = task_handler.version
return 0
except ValueError, msg:
print "Got an error back from sinan. Check the logs at ~/.sinan/logs/kernel.log"
else:
return 1
|
import libsinan
from libsinan import handler, output, jsax
class VersionCheckTaskHandler(output.SimpleTaskHandler):
def __init__(self):
output.SimpleTaskHandler.__init__(self)
self.version = None
def object_end(self):
""" We only get one object per right now so
lets print it out when we get it """
if self.task == "version":
if self.event_type == 'info':
self.version = self.desc
return True
else:
return output.SimpleTaskHandler.object_end(self)
class VersionCheckHandler(handler.Handler):
def handles(self, task):
return task == "version"
def handle(self, largs):
self.do_request(largs, handle)
version = None
def handle(task, conn):
global version
if conn.status == 200:
try:
task_handler = VersionCheckTaskHandler()
jsax.parse(conn, task_handler)
version = task_handler.version
return 0
except ValueError, msg:
print "Got an error back from sinan. Check the logs at ~/.sinan/logs/kernel.log"
else:
return 1
|
Make sure version is initialized
|
Make sure version is initialized
|
Python
|
mit
|
erlware-deprecated/sinan,erlware-deprecated/sinan,ericbmerritt/sinan,ericbmerritt/sinan,erlware-deprecated/sinan,ericbmerritt/sinan
|
5abe9a29ae586907304649fe6682e3e8997da310
|
app/views.py
|
app/views.py
|
from index import app
from flask import render_template, request
from config import BASE_URL
from query import get_callout, get_billboard
SHEET_ID = 'tzE2PsqJoWRpENlMr-ZlS8A'
#SHEET_ID = 'tIk5itVcfOHUmakkmpjCcxw' # Demo sheet
#@app.route('/')
#def index():
# page_url = BASE_URL + request.path
# page_title = 'Audio Player'
# stream_name = "My Place"
#
# social = {
# 'title': "VPR Audio Player",
# 'subtitle': "",
# 'img': "static/img/logo/vpr-logo-share.jpg",
# 'description': "Listen to the live streams of VPR News, VPR Classical, the BBC, Jazz24 and My Place.",
# 'twitter_text': "News, Classical, the BBC and more. The VPR Audio Player:",
# 'twitter_hashtag': ""
# }
#
# return render_template('content.html',
# page_title=page_title,
# social=social,
# stream_name=stream_name,
# page_url=page_url)
#
@app.route('/billboard')
def billboard():
billboard = get_billboard(SHEET_ID)
return render_template('billboard.html', billboard=billboard)
@app.route('/callout')
def callout():
callout = get_callout(SHEET_ID)
return render_template('callout.html', callout=callout)
|
from index import app
from flask import render_template, request
from config import BASE_URL
from query import get_callout, get_billboard
SHEET_ID = 'tzE2PsqJoWRpENlMr-ZlS8A'
#SHEET_ID = 'tIk5itVcfOHUmakkmpjCcxw' # Demo sheet
@app.route('/')
def index():
page_url = BASE_URL + request.path
page_title = 'Audio Player'
stream_name = "Replay"
social = {
'title': "VPR Audio Player",
'subtitle': "",
'img': "static/img/logo/vpr-logo-share.jpg",
'description': "Listen to the live streams of VPR News, VPR Classical, the BBC, Jazz24 and My Place.",
'twitter_text': "News, Classical, the BBC and more. The VPR Audio Player:",
'twitter_hashtag': ""
}
return render_template('content.html',
page_title=page_title,
social=social,
stream_name=stream_name,
page_url=page_url)
@app.route('/billboard')
def billboard():
billboard = get_billboard(SHEET_ID)
return render_template('billboard.html', billboard=billboard)
@app.route('/callout')
def callout():
callout = get_callout(SHEET_ID)
return render_template('callout.html', callout=callout)
|
Update stream name to Replay
|
Update stream name to Replay
|
Python
|
apache-2.0
|
vprnet/audio-player,vprnet/audio-player,vprnet/audio-player
|
738b0e1344572d000f51e862000fb719c7035c2c
|
st2reactor/st2reactor/cmd/rulesengine.py
|
st2reactor/st2reactor/cmd/rulesengine.py
|
import os
from oslo.config import cfg
from st2common import log as logging
from st2common.models.db import db_setup
from st2common.models.db import db_teardown
from st2common.constants.logging import DEFAULT_LOGGING_CONF_PATH
from st2reactor.rules import config
from st2reactor.rules import worker
LOG = logging.getLogger('st2reactor.bin.rulesengine')
def _setup():
# Set up logger which logs everything which happens during and before config
# parsing to sys.stdout
logging.setup(DEFAULT_LOGGING_CONF_PATH)
# 1. parse config args
config.parse_args()
# 2. setup logging.
logging.setup(cfg.CONF.rulesengine.logging)
# 3. all other setup which requires config to be parsed and logging to
# be correctly setup.
username = cfg.CONF.database.username if hasattr(cfg.CONF.database, 'username') else None
password = cfg.CONF.database.password if hasattr(cfg.CONF.database, 'password') else None
db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
username=username, password=password)
def _teardown():
db_teardown()
def main():
try:
_setup()
return worker.work()
except:
LOG.exception('(PID:%s) RulesEngine quit due to exception.', os.getpid())
return 1
finally:
_teardown()
|
import os
import sys
from oslo.config import cfg
from st2common import log as logging
from st2common.models.db import db_setup
from st2common.models.db import db_teardown
from st2common.constants.logging import DEFAULT_LOGGING_CONF_PATH
from st2reactor.rules import config
from st2reactor.rules import worker
LOG = logging.getLogger('st2reactor.bin.rulesengine')
def _setup():
# Set up logger which logs everything which happens during and before config
# parsing to sys.stdout
logging.setup(DEFAULT_LOGGING_CONF_PATH)
# 1. parse config args
config.parse_args()
# 2. setup logging.
logging.setup(cfg.CONF.rulesengine.logging)
# 3. all other setup which requires config to be parsed and logging to
# be correctly setup.
username = cfg.CONF.database.username if hasattr(cfg.CONF.database, 'username') else None
password = cfg.CONF.database.password if hasattr(cfg.CONF.database, 'password') else None
db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
username=username, password=password)
def _teardown():
db_teardown()
def main():
try:
_setup()
return worker.work()
except SystemExit as exit_code:
sys.exit(exit_code)
except:
LOG.exception('(PID:%s) RulesEngine quit due to exception.', os.getpid())
return 1
finally:
_teardown()
|
Fix rules engine version reporting.
|
Fix rules engine version reporting.
|
Python
|
apache-2.0
|
Itxaka/st2,punalpatel/st2,grengojbo/st2,grengojbo/st2,jtopjian/st2,nzlosh/st2,lakshmi-kannan/st2,peak6/st2,jtopjian/st2,peak6/st2,alfasin/st2,jtopjian/st2,Itxaka/st2,Itxaka/st2,pinterb/st2,StackStorm/st2,punalpatel/st2,tonybaloney/st2,punalpatel/st2,pixelrebel/st2,StackStorm/st2,pixelrebel/st2,lakshmi-kannan/st2,StackStorm/st2,lakshmi-kannan/st2,alfasin/st2,grengojbo/st2,emedvedev/st2,emedvedev/st2,dennybaa/st2,Plexxi/st2,Plexxi/st2,dennybaa/st2,pinterb/st2,tonybaloney/st2,nzlosh/st2,armab/st2,armab/st2,alfasin/st2,peak6/st2,pixelrebel/st2,dennybaa/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,tonybaloney/st2,emedvedev/st2,armab/st2,pinterb/st2,Plexxi/st2
|
77b5680794a7a60dedf687f4a199e48121f96955
|
tests/performance/benchmark_aggregator.py
|
tests/performance/benchmark_aggregator.py
|
"""
Performance tests for the agent/dogstatsd metrics aggregator.
"""
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
"""
Performance tests for the agent/dogstatsd metrics aggregator.
"""
from aggregator import MetricsAggregator
class TestAggregatorPerf(object):
def test_aggregation_performance(self):
ma = MetricsAggregator('my.host')
flush_count = 10
loops_per_flush = 10000
metric_count = 5
for _ in xrange(flush_count):
for i in xrange(loops_per_flush):
# Counters
for j in xrange(metric_count):
ma.submit_packets('counter.%s:%s|c' % (j, i))
ma.submit_packets('gauge.%s:%s|g' % (j, i))
ma.submit_packets('histogram.%s:%s|h' % (j, i))
ma.submit_packets('set.%s:%s|s' % (j, 1.0))
ma.flush()
if __name__ == '__main__':
t = TestAggregatorPerf()
t.test_aggregation_performance()
|
Add sets + a float value to the benchmark.
|
Add sets + a float value to the benchmark.
|
Python
|
bsd-3-clause
|
yuecong/dd-agent,darron/dd-agent,oneandoneis2/dd-agent,AniruddhaSAtre/dd-agent,packetloop/dd-agent,remh/dd-agent,guruxu/dd-agent,jyogi/purvar-agent,relateiq/dd-agent,JohnLZeller/dd-agent,citrusleaf/dd-agent,lookout/dd-agent,citrusleaf/dd-agent,zendesk/dd-agent,zendesk/dd-agent,JohnLZeller/dd-agent,oneandoneis2/dd-agent,relateiq/dd-agent,cberry777/dd-agent,zendesk/dd-agent,polynomial/dd-agent,jyogi/purvar-agent,manolama/dd-agent,darron/dd-agent,pfmooney/dd-agent,huhongbo/dd-agent,ess/dd-agent,tebriel/dd-agent,amalakar/dd-agent,guruxu/dd-agent,jvassev/dd-agent,Mashape/dd-agent,takus/dd-agent,Wattpad/dd-agent,jvassev/dd-agent,jraede/dd-agent,PagerDuty/dd-agent,GabrielNicolasAvellaneda/dd-agent,JohnLZeller/dd-agent,mderomph-coolblue/dd-agent,joelvanvelden/dd-agent,truthbk/dd-agent,tebriel/dd-agent,eeroniemi/dd-agent,joelvanvelden/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,indeedops/dd-agent,cberry777/dd-agent,pmav99/praktoras,gphat/dd-agent,amalakar/dd-agent,lookout/dd-agent,polynomial/dd-agent,huhongbo/dd-agent,Wattpad/dd-agent,truthbk/dd-agent,remh/dd-agent,pfmooney/dd-agent,takus/dd-agent,jyogi/purvar-agent,gphat/dd-agent,remh/dd-agent,tebriel/dd-agent,darron/dd-agent,amalakar/dd-agent,manolama/dd-agent,jshum/dd-agent,jamesandariese/dd-agent,Mashape/dd-agent,joelvanvelden/dd-agent,urosgruber/dd-agent,jshum/dd-agent,GabrielNicolasAvellaneda/dd-agent,truthbk/dd-agent,brettlangdon/dd-agent,gphat/dd-agent,polynomial/dd-agent,a20012251/dd-agent,darron/dd-agent,citrusleaf/dd-agent,Mashape/dd-agent,citrusleaf/dd-agent,pmav99/praktoras,mderomph-coolblue/dd-agent,benmccann/dd-agent,PagerDuty/dd-agent,cberry777/dd-agent,eeroniemi/dd-agent,brettlangdon/dd-agent,huhongbo/dd-agent,zendesk/dd-agent,eeroniemi/dd-agent,truthbk/dd-agent,packetloop/dd-agent,amalakar/dd-agent,darron/dd-agent,a20012251/dd-agent,jraede/dd-agent,jamesandariese/dd-agent,gphat/dd-agent,mderomph-coolblue/dd-agent,benmccann/dd-agent,a20012251/dd-agent,takus/dd-agent,polynomial/dd-agent,jamesandariese
/dd-agent,ess/dd-agent,jraede/dd-agent,c960657/dd-agent,JohnLZeller/dd-agent,AntoCard/powerdns-recursor_check,oneandoneis2/dd-agent,ess/dd-agent,guruxu/dd-agent,indeedops/dd-agent,benmccann/dd-agent,manolama/dd-agent,Wattpad/dd-agent,manolama/dd-agent,a20012251/dd-agent,cberry777/dd-agent,AntoCard/powerdns-recursor_check,c960657/dd-agent,manolama/dd-agent,relateiq/dd-agent,benmccann/dd-agent,jvassev/dd-agent,jshum/dd-agent,jamesandariese/dd-agent,pfmooney/dd-agent,Wattpad/dd-agent,jshum/dd-agent,urosgruber/dd-agent,pmav99/praktoras,joelvanvelden/dd-agent,benmccann/dd-agent,jyogi/purvar-agent,relateiq/dd-agent,Shopify/dd-agent,AntoCard/powerdns-recursor_check,Shopify/dd-agent,AntoCard/powerdns-recursor_check,ess/dd-agent,Mashape/dd-agent,yuecong/dd-agent,jshum/dd-agent,indeedops/dd-agent,GabrielNicolasAvellaneda/dd-agent,gphat/dd-agent,oneandoneis2/dd-agent,brettlangdon/dd-agent,pfmooney/dd-agent,zendesk/dd-agent,brettlangdon/dd-agent,AntoCard/powerdns-recursor_check,c960657/dd-agent,polynomial/dd-agent,pmav99/praktoras,urosgruber/dd-agent,packetloop/dd-agent,oneandoneis2/dd-agent,pfmooney/dd-agent,guruxu/dd-agent,PagerDuty/dd-agent,GabrielNicolasAvellaneda/dd-agent,relateiq/dd-agent,AniruddhaSAtre/dd-agent,huhongbo/dd-agent,jvassev/dd-agent,c960657/dd-agent,GabrielNicolasAvellaneda/dd-agent,packetloop/dd-agent,lookout/dd-agent,tebriel/dd-agent,urosgruber/dd-agent,takus/dd-agent,AniruddhaSAtre/dd-agent,JohnLZeller/dd-agent,Mashape/dd-agent,c960657/dd-agent,jyogi/purvar-agent,citrusleaf/dd-agent,tebriel/dd-agent,yuecong/dd-agent,packetloop/dd-agent,huhongbo/dd-agent,PagerDuty/dd-agent,indeedops/dd-agent,mderomph-coolblue/dd-agent,jraede/dd-agent,indeedops/dd-agent,ess/dd-agent,jvassev/dd-agent,Wattpad/dd-agent,eeroniemi/dd-agent,lookout/dd-agent,Shopify/dd-agent,jraede/dd-agent,guruxu/dd-agent,yuecong/dd-agent,brettlangdon/dd-agent,PagerDuty/dd-agent,jamesandariese/dd-agent,a20012251/dd-agent,amalakar/dd-agent,Shopify/dd-agent,lookout/dd-agent,mderomph-coolblue/dd-age
nt,joelvanvelden/dd-agent,yuecong/dd-agent,AniruddhaSAtre/dd-agent,urosgruber/dd-agent,remh/dd-agent,pmav99/praktoras,eeroniemi/dd-agent,Shopify/dd-agent,remh/dd-agent,takus/dd-agent,truthbk/dd-agent
|
c9d1a3ad2c3c64f49ec83cf8d09cc6d35915990c
|
airtravel.py
|
airtravel.py
|
"""Model for aircraft flights"""
class Flight:
def __init__(self, number):
if not number[:4].isalpha():
raise ValueError("No airline code in '{}'".format(number))
if not number[:4].isupper():
raise ValueError("Invalid airline code'{}'".format(number))
if not (number[4:].isdigit() and int(number[4:]) <= 999999):
raise ValueError("Invalid route number '{}'".format(number))
self._number = number
def number(self):
return self._number
def airline(self):
return self._number[:4]
class Aircraft:
def __init__(self, registration, model, num_rows, num_seats_per_row):
self._registration = registration
self._model = model
self._num_rows = num_rows
self._num_seats_per_row = num_seats_per_row
def registration(self):
return self._registration
def model(self):
return self._model
def seating_plan(self):
return (range(1, self._num_rows + 1),
"ABCDEFGHJKLMNOP"[:self._num_seats_per_row])
|
"""Model for aircraft flights"""
class Flight:
"""A flight with a specific passenger aircraft."""
def __init__(self, number, aircraft):
if not number[:4].isalpha():
raise ValueError("No airline code in '{}'".format(number))
if not number[:4].isupper():
raise ValueError("Invalid airline code'{}'".format(number))
if not (number[4:].isdigit() and int(number[4:]) <= 999999):
raise ValueError("Invalid route number '{}'".format(number))
self._number = number
self._aircraft = aircraft
rows, seats = self._aircraft.seating_plan()
self._seating = [None] + [{letter: None for letter in seats} for _ in rows]
def number(self):
return self._number
def airline(self):
return self._number[:4]
def aircraft_model(self):
return self._aircraft.model()
class Aircraft:
def __init__(self, registration, model, num_rows, num_seats_per_row):
self._registration = registration
self._model = model
self._num_rows = num_rows
self._num_seats_per_row = num_seats_per_row
def registration(self):
return self._registration
def model(self):
return self._model
def seating_plan(self):
return (range(1, self._num_rows + 1),
"ABCDEFGHJKLMNOP"[:self._num_seats_per_row])
|
Add aircraft and seating arrangement to Flight
|
Add aircraft and seating arrangement to Flight
|
Python
|
mit
|
kentoj/python-fundamentals
|
adcb7af597c77d85eb9234d91e2c0bd8575630e1
|
fcm_django/api/__init__.py
|
fcm_django/api/__init__.py
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceResource, GCMDeviceResource, WNSDeviceResource, APNSDeviceAuthenticatedResource, \
GCMDeviceAuthenticatedResource, WNSDeviceAuthenticatedResource
__all__ = [
"APNSDeviceResource",
"GCMDeviceResource",
"WNSDeviceResource",
"APNSDeviceAuthenticatedResource",
"GCMDeviceAuthenticatedResource",
"WNSDeviceAuthenticatedResource",
]
|
from django.conf import settings
if "tastypie" in settings.INSTALLED_APPS:
# Tastypie resources are importable from the api package level (backwards compatibility)
from .tastypie import APNSDeviceAuthenticatedResource, FCMDeviceResource
__all__ = [
"APNSDeviceAuthenticatedResource",
"FCMDeviceResource",
]
|
Remove references to old resources
|
Remove references to old resources
|
Python
|
mit
|
xtrinch/fcm-django
|
0e5b2af3fe04bd12b95b15215db0416b79c25df6
|
fake_useragent/fake.py
|
fake_useragent/fake.py
|
import os
import random
try:
import json
except ImportError:
import simplejson as json
from fake_useragent import settings
from fake_useragent.build import build_db
class UserAgent(object):
def __init__(self):
super(UserAgent, self).__init__()
# check db json file exists
if not os.path.isfile(settings.DB):
build_db()
# no codecs\with for python 2.5
f = open(settings.DB, 'r')
self.data = json.loads(f.read())
f.close()
def __getattr__(self, attr):
attr = attr.replace(' ', '').replace('_', '').lower()
if attr == 'random':
attr = self.data['randomize'][
str(random.randint(0, self.data['max_random'] - 1))
]
elif attr == 'ie':
attr = 'internetexplorer'
elif attr == 'msie':
attr = 'internetexplorer'
elif attr == 'google':
attr = 'chrome'
elif attr == 'ff':
attr = 'firefox'
return self.data['browsers'][attr][
random.randint(0, settings.BROWSERS_COUNT_LIMIT - 1)
]
|
import os
import random
try:
import json
except ImportError:
import simplejson as json
from fake_useragent import settings
from fake_useragent.build import build_db
class UserAgent(object):
def __init__(self):
super(UserAgent, self).__init__()
# check db json file exists
if not os.path.isfile(settings.DB):
build_db()
# no codecs\with for python 2.5
f = open(settings.DB, 'r')
self.data = json.loads(f.read())
f.close()
def __getattr__(self, attr):
attr = attr.replace(' ', '').replace('_', '').lower()
if attr == 'random':
attr = self.data['randomize'][
str(random.randint(0, self.data['max_random'] - 1))
]
elif attr == 'ie':
attr = 'internetexplorer'
elif attr == 'msie':
attr = 'internetexplorer'
elif attr == 'google':
attr = 'chrome'
elif attr == 'ff':
attr = 'firefox'
try:
return self.data['browsers'][attr][
random.randint(0, settings.BROWSERS_COUNT_LIMIT - 1)
]
except KeyError:
return None
|
Fix non existing useragents shortcut
|
Fix non existing useragents shortcut
|
Python
|
apache-2.0
|
sebalas/fake-useragent,hellysmile/fake-useragent,hellysmile/fake-useragent,hellysmile/fake-useragent,mochawich/fake-useragent
|
13dc6443500d09432c6410b766c5c6eda05fdf7a
|
froide/publicbody/forms.py
|
froide/publicbody/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
is_multi = False
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
is_multi = True
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from froide.helper.form_utils import JSONMixin
from .models import PublicBody
from .widgets import PublicBodySelect
class PublicBodyForm(JSONMixin, forms.Form):
publicbody = forms.ModelChoiceField(
queryset=PublicBody.objects.all(),
widget=PublicBodySelect,
label=_("Search for a topic or a public body:")
)
is_multi = False
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = self.cleaned_data
return data
def get_publicbodies(self):
if self.is_valid():
return [self.cleaned_data['publicbody']]
return []
class MultiplePublicBodyForm(PublicBodyForm):
publicbody = forms.ModelMultipleChoiceField(
queryset=PublicBody.objects.all(),
label=_("Search for a topic or a public body:")
)
is_multi = True
def get_publicbodies(self):
if self.is_valid():
return self.cleaned_data['publicbody']
return []
def as_data(self):
data = super(PublicBodyForm, self).as_data()
if self.is_bound and self.is_valid():
data['cleaned_data'] = {
'publicbody': [x.as_data() for x in
self.cleaned_data['publicbody']]
}
return data
|
Add as_data to multiple publicbody form
|
Add as_data to multiple publicbody form
|
Python
|
mit
|
stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide
|
96cbe6cd5b1d86663fe44c7fb4351fdb9bf7b2eb
|
metafunctions/map.py
|
metafunctions/map.py
|
import typing as tp
import itertools
from metafunctions.concurrent import FunctionMerge
from metafunctions.operators import concat
class MergeMap(FunctionMerge):
def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
super().__init__(merge_function, (function, ))
def _get_call_iterators(self, args):
'''
Each element in args is an iterable.
'''
args_iter = zip(*args)
# Note that EVERY element in the func iter will be called, so we need to make sure the
# length of our iterator is the same as the shortest iterable we received.
shortest_arg = min(args, key=len)
func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
return args_iter, func_iter
def _call_function(self, f, args:tuple, kwargs:dict):
'''In MergeMap, args will be a single element tuple containing the args for this function.
'''
return f(*args[0], **kwargs)
def __str__(self):
return f'mmap({self.functions[0]!s})'
def __repr__(self):
return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'
|
import typing as tp
import itertools
from metafunctions.concurrent import FunctionMerge
from metafunctions.operators import concat
class MergeMap(FunctionMerge):
def __init__(self, function:tp.Callable, merge_function:tp.Callable=concat):
'''
MergeMap is a FunctionMerge with only one function. When called, it behaves like the
builtin `map` function and calls its function once per item in the iterable(s) it receives.
'''
super().__init__(merge_function, (function, ))
def _get_call_iterators(self, args):
'''
Each element in args is an iterable.
'''
args_iter = zip(*args)
# Note that EVERY element in the func iter will be called, so we need to make sure the
# length of our iterator is the same as the shortest iterable we received.
shortest_arg = min(args, key=len)
func_iter = itertools.repeat(self.functions[0], len(shortest_arg))
return args_iter, func_iter
def _call_function(self, f, args:tuple, kwargs:dict):
'''In MergeMap, args will be a single element tuple containing the args for this function.
'''
return f(*args[0], **kwargs)
def __str__(self):
return f'mmap({self.functions[0]!s})'
def __repr__(self):
return f'{self.__class__.__name__}({self.functions[0]}, merge_function={self._merge_func})'
|
Add a docstring for MergeMap
|
Add a docstring for MergeMap
|
Python
|
mit
|
ForeverWintr/metafunctions
|
c465b1f0c995ac2cb7c6c8b4ad5f721f800e2864
|
argparams.py
|
argparams.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""ARG parameters class
"""
from __future__ import print_function, division
class ARGparams(object):
"""Class for ARG model parameters.
Attributes
----------
scale : float
rho : float
delta : float
Methods
-------
convert_to_theta
Convert parameters to the vector
"""
def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None):
"""Initialize the class instance.
"""
if theta:
assert len(theta) == 3, "Wrong number of parameters in theta!"
[scale, rho, delta] = theta
self.scale = scale
self.rho = rho
self.delta = delta
assert scale > 0, "Scale must be greater than zero!"
self.beta = self.rho / self.scale
self.theta = [scale, rho, delta]
def __repr__(self):
"""This is what is shown when you interactively explore the instance.
"""
params = (self.scale, self.rho, self.delta)
string = "scale = %.2f, rho = %.2f, delta = %.2f" % params
return string
def __str__(self):
"""This is what is shown when you print() the instance.
"""
return self.__repr__()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""ARG parameters class
"""
from __future__ import print_function, division
class ARGparams(object):
"""Class for ARG model parameters.
Attributes
----------
scale : float
rho : float
delta : float
beta : float
theta : list
Raises
------
AssertionError
"""
def __init__(self, scale=.001, rho=.9, delta=1.1, theta=None):
"""Initialize the class instance.
"""
if theta:
assert len(theta) == 3, "Wrong number of parameters in theta!"
[scale, rho, delta] = theta
self.scale = scale
self.rho = rho
self.delta = delta
assert scale > 0, "Scale must be greater than zero!"
self.beta = self.rho / self.scale
self.theta = [scale, rho, delta]
def __repr__(self):
"""This is what is shown when you interactively explore the instance.
"""
params = (self.scale, self.rho, self.delta)
string = "scale = %.2f, rho = %.2f, delta = %.2f" % params
return string
def __str__(self):
"""This is what is shown when you print() the instance.
"""
return self.__repr__()
|
Fix incorrect attributes in ARGparams class
|
Fix incorrect attributes in ARGparams class
|
Python
|
mit
|
khrapovs/argamma
|
037c2bc9857fc1feb59f7d4ad3cb81575177e675
|
src/smsfly/versiontools.py
|
src/smsfly/versiontools.py
|
"""Version tools set."""
import os
from setuptools_scm import get_version
def get_version_from_scm_tag(
*,
root='.',
relative_to=None,
local_scheme='node-and-date',
) -> str:
"""Retrieve the version from SCM tag in Git or Hg."""
try:
return get_version(
root=root,
relative_to=relative_to,
local_scheme=local_scheme,
)
except LookupError:
return 'unknown'
def cut_local_version_on_upload(version):
"""Return empty local version if uploading to PyPI."""
is_pypi_upload = os.getenv('PYPI_UPLOAD') == 'true'
if is_pypi_upload:
return ''
import setuptools_scm.version # only available during setup time
return setuptools_scm.version.get_local_node_and_date(version)
def get_self_version():
"""Calculate the version of the dist itself."""
return get_version_from_scm_tag(local_scheme=cut_local_version_on_upload)
|
"""Version tools set."""
import os
from setuptools_scm import get_version
def get_version_from_scm_tag(
*,
root='.',
relative_to=None,
local_scheme='node-and-date',
):
"""Retrieve the version from SCM tag in Git or Hg."""
try:
return get_version(
root=root,
relative_to=relative_to,
local_scheme=local_scheme,
)
except LookupError:
return 'unknown'
def cut_local_version_on_upload(version):
"""Return empty local version if uploading to PyPI."""
is_pypi_upload = os.getenv('PYPI_UPLOAD') == 'true'
if is_pypi_upload:
return ''
import setuptools_scm.version # only available during setup time
return setuptools_scm.version.get_local_node_and_date(version)
def get_self_version():
"""Calculate the version of the dist itself."""
return get_version_from_scm_tag(local_scheme=cut_local_version_on_upload)
|
Drop func annotations for the sake of Python 3.5
|
Drop func annotations for the sake of Python 3.5
|
Python
|
mit
|
wk-tech/python-smsfly
|
c2598058722531662aab8831640fc367689d2a43
|
tests/utils/test_process_word_vectors.py
|
tests/utils/test_process_word_vectors.py
|
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://s3-us-west-1.amazonaws.com/fasttext-vectors/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
|
import inspect
import os
import pytest
import numpy as np
from subprocess import call
from utils.preprocess_text_word_vectors import txtvec2npy
def test_text_word2vec2npy():
# check whether files are present in folder
vectors_name = 'wiki.fiu_vro.vec'
path = os.path.dirname(inspect.getfile(inspect.currentframe()))
if not os.path.exists(path + '/' + vectors_name):
call(["wget https://dl.fbaipublicfiles.com/fasttext/vectors-wiki/" + vectors_name + " -O " +
path + "/" + vectors_name],
shell=True)
txtvec2npy(path + '/' + vectors_name, './', vectors_name[:-4])
vectors = np.load('./' + vectors_name[:-4] + '.npy').item()
assert len(list(vectors)) == 8769
assert vectors['kihlkunnan'].shape[0] == 300
if __name__ == '__main__':
pytest.main([__file__])
|
Update Fasttext pretrained vectors location
|
Update Fasttext pretrained vectors location
|
Python
|
mit
|
lvapeab/nmt-keras,lvapeab/nmt-keras
|
760447a190b2908d47b14adaa6b1ad1a9369524c
|
app/cron_tasks.py
|
app/cron_tasks.py
|
import logging
import os
import sys
# Tweak import path so that httplib2 (which lives in datasources) can be
# imported as httplib2 while the app is running.
# TODO(mihaip): move httplib2 (and oauth2 and python-twitter) into a third_party
# directory.
APP_DIR = os.path.abspath(os.path.dirname(__file__))
DATASOURCES_DIR = os.path.join(APP_DIR, 'datasources')
sys.path.insert(0, DATASOURCES_DIR)
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
import feedplayback.handlers
def main():
application = webapp.WSGIApplication([
('/cron/feed-playback/advance', feedplayback.handlers.AdvanceCronHandler),
('/tasks/feed-playback/advance', feedplayback.handlers.AdvanceTaskHandler),
],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
|
import logging
import os
import sys
from google.appengine.dist import use_library
use_library('django', '1.2')
os.environ['DJANGO_SETTINGS_MODULE'] = 'django_settings'
# Tweak import path so that httplib2 (which lives in datasources) can be
# imported as httplib2 while the app is running.
# TODO(mihaip): move httplib2 (and oauth2 and python-twitter) into a third_party
# directory.
APP_DIR = os.path.abspath(os.path.dirname(__file__))
DATASOURCES_DIR = os.path.join(APP_DIR, 'datasources')
sys.path.insert(0, DATASOURCES_DIR)
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
import feedplayback.handlers
def main():
application = webapp.WSGIApplication([
('/cron/feed-playback/advance', feedplayback.handlers.AdvanceCronHandler),
('/tasks/feed-playback/advance', feedplayback.handlers.AdvanceTaskHandler),
],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
|
Use Django 1.2 for the cron/tasks instance too.
|
Use Django 1.2 for the cron/tasks instance too.
|
Python
|
apache-2.0
|
mihaip/streamspigot,mihaip/streamspigot,mihaip/streamspigot,mihaip/streamspigot
|
3962b88d764c7179f7b051153b337d180a3ba8f4
|
oneflow/settings/snippets/djdt.py
|
oneflow/settings/snippets/djdt.py
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# leto.licorn.org
'82.236.133.193',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'template_timings_panel.panels.TemplateTimings.TemplateTimings',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
# Debug-toolbar related
INSTALLED_APPS += ('debug_toolbar', )
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware', )
INTERNAL_IPS = (
'127.0.0.1',
# gurney.licorn.org
'109.190.93.141',
# my LAN
'192.168.111.23',
'192.168.111.111',
)
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'template_timings_panel.panels.TemplateTimings.TemplateTimings',
'debug_toolbar.panels.logger.LoggingPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.version.VersionDebugPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'ENABLE_STACKTRACES' : True,
#'SHOW_TOOLBAR_CALLBACK': custom_show_toolbar,
#'EXTRA_SIGNALS': ['myproject.signals.MySignal'],
#'HIDE_DJANGO_SQL': False,
#'TAG': 'div',
}
|
Fix the django debug toolbar not appearing and the user echo thing slowing my loadings in development.
|
Fix the django debug toolbar not appearing and the user echo thing slowing my loadings in development.
|
Python
|
agpl-3.0
|
WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow
|
bca338a0f945e74c97b4d7dd044090ed3b3f5b11
|
aspen/tests/test_restarter.py
|
aspen/tests/test_restarter.py
|
from aspen import restarter
from aspen.tests.fsfix import attach_teardown
class Foo:
pass
def test_startup_basically_works():
website = Foo()
website.changes_kill = True
website.dotaspen = 'bar'
website.root = 'foo'
restarter.startup(website)
expected = []
actual = restarter.extras
assert actual == expected, actual
attach_teardown(globals())
|
from aspen.cli import restarter
from aspen.tests.fsfix import attach_teardown
class Foo:
pass
def test_startup_basically_works():
website = Foo()
website.changes_kill = True
website.dotaspen = 'bar'
website.root = 'foo'
restarter.install(website)
expected = []
actual = restarter.extras
assert actual == expected, actual
attach_teardown(globals())
|
Fix up test for recent changes to restarter.
|
Fix up test for recent changes to restarter.
|
Python
|
mit
|
gratipay/aspen.py,gratipay/aspen.py
|
dd39c73f9044815e82fa950f605b4b929d4f17f5
|
assistscraper/lxml_helpers.py
|
assistscraper/lxml_helpers.py
|
from lxml import html
def document(resource_name):
return html.parse("http://www.assist.org/web-assist/" + resource_name)
# TODO: catch IndexErrors in callers
def find_by_name(tag, name, *, parent):
return parent.xpath('//{tag}[@name="{name}"]'.format(tag=tag,
name=name))[0]
def find_select(name, *, parent):
return find_by_name("select", name, parent=parent)
def option_labels(select):
# Converting to list just because it makes the semantics cleaner, without
# performance impact
return list(select.itertext(tag="option", with_tail=False))
|
from lxml import html
def document(resource_name):
return html.parse("http://www.assist.org/web-assist/" + resource_name)
def find_by_name(tag, name, *, parent):
return parent.find('.//{tag}[@name="{name}"]'.format(tag=tag, name=name))
def find_select(name, *, parent):
return find_by_name("select", name, parent=parent)
def option_labels(select):
# Converting to list just because it makes the semantics cleaner, without
# performance impact
return list(select.itertext(tag="option", with_tail=False))
|
Return None instead of IndexError when there is no match!
|
Return None instead of IndexError when there is no match!
|
Python
|
mit
|
karinassuni/assistscraper
|
68f68a7c29dd49a9306445d02f5a7050aa84259e
|
openacademy/model/openacademy_course.py
|
openacademy/model/openacademy_course.py
|
from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string ='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many(
'openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
|
from openerp import models, fields, api
'''
This module create model of Course
'''
class Course(models.Model):
'''
This class create model of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string ='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null',
string="Responsible", index=True)
session_ids = fields.One2many(
'openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one # api.one send defaults params: cr, uid, id, context
def copy(self, default=None):
print "estoy pasando por la funcion heredada de copy en cursos"
# default['name'] = self.name + ' (copy)'
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
|
Modify copy method into inherit
|
[REF] openacademy: Modify copy method into inherit
|
Python
|
apache-2.0
|
KarenKawaii/openacademy-project
|
6b5e0249374f1adc7e6eafb2e050cd6a2f03d1c9
|
examples/create_repository.py
|
examples/create_repository.py
|
from pyolite import Pyolite
# initial olite object
admin_repository = '~/presslabs/ansible-playbooks/gitolite-admin'
olite = Pyolite(admin_repository=admin_repository)
# create a repo
repo = olite.repos.get_or_create('awesome_name')
repo = olite.repos.get('awesome_name')
repo = olite.repos.create('awesome_name')
# add a new user to repo
repo.users.add('bob', permissions='RW+', path_key='~/.ssh/id_rsa.pub',
raw_key='my-awesome-key')
# add an existing user to repo
repo.users.add('alice')
# show users from repos
print repo.users.all()
# remove user
repo.users.delete('alice')
|
from pyolite import Pyolite
# initial olite object
admin_repository = '~/presslabs/ansible-playbooks/gitolite-admin'
olite = Pyolite(admin_repository=admin_repository)
# create a repo
repo = olite.repos.get_or_create('awesome_name')
repo = olite.repos.get('awesome_name')
repo = olite.repos.create('awesome_name')
# add a new user to repo
bob = repo.users.add('bob', permissions='RW+', key_path='~/.ssh/id_rsa.pub',
key='my-awesome-key')
# add an existing user to repo
repo.users.add('alice', permission='R')
repo.users.modify('alice', permission='W+')
# show users from repos
print repo.users.all()
# remove user
repo.users.delete('alice')
alice = olite.users.get_or_create('alice')
# alice.keys.append('key1')
# alice.repos => list
|
Change the api a little bit
|
Change the api a little bit
|
Python
|
bsd-2-clause
|
shawkinsl/pyolite,PressLabs/pyolite
|
840dce03718947498e72e561e7ddca22c4174915
|
django_olcc/olcc/context_processors.py
|
django_olcc/olcc/context_processors.py
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
record = ImportRecord.objects.latest()
if record:
return {
'last_updated': record.created_at
}
|
from olcc.models import ImportRecord
"""
Inject the last import date into the request context.
"""
def last_updated(request):
try:
return {
'last_updated': ImportRecord.objects.latest().created_at
}
except ImportRecord.DoesNotExist:
pass
|
Fix a DoesNotExist bug in the olcc context processor.
|
Fix a DoesNotExist bug in the olcc context processor.
|
Python
|
mit
|
twaddington/django-olcc,twaddington/django-olcc,twaddington/django-olcc
|
7c4e372ec901e88ed0c6193a5c06f94a4bbc418b
|
EC2/create_instance.py
|
EC2/create_instance.py
|
import boto3
import botocore
import time
ec2 = boto3.resource('ec2', region_name='us-east-1')
client = boto3.client('ec2')
# Create a security group
try:
sg = ec2.create_security_group(GroupName='jupyter', Description='EC2 for Jupyter Notebook')
response = client.authorize_security_group_ingress(GroupName='jupyter', IpPermissions=[{'PrefixListIds': [], 'UserIdGroupPairs': [], 'IpRanges': [{'CidrIp': '0.0.0.0/0'}], 'IpProtocol': 'tcp', 'Ipv6Ranges': [{'CidrIpv6': '::/0'}], 'ToPort': 8888, 'FromPort': 8888}])
print("create a security group")
except botocore.exceptions.ClientError as e:
sg = client.describe_security_groups(GroupNames=['jupyter'])
print("the security group exist")
o = ec2.create_instances(ImageId='ami-e36637f5', MinCount=1, MaxCount=1, InstanceType='i3.xlarge', SecurityGroups=['jupyter'])
print_res = False
while (not print_res):
time.sleep(1)
for i in ec2.instances.filter(InstanceIds=[o[0].id]):
if i.public_ip_address is not None:
print("The public IP address: " + str(i.public_ip_address))
print_res = True
|
import boto3
import botocore
import time
ec2 = boto3.resource('ec2', region_name='us-east-1')
client = boto3.client('ec2')
# Create a security group
try:
sg = ec2.create_security_group(GroupName='jupyter', Description='EC2 for Jupyter Notebook')
response = client.authorize_security_group_ingress(GroupName='jupyter', IpPermissions=[{'PrefixListIds': [], 'UserIdGroupPairs': [], 'IpRanges': [{'CidrIp': '0.0.0.0/0'}], 'IpProtocol': 'tcp', 'Ipv6Ranges': [{'CidrIpv6': '::/0'}], 'ToPort': 8888, 'FromPort': 8888}])
print("create a security group")
except botocore.exceptions.ClientError as e:
sg = client.describe_security_groups(GroupNames=['jupyter'])
print("the security group exist")
o = ec2.create_instances(ImageId='ami-622a0119', MinCount=1, MaxCount=1, InstanceType='i3.8xlarge', SecurityGroups=['jupyter'])
print_res = False
while (not print_res):
time.sleep(1)
for i in ec2.instances.filter(InstanceIds=[o[0].id]):
if i.public_ip_address is not None:
print("The public IP address: " + str(i.public_ip_address))
print_res = True
|
Update the script to create EC2 instance.
|
Update the script to create EC2 instance.
This creates an EC2 i3.8xlarge.
|
Python
|
apache-2.0
|
flashxio/FlashX,icoming/FlashX,flashxio/FlashX,icoming/FlashX,flashxio/FlashX,flashxio/FlashX,icoming/FlashX,flashxio/FlashX,icoming/FlashX,icoming/FlashX,flashxio/FlashX
|
dba918008892214e56bebc8684839f16ae7d7325
|
src/engine/request_handler.py
|
src/engine/request_handler.py
|
import Queue
import json
import EBQP
from . import world
from . import types
from . import consts
from . import loc
class GameRequestHandler:
def __init__(self):
self.world = None
self.responses = {
EBQP.new: self.respond_new,
}
def process(self, request):
request_pieces = request.split(EBQP.packet_delimiter, 1)
command = request_pieces[0]
params = request_pieces[1].strip() if len(request_pieces) > 1 else ''
try:
json_args = json.loads(params)
except Exception as e:
return "process:failure:bad json"
if command in self.responses:
return self.responses[command](json_args)
else:
return "process:failure:unsupported command"
def respond_new(self, args):
uids = args['uids']
self.world = world.World(uids)
self.world.add_unit(uids[0], types.new_unit('Tank', consts.RED, loc.Loc(3, 3)))
self.responses = {
EBQP.view: self.respond_view,
EBQP.move: self.respond_move,
}
return 'new:success'
def respond_view(self, args):
return 'view:success:%s' % self.world.to_json()
#TODO
def respond_move(self, args):
return 'move:failure:unimplemented'
|
import Queue
import json
import EBQP
from . import world
from . import types
from . import consts
from . import loc
class GameRequestHandler:
def __init__(self):
self.world = None
self.responses = {
EBQP.new: self.respond_new,
}
def process(self, request):
request_pieces = request.split(EBQP.packet_delimiter, 1)
command = request_pieces[0]
params = request_pieces[1].strip() if len(request_pieces) > 1 else ''
try:
json_args = json.loads(params)
except Exception as e:
return "process:failure:bad json"
if command in self.responses:
return self.responses[command](json_args)
else:
return "process:failure:unsupported command"
def respond_new(self, args):
uids = args['uids']
self.world = world.World(uids)
if 'debug' in args:
self.world.add_unit(uids[0], types.new_unit('Tank', consts.RED, loc.Loc(3, 3)))
self.responses = {
EBQP.view: self.respond_view,
EBQP.move: self.respond_move,
}
return 'new:success'
def respond_view(self, args):
return 'view:success:%s' % self.world.to_json()
#TODO
def respond_move(self, args):
return 'move:failure:unimplemented'
|
Debug flag to insert tank into game board
|
Debug flag to insert tank into game board
|
Python
|
mit
|
Tactique/game_engine,Tactique/game_engine
|
c958a314dc8ceb72e34ed969d3cff3751d513a49
|
grab/tools/progress.py
|
grab/tools/progress.py
|
import sys
import logging
logger = logging.getLogger('grab.tools.progress')
class Progress(object):
def __init__(self, step=None, total=None, stop=None, name='items', level=logging.DEBUG):
if not total and not step:
raise Exception('Both step and total arguments are None')
if total and not step:
step = int(total / 20)
self.step = step
self.count = 0
self.total = total
self.stop = stop
self.name = name
self.logging_level = level
def tick(self):
self.count += 1
if not self.count % self.step:
if self.total:
percents = ' [%d%%]' % int((self.count / float(self.total)) * 100)
else:
percents = ''
logger.log(self.logging_level, 'Processed %d %s%s' % (self.count, self.name, percents))
if self.count == self.stop:
logger.log(self.logging_level, 'Reached stop value %d' % self.stop)
sys.exit()
|
import sys
import logging
logger = logging.getLogger('grab.tools.progress')
class Progress(object):
def __init__(self, step=None, total=None, stop=None, name='items', level=logging.DEBUG):
if total is None and step is None:
raise Exception('Both step and total arguments are None')
if total and not step:
step = int(total / 20)
if step == 0:
step = total
self.step = step
self.count = 0
self.total = total
self.stop = stop
self.name = name
self.logging_level = level
def tick(self):
self.count += 1
if not self.count % self.step:
if self.total:
percents = ' [%d%%]' % int((self.count / float(self.total)) * 100)
else:
percents = ''
logger.log(self.logging_level, 'Processed %d %s%s' % (self.count, self.name, percents))
if self.count == self.stop:
logger.log(self.logging_level, 'Reached stop value %d' % self.stop)
sys.exit()
|
Fix bug in Progress object
|
Fix bug in Progress object
|
Python
|
mit
|
codevlabs/grab,liorvh/grab,giserh/grab,alihalabyah/grab,SpaceAppsXploration/grab,istinspring/grab,DDShadoww/grab,subeax/grab,subeax/grab,istinspring/grab,pombredanne/grab-1,codevlabs/grab,giserh/grab,lorien/grab,huiyi1990/grab,maurobaraldi/grab,subeax/grab,lorien/grab,kevinlondon/grab,alihalabyah/grab,pombredanne/grab-1,SpaceAppsXploration/grab,huiyi1990/grab,DDShadoww/grab,maurobaraldi/grab,kevinlondon/grab,liorvh/grab,raybuhr/grab,raybuhr/grab,shaunstanislaus/grab,shaunstanislaus/grab
|
6a63fc4abd524da96ee09bfa94f7eae534a9834e
|
tests/managers/object_storage_tests.py
|
tests/managers/object_storage_tests.py
|
"""
SoftLayer.tests.managers.object_storage_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
import SoftLayer
from SoftLayer import fixtures
from SoftLayer import testing
class ObjectStorageTests(testing.TestCase):
def set_up(self):
self.object_storage = SoftLayer.ObjectStorageManager(self.client)
def test_list_accounts(self):
accounts = self.object_storage.list_accounts()
self.assertEquals(accounts,
fixtures.SoftLayer_Account.getHubNetworkStorage)
def test_list_endpoints(self):
accounts = self.set_mock('SoftLayer_Account', 'getHubNetworkStorage')
accounts.return_value = {
'storageNodes': [{
'datacenter': {'name': 'dal05'},
'frontendIpAddress': 'https://dal05/auth/v1.0/',
'backendIpAddress': 'https://dal05/auth/v1.0/'}
],
}
endpoints = self.object_storage.list_endpoints()
self.assertEquals(endpoints,
[{'datacenter': {'name': 'dal05'},
'private': 'https://dal05/auth/v1.0/',
'public': 'https://dal05/auth/v1.0/'}])
|
"""
SoftLayer.tests.managers.object_storage_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
import SoftLayer
from SoftLayer import fixtures
from SoftLayer import testing
class ObjectStorageTests(testing.TestCase):
def set_up(self):
self.object_storage = SoftLayer.ObjectStorageManager(self.client)
def test_list_accounts(self):
accounts = self.object_storage.list_accounts()
self.assertEqual(accounts,
fixtures.SoftLayer_Account.getHubNetworkStorage)
def test_list_endpoints(self):
accounts = self.set_mock('SoftLayer_Account', 'getHubNetworkStorage')
accounts.return_value = {
'storageNodes': [{
'datacenter': {'name': 'dal05'},
'frontendIpAddress': 'https://dal05/auth/v1.0/',
'backendIpAddress': 'https://dal05/auth/v1.0/'}
],
}
endpoints = self.object_storage.list_endpoints()
self.assertEqual(endpoints,
[{'datacenter': {'name': 'dal05'},
'private': 'https://dal05/auth/v1.0/',
'public': 'https://dal05/auth/v1.0/'}])
|
Fix small style issue w/ assertEqual vs assertEquals
|
Fix small style issue w/ assertEqual vs assertEquals
|
Python
|
mit
|
softlayer/softlayer-python,nanjj/softlayer-python,allmightyspiff/softlayer-python,kyubifire/softlayer-python,Neetuj/softlayer-python,skraghu/softlayer-python
|
dfce2472c81c84a6e73315f288c41683ede92363
|
pydarkstar/auction/auctionbase.py
|
pydarkstar/auction/auctionbase.py
|
"""
.. moduleauthor:: Adam Gagorik <[email protected]>
"""
import pydarkstar.darkobject
import pydarkstar.database
class AuctionBase(pydarkstar.darkobject.DarkObject):
"""
Base class for Auction House objects.
:param db: database object
"""
def __init__(self, db, rollback=True, fail=False, *args, **kwargs):
super(AuctionBase, self).__init__(*args, **kwargs)
assert isinstance(db, pydarkstar.database.Database)
self._rollback = bool(rollback)
self._fail = bool(fail)
self._db = db
@property
def db(self):
return self._db
@property
def rollback(self):
return self._rollback
@rollback.setter
def rollback(self, value):
self._rollback = bool(value)
@property
def fail(self):
return self._fail
@fail.setter
def fail(self, value):
self._fail = bool(value)
if __name__ == '__main__':
pass
|
"""
.. moduleauthor:: Adam Gagorik <[email protected]>
"""
import pydarkstar.darkobject
import pydarkstar.database
import contextlib
class AuctionBase(pydarkstar.darkobject.DarkObject):
"""
Base class for Auction House objects.
:param db: database object
"""
def __init__(self, db, rollback=True, fail=False, *args, **kwargs):
super(AuctionBase, self).__init__(*args, **kwargs)
assert isinstance(db, pydarkstar.database.Database)
self._rollback = bool(rollback)
self._fail = bool(fail)
self._db = db
def session(self, *args, **kwargs):
"""
Create database session.
"""
return self._db.session(*args, **kwargs)
@contextlib.contextmanager
def scopped_session(self, **kwargs):
"""
Create scoped database session.
"""
_kwargs = dict(rollback=self.rollback, fail=self.fail)
_kwargs.update(**kwargs)
try:
with self._db.scoped_session(**_kwargs) as session:
yield session
finally:
pass
@property
def db(self):
return self._db
@property
def rollback(self):
return self._rollback
@rollback.setter
def rollback(self, value):
self._rollback = bool(value)
@property
def fail(self):
return self._fail
@fail.setter
def fail(self, value):
self._fail = bool(value)
if __name__ == '__main__':
pass
|
Add session and scoped session to AuctionBase.
|
Add session and scoped session to AuctionBase.
|
Python
|
mit
|
AdamGagorik/pydarkstar,LegionXI/pydarkstar
|
510117cb0f487232d1cd0c5392a4514e1dc1b46e
|
scripts/produce_data.py
|
scripts/produce_data.py
|
#!/usr/bin/python
import time
import sys
import random
import rospy
import cv_bridge
import cv
import rospkg
from geometry_msgs.msg import Vector3
class DataTester(object):
def __init__(self, myo_number, mode="zero"):
self.mode = mode
self._myo_name = "myo_" + str(myo_number)
rospy.init_node("baxter_myo_data_producer")
self._pub_pos = rospy.Publisher(self._myo_name + "/position",
Vector3,
queue_size=10)
self._pub_ori = rospy.Publisher(self._myo_name + "/orientation",
Vector3,
queue_size=10)
self.prt_counter = 0
def publish(self):
"""
Publish dummy data
"""
if self.mode is "zero":
self._zero_publish()
elif self.mode is "random":
self._random_publish()
else:
raise Exception("Mode was not recognised")
def _zero_publish(self):
msg = Vector3(0, 0, 0)
self._pub_pos.publish(msg)
self._pub_ori.publish(msg)
self._print_dot()
def _random_publish(self):
msg = Vector3()
msg.x = random.randint(-180, 180) # rotation around x (roll)
msg.y = random.randint(0, 180) # rotation around y (pitch)
msg.z = random.randint(-180, 180) #rotation around z (yaw)
self._pub_pos.publish(msg)
self._pub_ori.publish(msg)
self._print_dot()
def _print_dot(self):
self.prt_counter += 1
if self.prt_counter > 50:
print "."
self.prt_counter = 0
else:
print ".",
sys.stdout.flush()
def main():
"""Publish random dummy myo data at 10 Hz until ROS shuts down."""
dt = DataTester(0, mode="random")
r = rospy.Rate(10)
while not rospy.is_shutdown():
dt.publish()
r.sleep()
if __name__ == "__main__":
main()
|
Add testing publisher script for dummy myo data
|
Add testing publisher script for dummy myo data
|
Python
|
mit
|
ipab-rad/myo_baxter_pc,ipab-rad/myo_baxter_pc,ipab-rad/baxter_myo,ipab-rad/baxter_myo
|
|
1cdc38742e6fc09595a45c28d179125d3771521c
|
euler010.py
|
euler010.py
|
#!/usr/bin/python
"""Project Euler problem 10: sum of all primes below LIMIT."""
from math import sqrt

LIMIT = 2000000


def sum_primes_brute(limit):
    """Sum primes below *limit* by trial division.

    Tests each odd candidate against the primes found so far — slow
    (millions of modulo tests) but simple; kept to cross-check the sieve.
    """
    if limit <= 2:
        return 0
    primes = [2]

    def is_prime(x):
        # Only divisors up to sqrt(x) need testing; the list of primes
        # found so far always contains every prime <= sqrt(x).
        i = 0
        while primes[i] <= sqrt(x):
            if x % primes[i] == 0:
                return False
            i += 1
        primes.append(x)
        return True

    total = 2
    candidate = 3
    while candidate < limit:
        if is_prime(candidate):
            total += candidate
        candidate += 2
    return total


def sum_primes_sieve(limit):
    """Sum primes below *limit* with an odds-only Sieve of Eratosthenes.

    Index i represents the odd number 2*i + 1.

    Fixes two off-by-one bugs in the original script:
    * the crossing loop stopped one prime short of sqrt(limit), so squares
      of primes near sqrt(limit) were counted as prime (e.g. 25 for
      limit=30 — the original only escaped this for LIMIT=2000000 by luck);
    * sievebound = (limit - 1) // 2 dropped the largest odd below an even
      limit (e.g. 7 was never considered for limit=8).
    """
    if limit <= 2:
        return 0
    # Indices 1 .. sievebound-1 cover the odd numbers 3 .. < limit.
    sievebound = limit // 2
    sieve = [False] * sievebound
    for i in range(1, sievebound):
        v = 2 * i + 1
        if v * v >= limit:
            break  # all remaining unmarked entries are prime
        if not sieve[i]:
            # Mark v*v, v*(v+2), ... ; the index of v*v is 2*i*(i+1).
            for j in range(2 * i * (i + 1), sievebound, v):
                sieve[j] = True
    return 2 + sum(2 * i + 1 for i in range(1, sievebound) if not sieve[i])


if __name__ == "__main__":
    print(sum_primes_brute(LIMIT))
    print(sum_primes_sieve(LIMIT))
|
Add solutions for problem 10
|
Add solutions for problem 10
|
Python
|
mit
|
cifvts/PyEuler
|
|
cbf9b7605a31cd67b1c94b8157cb6ae55fd36c69
|
zerver/test_urls.py
|
zerver/test_urls.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import django.core.urlresolvers
from django.test import TestCase
import importlib
from zproject import urls
class URLResolutionTest(TestCase):
"""Verify every view referenced in zproject.urls actually exists.

NOTE(review): relies on Django's private RegexURLPattern attributes
(_callback_str, default_args) — presumably tied to the Django version
in use; confirm before upgrading Django.
"""
def check_function_exists(self, module_name, view):
# Import the module and assert the named view attribute is present.
module = importlib.import_module(module_name)
self.assertTrue(hasattr(module, view), "View %s.%s does not exist" % (module_name, view))
# Tests that all views in urls.v1_api_and_json_patterns exist
def test_rest_api_url_resolution(self):
for pattern in urls.v1_api_and_json_patterns:
# Skip patterns that are not rest_dispatch-style (no callback string
# or no method->view mapping).
if not (hasattr(pattern, "_callback_str") and hasattr(pattern, "default_args")):
continue
for view in pattern.default_args.values():
# The views live in the module that hosts rest_dispatch.
module_name = pattern._callback_str.replace(".rest_dispatch", "")
self.check_function_exists(module_name, view)
# Tests function-based views declared in urls.urlpatterns for
# whether the function exists. We at present do not test the
# class-based views.
def test_non_api_url_resolution(self):
for pattern in urls.urlpatterns:
if not hasattr(pattern, "_callback_str"):
continue
(module_name, base_view) = pattern._callback_str.rsplit(".", 1)
self.check_function_exists(module_name, base_view)
|
Add test that all functions defined in urls.py actually exist.
|
Add test that all functions defined in urls.py actually exist.
This would have caught the create_user_backend issue introduced recently.
|
Python
|
apache-2.0
|
sharmaeklavya2/zulip,AZtheAsian/zulip,rishig/zulip,shubhamdhama/zulip,sup95/zulip,shubhamdhama/zulip,jphilipsen05/zulip,samatdav/zulip,Jianchun1/zulip,zulip/zulip,blaze225/zulip,PhilSk/zulip,vaidap/zulip,dattatreya303/zulip,eeshangarg/zulip,shubhamdhama/zulip,TigorC/zulip,verma-varsha/zulip,mahim97/zulip,hackerkid/zulip,sonali0901/zulip,ahmadassaf/zulip,TigorC/zulip,sharmaeklavya2/zulip,Galexrt/zulip,vikas-parashar/zulip,isht3/zulip,tommyip/zulip,amanharitsh123/zulip,niftynei/zulip,amyliu345/zulip,vabs22/zulip,dhcrzf/zulip,sonali0901/zulip,blaze225/zulip,brainwane/zulip,sharmaeklavya2/zulip,andersk/zulip,zacps/zulip,synicalsyntax/zulip,joyhchen/zulip,KingxBanana/zulip,jphilipsen05/zulip,timabbott/zulip,Jianchun1/zulip,umkay/zulip,andersk/zulip,amanharitsh123/zulip,KingxBanana/zulip,Diptanshu8/zulip,vabs22/zulip,vabs22/zulip,joyhchen/zulip,Frouk/zulip,verma-varsha/zulip,grave-w-grave/zulip,christi3k/zulip,souravbadami/zulip,rht/zulip,TigorC/zulip,Jianchun1/zulip,isht3/zulip,Vallher/zulip,brockwhittaker/zulip,amyliu345/zulip,dawran6/zulip,calvinleenyc/zulip,synicalsyntax/zulip,peguin40/zulip,aakash-cr7/zulip,timabbott/zulip,amyliu345/zulip,jackrzhang/zulip,SmartPeople/zulip,jphilipsen05/zulip,joyhchen/zulip,showell/zulip,Juanvulcano/zulip,rishig/zulip,timabbott/zulip,dwrpayne/zulip,ahmadassaf/zulip,verma-varsha/zulip,Juanvulcano/zulip,Jianchun1/zulip,tommyip/zulip,zacps/zulip,AZtheAsian/zulip,brainwane/zulip,ryanbackman/zulip,amyliu345/zulip,zacps/zulip,punchagan/zulip,ryanbackman/zulip,j831/zulip,Frouk/zulip,niftynei/zulip,shubhamdhama/zulip,punchagan/zulip,rishig/zulip,grave-w-grave/zulip,brainwane/zulip,SmartPeople/zulip,amanharitsh123/zulip,TigorC/zulip,dwrpayne/zulip,paxapy/zulip,christi3k/zulip,Juanvulcano/zulip,Diptanshu8/zulip,AZtheAsian/zulip,arpith/zulip,arpith/zulip,reyha/zulip,Frouk/zulip,sup95/zulip,PhilSk/zulip,brainwane/zulip,jackrzhang/zulip,zulip/zulip,SmartPeople/zulip,mahim97/zulip,rht/zulip,jrowan/zulip,TigorC/zulip,peiwei/zulip,Juanvulcano/zulip,j
831/zulip,synicalsyntax/zulip,paxapy/zulip,krtkmj/zulip,umkay/zulip,peguin40/zulip,samatdav/zulip,PhilSk/zulip,susansls/zulip,timabbott/zulip,calvinleenyc/zulip,dhcrzf/zulip,SmartPeople/zulip,susansls/zulip,jackrzhang/zulip,niftynei/zulip,brainwane/zulip,TigorC/zulip,jackrzhang/zulip,jainayush975/zulip,calvinleenyc/zulip,dawran6/zulip,Galexrt/zulip,cosmicAsymmetry/zulip,cosmicAsymmetry/zulip,dwrpayne/zulip,jainayush975/zulip,grave-w-grave/zulip,dwrpayne/zulip,ryanbackman/zulip,ryansnowboarder/zulip,jainayush975/zulip,andersk/zulip,dhcrzf/zulip,brockwhittaker/zulip,vaidap/zulip,kou/zulip,zulip/zulip,synicalsyntax/zulip,reyha/zulip,Vallher/zulip,Vallher/zulip,aakash-cr7/zulip,ryanbackman/zulip,verma-varsha/zulip,rht/zulip,j831/zulip,synicalsyntax/zulip,KingxBanana/zulip,Vallher/zulip,dhcrzf/zulip,hackerkid/zulip,punchagan/zulip,peguin40/zulip,mohsenSy/zulip,j831/zulip,rht/zulip,sonali0901/zulip,shubhamdhama/zulip,rishig/zulip,sup95/zulip,showell/zulip,souravbadami/zulip,dawran6/zulip,KingxBanana/zulip,showell/zulip,hackerkid/zulip,blaze225/zulip,andersk/zulip,isht3/zulip,rishig/zulip,krtkmj/zulip,Jianchun1/zulip,peiwei/zulip,mohsenSy/zulip,sharmaeklavya2/zulip,rht/zulip,ahmadassaf/zulip,arpith/zulip,jackrzhang/zulip,jrowan/zulip,mohsenSy/zulip,jphilipsen05/zulip,verma-varsha/zulip,zulip/zulip,mahim97/zulip,showell/zulip,dhcrzf/zulip,jrowan/zulip,brainwane/zulip,peguin40/zulip,j831/zulip,jphilipsen05/zulip,grave-w-grave/zulip,vaidap/zulip,samatdav/zulip,JPJPJPOPOP/zulip,aakash-cr7/zulip,ryansnowboarder/zulip,umkay/zulip,timabbott/zulip,mahim97/zulip,KingxBanana/zulip,reyha/zulip,AZtheAsian/zulip,vikas-parashar/zulip,tommyip/zulip,arpith/zulip,shubhamdhama/zulip,JPJPJPOPOP/zulip,dattatreya303/zulip,isht3/zulip,timabbott/zulip,ryansnowboarder/zulip,cosmicAsymmetry/zulip,sharmaeklavya2/zulip,cosmicAsymmetry/zulip,joyhchen/zulip,Diptanshu8/zulip,peiwei/zulip,jackrzhang/zulip,Frouk/zulip,eeshangarg/zulip,andersk/zulip,cosmicAsymmetry/zulip,souravbadami/zulip,JPJPJPOPOP/zuli
p,isht3/zulip,peiwei/zulip,vaidap/zulip,PhilSk/zulip,brockwhittaker/zulip,Galexrt/zulip,susansls/zulip,ahmadassaf/zulip,dhcrzf/zulip,sharmaeklavya2/zulip,umkay/zulip,dattatreya303/zulip,verma-varsha/zulip,ryansnowboarder/zulip,susansls/zulip,amanharitsh123/zulip,arpith/zulip,dawran6/zulip,calvinleenyc/zulip,dattatreya303/zulip,ryanbackman/zulip,dhcrzf/zulip,dattatreya303/zulip,krtkmj/zulip,Galexrt/zulip,zacps/zulip,AZtheAsian/zulip,mohsenSy/zulip,eeshangarg/zulip,Vallher/zulip,eeshangarg/zulip,punchagan/zulip,vikas-parashar/zulip,grave-w-grave/zulip,zacps/zulip,blaze225/zulip,mahim97/zulip,showell/zulip,jrowan/zulip,krtkmj/zulip,Galexrt/zulip,umkay/zulip,jrowan/zulip,SmartPeople/zulip,PhilSk/zulip,punchagan/zulip,vaidap/zulip,peiwei/zulip,calvinleenyc/zulip,ryanbackman/zulip,rishig/zulip,kou/zulip,niftynei/zulip,jrowan/zulip,rht/zulip,jainayush975/zulip,SmartPeople/zulip,souravbadami/zulip,reyha/zulip,shubhamdhama/zulip,ahmadassaf/zulip,sonali0901/zulip,Galexrt/zulip,kou/zulip,christi3k/zulip,Frouk/zulip,dawran6/zulip,tommyip/zulip,ahmadassaf/zulip,Juanvulcano/zulip,peiwei/zulip,dwrpayne/zulip,amanharitsh123/zulip,ryansnowboarder/zulip,vabs22/zulip,mohsenSy/zulip,kou/zulip,paxapy/zulip,Vallher/zulip,brockwhittaker/zulip,peguin40/zulip,krtkmj/zulip,zulip/zulip,vabs22/zulip,samatdav/zulip,jainayush975/zulip,punchagan/zulip,christi3k/zulip,isht3/zulip,andersk/zulip,zacps/zulip,ryansnowboarder/zulip,tommyip/zulip,rht/zulip,tommyip/zulip,kou/zulip,PhilSk/zulip,vikas-parashar/zulip,paxapy/zulip,Galexrt/zulip,AZtheAsian/zulip,Vallher/zulip,mahim97/zulip,sup95/zulip,Diptanshu8/zulip,synicalsyntax/zulip,amyliu345/zulip,hackerkid/zulip,sup95/zulip,krtkmj/zulip,dwrpayne/zulip,andersk/zulip,calvinleenyc/zulip,tommyip/zulip,Frouk/zulip,vikas-parashar/zulip,showell/zulip,JPJPJPOPOP/zulip,vabs22/zulip,susansls/zulip,niftynei/zulip,kou/zulip,sonali0901/zulip,mohsenSy/zulip,kou/zulip,christi3k/zulip,jainayush975/zulip,zulip/zulip,JPJPJPOPOP/zulip,Diptanshu8/zulip,jackrzhang/zulip,dw
rpayne/zulip,eeshangarg/zulip,jphilipsen05/zulip,souravbadami/zulip,peguin40/zulip,reyha/zulip,KingxBanana/zulip,ahmadassaf/zulip,brainwane/zulip,paxapy/zulip,amanharitsh123/zulip,Juanvulcano/zulip,christi3k/zulip,punchagan/zulip,aakash-cr7/zulip,dattatreya303/zulip,rishig/zulip,aakash-cr7/zulip,hackerkid/zulip,umkay/zulip,eeshangarg/zulip,blaze225/zulip,showell/zulip,Diptanshu8/zulip,Jianchun1/zulip,joyhchen/zulip,peiwei/zulip,samatdav/zulip,niftynei/zulip,arpith/zulip,aakash-cr7/zulip,reyha/zulip,synicalsyntax/zulip,brockwhittaker/zulip,zulip/zulip,j831/zulip,hackerkid/zulip,ryansnowboarder/zulip,samatdav/zulip,cosmicAsymmetry/zulip,paxapy/zulip,souravbadami/zulip,vaidap/zulip,amyliu345/zulip,blaze225/zulip,umkay/zulip,sup95/zulip,joyhchen/zulip,grave-w-grave/zulip,Frouk/zulip,krtkmj/zulip,timabbott/zulip,eeshangarg/zulip,hackerkid/zulip,sonali0901/zulip,dawran6/zulip,susansls/zulip,brockwhittaker/zulip,vikas-parashar/zulip,JPJPJPOPOP/zulip
|
|
fc35730074f5af647579012b706e531e84da5ab6
|
src/main/python/json_to_csv.py
|
src/main/python/json_to_csv.py
|
# Adapted from http://stackoverflow.com/questions/1871524/convert-from-json-to-csv-using-python
import csv
import json


def convert_comments(json_path="comments.txt", csv_path="comments.csv"):
    """Convert GitHub in-line review comments from a JSON dump to CSV.

    :param json_path: input file containing a JSON array of comment objects
    :param csv_path: output CSV path (header row + one row per comment)

    Fixes: the original opened the CSV in "wb" (a Python 2 idiom that
    raises TypeError with csv.writer on Python 3 — text mode with
    newline="" is the documented csv mode) and shadowed the builtin
    ``file``.
    """
    with open(json_path) as infile:
        data = json.load(infile)
    with open(csv_path, "w", newline="") as outfile:
        writer = csv.writer(outfile)
        writer.writerow(['user:login', 'path', 'commit_id', 'url', 'line',
                         'html_url', 'created_at', 'body'])
        for item in data:
            writer.writerow([item['user']['login'], item['path'],
                             item['commit_id'], item['url'], item['line'],
                             item['html_url'], item['created_at'], item['body']])


if __name__ == "__main__":
    convert_comments()
|
Add tool for converting in-line notes metadata to .csv
|
Add tool for converting in-line notes metadata to .csv
|
Python
|
mit
|
PovertyAction/github-download
|
|
3bd35228c61d73d8a43ffcda70386b194c9123b2
|
benchmark/TSP/TSPLIB/compare_to_BKS.py
|
benchmark/TSP/TSPLIB/compare_to_BKS.py
|
# -*- coding: utf-8 -*-
import json, sys, os
import numpy as np
# Compare a set of computed solutions to best known solutions on the
# same problems.
def s_round(v, d):
    """Round *v* to *d* decimal places and render the result as a string."""
    rounded = round(v, d)
    return "{}".format(rounded)
def log_comparisons(BKS, files):
# Print a CSV comparison of computed solutions against best-known
# solutions (BKS), followed by averages and percentile summaries.
# NOTE(review): Python 2 print statements — this module is py2-only.
print ','.join(["Instance", "Jobs", "Vehicles", "Optimal cost", "Solution cost", "Gap (%)", "Computing time (ms)"])
jobs = []
gaps = []
computing_times = []
total_files = len(files)
for f in files:
# Instance name = basename of the file with the "_sol.json" suffix cut.
instance = f[0:f.rfind("_sol.json")]
instance = instance[instance.rfind('/') + 1:]
# Skip solutions that have no best-known reference entry.
if instance not in BKS:
total_files -= 1
continue
indicators = BKS[instance]
BK_cost = indicators['upper_bound']
nb_job = indicators['jobs']
jobs.append(nb_job)
line = [
instance,
nb_job,
indicators['vehicles'],
BK_cost
]
with open(f, 'r') as sol_file:
solution = json.load(sol_file)
cost = solution['summary']['cost']
line.append(cost)
# Gap in percent relative to the best-known cost.
gap = 100 * (float(cost) / BK_cost - 1)
line.append(round(gap, 2))
gaps.append(gap)
computing_time = solution['summary']['computing_times']['loading'] + solution['summary']['computing_times']['solving']
line.append(computing_time)
computing_times.append(computing_time)
print ','.join(map(lambda x: str(x), line))
print ','
print 'Average,' + s_round(np.mean(jobs), 1) + ',,,,' + s_round(np.mean(gaps), 2) + ',' + s_round(np.mean(computing_times), 0)
# Percentiles
print ','
gaps_percentiles = np.percentile(gaps, [0, 10, 25, 50, 75, 90, 100])
ct_percentiles = np.percentile(computing_times, [0, 10, 25, 50, 75, 90, 100])
print ',Gaps,Computing times'
titles = ['min', 'lower whisker', 'lower quartile', 'median', 'upper quartile', 'upper whisker', 'max']
for i in range(len(titles)):
print titles[i] + ',' + s_round(gaps_percentiles[i], 2) + ',' + s_round(ct_percentiles[i], 0)
if __name__ == "__main__":
# First argument is the best known solution file.
with open(sys.argv[1], 'r') as sol_file:
bks = json.load(sol_file)
# Remaining arguments are computed solution files to use.
log_comparisons(bks, sys.argv[2:])
|
Automate comparison to best known solutions.
|
Automate comparison to best known solutions.
|
Python
|
bsd-2-clause
|
VROOM-Project/vroom-scripts,VROOM-Project/vroom-scripts
|
|
f6f022a4eb6af051becd5564c1b0de6943918968
|
sudoku_example.py
|
sudoku_example.py
|
#!/usr/bin/env python
"""Example: solve a sudoku puzzle by encoding it as a SAT formula.

Fix: the original used Python-2-only print statements; single-argument
print(...) calls below are valid on both Python 2 (parenthesized
expression) and Python 3.
"""
import sys
sys.path.append("./src")
from sat import SAT_solver
from sudoku import sudoku, printSudoku, processResult

print("=================================================")
print("SUDOKU")
print("=================================================")

solver = SAT_solver()

# define bord as follows.
# board is array with nine arrays (rows).
# rows are arrays of nine elements.
# elements are None or int in [1,9].
# None - empty square.
board = [[None, 8, None, 1, 6, None, None, None, 7],
         [1, None, 7, 4, None, 3, 6, None, None],
         [3, None, None, 5, None, None, 4, 2, None],
         [None, 9, None, None, 3, 2, 7, None, 4],
         [None, None, None, None, None, None, None, None, None],
         [2, None, 4, 8, 1, None, None, 6, None],
         [None, 4, 1, None, None, 8, None, None, 6],
         [None, None, 6, 7, None, 1, 9, None, 3],
         [7, None, None, None, 9, 6, None, 4, None]]

# print sudoku from board definition.
print("Problem:")
print(printSudoku(board))

# construct logical formula from board definition.
formula = sudoku(board)

# solve formula using SAT solver.
result = solver.solve(formula)

print("Solution:")
# process and print result of sat solver.
print(printSudoku(processResult(result[1])))
|
Add example of solving sudoku puzzle.
|
Add example of solving sudoku puzzle.
|
Python
|
bsd-3-clause
|
urska19/LVR-sat
|
|
4e223603a0216a667acc888268f845b41d16ab03
|
numpy/distutils/tests/test_npy_pkg_config.py
|
numpy/distutils/tests/test_npy_pkg_config.py
|
import os
from tempfile import mkstemp
from numpy.testing import *
from numpy.distutils.npy_pkg_config import read_config
simple = """\
[meta]
Name = foo
Description = foo lib
Version = 0.1
[default]
cflags = -I/usr/include
libs = -L/usr/lib
"""
simple_d = {'cflags': '-I/usr/include', 'libflags': '-L/usr/lib',
'version': '0.1', 'name': 'foo'}
simple_variable = """\
[meta]
Name = foo
Description = foo lib
Version = 0.1
[variables]
prefix = /foo/bar
libdir = ${prefix}/lib
includedir = ${prefix}/include
[default]
cflags = -I${includedir}
libs = -L${libdir}
"""
simple_variable_d = {'cflags': '-I/foo/bar/include', 'libflags': '-L/foo/bar/lib',
'version': '0.1', 'name': 'foo'}
class TestLibraryInfo(TestCase):
    """Tests for npy_pkg_config.read_config / LibraryInfo.

    Fixes: the original wrote a str to os.write (TypeError on Python 3 —
    os.write requires bytes), used the deprecated failUnless alias, and
    duplicated the temp-file boilerplate in both tests.
    """

    def _read_config_from_string(self, text):
        """Write *text* to a temporary .ini file, parse it, and clean up."""
        fd, filename = mkstemp('foo.ini')
        try:
            try:
                # Encode: os.write takes bytes on Python 3.
                os.write(fd, text.encode())
            finally:
                os.close(fd)
            return read_config(filename)
        finally:
            os.remove(filename)

    def test_simple(self):
        out = self._read_config_from_string(simple)
        self.assertEqual(out.cflags(), simple_d['cflags'])
        self.assertEqual(out.libs(), simple_d['libflags'])
        self.assertEqual(out.name, simple_d['name'])
        self.assertEqual(out.version, simple_d['version'])

    def test_simple_variable(self):
        out = self._read_config_from_string(simple_variable)
        self.assertEqual(out.cflags(), simple_variable_d['cflags'])
        self.assertEqual(out.libs(), simple_variable_d['libflags'])
        self.assertEqual(out.name, simple_variable_d['name'])
        self.assertEqual(out.version, simple_variable_d['version'])
        # Overriding a variable must propagate into the flag expansion.
        out.vars['prefix'] = '/Users/david'
        self.assertEqual(out.cflags(), '-I/Users/david/include')
|
Add two unit-tests for LibraryInfo.
|
Add two unit-tests for LibraryInfo.
git-svn-id: 77a43f9646713b91fea7788fad5dfbf67e151ece@7223 94b884b6-d6fd-0310-90d3-974f1d3f35e1
|
Python
|
bsd-3-clause
|
illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,illume/numpy3k,teoliphant/numpy-refactor,teoliphant/numpy-refactor,illume/numpy3k,Ademan/NumPy-GSoC,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,teoliphant/numpy-refactor,Ademan/NumPy-GSoC,illume/numpy3k,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC
|
|
fc9798c22f56a50233a40cff30ddd60fbecf471b
|
timeside/server/management/commands/timeside-items-post-save.py
|
timeside/server/management/commands/timeside-items-post-save.py
|
from django.core.management.base import BaseCommand
from timeside.server.models import Item
class Command(BaseCommand):
help = "This command will generate all post_save callback and will thus create audio_duration, mime_type and sha1 field if missing"
def handle(self, *args, **options):
# Re-saving every Item fires its post_save signal handlers, which
# populate the derived fields mentioned in `help` when missing.
for item in Item.objects.all():
item.save()
|
Add management command to update Item fields that gets updated after save()
|
Server: Add management command to update Item fields that gets updated after save()
|
Python
|
agpl-3.0
|
Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide
|
|
2b4ed8cc91ef4f5cd56dae7fbfa9e1a8f5dabcb8
|
backend/tests/api/test_commands.py
|
backend/tests/api/test_commands.py
|
import io
from unittest.mock import Mock, mock_open, patch
import strawberry
from django.core.management import call_command
def test_generate_graphql_schema():
"""The graphql_schema command dumps the introspected schema as JSON."""
out = io.StringIO()
m_open = mock_open()
# Minimal strawberry schema so the command has something to introspect.
@strawberry.type
class TestSchema:
a: int
# Patch json.dump, graphql_sync and open inside the command module so
# no file is written and no real GraphQL execution happens.
with patch("api.management.commands.graphql_schema.json.dump") as mock_j, patch(
"api.management.commands.graphql_schema.graphql_sync"
) as p, patch("api.management.commands.graphql_schema.open", m_open, create=True):
m = Mock()
m.data = {"a": 1}
p.return_value = m
call_command("graphql_schema", stdout=out)
assert "Successfully dumped GraphQL schema to schema.json\n" in out.getvalue()
# The command must serialize exactly the introspection result.
mock_j.assert_called_once()
assert mock_j.call_args_list[0][0][0] == {"data": {"a": 1}}
|
Add tests for the graphql_schema command
|
Add tests for the graphql_schema command
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
|
31c7ed89e66c32c46650ee93bfa8c8b2b8fbfad1
|
cisco_olt_client/tests/test_command.py
|
cisco_olt_client/tests/test_command.py
|
from cisco_olt_client.command import Command
def test_simple_compile():
    """Command.compile renders pair sequences, dicts and raw arg lists."""
    expected = 'cmd --arg1=val1 --arg2=val2'

    # Ordered pairs preserve argument order.
    ordered_cmd = Command('cmd', (('arg1', 'val1'), ('arg2', 'val2')))
    assert ordered_cmd.compile() == expected

    # Dict ordering is not guaranteed, so only check membership.
    dict_cmd = Command('cmd', {'arg1': 'val1', 'arg2': 'val2'})
    assert '--arg1=val1' in dict_cmd.compile()
    assert '--arg2=val2' in dict_cmd.compile()
    assert dict_cmd.compile().startswith('cmd ')

    # Pre-formatted argument lists pass through unchanged.
    list_cmd = Command('cmd', ['--arg1=val1', '--arg2=val2'])
    assert list_cmd.compile() == expected
|
Add test for command compilation
|
Add test for command compilation
|
Python
|
mit
|
Vnet-as/cisco-olt-client
|
|
2f450c0cb3d4c440b695696f88b72202c2f7d788
|
tests/emukit/core/test_optimization.py
|
tests/emukit/core/test_optimization.py
|
import numpy as np
from emukit.core import ParameterSpace
from emukit.core import ContinuousParameter, InformationSourceParameter
from emukit.core.acquisition import Acquisition
from emukit.core.optimization import AcquisitionOptimizer
from emukit.core.optimization import MultiSourceAcquisitionOptimizer
class SimpleSquareAcquisition(Acquisition):
    """Toy acquisition: -x0^2 + sum(remaining columns) + 1, maximized at x0=0."""

    def __init__(self):
        pass

    def evaluate(self, x):
        """Return the acquisition value of each row of *x* as a column vector."""
        first_col = x[:, 0]
        tail_sum = np.sum(x[:, 1:], axis=1)
        values = - first_col ** 2 + tail_sum + 1
        return np.atleast_2d(values).T

    @property
    def has_gradients(self):
        # This toy acquisition exposes no gradient information.
        return False
def test_acquisition_optimizer():
"""Optimizer finds the maximum of the toy acquisition at x=0 (value 1)."""
space = ParameterSpace([ContinuousParameter('x', 0, 1)])
acquisition = SimpleSquareAcquisition()
optimizer = AcquisitionOptimizer(space)
opt_x, opt_val = optimizer.optimize(acquisition)
np.testing.assert_array_equal(opt_x, np.array([[0.]]))
np.testing.assert_array_equal(opt_val, np.array([[1.]]))
def test_multi_source_acquisition_optimizer():
"""With a source parameter, the best point is x=0 at source index 1 (value 2)."""
space = ParameterSpace([ContinuousParameter('x', 0, 1),
InformationSourceParameter(2)])
acquisition = SimpleSquareAcquisition()
# Wrap the single-space optimizer so each information source is optimized.
single_optimizer = AcquisitionOptimizer(space)
optimizer = MultiSourceAcquisitionOptimizer(single_optimizer, space)
opt_x, opt_val = optimizer.optimize(acquisition)
np.testing.assert_array_equal(opt_x, np.array([[0., 1.]]))
np.testing.assert_array_equal(opt_val, np.array([[2.]]))
|
Test acquisition optimizer and multi source acquisition optimizer
|
Test acquisition optimizer and multi source acquisition optimizer
|
Python
|
apache-2.0
|
EmuKit/emukit
|
|
f574a74f99d1b8aa0fa107ba2416699104d1f36d
|
inspector/cbv/templatetags/cbv_tags.py
|
inspector/cbv/templatetags/cbv_tags.py
|
from django import template
from django.conf import settings
register = template.Library()
@register.filter
def called_same(qs, name):
    """Return the items in *qs* whose ``name`` attribute equals *name*."""
    matches = []
    for entry in qs:
        if entry.name == name:
            matches.append(entry)
    return matches
|
Add filter that gets items with the same .name from a list.
|
Add filter that gets items with the same .name from a list.
|
Python
|
bsd-2-clause
|
abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector
|
|
1a65b417129e0a32a079509c3e3868ced275b4b6
|
utils/validate.py
|
utils/validate.py
|
#!/usr/bin/env python
import sys
import json
import fileinput
import dateutil.parser
# Read JSON lines from stdin/argv files and report any line that fails to parse.
line_number = 0
for line in fileinput.input():
line_number += 1
try:
tweet = json.loads(line)
# Parse errors are reported with their line number but do not abort,
# so one bad record does not stop validation of the rest.
except Exception as e:
sys.stderr.write("uhoh, we got a problem on line: %d\n%s\n" % (line_number, e))
|
Add a little validation utility.
|
Add a little validation utility.
|
Python
|
mit
|
DocNow/twarc,miku/twarc,kevinbgunn/twarc,edsu/twarc,remagio/twarc,hugovk/twarc,ericscartier/twarc,kevinbgunn/twarc,ericscartier/twarc,miku/twarc,remagio/twarc
|
|
e872f249590244814e67894fc48b97d63ccad2c2
|
tools/data/window_file_select_vid_classes.py
|
tools/data/window_file_select_vid_classes.py
|
#!/usr/bin/env python
import argparse
import scipy.io as sio
import os
import os.path as osp
import numpy as np
from vdetlib.vdet.dataset import index_det_to_vdet
if __name__ == '__main__':
parser = argparse.ArgumentParser('Convert a window file for DET for VID.')
parser.add_argument('window_file')
parser.add_argument('save_window_file')
args = parser.parse_args()
f = open(args.window_file, 'r')
save_file = open(args.save_window_file, 'w')
boxes = []
image_ind = 0
# Number of records actually written to the output window file.
count = 0
# NOTE(review): Python 2 script (print statements, xrange).
while 1:
image_ind += 1
if image_ind % 1000 == 0:
print "Processed {} files.".format(image_ind)
# read number line
number_line = f.readline().strip()
if len(number_line) == 0: break # end of the file
assert number_line[0] == '#'
# read image line
img_path = f.readline().strip()
# Four image spec lines (channels/height/width/etc.) are copied verbatim.
image_specs = []
for i in xrange(4): image_specs.append(f.readline().strip())
num = int(f.readline().strip())
cur_boxes = []
only_bg = True
for i in xrange(num):
box_target = map(float, f.readline().strip().split())
# skip background or other non-vid classes
if int(box_target[0]) not in index_det_to_vdet: continue
# map DET index to VID
box_target[0] = index_det_to_vdet[box_target[0]]
cur_boxes.append(box_target)
if box_target[0] != 0:
only_bg = False
# Drop frames with no VID boxes, or only background boxes.
if len(cur_boxes) == 0 or only_bg: continue
save_file.write('# {}\n'.format(count))
count += 1
save_file.write('{}\n'.format(img_path))
for i in xrange(4): save_file.write('{}\n'.format(image_specs[i]))
selected_num = len(cur_boxes)
save_file.write('{}\n'.format(selected_num))
for box_target in cur_boxes:
save_file.write('{:.0f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:f} {:f} {:f} {:f}\n'.format(*box_target))
if image_ind % 1000 != 0:
print "Processed {} files.".format(image_ind)
f.close()
save_file.close()
|
Add script to convert DET window file to VID window file.
|
Add script to convert DET window file to VID window file.
|
Python
|
mit
|
myfavouritekk/TPN
|
|
268d67b3b6e81ba3b01a3e106dbabd5f03f42a50
|
glitter/block_admin.py
|
glitter/block_admin.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import warnings
from glitter.blockadmin.blocks import BlockAdmin, site
from .models import BaseBlock # noqa
# Backwards-compatible alias: the class now lives in blockadmin.blocks.
BlockModelAdmin = BlockAdmin
__all__ = ['site', 'BlockModelAdmin']
# Warn once at import time so callers migrate to the new location.
warnings.warn(
"BlockModelAdmin has been moved to blockadmin.blocks.BlockAdmin",
DeprecationWarning,
stacklevel=2
)
|
Add deprecation warning and backward compatibility
|
Add deprecation warning and backward compatibility
|
Python
|
bsd-3-clause
|
developersociety/django-glitter,developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter,developersociety/django-glitter,blancltd/django-glitter
|
|
81771f60b00d605dfe1bc07f1af6660cd3c1e0f2
|
magnum/tests/unit/cmd/test_conductor.py
|
magnum/tests/unit/cmd/test_conductor.py
|
# Copyright 2016 - Fujitsu, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from magnum.cmd import conductor
from magnum.tests import base
class TestMagnumConductor(base.TestCase):
"""Unit tests for the magnum-conductor entry point (cmd/conductor.py)."""
@mock.patch('oslo_service.service.launch')
@mock.patch.object(conductor, 'rpc_service')
@mock.patch('magnum.common.service.prepare_service')
def test_conductor(self, mock_prep, mock_rpc, mock_launch):
conductor.main()
server = mock_rpc.Service.create.return_value
launcher = mock_launch.return_value
# main() must prepare the service, create the RPC server on the
# configured topic, launch it, and block on the launcher.
mock_prep.assert_called_once_with(mock.ANY)
mock_rpc.Service.create.assert_called_once_with(
base.CONF.conductor.topic,
mock.ANY, mock.ANY, binary='magnum-conductor')
mock_launch.assert_called_once_with(base.CONF, server)
launcher.wait.assert_called_once_with()
|
Improve unit test coverage for cmd/conductor.py
|
Improve unit test coverage for cmd/conductor.py
Add new unit tests for cmd/conductor.py.
Increase the coverage for cmd/conductor.py from 0 to 100%.
Change-Id: I0c65ee9bc161046bcbd2b0ef5a8faf2f50a0f43e
Partial-Bug: #1511667
|
Python
|
apache-2.0
|
ArchiFleKs/magnum,ArchiFleKs/magnum,openstack/magnum,openstack/magnum
|
|
232aef0417fc10ecc73820b73d4b104498ff3bd3
|
parse.py
|
parse.py
|
# KlupuNG
# Copyright (C) 2013 Koodilehto Osk <http://koodilehto.fi>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import klupung.ktweb
# Walk the scraped KTWeb tree given on the command line and parse every
# meeting-document directory found.
for dirpath, dirnames, filenames in os.walk(sys.argv[1]):
# Continue if the dir is not a meeting document dir
# (meeting dirs are identified by their htmtxt0.htm cover page).
if "htmtxt0.htm" not in filenames:
continue
meetingdoc = klupung.ktweb.parse_meetingdoc(dirpath)
print(meetingdoc)
|
Add simple script for parsing meeting doc dirs
|
Add simple script for parsing meeting doc dirs
|
Python
|
agpl-3.0
|
tuomasjjrasanen/klupu,tuomasjjrasanen/klupu
|
|
5f4580cdc2f46ef9294057372609e1b9a48f7041
|
tests/test_cardxml.py
|
tests/test_cardxml.py
|
from hearthstone import cardxml
def test_cardxml_load():
cardid_db, _ = cardxml.load()
dbf_db, _ = cardxml.load_dbf()
assert cardid_db
assert dbf_db
for card_id, card in cardid_db.items():
assert dbf_db[card.dbf_id].id == card_id
for dbf_id, card in dbf_db.items():
assert cardid_db[card.id].dbf_id == dbf_id
|
Add a test for the cardxml databases
|
tests: Add a test for the cardxml databases
|
Python
|
mit
|
HearthSim/python-hearthstone
|
|
24ba638d16433ce298fca9dfd4e12cad01c86728
|
scripts/one_hot_encoding.py
|
scripts/one_hot_encoding.py
|
import sys
import pandas as pd
sys.path.append('..')
from utils.preprocessing import one_hot_encoding
# Finish preprocessing: one-hot encode categorical features over the
# combined train+test users, then split back and write the final CSVs.
path = '../datasets/processed/'
train_users = pd.read_csv(path + 'semi_processed_train_users.csv')
test_users = pd.read_csv(path + 'semi_processed_test_users.csv')
# Join users
# Encoding both sets together guarantees identical dummy columns.
users = pd.concat((train_users, test_users), axis=0, ignore_index=True)
users = users.set_index('id')
drop_list = [
'date_account_created',
'date_first_active',
'timestamp_first_active'
]
# Drop columns
users = users.drop(drop_list, axis=1)
# Encode categorical features
categorical_features = [
'gender', 'signup_method', 'signup_flow', 'language', 'affiliate_channel',
'affiliate_provider', 'first_affiliate_tracked', 'signup_app',
'first_device_type', 'first_browser', 'most_used_device'
]
users = one_hot_encoding(users, categorical_features)
train_users = train_users.set_index('id')
test_users = test_users.set_index('id')
users.index.name = 'id'
# Split the encoded frame back into train/test by the original ids.
processed_train_users = users.loc[train_users.index]
processed_test_users = users.loc[test_users.index]
# The test set has no labels; drop the target column before writing.
processed_test_users.drop('country_destination', inplace=True, axis=1)
processed_train_users.to_csv('train_users.csv')
processed_test_users.to_csv('test_users.csv')
|
Add scrit to finish preprocessing
|
Add scrit to finish preprocessing
|
Python
|
mit
|
davidgasquez/kaggle-airbnb
|
|
8e7ac2d9b4c281520c2a5d65d6d10cc39f64181d
|
controller/single_instance_task.py
|
controller/single_instance_task.py
|
import functools
from django.core.cache import cache
def single_instance_task(timeout):
    """Decorator ensuring only one instance of a task runs at a time.

    Uses a cache-based lock keyed on the wrapped function's name; if the
    lock is already held, the call is silently skipped.  The lock expires
    after *timeout* seconds even if the holder dies.

    Fix: replaced the PEP 8 E731 lambda assignments with direct calls.

    :param timeout: lock expiry in seconds
    """
    def task_exc(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            lock_id = "celery-single-instance-" + func.__name__
            # cache.add is atomic: it succeeds only if the key is absent.
            if cache.add(lock_id, "true", timeout):
                try:
                    func(*args, **kwargs)
                finally:
                    cache.delete(lock_id)
        return wrapper
    return task_exc
|
Add in single instance task file
|
Add in single instance task file
|
Python
|
agpl-3.0
|
edx/edx-ora,edx/edx-ora,edx/edx-ora,edx/edx-ora
|
|
17f14574a35d985571e71023587ddb858a8b3ba2
|
tests/test_engines.py
|
tests/test_engines.py
|
#!/usr/bin/env python
from __future__ import print_function
import unittest
# Prefer the stdlib mock (Python 3.3+); fall back to the standalone
# ``mock`` package on older interpreters.
try:
    from unittest import mock
except ImportError:
    import mock
import imp
import os.path
import engines
class TestInit(unittest.TestCase):
    """Tests for the ``engines`` package's plugin-discovery ``_init``."""

    def test_init(self):
        # ``engines._init`` is expected to scan the package directory and
        # register one engine class per importable *_engine module, keyed
        # by the engine's ``handle``.
        mock_engines = {}
        # Directory listing with decoys: non-.py files and names starting
        # with '.', '@' or a digit must be ignored; .pyc/.pyo variants and
        # a leading underscore must still be picked up (see valid_engines).
        mock_listdir = mock.Mock(return_value=(
            'text_engine.txt',
            '0_engine.py',
            '.period_engine.py',
            '@at_engine.py',
            '__init__.py',
            '_underscore_engine.py',
            'normal_engine.py',
            'compiled_engine.pyc',
            'optimized_engine.pyo',
        ))
        # module name -> (expected class name, engine handle)
        valid_engines = {
            '_underscore_engine': ('UnderscoreEngine', 'underscore'),
            'normal_engine': ('NormalEngine', 'normal'),
            'compiled_engine': ('CompiledEngine', 'compiled'),
            'optimized_engine': ('OptimizedEngine', 'optimized'),
        }
        # Filled in by mock_import_module as modules get "imported".
        result_dict = {handle: None for __, (__, handle) in valid_engines.items()}

        def mock_import_module(name, package):
            # Stand-in for importlib.import_module: fabricates a module
            # containing a MockEngine subclass for each valid engine name.
            self.assertEqual(package, 'engines')
            assert name.startswith('.')
            name = name[1:]
            module = imp.new_module('%s.%s' % (package, name))
            if name == '__init__':
                module.Engine = engines.Engine
            else:
                self.assertIn(name, valid_engines)
                class_name, engine_handle = valid_engines[name]

                class MockEngine(engines.Engine):
                    handle = engine_handle

                MockEngine.__name__ = class_name
                MockEngine.__qualname__ = class_name
                result_dict[engine_handle] = MockEngine
                setattr(module, class_name, MockEngine)
            return module

        with mock.patch.object(engines, 'engines', mock_engines), \
                mock.patch('os.listdir', mock_listdir), \
                mock.patch('importlib.import_module', mock_import_module):
            engines._init()
        mock_listdir.assert_called_once_with(os.path.dirname(engines.__file__))
        self.assertDictEqual(mock_engines, result_dict)
class TestEngine(unittest.TestCase):
    """Behavioural tests for the abstract ``engines.Engine`` base class."""

    def test_not_instantiable(self):
        """The abstract base class refuses direct instantiation."""
        with self.assertRaises(AssertionError):
            engines.Engine(dirname='/tmp/', tolerant=False)

    def test_handle_is_none(self):
        """A subclass that doesn't set ``handle`` inherits None."""
        class SubEngine(engines.Engine):
            pass
        self.assertIsNone(SubEngine(dirname='/tmp/', tolerant=False).handle)

    def test_apply_not_implemented(self):
        """``apply`` is abstract and raises until overridden."""
        class SubEngine(engines.Engine):
            pass
        instance = SubEngine(dirname='/tmp/', tolerant=False)
        with self.assertRaises(NotImplementedError):
            instance.apply({})


if __name__ == '__main__':
    unittest.main()
|
Add tests for engines package.
|
Add tests for engines package.
|
Python
|
mit
|
blubberdiblub/eztemplate
|
|
41ebf7cbb3c23ddbd47ef0259490d6669538faa1
|
rockit/core/tests/test_task_settings.py
|
rockit/core/tests/test_task_settings.py
|
from django.test import TestCase
from rockit.core import holders
from rockit.core import tasks
class TaskSettingsTestCase(TestCase):
    """Smoke test for the ``settings`` task."""

    def test_it_should_be_able_to_call_task(self):
        # The task should populate the holder with at least one entry.
        result = tasks.settings(holders.SettingsHolder())
        self.assertNotEqual(0, len(result.get_content()))
|
Add unit test for task settings
|
Add unit test for task settings
|
Python
|
mit
|
acreations/rockit-server,acreations/rockit-server,acreations/rockit-server,acreations/rockit-server
|
|
d50b67c5e16775861f251e794f75daecab64223b
|
tests/test_assigned_labels.py
|
tests/test_assigned_labels.py
|
from ghi_assist.hooks.assigned_label_hook import AssignedLabelHook
def test_assign():
    """Test successful assignment."""
    label_hook = AssignedLabelHook()
    issue_labels = [{"name": name} for name in ("alpha", "beta", "gamma")]
    payload = {"action": "assigned", "issue": {"labels": issue_labels}}
    assert label_hook.should_perform_action(payload), "Need to perform action"
    assert len(label_hook.labels) == 4, "Should be four labels"
    assert "status: claimed" in label_hook.labels, "Needs to be claimed"
def test_unassign():
    """Test successful unassignment."""
    label_hook = AssignedLabelHook()
    issue_labels = [{"name": name}
                    for name in ("alpha", "beta", "gamma", "status: claimed")]
    payload = {"action": "unassigned", "issue": {"labels": issue_labels}}
    assert label_hook.should_perform_action(payload), "Needs to perform action"
    assert len(label_hook.labels) == 3, "Should be three labels"
    assert "status: claimed" not in label_hook.labels, "Needs to be unclaimed"
def test_unneeded_assign():
    """Test unnecessary assignment."""
    label_hook = AssignedLabelHook()
    issue_labels = [{"name": name}
                    for name in ("alpha", "beta", "gamma", "status: claimed")]
    payload = {"action": "assigned", "issue": {"labels": issue_labels}}
    assert not label_hook.should_perform_action(payload), "No need to perform action"
    assert len(label_hook.labels) == 4, "Should be four labels"
    assert "status: claimed" in label_hook.labels, "Needs to be claimed"
def test_unneeded_unassign():
    """Test unnecessary unassignment."""
    label_hook = AssignedLabelHook()
    issue_labels = [{"name": name} for name in ("alpha", "beta", "gamma")]
    payload = {"action": "unassigned", "issue": {"labels": issue_labels}}
    assert not label_hook.should_perform_action(payload), "No need to perform action"
    assert len(label_hook.labels) == 3, "Should be three labels"
    assert "status: claimed" not in label_hook.labels, "Needs to be unclaimed"
|
Add tests for (un)claiming issues
|
Add tests for (un)claiming issues
Test that labels are added and removed correctly to note when an issue
is assigned or unassigned.
|
Python
|
agpl-3.0
|
afuna/ghi-assist
|
|
d9a522df5827867897e4a2bbaf680db563fb983e
|
scripts/tile_images.py
|
scripts/tile_images.py
|
"""Tile images."""
import os
import random
import argparse
from collections import defaultdict
import dtoolcore
import numpy as np
from jicbioimage.core.image import Image
from skimage.transform import downscale_local_mean
from dtoolutils import (
temp_working_dir,
stage_outputs
)
from image_utils import join_horizontally, join_vertically
def ensure_uri(path_or_uri):
    """Return *path_or_uri* unchanged when it already carries a scheme
    separator, otherwise prefix it with ``disk:`` to form a dtool URI."""
    if ':' not in path_or_uri:
        return "disk:{}".format(path_or_uri)
    return path_or_uri
def tile_plots(dataset, working_dir):
    """Build one tiled overview PNG per date from the dataset's JPEG items.

    Returns a list of (filename, metadata-dict) tuples for staging.
    """
    is_jpeg_overlay = dataset.get_overlay("is_jpeg")
    date_overlay = dataset.get_overlay("date")
    ordering_overlay = dataset.get_overlay("ordering")
    dates = set(date_overlay.values())
    fnames = []
    # NOTE(review): this overrides the computed dates set with a single
    # hard-coded date -- looks like a debugging leftover; confirm.
    dates = ['2016-06-28']
    # print [date_overlay[identifier] for identifier in dataset.identifiers]
    for date in dates:
        # Map ordering number -> item identifier for this date's JPEGs.
        selected = {}
        for identifier in dataset.identifiers:
            if is_jpeg_overlay[identifier] and date_overlay[identifier] == date:
                try:
                    selected[ordering_overlay[identifier]] = identifier
                except TypeError:
                    # Items without a usable ordering value are skipped.
                    pass
        print(selected.keys())

        def generate_column(numbers):
            # Load each selected image, downscale 5x in both spatial
            # dimensions (channels untouched), and join them side by side.
            images = []
            for i in numbers:
                i = selected[i]
                image_fpath = dataset.item_content_abspath(i)
                images.append(
                    downscale_local_mean(Image.from_file(image_fpath), (5, 5, 1))
                )
            column = join_horizontally(images)
            return column

        columns = []
        for o in range(12):
            # assumes plots are numbered 11..59 on a 12-wide grid -- TODO confirm
            numbers = range(59-o, 10-o, -12)
            columns.append(generate_column(numbers))
        tiled = join_vertically(columns)
        fname = 'tiled-{}.png'.format(date)
        output_fpath = os.path.join(working_dir, fname)
        with open(output_fpath, 'wb') as fh:
            fh.write(tiled.png())
        fnames.append((fname, {}))
    return fnames
def main():
    """CLI entry point: tile the images of one dataset into another.

    Reads items from --dataset-uri and stages the tiled PNGs into the
    proto-dataset at --output-uri.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset-uri')
    parser.add_argument('--output-uri')
    args = parser.parse_args()

    dataset_uri = ensure_uri(args.dataset_uri)
    output_uri = ensure_uri(args.output_uri)

    dataset = dtoolcore.DataSet.from_uri(dataset_uri)
    output_dataset = dtoolcore.ProtoDataSet.from_uri(output_uri)

    # Render into a temporary directory, then stage the results into the
    # output dataset (no extra overlays, no README template).
    with temp_working_dir() as working_dir:
        outputs = tile_plots(dataset, working_dir)
        stage_outputs(
            outputs,
            working_dir,
            dataset,
            output_dataset,
            [],
            None
        )


if __name__ == '__main__':
    main()
|
Add script to tile images
|
Add script to tile images
|
Python
|
mit
|
JIC-Image-Analysis/senescence-in-field,JIC-Image-Analysis/senescence-in-field,JIC-Image-Analysis/senescence-in-field
|
|
7af557a6c40508e758c020539647e6578c779018
|
example_crm/dev_malcolm.py
|
example_crm/dev_malcolm.py
|
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from .base import *
# Developer-local override: lightweight on-disk SQLite database.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',  # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'temp.db',  # Or path to database file if using sqlite3.
        'USER': '',  # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',  # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',  # Set to empty string for default. Not used with sqlite3.
    }
}
|
Add dev env for malcolm
|
Add dev env for malcolm
|
Python
|
apache-2.0
|
pkimber/crm,pkimber/crm,pkimber/crm
|
|
56583a6e15bc4dbfb1c80739e3942eed733b91e3
|
get-version-from-git.py
|
get-version-from-git.py
|
#!/usr/bin/env python
from __future__ import print_function
# Edit these constants if desired. NOTE that if you change DEFAULT_TAG_FORMAT,
# you'll need to change the .lstrip('v") part of parse_tag() as well.
DEFAULT_TAG_FORMAT="v[0-9]*" # Shell glob format, not regex
DEFAULT_VERSION_IF_NO_TAGS="0.0.0"  # reported when no matching tag exists
import subprocess
import os
def cmd_output(cmd):
    """Run *cmd* (an argv list) and return its stdout, with trailing
    whitespace/newlines stripped. stderr is captured and discarded."""
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, _ = proc.communicate()
    return stdout.rstrip()
def git_describe(commitish=None):
    "Run `git describe` (on commitish, if passed) and return its stdout"
    args = ['git', 'describe', '--long',
            '--match={}'.format(DEFAULT_TAG_FORMAT), '--always']
    # --always falls back to a bare SHA when no tag matches.
    return cmd_output(args + [commitish] if commitish else args)
def git_commit_count():
    "Get total commit count in repo"
    # rev-list --count counts every commit reachable from HEAD.
    return cmd_output(['git', 'rev-list', 'HEAD', '--count'])
def parse_tag(git_tag):
    """Parse `git describe --long` output into its component parts.

    Returns a dict with GIT_VN_FULL, GIT_VN_TAG, GIT_VN_COMMITS and
    GIT_VN_SHA keys. When *git_tag* is a bare SHA (no tag found), a
    synthetic description is built from the defaults.
    """
    parts = git_tag.split('-')
    if len(parts) > 1:
        # Normal "vTAG-COMMITS-gSHA" form; the tag itself may contain '-'.
        result = {
            'GIT_VN_FULL': git_tag,
            'GIT_VN_SHA': parts[-1].lstrip('g'),
            'GIT_VN_COMMITS': parts[-2],
            'GIT_VN_TAG': '-'.join(parts[:-2]).lstrip('v'),
        }
    else:
        # No version tags found; build our own from the defaults.
        result = {
            'GIT_VN_SHA': parts[0],
            'GIT_VN_COMMITS': git_commit_count(),
            'GIT_VN_TAG': DEFAULT_VERSION_IF_NO_TAGS,
        }
        # Reconstruct GIT_VN_FULL to match normal "git describe" output.
        result['GIT_VN_FULL'] = "v{GIT_VN_TAG}-{GIT_VN_COMMITS}-g{GIT_VN_SHA}".format(**result)
    return result
def teamcity_log(tag_parts):
    """Emit each part as a TeamCity service message setting an env parameter."""
    # dict.iteritems() is Python 2 only and breaks under Python 3 (which the
    # `from __future__ import print_function` suggests is intended); items()
    # behaves correctly on both.
    for name, val in tag_parts.items():
        print("##teamcity[setParameter name='env.{}' value='{}']".format(name, val))
def console_log(tag_parts):
    """Print each part as a plain NAME=VALUE line."""
    # dict.iteritems() is Python 2 only; items() works on both 2 and 3.
    for name, val in tag_parts.items():
        print("{}={}".format(name, val))
def is_running_under_teamcity():
    "Return True if we're running in a TeamCity agent environment"
    # dict.has_key() was removed in Python 3; the `in` operator is the
    # portable (and idiomatic) membership test.
    return 'TEAMCITY_VERSION' in os.environ
if __name__ == '__main__':
    # Describe HEAD, split the result into its parts, then emit them either
    # as TeamCity service messages (inside an agent) or as plain KEY=VALUE.
    output = git_describe()
    parts = parse_tag(output)
    if is_running_under_teamcity():
        teamcity_log(parts)
    else:
        console_log(parts)
|
Add the script for people to download
|
Add the script for people to download
|
Python
|
mit
|
rmunn/version-numbers-from-git,rmunn/version-numbers-from-git,rmunn/version-numbers-from-git
|
|
7b18fd4e2f4b975e891d31994f15e30d7fd50d1b
|
ply_speed.py
|
ply_speed.py
|
import cProfile
import time
from res import types
from src import ai
from src import coordinate
from src import historynode
# Benchmark: time a single AI best-move search at a fixed ply depth.
plyNum = 5

aiObject = ai.AI()
game = historynode.HistoryNode()
# Lay out the benchmark position: a block of geese on rows 3-7 and two
# foxes on the bottom row.
game.setState(coordinate.Coordinate(3, 7), types.GOOSE)
game.setState(coordinate.Coordinate(4, 7), types.GOOSE)
game.setState(coordinate.Coordinate(5, 7), types.GOOSE)
game.setState(coordinate.Coordinate(3, 6), types.GOOSE)
game.setState(coordinate.Coordinate(4, 6), types.GOOSE)
game.setState(coordinate.Coordinate(5, 6), types.GOOSE)
game.setState(coordinate.Coordinate(1, 5), types.GOOSE)
game.setState(coordinate.Coordinate(2, 5), types.GOOSE)
game.setState(coordinate.Coordinate(3, 5), types.GOOSE)
game.setState(coordinate.Coordinate(4, 5), types.GOOSE)
game.setState(coordinate.Coordinate(5, 5), types.GOOSE)
game.setState(coordinate.Coordinate(6, 5), types.GOOSE)
game.setState(coordinate.Coordinate(7, 5), types.GOOSE)
game.setState(coordinate.Coordinate(1, 4), types.GOOSE)
game.setState(coordinate.Coordinate(2, 4), types.GOOSE)
game.setState(coordinate.Coordinate(3, 4), types.GOOSE)
game.setState(coordinate.Coordinate(4, 4), types.GOOSE)
game.setState(coordinate.Coordinate(5, 4), types.GOOSE)
game.setState(coordinate.Coordinate(6, 4), types.GOOSE)
game.setState(coordinate.Coordinate(7, 4), types.GOOSE)
game.setState(coordinate.Coordinate(1, 3), types.GOOSE)
game.setState(coordinate.Coordinate(2, 3), types.GOOSE)
game.setState(coordinate.Coordinate(6, 3), types.GOOSE)
game.setState(coordinate.Coordinate(7, 3), types.GOOSE)
game.setState(coordinate.Coordinate(3, 1), types.FOX)
game.setState(coordinate.Coordinate(5, 1), types.FOX)
game.pretty_print_board()

# Time the search itself (board setup excluded from the measurement).
start = time.time()
actualValue = aiObject.findBestMove(game, True, plyNum)
end = time.time()

# Optional profiled re-run, kept for convenience:
# print("Running again with cProfile")
# aiObject2 = ai.AI(0.5, 0.5)
# cProfile.run('aiObject2.findBestMove(game, False, plyNum)')

actualValue.pretty_print_board()
print("New: Calculated {0} positions in {1:.3f} seconds at {2:.3f} pos/sec".format(
    aiObject.moveCount,
    end - start,
    aiObject.moveCount/(end-start)))
|
Add script to test search speed
|
Add script to test search speed
|
Python
|
mit
|
blairck/jaeger
|
|
153fd9e9c0b9e251c423b811f3d67522d469d9bc
|
all-domains/tutorials/cracking-the-coding-interview/arrays-left-rotation/solution.py
|
all-domains/tutorials/cracking-the-coding-interview/arrays-left-rotation/solution.py
|
# https://www.hackerrank.com/challenges/ctci-array-left-rotation
# Python 3
def array_left_rotation(a, n, k):
    """Rotate the values of iterable *a* left by *k* positions.

    *n* is the declared element count (kept for interface compatibility
    with the HackerRank driver; the actual length of *a* is used).
    Returns an iterator over the rotated values.
    """
    arr = list(a)
    if arr:
        # Rotating by the length is a no-op, so reduce k first; one
        # slice-and-concatenate is O(n), unlike the O(n*k) pop(0)/append
        # loop (each pop(0) shifts the whole list). Also avoids crashing
        # on empty input.
        k %= len(arr)
        arr = arr[k:] + arr[:k]
    return iter(arr)
# HackerRank driver: first input line is "n k", second line the n integers.
n, k = map(int, input().strip().split(' '))
a = map(int, input().strip().split(' '))
answer = array_left_rotation(a, n, k);
print(*answer, sep=' ')
|
Solve first problem for Cracking the coding interview
|
Solve first problem for Cracking the coding interview
|
Python
|
mit
|
arvinsim/hackerrank-solutions
|
|
ea74801222231145421187a005756db752e9a2f8
|
tests/test_flask_get.py
|
tests/test_flask_get.py
|
import unittest
from flask import Flask
from flask.ext.autodoc import Autodoc
class TestAutodocWithFlask(unittest.TestCase):
    """Integration tests running Autodoc against a real Flask app."""

    def setUp(self):
        self.app = Flask(__name__)
        self.autodoc = Autodoc(self.app)

        # NOTE(review): the view's name and docstring are presumably what
        # autodoc.html() surfaces in the generated page -- keep both
        # meaningful; confirm against the Autodoc implementation.
        @self.app.route('/')
        @self.autodoc.doc()
        def index():
            """Returns a hello world message"""
            return 'Hello World!'

        self.client = self.app.test_client()

    def test_html(self):
        # Register a docs endpoint and verify the generated HTML is served.
        @self.app.route('/docs')
        def html_docs():
            return self.autodoc.html()

        response = self.client.get('/docs')
        self.assertEqual(response.status_code, 200)
|
Add a test for HTML retrieval.
|
Add a test for HTML retrieval.
|
Python
|
mit
|
jwg4/flask-autodoc,jwg4/flask-autodoc
|
|
f9733f0fcbf94aabda179e50a4eb694f117c208d
|
cdf/renderers.py
|
cdf/renderers.py
|
from cdf.config import DJANGO_VERSIONS, VERSION
from cdf.jinja_utils import template_env
class BasePageRenderer(object):
    """Render a page template for a set of classes.

    Subclasses provide ``template_name``; ``render`` fills it with the
    version/class context and writes the result to a file.
    """

    def __init__(self, klasses):
        self.klasses = klasses

    def render(self, filename):
        """Render the template and write the output to *filename*."""
        page = template_env.get_template(self.template_name).render(
            self.get_context())
        with open(filename, 'w') as out:
            out.write(page)

    def get_context(self):
        """Build the template context: current version, all versions, the
        remaining versions, and the classes to document."""
        other_versions = list(DJANGO_VERSIONS)
        other_versions.remove(VERSION)
        return {
            'version_prefix': 'Django',
            'version': VERSION,
            'versions': DJANGO_VERSIONS,
            'other_versions': other_versions,
            'klasses': self.klasses,
        }
class IndexPageRenderer(BasePageRenderer):
    # Renders the site index using the shared base context.
    template_name = 'index.html'
|
Add base renderer and an index page renderer
|
Add base renderer and an index page renderer
|
Python
|
mit
|
ana-balica/classy-django-forms,ana-balica/classy-django-forms,ana-balica/classy-django-forms
|
|
32de58c3a36bf9f9f8ec98e904aee989ebe3428e
|
install_deps.py
|
install_deps.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Installs dependencies appropriate for the Python version."""
import subprocess
import sys
# subprocess.call() takes the command as a single argv sequence; passing the
# options as a second string argument is a bug -- that positional slot is the
# ``bufsize`` parameter, so the original calls could never run pip correctly.
subprocess.call(["pip", "install", "--use-mirrors", "-r", "requirements.txt"])
if sys.version_info[0] >= 3:  # Python 3
    # No Python 3-specific dependencies right now
    pass
else:  # Python 2
    # unittest2 backports the modern unittest API to Python 2.
    subprocess.call(["pip", "install", "--use-mirrors", "unittest2"])
|
Install different deps for Python 3 vs. Python 2.
|
Install different deps for Python 3 vs. Python 2.
|
Python
|
mit
|
gthank/pto,gthank/pto
|
|
1394800c7c7bd62e0191cdb85612fa3066789424
|
weekday_greeting_slackbot.py
|
weekday_greeting_slackbot.py
|
#!/usr/bin/env python3
from slackclient import SlackClient
import json
import time
# Deployment configuration -- fill these in before running.
slack_token = ""  # API token used to post the greeting
channel = ""  # channel name (without '#') that receives the greeting
message = ""  # greeting text to post
report_slack_token = ""  # API token used to post the status report
report_channel = ""  # channel name (without '#') that receives the report
report_slackbot_name = ""  # bot name echoed inside the report text
report_alert_list = ""  # user(s) mentioned in the report when posting fails
def report_result(result):
    """Post a status report about *result* (a Slack API response dict,
    checked via its "ok" flag) to the reporting channel; failures mention
    report_alert_list."""
    if result.get("ok"):
        report_message = "*SUCCESS*\n"
    else:
        report_message = \
            "*ERROR* (Alerting user(s) " + report_alert_list + ")" + '\n'
    # link_names=1 asks Slack to resolve @-mentions inside the message text.
    SlackClient(report_slack_token).api_call(
        "chat.postMessage",
        channel = "#" + report_channel,
        link_names = 1,
        text = ">>> " + time.strftime("%Y-%m-%d %H:%M:%S") + " | " +
            "Report from _" + report_slackbot_name + '_' + '\n' +
            "Operation status: " + report_message +
            "Response body:\n```\n" +
            json.dumps(result, indent=4, sort_keys=True) + '\n```'
    )
def main():
    """Post the configured greeting message, then report the outcome."""
    result = SlackClient(slack_token).api_call(
        "chat.postMessage",
        channel = "#" + channel,
        text = message
    )
    report_result(result)


if __name__ == "__main__":
    main()
|
Add basic Slackbot script with reporting enabled
|
Add basic Slackbot script with reporting enabled
|
Python
|
mit
|
jleung51/scripts,jleung51/scripts,jleung51/scripts
|
|
e65641c7a2d944a39cf0cf4988d6b03e74a9712b
|
examples/sqlite_fts_compression.py
|
examples/sqlite_fts_compression.py
|
#
# Small example demonstrating the use of zlib compression with the Sqlite
# full-text search extension.
#
import zlib
from peewee import *
from playhouse.sqlite_ext import *
# In-memory database so the example leaves nothing behind.
db = SqliteExtDatabase(':memory:')

class SearchIndex(FTSModel):
    # Full-text-searchable document body.
    content = SearchField()

    class Meta:
        database = db
@db.func('zlib_compress')
def _zlib_compress(data):
    """SQLite UDF: zlib-compress *data* at maximum level; None passes through."""
    if data is None:
        return None
    if isinstance(data, str):
        # zlib works on bytes; encode text input first.
        data = data.encode('utf8')
    return zlib.compress(data, 9)
@db.func('zlib_decompress')
def _zlib_decompress(data):
    """SQLite UDF: inverse of zlib_compress; None passes through."""
    return None if data is None else zlib.decompress(data)
# Create the virtual table with porter-stemming tokenization; SQLite calls
# our registered UDFs to (de)compress the stored content transparently.
SearchIndex.create_table(
    tokenize='porter',
    compress='zlib_compress',
    uncompress='zlib_decompress')

phrases = [
    'A faith is a necessity to a man. Woe to him who believes in nothing.',
    ('All who call on God in true faith, earnestly from the heart, will '
     'certainly be heard, and will receive what they have asked and desired.'),
    ('Be faithful in small things because it is in them that your strength '
     'lies.'),
    ('Faith consists in believing when it is beyond the power of reason to '
     'believe.'),
    ('Faith has to do with things that are not seen and hope with things that '
     'are not at hand.')]
for phrase in phrases:
    SearchIndex.create(content=phrase)

# Use the simple ranking algorithm.
query = SearchIndex.search('faith things', with_score=True)
for row in query:
    # content comes back compressed-then-decompressed as bytes; decode it.
    print(round(row.score, 2), row.content.decode('utf8'))

print('---')

# Use the Okapi-BM25 ranking algorithm.
query = SearchIndex.search_bm25('believe', with_score=True)
for row in query:
    print(round(row.score, 2), row.content.decode('utf8'))

db.close()
|
Add small example of sqlite FTS with compression.
|
Add small example of sqlite FTS with compression.
|
Python
|
mit
|
coleifer/peewee,coleifer/peewee,coleifer/peewee
|
|
df6fba6742e4fddfebd305a1ed624927f26e0f45
|
endpoints/__init__.py
|
endpoints/__init__.py
|
#!/usr/bin/python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Endpoints module."""
# pylint: disable=wildcard-import
from api_config import api
from api_config import API_EXPLORER_CLIENT_ID
from api_config import AUTH_LEVEL
from api_config import EMAIL_SCOPE
from api_config import Issuer
from api_config import method
from api_exceptions import *
from apiserving import *
from endpoints_dispatcher import *
import message_parser
from resource_container import ResourceContainer
from users_id_token import get_current_user, get_verified_jwt, convert_jwks_uri
from users_id_token import InvalidGetUserCall
from users_id_token import SKIP_CLIENT_ID_CHECK
__version__ = '2.3.1'  # version string of the endpoints package
|
#!/usr/bin/python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Endpoints module."""
# pylint: disable=wildcard-import
from api_config import api
from api_config import API_EXPLORER_CLIENT_ID
from api_config import AUTH_LEVEL
from api_config import EMAIL_SCOPE
from api_config import Issuer
from api_config import method
from api_exceptions import *
from apiserving import *
from endpoints_dispatcher import *
import message_parser
from resource_container import ResourceContainer
from users_id_token import get_current_user, get_verified_jwt, convert_jwks_uri
from users_id_token import InvalidGetUserCall
from users_id_token import SKIP_CLIENT_ID_CHECK
__version__ = '2.4.0'  # version string of the endpoints package
|
Bump minor version (2.3.1 -> 2.4.0)
|
Bump minor version (2.3.1 -> 2.4.0)
Rationale:
* Discovery docs now properly contain the OAuth2 scopes
* Improved security definition generation in OpenAPI specs
|
Python
|
apache-2.0
|
cloudendpoints/endpoints-python,inklesspen/endpoints-python,inklesspen/endpoints-python,cloudendpoints/endpoints-python
|
ebc88a28e2f8018b2887970bddb0423243ee8292
|
src/core/templatetags/nose_tools.py
|
src/core/templatetags/nose_tools.py
|
from nose import tools
from django import template
# Tag registry Django loads from this template-tag module.
register = template.Library()

class NoseNode(template.Node):
    # Template node that drops into the nose debugger when rendered.
    def render(self, context):
        tools.set_trace()  # Debugger will stop here
        return ''
@register.tag
def set_trace(parser, token):
    # Registered under the function's name, so templates use {% set_trace %};
    # it renders nothing and breaks into the debugger at render time.
    return NoseNode()
|
Add template tag for debugging
|
Add template tag for debugging
|
Python
|
agpl-3.0
|
BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway,BirkbeckCTP/janeway
|
|
56319a8562d10b43189ffa968b5ecc477f50a6c9
|
src/python/BasicAvg.py
|
src/python/BasicAvg.py
|
"""
>>> from pyspark.context import SparkContext
>>> sc = SparkContext('local', 'test')
>>> b = sc.parallelize([1, 2, 3, 4])
>>> basicAvg(b)
2.5
"""
import sys
from pyspark import SparkContext
def basicAvg(nums):
    """Compute the avg"""
    # Pair every element with a count of 1, then fold (sum, count) pairs.
    total, count = nums.map(lambda x: (x, 1)).fold(
        (0, 0), (lambda acc, cur: (acc[0] + cur[0], acc[1] + cur[1])))
    return total / float(count)
if __name__ == "__main__":
    # Optional CLI argument selects the Spark master (defaults to local).
    master = "local"
    if len(sys.argv) == 2:
        master = sys.argv[1]
    sc = SparkContext(master, "Sum")
    nums = sc.parallelize([1, 2, 3, 4])
    avg = basicAvg(nums)
    # NOTE: Python 2 print statement -- this module targets Python 2.
    print avg
|
Add a basic avg example
|
Add a basic avg example
|
Python
|
mit
|
kpraveen420/learning-spark,mmirolim/learning-spark,shimizust/learning-spark,jindalcastle/learning-spark,JerryTseng/learning-spark,GatsbyNewton/learning-spark,tengteng/learning-spark,tengteng/learning-spark,feynman0825/learning-spark,SunGuo/learning-spark,bhagatsingh/learning-spark,baokunguo/learning-spark-examples,diogoaurelio/learning-spark,feynman0825/learning-spark,diogoaurelio/learning-spark,concerned3rdparty/learning-spark,ellis429/learning-spark-examples,gaoxuesong/learning-spark,shimizust/learning-spark,XiaoqingWang/learning-spark,huixiang/learning-spark,bhagatsingh/learning-spark,DINESHKUMARMURUGAN/learning-spark,qingkaikong/learning-spark-examples,mmirolim/learning-spark,XiaoqingWang/learning-spark,mmirolim/learning-spark,kpraveen420/learning-spark,ellis429/learning-spark,ellis429/learning-spark,DINESHKUMARMURUGAN/learning-spark,GatsbyNewton/learning-spark,SunGuo/learning-spark,SunGuo/learning-spark,feynman0825/learning-spark,mohitsh/learning-spark,databricks/learning-spark,zaxliu/learning-spark,obinsanni/learning-spark,ellis429/learning-spark-examples,UsterNes/learning-spark,huixiang/learning-spark,huixiang/learning-spark,obinsanni/learning-spark,UsterNes/learning-spark,JerryTseng/learning-spark,junwucs/learning-spark,noprom/learning-spark,gaoxuesong/learning-spark,junwucs/learning-spark,rex1100/learning-spark,GatsbyNewton/learning-spark,qingkaikong/learning-spark-examples,negokaz/learning-spark,JerryTseng/learning-spark,negokaz/learning-spark,ramyasrigangula/learning-spark,anjuncc/learning-spark-examples,asarraf/learning-spark,ramyasrigangula/learning-spark,noprom/learning-spark,jindalcastle/learning-spark,SunGuo/learning-spark,NBSW/learning-spark,ellis429/learning-spark-examples,holdenk/learning-spark-examples,jaehyuk/learning-spark,tengteng/learning-spark,zaxliu/learning-spark,anjuncc/learning-spark-examples,NBSW/learning-spark,shimizust/learning-spark,qingkaikong/learning-spark-examples,ramyasrigangula/learning-spark,rex1100/learning-spark,zaxliu/learn
ing-spark,ellis429/learning-spark,kpraveen420/learning-spark,obinsanni/learning-spark,huydx/learning-spark,NBSW/learning-spark,shimizust/learning-spark,zaxliu/learning-spark,noprom/learning-spark,XiaoqingWang/learning-spark,mohitsh/learning-spark,dsdinter/learning-spark-examples,huixiang/learning-spark,bhagatsingh/learning-spark,asarraf/learning-spark,UsterNes/learning-spark,ellis429/learning-spark-examples,JerryTseng/learning-spark,diogoaurelio/learning-spark,holdenk/learning-spark-examples,huydx/learning-spark,huixiang/learning-spark,huydx/learning-spark,kpraveen420/learning-spark,bhagatsingh/learning-spark,obinsanni/learning-spark,concerned3rdparty/learning-spark,kod3r/learning-spark,baokunguo/learning-spark-examples,feynman0825/learning-spark,GatsbyNewton/learning-spark,obinsanni/learning-spark,anjuncc/learning-spark-examples,asarraf/learning-spark,databricks/learning-spark,asarraf/learning-spark,jindalcastle/learning-spark,bhagatsingh/learning-spark,diogoaurelio/learning-spark,XiaoqingWang/learning-spark,coursera4ashok/learning-spark,kod3r/learning-spark,ellis429/learning-spark-examples,huydx/learning-spark,junwucs/learning-spark,negokaz/learning-spark,junwucs/learning-spark,SunGuo/learning-spark,DINESHKUMARMURUGAN/learning-spark,databricks/learning-spark,gaoxuesong/learning-spark,DINESHKUMARMURUGAN/learning-spark,GatsbyNewton/learning-spark,databricks/learning-spark,dsdinter/learning-spark-examples,mohitsh/learning-spark,kpraveen420/learning-spark,baokunguo/learning-spark-examples,shimizust/learning-spark,anjuncc/learning-spark-examples,negokaz/learning-spark,qingkaikong/learning-spark-examples,junwucs/learning-spark,zaxliu/learning-spark,noprom/learning-spark,DINESHKUMARMURUGAN/learning-spark,baokunguo/learning-spark-examples,kod3r/learning-spark,holdenk/learning-spark-examples,feynman0825/learning-spark,NBSW/learning-spark,ramyasrigangula/learning-spark,coursera4ashok/learning-spark,concerned3rdparty/learning-spark,dsdinter/learning-spark-examples,gaoxuesong
/learning-spark,NBSW/learning-spark,asarraf/learning-spark,coursera4ashok/learning-spark,jaehyuk/learning-spark,gaoxuesong/learning-spark,ellis429/learning-spark,tengteng/learning-spark,noprom/learning-spark,mmirolim/learning-spark,huydx/learning-spark,UsterNes/learning-spark,concerned3rdparty/learning-spark,baokunguo/learning-spark-examples,UsterNes/learning-spark,concerned3rdparty/learning-spark,jaehyuk/learning-spark,holdenk/learning-spark-examples,qingkaikong/learning-spark-examples,XiaoqingWang/learning-spark,mohitsh/learning-spark,jindalcastle/learning-spark,kod3r/learning-spark,jaehyuk/learning-spark,dsdinter/learning-spark-examples,kod3r/learning-spark,databricks/learning-spark,anjuncc/learning-spark-examples,negokaz/learning-spark,rex1100/learning-spark,ellis429/learning-spark,dsdinter/learning-spark-examples,mmirolim/learning-spark,JerryTseng/learning-spark,ramyasrigangula/learning-spark,diogoaurelio/learning-spark,coursera4ashok/learning-spark,holdenk/learning-spark-examples,jindalcastle/learning-spark,jaehyuk/learning-spark,tengteng/learning-spark,mohitsh/learning-spark,coursera4ashok/learning-spark
|
|
8003a4b4b2aaaaba54570f670c7a5df93fe8434d
|
tests/pykafka/utils/test_compression.py
|
tests/pykafka/utils/test_compression.py
|
import unittest2
from pykafka.utils import compression
class CompressionTests(unittest2.TestCase):
    """Keeping these simple by verifying what goes in is what comes out."""

    text = "The man in black fled across the desert, and the gunslinger followed."

    def test_gzip(self):
        packed = compression.encode_gzip(self.text)
        self.assertNotEqual(self.text, packed)
        unpacked = compression.decode_gzip(packed)
        self.assertEqual(self.text, unpacked)

    def test_snappy(self):
        packed = compression.encode_snappy(self.text)
        self.assertNotEqual(self.text, packed)
        unpacked = compression.decode_snappy(packed)
        self.assertEqual(self.text, unpacked)

    def test_snappy_xerial(self):
        # Xerial-framed output is still decodable by the plain decoder.
        packed = compression.encode_snappy(self.text, xerial_compatible=True)
        self.assertNotEqual(self.text, packed)
        unpacked = compression.decode_snappy(packed)
        self.assertEqual(self.text, unpacked)


if __name__ == '__main__':
    unittest2.main()
|
Add specific tests for compression.
|
Add specific tests for compression.
|
Python
|
apache-2.0
|
vortec/pykafka,benauthor/pykafka,yungchin/pykafka,aeroevan/pykafka,sammerry/pykafka,thedrow/samsa,wikimedia/operations-debs-python-pykafka,tempbottle/pykafka,yungchin/pykafka,thedrow/samsa,wikimedia/operations-debs-python-pykafka,jofusa/pykafka,thedrow/samsa,jofusa/pykafka,appsoma/pykafka,vortec/pykafka,appsoma/pykafka,sammerry/pykafka,wikimedia/operations-debs-python-pykafka,sontek/pykafka,benauthor/pykafka,fortime/pykafka,sontek/pykafka,fortime/pykafka,aeroevan/pykafka,tempbottle/pykafka,benauthor/pykafka
|
|
6eca5cd06da3a195f226d2b864b8af41b62bda45
|
kargtom/twodim/LongestChain/longestChain_002.py
|
kargtom/twodim/LongestChain/longestChain_002.py
|
def longestChain(words):
    """Return the length of the longest single-character-deletion chain
    among *words*, computed top-down with memoization via lc().

    Fixes over the previous version: the leftover Python-2 debug
    ``print lendict`` (which polluted output and is a syntax error under
    Python 3) is removed, dead commented-out code and the unused
    ``maxpossible`` local are dropped, and ``lenlist`` no longer collects
    one duplicate entry per word (the duplicates caused redundant passes
    over the same length bucket).
    """
    lendict = {}
    lenlist = []
    # Bucket the words by length: lendict maps word-length -> {word: -1},
    # where -1 marks "chain length not yet computed" for the memoizer.
    for word in words:
        l = len(word)
        if l in lendict:
            lendict[l][word] = -1
        else:
            lendict[l] = {word: -1}
            lenlist.append(l)
    lenlist.sort()
    if len(lenlist) == 1:
        # Only one distinct word length: no deletions can reach another
        # word in the set, so every chain has length 1.
        return 1
    maxsize = 1
    # Work from the longest words downward; lc() memoizes shorter results.
    for i in range(len(lenlist) - 1, 0, -1):
        l = lenlist[i]
        for word in lendict[l]:
            lendict = lc(l, word, lendict)
            if lendict[l][word] > maxsize:
                maxsize = lendict[l][word]
    return maxsize
def lc(l, word, lendict):
    """Memoize (into *lendict*) the longest deletion-chain length starting
    at *word* of length *l*, and return the updated dictionary.

    lendict maps length -> {word: chain length}, with -1 meaning "not yet
    computed". The leftover Python-2 debug ``print lendict`` is removed:
    it wrote the whole memo table on every call and is a syntax error
    under Python 3.
    """
    if l not in lendict:
        return lendict
    if word not in lendict[l]:
        return lendict
    if lendict[l][word] != -1:
        # Already computed (memoized).
        return lendict
    if l - 1 not in lendict:
        # No shorter words exist at all: the chain ends here.
        lendict[l][word] = 1
        return lendict
    maxsize = 1
    # Try deleting each character and extend the best chain found.
    for i in range(len(word)):
        nextword = word[:i] + word[i + 1:]
        lendict = lc(l - 1, nextword, lendict)
        if l - 1 in lendict and nextword in lendict[l - 1]:
            tmpsize = lendict[l - 1][nextword] + 1
            if tmpsize > maxsize:
                maxsize = tmpsize
    lendict[l][word] = maxsize
    return lendict
# Ad-hoc manual test driver (Python 2 print statement). Note the second
# assignment overwrites the first word list; only the last uncommented
# `words` value is actually exercised.
words = ['a', 'abcd', 'bcd', 'abd', 'cd', 'c']
words = ['a', 'aa', 'ba', 'aaa', 'aab', 'aac', 'aad', 'kkkk']
#words = ['bcd', 'abcd', 'a', 'aa', 'aaa', 'bbb']
#words = ['a', 'b', 'ba', 'bca', 'bda', 'bdca']
print longestChain(words)
|
Create the top-down longestChain with optimization
|
Create the top-down longestChain with optimization
horizontal optimization and vertical optimization
|
Python
|
mit
|
Chasego/codi,Chasego/cod,Chasego/codirit,Chasego/codirit,Chasego/codirit,Chasego/codirit,cc13ny/algo,Chasego/codi,cc13ny/Allin,cc13ny/algo,cc13ny/algo,Chasego/cod,Chasego/cod,cc13ny/algo,cc13ny/Allin,cc13ny/algo,cc13ny/Allin,Chasego/codi,Chasego/codi,cc13ny/Allin,Chasego/codirit,Chasego/cod,Chasego/codi,Chasego/cod,cc13ny/Allin
|
|
a906d3523bb9d6a1ca233a9d5e0e33c477c96e60
|
test/test_retriever.py
|
test/test_retriever.py
|
"""Tests for the EcoData Retriever"""
from StringIO import StringIO
from engine import Engine
def test_escape_single_quotes():
    """Single quotes in a value must come back backslash-escaped."""
    engine = Engine()
    escaped = engine.escape_single_quotes("1,2,3,'a'")
    assert escaped == "1,2,3,\\'a\\'"
def test_escape_double_quotes():
    """Double quotes in a value must come back backslash-escaped."""
    engine = Engine()
    escaped = engine.escape_double_quotes('"a",1,2,3')
    assert escaped == '\\"a\\",1,2,3'
def test_drop_statement():
    """drop_statement() should render a DROP ... IF EXISTS query."""
    engine = Engine()
    statement = engine.drop_statement('TABLE', 'tablename')
    assert statement == "DROP TABLE IF EXISTS tablename"
|
Add some basic unit tests for functions with no dependencies
|
Add some basic unit tests for functions with no dependencies
|
Python
|
mit
|
embaldridge/retriever,bendmorris/retriever,goelakash/retriever,embaldridge/retriever,henrykironde/deletedret,davharris/retriever,henrykironde/deletedret,goelakash/retriever,bendmorris/retriever,davharris/retriever,davharris/retriever,bendmorris/retriever,embaldridge/retriever
|
|
e966e0774ad1474335a654cbe8f594d61ee97c3d
|
proselint/checks/misc/creditcard.py
|
proselint/checks/misc/creditcard.py
|
# -*- coding: utf-8 -*-
"""MSC: Credit card number printed.
---
layout: post
error_code: MSC
source: ???
source_url: ???
title: credit card number printed
date: 2014-06-10 12:31:19
categories: writing
---
Credit card number printed.
"""
from proselint.tools import blacklist
err = "MSC102"
msg = u"Don't put credit card numbers in plain text."

# Issuer prefixes: Visa, MasterCard, American Express, Diners Club, Discover.
# BUG FIX: the original classes "[4,7]" and "[0,6,8]" also matched a literal
# comma (commas inside [...] are ordinary characters), so text like
# "3,4111111111111" was flagged; the commas are removed here.  Raw strings
# keep the \d escapes regex-safe.
credit_card_numbers = [
    r"4\d{15}",        # Visa
    r"5[1-5]\d{14}",   # MasterCard
    r"3[47]\d{13}",    # American Express
    r"3[068]\d{12}",   # Diners Club
    r"6011\d{12}",     # Discover
]

check = blacklist(credit_card_numbers, err, msg)
|
Add a credit card number checker
|
Add a credit card number checker
|
Python
|
bsd-3-clause
|
jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint
|
|
6f2f857528d5d1df227f56422222c8de72c2e012
|
tests/functional/test_service_alias.py
|
tests/functional/test_service_alias.py
|
# Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import botocore.session
# service_name => alias
ALIAS_CASES = {
'sagemaker-runtime': 'runtime.sagemaker'
}
def test_can_use_service_alias():
    """Yield one aliased-client comparison per known alias pair."""
    session = botocore.session.get_session()
    for service_name, service_alias in ALIAS_CASES.items():
        yield (_instantiates_the_same_client, session,
               service_name, service_alias)
def _instantiates_the_same_client(session, service_name, service_alias):
    """Both the real service name and its alias must load one service model."""
    common_kwargs = dict(
        region_name='us-east-1',
        aws_access_key_id='foo',
        aws_secret_access_key='bar',
    )
    by_name = session.create_client(service_name, **common_kwargs)
    by_alias = session.create_client(service_alias, **common_kwargs)
    assert (by_name.meta.service_model.service_name ==
            by_alias.meta.service_model.service_name)
|
Add functional test for service name aliases
|
Add functional test for service name aliases
|
Python
|
apache-2.0
|
pplu/botocore,boto/botocore
|
|
e96a8dd7854809e64e27f1b06cd380586a628da0
|
tests/functional/test_six_threading.py
|
tests/functional/test_six_threading.py
|
"""
Regression test for six issue #98 (https://github.com/benjaminp/six/issues/98)
"""
from mock import patch
import sys
import threading
import time
from botocore.vendored import six
_original_setattr = six.moves.__class__.__setattr__
def _wrapped_setattr(key, value):
    """Delay, then delegate to the real six.moves __setattr__.

    The sleep widens the race window so a thread context switch can land
    mid-assignment, simulating six issue #98.
    """
    time.sleep(0.1)
    return _original_setattr(six.moves, key, value)
def _reload_six():
    """Reset six's moved modules by reloading the vendored six.

    The race in six._LazyDescr.__get__ only fires on the first access of
    each moved module, so each run needs six in a freshly-imported state.
    """
    version = sys.version_info
    if version < (3, 0):
        reload(six)  # noqa -- `reload` is a builtin on Python 2
    elif version < (3, 4):
        import imp
        imp.reload(six)
    else:
        import importlib
        importlib.reload(six)
class _ExampleThread(threading.Thread):
    """Thread that touches six.moves and records any resulting exception."""

    def __init__(self):
        super(_ExampleThread, self).__init__()
        self.daemon = False
        # Set by run() to sys.exc_info() if the six access fails.
        self.exc_info = None

    def run(self):
        try:
            # Simulate use of six by
            # botocore.configloader.raw_config_parse()
            # Should raise AttributeError if six < 1.9.0
            six.moves.configparser.RawConfigParser()
        except Exception:
            self.exc_info = sys.exc_info()
def test_six_thread_safety():
    """Two threads racing on six.moves must not raise (six issue #98)."""
    _reload_six()
    target = 'botocore.vendored.six.moves.__class__.__setattr__'
    with patch(target, wraps=_wrapped_setattr):
        workers = []
        for _ in range(2):
            worker = _ExampleThread()
            workers.append(worker)
            worker.start()
        # Join in reverse start order; re-raise the first failure found.
        while workers:
            worker = workers.pop()
            worker.join()
            if worker.exc_info:
                six.reraise(*worker.exc_info)
|
Add test for six.moves thread safety
|
Add test for six.moves thread safety
|
Python
|
apache-2.0
|
pplu/botocore,boto/botocore
|
|
0400dce44abca87cc0c0069b062f1f6942640125
|
tests/test_cli_info.py
|
tests/test_cli_info.py
|
# -*- coding: utf-8 -*-
import pytest
COOKIECUTTER_DJANGO_INFO = """Name: cookiecutter-django
Author: pydanny
Repository: https://github.com/pydanny/cookiecutter-django
Context: {
"author_name": "Your Name",
"description": "A short description of the project.",
"domain_name": "example.com",
"email": "Your email",
"now": "2016/03/05",
"open_source_license": [
"MIT",
"BSD",
"Not open source"
],
"project_name": "project_name",
"repo_name": "{{ cookiecutter.project_name|replace(' ', '_') }}",
"timezone": "UTC",
"use_celery": "n",
"use_mailhog": "n",
"use_newrelic": "n",
"use_opbeat": "n",
"use_python2": "n",
"use_sentry": "n",
"use_whitenoise": "y",
"version": "0.1.0",
"windows": "n",
"year": "{{ cookiecutter.now[:4] }}"
}
"""
@pytest.fixture
def templates_file():
    """Path (relative to the repo root) of the checked-in templates fixture."""
    return 'tests/templates.json'
def test_template_info(cli_runner, tmp_rc, templates_file):
    """`info <template>` must print the template's stored context verbatim."""
    args = [
        '-c', tmp_rc, 'info',
        '--load-file', templates_file,
        'cookiecutter-django',
    ]
    result = cli_runner(args)
    assert result.exit_code == 0
    assert result.output == COOKIECUTTER_DJANGO_INFO
|
Implement an integration test for cibopath info
|
Implement an integration test for cibopath info
|
Python
|
bsd-3-clause
|
hackebrot/cibopath
|
|
a98c80247b5ec978e811cd6444596010d67c6a45
|
tests/test_randvars.py
|
tests/test_randvars.py
|
"""Tests of randvars module"""
import numpy as np
from dapper.tools.randvars import GaussRV
def test_gauss_rv():
    """A GaussRV with zero mean and zero covariance samples all zeros."""
    dim = 4
    num_samples = 5
    rv = GaussRV(mu=0, C=0, M=dim)
    expected = np.zeros((num_samples, dim))
    assert (rv.sample(num_samples) == expected).all()

test_gauss_rv()  # also run on plain import, mirroring the original script
|
Add simple test of GaussRV class
|
Add simple test of GaussRV class
|
Python
|
mit
|
nansencenter/DAPPER,nansencenter/DAPPER
|
|
6ab93cfc86f1fdf714a9921fcefd8f0dc36d55d1
|
test/test_i18n_keys.py
|
test/test_i18n_keys.py
|
# -*- coding: utf-8 -*-
import re
import glob
import json
###############################################################################
# Find used keys in python code #
###############################################################################
def find_expected_string_keys():
    """Yield every i18n key referenced from the moulinette Python sources.

    Recognized call patterns:
        m18n.g("foo")
        MoulinetteError("foo")
        MoulinetteValidationError("foo")
        MoulinetteAuthenticationError("foo")

    Keys ending in '_' are assembled dynamically at runtime and therefore
    cannot be checked statically; they are skipped.

    Improvements over the original: the four identical findall loops are
    collapsed into one loop over a pattern list (same per-file yield order),
    and files are opened with a context manager so handles are closed.
    """
    patterns = [
        re.compile(r"m18n\.g\(\s*[\"\'](\w+)[\"\']"),
        re.compile(r"MoulinetteError\([\'\"](\w+)[\'\"]"),
        re.compile(r"MoulinetteValidationError\([\'\"](\w+)[\'\"]"),
        re.compile(r"MoulinetteAuthenticationError\([\'\"](\w+)[\'\"]"),
    ]

    python_files = glob.glob("moulinette/*.py")
    python_files.extend(glob.glob("moulinette/*/*.py"))

    for python_file in python_files:
        with open(python_file) as f:
            content = f.read()
        for pattern in patterns:
            for match in pattern.findall(content):
                if match.endswith("_"):
                    # Dynamically-built key; skip.
                    continue
                yield match
###############################################################################
# Load en locale json keys #
###############################################################################
def keys_defined_for_en():
    """Return the i18n keys defined in the English locale file.

    Fix: the original left the file handle open (open().read() with no
    close); a context manager closes it deterministically.
    """
    with open("locales/en.json") as f:
        return json.loads(f.read()).keys()
###############################################################################
# Compare keys used and keys defined #
###############################################################################
# Computed once at import time so both tests below compare the same sets.
expected_string_keys = set(find_expected_string_keys())
keys_defined = set(keys_defined_for_en())
def test_undefined_i18n_keys():
    """Every key used in the code must be defined in locales/en.json."""
    missing = sorted(expected_string_keys - keys_defined)
    if missing:
        raise Exception(
            "Those i18n keys should be defined in en.json:\n"
            " - " + "\n - ".join(missing)
        )
def test_unused_i18n_keys():
    """Every key defined in locales/en.json must be referenced somewhere."""
    dead_keys = sorted(keys_defined - expected_string_keys)
    if dead_keys:
        raise Exception(
            "Those i18n keys appears unused:\n" " - " + "\n - ".join(dead_keys)
        )
|
Add a test for i18n keys
|
Add a test for i18n keys
|
Python
|
agpl-3.0
|
YunoHost/moulinette
|
|
d616adf1ec2a2326f15607cbb30fee14c8023af2
|
dojo/db_migrations/0021_auto_20191102_0956.py
|
dojo/db_migrations/0021_auto_20191102_0956.py
|
# Generated by Django 2.2.4 on 2019-11-02 09:56
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: installs a wider CVE validator
    # (^[A-Z]{1,3}-\d{4}-\d{4,7}$ -- 1-3 uppercase letters, 4 digits,
    # then 4-7 digits) on the `cve` field of both Finding and
    # Finding_Template.  Do not hand-edit the field definitions.

    dependencies = [
        ('dojo', '0020_system_settings_allow_anonymous_survey_repsonse'),
    ]

    operations = [
        migrations.AlterField(
            model_name='finding',
            name='cve',
            field=models.TextField(max_length=20, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'. ", regex='^[A-Z]{1,3}-\\d{4}-\\d{4,7}$')]),
        ),
        migrations.AlterField(
            model_name='finding_template',
            name='cve',
            field=models.TextField(max_length=20, null=True, validators=[django.core.validators.RegexValidator(message="Vulnerability ID must be entered in the format: 'ABC-9999-9999'. ", regex='^[A-Z]{1,3}-\\d{4}-\\d{4,7}$')]),
        ),
    ]
|
Add fix for CVE regex
|
Add fix for CVE regex
Signed-off-by: Kirill Gotsman <[email protected]>
|
Python
|
bsd-3-clause
|
rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo
|
|
21420f6c730fb7e4063cddd28de3e7580c6efb36
|
bin/versionbuild.py
|
bin/versionbuild.py
|
#!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013-2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Version build stamping script.
This module generates and inserts a patch component of the semantic version
stamp for Barbican, intended to ensure that a strictly monotonically increasing
version is produced for consecutive development releases. Some repositories
such as yum use this increasing semantic version to select the latest
package for installations.
This process may not be required if a bug in the 'pbr' library is fixed:
https://bugs.launchpad.net/pbr/+bug/1206730
"""
import os
from datetime import datetime
from time import mktime
# Determine version of this application.
SETUP_FILE = 'setup.cfg'
VERSIONFILE = os.path.join(SETUP_FILE)
current_dir = os.getcwd()
# When the script is run from inside bin/, setup.cfg lives one level up.
if current_dir.endswith('bin'):
    VERSIONFILE = os.path.join('..', SETUP_FILE)
def get_patch():
    """Return a strictly monotonically increasing version patch.

    Supplies the 'patch' component of Barbican's semantic version stamp.
    Currently this is the current time as an epoch in whole seconds; a
    build id from the build system could be substituted instead.
    """
    now = datetime.now()
    return int(mktime(now.timetuple()))
def update_versionfile(patch):
    """Update the 'patch' version information per the provided patch."""
    # Write to a sibling temp file, then rename over the original so a
    # failure mid-write cannot leave a corrupted setup.cfg behind.
    temp_name = VERSIONFILE + '~'
    file_new = open(temp_name, 'w')
    try:
        with open(VERSIONFILE, 'r') as file_old:
            for line in file_old:
                if line.startswith('version ='):
                    subs = line.split('.')
                    if len(subs) <= 2:
                        # 'version = X.Y\n' -> drop the newline, append
                        # '.<patch>\n' as a third component.
                        file_new.write(''.join([line[:-1], '.',
                                                str(patch), '\n']))
                    else:
                        # 'version = X.Y.Z...' -> replace the third component.
                        # With exactly three components the trailing newline
                        # lived in the replaced part, so re-add it.
                        subs[2] = str(patch)
                        file_new.write('.'.join(subs))
                        if len(subs) == 3:
                            file_new.write('\n')
                else:
                    # Non-version lines pass through unchanged.
                    file_new.write(line)
    finally:
        file_new.close()
    os.rename(temp_name, VERSIONFILE)
# Script entry point (Python 2): stamp the current epoch into setup.cfg.
if __name__ == '__main__':
    patch = get_patch()
    print 'patch: ', patch
    update_versionfile(patch)
|
Add script to ensure semantic versions work with continuous build.
|
Add script to ensure semantic versions work with continuous build.
This module generates and inserts a patch component of the semantic version
stamp for Barbican, intended to ensure that a strictly monotonically increasing
version is produced for consecutive development releases. Some repositories
such as yum use this increasing semantic version to select the latest
package for installations.
This process may not be required if a bug in the 'pbr' library is fixed:
https://bugs.launchpad.net/pbr/+bug/1206730
Change-Id: I19623d76b161331c2033ea129e7f3f67518520fa
|
Python
|
apache-2.0
|
cneill/barbican,cneill/barbican,cloudkeep/barbican,openstack/barbican,jmvrbanac/barbican,cloudkeep/barbican,jmvrbanac/barbican,openstack/barbican,MCDong/barbican,MCDong/barbican
|
|
ab142f01ec932faaed05441b74c4be760a963374
|
tests/rules_tests/RulesTest.py
|
tests/rules_tests/RulesTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 15.08.2017 15:31
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import *
class RulesTest(TestCase):
    """Placeholder test case for rule-passing behaviour; no tests yet."""
    pass


if __name__ == '__main__':
    main()
|
Add file for test of behaviour when rules are passed
|
Add file for test of behaviour when rules are passed
|
Python
|
mit
|
PatrikValkovic/grammpy
|
|
7e97663eb29452769103684fce9166a0db17ab5a
|
speedtest.py
|
speedtest.py
|
#!/usr/bin/env python
from fastcomp import compare
import random
import string
def randomstr(minlen=5, maxlen=7):
    """Return a random binary ('0'/'1') string of length minlen..maxlen."""
    alphabet = '01'
    size = random.randint(minlen, maxlen)
    # One randint then `size` choice calls -- same RNG consumption as a
    # generator-expression join, so seeded runs are reproducible.
    chars = [random.choice(alphabet) for _ in range(size)]
    return ''.join(chars)
if __name__ == "__main__":
    import timeit

    # Set up conditions: 1000 random string pairs built once, outside the
    # timed statement, so only compare() itself is measured.
    setup = """
from __main__ import compare, randomstr
cases = [(randomstr(), randomstr()) for x in range(1000)]"""
    main = "for case in cases: compare(*case)"
    loops = 100

    # Run timeit
    timer = timeit.Timer(main, setup=setup)
    result = timer.repeat(number=loops)

    # Result
    # NOTE(review): "best of 3" assumes repeat()'s default repeat count is 3;
    # newer Pythons default to 5 -- verify the label on this version.
    best = round(min(result)*1000/loops, 2)
    print('{} loops, best of 3: {} msec per loop'.format(loops, best))
|
Add a script for speed measurement.
|
Add a script for speed measurement.
|
Python
|
mit
|
fujimotos/fastcomp
|
|
f2396baa459c61fbbcd3c4889868f813a373d7e8
|
tests/providers/conftest.py
|
tests/providers/conftest.py
|
import pytest
from web3.web3.ipcprovider import IPCProvider
from web3.web3.rpcprovider import TestRPCProvider, RPCProvider
@pytest.fixture(params=['tester', 'rpc', 'ipc'])
def disconnected_provider(request):
    """Supply a Provider that's not connected to a node.

    (See also the web3 fixture.)
    """
    kind = request.param
    if kind == 'tester':
        # Start the in-process test server, then shut it down immediately
        # so the provider is left pointing at a dead endpoint.
        provider = TestRPCProvider()
        provider.server.shutdown()
        provider.server.server_close()
        return provider
    if kind == 'rpc':
        return RPCProvider(port=9999)  # nothing listens on this port
    if kind == 'ipc':
        return IPCProvider(ipc_path='nonexistent')
    raise ValueError(kind)
|
Add fixture for disconnected providers
|
Add fixture for disconnected providers
|
Python
|
mit
|
pipermerriam/web3.py,shravan-shandilya/web3.py
|
|
824c591204c7939a854d1d618cf32358387dbff0
|
tests/test_location.py
|
tests/test_location.py
|
from SUASSystem import *
import math
import numpy
import unittest
from dronekit import LocationGlobalRelative
class locationTestCase(unittest.TestCase):
    """Unit tests for the SUASSystem Location wrapper."""

    def setUp(self):
        # Location(5, 12, 20) -- presumably (lat, lon, alt); confirm against
        # the Location constructor before extending these tests.
        self.position = Location(5, 12, 20)

    def test_get_lat(self):
        # Fix: assertEqual replaces the deprecated assertEquals alias
        # (removed in Python 3.12).
        self.assertEqual(5, self.position.get_lat())
|
Add initial location unit tests
|
Add initial location unit tests
|
Python
|
mit
|
FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition
|
|
a7d8442482b7862b96adf3c8f40015072221f600
|
pygments/styles/igor.py
|
pygments/styles/igor.py
|
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic
class IgorStyle(Style):
    # Pygments style imitating the official Igor editor coloring.
    default_style = ""

    # Token -> style-string map; tokens not listed fall back to default_style.
    styles = {
        Comment: 'italic #FF0000',
        Keyword: '#0000FF',
        Name.Function: '#C34E00',
        Name.Class: '#007575',
        String: '#009C00'
    }
|
Add custom style which imitates the official coloring
|
Add custom style which imitates the official coloring
|
Python
|
bsd-2-clause
|
dscorbett/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,pygments/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.