commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
e72c7fb4249895f2d6f4c9f36153786b75d5e8fa
|
chainer/functions/reshape.py
|
chainer/functions/reshape.py
|
import numpy
from chainer import function
from chainer.utils import type_check
class Reshape(function.Function):
type_check_prod = type_check.Variable(numpy.prod, 'prod')
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
self.type_check_prod(in_types[0].shape) ==
self.type_check_prod(self.shape)
)
def check_type_backward(self, in_types, out_types):
type_check.expect(
out_types.size() == 1,
self.type_check_prod(in_types[0].shape) ==
self.type_check_prod(out_types[0].shape)
)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
import numpy
from chainer import function
from chainer.utils import type_check
_type_check_prod = type_check.Variable(numpy.prod, 'prod')
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
self.shape = shape
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
_type_check_prod(in_types[0].shape) ==
_type_check_prod(self.shape)
)
def check_type_backward(self, in_types, out_types):
type_check.expect(
out_types.size() == 1,
_type_check_prod(in_types[0].shape) ==
_type_check_prod(out_types[0].shape)
)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (~chainer.Variable): Input variable.
shape (tuple of ints): Target shape.
Returns:
~chainer.Variable: Variable that holds a reshaped version of the input
variable.
"""
return Reshape(shape)(x)
|
Move type_chack_prod module level variable and change its name to _type_check_prod
|
Move type_chack_prod module level variable and change its name to _type_check_prod
|
Python
|
mit
|
kikusu/chainer,niboshi/chainer,sou81821/chainer,elviswf/chainer,anaruse/chainer,okuta/chainer,okuta/chainer,AlpacaDB/chainer,tkerola/chainer,ktnyt/chainer,wkentaro/chainer,chainer/chainer,woodshop/complex-chainer,jfsantos/chainer,keisuke-umezawa/chainer,yanweifu/chainer,truongdq/chainer,niboshi/chainer,aonotas/chainer,ronekko/chainer,cupy/cupy,cemoody/chainer,chainer/chainer,benob/chainer,tigerneil/chainer,ytoyama/yans_chainer_hackathon,wkentaro/chainer,niboshi/chainer,sinhrks/chainer,kuwa32/chainer,hvy/chainer,bayerj/chainer,keisuke-umezawa/chainer,sinhrks/chainer,pfnet/chainer,kiyukuta/chainer,hvy/chainer,AlpacaDB/chainer,jnishi/chainer,muupan/chainer,jnishi/chainer,laysakura/chainer,tscohen/chainer,t-abe/chainer,rezoo/chainer,okuta/chainer,wkentaro/chainer,benob/chainer,woodshop/chainer,keisuke-umezawa/chainer,hvy/chainer,truongdq/chainer,kashif/chainer,keisuke-umezawa/chainer,muupan/chainer,ktnyt/chainer,hidenori-t/chainer,umitanuki/chainer,Kaisuke5/chainer,ktnyt/chainer,okuta/chainer,ikasumi/chainer,chainer/chainer,cupy/cupy,jnishi/chainer,chainer/chainer,jnishi/chainer,1986ks/chainer,kikusu/chainer,niboshi/chainer,minhpqn/chainer,wkentaro/chainer,cupy/cupy,ysekky/chainer,delta2323/chainer,cupy/cupy,masia02/chainer,wavelets/chainer,hvy/chainer,ktnyt/chainer,t-abe/chainer
|
737bf244f36b73a54b5b4f89f0c7e604d3f34b72
|
tests/grammar_term-nonterm_test/NonterminalGetTest.py
|
tests/grammar_term-nonterm_test/NonterminalGetTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Grammar
from grammpy import Nonterminal
class TempClass(Nonterminal):
pass
class Second(Nonterminal):
pass
class Third(Nonterminal):
pass
class TerminalGetTest(TestCase):
pass
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import Grammar
from grammpy import Nonterminal
class TempClass(Nonterminal):
pass
class Second(Nonterminal):
pass
class Third(Nonterminal):
pass
class TerminalGetTest(TestCase):
def test_getNontermEmpty(self):
gr = Grammar()
self.assertIsNone(gr.get_nonterm(TempClass))
self.assertIsNone(gr.get_nonterm(Second))
self.assertIsNone(gr.get_nonterm(Third))
def test_getNontermClass(self):
gr = Grammar()
gr.add_nonterm(TempClass)
self.assertEqual(gr.get_nonterm(TempClass), TempClass)
def test_getNontermArray(self):
gr = Grammar()
gr.add_nonterm([TempClass, Second, Third])
g = gr.get_term([Second, TempClass])
for i in g:
self.assertTrue(i in [TempClass, Second, Third])
self.assertEqual(g[0], Second)
self.assertEqual(g[1], TempClass)
def test_dontGetNontermArray(self):
gr = Grammar()
gr.add_term([TempClass, Second])
g = gr.get_term([TempClass, Third])
self.assertEqual(g[0], TempClass)
self.assertIsNone(g[1])
def test_getNontermTuple(self):
gr = Grammar()
gr.add_term([TempClass, Second, Third])
g = gr.get_term((Third, TempClass))
for i in g:
self.assertTrue(i in [TempClass, Second, Third])
self.assertEqual(g[0], Third)
self.assertEqual(g[1], TempClass)
def test_dontGetNontermTuple(self):
gr = Grammar()
gr.add_term([TempClass, Second])
g = gr.get_term((TempClass, Third))
self.assertEqual(g[0], TempClass)
self.assertIsNone(g[1])
if __name__ == '__main__':
main()
|
Add tests of get nonterms
|
Add tests of get nonterms
|
Python
|
mit
|
PatrikValkovic/grammpy
|
0b1702314fca978db1d0475ff3bc14977e7675a2
|
hxl_proxy/__init__.py
|
hxl_proxy/__init__.py
|
"""
Top-level Flask application for HXL Proxy
David Megginson
January 2015
License: Public Domain
Documentation: http://hxlstandard.org
"""
import os
import requests_cache
from flask import Flask, g, request
from flask_cache import Cache
import werkzeug.datastructures
# Main application object
app = Flask(__name__)
app.config.from_object('hxl_proxy.default_config')
if os.environ.get('HXL_PROXY_CONFIG'):
app.config.from_envvar('HXL_PROXY_CONFIG')
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# Set up cache
cache = Cache(app,config={
'CACHE_TYPE': 'filesystem',
'CACHE_DIR': app.config.get('CACHE_DIR', '/tmp/'),
'CACHE_THRESHOLD': app.config.get('CACHE_MAX_ITEMS', 1000),
'CACHE_DEFAULT_TIMEOUT': app.config.get('CACHE_DEFAULT_TIMEOUT_SECONDS', 3600)
})
requests_cache.install_cache('/tmp/hxl_proxy_requests')
# Needed to register annotations
import hxl_proxy.controllers
# end
|
"""
Top-level Flask application for HXL Proxy
David Megginson
January 2015
License: Public Domain
Documentation: http://hxlstandard.org
"""
import os
import requests_cache
from flask import Flask, g, request
from flask_cache import Cache
import werkzeug.datastructures
# Main application object
app = Flask(__name__)
app.config.from_object('hxl_proxy.default_config')
if os.environ.get('HXL_PROXY_CONFIG'):
app.config.from_envvar('HXL_PROXY_CONFIG')
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
# Set up cache
cache = Cache(app,config={
'CACHE_TYPE': 'filesystem',
'CACHE_DIR': app.config.get('CACHE_DIR', '/tmp/'),
'CACHE_THRESHOLD': app.config.get('CACHE_MAX_ITEMS', 1000),
'CACHE_DEFAULT_TIMEOUT': app.config.get('CACHE_DEFAULT_TIMEOUT_SECONDS', 3600)
})
requests_cache.install_cache('/tmp/hxl_proxy_requests', expire_after=3600)
# Needed to register annotations
import hxl_proxy.controllers
# end
|
Add 1-hour expiry to requests_cache (formerly 5 minutes).
|
Add 1-hour expiry to requests_cache (formerly 5 minutes).
|
Python
|
unlicense
|
HXLStandard/hxl-proxy,HXLStandard/hxl-proxy,HXLStandard/hxl-proxy,HXLStandard/hxl-proxy
|
093b08f6bd03bd938ae7b7a18297708faa353766
|
django_lightweight_queue/middleware/transaction.py
|
django_lightweight_queue/middleware/transaction.py
|
from django.db import transaction, connection
class TransactionMiddleware(object):
def process_job(self, job):
if not connection.in_atomic_block:
transaction.set_autocommit(False)
def process_result(self, job, result, duration):
if not connection.in_atomic_block:
transaction.commit()
def process_exception(self, job, time_taken, *exc_info):
if not connection.in_atomic_block:
transaction.rollback()
# Legacy
if not hasattr(connection, 'in_atomic_block'):
class TransactionMiddleware(object):
def process_job(self, job):
transaction.enter_transaction_management()
transaction.managed(True)
def process_result(self, job, result, duration):
if not transaction.is_managed():
return
if transaction.is_dirty():
transaction.commit()
transaction.leave_transaction_management()
def process_exception(self, job, time_taken, *exc_info):
if transaction.is_dirty():
transaction.rollback()
transaction.leave_transaction_management()
|
from django.db import transaction, connection
class TransactionMiddleware(object):
def process_job(self, job):
transaction.atomic(savepoint=False).__enter__()
def process_result(self, job, result, duration):
transaction.atomic(savepoint=False).__exit__(None, None, None)
def process_exception(self, job, time_taken, *exc_info):
transaction.atomic(savepoint=False).__exit__(*exc_info)
# Legacy
if not hasattr(connection, 'in_atomic_block'):
class TransactionMiddleware(object):
def process_job(self, job):
transaction.enter_transaction_management()
transaction.managed(True)
def process_result(self, job, result, duration):
if not transaction.is_managed():
return
if transaction.is_dirty():
transaction.commit()
transaction.leave_transaction_management()
def process_exception(self, job, time_taken, *exc_info):
if transaction.is_dirty():
transaction.rollback()
transaction.leave_transaction_management()
|
Use Django's Atomic decorator logic
|
Use Django's Atomic decorator logic
We now keep Autocommit on it’s new default of True, as we only need the
ability to rollback the contents of a queue job. By setting
savepoint=False, the whole job will roll back if anything fails, rather
than just up to the containing savepoint.
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue
|
9eec7f7f39dc7e1af6e78e4be8d01b50626a4eb5
|
tests/acceptance/test_scoring.py
|
tests/acceptance/test_scoring.py
|
import shelve
def test_shows_player_rating(browser, test_server, database_url):
with shelve.open(database_url) as db:
db.clear()
db['p1'] = 1000
app = ScoringApp(browser, test_server)
app.visit('/')
app.shows('P1 1000')
def test_user_adding(browser, test_server):
app = ScoringApp(browser, test_server)
app.visit('/players')
app.add_player('test')
app.is_in_page('/players/test')
app.shows('TEST 1000')
class ScoringApp(object):
def __init__(self, browser, get_url):
self._browser = browser
self._get_url = get_url
def visit(self, url):
self._browser.visit(self._get_url(url))
def shows(self, text):
assert self._browser.is_text_present(text)
def add_player(self, name):
self._browser.fill('player-name', name)
self._browser.find_by_id('submit').click()
def is_in_page(self, url):
assert self._browser.url == self._get_url(url)
|
import shelve
from whatsmyrank.players import START_RANK
from whatsmyrank.players import PlayerRepository
def test_shows_player_rating(browser, test_server, database_url):
player_repo = PlayerRepository(database_url, START_RANK)
player_repo.create('p1')
app = ScoringApp(browser, test_server)
app.visit('/')
app.shows('P1 1000')
def test_user_adding(browser, test_server):
app = ScoringApp(browser, test_server)
app.visit('/players')
app.add_player('test')
app.is_in_page('/players/test')
app.shows('TEST 1000')
class ScoringApp(object):
def __init__(self, browser, get_url):
self._browser = browser
self._get_url = get_url
def visit(self, url):
self._browser.visit(self._get_url(url))
def shows(self, text):
assert self._browser.is_text_present(text)
def add_player(self, name):
self._browser.fill('player-name', name)
self._browser.find_by_id('submit').click()
def is_in_page(self, url):
assert self._browser.url == self._get_url(url)
|
Remove database details from acceptance test
|
Remove database details from acceptance test
|
Python
|
bsd-2-clause
|
abele/whatsmyrank,abele/whatsmyrank
|
d436bcc20be8eb81960a53d442f699e42e2f9ea7
|
src/tkjoincsv.py
|
src/tkjoincsv.py
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if not os.path.isfile(output_filename):
exit(0)
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
|
import tkFileDialog
import joincsv
import os.path
import sys
if __name__ == '__main__':
filetypes=[("Spreadsheets", "*.csv"),
("Spreadsheets", "*.xls"),
("Spreadsheets", "*.xlsx")]
if len(sys.argv) == 2:
input_filename = sys.argv[1]
else:
input_filename = tkFileDialog.askopenfilename(filetypes=filetypes)
if not os.path.isfile(input_filename):
exit(0)
output_filename = tkFileDialog.asksaveasfilename(filetypes=filetypes, defaultextension=".csv")
if output_filename:
joiner = joincsv.RecordJoiner(input_filename)
joiner.save(output_filename)
|
Allow saving to a file that does not already exist again.
|
Allow saving to a file that does not already exist again.
|
Python
|
apache-2.0
|
peterSW/corow
|
342d62a42bb4e1993bbe9d755e6daabcaffe4122
|
chdb.py
|
chdb.py
|
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE categories
''')
db.execute('''
DROP TABLE articles
''')
db.execute('''
DROP TABLE snippets
''')
db.execute('''
DROP TABLE articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
import sqlite3
DB_FILENAME = 'citationhunt.sqlite3'
def init_db():
return sqlite3.connect(DB_FILENAME)
def reset_db():
db = init_db()
with db:
db.execute('''
DROP TABLE IF EXISTS categories
''')
db.execute('''
DROP TABLE IF EXISTS articles
''')
db.execute('''
DROP TABLE IF EXISTS snippets
''')
db.execute('''
DROP TABLE IF EXISTS articles_categories
''')
db.execute('''
CREATE TABLE categories (id TEXT PRIMARY KEY, title TEXT)
''')
db.execute('''
INSERT INTO categories VALUES ("unassigned", "unassigned")
''')
db.execute('''
CREATE TABLE articles_categories (article_id TEXT, category_id TEXT,
FOREIGN KEY(article_id) REFERENCES articles(page_id)
ON DELETE CASCADE,
FOREIGN KEY(category_id) REFERENCES categories(id)
ON DELETE CASCADE)
''')
db.execute('''
CREATE TABLE articles (page_id TEXT PRIMARY KEY, url TEXT,
title TEXT)
''')
db.execute('''
CREATE TABLE snippets (id TEXT PRIMARY KEY, snippet TEXT,
section TEXT, article_id TEXT, FOREIGN KEY(article_id)
REFERENCES articles(page_id) ON DELETE CASCADE)
''')
return db
def create_indices():
db = init_db()
db.execute('''CREATE INDEX IF NOT EXISTS snippets_articles
ON snippets(article_id);''')
|
Revert "Remove IF EXISTS from DROP TABLE when resetting the db."
|
Revert "Remove IF EXISTS from DROP TABLE when resetting the db."
This reverts commit 271668a20a2262fe6211b9f61146ad90d8096486 [formerly a7dce25964cd740b0d0db86b255ede60c913e73d].
Former-commit-id: 08199327c411663a199ebf36379e88a514935399
|
Python
|
mit
|
eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt
|
8b3ca76b980f126912de1bc8ffa067c199693eb3
|
cinder/db/sqlalchemy/migrate_repo/versions/061_add_snapshot_id_timestamp_to_backups.py
|
cinder/db/sqlalchemy/migrate_repo/versions/061_add_snapshot_id_timestamp_to_backups.py
|
# Copyright (c) 2015 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, DateTime, MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
backups = Table('backups', meta, autoload=True)
snapshot_id = Column('snapshot_id', String(length=36))
data_timestamp = Column('data_timestamp', DateTime)
backups.create_column(snapshot_id)
backups.update().values(snapshot_id=None).execute()
backups.create_column(data_timestamp)
backups.update().values(data_timestamp=None).execute()
# Copy existing created_at timestamp to data_timestamp
# in the backups table.
backups_list = list(backups.select().execute())
for backup in backups_list:
backup_id = backup.id
backups.update().\
where(backups.c.id == backup_id).\
values(data_timestamp=backup.created_at).execute()
|
# Copyright (c) 2015 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column, DateTime, MetaData, String, Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
backups = Table('backups', meta, autoload=True)
snapshot_id = Column('snapshot_id', String(length=36))
data_timestamp = Column('data_timestamp', DateTime)
backups.create_column(snapshot_id)
backups.create_column(data_timestamp)
backups.update().values(data_timestamp=backups.c.created_at).execute()
|
Fix race conditions in migration 061
|
Fix race conditions in migration 061
Migration 061 is supposed to add new `data_timestamp` field and populate
it with value of `created_at` column. This was done by selecting all the
backups and doing updates one-by-one. As it wasn't done in transaction
solution was prone to race condition when a new backup is added while
running the migration. This means that this migration could cause
problems when running in live environment. With blueprint
online-schema-upgrades we want to make Cinder able to perform migrations
live.
A solution is to change this statement to a single DB query which
updates all the rows. This commit also removes unnecessary update to
snapshot_id added there. As this column is nullable it will by default
be NULL, so there's no need to set it manually to that value.
As before and after this commit the migration does logically the same,
this should be safe even if someone is doing inter-release deployments.
An alternative would be to simply add transaction to the update step in
the migration, but that would effectively lock the table for longer
period of time than atomic one-query update.
Closes-Bug: 1530358
Change-Id: Ib8733c096a3dbe2bad00beaf5734936ffcddda33
|
Python
|
apache-2.0
|
phenoxim/cinder,cloudbase/cinder,j-griffith/cinder,phenoxim/cinder,Nexenta/cinder,Datera/cinder,mahak/cinder,mahak/cinder,j-griffith/cinder,ge0rgi/cinder,Nexenta/cinder,openstack/cinder,cloudbase/cinder,eharney/cinder,eharney/cinder,Hybrid-Cloud/cinder,bswartz/cinder,NetApp/cinder,Hybrid-Cloud/cinder,Datera/cinder,openstack/cinder,bswartz/cinder,NetApp/cinder,dims/cinder,dims/cinder
|
960ce03fc6d861c8df8d7aef5042f71c101794ca
|
pavement.py
|
pavement.py
|
# -*- coding: utf-8 -*-
from paver.easy import *
@task
def test(options):
info("Running tests for Python 2")
sh('python2 tests.py')
info("Running tests for Python 3")
sh('python3 tests.py')
@task
def coverage(options):
info("Running coverage for Python 2")
sh('coverage2 run --source ldapom ./tests.py')
sh('coverage2 report')
info("Running coverage for Python 3")
sh('coverage3 run --source ldapom ./tests.py')
sh('coverage3 report')
|
# -*- coding: utf-8 -*-
from paver.easy import *
@task
def test(options):
info("Running tests for Python 2")
sh('python2 -m unittest -v tests')
info("Running tests for Python 3")
sh('python3 -m unittest -v tests')
@task
def coverage(options):
info("Running coverage for Python 2")
sh('coverage2 run --source ldapom ./tests.py')
sh('coverage2 report')
info("Running coverage for Python 3")
sh('coverage3 run --source ldapom ./tests.py')
sh('coverage3 report')
|
Make paver unittest run more verbose
|
Make paver unittest run more verbose
|
Python
|
mit
|
HaDiNet/ldapom
|
f69ea0232881c923e71bd2716fb6faa5d0d99491
|
yithlibraryserver/tests/test_views.py
|
yithlibraryserver/tests/test_views.py
|
# Yith Library Server is a password storage server.
# Copyright (C) 2012 Yaco Sistemas
# Copyright (C) 2012 Alejandro Blanco Escudero <[email protected]>
# Copyright (C) 2012 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
from yithlibraryserver import testing
class ViewTests(testing.TestCase):
def test_home(self):
res = self.testapp.get('/')
self.assertEqual(res.status, '200 OK')
|
# Yith Library Server is a password storage server.
# Copyright (C) 2012 Yaco Sistemas
# Copyright (C) 2012 Alejandro Blanco Escudero <[email protected]>
# Copyright (C) 2012 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
from yithlibraryserver import testing
class ViewTests(testing.TestCase):
def test_home(self):
res = self.testapp.get('/')
self.assertEqual(res.status, '200 OK')
def test_tos(self):
res = self.testapp.get('/tos')
self.assertEqual(res.status, '200 OK')
|
Test the new tos view
|
Test the new tos view
|
Python
|
agpl-3.0
|
lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server
|
69e5e6e3cbddc2c5c2f1ebc13095c88b9f9dbe56
|
src/moore/urls.py
|
src/moore/urls.py
|
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from search import views as search_views
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailcore import urls as wagtail_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
urlpatterns = [
url(r'^admin/', include(wagtailadmin_urls)),
url(r'', include('involvement.urls')),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^search/$', search_views.search, name='search'),
url(r'^accounts/', include('members.urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
# For anything not caught by a more specific rule above, hand over to
# Wagtail's page serving mechanism. This should be the last pattern in
# the list:
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from search import views as search_views
from wagtail.wagtailadmin import urls as wagtailadmin_urls
from wagtail.wagtailcore import urls as wagtail_urls
from wagtail.wagtaildocs import urls as wagtaildocs_urls
urlpatterns = [
url(r'', include('involvement.urls')), # Needs to be imported before wagtail admin
url(r'^admin/', include(wagtailadmin_urls)),
url(r'^documents/', include(wagtaildocs_urls)),
url(r'^search/$', search_views.search, name='search'),
url(r'^accounts/', include('members.urls')),
url(r'^i18n/', include('django.conf.urls.i18n')),
# For anything not caught by a more specific rule above, hand over to
# Wagtail's page serving mechanism. This should be the last pattern in
# the list:
url(r'', include(wagtail_urls)),
]
if settings.DEBUG:
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Serve static and media files from development server
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(
settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT
)
|
Fix the inaccessible pages within the involvement package
|
:bug: Fix the inaccessible pages within the involvement package
|
Python
|
agpl-3.0
|
Dekker1/moore,UTNkar/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore,UTNkar/moore,Dekker1/moore
|
a3f1bd9b27bb605fe363a69a34a92862a1899da1
|
notifications/alliance_selections.py
|
notifications/alliance_selections.py
|
from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class AllianceSelectionNotification(BaseNotification):
def __init__(self, event):
self.event = event
self._event_feed = event.key_name
self._district_feed = event.event_district_abbrev
@property
def _type(self):
return NotificationType.ALLIANCE_SELECTION
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event'] = ModelToDict.eventConverter(self.event)
return data
|
from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class AllianceSelectionNotification(BaseNotification):
def __init__(self, event):
self.event = event
self._event_feed = event.key_name
self._district_feed = event.event_district_abbrev
@property
def _type(self):
return NotificationType.ALLIANCE_SELECTION
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['event_key'] = self.event.key_name
data['message_data']['event'] = ModelToDict.eventConverter(self.event)
return data
|
Add event name and key to alliance selection notifications
|
Add event name and key to alliance selection notifications
This info is already included in 'event', but adding for consistency
|
Python
|
mit
|
jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance
|
30a2a16aff030235941eac3786cc49b42e0ed868
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
bootstrap/conf/salt/state/run-tracking-db/scripts/import_sample_data.py
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://localhost:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
except ValueError as e:
if str(e) != "Table 'pcawg_samples' already exists.":
print (e)
exit(1)
|
import pandas as pd
import sys
df = pd.read_csv(sys.argv[1])
df.columns = [c.lower() for c in df.columns]
from sqlalchemy import create_engine
engine = create_engine('postgresql://localhost:5432/germline_genotype_tracking')
try:
df.to_sql("pcawg_samples", engine)
except ValueError as e:
if str(e) != "Table 'pcawg_samples' already exists.":
print str(e)
exit(1)
else:
print str(e)
|
Print an error message when table already exists without failing the script.
|
Print an error message when table already exists without failing the script.
|
Python
|
mit
|
llevar/germline-regenotyper,llevar/germline-regenotyper
|
69559162db6818371a41b7b3e3092d767d198f3f
|
core/create_event.py
|
core/create_event.py
|
from django.template.loader import render_to_string
from core.default_eventpage_content import (
get_default_eventpage_data,
get_default_menu,
)
from core.models import Event
def create_event_from_event_application(event_application):
""" Creates event based on the data from the object.
If the event has previous_event - we are copying data from the
existing old event for the given city.
"""
name = 'Django Girls {}'.format(event_application.city)
email = '{}@djangogirls.org'.format(event_application.website_slug)
# TODO: take care of copying the event
event = Event.objects.create(
date=event_application.date,
city=event_application.city,
country=event_application.country,
latlng=event_application.latlng,
page_url=event_application.website_slug,
name=name,
page_title=name,
email=email,
)
# populate content & menu from the default event
event.add_default_content()
event.adefault_menu()
return event
|
from django.template.loader import render_to_string
from core.default_eventpage_content import (
get_default_eventpage_data,
get_default_menu,
)
from core.models import Event
def create_event_from_event_application(event_application):
""" Creates event based on the data from the object.
If the event has previous_event - we are copying data from the
existing old event for the given city.
"""
name = 'Django Girls {}'.format(event_application.city)
email = '{}@djangogirls.org'.format(event_application.website_slug)
# TODO: take care of copying the event
event = Event.objects.create(
date=event_application.date,
city=event_application.city,
country=event_application.country,
latlng=event_application.latlng,
page_url=event_application.website_slug,
name=name,
page_title=name,
email=email,
)
# populate content & menu from the default event
event.add_default_content()
event.add_default_menu()
return event
|
Fix typo in method call
|
Fix typo in method call
Ticket #342
|
Python
|
bsd-3-clause
|
DjangoGirls/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,patjouk/djangogirls
|
6113b60187da1da42b26bee81556aad3efef57c4
|
nipype/interfaces/tests/test_afni.py
|
nipype/interfaces/tests/test_afni.py
|
from nipype.interfaces import afni
from nose.tools import assert_equal
def test_To3d():
cmd = afni.To3d()
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d'
cmd = afni.To3d(anat=True)
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -anat'
cmd = afni.To3d()
cmd.inputs.datum = 'float'
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -datum float'
|
from nipype.interfaces import afni
from nose.tools import assert_equal
def test_To3d():
cmd = afni.To3d()
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d'
cmd = afni.To3d(anat=True)
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -anat'
cmd = afni.To3d()
cmd.inputs.datum = 'float'
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -datum float'
cmd = afni.To3d()
cmd.inputs.session = '/home/bobama'
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -session /home/bobama'
cmd = afni.To3d(prefix='foo.nii.gz')
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d -prefix foo.nii.gz'
cmd = afni.To3d(infiles='/data/*.dcm')
cmd._compile_command()
yield assert_equal, cmd.cmdline, 'to3d /data/*.dcm'
|
Add tests to afni To3d.
|
Add tests to afni To3d.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@165 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
rameshvs/nipype,grlee77/nipype,dmordom/nipype,glatard/nipype,fprados/nipype,rameshvs/nipype,wanderine/nipype,mick-d/nipype_source,dgellis90/nipype,sgiavasis/nipype,arokem/nipype,Leoniela/nipype,pearsonlab/nipype,pearsonlab/nipype,FCP-INDI/nipype,mick-d/nipype,satra/NiPypeold,gerddie/nipype,FredLoney/nipype,dgellis90/nipype,JohnGriffiths/nipype,dmordom/nipype,pearsonlab/nipype,mick-d/nipype_source,iglpdc/nipype,glatard/nipype,carolFrohlich/nipype,fprados/nipype,grlee77/nipype,gerddie/nipype,gerddie/nipype,JohnGriffiths/nipype,iglpdc/nipype,blakedewey/nipype,christianbrodbeck/nipype,mick-d/nipype_source,blakedewey/nipype,rameshvs/nipype,dmordom/nipype,dgellis90/nipype,rameshvs/nipype,fprados/nipype,grlee77/nipype,blakedewey/nipype,glatard/nipype,arokem/nipype,wanderine/nipype,sgiavasis/nipype,FCP-INDI/nipype,gerddie/nipype,FredLoney/nipype,carolFrohlich/nipype,JohnGriffiths/nipype,Leoniela/nipype,sgiavasis/nipype,mick-d/nipype,carlohamalainen/nipype,sgiavasis/nipype,pearsonlab/nipype,carolFrohlich/nipype,FredLoney/nipype,FCP-INDI/nipype,blakedewey/nipype,mick-d/nipype,grlee77/nipype,arokem/nipype,christianbrodbeck/nipype,JohnGriffiths/nipype,wanderine/nipype,dgellis90/nipype,FCP-INDI/nipype,wanderine/nipype,mick-d/nipype,carlohamalainen/nipype,iglpdc/nipype,glatard/nipype,iglpdc/nipype,Leoniela/nipype,satra/NiPypeold,carlohamalainen/nipype,arokem/nipype,carolFrohlich/nipype
|
e2bac19e08197dc33756d7b7cf1f88e4ba808ae1
|
PyFVCOM/__init__.py
|
PyFVCOM/__init__.py
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.4.1'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.4.1'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import current_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
|
Add missing module to the project.
|
Add missing module to the project.
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
bf1bafbbebeab86a213e4c4bed0be6f1b18404c6
|
python/grizzly/grizzly/lazy_op.py
|
python/grizzly/grizzly/lazy_op.py
|
"""Summary
"""
from weld.weldobject import *
def to_weld_type(weld_type, dim):
"""Summary
Args:
weld_type (TYPE): Description
dim (TYPE): Description
Returns:
TYPE: Description
"""
for i in xrange(dim):
weld_type = WeldVec(weld_type)
return weld_type
class LazyOpResult:
"""Wrapper class around as yet un-evaluated Weld computation results
Attributes:
dim (int): Dimensionality of the output
expr (WeldObject / Numpy.ndarray): The expression that needs to be
evaluated
weld_type (WeldType): Type of the output object
"""
def __init__(self, expr, weld_type, dim):
"""Summary
Args:
expr (TYPE): Description
weld_type (TYPE): Description
dim (TYPE): Description
"""
self.expr = expr
self.weld_type = weld_type
self.dim = dim
def evaluate(self, verbose=True, decode=True):
"""Summary
Args:
verbose (bool, optional): Description
decode (bool, optional): Description
Returns:
TYPE: Description
"""
if isinstance(self.expr, WeldObject):
return self.expr.evaluate(
to_weld_type(
self.weld_type,
self.dim),
verbose,
decode)
return self.expr
|
"""Summary
"""
from weld.weldobject import *
def to_weld_type(weld_type, dim):
"""Summary
Args:
weld_type (TYPE): Description
dim (TYPE): Description
Returns:
TYPE: Description
"""
for i in xrange(dim):
weld_type = WeldVec(weld_type)
return weld_type
class LazyOpResult:
"""Wrapper class around as yet un-evaluated Weld computation results
Attributes:
dim (int): Dimensionality of the output
expr (WeldObject / Numpy.ndarray): The expression that needs to be
evaluated
weld_type (WeldType): Type of the output object
"""
def __init__(self, expr, weld_type, dim):
"""Summary
Args:
expr (TYPE): Description
weld_type (TYPE): Description
dim (TYPE): Description
"""
self.expr = expr
self.weld_type = weld_type
self.dim = dim
def evaluate(self, verbose=True, decode=True, passes=None):
"""Summary
Args:
verbose (bool, optional): Description
decode (bool, optional): Description
Returns:
TYPE: Description
"""
if isinstance(self.expr, WeldObject):
return self.expr.evaluate(
to_weld_type(
self.weld_type,
self.dim),
verbose,
decode,
passes=passes)
return self.expr
|
Add passes to Grizzly's lazyOp
|
Add passes to Grizzly's lazyOp
|
Python
|
bsd-3-clause
|
rahulpalamuttam/weld,rahulpalamuttam/weld,weld-project/weld,sppalkia/weld,weld-project/weld,weld-project/weld,rahulpalamuttam/weld,weld-project/weld,sppalkia/weld,sppalkia/weld,sppalkia/weld,sppalkia/weld,weld-project/weld,rahulpalamuttam/weld,rahulpalamuttam/weld
|
d12907dd681c1d16c623b9dcceed9ff5e85c2ac6
|
views.py
|
views.py
|
from django.shortcuts import render
def intro(request, template='intro.html'):
response = render(request, template)
response['X-Frame-Options'] = 'SAMEORIGIN'
return response
|
from django.shortcuts import render
from django.views.decorators.clickjacking import xframe_options_sameorigin
@xframe_options_sameorigin
def intro(request, template='intro.html'):
response = render(request, template)
return response
|
Use X-Frame-Options decorator to override middleware.
|
Use X-Frame-Options decorator to override middleware.
|
Python
|
bsd-3-clause
|
m8ttyB/pontoon-intro,mathjazz/pontoon-intro,mathjazz/pontoon-intro,Osmose/pontoon-intro,jotes/pontoon-intro,Osmose/pontoon-intro,jotes/pontoon-intro,m8ttyB/pontoon-intro,jotes/pontoon-intro,mathjazz/pontoon-intro,m8ttyB/pontoon-intro,Osmose/pontoon-intro
|
ffc01b11b0a63b22ddab341e2f0cab0707551409
|
src/puzzle/problems/logic_problem.py
|
src/puzzle/problems/logic_problem.py
|
import ast
import sys
from data.logic import _grammar_transformer
from puzzle.problems import problem
class LogicProblem(problem.Problem):
@staticmethod
def score(lines):
if len(lines) <= 1:
return 0
program = '\n'.join(lines)
try:
parsed = ast.parse(program)
if isinstance(parsed, ast.Module):
return min(1, len(parsed.body) / 10)
except:
return 0
return sys.float_info.epsilon
def _parse(self):
return _grammar_transformer.transform('\n'.join(self.lines))
def _solve(self):
parsed = self._parse()
ast.fix_missing_locations(parsed)
compiled = compile(parsed, '<string>', 'exec')
variables = {}
exec(compiled, variables)
model = variables['model']
solver = model.load('Mistral')
solver.solve()
solutions = model.get_solutions()
# TODO: Return valid solutions.
return solutions
|
import ast
import sys
from data.logic import _grammar_transformer
from puzzle.problems import problem
class LogicProblem(problem.Problem):
@staticmethod
def score(lines):
if len(lines) <= 1:
return 0
program = '\n'.join(lines)
try:
parsed = ast.parse(program)
if isinstance(parsed, ast.Module):
return min(1, len(parsed.body) / 10)
except:
return 0
return sys.float_info.epsilon
def _parse(self):
return _grammar_transformer.transform('\n'.join(self.lines))
def _solve(self):
parsed = self._parse()
compiled = compile(parsed, '<string>', 'exec')
variables = {}
exec(compiled, variables)
model = variables['model']
solver = model.load('Mistral')
solver.solve()
solutions = model.get_solutions()
# TODO: Return valid solutions.
return solutions
|
Remove redundant ast.fix_missing_locations call. Moved to transformer.
|
Remove redundant ast.fix_missing_locations call. Moved to transformer.
|
Python
|
mit
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
32a831d575b5354468a8f9c2a815f9f1aa03f2fb
|
api/caching/listeners.py
|
api/caching/listeners.py
|
from api.caching.tasks import ban_url
from framework.tasks.handlers import enqueue_task
from modularodm import signals
@signals.save.connect
def log_object_saved(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_task(ban_url.s(abs_url))
|
from functools import partial
from api.caching.tasks import ban_url
from framework.tasks.postcommit_handlers import enqueue_postcommit_task
from modularodm import signals
@signals.save.connect
def log_object_saved(sender, instance, fields_changed, cached_data):
abs_url = None
if hasattr(instance, 'absolute_api_v2_url'):
abs_url = instance.absolute_api_v2_url
if abs_url is not None:
enqueue_postcommit_task(partial(ban_url, abs_url))
|
Switch cache ban request to new postcommit synchronous method
|
Switch cache ban request to new postcommit synchronous method
|
Python
|
apache-2.0
|
kwierman/osf.io,amyshi188/osf.io,baylee-d/osf.io,felliott/osf.io,chrisseto/osf.io,amyshi188/osf.io,felliott/osf.io,samchrisinger/osf.io,cslzchen/osf.io,icereval/osf.io,cwisecarver/osf.io,sloria/osf.io,kwierman/osf.io,kch8qx/osf.io,asanfilippo7/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,mluo613/osf.io,monikagrabowska/osf.io,felliott/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,mluo613/osf.io,kwierman/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,mluke93/osf.io,aaxelb/osf.io,TomBaxter/osf.io,acshi/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,wearpants/osf.io,RomanZWang/osf.io,mluo613/osf.io,samchrisinger/osf.io,abought/osf.io,mfraezz/osf.io,chennan47/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,pattisdr/osf.io,caneruguz/osf.io,Nesiehr/osf.io,abought/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,doublebits/osf.io,binoculars/osf.io,zamattiac/osf.io,adlius/osf.io,amyshi188/osf.io,mattclark/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,jnayak1/osf.io,abought/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,kch8qx/osf.io,RomanZWang/osf.io,alexschiller/osf.io,wearpants/osf.io,acshi/osf.io,HalcyonChimera/osf.io,acshi/osf.io,Nesiehr/osf.io,hmoco/osf.io,kch8qx/osf.io,hmoco/osf.io,billyhunt/osf.io,mluke93/osf.io,zachjanicki/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,jnayak1/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,caseyrollins/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,doublebits/osf.io,zachjanicki/osf.io,amyshi188/osf.io,acshi/osf.io,doublebits/osf.io,billyhunt/osf.io,mfraezz/osf.io,mfraezz/osf.io,chennan47/osf.io,crcresearch/osf.io,adlius/osf.io,caneruguz/osf.io,chennan47/osf.io,chrisseto/osf.io,leb2dg/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,mluo613/osf.io,pattisdr/osf.io,emetsger/osf.io,doublebits/osf.io,samchrisinger/osf.io,erinspace/osf.io,jnayak1/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,saradbowman/osf.io,cwisecarver/osf.io,mattclark/osf.io,caneruguz/osf.io,baylee-d/osf.io,caneruguz/osf.io,crcresearch/osf.io,emetsger/osf.io,zamattiac/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,kch8qx/osf.io,rdhyee/osf.io,icereval/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,samchrisinger/osf.io,icereval/osf.io,TomBaxter/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,sloria/osf.io,wearpants/osf.io,aaxelb/osf.io,billyhunt/osf.io,caseyrollins/osf.io,hmoco/osf.io,Nesiehr/osf.io,TomBaxter/osf.io,mluo613/osf.io,wearpants/osf.io,emetsger/osf.io,mluke93/osf.io,sloria/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,binoculars/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,RomanZWang/osf.io,erinspace/osf.io,alexschiller/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,adlius/osf.io,hmoco/osf.io,mluke93/osf.io,monikagrabowska/osf.io,binoculars/osf.io,RomanZWang/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,adlius/osf.io,acshi/osf.io,monikagrabowska/osf.io,doublebits/osf.io,monikagrabowska/osf.io,abought/osf.io,zamattiac/osf.io,RomanZWang/osf.io,alexschiller/osf.io,felliott/osf.io,emetsger/osf.io,pattisdr/osf.io,zamattiac/osf.io,erinspace/osf.io,jnayak1/osf.io,rdhyee/osf.io,leb2dg/osf.io,mattclark/osf.io,rdhyee/osf.io
|
90963666f22bea81d433724d232deaa0f3e2fec1
|
st2common/st2common/exceptions/db.py
|
st2common/st2common/exceptions/db.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.exceptions import StackStormBaseException
class StackStormDBObjectNotFoundError(StackStormBaseException):
pass
class StackStormDBObjectMalformedError(StackStormBaseException):
pass
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.exceptions import StackStormBaseException
class StackStormDBObjectNotFoundError(StackStormBaseException):
pass
class StackStormDBObjectMalformedError(StackStormBaseException):
pass
class StackStormDBObjectConflictError(StackStormBaseException):
"""
Exception that captures a DB object conflict error.
"""
def __init__(self, message, conflict_id):
super(StackStormDBObjectConflictError, self).__init__(message)
self.conflict_id = conflict_id
|
Add a special exception for capturing object conflicts.
|
Add a special exception for capturing object conflicts.
|
Python
|
apache-2.0
|
jtopjian/st2,StackStorm/st2,StackStorm/st2,emedvedev/st2,dennybaa/st2,StackStorm/st2,alfasin/st2,pixelrebel/st2,nzlosh/st2,Itxaka/st2,StackStorm/st2,dennybaa/st2,punalpatel/st2,Plexxi/st2,lakshmi-kannan/st2,lakshmi-kannan/st2,grengojbo/st2,Itxaka/st2,jtopjian/st2,alfasin/st2,punalpatel/st2,peak6/st2,tonybaloney/st2,pinterb/st2,lakshmi-kannan/st2,tonybaloney/st2,Plexxi/st2,punalpatel/st2,emedvedev/st2,tonybaloney/st2,emedvedev/st2,alfasin/st2,Plexxi/st2,peak6/st2,Plexxi/st2,armab/st2,pinterb/st2,dennybaa/st2,nzlosh/st2,nzlosh/st2,Itxaka/st2,jtopjian/st2,pixelrebel/st2,armab/st2,peak6/st2,pixelrebel/st2,pinterb/st2,nzlosh/st2,grengojbo/st2,grengojbo/st2,armab/st2
|
39b57462b69d78825fd217822d9be2f1eea5a06d
|
src/ansible/models.py
|
src/ansible/models.py
|
from django.db import models
from django.conf import settings
class Playbook(models.Model):
name = models.CharField(max_length=200)
inventory = models.CharField(max_length=200, default="hosts")
user = models.CharField(max_length=200, default="ubuntu")
directory = models.CharField(max_length=200, editable=False, default="dir")
def __str__(self):
return "%s" % self.name
def format_directory(self):
directory = self.name.lower()
directory = directory.replace(" ","-")
return directory
def save(self, *args, **kwargs):
self.directory = self.format_directory()
super(Playbook, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = "playbooks"
class Registry(models.Model):
playbook = models.ForeignKey(Playbook, on_delete=models.CASCADE)
name = models.CharField(max_length=200)
item = models.FilePathField(path=settings.PLAYBOOK_DIR, recursive=True)
def __str__(self):
return "%s" % self.name
class Meta:
verbose_name_plural = "registries"
|
from django.db import models
from django.conf import settings
class Playbook(models.Model):
name = models.CharField(max_length=200)
inventory = models.CharField(max_length=200, default="hosts")
user = models.CharField(max_length=200, default="ubuntu")
directory = models.CharField(max_length=200, editable=False, default="dir")
def __str__(self):
return "%s" % self.name
def format_directory(self):
directory = self.name.lower()
directory = directory.replace(" ","-")
return directory
def save(self, *args, **kwargs):
self.directory = self.format_directory()
super(Playbook, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = "playbooks"
class Registry(models.Model):
playbook = models.ForeignKey("Playbook", default=1, on_delete=models.CASCADE)
name = models.CharField(max_length=200)
item = models.FilePathField(path=settings.PLAYBOOK_DIR, recursive=True)
def __str__(self):
return "%s" % self.name
class Meta:
verbose_name_plural = "registries"
|
Set default value for Registry.playbook
|
Set default value for Registry.playbook
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
4bf129f49eb608e34e46f60edb1d23303dd2ed27
|
examples/__main__.py
|
examples/__main__.py
|
import os
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
tornado.options.define("port", default=8888, type=int)
if __name__ == "__main__":
tornado.options.parse_command_line()
application = tornado.web.Application([], **{
"static_path": ".",
"static_url_prefix": "/"
})
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(tornado.options.options.port)
print "http://0.0.0.0:%d/examples/index.html" % tornado.options.options.port
tornado.ioloop.IOLoop.instance().start()
|
import os
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
tornado.options.define("port", default=8888, type=int)
if __name__ == "__main__":
tornado.options.parse_command_line()
application = tornado.web.Application([], **{
"static_path": ".",
"static_url_prefix": "/"
})
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(tornado.options.options.port)
print "http://localhost:%d/examples/index.html" % tornado.options.options.port
tornado.ioloop.IOLoop.instance().start()
|
Use localhost rather than 0.0.0.0.
|
Use localhost rather than 0.0.0.0.
|
Python
|
bsd-3-clause
|
qqqlllyyyy/d3,xc145214/d3,wangjun/d3,imshibaji/d3,alexgarciac/d3,markengelstad/d3,KevinMarkVI/d3,amorwilliams/d3,clayzermk1/d3,codingang/d3-1,overmind1980/d3,peterwu8/d3,baegjins/d3,christianevans214/d3,ilovezy/d3,ilo10/d3,UpstatePedro/d3,nbende/d3,trankmichael/d3,oanaradulescu/d3,MartinDavila/d3,Sachin-Ganesh/d3,nheinzer1210/d3,nbende/d3,zhoux10/d3,ChaofengZhou/d3,HomeMango/d3,jsan4christ/d3,kyroskoh/d3,quevedin/d3,LikeGit-2013/d3,jling90/d3,jamesgarfield/d3,lypzln/d3,Acker2015/d3,vp2177/d3,KehindeAyanleye/d3,ammula88/d3,lrinili/d3,datawrapper/d3-light,okcd00/d3,zziuni/d3,kx1911/d3,kevinresol/d3,wangjun/d3,rlugojr/d3,salamer/d3,whoinlee/d3,folpindo/d3,madlen99/d3,MeganBeneteau/d3,angeliaz/d3,chiu/d3,emmanuelq2/d3,lauraweaver/d3,guangyue/d3,ABaldwinHunter/d3-classic,ClaireRutkoske/d3,nicgallardo/d3,littlstar/d3,DagoCIMA/Prueba1,CallAnotherBarry/d3,MilkXin/dingTalk,johnnyg3p/d3,boshika/d3,tessafallon/nypl-pg,cool-Blue/d3,jordancheah/d3,yuhwkim/d3,aihua/d3,kpcorda/d3,macressler/d3,ssrx17/d3,ferrero-zhang/d3,larskris/d3,2947721120/redolent-hockeypuck,lypzln/d3,chenflat/d3,lihanhui/d3,excelwang/d3,bsipocz/d3,UmarMughal/d3,jimibue/d3,fredlang/d3,behzad88/d3,xuzhaokui/d3,xiaojie123/d3,stefwalter/d3,ralic/d3,kyroskoh/d3,halcyonstar/d3,josealbertohm/d3,codingang/d3-1,latur19318/d3,wallmarkets/d3,nishant8BITS/d3,mingyaaaa/d3,abgaryanharutyun/d3,AndBicScadMedia/d3,Vardhan17/d3,itxd/d3,DataVizApril/d3,ornelaxh/d3,lunyang/d3,tessafallon/nypl-pg,supzann3/d3,nirmalks/d3,clarinete/d3,amorwilliams/d3,bqevin/d3,elpoisterio/d3,ThinkedCoder/d3,salamer/d3,XiqianZ/d3,standino/go-easy-portal,hkjels/d3,mhsmith/d3,jsan4christ/d3,CallAnotherBarry/d3,marianbida/d3,johnochs/d3,fsanchezro/d3,futuraprime/d3,amccartney/d3,bestwpw/d3,ayshaabbas/d3,manashmndl/d3,alexgarciac/d3,patrickkillalea/d3,ananya77041/d3,Eric-Zhong/d3,dushmis/d3,juliaty/d3,elpoisterio/d3,danbwhiting/d3,azilnoor/d3,josealbertohm/d3,amccartney/d3,Henrilin28/d3,esparza83/d3,ptkeller/d3,suezse/d3,zhoux10/d3,emirhartato/d3,Genie77998/d3,3mao/d3,teefresh/d3,Lchchen/d3,macressler/d3,alex179ohm/d3,hillaryskye/d3,nonconforme/d3,hyrole/d3,sigma-random/d3,reynoldqi/TestFirst,Hbl15/d3,Wombatpm/d3,m4s0/d3,AromaR/d3,idkwim/d3,alex-zhang/d3,Jonham/d3,edktsnr/d3,CrandellWS/d3,s-a-r-id/d3,charlesDADI/d3,hillaryskye/d3,SystemicEmotions/d3,Petah/d3,aluck19/d3,supriyantomaftuh/d3,dieface/d3,ansjcy/d3,n1n9-jp/d3,dbirchak/d3,esparza83/d3,Lchchen/d3,ayadoguchi/d3_practice,mainelander/d3,zzzzw/d3,rohittiwarirvt/d3,dogobox/d3,vinicius5581/d3,Simon-Lau/d3,SumAll/d3,lukeskip/d3,jamesblunt/d3,quevedin/d3,ykominami/d3,leohmoraes/d3,KehindeAyanleye/d3,Sigfried/d3,18Shubhamgupta/d3,stefanom/d3,Petermuturi/d3,Ossehoht/d3,ZhuLiangwh/d3,FangMath/d3,yuhwkim/d3,oanaradulescu/d3,uetsujitomoya/d3,witcxc/d3,kennethd/d3,yuanzhiqian/d3,ABaldwinHunter/d3,tempbottle/d3,supzann3/d3,ajtulloch/d3,liang42hao/d3,Jai-Chaudhary/d3,mendax-grip/d3,Shuffguy/d3,sk187/d3,hqren/d3,smartpcr/d3,larskotthoff/d3,concord-consortium/d3,rubenv/d3,vp2177/d3,GibbleBots/d3,AromaR/d3,omarbenites/d3,aljachimiak/d3,sigma-random/d3,8DTechnologies/d3,kjeldandersen/d3,imshibaji/d3,atsolakid/d3,xujie-nm/d3,cbtpro/d3,dogobox/d3,aigouzz/d3,chenflat/d3,okierie/d3,Tinysymphony/d3,JungMinu/d3,VirosaLi/d3,bestwpw/d3,nirajvora/d3,berlynhubler/d3,itxd/d3,nicksrandall/d3,welloncn/d3,kevinresol/d3,suryasingh/d3,2947721120/redolent-hockeypuck,Teino1978-Corp/Teino1978-Corp-d3,hyrole/d3,XiqianZ/d3,chen-ben/d3,fredlang/d3,moodboom/d3,kewal07/d3,aluck19/d3,stity/d3,Yelp/d3,ssrx17/d3,qqqlllyyyy/d3,nilqed/d3,chiu/d3,leitzler/d3,evdevgit/d3,Ossehoht/d3,ABaldwinHunter/d3-classic,MeganBeneteau/d3,w124384389/d3,dongnhut/d3,jamesblunt/d3,smartpcr/d3,kx1911/d3,Eric-Zhong/d3,standino/go-easy-portal,mujiatong/d3,elkingtonmcb/d3,aleksa000777/d3,emirhartato/d3,ammula88/d3,littlstar/d3,JoaquinSiabra/d3,batnasan06/d3,3mao/d3,aihua/d3,Limky/d3,studiowangfei/d3,lukeskip/d3,UpstatePedro/d3,alex-zhang/d3,juliaty/d3,jessiejea/d3,aaronhoffman/d3,edktsnr/d3,buzybee83/d3,tessafallon/nypl-pg,suezse/d3,Simon-Lau/d3,mayblue9/d3,lunyang/d3,xc145214/d3,yuhualingfeng/d3,okierie/d3,402332509/d3,HomeMango/d3,woshiniuren/d3,xiaochen0620/d3,allenjin/d3,reiaaoyama/d3,mingyaaaa/d3,krystism/d3,Acker2015/d3,evdevgit/d3,burakkp/d3,qodbtn41/d3,mujiatong/d3,abhuzz/d3,polzak/d3,ChaofengZhou/d3,allenjin/d3,roma07/d3,geoff111/d3,Vardhan17/d3,iorikiir/d3,yuanzhiqian/d3,Ali925/d3,KevinMarkVI/d3,sysexits/d3,markpj1/d3,HardlyHaki/d3,18Shubhamgupta/d3,AlpinHologramm/d3,peterwu8/d3,julialintern/d3,sunios/d3,hgarnelo/d3,mssyogi/d3,behzad88/d3,radovanx/d3,reynoldqi/TestFirst,gorcz/d3,DataVizApril/d3,lijanele/d3,iyogeshjoshi/d3,boshika/d3,lihanhui/d3,mhsmith/d3,kyoungchinseo/d3,elkingtonmcb/d3,danbwhiting/d3,awdesch/d3,trinath3/d3,halcyonstar/d3,ZhuLiangwh/d3,jakimhartford/d3,mssjtxwd/d3,elancom/d3,rubenv/d3,ThinkedCoder/d3,Ahmad-Hilali/d3,Ahmad-Hilali/d3,hkjels/d3,mattcale7/d3,Tinysymphony/d3,stefanom/d3,ornelaxh/d3,fsaglam2002/d3,anant10/dinto,chen-ben/d3,lijanele/d3,n1n9-jp/d3,ralic/d3,iyogeshjoshi/d3,JeebsM/d3,kingland/d3,christianevans214/d3,larskotthoff/d3,darshanhs90/d3,cool-Blue/d3,jordancheah/d3,taylorhxu/d3,mountwe/d34kpor,caseytrombley/d3,navjotahuja92/d3,ananya77041/d3,aigouzz/d3,markpj1/d3,zicouser/d3,Teino1978-Corp/Teino1978-Corp-d3,jamesgarfield/d3,shashi-dokania/d3,kiwizhang/d3,buzybee83/d3,nirmalks/d3,shaximinion/d3,Jonathan-S-Phillips/d3,guangyue/d3,shaximinion/d3,nicgallardo/d3,circuithub/d3-serverside,m-br/d3,mlawry/d3,emmanuelq2/d3,madlen99/d3,standino/go-easy-portal,2947721120/squealing-octo-capsicum,chaoallsome/d3,mountwe/d34kpor,gyenumula/d3,house-fiction/d3,clayzermk1/d3,sajeetharan/d3,DagoCIMA/Prueba1,1174751315/d3,lovewitty/d3,onlyyouandty/d3,kjeldandersen/d3,s-a-r-id/d3,triggerThis/d3,fsanchezro/d3,joycedelatorre/d3,LikeGit-2013/d3,Jonekee/d3,li0t/d3,npmcomponent/ignacioola-d3,kidaa/d3,Sachin-Ganesh/d3,elancom/d3,atsolakid/d3,dongnhut/d3,ClaireRutkoske/d3,nilqed/d3,GibbleBots/d3,alex/d3,eaglesjava/d3,keyanzhang/d3,leitzler/d3,humanrelationships/d3,Shuffguy/d3,bsipocz/d3,xujie-nm/d3,Limky/d3,alex179ohm/d3,markengelstad/d3,v11yu/d3,makenti/d3,julialintern/d3,nicksrandall/d3,woshiniuren/d3,joycedelatorre/d3,aljachimiak/d3,kyoungchinseo/d3,xiaochen0620/d3,m4s0/d3,supriyantomaftuh/d3,gorcz/d3,azilnoor/d3,monoc44/d3,RacingTadpole/d3,idkwim/d3,nheinzer1210/d3,pradeeptas/d3,ilovezy/d3,JunichiWatanuki/d3,xuzhaokui/d3,danforthdesign/d3,lovewitty/d3,moodboom/d3,clarinete/d3,kitlomerc/d3,agarbuno/d3,aleksa000777/d3,matheosu/d3,jeanpan/d3,kbarnhart/d3,10000TB/d3,maureenwaitherero/d3,HardlyHaki/d3,sajeetharan/d3,salambolog/d3,studiowangfei/d3,ayshaabbas/d3,hubandbob/d3,maureenwaitherero/d3,kaijie/d3,lrinili/d3,triggerThis/d3,nonconforme/d3,webmonarch/d3,402332509/d3,trankmichael/d3,kaktus40/d3,reiaaoyama/d3,polzak/d3,1174751315/d3,mendax-grip/d3,DaEunPark/d3,v11yu/d3,SystemicEmotions/d3,ee08b397/d3,ilo10/d3,trinath3/d3,danforthdesign/d3,stefwalter/d3,anant10/dinto,ferrero-zhang/d3,anusornc/d3,Naomifh/moboshock,salambolog/d3,kidaa/d3,smartinsantos/d3,2947721120/squealing-octo-capsicum,stity/d3,okcd00/d3,chewett/d3,welloncn/d3,erhanBLC/d3,awdesch/d3,kitlomerc/d3,matheosu/d3,xudongcamsys/d3,patrickkillalea/d3,mssjtxwd/d3,forkmsrini/d3,hgarnelo/d3,XueQian/d3,hqren/d3,shashi-dokania/d3,w124384389/d3,batnasan06/d3,gyenumula/d3,makenti/d3,CrandellWS/d3,roma07/d3,dieface/d3,datawrapper/d3-light,marianbida/d3,jeanpan/d3,omarbenites/d3,MartinDavila/d3,monoc44/d3,fatmazaman/d3,angeliaz/d3,Lyoneidas/d3,leeleo26/d3,JungMinu/d3,leeleo26/d3,sk187/d3,AlpinHologramm/d3,FredrikAhlberg/IconLiveD3.js,pradeeptas/d3,RCGTDev/d3,zicouser/d3,ansjcy/d3,webmonarch/d3,onlyyouandty/d3,kbarnhart/d3,ishang27/d3,mlawry/d3,nirajvora/d3,abhuzz/d3,FangMath/d3,kiwizhang/d3,joehannes-libs/d3,sysexits/d3,VirosaLi/d3,JunichiWatanuki/d3,mattcale7/d3,hubandbob/d3,cbtpro/d3,darshanhs90/d3,geoff111/d3,mainelander/d3,kaijie/d3,abgaryanharutyun/d3,caseytrombley/d3,manashmndl/d3,dbirchak/d3,kennethd/d3,iamcap/d3,jessiejea/d3,excelwang/d3,diazmartin/d3,Hbl15/d3,johnochs/d3,larskris/d3,Jonekee/d3,Petermuturi/d3,johnnyg3p/d3,Jonathan-S-Phillips/d3,m-br/d3,ishang27/d3,lauraweaver/d3,zzzzw/d3,keyanzhang/d3,aaron-goshine/d3,fatmazaman/d3,nishant8BITS/d3,mayblue9/d3,teefresh/d3,staceb/d3,joehannes-libs/d3,wallmarkets/d3,youprofit/d3,bqevin/d3,house-fiction/d3,nitishmadhukar/d3,iorikiir/d3,XueQian/d3,UmarMughal/d3,krystism/d3,uetsujitomoya/d3,latur19318/d3,Petah/d3,8DTechnologies/d3,10000TB/d3,JoaquinSiabra/d3,Genie77998/d3,forkmsrini/d3,jling90/d3,AndBicScadMedia/d3,sunios/d3,baegjins/d3,RCGTDev/d3,futuraprime/d3,nitishmadhukar/d3,vinicius5581/d3,charlesDADI/d3,whoinlee/d3,chaoallsome/d3,berlynhubler/d3,JeebsM/d3,kewal07/d3,li0t/d3,xiaojie123/d3,burakkp/d3,ykominami/d3,diazmartin/d3,youprofit/d3,taylorhxu/d3,tempbottle/d3,ee08b397/d3,Ali925/d3,d3/d3,fsaglam2002/d3,yuhualingfeng/d3,rohittiwarirvt/d3,Lyoneidas/d3,leohmoraes/d3,ABaldwinHunter/d3,radovanx/d3,qodbtn41/d3,mssyogi/d3,davidvmckay/d3,Sigfried/d3,datawrapper/d3-light,ptkeller/d3,smartinsantos/d3,jakimhartford/d3,overmind1980/d3,kpcorda/d3,liang42hao/d3,npmcomponent/mbostock-d3,standino/go-easy-portal,Jonham/d3,anusornc/d3,folpindo/d3
|
8a583c522dff8fc2671d7b51042a8c9ff07e86dc
|
pyp2rpmlib/package_data.py
|
pyp2rpmlib/package_data.py
|
class PackageData(object):
def __init__(self, local_file, name, version):
self.local_file = local_file
self.name = name
self.version = version
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return None
class PypiData(PackageData):
def __init__(self, local_file, name, version, md5, url):
super(PackageData, self).__init__(local_file, name, version)
self.md5 = md5
self.url = url
class LocalData(PackageData):
def __init__(self, local_file, name, version):
super(PackageData, self).__init__(local_file, name, version)
|
class PackageData(object):
def __init__(self, local_file, name, version):
self.local_file = local_file
self.name = name
self.version = version
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return 'TODO:'
@property
def pkg_name(self, name):
if self.name.lower().find('py') != -1:
return self.name
else:
return 'python-%s'
class PypiData(PackageData):
def __init__(self, local_file, name, version, md5, url):
super(PackageData, self).__init__(local_file, name, version)
self.md5 = md5
self.url = url
class LocalData(PackageData):
def __init__(self, local_file, name, version):
super(PackageData, self).__init__(local_file, name, version)
|
Return TODO rather than None, add pkg_name property for PackageData
|
Return TODO rather than None, add pkg_name property for PackageData
|
Python
|
mit
|
MichaelMraka/pyp2rpm,mcyprian/pyp2rpm,fedora-python/pyp2rpm,joequant/pyp2rpm,henrysher/spec4pypi,pombredanne/pyp2rpm,yuokada/pyp2rpm
|
c7512104dce2e9ca83e8400b399b4f77113f9368
|
packs/travisci/actions/lib/action.py
|
packs/travisci/actions/lib/action.py
|
import requests
from st2actions.runners.pythonrunner import Action
API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
CONTENT_TYPE = 'application/json'
class TravisCI(Action):
def _get_auth_headers(self):
headers = {}
headers['Authorization'] = self.config['Authorization']
headers['Content-Type'] = self.config['Content-Type']
return headers
def _perform_request(self, path, method, data=None, requires_auth=False):
url = API_URL + path
if method == "GET":
if requires_auth:
headers = self._get_auth_headers()
else:
headers = {}
response = requests.get(url, headers=headers)
elif method == 'POST':
headers = self._get_auth_headers()
response = requests.post(url, headers=headers)
elif method == 'PUT':
headers = self._get_auth_headers()
response = requests.put(url, data=data, headers=headers)
return response
|
import httplib
import requests
from st2actions.runners.pythonrunner import Action
API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
CONTENT_TYPE = 'application/json'
class TravisCI(Action):
def _get_auth_headers(self):
headers = {}
headers['Authorization'] = self.config['Authorization']
headers['Content-Type'] = self.config['Content-Type']
return headers
def _perform_request(self, path, method, data=None, requires_auth=False):
url = API_URL + path
if method == "GET":
if requires_auth:
headers = self._get_auth_headers()
else:
headers = {}
response = requests.get(url, headers=headers)
elif method == 'POST':
headers = self._get_auth_headers()
response = requests.post(url, headers=headers)
elif method == 'PUT':
headers = self._get_auth_headers()
response = requests.put(url, data=data, headers=headers)
if response.status_code in [httplib.FORBIDDEN, httplib.UNAUTHORIZED]:
msg = ('Invalid or missing Travis CI auth token. Make sure you have'
'specified valid token in the config file')
raise Exception(msg)
return response
|
Throw on invalid / missing credentials.
|
Throw on invalid / missing credentials.
|
Python
|
apache-2.0
|
pidah/st2contrib,lmEshoo/st2contrib,psychopenguin/st2contrib,psychopenguin/st2contrib,armab/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,pidah/st2contrib,armab/st2contrib,armab/st2contrib,StackStorm/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,pearsontechnology/st2contrib,digideskio/st2contrib,digideskio/st2contrib
|
fa3ec9a764ca0d646588e908395367ce553981e1
|
tca/chat/views.py
|
tca/chat/views.py
|
from django.shortcuts import render
from rest_framework import viewsets
from chat.models import Member
from chat.models import ChatRoom
from chat.serializers import MemberSerializer
from chat.serializers import ChatRoomSerializer
class MemberViewSet(viewsets.ModelViewSet):
model = Member
serializer_class = MemberSerializer
class ChatRoomViewSet(viewsets.ModelViewSet):
model = ChatRoom
serializer_class = ChatRoomSerializer
|
from django.shortcuts import render
from django.shortcuts import get_object_or_404
from rest_framework import viewsets
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from chat.models import Member
from chat.models import ChatRoom
from chat.serializers import MemberSerializer
from chat.serializers import ChatRoomSerializer
class MemberViewSet(viewsets.ModelViewSet):
model = Member
serializer_class = MemberSerializer
class ChatRoomViewSet(viewsets.ModelViewSet):
model = ChatRoom
serializer_class = ChatRoomSerializer
@action()
def add_member(self, request, pk=None):
chat_room = self.get_object()
if 'lrz_id' not in request.DATA:
# Invalid request
return Response(status=status.HTTP_400_BAD_REQUEST)
member = get_object_or_404(Member, lrz_id=request.DATA['lrz_id'])
chat_room.members.add(member)
return Response({
'status': 'success',
})
|
Add an action for adding members to a chat room
|
Add an action for adding members to a chat room
Even though django-rest-framework supports a Ruby-on-Rails style
of updating existing resources by issuing a PATCH or PUT request,
such updates are unsafe and can cause race-conditions to lose
some state. The implementation of this action isn't fully RESTful,
but neither is the PATCH-style used here, so it is still a good
choice for solving a possible race condition situation.
|
Python
|
bsd-3-clause
|
mlalic/TumCampusAppBackend,mlalic/TumCampusAppBackend
|
01e911926d37fa981fd7703f751ff91f052313e2
|
tkLibs/__init__.py
|
tkLibs/__init__.py
|
__all__ = ['autoScrollbar', 'button', 'combobox', 'listbox', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .listbox import listbox
from .window import window
|
__all__ = ['autoScrollbar', 'button', 'combobox', 'entry', 'frame', 'label', 'listbox', 'toplevel', 'window']
from .autoScrollbar import autoScrollbar
from .button import button
from .combobox import combobox
from .entry import entry
from .frame import frame
from .label import label
from .listbox import listbox
from .toplevel import toplevel
from .window import window
|
Add import of new widgets.
|
Add import of new widgets.
|
Python
|
mit
|
Kyle-Fagan/tkLibs
|
a6ce4b1add0bcc664240cd63b27e460194e27c3f
|
src/glob2/__init__.py
|
src/glob2/__init__.py
|
from __future__ import absolute_import
from .impl import *
__version__ = (0, 3)
|
from __future__ import absolute_import
from .impl import *
__version__ = (0, 4)
|
Increment version number for new release.
|
Increment version number for new release.
|
Python
|
bsd-2-clause
|
musically-ut/python-glob2
|
71cb7a3d83cbb352a358ba8ac260584a6666b5ad
|
seleniumbase/config/proxy_list.py
|
seleniumbase/config/proxy_list.py
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
* https://github.com/mertguvencli/http-proxy-list
"""
PROXY_LIST = {
"example1": "170.39.193.236:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
* https://github.com/mertguvencli/http-proxy-list
"""
PROXY_LIST = {
"example1": "151.181.91.10:80", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
Update the example proxy list
|
Update the example proxy list
|
Python
|
mit
|
seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase
|
1d88ea54d1f4ce63893b906a5b79faa4dd25243f
|
grow/commands/convert.py
|
grow/commands/convert.py
|
from grow.pods import pods
from grow.pods import storage
from grow.conversion import *
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
from grow.pods import pods
from grow.pods import storage
from grow.conversion import content_locale_split
import click
import os
@click.command()
@click.argument('pod_path', default='.')
@click.option('--type', type=click.Choice(['content_locale_split']))
def convert(pod_path, type):
"""Converts pod files from an earlier version of Grow."""
root = os.path.abspath(os.path.join(os.getcwd(), pod_path))
pod = pods.Pod(root, storage=storage.FileStorage)
if type == 'content_locale_split':
content_locale_split.Converter.convert(pod)
else:
raise click.UsageError(
'Unable to convert files without a --type option.\n'
'Run `grow convert --help` to see valid --type values.')
|
Adjust import to fix build with PyInstaller.
|
Adjust import to fix build with PyInstaller.
|
Python
|
mit
|
grow/pygrow,grow/pygrow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/grow
|
3c442bc0304394095053b1a71e4a0aa37f72522b
|
sale_require_contract/__openerp__.py
|
sale_require_contract/__openerp__.py
|
# -*- coding: utf-8 -*-
{
'name': 'Sale Order Require Contract on Confirmation',
'version': '1.0',
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'description': """
Sale Order Require Contract on Confirmation
===========================================
""",
'author': 'ADHOC SA',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'sale',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
'name': 'Sale Order Require Contract on Confirmation',
'version': '1.0',
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'description': """
Sale Order Require Contract on Confirmation
===========================================
""",
'author': 'ADHOC SA',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'sale',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': False,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
FIX disable sale require contract
|
FIX disable sale require contract
|
Python
|
agpl-3.0
|
ingadhoc/sale,ingadhoc/partner,adhoc-dev/account-financial-tools,ingadhoc/sale,ingadhoc/account-payment,ingadhoc/account-invoicing,ingadhoc/account-analytic,adhoc-dev/odoo-addons,ingadhoc/odoo-addons,ingadhoc/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/product,maljac/odoo-addons,bmya/odoo-addons,syci/ingadhoc-odoo-addons,HBEE/odoo-addons,maljac/odoo-addons,ingadhoc/account-financial-tools,ClearCorp/account-financial-tools,ingadhoc/odoo-addons,dvitme/odoo-addons,dvitme/odoo-addons,adhoc-dev/odoo-addons,HBEE/odoo-addons,ingadhoc/sale,jorsea/odoo-addons,syci/ingadhoc-odoo-addons,dvitme/odoo-addons,sysadminmatmoz/ingadhoc,ingadhoc/sale,syci/ingadhoc-odoo-addons,bmya/odoo-addons,maljac/odoo-addons,jorsea/odoo-addons,adhoc-dev/odoo-addons,jorsea/odoo-addons,bmya/odoo-addons,HBEE/odoo-addons,sysadminmatmoz/ingadhoc,adhoc-dev/account-financial-tools,ingadhoc/product,ingadhoc/stock,ClearCorp/account-financial-tools
|
688bec4dc00dd1040901ca446c6b6cc7fa6fbbcb
|
downstream-farmer/utils.py
|
downstream-farmer/utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
def urlencode(string):
return urlencode(string)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from urllib import quote_plus
except ImportError:
from urllib.parse import quote_plus
def urlify(string):
""" You might be wondering: why is this here at all, since it's basically
doing exactly what the quote_plus function in urllib does. Well, to keep
the 2 & 3 stuff all in one place, meaning rather than try to import the
urllib stuff twice in each file where url-safe strings are needed, we keep
it all in one file: here.
Supporting multiple Pythons is hard.
:param string: String to URLify
:return: URLified string
"""
return quote_plus(string)
|
Add documentation and py3k compat
|
Add documentation and py3k compat
|
Python
|
mit
|
Storj/downstream-farmer
|
30e567adb809810930616493fd92ef1c40c9207b
|
dthm4kaiako/users/forms.py
|
dthm4kaiako/users/forms.py
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
"""Forms for user application."""
from django.forms import ModelForm
from django.contrib.auth import get_user_model, forms
from captcha.fields import ReCaptchaField
from captcha.widgets import ReCaptchaV3
User = get_user_model()
class SignupForm(ModelForm):
"""Sign up for user registration."""
captcha = ReCaptchaField(widget=ReCaptchaV3, label='')
class Meta:
"""Metadata for SignupForm class."""
model = get_user_model()
fields = ['first_name', 'last_name']
def signup(self, request, user):
"""Extra logic when a user signs up.
Required by django-allauth.
"""
user.first_name = self.cleaned_data['first_name']
user.last_name = self.cleaned_data['last_name']
user.save()
class UserChangeForm(forms.UserChangeForm):
"""Form class for changing user."""
class Meta(forms.UserChangeForm.Meta):
"""Metadata for UserChangeForm class."""
model = User
fields = ('email', 'last_name')
class UserCreationForm(forms.UserCreationForm):
"""Form class for creating user."""
class Meta(forms.UserCreationForm.Meta):
"""Metadata for UserCreationForm class."""
model = User
fields = ('email', 'first_name', 'last_name')
|
Add recaptcha to signup page
|
Add recaptcha to signup page
Signup page is currently not used, but
doing it now in case it is forgotten later.
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
c0eb0f902b0fcbea29c8a3bf70f80ca9384cce9f
|
scripts/remove_after_use/send_mendeley_reauth_email.py
|
scripts/remove_after_use/send_mendeley_reauth_email.py
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def main(dry=True):
user = OSFUser.load('qrgl2')
qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner')
pbar = progressbar.ProgressBar(maxval=qs.count()).start()
for i, each in enumerate(qs):
user = each.owner
logger.info('Sending email to OSFUser {}'.format(user._id))
if not dry:
mails.send_mail(
mail=mails.MENDELEY_REAUTH,
to_addr=user.username,
can_change_preferences=False,
user=user
)
pbar.update(i + 1)
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
# -*- coding: utf-8 -*-
import sys
import logging
from website.app import setup_django
setup_django()
from website import mails
from osf.models import OSFUser
from addons.mendeley.models import UserSettings
import progressbar
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
def main(dry=True):
qs = UserSettings.objects.filter(owner__is_active=True).select_related('owner').order_by('pk')
count = qs.count()
pbar = progressbar.ProgressBar(maxval=count).start()
logger.info('Sending email to {} users'.format(count))
for i, each in enumerate(qs):
user = each.owner
logger.info('Sending email to OSFUser {}'.format(user._id))
if not dry:
mails.send_mail(
mail=mails.MENDELEY_REAUTH,
to_addr=user.username,
can_change_preferences=False,
user=user
)
pbar.update(i + 1)
logger.info('Sent email to {} users'.format(count))
if __name__ == '__main__':
dry = '--dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
|
Remove junk and add more logging
|
Remove junk and add more logging
|
Python
|
apache-2.0
|
cslzchen/osf.io,icereval/osf.io,brianjgeiger/osf.io,mattclark/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,adlius/osf.io,cslzchen/osf.io,mattclark/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,icereval/osf.io,erinspace/osf.io,felliott/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,saradbowman/osf.io,felliott/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,cslzchen/osf.io,baylee-d/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,sloria/osf.io,felliott/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,sloria/osf.io,icereval/osf.io,sloria/osf.io,cslzchen/osf.io,caseyrollins/osf.io,erinspace/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,erinspace/osf.io,adlius/osf.io,adlius/osf.io,HalcyonChimera/osf.io
|
75f01ff3be060e033b24d141b0ca824cb7f81c22
|
tests/twisted/avahi/test-register.py
|
tests/twisted/avahi/test-register.py
|
from saluttest import exec_test
import avahitest
from avahitest import AvahiListener
import time
def test(q, bus, conn):
a = AvahiListener(q)
a.listen_for_service("_presence._tcp")
conn.Connect()
q.expect('service-added',
name='test-register@' + avahitest.get_host_name())
if __name__ == '__main__':
exec_test(test, { "published-name": "test-register" })
|
from saluttest import exec_test
import avahitest
from avahitest import AvahiListener
from avahitest import txt_get_key
from avahi import txt_array_to_string_array
import time
PUBLISHED_NAME="test-register"
FIRST_NAME="lastname"
LAST_NAME="lastname"
def test(q, bus, conn):
a = AvahiListener(q)
a.listen_for_service("_presence._tcp")
conn.Connect()
e = q.expect('service-added',
name=PUBLISHED_NAME + "@" + avahitest.get_host_name())
service = a.resolver_for_service(e)
e = q.expect('service-resolved', service = service)
for (key, val) in { "1st": FIRST_NAME,
"last": LAST_NAME,
"status": "avail",
"txtvers": "1" }.iteritems():
v = txt_get_key(e.txt, key)
assert v == val, (key, val, v)
if __name__ == '__main__':
exec_test(test, { "published-name": PUBLISHED_NAME,
"first-name": FIRST_NAME,
"last-name": LAST_NAME })
|
Test that the service is register with the correct txt record
|
Test that the service is register with the correct txt record
|
Python
|
lgpl-2.1
|
freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut,freedesktop-unofficial-mirror/telepathy__telepathy-salut
|
f898d1cc96fe66a097def29552f3774f3509be83
|
insultgenerator/words.py
|
insultgenerator/words.py
|
import pkg_resources
import random
_insulting_adjectives = []
def _load_wordlists():
global _insulting_adjectives
insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt")
_insulting_adjectives = insulting_adjective_list.decode().split('\n')
def get_insulting_adjective():
return _insulting_adjectives[0]
return random.choice(_insulting_adjectives)
_load_wordlists()
|
import pkg_resources
import random
_insulting_adjectives = []
def _load_wordlists():
global _insulting_adjectives
insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt")
_insulting_adjectives = insulting_adjective_list.decode().split('\n')
def get_insulting_adjective():
return random.choice(_insulting_adjectives)
_load_wordlists()
|
Revert "Adding test failure to ensure that CI is functioning correctly"
|
Revert "Adding test failure to ensure that CI is functioning correctly"
This reverts commit 754be81c1ccc385d8e7b418460271966d7db2361.
|
Python
|
mit
|
tr00st/insult_generator
|
6fbe58692005e5c8b7a9c4f4e98984ae86d347a2
|
pinax/messages/context_processors.py
|
pinax/messages/context_processors.py
|
from .models import Thread
def user_messages(request):
c = {}
if request.user.is_authenticated():
c["inbox_count"] = Thread.inbox(request.user).count()
return c
|
from .models import Thread
def user_messages(request):
c = {}
if request.user.is_authenticated():
c["inbox_threads"] = Thread.inbox(request.user)
c["unread_threads"] = Thread.unread(request.user)
return c
|
Return querysets in context processor to be more useful
|
Return querysets in context processor to be more useful
|
Python
|
mit
|
eldarion/user_messages,pinax/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages,eldarion/user_messages,arthur-wsw/pinax-messages
|
c63a1c2bc92267ac2b5ffc52c7189942d034c37b
|
src/dashboard/src/installer/views.py
|
src/dashboard/src/installer/views.py
|
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
from django.shortcuts import render
from django.http import HttpResponse
def welcome(request):
return render(request, 'installer/welcome.html', locals())
|
# This file is part of Archivematica.
#
# Copyright 2010-2012 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.models import User
from django.shortcuts import render
from django.http import HttpResponse
def welcome(request):
return render(request, 'installer/welcome.html', locals())
def create_superuser(request):
pass
# username
# password
# email
# User.objects.create_superuser(username, email, password)
|
Define skeleton for the function that will create the superuser
|
Define skeleton for the function that will create the superuser
Autoconverted from SVN (revision:2929)
|
Python
|
agpl-3.0
|
artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history,artefactual/archivematica-history
|
c555c53290c8894c80dc7991081dd5d7591fda8c
|
helpers/run_feeds.py
|
helpers/run_feeds.py
|
from core.feed import Feed
import core.config.celeryimports
if __name__ == '__main__':
all_feeds = Feed.objects()
for n in all_feeds:
print "Testing: {}".format(n)
n.update()
|
import sys
from core.feed import Feed
import core.config.celeryimports
if __name__ == '__main__':
if len(sys.argv) == 1:
all_feeds = Feed.objects()
elif len(sys.argv) >= 2:
all_feeds = [Feed.objects.get(name=sys.argv[1])]
print all_feeds
for n in all_feeds:
print "Testing: {}".format(n)
n.update()
|
Add argument to run single feed
|
Add argument to run single feed
|
Python
|
apache-2.0
|
yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti
|
12254ea15b1f761ad63095ed7244f347d42e4c85
|
file_encryptor/__init__.py
|
file_encryptor/__init__.py
|
from file_encryptor import (convergence, key_generators)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from file_encryptor import (convergence, key_generators)
__version__ = '0.2.0'
|
Add copyright, license and version information.
|
Add copyright, license and version information.
|
Python
|
mit
|
Storj/file-encryptor
|
864dac9b2586891f62700e3170421617aca48a88
|
deployment/config.py
|
deployment/config.py
|
class Azure:
resource_group = "MajavaShakki"
location = "northeurope"
cosmosdb_name = f"{resource_group}mongo".lower()
plan_name = f"{resource_group}Plan"
site_name = f"{resource_group}Site"
class Mongo:
database_name = "Majavashakki"
collection_throughput = 500
collections = ["gamemodels", "sessions", "users"]
|
class Azure:
resource_group = "MajavaShakki"
location = "northeurope"
cosmosdb_name = f"{resource_group}mongo".lower()
plan_name = f"{resource_group}Plan"
site_name = f"{resource_group}Site"
class Mongo:
database_name = "Majavashakki"
collection_throughput = 500
system_indexes_collection = "undefined" # https://github.com/Automattic/mongoose/issues/6989
collections = ["gamemodels", "sessions", "users", system_indexes_collection]
|
Configure throughput for 'undefined' collection
|
Configure throughput for 'undefined' collection
https://github.com/Automattic/mongoose/issues/6989
https://jira.mongodb.org/browse/NODE-1662
|
Python
|
mit
|
Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki
|
f0c684ab89fa4fe698c9e20e7e904d3371fe58e2
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup
setup(
name='alfred-workflow-packager',
version='0.11.0',
description='A CLI utility for packaging and exporting Alfred workflows',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='[email protected]',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'biplist >= 1, < 2',
'jsonschema >= 2, < 3'
],
entry_points={
'console_scripts': [
'alfred-workflow-packager=awp.main:main',
'workflow-packager=awp.main:main'
]
}
)
|
#!/usr/bin/env python
# coding=utf-8
from setuptools import setup
setup(
name='alfred-workflow-packager',
version='0.11.0',
description='A CLI utility for packaging and exporting Alfred workflows',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='[email protected]',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'biplist >= 1, < 2',
'jsonschema >= 2, < 3'
],
entry_points={
'console_scripts': [
'alfred-workflow-packager=awp.main:main',
'workflow-packager=awp.main:main',
'awp=awp.main:main'
]
}
)
|
Add awp as an additional shell command
|
Add awp as an additional shell command
|
Python
|
mit
|
caleb531/alfred-workflow-packager
|
6218287f7123e7c952e35ef8e12cacb985cea435
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.core import Extension
VERSION = '1.0.5'
LONG_DESCRIPTION = """
A numerical geometric algebra module for python. BSD License.
"""
setup(
name='clifford',
version=VERSION,
license='bsd',
description='Numerical Geometric Algebra Module',
long_description=LONG_DESCRIPTION,
author='Robert Kern',
author_email='[email protected]',
url='http://clifford.readthedocs.io',
packages=find_packages(),
install_requires=[
'numpy',
'scipy',
'numba==0.45.1',
'h5py',
'sparse',
],
package_dir={'clifford':'clifford'},
classifiers=[
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Mathematics',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
python_requires='>=3.5',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.core import Extension
import os
version_path = os.path.join('clifford', '_version.py')
exec(open(version_path).read())
LONG_DESCRIPTION = """
A numerical geometric algebra module for python. BSD License.
"""
setup(
name='clifford',
version=__version__,
license='bsd',
description='Numerical Geometric Algebra Module',
long_description=LONG_DESCRIPTION,
author='Robert Kern',
author_email='[email protected]',
url='http://clifford.readthedocs.io',
packages=find_packages(),
install_requires=[
'numpy',
'scipy',
'numba==0.45.1',
'h5py',
'sparse',
],
package_dir={'clifford':'clifford'},
classifiers=[
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Mathematics',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
python_requires='>=3.5',
)
|
Add missing change from gh-172
|
Add missing change from gh-172
Not sure how this got lost
|
Python
|
bsd-3-clause
|
arsenovic/clifford,arsenovic/clifford
|
2ef3699e0635f96ee6371806d157ec1d159b60b7
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.1',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
#!/usr/bin/env python3
from setuptools import setup, find_packages
setup(
name='ppp_datamodel',
version='0.1',
description='Data model for the Projet Pensées Profondes.',
url='https://github.com/ProjetPP/PPP-datamodel-Python',
author='Valentin Lorentz',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries',
],
packages=[
'ppp_datamodel',
],
)
|
Fix classifiers (Python 3.2 is supported).
|
Fix classifiers (Python 3.2 is supported).
|
Python
|
agpl-3.0
|
ProjetPP/PPP-datamodel-Python,ProjetPP/PPP-datamodel-Python
|
e09d2d2be0c2dbb605f562dca857562acc0c0fdb
|
setup.py
|
setup.py
|
"""This is the docstring for the example.py module. Modules names should
have short, all-lowercase names. The module name may have underscores if
this improves readability.
Every module should have a docstring at the very top of the file. The
module's docstring may extend over multiple lines. If your docstring does
extend over multiple lines, the closing three quotation marks must be on
a line by itself, preferably preceded by a blank line."""
__copyright__ = "tba"
__license__ = "tba"
__author__ = "gplssm"
from setuptools import find_packages, setup
setup(name='ego.powerflow',
author='openego development group',
description='Powerflow analysis based on PyPSA',
version='0.0.1',
license="GNU GENERAL PUBLIC LICENSE Version 3",
packages=find_packages(),
install_requires=['pandas >= 0.17.0, <=0.19.1',
'pypsa >= 0.6.2, <= 0.6.2',
'sqlalchemy >= 1.0.15, <= 1.1.4',
'oemof.db >=0.0.4, <=0.0.4',
'geoalchemy2 >= 0.3.0, <=0.3.0',
'matplotlib >= 1.5.3, <=1.5.3']
)
|
"""This is the docstring for the example.py module. Modules names should
have short, all-lowercase names. The module name may have underscores if
this improves readability.
Every module should have a docstring at the very top of the file. The
module's docstring may extend over multiple lines. If your docstring does
extend over multiple lines, the closing three quotation marks must be on
a line by itself, preferably preceded by a blank line."""
__copyright__ = "tba"
__license__ = "tba"
__author__ = "gplssm"
from setuptools import find_packages, setup
setup(name='ego.powerflow',
author='openego development group',
description='Powerflow analysis based on PyPSA',
version='0.0.1',
license="GNU GENERAL PUBLIC LICENSE Version 3",
packages=find_packages(),
install_requires=['pandas >= 0.17.0, <=0.19.1',
'pypsa >= 0.8.0, <= 0.8.0',
'sqlalchemy >= 1.0.15, <= 1.1.4',
'oemof.db >=0.0.4, <=0.0.4',
'geoalchemy2 >= 0.3.0, <=0.3.0',
'matplotlib >= 1.5.3, <=1.5.3']
)
|
Update PyPSA version in requirements
|
Update PyPSA version in requirements
|
Python
|
agpl-3.0
|
openego/ego.powerflow
|
506612277718e13d852330553f52903544e3f3c2
|
setup.py
|
setup.py
|
from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-memcacheify',
version = '0.8',
py_modules = ('memcacheify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['django-pylibmc==0.5.0'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = '[email protected]',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-memcacheify',
keywords = 'django heroku cloud cache memcache memcached awesome epic',
description = 'Automatic Django memcached configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
|
from os.path import abspath, dirname, join, normpath
from setuptools import setup
setup(
# Basic package information:
name = 'django-heroku-memcacheify',
version = '0.8',
py_modules = ('memcacheify',),
# Packaging options:
zip_safe = False,
include_package_data = True,
# Package dependencies:
install_requires = ['django-pylibmc>=0.6.1'],
# Metadata for PyPI:
author = 'Randall Degges',
author_email = '[email protected]',
license = 'UNLICENSE',
url = 'https://github.com/rdegges/django-heroku-memcacheify',
keywords = 'django heroku cloud cache memcache memcached awesome epic',
description = 'Automatic Django memcached configuration on Heroku.',
long_description = open(normpath(join(dirname(abspath(__file__)),
'README.md'))).read()
)
|
Update django-pylibmc dependency to >=0.6.1
|
Update django-pylibmc dependency to >=0.6.1
As a bonus, django-pylibmc 0.5.0+ supports Python 3, for which testing
will be enabled in another PR.
I've switched to using greater than, since the Python packaging
guidelines say it's not best practice to use `install_requires` to pin
dependencies to specific versions:
http://python-packaging-user-guide.readthedocs.org/en/latest/requirements/#install-requires
|
Python
|
unlicense
|
rdegges/django-heroku-memcacheify
|
5c3aee40f21b6346120df99698b1f273886b5e70
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup
import ibei
setup(name = "ibei",
version = ibei.__version__,
author = "Joshua Ryan Smith",
author_email = "[email protected]",
packages = ["ibei", "physicalproperty"],
url = "https://github.com/jrsmith3/ibei",
description = "Calculator for incomplete Bose-Einstein integral",
classifiers = ["Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Physics",
"Natural Language :: English",],
install_requires = ["numpy",
"sympy",
"astropy"],)
|
# -*- coding: utf-8 -*-
from setuptools import setup
import ibei
setup(name="ibei",
version=ibei.__version__,
author="Joshua Ryan Smith",
author_email="[email protected]",
packages=["ibei", "physicalproperty"],
url="https://github.com/jrsmith3/ibei",
description="Calculator for incomplete Bose-Einstein integral",
classifiers=["Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Physics",
"Natural Language :: English", ],
install_requires=["numpy",
"sympy",
"astropy"],)
|
Fix pep8 and whitespace issues
|
Fix pep8 and whitespace issues
|
Python
|
mit
|
jrsmith3/ibei
|
182985866082ae039629ac3bcd84e62e169655ba
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import pyvim
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='pyvim',
author='Jonathan Slenders',
version=pyvim.__version__,
license='LICENSE',
url='https://github.com/jonathanslenders/pyvim',
description='Pure Pyton Vi Implementation',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'prompt-toolkit==0.46',
'ptpython==0.21', # For the Python completion (with Jedi.)
'pyflakes', # For Python error reporting.
'docopt', # For command line arguments.
],
entry_points={
'console_scripts': [
'pyvim = pyvim.entry_points.run_pyvim:run',
]
},
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import pyvim
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='pyvim',
author='Jonathan Slenders',
version=pyvim.__version__,
license='LICENSE',
url='https://github.com/jonathanslenders/pyvim',
description='Pure Pyton Vi Implementation',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'prompt-toolkit==0.50',
'ptpython==0.22', # For the Python completion (with Jedi.)
'pyflakes', # For Python error reporting.
'docopt', # For command line arguments.
],
entry_points={
'console_scripts': [
'pyvim = pyvim.entry_points.run_pyvim:run',
]
},
)
|
Upgrade of prompt-toolkit and ptpython.
|
Upgrade of prompt-toolkit and ptpython.
|
Python
|
bsd-3-clause
|
jonathanslenders/pyvim,amjith/pyvim
|
0e400261b2dad04dc9f290cdbc5b16222487d4e3
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name="pysovo",
version="0.4.1",
packages=['pysovo', 'pysovo.comms',
'pysovo.tests', 'pysovo.tests.resources'],
package_data={'pysovo':['tests/resources/*.xml', 'templates/*.txt']},
description="Utility scripts for reacting to received VOEvent packets",
author="Tim Staley",
author_email="[email protected]",
url="https://github.com/timstaley/pysovo",
install_requires=required
)
|
#!/usr/bin/env python
from setuptools import setup
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name="pysovo",
version="0.4.1",
packages=['pysovo', 'pysovo.comms', 'pysovo.triggers',
'pysovo.tests', 'pysovo.tests.resources'],
package_data={'pysovo':['tests/resources/*.xml', 'templates/*.txt']},
description="Utility scripts for reacting to received VOEvent packets",
author="Tim Staley",
author_email="[email protected]",
url="https://github.com/timstaley/pysovo",
install_requires=required
)
|
Add triggers module to install.
|
Add triggers module to install.
|
Python
|
bsd-2-clause
|
timstaley/pysovo
|
343422a7f609d0f0c1484ea4573064c7f9d54156
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.core import Extension
module = Extension('confluent_kafka.cimpl',
include_dirs = ['/usr/local/include'],
libraries= ['rdkafka'],
sources=['confluent_kafka/src/confluent_kafka.c',
'confluent_kafka/src/Producer.c',
'confluent_kafka/src/Consumer.c'])
setup(name='confluent-kafka',
version='0.9.1',
description='Confluent\'s Apache Kafka client for Python',
author='Confluent Inc',
author_email='[email protected]',
url='https://github.com/confluentinc/confluent-kafka-python',
ext_modules=[module],
packages=find_packages())
|
#!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.core import Extension
module = Extension('confluent_kafka.cimpl',
include_dirs = ['/usr/local/include'],
libraries= ['rdkafka'],
sources=['confluent_kafka/src/confluent_kafka.c',
'confluent_kafka/src/Producer.c',
'confluent_kafka/src/Consumer.c'])
setup(name='confluent-kafka',
version='0.9.1',
description='Confluent\'s Apache Kafka client for Python',
author='Confluent Inc',
author_email='[email protected]',
url='https://github.com/confluentinc/confluent-kafka-python',
ext_modules=[module],
packages=find_packages(),
data_files = [('', ['LICENSE'])])
|
Include LICENSE file in bdist
|
Include LICENSE file in bdist
|
Python
|
apache-2.0
|
blindroot/confluent-kafka-python,blindroot/confluent-kafka-python
|
54098ac30100cc40373689a35dfc2a1f96a5844d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
deps = [
'mozillapulse',
'mozci>=0.7.0',
'requests',
]
setup(name='pulse-actions',
version='0.1.4',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='[email protected]',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
from setuptools import setup, find_packages
deps = [
'mozillapulse',
'mozci>=0.7.3',
'requests',
]
setup(name='pulse-actions',
version='0.1.4',
description='A pulse listener that acts upon messages with mozci.',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
author='Alice Scarpa',
author_email='[email protected]',
license='MPL 2.0',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=deps,
url='https://github.com/adusca/pulse_actions',
entry_points={
'console_scripts': [
'run-pulse-actions = pulse_actions.worker:main'
],
})
|
Update mozci version to handle credentials in env variables
|
Update mozci version to handle credentials in env variables
|
Python
|
mpl-2.0
|
vaibhavmagarwal/pulse_actions,nikkisquared/pulse_actions,armenzg/pulse_actions,adusca/pulse_actions,mozilla/pulse_actions
|
ccafb464e8baadcf5064708b8f2e2ccc9f6943b4
|
setup.py
|
setup.py
|
from ez_setup import use_setuptools
use_setuptools() # nopep8
from setuptools import setup, find_packages
import os
import re
with open(os.path.join('activitysim', '__init__.py')) as f:
info = re.search(r'__.*', f.read(), re.S)
exec(info[0])
setup(
name='activitysim',
version=__version__,
description=__doc__,
author='contributing authors',
author_email='[email protected]',
license='BSD-3',
url='https://github.com/activitysim/activitysim',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
include_package_data=True,
entry_points={'console_scripts': ['activitysim=activitysim.cli.main:main']},
install_requires=[
'pyarrow >= 2.0',
'numpy >= 1.16.1',
'openmatrix >= 0.3.4.1',
'pandas >= 1.1.0',
'pyyaml >= 5.1',
'tables >= 3.5.1',
'toolz >= 0.8.1',
'psutil >= 4.1',
'requests >= 2.7',
'numba >= 0.51.2',
'orca >= 1.6',
]
)
|
from ez_setup import use_setuptools
use_setuptools() # nopep8
from setuptools import setup, find_packages
import os
import re
with open(os.path.join('activitysim', '__init__.py')) as f:
info = re.search(r'__.*', f.read(), re.S)
exec(info[0])
setup(
name='activitysim',
version=__version__,
description=__doc__,
author='contributing authors',
author_email='[email protected]',
license='BSD-3',
url='https://github.com/activitysim/activitysim',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
include_package_data=True,
entry_points={'console_scripts': ['activitysim=activitysim.cli.main:main']},
install_requires=[
'pyarrow >= 2.0',
'numpy >= 1.16.1',
'openmatrix >= 0.3.4.1',
'pandas >= 1.1.0',
'pyyaml >= 5.1',
'tables >= 3.5.1',
'cytoolz >= 0.8.1',
'psutil >= 4.1',
'requests >= 2.7',
'numba >= 0.51.2',
'orca >= 1.6',
]
)
|
Use Cython version of toolz
|
Use Cython version of toolz
Usage of activitysim within popultionsim fails with missing module cytoolz - i believe that activitysim is using cytoolz internally rather than toolz
|
Python
|
agpl-3.0
|
synthicity/activitysim,synthicity/activitysim
|
85e5ea5b4fcc7039a6d3441fcc65fc3d0825f16c
|
setup.py
|
setup.py
|
import os, sys
from setuptools import setup
setup(
name='reddit_comment_scraper',
version='2.0.0',
description='A simple Reddit-scraping script',
url='https://github.com/jfarmer/reddit_comment_scraper',
author='Jesse Farmer',
author_email='[email protected]',
license='MIT',
packages=['reddit_comment_scraper'],
install_requires=[
'unicodecsv==0.9.4',
'praw==2.1.19'
],
entry_points={
'console_scripts': [
'scrape_comments=reddit_comment_scraper:main'
]
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows :: Windows 7'
],
)
|
import os, sys
from setuptools import setup
setup(
name='reddit_comment_scraper',
version='2.0.0',
description='A simple Reddit-scraping script',
url='https://github.com/jfarmer/reddit_comment_scraper',
author='Jesse Farmer',
author_email='[email protected]',
license='MIT',
packages=['reddit_comment_scraper'],
install_requires=[
'unicodecsv==0.9.4',
'praw==2.1.19'
],
entry_points={
'console_scripts': [
'scrape_comments=reddit_comment_scraper:main'
]
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows :: Windows 8.1',
'Operating System :: Microsoft :: Windows :: Windows 8',
'Operating System :: Microsoft :: Windows :: Windows 7',
'Operating System :: Microsoft :: Windows :: Windows Vista'
],
)
|
Add more versions of Windows + environment
|
Add more versions of Windows + environment
|
Python
|
mit
|
jfarmer/reddit_comment_scraper,chrisswk/reddit_comment_scraper
|
c1edd4d1a9ba3ef57ea3524013efdb4faa51fc94
|
setup.py
|
setup.py
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from setuptools import setup, find_packages
import os
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
README = ''
setup(name='niprov',
version='0.1.post1',
author='Jasper J.F. van den Bosch',
author_email='[email protected]',
description='provenance for neuroimaging data',
packages=find_packages(),
url = 'https://github.com/ilogue/niprov',
test_suite="tests",
scripts=['executables/provenance'],
zip_safe=False,
license='BSD',
long_description=README,
classifiers=[
'License :: OSI Approved :: BSD License',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering'],
package_data={'niprov': ['discovery-filter.txt']},
include_package_data=True,
)
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from setuptools import setup, find_packages
import os
try:
import pypandoc
README = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
README = ''
setup(name='niprov',
version='0.1.post2',
author='Jasper J.F. van den Bosch',
author_email='[email protected]',
description='provenance for neuroimaging data',
packages=find_packages(),
url = 'https://github.com/ilogue/niprov',
test_suite="tests",
scripts=['executables/provenance'],
zip_safe=False,
license='BSD',
long_description=README,
classifiers=[
'License :: OSI Approved :: BSD License',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering'],
package_data={'niprov': ['discovery-filter.txt']},
include_package_data=True,
)
|
Patch version bump to .post2
|
Patch version bump to .post2
|
Python
|
bsd-3-clause
|
ilogue/niprov,ilogue/niprov,ilogue/niprov,ilogue/niprov
|
cba6cbc22faf1bf1d1351417854a06b1472c13dc
|
setup.py
|
setup.py
|
from distutils.core import setup
import os
from setuptools import find_packages
__version__ = 'unknown'
version_path = os.path.join(os.path.split(__file__)[0], 'banneret/version.py')
with open(version_path) as version_file:
exec(version_file.read())
URL = 'https://github.com/lancelote/banneret'
setup(
name='banneret',
packages=find_packages(exclude=['tests', '*.test', '*.test.*']),
version=__version__,
description='CLI helpers for PyCharm management',
author='Pavel Karateev',
author_email='[email protected]',
url=URL,
download_url=URL + '/archive/{}.tar.gz'.format(__version__),
keywords=['pycharm', 'cli'],
entry_points={
'console_scripts': [
'bnrt = banneret.main:main'
]
},
install_requires=[
'click'
],
extras_require={
'test': ['pytest', 'pytest-mock', 'tox'],
'lint': ['pylint', 'pydocstyle', 'pycodestyle', 'mypy'],
'docker': ['docker']
}
)
|
from distutils.core import setup
import os
from setuptools import find_packages
__version__ = 'unknown'
version_path = os.path.join(os.path.split(__file__)[0], 'banneret/version.py')
with open(version_path) as version_file:
exec(version_file.read())
URL = 'https://github.com/lancelote/banneret'
setup(
name='banneret',
packages=find_packages(exclude=['tests', '*.test', '*.test.*']),
version=__version__,
description='CLI helpers for PyCharm management',
author='Pavel Karateev',
author_email='[email protected]',
url=URL,
download_url=URL + '/archive/{}.tar.gz'.format(__version__),
keywords=['pycharm', 'cli'],
entry_points={
'console_scripts': [
'bnrt = banneret.main:cli'
]
},
install_requires=[
'click'
],
extras_require={
'test': ['pytest', 'pytest-mock', 'tox'],
'lint': ['pylint', 'pydocstyle', 'pycodestyle', 'mypy'],
'docker': ['docker']
}
)
|
Switch entry point to click's cli
|
Switch entry point to click's cli
|
Python
|
mit
|
lancelote/banneret
|
0eba6d5b468bf2a03883638fd7d3500585029b86
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = [
]
test_requirements = [
]
dist = setup(
name='cloudpickle',
version='0.1.0',
description='Extended pickling support for Python objects',
author='Cloudpipe',
author_email='[email protected]',
url='https://github.com/cloudpipe/cloudpickle',
install_requires=requirements,
license='LICENSE.txt',
packages=['cloudpickle'],
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering',
'Topic :: System :: Distributed Computing',
],
test_suite='tests',
tests_require=test_requirements
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = [
]
test_requirements = [
'pytest',
'pytest-cov'
]
dist = setup(
name='cloudpickle',
version='0.1.0',
description='Extended pickling support for Python objects',
author='Cloudpipe',
author_email='[email protected]',
url='https://github.com/cloudpipe/cloudpickle',
install_requires=requirements,
license='LICENSE.txt',
packages=['cloudpickle'],
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering',
'Topic :: System :: Distributed Computing',
],
test_suite='tests',
tests_require=test_requirements
)
|
Bring in pytest and pytest-cov
|
Bring in pytest and pytest-cov
|
Python
|
bsd-3-clause
|
pczerkas/cloudpickle,pczerkas/cloudpickle
|
3eb3cc047f2f5a358066eac8f806580089d70df2
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
)
|
from distutils.core import setup
setup(name='dscsrf',
version='1.0',
description='Global double-submit Flask CSRF',
packages=['dscsrf'],
py_modules=['flask'],
author='sc4reful',
url = 'https://github.com/sc4reful/dscsrf',
keywords = ['security', 'flask', 'website', 'csrf'],
download_url = 'https://github.com/sc4reful/dscsrf/tarball/1.0',
)
|
Prepare for tagging for PyPI
|
Prepare for tagging for PyPI
|
Python
|
mit
|
wkoathp/dscsrf
|
666cb18b25a22c0192420755148684d8e005572c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='relengapi-mapper',
version='0.1',
description='hg to git mapper',
author='Chris AtLee',
author_email='[email protected]',
url='https://github.com/catlee/mapper',
packages=find_packages(),
namespace_packages=['relengapi', 'relengapi.blueprints'],
entry_points={
"relengapi_blueprints": [
'mapper = relengapi.blueprints.mapper:bp',
],
},
install_requires=[
'Flask',
'relengapi',
'IPy',
'python-dateutil',
],
license='MPL2',
extras_require = {
'test': [
'nose',
'mock'
]
}
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='relengapi-mapper',
version='0.1',
description='hg to git mapper',
author='Chris AtLee',
author_email='[email protected]',
url='https://github.com/petemoore/mapper',
packages=find_packages(),
namespace_packages=['relengapi', 'relengapi.blueprints'],
entry_points={
"relengapi_blueprints": [
'mapper = relengapi.blueprints.mapper:bp',
],
},
install_requires=[
'Flask',
'relengapi',
'IPy',
'python-dateutil',
],
license='MPL2',
extras_require = {
'test': [
'nose',
'mock'
]
}
)
|
Update url to current RoR
|
Update url to current RoR
|
Python
|
mpl-2.0
|
mozilla-releng/services,lundjordan/services,Callek/build-relengapi,srfraser/services,andrei987/services,mozilla/build-relengapi,hwine/build-relengapi,La0/mozilla-relengapi,lundjordan/services,Callek/build-relengapi,djmitche/build-relengapi,garbas/mozilla-releng-services,lundjordan/services,hwine/build-relengapi,Callek/build-relengapi,mozilla-releng/services,mozilla/build-relengapi,hwine/build-relengapi,andrei987/services,lundjordan/build-relengapi,lundjordan/build-relengapi,djmitche/build-relengapi,garbas/mozilla-releng-services,mozilla/build-relengapi,andrei987/services,garbas/mozilla-releng-services,lundjordan/services,Callek/build-relengapi,garbas/mozilla-releng-services,srfraser/services,mozilla/build-relengapi,La0/mozilla-relengapi,djmitche/build-relengapi,srfraser/services,djmitche/build-relengapi,lundjordan/build-relengapi,La0/mozilla-relengapi,mozilla-releng/services,La0/mozilla-relengapi,mozilla-releng/services,andrei987/services,hwine/build-relengapi,srfraser/services,lundjordan/build-relengapi
|
f53667ef43e1b57b611e55155571cf985fdf40d5
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='ocdsmerge',
version='0.6.6',
author='Open Contracting Partnership',
author_email='[email protected]',
url='https://github.com/open-contracting/ocds-merge',
description='A library and reference implementation for merging OCDS releases',
license='BSD',
packages=find_packages(exclude=['tests', 'tests.*']),
long_description=long_description,
long_description_content_type='text/x-rst',
install_requires=[
'jsonref',
'requests',
],
extras_require={
'test': [
'coveralls',
'jsonschema',
'pytest',
'pytest-cov',
'pytest-vcr',
],
},
classifiers=[
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
)
|
from setuptools import find_packages, setup
with open('README.rst') as f:
long_description = f.read()
setup(
name='ocdsmerge',
version='0.6.6',
author='Open Contracting Partnership',
author_email='[email protected]',
url='https://github.com/open-contracting/ocds-merge',
description='A library and reference implementation for merging OCDS releases',
license='BSD',
packages=find_packages(exclude=['tests', 'tests.*']),
long_description=long_description,
long_description_content_type='text/x-rst',
install_requires=[
'jsonref',
'requests',
],
extras_require={
'test': [
'coveralls',
'jsonschema',
'pytest',
'pytest-cov',
'pytest-vcr',
],
},
classifiers=[
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
Add Python :: Implementation classifiers
|
build: Add Python :: Implementation classifiers
|
Python
|
bsd-3-clause
|
open-contracting/ocds-merge
|
5c6bb0b463f0cfde26f56251bb3225e358d4b2b6
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='corker',
version='0.4',
description='Another WSGI Framework',
classifiers=["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
license='BSD',
author='Joshua D. Boyd',
author_email='[email protected]',
url='https://github.com/jd-boyd/corker',
packages=find_packages(),
package_data={'': ['README.md', 'LICENSE.txt']},
install_requires=['webob', 'routes'],
tests_require=['nose', 'webtest'],
)
|
from setuptools import setup, find_packages
setup(name='corker',
version='0.4.1',
description='Another WSGI Framework',
classifiers=["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Software Development :: Libraries :: Python Modules",
'Programming Language :: Python',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
],
license='BSD',
author='Joshua D. Boyd',
author_email='[email protected]',
url='https://github.com/jd-boyd/corker',
packages=find_packages(),
package_data={'': ['README.md', 'LICENSE.txt']},
install_requires=['webob', 'routes'],
tests_require=['nose', 'webtest'],
)
|
Bump to version 0.4.1 since 0.4 was mis-packaged.
|
Bump to version 0.4.1 since 0.4 was mis-packaged.
|
Python
|
bsd-2-clause
|
jd-boyd/corker,vs-networks/corker
|
19d2fcf0da030f32d9cef9384a5b5113dfcd8443
|
setup.py
|
setup.py
|
from setuptools import setup
from rewrite_external_links import get_version
description = (
'Rewrite all external (off-site) links to go via a message page, '
+ 'using a middleware class.'
)
setup(
name="django-rewrite-external-links",
packages=["rewrite_external_links"],
include_package_data=True,
version=get_version(),
description=description,
author="Incuna Ltd",
author_email="[email protected]",
url="https://github.com/incuna/django-rewrite-external-links",
)
|
from setuptools import setup
from rewrite_external_links import get_version
description = (
'Rewrite all external (off-site) links to go via a message page, '
'using a middleware class.'
)
setup(
name="django-rewrite-external-links",
packages=["rewrite_external_links"],
include_package_data=True,
version=get_version(),
description=description,
author="Incuna Ltd",
author_email="[email protected]",
url="https://github.com/incuna/django-rewrite-external-links",
)
|
Fix W503 line break before binary operator
|
Fix W503 line break before binary operator
|
Python
|
bsd-2-clause
|
incuna/django-rewrite-external-links,incuna/django-rewrite-external-links
|
a0b75e48cad0be6028e6ff12873d1bd73307734c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
from setuptools import find_packages
import derpibooru
setup(
name = "DerPyBooru",
description = "Python bindings for Derpibooru's API",
url = "https://github.com/joshua-stone/DerPyBooru",
version = "0.5.2",
author = "Joshua Stone",
author_email = "[email protected]",
license = "Simplified BSD License",
platforms = ["any"],
packages = find_packages(),
install_requires = ["requests"],
include_package_data = True,
download_url = "https://github.com/joshua-stone/DerPyBooru/tarball/0.5.2",
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"License :: OSI Approved :: Simplified BSD License",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
#!/usr/bin/env python
from setuptools import setup
from setuptools import find_packages
import derpibooru
setup(
name = "DerPyBooru",
description = "Python bindings for Derpibooru's API",
url = "https://github.com/joshua-stone/DerPyBooru",
version = "0.5.2",
author = "Joshua Stone",
author_email = "[email protected]",
license = "Simplified BSD License",
platforms = ["any"],
packages = find_packages(),
install_requires = ["requests"],
include_package_data = True,
download_url = "https://github.com/joshua-stone/DerPyBooru/tarball/0.5.2",
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries :: Python Modules"
]
)
|
Change license classifier to BSD License to remove PyPI error
|
Change license classifier to BSD License to remove PyPI error
|
Python
|
bsd-2-clause
|
joshua-stone/DerPyBooru
|
5a92773a1d9c40e745026ca318ae21bfce2d4fb6
|
flaskext/cache/backends.py
|
flaskext/cache/backends.py
|
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache,
GAEMemcachedCache, FileSystemCache)
def null(app, args, kwargs):
return NullCache()
def simple(app, args, kwargs):
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return SimpleCache(*args, **kwargs)
def memcached(app, args, kwargs):
args.append(app.config['CACHE_MEMCACHED_SERVERS'])
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return MemcachedCache(*args, **kwargs)
def gaememcached(app, args, kwargs):
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return GAEMemcachedCache(*args, **kwargs)
def filesystem(app, args, kwargs):
args.append(app.config['CACHE_DIR'])
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return FileSystemCache(*args, **kwargs)
# RedisCache is supported since Werkzeug 0.7.
try:
from werkzeug.contrib.cache import RedisCache
except ImportError:
pass
else:
def redis(app, args, kwargs):
kwargs.update(dict(
host=app.config.get('CACHE_REDIS_HOST', 'localhost'),
port=app.config.get('CACHE_REDIS_PORT', 6379),
password=app.config.get('CACHE_REDIS_PASSWORD', None)
))
return RedisCache(*args, **kwargs)
|
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache,
GAEMemcachedCache, FileSystemCache)
def null(app, args, kwargs):
return NullCache()
def simple(app, args, kwargs):
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return SimpleCache(*args, **kwargs)
def memcached(app, args, kwargs):
args.append(app.config['CACHE_MEMCACHED_SERVERS'])
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return MemcachedCache(*args, **kwargs)
def gaememcached(app, args, kwargs):
kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX']))
return GAEMemcachedCache(*args, **kwargs)
def filesystem(app, args, kwargs):
args.append(app.config['CACHE_DIR'])
kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD']))
return FileSystemCache(*args, **kwargs)
# RedisCache is supported since Werkzeug 0.7.
try:
from werkzeug.contrib.cache import RedisCache
except ImportError:
pass
else:
def redis(app, args, kwargs):
kwargs.update(dict(
host=app.config.get('CACHE_REDIS_HOST', 'localhost'),
port=app.config.get('CACHE_REDIS_PORT', 6379),
))
password = app.config.get('CACHE_REDIS_PASSWORD')
if password:
kwargs['password'] = password
return RedisCache(*args, **kwargs)
|
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
|
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
|
Python
|
bsd-3-clause
|
kazeeki/mezmorize,kazeeki/mezmorize,j-fuentes/flask-cache,ordbogen/flask-cache,j-fuentes/flask-cache,thadeusb/flask-cache,alexey-sveshnikov/flask-cache,ordbogen/flask-cache,alexey-sveshnikov/flask-cache,thadeusb/flask-cache,gerasim13/flask-cache,gerasim13/flask-cache
|
f019902da41a5b224537ef8a4a87e90701ec538a
|
project_template/project_settings.py
|
project_template/project_settings.py
|
# Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.icekit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
|
# Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.glamkit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
|
Enable extra GLAMkit features by default in ICEkit project
|
Enable extra GLAMkit features by default in ICEkit project
Enable GLAMkit features by default for new ICEkit projects, since they
will probably be wanted anyway and because it will also indirectly
ensure we run unit tests for all GLAMkit features.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
c9c0104456ef7d5dcda29db67788112a8435945b
|
scripts/createDataModel.py
|
scripts/createDataModel.py
|
# script :: creating a datamodel that fits mahout from ratings.dat
ratings_dat = open('../data/movielens-1m/ratings.dat', 'r')
ratings_csv = open('../data/movielens-1m/ratings_without_timestamp.txt', 'w')
for line in ratings_dat:
arr = line.split('::')
new_line = ','.join(arr[:3])+'\n';
ratings_csv.write(new_line)
ratings_dat.close()
ratings_csv.close()
|
#!/usr/bin/env python
# script :: creating a datamodel that fits mahout from ratings.dat
ratings_dat = open('../data/movielens-1m/users.dat', 'r')
ratings_csv = open('../data/movielens-1m/users.txt', 'w')
for line in ratings_dat:
arr = line.split('::')
new_line = '\t'.join(arr)
ratings_csv.write(new_line)
ratings_dat.close()
ratings_csv.close()
|
Convert data delimiter from :: to tab character.
|
Convert data delimiter from :: to tab character.
|
Python
|
mit
|
monsendag/goldfish,ntnu-smartmedia/goldfish,ntnu-smartmedia/goldfish,monsendag/goldfish,ntnu-smartmedia/goldfish,monsendag/goldfish
|
a71bd3f953b0363df82d1e44b0d6df6cbe4d449b
|
vocab.py
|
vocab.py
|
import fire
import json
import sys
from source import VocabularyCom
from airtable import Airtable
class CLI:
class source:
"""Import word lists from various sources"""
def vocabulary_com(self, list_url, pretty=False):
result = VocabularyCom().collect(list_url)
if pretty:
print json.dumps(result, indent=4, sort_keys=True)
else:
json.dump(result, sys.stdout)
class airtable:
"""Sync lists to Airtable"""
def load(self, list_url, endpoint, key):
airtable = Airtable(endpoint, key)
words = VocabularyCom().collect(list_url)
airtable.load(words)
print 'List loaded to Airtable.'
def load_from_stdin(self, endpoint, key):
words = json.load(sys.stdin)
airtable = Airtable(endpoint, key)
airtable.load(words)
print 'List loaded to Airtable.'
if __name__ == '__main__':
fire.Fire(CLI)
|
import fire
import json
import sys
from source import VocabularyCom
from airtable import Airtable
class CLI:
class source:
"""Import word lists from various sources"""
def vocabulary_com(self, list_url, pretty=False):
result = VocabularyCom().collect(list_url)
if pretty:
print json.dumps(result, indent=4, sort_keys=True)
else:
json.dump(result, sys.stdout)
class airtable:
"""Sync lists to Airtable"""
def load(self, list_url, endpoint, key):
words = VocabularyCom().collect(list_url)
self._load(words, endpoint, key)
def load_from_stdin(self, endpoint, key):
words = json.load(sys.stdin)
self._load(words, endpoint, key)
def _load(self, words, endpoint, key):
airtable = Airtable(endpoint, key)
airtable.load(words)
print 'Loaded %d terms to Airtable.' % len(words)
if __name__ == '__main__':
fire.Fire(CLI)
|
Print number of terms loaded to Airtable.
|
Print number of terms loaded to Airtable.
|
Python
|
mit
|
zqureshi/vocab
|
c354d130cb542c2a5d57e519ce49175daa597e9c
|
froide/accesstoken/apps.py
|
froide/accesstoken/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccessTokenConfig(AppConfig):
name = 'froide.accesstoken'
verbose_name = _('Secret Access Token')
def ready(self):
from froide.account import account_canceled
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import AccessToken
if user is None:
return
AccessToken.objects.filter(user=user).delete()
|
import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AccessTokenConfig(AppConfig):
name = 'froide.accesstoken'
verbose_name = _('Secret Access Token')
def ready(self):
from froide.account import account_canceled
from froide.account.export import registry
account_canceled.connect(cancel_user)
registry.register(export_user_data)
def cancel_user(sender, user=None, **kwargs):
from .models import AccessToken
if user is None:
return
AccessToken.objects.filter(user=user).delete()
def export_user_data(user):
from .models import AccessToken
access_tokens = (
AccessToken.objects.filter(user=user)
)
if access_tokens:
yield ('access_tokens.json', json.dumps([
{
'purpose': a.purpose,
'timestamp': a.timestamp.isoformat(),
}
for a in access_tokens]).encode('utf-8')
)
|
Add user data export for accesstokens
|
Add user data export for accesstokens
|
Python
|
mit
|
fin/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide
|
9ad1929ee16a805acb9e8fbc57312466fdb1770e
|
cnxepub/tests/scripts/test_collated_single_html.py
|
cnxepub/tests/scripts/test_collated_single_html.py
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2016, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import mimetypes
import os.path
import tempfile
import unittest
try:
from unittest import mock
except ImportError:
import mock
from lxml import etree
from ...html_parsers import HTML_DOCUMENT_NAMESPACES
from ...testing import TEST_DATA_DIR, captured_output
class CollatedSingleHTMLTestCase(unittest.TestCase):
maxDiff = None
@property
def target(self):
from ...scripts.collated_single_html.main import main
return main
@property
def path_to_xhtml(self):
return os.path.join(TEST_DATA_DIR, 'desserts-single-page.xhtml')
def test_valid(self):
return_code = self.target([self.path_to_xhtml])
self.assertEqual(return_code, 0)
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2016, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
import io
import mimetypes
import os.path
import sys
import tempfile
import unittest
from lxml import etree
from ...html_parsers import HTML_DOCUMENT_NAMESPACES
from ...testing import TEST_DATA_DIR, captured_output
class CollatedSingleHTMLTestCase(unittest.TestCase):
maxDiff = None
@property
def target(self):
from ...scripts.collated_single_html.main import main
return main
@property
def path_to_xhtml(self):
return os.path.join(TEST_DATA_DIR, 'desserts-single-page.xhtml')
def test_valid(self):
return_code = self.target([self.path_to_xhtml])
self.assertEqual(return_code, 0)
def test_valid_with_tree(self):
# Capture stdout
orig_stdout = sys.stdout
self.addCleanup(setattr, sys, 'stdout', orig_stdout)
stdout = sys.stdout = io.BytesIO()
return_code = self.target([self.path_to_xhtml, '-d'])
self.assertEqual(return_code, 0)
stdout.seek(0)
self.assertIn('Fruity', stdout.read())
|
Add a test for the validate-collated tree output
|
Add a test for the validate-collated tree output
|
Python
|
agpl-3.0
|
Connexions/cnx-epub,Connexions/cnx-epub,Connexions/cnx-epub
|
020eea26390ab7fa20527ed24021522512a100a5
|
account_operating_unit/__manifest__.py
|
account_operating_unit/__manifest__.py
|
# © 2019 Eficent Business and IT Consulting Services S.L.
# © 2019 Serpent Consulting Services Pvt. Ltd.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
"name": "Accounting with Operating Units",
"summary": "Introduces Operating Unit (OU) in invoices and "
"Accounting Entries with clearing account",
"version": "13.0.1.1.0",
"author": "Eficent, "
"Serpent Consulting Services Pvt. Ltd.,"
"WilldooIT Pty Ltd,"
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/operating-unit",
"category": "Accounting & Finance",
"depends": ["account", "analytic_operating_unit"],
"license": "LGPL-3",
"data": [
"security/account_security.xml",
"views/account_move_view.xml",
"views/account_journal_view.xml",
"views/company_view.xml",
"views/account_payment_view.xml",
"views/account_invoice_report_view.xml",
],
}
|
# © 2019 Eficent Business and IT Consulting Services S.L.
# © 2019 Serpent Consulting Services Pvt. Ltd.
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
"name": "Accounting with Operating Units",
"summary": "Introduces Operating Unit (OU) in invoices and "
"Accounting Entries with clearing account",
"version": "13.0.1.1.0",
"author": "Eficent, "
"Serpent Consulting Services Pvt. Ltd.,"
"WilldooIT Pty Ltd,"
"Odoo Community Association (OCA)",
"website": "https://github.com/OCA/operating-unit",
"category": "Accounting & Finance",
"depends": ["account", "operating_unit"],
"license": "LGPL-3",
"data": [
"security/account_security.xml",
"views/account_move_view.xml",
"views/account_journal_view.xml",
"views/company_view.xml",
"views/account_payment_view.xml",
"views/account_invoice_report_view.xml",
],
}
|
Reduce the width of dependency from analytic_operating_unit to operating_unit
|
Reduce the width of dependency from analytic_operating_unit to operating_unit
|
Python
|
agpl-3.0
|
OCA/operating-unit,OCA/operating-unit
|
eba354cfaa96754b814daeb7fa453e538b07a879
|
krcurrency/utils.py
|
krcurrency/utils.py
|
""":mod:`krcurrency.utils` --- Helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from bs4 import BeautifulSoup as BS
import requests
__all__ = 'request',
def request(url, encoding='utf-8', parselib='lxml'):
"""url로 요청한 후 돌려받은 값을 BeautifulSoup 객체로 변환해서 반환합니다.
"""
r = requests.get(url)
if r.status_code != 200:
return None
soup = None
try:
soup = BS(r.text, parselib)
except Exception as e:
pass
return soup
|
""":mod:`krcurrency.utils` --- Helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from bs4 import BeautifulSoup as BS
import requests
__all__ = 'request', 'tofloat',
def request(url, encoding='utf-8', parselib='lxml'):
"""url로 요청한 후 돌려받은 값을 BeautifulSoup 객체로 변환해서 반환합니다.
"""
r = requests.get(url)
if r.status_code != 200:
return None
soup = None
try:
soup = BS(r.text, parselib)
except Exception as e:
pass
return soup
def tofloat(text):
transformed = None
try:
text = text.replace(',', '')
transformed = float(text)
except:
pass
return transformed
|
Add tofloat function that transforms from any float-based string into float
|
Add tofloat function that transforms from any float-based string into float
|
Python
|
mit
|
ssut/py-krcurrency
|
ee498b8a22def03fb745ebab53a875c9097d44b6
|
test/mgdpck/syntax_test.py
|
test/mgdpck/syntax_test.py
|
import mgdpck
from mgdpck import _version
from mgdpck import data_access
from mgdpck import exceptions
from mgdpck import logging_util
from mgdpck import model
from mgdpck import actions
from mgdpck.readers import *
from mgdpck.writters import *
from scripts import mgd
|
import mgdpck
from mgdpck import _version
from mgdpck import data_access
from mgdpck import exceptions
from mgdpck import logging_util
from mgdpck import model
from mgdpck import actions
from mgdpck.readers import *
from mgdpck.writters import *
# from scripts import mgd
|
Remove a problematic test on travis.
|
Remove a problematic test on travis.
|
Python
|
apache-2.0
|
Djabx/mgd
|
c1da1e8d15990efa7b30de241e3604bc824792dc
|
py101/introduction/__init__.py
|
py101/introduction/__init__.py
|
""""
Introduction Adventure
Author: igui
"""
import codecs
import io
import sys
import unittest
from story.adventures import AdventureVerificationError, BaseAdventure
from story.translation import gettext as _
class TestOutput(unittest.TestCase):
"Introduction Adventure test"
def __init__(self, sourcefile):
"Inits the test"
super(TestOutput, self).__init__()
self.sourcefile = sourcefile
def setUp(self):
self.__old_stdout = sys.stdout
sys.stdout = self.__mockstdout = io.StringIO()
def tearDown(self):
sys.stdout = self.__old_stdout
self.__mockstdout.close()
@staticmethod
def mock_print(stringy):
"Mock function"
pass
def runTest(self):
"Makes a simple test of the output"
raw_program = codecs.open(self.sourcefile).read()
code = compile(raw_program, self.sourcefile, 'exec', optimize=0)
exec(code)
self.assertEqual(
self.__mockstdout.getvalue().lower().strip(),
'hello world',
"Should have printed 'Hello World'"
)
class Adventure(BaseAdventure):
"Introduction Adventure"
title = _('Introduction')
@classmethod
def test(cls, sourcefile):
"Test against the provided file"
suite = unittest.TestSuite()
suite.addTest(TestOutput(sourcefile))
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
raise AdventureVerificationError()
|
""""
Introduction Adventure
Author: Ignacio Avas ([email protected])
"""
import codecs
import io
import sys
import unittest
from story.adventures import AdventureVerificationError, BaseAdventure
from story.translation import gettext as _
class TestOutput(unittest.TestCase):
"Introduction Adventure test"
def __init__(self, sourcefile):
"Inits the test"
super(TestOutput, self).__init__()
self.sourcefile = sourcefile
def setUp(self):
self.__old_stdout = sys.stdout
sys.stdout = self.__mockstdout = io.StringIO()
def tearDown(self):
sys.stdout = self.__old_stdout
self.__mockstdout.close()
@staticmethod
def mock_print(stringy):
"Mock function"
pass
def runTest(self):
"Makes a simple test of the output"
raw_program = codecs.open(self.sourcefile).read()
code = compile(raw_program, self.sourcefile, 'exec', optimize=0)
exec(code)
self.assertEqual(
self.__mockstdout.getvalue().lower().strip(),
'hello world',
"Should have printed 'Hello World'"
)
class Adventure(BaseAdventure):
"Introduction Adventure"
title = _('Introduction')
@classmethod
def test(cls, sourcefile):
"Test against the provided file"
suite = unittest.TestSuite()
suite.addTest(TestOutput(sourcefile))
result = unittest.TextTestRunner().run(suite)
if not result.wasSuccessful():
raise AdventureVerificationError()
|
Correct Author string in module
|
Correct Author string in module
|
Python
|
mit
|
sophilabs/py101
|
2651ddf1946ec489195ec9c3fb23e00e5735c79c
|
sites/cozylan/extension.py
|
sites/cozylan/extension.py
|
"""
Site-specific code extension
"""
from __future__ import annotations
from typing import Any
from flask import g
from byceps.services.seating import seat_service
from byceps.services.ticketing import ticket_service
def template_context_processor() -> dict[str, Any]:
"""Extend template context."""
if g.party_id is None:
return {}
sale_stats = ticket_service.get_ticket_sale_stats(g.party_id)
seat_utilization = seat_service.get_seat_utilization(g.party_id)
return {
'ticket_sale_stats': sale_stats,
'seat_utilization': seat_utilization,
}
|
"""
Site-specific code extension
"""
from __future__ import annotations
from typing import Any
from flask import g
from byceps.services.seating import seat_service
from byceps.services.ticketing import ticket_service
def template_context_processor() -> dict[str, Any]:
"""Extend template context."""
context = {}
if g.party_id is not None:
sale_stats = ticket_service.get_ticket_sale_stats(g.party_id)
seat_utilization = seat_service.get_seat_utilization(g.party_id)
context['ticket_sale_stats'] = sale_stats
context['seat_utilization'] = seat_utilization
return context
|
Restructure context assembly for CozyLAN site
|
Restructure context assembly for CozyLAN site
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
624ce97b011100cc1aac9446c7f1c8a97eae5f34
|
workshops/migrations/0040_add_country_to_online_events.py
|
workshops/migrations/0040_add_country_to_online_events.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def add_country_to_online_events(apps, schema_editor):
"""Add an 'Online' country to all events tagged with 'online' tag."""
Event = apps.get_model('workshops', 'Event')
Tag = apps.get_model('workshops', 'Tag')
online, _ = Tag.objects.get_or_create(
name='online',
defaults={'details': 'Events taking place entirely online'},
)
# Oceanic Pole of Inaccessibility coordinates:
# https://en.wikipedia.org/wiki/Pole_of_inaccessibility#Oceanic_pole_of_inaccessibility
latitude = -48.876667
longitude = -123.393333
Event.objects.filter(country__isnull=True, tags__in=[online]) \
.update(country='W3', latitude=latitude, longitude=longitude,
venue='Internet')
class Migration(migrations.Migration):
dependencies = [
('workshops', '0039_add_permission_groups'),
]
operations = [
migrations.RunPython(add_country_to_online_events),
]
|
Migrate online events to the Pole of Inaccessibility lat/long
|
Migrate online events to the Pole of Inaccessibility lat/long
...and 'internet' as a venue.
|
Python
|
mit
|
pbanaszkiewicz/amy,vahtras/amy,swcarpentry/amy,swcarpentry/amy,wking/swc-amy,vahtras/amy,wking/swc-amy,wking/swc-amy,vahtras/amy,pbanaszkiewicz/amy,wking/swc-amy,pbanaszkiewicz/amy,swcarpentry/amy
|
d2046bcf01d02ae9dee91363f61b6afcba3f882f
|
weave-and-docker-platform/app/app.py
|
weave-and-docker-platform/app/app.py
|
from flask import Flask
from redis import Redis
import os
app = Flask(__name__)
redis = Redis(host='redis', port=6379)
@app.route('/')
def hello():
redis.incr('hits')
return 'Hello World! I have been seen %s times.' % redis.get('hits')
if __name__ == "__main__":
app.run(host="0.0.0.0", debug=True)
|
from flask import Flask
from redis import Redis
import os
app = Flask(__name__)
redis = Redis(host='redis', port=6379)
@app.route('/')
def hello():
redis.incr('hits')
return 'Hello World! I have been seen %s times.\n' % redis.get('hits')
if __name__ == "__main__":
app.run(host="0.0.0.0", debug=True)
|
Add new line. This was annoying.
|
Add new line. This was annoying.
|
Python
|
apache-2.0
|
pessoa/guides,pessoa/guides
|
9ad98b4bbed0c67f25576187996e7e1d534f6a90
|
mammoth/__init__.py
|
mammoth/__init__.py
|
from .results import Result
from . import docx, conversion, style_reader
def convert_to_html(fileobj):
return docx.read(fileobj).bind(lambda document:
conversion.convert_document_element_to_html(document, styles=_create_default_styles())
)
def _create_default_styles():
lines = filter(None, map(lambda line: line.strip(), _default_styles.split("\n")))
return map(style_reader.read_style, lines)
_default_styles = """
p:unordered-list(1) => ul > li:fresh
"""
|
from .results import Result
from . import docx, conversion, style_reader
def convert_to_html(fileobj):
return docx.read(fileobj).bind(lambda document:
conversion.convert_document_element_to_html(document, styles=_create_default_styles())
)
def _create_default_styles():
lines = filter(None, map(lambda line: line.strip(), _default_styles.split("\n")))
return map(style_reader.read_style, lines)
_default_styles = """
p.Heading1 => h1:fresh
p.Heading2 => h2:fresh
p.Heading3 => h3:fresh
p.Heading4 => h4:fresh
p:unordered-list(1) => ul > li:fresh
p:unordered-list(2) => ul|ol > li > ul > li:fresh
p:unordered-list(3) => ul|ol > li > ul|ol > li > ul > li:fresh
p:unordered-list(4) => ul|ol > li > ul|ol > li > ul|ol > li > ul > li:fresh
p:unordered-list(5) => ul|ol > li > ul|ol > li > ul|ol > li > ul|ol > li > ul > li:fresh
p:ordered-list(1) => ol > li:fresh
p:ordered-list(2) => ul|ol > li > ol > li:fresh
p:ordered-list(3) => ul|ol > li > ul|ol > li > ol > li:fresh
p:ordered-list(4) => ul|ol > li > ul|ol > li > ul|ol > li > ol > li:fresh
p:ordered-list(5) => ul|ol > li > ul|ol > li > ul|ol > li > ul|ol > li > ol > li:fresh
"""
|
Add full list of default styles
|
Add full list of default styles
|
Python
|
bsd-2-clause
|
mwilliamson/python-mammoth,JoshBarr/python-mammoth
|
87f483668e352a5807ffafca061238f4a7f86fab
|
tests/Mechanics/Threads.py
|
tests/Mechanics/Threads.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from numpy.testing import assert_approx_equal, assert_allclose
from UliEngineering.Mechanics.Threads import *
import unittest
class TestThreads(unittest.TestCase):
def test_thread_params(self):
self.assertEqual(threads["M3"].outer_diameter, 41)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from numpy.testing import assert_approx_equal, assert_allclose
from UliEngineering.Mechanics.Threads import *
import unittest
class TestThreads(unittest.TestCase):
def test_thread_params(self):
self.assertEqual(threads["M3"].outer_diameter, 3.0)
|
Fix intentionally bad testcase which was not run before
|
Fix intentionally bad testcase which was not run before
|
Python
|
apache-2.0
|
ulikoehler/UliEngineering
|
afd6b5b29b60c59689e0a1be38a0483a7e4db312
|
miniraf/__init__.py
|
miniraf/__init__.py
|
import argparse
import astropy.io.fits as fits
import numpy as np
import calc
import combine
if __name__=="__main__":
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
args = argparser.parse_args()
print(args)
args.func(args)
|
import argparse
import calc
import combine
from combine import stack_fits_data
from calc import load_fits_data
def _argparse():
argparser = argparse.ArgumentParser()
subparsers = argparser.add_subparsers(help="sub-command help")
calc.create_parser(subparsers)
combine.create_parser(subparsers)
return argparser.parse_args()
def main():
args = _argparse()
args.func(args)
if __name__=="__main__":
main()
|
Create main() entry point for final script
|
Create main() entry point for final script
Signed-off-by: Lizhou Sha <[email protected]>
|
Python
|
mit
|
vulpicastor/miniraf
|
60bb1425e94e15b59a05b485113cc68ed0146ac8
|
nbtutor/__init__.py
|
nbtutor/__init__.py
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
# -*- coding: utf-8 -*-
"""
nbtutor - a small utility to indicate which cells should be cleared (exercises).
"""
import os
try:
from nbconvert.preprocessors.base import Preprocessor
except ImportError:
from IPython.nbconvert.preprocessors.base import Preprocessor
from traitlets import Unicode
class ClearExercisePreprocessor(Preprocessor):
solutions_dir = Unicode("_solutions").tag(config=True)
def __init__(self, **kw):
if not os.path.exists(self.solutions_dir):
os.makedirs(self.solutions_dir)
super(Preprocessor, self).__init__(**kw)
def preprocess_cell(self, cell, resources, index):
if 'clear_cell' in cell.metadata and cell.metadata.clear_cell:
fname = os.path.join(
self.solutions_dir, resources['metadata']['name'] + str(cell['execution_count']) + '.py')
with open(fname, 'w') as f:
f.write(cell['source'])
cell['source'] = ["# %load {0}".format(fname)]
cell['outputs'] = []
# cell['source'] = []
return cell, resources
|
Create solutions directory if it does not exist
|
Create solutions directory if it does not exist
|
Python
|
bsd-2-clause
|
jorisvandenbossche/nbtutor,jorisvandenbossche/nbtutor
|
09195f50e328d3aee4cc60f0702d8605ea520eb3
|
tests/sentry/utils/models/tests.py
|
tests/sentry/utils/models/tests.py
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
|
from __future__ import absolute_import
from django.db import models
from sentry.utils.models import Model
from sentry.testutils import TestCase
# There's a good chance this model wont get created in the db, so avoid
# assuming it exists in these tests.
class DummyModel(Model):
foo = models.CharField(max_length=32)
class ModelTest(TestCase):
def test_foo_hasnt_changed_on_init(self):
inst = DummyModel(id=1, foo='bar')
self.assertFalse(inst.has_changed('foo'))
def test_foo_has_changes_before_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
def test_foo_hasnt_changed_after_save(self):
inst = DummyModel(id=1, foo='bar')
inst.foo = 'baz'
self.assertTrue(inst.has_changed('foo'))
self.assertEquals(inst.old_value('foo'), 'bar')
models.signals.post_save.send(instance=inst, sender=type(inst), created=False)
self.assertFalse(inst.has_changed('foo'))
|
Add missing assertion in test
|
Add missing assertion in test
|
Python
|
bsd-3-clause
|
NickPresta/sentry,jokey2k/sentry,1tush/sentry,zenefits/sentry,SilentCircle/sentry,wujuguang/sentry,ifduyue/sentry,Kryz/sentry,JamesMura/sentry,Natim/sentry,NickPresta/sentry,BuildingLink/sentry,rdio/sentry,BuildingLink/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,JackDanger/sentry,SilentCircle/sentry,ifduyue/sentry,beeftornado/sentry,hongliang5623/sentry,nicholasserra/sentry,NickPresta/sentry,camilonova/sentry,fotinakis/sentry,jokey2k/sentry,fuziontech/sentry,BuildingLink/sentry,korealerts1/sentry,looker/sentry,wong2/sentry,ifduyue/sentry,BayanGroup/sentry,gencer/sentry,beni55/sentry,mvaled/sentry,SilentCircle/sentry,wujuguang/sentry,imankulov/sentry,jean/sentry,JTCunning/sentry,1tush/sentry,looker/sentry,songyi199111/sentry,zenefits/sentry,jean/sentry,1tush/sentry,ngonzalvez/sentry,TedaLIEz/sentry,daevaorn/sentry,drcapulet/sentry,NickPresta/sentry,BayanGroup/sentry,BuildingLink/sentry,JackDanger/sentry,camilonova/sentry,vperron/sentry,beeftornado/sentry,gencer/sentry,kevinastone/sentry,nicholasserra/sentry,mitsuhiko/sentry,Natim/sentry,looker/sentry,mvaled/sentry,camilonova/sentry,ewdurbin/sentry,korealerts1/sentry,songyi199111/sentry,Kryz/sentry,alexm92/sentry,Natim/sentry,daevaorn/sentry,argonemyth/sentry,ifduyue/sentry,rdio/sentry,kevinastone/sentry,gencer/sentry,jean/sentry,daevaorn/sentry,argonemyth/sentry,ewdurbin/sentry,imankulov/sentry,wujuguang/sentry,fotinakis/sentry,argonemyth/sentry,imankulov/sentry,TedaLIEz/sentry,vperron/sentry,drcapulet/sentry,songyi199111/sentry,BayanGroup/sentry,daevaorn/sentry,kevinlondon/sentry,gg7/sentry,beni55/sentry,gencer/sentry,vperron/sentry,rdio/sentry,pauloschilling/sentry,mvaled/sentry,mvaled/sentry,llonchj/sentry,boneyao/sentry,Kryz/sentry,gg7/sentry,BuildingLink/sentry,jean/sentry,looker/sentry,pauloschilling/sentry,jean/sentry,mitsuhiko/sentry,zenefits/sentry,zenefits/sentry,alexm92/sentry,pauloschilling/sentry,alexm92/sentry,hongliang5623/sentry,mvaled/sentry,wong2/sentry,JTCunning/sentry,beeftornado/sentry,beni55/sentry,JamesMura/sentry,jokey2k/sentry,fuziontech/sentry,korealerts1/sentry,JamesMura/sentry,JTCunning/sentry,llonchj/sentry,JamesMura/sentry,JackDanger/sentry,drcapulet/sentry,fuziontech/sentry,kevinlondon/sentry,gg7/sentry,zenefits/sentry,boneyao/sentry,felixbuenemann/sentry,SilentCircle/sentry,kevinastone/sentry,TedaLIEz/sentry,looker/sentry,rdio/sentry,felixbuenemann/sentry,boneyao/sentry,ewdurbin/sentry,gencer/sentry,ngonzalvez/sentry,ifduyue/sentry,felixbuenemann/sentry,fotinakis/sentry,fotinakis/sentry,nicholasserra/sentry,hongliang5623/sentry,llonchj/sentry,wong2/sentry,kevinlondon/sentry
|
57d49b185d1daf0e6a27e0daee8960c2816615cc
|
alg_kruskal_minimum_spanning_tree.py
|
alg_kruskal_minimum_spanning_tree.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def kruskal():
"""Kruskal's algorithm for minimum spanning tree in weighted graph.
Time complexity for graph G(V, E): TBD.
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def kruskal():
"""Kruskal's algorithm for minimum spanning tree
in weighted graph.
Time complexity for graph G(V, E):
O(|E|+|V|+|E|log(|V|)) = O(|E|log(|V|^2)) = O(|E|log(|V|)).
"""
pass
def main():
w_graph_d = {
'a': {'b': 1, 'd': 4, 'e': 3},
'b': {'a': 1, 'd': 4, 'e': 2},
'c': {'e': 4, 'f': 5},
'd': {'a': 4, 'b': 4, 'e': 4},
'e': {'a': 3, 'b': 2, 'c': 4, 'd': 4, 'f': 7},
'f': {'c': 5, 'e': 7}
}
print('w_graph_d:\n{}'.format(w_graph_d))
print('Kruskal\'s minimum spanning tree:')
pass
if __name__ == '__main__':
main()
|
Revise doc string and add time complexity
|
Revise doc string and add time complexity
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
c1dc5494c461677e15be52576c55585742ad4a7a
|
bluebottle/bb_follow/migrations/0003_auto_20180530_1621.py
|
bluebottle/bb_follow/migrations/0003_auto_20180530_1621.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-05-30 14:21
from __future__ import unicode_literals
from django.db import migrations
def fix_followers(apps, schema_editor):
Donation = apps.get_model('donations', 'Donation')
Follow = apps.get_model('bb_follow', 'Follow')
ContentType = apps.get_model('contenttypes', 'ContentType')
for donation in Donation.objects.\
filter(order__status__in=['success', 'pending']).\
exclude(order__order_type='recurring').all():
user = donation.order.user
followed_object = donation.project
content_type = ContentType.objects.get_for_model(followed_object)
# A Follow object should link the project to the user, not the
# donation and the user
if user and followed_object and user != followed_object.owner:
if not Follow.objects.filter(user=user, object_id=followed_object.id, content_type=content_type).count():
Follow.objects.create(user=user, object_id=followed_object.id, content_type=content_type)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('bb_follow', '0002_follow_user'),
]
operations = [
migrations.RunPython(fix_followers, dummy)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-05-30 14:21
from __future__ import unicode_literals
from django.db import migrations
def fix_followers(apps, schema_editor):
Donation = apps.get_model('donations', 'Donation')
Follow = apps.get_model('bb_follow', 'Follow')
ContentType = apps.get_model('contenttypes', 'ContentType')
for donation in Donation.objects.\
filter(order__status__in=['success', 'pending']).\
exclude(order__order_type='recurring').all():
user = donation.order.user
followed_object = donation.project
content_type = ContentType.objects.get_for_model(followed_object)
# A Follow object should link the project to the user, not the
# donation and the user
if user and followed_object and user != followed_object.owner:
if not Follow.objects.filter(user=user, object_id=followed_object.id, content_type=content_type).count():
Follow.objects.create(user=user, object_id=followed_object.id, content_type=content_type)
def dummy(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('bb_follow', '0002_follow_user'),
('donations', '0008_auto_20170927_1021')
]
operations = [
migrations.RunPython(fix_followers, dummy)
]
|
Add donation migration dependancy in bb_follow
|
Add donation migration dependancy in bb_follow
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
0284126969e76a55a00aa4e4ce22f089d543c1dc
|
vizier/__init__.py
|
vizier/__init__.py
|
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Init file."""
import os
import sys
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
PROTO_ROOT = os.path.realpath(os.path.join(THIS_DIR, "service"))
sys.path.append(PROTO_ROOT)
__version__ = "0.0.10"
|
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Init file."""
import os
import sys
THIS_DIR = os.path.dirname(os.path.realpath(__file__))
PROTO_ROOT = os.path.realpath(os.path.join(THIS_DIR, "service"))
sys.path.append(PROTO_ROOT)
__version__ = "0.0.11"
|
Update to 0.0.11 version for PyPI.
|
Update to 0.0.11 version for PyPI.
PiperOrigin-RevId: 484429105
|
Python
|
apache-2.0
|
google/vizier,google/vizier
|
51801b0902e5a33c38e4c3cbff243ca6529bab64
|
app/awards/forms.py
|
app/awards/forms.py
|
from flask.ext.wtf import Form
from wtforms import SelectField, RadioField
# TODO add validation to ensure same award isn't assigned twice
class AwardWinnerForm(Form):
# TODO these category constants should live separately
category_id = SelectField(u'Award category',
choices=[(0, 'Champions Award'),
(1, 'Research Award'),
(2, 'Presentation Award'),
(3, 'Innovative Solution Award'),
(4, 'Mechanical Design Award'),
(5, 'Programming Award'),
(6, 'Strategy and Innovation Award'),
(7, 'Teamwork Award'),
(8, 'Inspiration Award'),
(9, 'Team Spirit Award'),
(10, 'Robot Performance Award')],
coerce=int)
place = RadioField(u'Place',
choices=[(0, '1st'), (1, '2nd'), (2, '3rd')],
default=0,
coerce=int)
team_id = SelectField(u'Team', coerce=int)
|
from flask.ext.wtf import Form
from wtforms import SelectField, RadioField
from .models import AwardCategory
# TODO add validation to ensure same award isn't assigned twice
class AwardWinnerForm(Form):
# TODO these category constants should live separately
category_id = SelectField(u'Award category',
choices=[(i.value, i.friendly_name) for i in AwardCategory],
coerce=int)
place = RadioField(u'Place',
choices=[(0, '1st'), (1, '2nd'), (2, '3rd')],
default=0,
coerce=int)
team_id = SelectField(u'Team', coerce=int)
|
Update award winner form to use award category enum for select field choices
|
Update award winner form to use award category enum for select field choices
|
Python
|
mit
|
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
|
1c5a4afa06f56ca8fd7c36b633b7f73d259f1281
|
lib/filesystem/__init__.py
|
lib/filesystem/__init__.py
|
import os
__author__ = 'mfliri'
def create_directory(output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
|
import os
def create_directory(output_dir):
if not os.path.exists(output_dir):
os.makedirs(output_dir)
|
Remove author notice and add newline at end of file
|
Remove author notice and add newline at end of file
|
Python
|
mit
|
alphagov/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,alphagov/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer,gds-attic/transactions-explorer
|
96a6b929d80bd5ad8a7bf5d09955b3e45e5bbe56
|
test/test_Spectrum.py
|
test/test_Spectrum.py
|
#!/usr/bin/env python
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
from hypothesis import given
import hypothesis.strategies as st
@given(st.lists(st.floats()), st.lists(st.floats()), st.booleans())
def test_spectrum_assigns_hypothesis_data(y, x, z):
spec = Spectrum.Spectrum(y, x, z)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
calib_val = 0
spec = Spectrum.Spectrum(y, x, calibrated=calib_val)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == calib_val
|
#!/usr/bin/env python
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
from hypothesis import given
import hypothesis.strategies as st
@given(st.lists(st.floats()), st.lists(st.floats()), st.booleans())
def test_spectrum_assigns_hypothesis_data(y, x, z):
spec = Spectrum.Spectrum(y, x, z)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
calib_val = 0
spec = Spectrum.Spectrum(y, x, calibrated=calib_val)
assert spec.flux == y
assert spec.xaxis == x
assert spec.calibrated == calib_val
@given(st.lists(st.floats()), st.lists(st.floats()), st.booleans(), st.floats(), st.floats())
def test_wav_select(y, x, calib, wav_min, wav_max):
# Create specturm
spec = Spectrum.Spectrum(y, xaxis=x, calibrated=calib)
# Select wavelength values
spec.wav_select(wav_min, wav_max)
# All values in selected spectrum should be less than the max and greater than the min value.
if isinstance(spec.xaxis, list):
assert all([xval >= wav_min for xval in spec.xaxis])
assert all([xval <= wav_max for xval in spec.xaxis])
else:
assert all(spec.xaxis >= wav_min)
assert all(spec.xaxis <= wav_max)
##Also need to test asignment!
# spec2 = spec.wav_selector()
|
Test property of wavelength selection
|
Test property of wavelength selection
That afterwards the values are all above and below the min and max
values used.
|
Python
|
mit
|
jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload
|
c61e595098cd4b03828a81db98fb1e2b91b2eec0
|
anna/model/utils.py
|
anna/model/utils.py
|
import tensorflow as tf
def rnn_cell(num_units, dropout, mode, residual=False, name=None, reuse=None):
dropout = dropout if mode == tf.contrib.learn.ModeKeys.TRAIN else 0.0
cell = tf.nn.rnn_cell.GRUCell(num_units, name=name, reuse=reuse)
if dropout > 0.0:
keep_prop = (1.0 - dropout)
cell = tf.nn.rnn_cell.DropoutWrapper(
cell=cell,
input_keep_prob=keep_prop,
output_keep_prob=keep_prop,
state_keep_prob=keep_prop
)
if residual:
cell = tf.nn.rnn_cell.ResidualWrapper(cell)
return cell
|
import tensorflow as tf
def rnn_cell(num_units, dropout, mode, residual=False, name=None, reuse=None):
dropout = dropout if mode == tf.contrib.learn.ModeKeys.TRAIN else 0.0
cell = tf.nn.rnn_cell.GRUCell(num_units, name=name, reuse=reuse)
if dropout > 0.0:
keep_prop = (1.0 - dropout)
cell = tf.nn.rnn_cell.DropoutWrapper(
cell=cell,
input_keep_prob=keep_prop,
)
if residual:
cell = tf.nn.rnn_cell.ResidualWrapper(cell)
return cell
|
Remove dropout from output/state in rnn cells
|
Remove dropout from output/state in rnn cells
|
Python
|
mit
|
jpbottaro/anna
|
37207b2e91eee3f7f2417ff8157fada890c2d05b
|
blaze/tests/test_calc.py
|
blaze/tests/test_calc.py
|
import blaze
from blaze.datadescriptor import dd_as_py
import unittest
class TestBasic(unittest.TestCase):
def test_add(self):
types = ['int8', 'int16', 'int32', 'int64']
for type_ in types:
a = blaze.array(range(3), dshape=type_)
c = blaze.eval(((a+a)*a))
self.assertEqual(dd_as_py(c._data), [0, 2, 8])
#FIXME: Need to convert uint8 from dshape to ctypes
# in _get_ctypes of blaze_kernel.py
def test_mixed(self):
types1 = ['int8', 'int16', 'int32', 'int64']
types2 = ['int16', 'int32', 'float32', 'float64']
for ty1, ty2 in zip(types1, types2):
a = blaze.array(range(1,6), dshape=ty1)
b = blaze.array(range(5), dshape=ty2)
c = (a+b)*(a-b)
c = blaze.eval(c)
result = [a*a - b*b for (a,b) in zip(range(1,6),range(5))]
self.assertEqual(dd_as_py(c._data), result)
if __name__ == '__main__':
unittest.main()
|
import blaze
from blaze.datadescriptor import dd_as_py
import unittest
class TestBasic(unittest.TestCase):
def test_add(self):
types = ['int8', 'int16', 'int32', 'int64']
for type_ in types:
a = blaze.array(range(3), dshape=type_)
c = blaze.eval(a+a)
self.assertEqual(dd_as_py(c._data), [0, 2, 4])
c = blaze.eval(((a+a)*a))
self.assertEqual(dd_as_py(c._data), [0, 2, 8])
#FIXME: Need to convert uint8 from dshape to ctypes
# in _get_ctypes of blaze_kernel.py
def test_mixed(self):
types1 = ['int8', 'int16', 'int32', 'int64']
types2 = ['int16', 'int32', 'float32', 'float64']
for ty1, ty2 in zip(types1, types2):
a = blaze.array(range(1,6), dshape=ty1)
b = blaze.array(range(5), dshape=ty2)
c = (a+b)*(a-b)
c = blaze.eval(c)
result = [a*a - b*b for (a,b) in zip(range(1,6),range(5))]
self.assertEqual(dd_as_py(c._data), result)
if __name__ == '__main__':
unittest.main()
|
Add a test case of just two identical arguments to a blaze func
|
Add a test case of just two identical arguments to a blaze func
|
Python
|
bsd-3-clause
|
cowlicks/blaze,ChinaQuants/blaze,scls19fr/blaze,ContinuumIO/blaze,AbhiAgarwal/blaze,mrocklin/blaze,dwillmer/blaze,AbhiAgarwal/blaze,alexmojaki/blaze,maxalbert/blaze,ContinuumIO/blaze,markflorisson/blaze-core,maxalbert/blaze,LiaoPan/blaze,FrancescAlted/blaze,scls19fr/blaze,markflorisson/blaze-core,cowlicks/blaze,cpcloud/blaze,jdmcbr/blaze,alexmojaki/blaze,jcrist/blaze,FrancescAlted/blaze,AbhiAgarwal/blaze,mwiebe/blaze,mrocklin/blaze,nkhuyu/blaze,nkhuyu/blaze,caseyclements/blaze,dwillmer/blaze,FrancescAlted/blaze,mwiebe/blaze,aterrel/blaze,aterrel/blaze,markflorisson/blaze-core,caseyclements/blaze,mwiebe/blaze,cpcloud/blaze,FrancescAlted/blaze,xlhtc007/blaze,jcrist/blaze,AbhiAgarwal/blaze,mwiebe/blaze,LiaoPan/blaze,markflorisson/blaze-core,aterrel/blaze,xlhtc007/blaze,jdmcbr/blaze,ChinaQuants/blaze
|
aa114d31ab8f97430faf0c16de2d6aff577a0d20
|
anchorhub/lib/filetolist.py
|
anchorhub/lib/filetolist.py
|
"""
Class for FileToList
"""
class FileToList(object):
"""
FileToList is a helper class used to import text files and turn them into
lists, with each index in the list representing a single line from the
text file.
"""
@staticmethod
def to_list(file_path):
"""
Static method. Takes in a file path, and outputs a list of stings.
Each element in the list corresponds to a line in the file.
:param file_path: string file path
:return: A list of strings, with elements in the list corresponding
to lines in the file pointed to in file_path
"""
l = []
f = open(file_path)
for line in f:
l.append(line)
f.close()
return l
|
"""
Class for FileToList
"""
class FileToList(object):
"""
FileToList is a helper class used to import text files and turn them into
lists, with each index in the list representing a single line from the
text file.
"""
@staticmethod
def to_list(file_path):
"""
Static method. Takes in a file path, and outputs a list of stings.
Each element in the list corresponds to a line in the file.
:param file_path: string file path
:return: A list of strings, with elements in the list corresponding
to lines in the file pointed to in file_path
"""
l = []
f = open(file_path, 'rb')
for line in f:
l.append(line)
f.close()
return l
|
Add 'rb' flag to FileToList
|
Add 'rb' flag to FileToList
|
Python
|
apache-2.0
|
samjabrahams/anchorhub
|
66946f72d243f1836df0dbd8917f204011ec1701
|
hs_core/autocomplete_light_registry.py
|
hs_core/autocomplete_light_registry.py
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = ""
if choice.first_name:
label += choice.first_name
if choice.last_name:
if choice.first_name:
label += " "
label += choice.last_name
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
from autocomplete_light import shortcuts as autocomplete_light
from django.contrib.auth.models import User, Group
class UserAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields = ['username', 'first_name', 'last_name']
split_words = True
def choices_for_request(self):
self.choices = self.choices.filter(is_active=True)
return super(UserAutocomplete, self).choices_for_request()
def choice_label(self, choice):
label = " ".join([choice.first_name or "", choice.userprofile.middle_name or "", choice.last_name or ""])
if choice.userprofile.organization:
if choice.first_name or choice.last_name:
label += ", "
label += choice.userprofile.organization
if choice.username:
label += "".join([" (", choice.username, ")"])
return label
autocomplete_light.register(User, UserAutocomplete)
class GroupAutocomplete(autocomplete_light.AutocompleteModelBase):
search_fields=['name']
def choices_for_request(self):
self.choices = self.choices.filter(gaccess__active=True).exclude(name='Hydroshare Author')
return super(GroupAutocomplete, self).choices_for_request()
autocomplete_light.register(Group, GroupAutocomplete)
|
Add middle name display to autocomplete widget
|
Add middle name display to autocomplete widget
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
f5d36900f7b0503a60a526fd70b57ecb91625fa0
|
armstrong/core/arm_sections/views.py
|
armstrong/core/arm_sections/views.py
|
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self):
return self.get_section()
def get_section(self):
return get_object_or_404(self.get_queryset(),
full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
from django.core.urlresolvers import reverse
from django.views.generic import DetailView
from django.contrib.syndication.views import Feed
from django.shortcuts import get_object_or_404
from .models import Section
class SimpleSectionView(DetailView):
context_object_name = 'section'
model = Section
def get_object(self, queryset=None):
return self.get_section(queryset=queryset)
def get_section(self, queryset=None):
if queryset is None:
queryset = self.get_queryset()
return get_object_or_404(queryset, full_slug=self.kwargs['full_slug'])
class SectionFeed(Feed):
def __init__(self, section_view, *args, **kwargs):
self.section_view = section_view
def get_object(self, request, full_slug):
return Section.objects.get(full_slug=full_slug)
def title(self, section):
return section.title
def link(self, section):
return reverse(self.section_view,
kwargs={'full_slug': section.full_slug})
def description(self, section):
return section.summary
def items(self, section):
return section.items
|
Handle queryset argument to get_object
|
Handle queryset argument to get_object
|
Python
|
apache-2.0
|
texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections
|
7e766747dbda4548b63b278e062335c8a10fe008
|
src/vimapt/library/vimapt/data_format/yaml.py
|
src/vimapt/library/vimapt/data_format/yaml.py
|
from pureyaml import dump as dumps
from pureyaml import load as loads
__all__ = ['dumps', 'loads']
|
from __future__ import absolute_import
import functools
from yaml import dump, Dumper, load, Loader
dumps = functools.partial(dump, Dumper=Dumper)
loads = functools.partial(load, Loader=Loader)
__all__ = ['dumps', 'loads']
|
Use PyYAML as YAML's loader and dumper
|
Use PyYAML as YAML's loader and dumper
|
Python
|
mit
|
howl-anderson/vimapt,howl-anderson/vimapt
|
f600ec497a6ff20c4cd8c983e27482fc77ab4deb
|
moksha/api/hub/consumer.py
|
moksha/api/hub/consumer.py
|
"""
Consumers
=========
A `Consumer` is a simple consumer of messages. Based on a given `routing_key`,
your consumer's :meth:`consume` method will be called with the message.
Example consumers:
-tapping into a koji build, and sending a notification?
- hook into a given RSS feed and save data in a DB?
Adding a new consumer
---------------------
Adding a new Consumer to Moksha is as easy as adding it to the `[moksha.consumer]`
entry-point in your `setup.py` file::
[moksha.consumer]
myconsumer = myproject.module:MyConsumer
"""
class Consumer(object):
queue = None
def consume(self, message):
raise NotImplementedError
|
# This file is part of Moksha.
#
# Moksha is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Moksha is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Moksha. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2008, Red Hat, Inc.
# Authors: Luke Macken <[email protected]>
class Consumer(object):
""" A message consumer """
topic = None
def consume(self, message):
raise NotImplementedError
|
Update our message Consumer api to consume a `topic`, not a `queue`.
|
Update our message Consumer api to consume a `topic`, not a `queue`.
|
Python
|
apache-2.0
|
lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,lmacken/moksha,pombredanne/moksha,mokshaproject/moksha,ralphbean/moksha,mokshaproject/moksha,mokshaproject/moksha,ralphbean/moksha,ralphbean/moksha,pombredanne/moksha,lmacken/moksha
|
0a5513672c7d591338f3a7db1f87c78f137f7d1f
|
apps/mozorg/urls.py
|
apps/mozorg/urls.py
|
from django.conf.urls.defaults import *
from views import home, contribute, channel, firefox_performance, firefox_features, firefox_customize, firefox_happy, firefox_security, firefox_speed, firefox_technology, button, new, sandstone, geolocation
urlpatterns = patterns('',
url(r'^home/', home, name='mozorg.home'),
(r'^button/', button),
(r'^channel/', channel),
(r'^new/', new),
(r'^sandstone/', sandstone),
url(r'^contribute/', contribute, name='mozorg.contribute'),
(r'^firefox/geolocation/', geolocation),
url(r'^firefox/customize/', firefox_customize, name='mozorg.firefox_customize'),
url(r'^firefox/features/', firefox_features, name='mozorg.firefox_features'),
url(r'^firefox/happy/', firefox_happy, name='mozorg.firefox_happy'),
url(r'^firefox/performance/', firefox_performance, name='mozorg.firefox_performance'),
url(r'^firefox/security/', firefox_security, name='mozorg.firefox_security'),
url(r'^firefox/speed/', firefox_speed, name='mozorg.firefox_speed'),
url(r'^firefox/technology/', firefox_technology, name='mozorg.firefox_technology'),
)
|
from django.conf.urls.defaults import *
from views import home, contribute, channel, firefox_performance, firefox_features, firefox_customize, firefox_happy, firefox_security, firefox_speed, firefox_technology, button, new, sandstone, geolocation
urlpatterns = patterns('',
url(r'^$', home, name='mozorg.home'),
(r'^button/', button),
(r'^channel/', channel),
(r'^new/', new),
(r'^sandstone/', sandstone),
url(r'^contribute/', contribute, name='mozorg.contribute'),
(r'^firefox/geolocation/', geolocation),
url(r'^firefox/customize/', firefox_customize, name='mozorg.firefox_customize'),
url(r'^firefox/features/', firefox_features, name='mozorg.firefox_features'),
url(r'^firefox/happy/', firefox_happy, name='mozorg.firefox_happy'),
url(r'^firefox/performance/', firefox_performance, name='mozorg.firefox_performance'),
url(r'^firefox/security/', firefox_security, name='mozorg.firefox_security'),
url(r'^firefox/speed/', firefox_speed, name='mozorg.firefox_speed'),
url(r'^firefox/technology/', firefox_technology, name='mozorg.firefox_technology'),
)
|
Fix URL for home page (thanks jlongster)
|
Fix URL for home page (thanks jlongster)
|
Python
|
mpl-2.0
|
marcoscaceres/bedrock,davehunt/bedrock,pmclanahan/bedrock,sylvestre/bedrock,SujaySKumar/bedrock,Sancus/bedrock,TheJJ100100/bedrock,kyoshino/bedrock,mkmelin/bedrock,elin-moco/bedrock,kyoshino/bedrock,schalkneethling/bedrock,jacshfr/mozilla-bedrock,elin-moco/bedrock,mozilla/bedrock,mmmavis/bedrock,kyoshino/bedrock,chirilo/bedrock,CSCI-462-01-2017/bedrock,elin-moco/bedrock,SujaySKumar/bedrock,andreadelrio/bedrock,mozilla/bedrock,pascalchevrel/bedrock,andreadelrio/bedrock,craigcook/bedrock,yglazko/bedrock,mozilla/mwc,mahinthjoe/bedrock,amjadm61/bedrock,dudepare/bedrock,mmmavis/bedrock,ericawright/bedrock,sgarrity/bedrock,glogiotatidis/bedrock,marcoscaceres/bedrock,pmclanahan/bedrock,TheJJ100100/bedrock,Sancus/bedrock,mmmavis/lightbeam-bedrock-website,glogiotatidis/bedrock,TheoChevalier/bedrock,rishiloyola/bedrock,mozilla/bedrock,jgmize/bedrock,SujaySKumar/bedrock,amjadm61/bedrock,bensternthal/bedrock,l-hedgehog/bedrock,ericawright/bedrock,ckprice/bedrock,mozilla/mwc,jgmize/bedrock,mozilla/bedrock,jgmize/bedrock,alexgibson/bedrock,TheoChevalier/bedrock,sgarrity/bedrock,mermi/bedrock,mermi/bedrock,petabyte/bedrock,craigcook/bedrock,analytics-pros/mozilla-bedrock,mahinthjoe/bedrock,chirilo/bedrock,jacshfr/mozilla-bedrock,pmclanahan/bedrock,sgarrity/bedrock,flodolo/bedrock,davehunt/bedrock,amjadm61/bedrock,Jobava/bedrock,mermi/bedrock,bensternthal/bedrock,pascalchevrel/bedrock,davehunt/bedrock,ckprice/bedrock,MichaelKohler/bedrock,TheoChevalier/bedrock,Jobava/bedrock,mahinthjoe/bedrock,gauthierm/bedrock,sylvestre/bedrock,CSCI-462-01-2017/bedrock,yglazko/bedrock,malena/bedrock,davidwboswell/documentation_autoresponse,mmmavis/bedrock,CSCI-462-01-2017/bedrock,elin-moco/bedrock,amjadm61/bedrock,malena/bedrock,sylvestre/bedrock,glogiotatidis/bedrock,davidwboswell/documentation_autoresponse,malena/bedrock,gauthierm/bedrock,l-hedgehog/bedrock,mkmelin/bedrock,kyoshino/bedrock,sylvestre/bedrock,mmmavis/bedrock,gauthierm/bedrock,sgarrity/bedrock,TheJJ100100/bedrock,davehunt/bedrock,ckprice/bedrock,rishiloyola/bedrock,rishiloyola/bedrock,Jobava/bedrock,petabyte/bedrock,jacshfr/mozilla-bedrock,pascalchevrel/bedrock,MichaelKohler/bedrock,yglazko/bedrock,MichaelKohler/bedrock,mermi/bedrock,chirilo/bedrock,petabyte/bedrock,SujaySKumar/bedrock,mahinthjoe/bedrock,dudepare/bedrock,chirilo/bedrock,alexgibson/bedrock,analytics-pros/mozilla-bedrock,mkmelin/bedrock,CSCI-462-01-2017/bedrock,jacshfr/mozilla-bedrock,l-hedgehog/bedrock,amjadm61/bedrock,flodolo/bedrock,analytics-pros/mozilla-bedrock,jpetto/bedrock,marcoscaceres/bedrock,davidwboswell/documentation_autoresponse,schalkneethling/bedrock,Sancus/bedrock,schalkneethling/bedrock,craigcook/bedrock,andreadelrio/bedrock,alexgibson/bedrock,gerv/bedrock,gauthierm/bedrock,schalkneethling/bedrock,gerv/bedrock,mozilla/mwc,bensternthal/bedrock,malena/bedrock,davidwboswell/documentation_autoresponse,mmmavis/lightbeam-bedrock-website,ckprice/bedrock,Jobava/bedrock,mmmavis/lightbeam-bedrock-website,craigcook/bedrock,mozilla/mwc,bensternthal/bedrock,jpetto/bedrock,yglazko/bedrock,glogiotatidis/bedrock,hoosteeno/bedrock,l-hedgehog/bedrock,andreadelrio/bedrock,jacshfr/mozilla-bedrock,jpetto/bedrock,pmclanahan/bedrock,petabyte/bedrock,alexgibson/bedrock,mkmelin/bedrock,hoosteeno/bedrock,ericawright/bedrock,dudepare/bedrock,MichaelKohler/bedrock,analytics-pros/mozilla-bedrock,hoosteeno/bedrock,marcoscaceres/bedrock,TheJJ100100/bedrock,Sancus/bedrock,dudepare/bedrock,hoosteeno/bedrock,pascalchevrel/bedrock,ericawright/bedrock,jpetto/bedrock,TheoChevalier/bedrock,rishiloyola/bedrock,gerv/bedrock,gerv/bedrock,jgmize/bedrock,flodolo/bedrock,flodolo/bedrock
|
357a445021bd459cc0196269033ea181594a1456
|
UliEngineering/Physics/NTC.py
|
UliEngineering/Physics/NTC.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Utilities regarding NTC thermistors
See http://www.vishay.com/docs/29053/ntcintro.pdf for details
"""
from UliEngineering.Physics.Temperature import zero_point_celsius, normalize_temperature
from UliEngineering.EngineerIO import normalize_numeric
from UliEngineering.Units import Unit
import numpy as np
__all__ = ["ntc_resistance"]
def ntc_resistance(r25, b25, t) -> Unit("Ω"):
"""
Compute the NTC resistance by temperature and NTC parameters
Parameters
----------
r25 : float or EngineerIO string
The NTC resistance at 25°C, sometimes also called "nominal resistance"
b25: float or EngineerIO string
The NTC b-constant (e.g. b25/50, b25/85 or b25/100)
t : temperature
The temperature. Will be interpreted using normalize_temperature()
"""
# Normalize inputs
r25 = normalize_numeric(r25)
b25 = normalize_numeric(b25)
t = normalize_temperature(t) # t is now in Kelvins
# Compute resistance
return r25 * np.exp(b25 * (1./t - 1./(25. + zero_point_celsius)))
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Utilities regarding NTC thermistors
See http://www.vishay.com/docs/29053/ntcintro.pdf for details
"""
from UliEngineering.Physics.Temperature import normalize_temperature
from UliEngineering.EngineerIO import normalize_numeric
from UliEngineering.Units import Unit
import numpy as np
from scipy.constants import zero_Celsius
__all__ = ["ntc_resistance"]
def ntc_resistance(r25, b25, t) -> Unit("Ω"):
"""
Compute the NTC resistance by temperature and NTC parameters
Parameters
----------
r25 : float or EngineerIO string
The NTC resistance at 25°C, sometimes also called "nominal resistance"
b25: float or EngineerIO string
The NTC b-constant (e.g. b25/50, b25/85 or b25/100)
t : temperature
The temperature. Will be interpreted using normalize_temperature()
"""
# Normalize inputs
r25 = normalize_numeric(r25)
b25 = normalize_numeric(b25)
t = normalize_temperature(t) # t is now in Kelvins
# Compute resistance
return r25 * np.exp(b25 * (1./t - 1./(25. + zero_Celsius)))
|
Fix build error due to replacing zero_point_celsius by scipy equivalent
|
Fix build error due to replacing zero_point_celsius by scipy equivalent
|
Python
|
apache-2.0
|
ulikoehler/UliEngineering
|
cddb0ae5c9c2d96c5902943f8b341ab2b698235f
|
paveldedik/forms.py
|
paveldedik/forms.py
|
# -*- coding: utf-8 -*-
from flask.ext.mongoengine.wtf import model_form
from paveldedik.models import User, Post
post_args = {
'title': {'label': u'Title'},
'leading': {'label': u'Leading'},
'content': {'label': u'Content'},
}
UserForm = model_form(User)
PostForm = model_form(Post, field_args=post_args)
|
# -*- coding: utf-8 -*-
from flask.ext.mongoengine.wtf import model_form
from paveldedik.models import User, Post
#: Model the user form. Additional field arguments can be included using
#: the key-word argument ``field_args``. For more information about using
#: WTForms follow `this link<http://flask.pocoo.org/snippets/60/>`_.
UserForm = model_form(User)
#: Model the post form. The attribute ``post_is`` must be excluded so that
#: the field is not required during form validation and it is not rewritten
#: when calling `populate_obj` on the :class:`models.Post` instance.
PostForm = model_form(Post, exclude=['post_id'])
|
Exclude post_id from the wtform.
|
Exclude post_id from the wtform.
|
Python
|
mit
|
paveldedik/blog,paveldedik/blog
|
9f00aeff695dd587bf7db1e126e57596616ef95e
|
backend/messages.py
|
backend/messages.py
|
# -*- coding: utf-8 -*-
import json
from enum import Enum
class BEMessages(Enum):
ALL_MAIN_BROADCAST = 'ALL_MAIN_BROADCAST'
class FEMessages(Enum):
pass
class AllMainBroadCast(object):
message_type = BEMessages.ALL_MAIN_BROADCAST
def __init__(self):
pass
def broadcast(self, handler):
data = {
'type': self.message_type,
'content': 'TEST', # TODO: grab message data from class vars
}
json_content = json.dumps(data, ensure_ascii=False)
handler.send(json_content)
|
# -*- coding: utf-8 -*-
import json
from enum import Enum
class BEMessages(Enum):
ALL_MAIN_BROADCAST = 'ALL_MAIN_BROADCAST'
class FEMessages(Enum):
pass
class Broadcast(object):
data = {}
def __init__(self, *args, **kwargs):
self.data.update(kwargs)
def broadcast(self, handler):
data = {
'type': self.message_type,
'content': '', # TODO: grab message data from class vars
}
json_content = json.dumps(data, ensure_ascii=False)
handler.send(json_content)
class AllMainBroadcast(Broadcast):
message_type = BEMessages.ALL_MAIN_BROADCAST
|
Split Broadcast into base class
|
Split Broadcast into base class
|
Python
|
mit
|
verekia/hackarena,verekia/hackarena,verekia/hackarena,verekia/hackarena
|
98b84a09fb64f0647cd136f63603a5f37de2b0ee
|
tensorforce/tests/test_trpo_agent.py
|
tensorforce/tests/test_trpo_agent.py
|
# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import unittest
from tensorforce.tests.base_agent_test import BaseAgentTest
from tensorforce.agents import TRPOAgent
class TestTRPOAgent(BaseAgentTest, unittest.TestCase):
agent = TRPOAgent
deterministic = False
config = dict(
batch_size=8,
learning_rate=1e-2
)
multi_config = dict(
batch_size=64,
learning_rate=0.1
)
|
# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import unittest
from tensorforce.tests.base_agent_test import BaseAgentTest
from tensorforce.agents import TRPOAgent
class TestTRPOAgent(BaseAgentTest, unittest.TestCase):
agent = TRPOAgent
deterministic = False
config = dict(
batch_size=16,
learning_rate=0.005
)
multi_config = dict(
batch_size=64,
learning_rate=0.1
)
|
Update TRPO, inconsistent results between Travis and local.
|
Update TRPO, inconsistent results between Travis and local.
|
Python
|
apache-2.0
|
reinforceio/tensorforce,lefnire/tensorforce
|
bbff08c49df269ad24851d4264fd3f1dbd141358
|
test/contrib/test_securetransport.py
|
test/contrib/test_securetransport.py
|
# -*- coding: utf-8 -*-
import contextlib
import socket
import ssl
import pytest
try:
from urllib3.contrib.securetransport import WrappedSocket
except ImportError:
pass
def setup_module():
try:
from urllib3.contrib.securetransport import inject_into_urllib3
inject_into_urllib3()
except ImportError as e:
pytest.skip('Could not import SecureTransport: %r' % e)
def teardown_module():
try:
from urllib3.contrib.securetransport import extract_from_urllib3
extract_from_urllib3()
except ImportError:
pass
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1 # noqa: F401
from ..with_dummyserver.test_socketlevel import ( # noqa: F401
TestSNI, TestSocketClosing, TestClientCerts
)
def test_no_crash_with_empty_trust_bundle():
with contextlib.closing(socket.socket()) as s:
ws = WrappedSocket(s)
with pytest.raises(ssl.SSLError):
ws._custom_validate(True, b"")
|
# -*- coding: utf-8 -*-
import contextlib
import socket
import ssl
import pytest
try:
from urllib3.contrib.securetransport import WrappedSocket
except ImportError:
pass
def setup_module():
try:
from urllib3.contrib.securetransport import inject_into_urllib3
inject_into_urllib3()
except ImportError as e:
pytest.skip('Could not import SecureTransport: %r' % e)
def teardown_module():
try:
from urllib3.contrib.securetransport import extract_from_urllib3
extract_from_urllib3()
except ImportError:
pass
from ..with_dummyserver.test_https import TestHTTPS, TestHTTPS_TLSv1 # noqa: F401
from ..with_dummyserver.test_socketlevel import ( # noqa: F401
TestSNI, TestSocketClosing, TestClientCerts
)
def test_no_crash_with_empty_trust_bundle():
with contextlib.closing(socket.socket()) as s:
ws = WrappedSocket(s)
with pytest.raises(ssl.SSLError):
ws._custom_validate(True, b"")
|
Fix whitespace issue in SecureTransport test
|
Fix whitespace issue in SecureTransport test
|
Python
|
mit
|
urllib3/urllib3,sigmavirus24/urllib3,sigmavirus24/urllib3,urllib3/urllib3
|
141b46d1f5178df7e110aee7b2b50ce6f5b44b7a
|
contrib/python-copper/t/test_wsgi.py
|
contrib/python-copper/t/test_wsgi.py
|
# -*- coding: utf-8 -*-
from copper.wsgi_support import wsgi
def test_http_handler(copper_client, copper_http_client):
def application(environ, start_response):
message = 'Hello, %s!' % (environ['PATH_INFO'],)
start_response('200 OK', [
('Content-Type', 'text/plain; charset=UTF-8'),
('Content-Length', '%d' % len(message)),
])
return [message]
with copper_client.publish('http:/hello/', wsgi(application)):
result = copper_http_client.open('copper:///hello/world').read()
assert result == 'Hello, /hello/world!'
result = copper_http_client.open('copper:///hello/foobar').read()
assert result == 'Hello, /hello/foobar!'
res = copper_http_client.open('copper:///hello')
assert res.code == 404
res = copper_http_client.open('copper:///foobar')
assert res.code == 404
|
# -*- coding: utf-8 -*-
from copper.wsgi_support import wsgi
def test_http_handler(copper_client, copper_http_client):
def application(environ, start_response):
message = 'Hello, %s!' % (environ['PATH_INFO'],)
start_response('200 OK', [
('Content-Type', 'text/plain; charset=UTF-8'),
('Content-Length', '%d' % len(message)),
])
return [message]
with copper_client.publish('http:hello', wsgi(application)):
result = copper_http_client.open('copper:///hello/world').read()
assert result == 'Hello, /world!'
result = copper_http_client.open('copper:///hello/foobar').read()
assert result == 'Hello, /foobar!'
res = copper_http_client.open('copper:///hello')
assert res.code == 404
res = copper_http_client.open('copper:///foobar')
assert res.code == 404
|
Fix python tests re: http routing
|
Fix python tests re: http routing
|
Python
|
mit
|
snaury/copper,snaury/copper,snaury/copper
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.