commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
1a089c634bc608e5862ce549ed598e50c02b8d09 | Bump version | mishbahr/django-users2,mishbahr/django-users2 | users/__init__.py | users/__init__.py | __version__ = '0.1.3'
| __version__ = '0.1.2'
| bsd-3-clause | Python |
44202d1c178d76c5db22a9b9ce4e7138a0cb73c7 | upgrade to v3.9.4 | rainmattertech/pykiteconnect | kiteconnect/__version__.py | kiteconnect/__version__.py | __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.9.4"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "[email protected]"
__license__ = "MIT"
| __title__ = "kiteconnect"
__description__ = "The official Python client for the Kite Connect trading API"
__url__ = "https://kite.trade"
__download_url__ = "https://github.com/zerodhatech/pykiteconnect"
__version__ = "3.9.2"
__author__ = "Zerodha Technology Pvt ltd. (India)"
__author_email__ = "[email protected]"
__license__ = "MIT"
| mit | Python |
f1217f04f17daa3d77c9a3197b33d87b8f775056 | Replace OpenERP by Odoo | guewen/l10n-switzerland,open-net-sarl/l10n-switzerland,cyp-opennet/ons_cyp_github,cyp-opennet/ons_cyp_github,BT-aestebanez/l10n-switzerland,BT-fgarbely/l10n-switzerland,eLBati/l10n-switzerland,CompassionCH/l10n-switzerland,CompassionCH/l10n-switzerland,open-net-sarl/l10n-switzerland,BT-fgarbely/l10n-switzerland,BT-ojossen/l10n-switzerland,cgaspoz/l10n-switzerland,ndtran/l10n-switzerland,michl/l10n-switzerland,BT-ojossen/l10n-switzerland,BT-csanchez/l10n-switzerland | l10n_ch_zip/__openerp__.py | l10n_ch_zip/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author Nicolas Bessi. Copyright Camptocamp SA
# Contributor: WinGo SA
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
##############################################################################
{'name': 'Switzerland - Postal codes (ZIP) list',
'summary': 'Loads all Swiss postal codes',
'version': '1.0.1',
'depends': ['base', 'base_location'],
'author': 'Camptocamp',
'description': """
Swiss postal code (ZIP) list
============================
This module will load all Swiss postal codes (ZIP) in Odoo to
ease the input of partners.
It is not mandatory to use Odoo in Switzerland, but can improve the user experience.
""",
'website': 'http://www.camptocamp.com',
'data': ['l10n_ch_better_zip.xml'],
'demo_xml': [],
'installable': True,
'active': False}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author Nicolas Bessi. Copyright Camptocamp SA
# Contributor: WinGo SA
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
##############################################################################
{'name': 'Switzerland - Postal codes (ZIP) list',
'summary': 'Loads all Swiss postal codes',
'version': '1.0.1',
'depends': ['base', 'base_location'],
'author': 'Camptocamp',
'description': """
Swiss postal code (ZIP) list
============================
This module will load all Swiss postal codes (ZIP) in OpenERP to
ease the input of partners.
It is not mandatory to use OpenERP in Switzerland, but can improve the user experience.
""",
'website': 'http://www.camptocamp.com',
'data': ['l10n_ch_better_zip.xml'],
'demo_xml': [],
'installable': True,
'active': False}
| agpl-3.0 | Python |
b37988c7d6b260793cc8e88e0057f1a59d2fcc0b | fix migration file | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/icds_reports/migrations/0060_added_phone_number_to_views.py | custom/icds_reports/migrations/0060_added_phone_number_to_views.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-10 14:05
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
class Migration(migrations.Migration):
dependencies = [
('icds_reports', '0059_update_blob_paths'),
]
operations = [
migrator.get_migration('agg_awc_daily.sql'),
migrator.get_migration('agg_ccs_record_monthly.sql'),
migrator.get_migration('agg_child_health_monthly.sql'),
migrator.get_migration('child_health_monthly.sql'),
migrator.get_migration('daily_attendance.sql'),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-09-10 14:05
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import RawSQLMigration
migrator = RawSQLMigration(('custom', 'icds_reports', 'migrations', 'sql_templates', 'database_views'))
class Migration(migrations.Migration):
dependencies = [
('icds_reports', '0057_aggregateccsrecordpostnatalcareforms_is_ebf'),
]
operations = [
migrator.get_migration('agg_awc_daily.sql'),
migrator.get_migration('agg_ccs_record_monthly.sql'),
migrator.get_migration('agg_child_health_monthly.sql'),
migrator.get_migration('child_health_monthly.sql'),
migrator.get_migration('daily_attendance.sql'),
]
| bsd-3-clause | Python |
c5f2b65aa172b10206950a5981a06afef5742173 | Improve reliability of galera_consistency.py | andymcc/rpc-openstack,jacobwagner/rpc-openstack,claco/rpc-openstack,jpmontez/rpc-openstack,jacobwagner/rpc-openstack,cfarquhar/rpc-openstack,sigmavirus24/rpc-openstack,cloudnull/rpc-openstack,busterswt/rpc-openstack,briancurtin/rpc-maas,prometheanfire/rpc-openstack,robb-romans/rpc-openstack,cloudnull/rpc-maas,BjoernT/rpc-openstack,mattt416/rpc-openstack,BjoernT/rpc-openstack,galstrom21/rpc-openstack,jpmontez/rpc-openstack,xeregin/rpc-openstack,miguelgrinberg/rpc-openstack,sigmavirus24/rpc-openstack,claco/rpc-openstack,rcbops/rpc-openstack,byronmccollum/rpc-openstack,nrb/rpc-openstack,cloudnull/rpc-maas,git-harry/rpc-openstack,xeregin/rpc-openstack,major/rpc-openstack,busterswt/rpc-openstack,darrenchan/rpc-openstack,claco/rpc-openstack,miguelgrinberg/rpc-openstack,git-harry/rpc-openstack,byronmccollum/rpc-openstack,hughsaunders/rpc-openstack,galstrom21/rpc-openstack,npawelek/rpc-maas,mancdaz/rpc-openstack,byronmccollum/rpc-openstack,hughsaunders/rpc-openstack,busterswt/rpc-openstack,cfarquhar/rpc-openstack,briancurtin/rpc-maas,robb-romans/rpc-openstack,nrb/rpc-openstack,mattt416/rpc-openstack,cfarquhar/rpc-maas,nrb/rpc-openstack,cfarquhar/rpc-maas,stevelle/rpc-openstack,rcbops/rpc-openstack,darrenchan/rpc-openstack,cfarquhar/rpc-maas,major/rpc-openstack,stevelle/rpc-openstack,prometheanfire/rpc-openstack,npawelek/rpc-maas,briancurtin/rpc-maas,shannonmitchell/rpc-openstack,npawelek/rpc-maas,jpmontez/rpc-openstack,xeregin/rpc-openstack,andymcc/rpc-openstack,xeregin/rpc-openstack,mattt416/rpc-openstack,sigmavirus24/rpc-openstack,darrenchan/rpc-openstack,sigmavirus24/rpc-openstack,cloudnull/rpc-openstack,stevelle/rpc-openstack,shannonmitchell/rpc-openstack,andymcc/rpc-openstack,cloudnull/rpc-maas,miguelgrinberg/rpc-openstack,darrenchan/rpc-openstack,mancdaz/rpc-openstack | galera_consistency.py | galera_consistency.py | import io
import optparse
import subprocess
def table_checksum(user, password, host):
"""Run pt-table-checksum with the user, password, and host specified."""
args = ['/usr/bin/pt-table-checksum', '-u', user, '-p', password]
if host:
args.extend(['-h', host])
out = io.StringIO()
err = io.StringIO()
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
# Let's poll the process to make sure it finishes before we return from
# this function.
while proc.poll() is None:
# Avoid the OS Pipe buffer from blocking the process
(stdout, stderr) = proc.communicate()
# Let's store the aggregated output in buffers
out.write(stdout)
err.write(stderr)
# The process has terminated, let's get the rest of stdout/stderr
(stdout, stderr) = proc.communicate()
out.write(stdout)
err.write(stderr)
# At this point we have a valid return code and the full stdout, stderr
# logs
return (proc.return_code, out.getvalue(), err.getvalue())
def main():
usage = "Usage: %prog [-h] [-H] username password"
parser = optparse.OptionParser(usage=usage)
parser.add_option(
'-H', '--host',
action='store',
dest='host',
default=None,
help="Allow user to connect to something other than localhost"
)
(options, args) = parser.parse_args()
# We will need the username and password to connect to the database
if len(args) != 2:
parser.print_help()
raise SystemExit(True)
# According to
# http://www.percona.com/doc/percona-toolkit/2.2/pt-table-checksum.html
# If the exit status is 0, everything is okay, otherwise the exit status
# will be non-zero. We don't need stdout at the moment so we can discard
# it. Stderr should contain any problems we run across.
(status, _, err) = table_checksum(args[0], args[1], options.host)
if status != 0:
print "status err %s" % err.strip()
raise SystemExit(True)
print "status ok"
if __name__ == '__main__':
main()
| import optparse
import subprocess
def table_checksum(user, password, host):
args = ['/usr/bin/pt-table-checksum', '-u', user, '-p', password]
if host:
args.extend(['-h', host])
proc = subprocess.Popen(args, stderr=subprocess.PIPE)
(out, err) = proc.communicate()
return (proc.return_code, out, err)
def main():
usage = "Usage: %prog [-h] [-H] username password"
parser = optparse.OptionParser(usage=usage)
parser.add_option('-H', '--host', action='store', dest='host',
default=None)
(options, args) = parser.parse_args()
if len(args) != 2:
parser.print_help()
raise SystemExit(True)
(status, _, err) = table_checksum(args[0], args[1], options.host)
if status != 0:
print "status err %s" % err
raise SystemExit(True)
print "status ok"
if __name__ == '__main__':
main()
| apache-2.0 | Python |
1b2fa45766b1ea5945f246d74bc4adf0114abe84 | Fix typo in description of config item | imbasimba/astroquery,ceb8/astroquery,imbasimba/astroquery,ceb8/astroquery | astroquery/splatalogue/__init__.py | astroquery/splatalogue/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg ([email protected])
:Originally contributed by:
Magnus Vilhelm Persson ([email protected])
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'Splatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Splatalogue Catalog Query Tool
-----------------------------------
:Author: Adam Ginsburg ([email protected])
:Originally contributed by:
Magnus Vilhelm Persson ([email protected])
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.splatalogue`.
"""
slap_url = _config.ConfigItem(
'http://find.nrao.edu/splata-slap/slap',
'Splatalogue SLAP interface URL (not used).')
query_url = _config.ConfigItem(
'http://www.cv.nrao.edu/php/splat/c_export.php',
'SSplatalogue web interface URL.')
timeout = _config.ConfigItem(
60,
'Time limit for connecting to Splatalogue server.')
lines_limit = _config.ConfigItem(
1000,
'Limit to number of lines exported.')
conf = Conf()
from . import load_species_table
from . import utils
from .core import Splatalogue, SplatalogueClass
__all__ = ['Splatalogue', 'SplatalogueClass',
'Conf', 'conf',
]
| bsd-3-clause | Python |
a1c60939302bd60d0e7708d19b7eee3d2970bbfb | Fix minion state assertions - multiple keys possible | dincamihai/salt-toaster,dincamihai/salt-toaster | assertions.py | assertions.py | import re
import shlex
import subprocess
from config import SALT_KEY_CMD
def has_expected_state(expected_state, mapping, env):
assert expected_state in mapping
cmd = shlex.split(SALT_KEY_CMD.format(**env))
cmd.append("-L")
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
output, unused_err = process.communicate()
return mapping[expected_state].search(output) is not None
def assert_minion_key_state(env, expected_state):
STATES_MAPPING = dict(
unaccepted=re.compile("Unaccepted Keys:(\n.+)*\n{HOSTNAME}".format(**env)),
accepted=re.compile("Accepted Keys:(\n.+)*\n{HOSTNAME}".format(**env))
)
assert has_expected_state(expected_state, STATES_MAPPING, env)
def assert_proxyminion_key_state(env, expected_state):
STATES_MAPPING = dict(
unaccepted=re.compile("Unaccepted Keys:(\n.+)*\n{PROXY_ID}".format(**env)),
accepted=re.compile("Accepted Keys:(\n.+)*\n{PROXY_ID}".format(**env))
)
assert has_expected_state(expected_state, STATES_MAPPING, env)
| import re
import shlex
import subprocess
from config import SALT_KEY_CMD
def assert_minion_key_state(env, expected_state):
STATES_MAPPING = dict(
unaccepted=re.compile("Unaccepted Keys:\n{HOSTNAME}".format(**env)),
accepted=re.compile("Accepted Keys:\n{HOSTNAME}".format(**env))
)
assert expected_state in STATES_MAPPING
cmd = shlex.split(SALT_KEY_CMD.format(**env))
cmd.append("-L")
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
output, unused_err = process.communicate()
assert STATES_MAPPING[expected_state].search(output)
def assert_proxyminion_key_state(env, expected_state):
STATES_MAPPING = dict(
unaccepted=re.compile("Unaccepted Keys:\n{PROXY_ID}".format(**env)),
accepted=re.compile("Accepted Keys:\n{PROXY_ID}".format(**env))
)
assert expected_state in STATES_MAPPING
cmd = shlex.split(SALT_KEY_CMD.format(**env))
cmd.append("-L")
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env)
output, unused_err = process.communicate()
assert STATES_MAPPING[expected_state].search(output)
| mit | Python |
f3ec85cd7baf65036ed76a2c4ab4fe935b81b805 | introduce logging | fuzzy-id/midas,fuzzy-id/midas,fuzzy-id/midas | midas/scripts/md_config.py | midas/scripts/md_config.py | # -*- coding: utf-8 -*-
import logging
import sys
from midas.scripts import MDCommand
import midas.config as md_cfg
logger = logging.getLogger(__name__)
class MDConfig(MDCommand):
""" Read all configuration files, print the final configuration
and exit.
This can be used to see how a configuration file (e.g. a job file)
alters the whole configuration or to generate a default
configuration file which is going to be altered in a second step.
"""
POS_ARG = { 'dest': 'job_cfg',
'nargs': '?',
'metavar': 'FILE',
'help': 'additional configuration file to read'}
def __init__(self, argv):
MDCommand.__init__(self, argv)
if self.args.job_cfg:
md_cfg.read(self.args.job_cfg)
def run(self):
md_cfg.get_configparser().write(sys.stdout)
| # -*- coding: utf-8 -*-
import sys
from midas.scripts import MDCommand
import midas.config as md_cfg
class MDConfig(MDCommand):
""" Read all configuration files, print the final configuration
and exit.
This can be used to see how a configuration file (e.g. a job file)
alters the whole configuration or to generate a default
configuration file which is going to be altered in a second step.
"""
POS_ARG = { 'dest': 'job_cfg',
'nargs': '?',
'metavar': 'FILE',
'help': 'additional configuration file to read'}
def __init__(self, argv):
MDCommand.__init__(self, argv)
if self.args.job_cfg:
md_cfg.read(self.args.job_cfg)
def run(self):
md_cfg.get_configparser().write(sys.stdout)
| bsd-3-clause | Python |
8790eec0fdd94beeb4d0ceac8b24a1de77bd3eee | Update sql2rf.py | victoriamorris/iams2rf | bin/sql2rf.py | bin/sql2rf.py | #!/usr/bin/env python
# -*- coding: utf8 -*-
"""Script to search for records within an SQL database created using snapshot2sql
and convert to Researcher Format."""
# Import required modules
# import datetime
import getopt
# import sys
from iams2rf import *
__author__ = 'Victoria Morris'
__license__ = 'MIT License'
__version__ = '1.0.0'
__status__ = '4 - Beta Development'
def usage():
"""Function to print information about the script"""
print('========================================')
print('sql2rf')
print('IAMS data extraction for Researcher Format')
print('========================================')
print('This utility searches an SQL database of IAMS records')
print('created using the utility snapshot2sql')
print('and converts matching records to Researcher Format')
print('\nCorrect syntax is:')
print('sql2rf -d DB_PATH -r REQUEST_PATH [OPTIONS]')
print('\nSearch DB_PATH for records meeting criteria in REQUEST_PATH.')
print(' -d Path to the SQL database')
print(' -r Path to Outlook message containing details of the request')
print('\nUse quotation marks (") around arguments which contain spaces')
print('\nIf REQUEST_PATH is not specified you will be given the option to set parameters for the output')
print('\nOptions:')
print(' -o OUTPUT_FOLDER to save output files.')
print(' --debug Debug mode.')
print(' --help Show this message and exit.')
exit_prompt()
def main(argv=None):
if argv is None:
name = str(sys.argv[1])
db_path, request_path, output_folder = '', '', ''
debug = False
try:
opts, args = getopt.getopt(argv, 'd:r:o:', ['db_path=', 'request_path=', 'output_folder=', 'debug', 'help'])
except getopt.GetoptError as err:
exit_prompt('Error: {}'.format(err))
if opts is None or not opts:
usage()
for opt, arg in opts:
if opt == '--help': usage()
elif opt == '--debug': debug = True
elif opt in ['-d', '--db_path']: db_path = arg
elif opt in ['-r', '--request_path']: request_path = arg
elif opt in ['-o', '--output_folder']: output_folder = arg
else: exit_prompt('Error: Option {} not recognised'.format(opt))
iams2rf_sql2rf(db_path, request_path, output_folder, debug)
print('\n\nAll processing complete')
print('----------------------------------------')
print(str(datetime.datetime.now()))
sys.exit()
if __name__ == '__main__':
main(sys.argv[1:])
| #!/usr/bin/env python
# -*- coding: utf8 -*-
"""Script to search for records within an SQL database created using snapshot2sql
and convert to Researcher Format."""
# Import required modules
# import datetime
import getopt
# import sys
from iams2rf import *
__author__ = 'Victoria Morris'
__license__ = 'MIT License'
__version__ = '1.0.0'
__status__ = '4 - Beta Development'
def usage():
print('========================================')
print('sql2rf')
print('IAMS data extraction for Researcher Format')
print('========================================')
print('This utility searches an SQL database of IAMS records')
print('created using the utility snapshot2sql')
print('and converts matching records to Researcher Format')
print('\nCorrect syntax is:')
print('sql2rf -d DB_PATH -r REQUEST_PATH -o OUTPUT_FOLDER [OPTIONS]')
print('\nSearch DB_PATH for records meeting criteria in REQUEST_PATH.')
print(' -d Path to the SQL database')
print(' -r Path to Outlook message containing details of the request')
print(' -o Folder to save Researcher Format output files')
print('\nUse quotation marks (") around arguments which contain spaces')
print('\nIf REQUEST_PATH is not specified you will be given the option to set parameters for the output')
print('\nOptions:')
print(' --debug Debug mode.')
print(' --help Show this message and exit.')
exit_prompt()
def main(argv=None):
if argv is None:
name = str(sys.argv[1])
db_path, request_path, output_folder = '', '', ''
debug = False
try:
opts, args = getopt.getopt(argv, 'd:r:o:', ['db_path=', 'request_path=', 'output_folder=', 'debug', 'help'])
except getopt.GetoptError as err:
exit_prompt('Error: {}'.format(err))
if opts is None or not opts:
usage()
for opt, arg in opts:
if opt == '--help': usage()
elif opt == '--debug': debug = True
elif opt in ['-d', '--db_path']: db_path = arg
elif opt in ['-r', '--request_path']: request_path = arg
elif opt in ['-o', '--output_folder']: output_folder = arg
else: exit_prompt('Error: Option {} not recognised'.format(opt))
iams2rf_sql2rf(db_path, request_path, output_folder, debug)
print('\n\nAll processing complete')
print('----------------------------------------')
print(str(datetime.datetime.now()))
sys.exit()
if __name__ == '__main__':
main(sys.argv[1:])
| mit | Python |
7c382a33fa3f691fcbf89621b48c0c9e3a921d03 | update version number | PytLab/VASPy,PytLab/VASPy | vaspy/__init__.py | vaspy/__init__.py | __version__ = '0.1.1' # add d-band center calculation
class VasPy(object):
def __init__(self, filename):
"Base class to be inherited by all classes in VASPy."
self.filename = filename
class CarfileValueError(Exception):
"Exception raised for errors in the CONTCAR-like file."
pass
class UnmatchedDataShape(Exception):
"Exception raised for errors in unmatched data shape."
pass
| __version__ = '0.1.0' # add electro module
class VasPy(object):
def __init__(self, filename):
"Base class to be inherited by all classes in VASPy."
self.filename = filename
class CarfileValueError(Exception):
"Exception raised for errors in the CONTCAR-like file."
pass
class UnmatchedDataShape(Exception):
"Exception raised for errors in unmatched data shape."
pass
| mit | Python |
4d63320c2bf077e90cffb98286e0354dcab1fc64 | Make runTestCases.py possible to run independently | paleyss/incubator-mnemonic,lql5083psu/incubator-mnemonic,johnugeorge/incubator-mnemonic,paleyss/incubator-mnemonic,johnugeorge/incubator-mnemonic,NonVolatileComputing/incubator-mnemonic,yzz127/incubator-mnemonic,NonVolatileComputing/incubator-mnemonic,lql5083psu/incubator-mnemonic,yzz127/incubator-mnemonic,lql5083psu/incubator-mnemonic,NonVolatileComputing/incubator-mnemonic,NonVolatileComputing/incubator-mnemonic,yzz127/incubator-mnemonic,lql5083psu/incubator-mnemonic,paleyss/incubator-mnemonic,johnugeorge/incubator-mnemonic,johnugeorge/incubator-mnemonic,yzz127/incubator-mnemonic,paleyss/incubator-mnemonic | build-tools/runTestCases.py | build-tools/runTestCases.py | #! /usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import subprocess
import sys
import os
testLogDir = "testlog/"
if not os.path.exists(testLogDir):
os.makedirs(testLogDir)
testCmdFile = 'build-tools/test.conf'
tcCmdReg = re.compile('^mvn\s.*$')
tcNameReg = re.compile('-Dtest=(.+?)\s')
tcModuleReg = re.compile('-pl\s(.+?)\s')
with open(testCmdFile) as fp:
for line in fp:
match = tcCmdReg.findall(line)
if match:
logFilePath = testLogDir + tcNameReg.findall(line)[0] + ".log"
print("[INFO] Running " + tcNameReg.findall(line)[0] + " test case for \"" + tcModuleReg.findall(line)[0] + "\"...")
try:
#maven build
subprocess.check_call(match[0] + ">" + logFilePath, stderr=subprocess.STDOUT, shell=True)
print("[SUCCESS] Test case " + tcNameReg.findall(line)[0] + " for \"" + tcModuleReg.findall(line)[0]+ "\" is completed!")
except subprocess.CalledProcessError as e:
print("[ERROR] This test case requires \"pmalloc\" memory service to pass, please check if \"pmalloc\" has been configured correctly! If \"pmalloc\" is installed, please refer to testlog/" + tcNameReg.findall(line)[0] + ".log for detailed information.")
sys.exit(1)
print("[DONE] All test cases are completed! Log files are available under folder testlog!")
| #! /usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import re
import subprocess
import sys
testCmdFile = 'build-tools/test.conf'
tcCmdReg = re.compile('^mvn\s.*$')
tcNameReg = re.compile('-Dtest=(.+?)\s')
tcModuleReg = re.compile('-pl\s(.+?)\s')
with open(testCmdFile) as fp:
for line in fp:
match = tcCmdReg.findall(line)
if match:
logFilePath = "testlog/" + tcNameReg.findall(line)[0] + ".log"
print("[INFO] Running " + tcNameReg.findall(line)[0] + " test case for \"" + tcModuleReg.findall(line)[0] + "\"...")
try:
#maven build
subprocess.check_call(match[0] + ">" + logFilePath, stderr=subprocess.STDOUT, shell=True)
print("[SUCCESS] Test case " + tcNameReg.findall(line)[0] + " for \"" + tcModuleReg.findall(line)[0]+ "\" is completed!")
except subprocess.CalledProcessError as e:
print("[ERROR] This test case requires \"pmalloc\" memory service to pass, please check if \"pmalloc\" has been configured correctly! If \"pmalloc\" is installed, please refer to testlog/" + tcNameReg.findall(line)[0] + ".log for detailed information.")
sys.exit(1)
print("[DONE] All test cases are completed! Log files are available under folder testlog!")
| apache-2.0 | Python |
e977d997ab66196b519c60dea34e360dfa4fb15d | Complete decreasing pivot swap reverse sol | bowen0701/algorithms_data_structures | lc0031_next_permutation.py | lc0031_next_permutation.py | """Leetcode 31. Next Permutation
Medium
URL: https://leetcode.com/problems/next-permutation/
Implement next permutation, which rearranges numbers into the lexicographically
next greater permutation of numbers.
If such arrangement is not possible, it must rearrange it as the lowest possible
order (ie, sorted in ascending order).
The replacement must be in-place and use only constant extra memory.
Here are some examples. Inputs are in the left-hand column and its corresponding
outputs are in the right-hand column.
1,2,3 -> 1,3,2
3,2,1 -> 1,2,3
1,1,5 -> 1,5,1
"""
class SolutionDecreasingPivotSwapReverse(object):
def nextPermutation(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
Time complexity: O(n).
Space complexity: O(1).
"""
# From backward find the first pos (pivot) which is not in decreasing order.
i = len(nums) - 1
while i > 0 and nums[i - 1] >= nums[i]:
i -= 1
pivot = i - 1
# If we cannot find that number, all numbers are increasing. Reverse them.
if pivot == -1:
nums.reverse()
return None
# Find the first pos j with num which is bigger than pivot number. Swap them.
j = len(nums) - 1
while j > pivot and nums[j] <= nums[pivot]:
j -= 1
nums[pivot], nums[j] = nums[j], nums[pivot]
# Reverse the remaining numbers on the right of pivot.
left, right = pivot + 1, len(nums) - 1
while left < right:
nums[left], nums[right] = nums[right], nums[left]
left += 1
right -= 1
def main():
# 1,2,3 -> 1,3,2
nums = [1,2,3]
SolutionDecreasingPivotSwapReverse().nextPermutation(nums)
print nums
# 3,2,1 -> 1,2,3
nums = [3,2,1]
SolutionDecreasingPivotSwapReverse().nextPermutation(nums)
print nums
# 1,1,5 -> 1,5,1
nums = [1,1,5]
SolutionDecreasingPivotSwapReverse().nextPermutation(nums)
print nums
if __name__ == '__main__':
main()
| """Leetcode 31. Next Permutation
Medium
URL: https://leetcode.com/problems/next-permutation/
Implement next permutation, which rearranges numbers into the lexicographically
next greater permutation of numbers.
If such arrangement is not possible, it must rearrange it as the lowest possible
order (ie, sorted in ascending order).
The replacement must be in-place and use only constant extra memory.
Here are some examples. Inputs are in the left-hand column and its corresponding
outputs are in the right-hand column.
1,2,3 -> 1,3,2
3,2,1 -> 1,2,3
1,1,5 -> 1,5,1
"""
class Solution(object):
def nextPermutation(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
587c4603d3ab379e4ee22f2dcda7d7798cd35dcf | fix spacing around arguments | damonsauve/db-credentials | db_credentials/DBCredentials.py | db_credentials/DBCredentials.py | #! /usr/local/bin/python
import sys
import re
class DBCredentials:
def __init__(self):
self.creds = {
'host':'',
'port':'',
'username':'',
'password':'',
'database':'',
}
return;
# Load credentials from a file: no input validation.
#
def load_file(self, filename):
f = open(filename, 'r')
text = f.read()
f.close
#print text
tuples = re.findall(r'(\w+)=([^\s]+)', text)
#print tuples
#[('host', 'localhost'), ('username', 'foo'), ('password', 'bar')]
for tuple in tuples:
self.creds[ tuple[0] ] = tuple[1]
#print self.creds
return
def get_host(self):
return self.creds['host']
def set_host(self, host):
self.creds['host'] = host
# listener port - return if specified, otherwise default to 3306
#
def get_port(self):
if self.creds['port']:
return self.creds['port']
else:
return '3306'
def set_port(self, port):
self.creds['port'] = port
def get_username(self):
return self.creds['username']
def set_username(self, username):
self.creds['sid'] = username
def get_password(self):
return self.creds['password']
def set_password(self, password):
self.creds['password'] = password
# database
#
def get_database(self):
if self.creds['database'] == '' and self.creds['host'] != '':
self.creds['database'] = self.creds['host']
return self.creds['database']
def set_database(self, database):
self.creds['database'] = database
| #! /usr/local/bin/python
import sys
import re
class DBCredentials:
def __init__( self ):
self.creds = {
'host':'',
'port':'',
'username':'',
'password':'',
'database':'',
}
return;
# Load credentials from a file: no input validation.
#
def load_file( self, filename ):
f = open( filename, 'r' )
text = f.read()
f.close
#print text
tuples = re.findall( r'(\w+)=([^\s]+)', text )
#print tuples
#[('host', 'localhost'), ('username', 'foo'), ('password', 'bar')]
for tuple in tuples:
self.creds[ tuple[0] ] = tuple[1]
#print self.creds
return
def get_host( self ):
return self.creds['host']
def set_host( self, host ):
self.creds['host'] = host
# listener port - return if specified, otherwise default to 3306
#
def get_port( self ):
if self.creds['port']:
return self.creds['port']
else:
return '3306'
def set_port( self, port ):
self.creds['port'] = port
def get_username( self ):
return self.creds['username']
def set_username( self, username ):
self.creds['sid'] = username
def get_password( self ):
return self.creds['password']
def set_password( self, password ):
self.creds['password'] = password
# database
#
def get_database( self ):
if self.creds['database'] == '' and self.creds['host'] != '':
self.creds['database'] = self.creds['host']
return self.creds['database']
def set_database( self, database ):
self.creds['database'] = database
| mit | Python |
b939558f3d4bd0fa90f3f467ca85f698c4813046 | Update __init__.py | caktus/django-comps,caktus/django-comps | comps/__init__.py | comps/__init__.py | """
A simple application that provides an entry point for integrating
front end designers into a django project
"""
__version__ = '0.3.0'
| """
A simple application that provides an entry point for integrating
front end designers into a django project
"""
__version__ = '0.2.0'
| bsd-3-clause | Python |
86f143863fd9f0786fe83a5038b970b4782306ce | Check table exist | indictranstech/erpnext,gsnbng/erpnext,njmube/erpnext,Aptitudetech/ERPNext,indictranstech/erpnext,gsnbng/erpnext,gsnbng/erpnext,indictranstech/erpnext,geekroot/erpnext,geekroot/erpnext,geekroot/erpnext,njmube/erpnext,gsnbng/erpnext,njmube/erpnext,geekroot/erpnext,njmube/erpnext,indictranstech/erpnext | erpnext/patches/v7_0/update_missing_employee_in_timesheet.py | erpnext/patches/v7_0/update_missing_employee_in_timesheet.py | from __future__ import unicode_literals
import frappe
def execute():
if frappe.db.table_exists("Time Log"):
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
| from __future__ import unicode_literals
import frappe
def execute():
timesheet = frappe.db.sql("""select tl.employee as employee, ts.name as name,
tl.modified as modified, tl.modified_by as modified_by, tl.creation as creation, tl.owner as owner
from
`tabTimesheet` ts, `tabTimesheet Detail` tsd, `tabTime Log` tl
where
tsd.parent = ts.name and tl.from_time = tsd.from_time and tl.to_time = tsd.to_time
and tl.hours = tsd.hours and tl.billing_rate = tsd.billing_rate and tsd.idx=1
and tl.docstatus < 2 and (ts.employee = '' or ts.employee is null)""", as_dict=1)
for data in timesheet:
ts_doc = frappe.get_doc('Timesheet', data.name)
if len(ts_doc.time_logs) == 1:
frappe.db.sql(""" update `tabTimesheet` set creation = %(creation)s,
owner = %(owner)s, modified = %(modified)s, modified_by = %(modified_by)s,
employee = %(employee)s where name = %(name)s""", data)
| agpl-3.0 | Python |
b8c05a7ea6abefa3014f8703864031876c211679 | Add link for total malaria cases for year 2015 for Indonesia | DataKind-SG/healthcare_ASEAN | src/data/download_scripts/ID_malaria_down.py | src/data/download_scripts/ID_malaria_down.py | # This script downloads yearly malaria statistics from data.go.id
# It uses urllib and is compatible with both Python 2 and 3
import os
import sys
import logging #logs what goes on
DIRECTORY = '../../Data/raw/disease_ID'
OUTFILE = "yearly-malaria.csv"
URL = "http://data.go.id/dataset/cef9b348-91a9-4270-be1d-3cf64eb9d5b0/resource/42f31bb0-af59-4c96-9a74-db3283f9e316/download/kasusmalaria.csv"
URL2015 = "http://data.go.id/dataset/cef9b348-91a9-4270-be1d-3cf64eb9d5b0/resource/2965b760-0f7f-4bd7-9dbe-8d261729e12f/download/jumlahkasusangkakesakitanmalariaper1000pendudukberisiko.xlsx"
logger = logging.getLogger(__name__)
def download():
# compatibility check between python 2 and 3
if sys.version_info < (3, 0):
# for python 2, use this
try:
os.makedirs(DIRECTORY)
except OSError as e:
pass
import urllib as downloader
from urllib2 import URLError, HTTPError
else:
# for python 3, use this
os.makedirs(DIRECTORY, exist_ok=True)
import urllib.request as downloader
from urllib.error import URLError, HTTPError
output_path = os.path.join(DIRECTORY, OUTFILE)
# now retrieve the file
try:
downloader.urlretrieve(URL, output_path)
logger.info('Downloaded successfully to %s', os.path.abspath(output_path))
except (HTTPError, URLError) as e:
logger.error('Failed to download: %s', e.reason)
if __name__ == "__main__":
DIRECTORY = '../../../Data/raw/disease_ID'
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
download()
| # This script downloads yearly malaria statistics from data.go.id
# It uses urllib and is compatible with both Python 2 and 3
import os
import sys
import logging #logs what goes on
DIRECTORY = '../../Data/raw/disease_ID'
OUTFILE = "yearly-malaria.csv"
URL = "http://data.go.id/dataset/cef9b348-91a9-4270-be1d-3cf64eb9d5b0/resource/42f31bb0-af59-4c96-9a74-db3283f9e316/download/kasusmalaria.csv"
logger = logging.getLogger(__name__)
def download():
# compatibility check between python 2 and 3
if sys.version_info < (3, 0):
# for python 2, use this
try:
os.makedirs(DIRECTORY)
except OSError as e:
pass
import urllib as downloader
from urllib2 import URLError, HTTPError
else:
# for python 3, use this
os.makedirs(DIRECTORY, exist_ok=True)
import urllib.request as downloader
from urllib.error import URLError, HTTPError
output_path = os.path.join(DIRECTORY, OUTFILE)
# now retrieve the file
try:
downloader.urlretrieve(URL, output_path)
logger.info('Downloaded successfully to %s', os.path.abspath(output_path))
except (HTTPError, URLError) as e:
logger.error('Failed to download: %s', e.reason)
if __name__ == "__main__":
DIRECTORY = '../../../Data/raw/disease_ID'
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
download()
| mit | Python |
193cc8025910b92f764e6e1339ce2ec213b20cc5 | Fix duck punching unit test. | RobinD42/pyside,gbaty/pyside2,IronManMark20/pyside2,qtproject/pyside-pyside,BadSingleton/pyside2,M4rtinK/pyside-android,M4rtinK/pyside-bb10,PySide/PySide,qtproject/pyside-pyside,enthought/pyside,M4rtinK/pyside-android,IronManMark20/pyside2,M4rtinK/pyside-bb10,RobinD42/pyside,IronManMark20/pyside2,BadSingleton/pyside2,RobinD42/pyside,qtproject/pyside-pyside,M4rtinK/pyside-bb10,M4rtinK/pyside-android,M4rtinK/pyside-bb10,pankajp/pyside,pankajp/pyside,pankajp/pyside,pankajp/pyside,RobinD42/pyside,enthought/pyside,PySide/PySide,pankajp/pyside,qtproject/pyside-pyside,M4rtinK/pyside-bb10,IronManMark20/pyside2,M4rtinK/pyside-android,enthought/pyside,gbaty/pyside2,BadSingleton/pyside2,BadSingleton/pyside2,enthought/pyside,qtproject/pyside-pyside,IronManMark20/pyside2,M4rtinK/pyside-android,PySide/PySide,M4rtinK/pyside-bb10,PySide/PySide,RobinD42/pyside,RobinD42/pyside,gbaty/pyside2,RobinD42/pyside,gbaty/pyside2,enthought/pyside,BadSingleton/pyside2,enthought/pyside,gbaty/pyside2,M4rtinK/pyside-android,enthought/pyside,PySide/PySide | tests/qtcore/duck_punching_test.py | tests/qtcore/duck_punching_test.py | #!/usr/bin/python
'''Test case for duck punching new implementations of C++ virtual methods into object instances.'''
import unittest
import types
from PySide.QtCore import QObject, QEvent
from helper import UsesQCoreApplication
class Duck(QObject):
def __init__(self):
QObject.__init__(self)
def childEvent(self, event):
QObject.childEvent(self, event)
class TestDuckPunchingOnQObjectInstance(UsesQCoreApplication):
'''Test case for duck punching new implementations of C++ virtual methods into object instances.'''
def setUp(self):
#Acquire resources
self.duck_childEvent_called = False
UsesQCoreApplication.setUp(self)
def tearDown(self):
#Release resources
del self.duck_childEvent_called
UsesQCoreApplication.tearDown(self)
def testChildEventMonkeyPatch(self):
#Test if the new childEvent injected on QObject instance is called from C++
parent = QObject()
def childEvent(obj, event):
self.duck_childEvent_called = True
QObject.childEvent(obj, event)
parent.childEvent = types.MethodType(childEvent, parent, QObject)
child = QObject()
child.setParent(parent)
self.assert_(self.duck_childEvent_called)
def testChildEventMonkeyPatchWithInheritance(self):
#Test if the new childEvent injected on a QObject's extension class instance is called from C++
parent = Duck()
def childEvent(obj, event):
QObject.childEvent(obj, event)
self.duck_childEvent_called = True
child = QObject()
child.setParent(parent)
parent.childEvent = types.MethodType(childEvent, parent, QObject)
child = QObject()
child.setParent(parent)
self.assert_(self.duck_childEvent_called)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
'''Test case for duck punching new implementations of C++ virtual methods into object instances.'''
import unittest
import types
from PySide.QtCore import QObject, QEvent
from helper import UsesQCoreApplication
class Duck(QObject):
def __init__(self):
QObject.__init__(self)
def childEvent(self, event):
QObject.childEvent(self, event)
class TestDuckPunchingOnQObjectInstance(UsesQCoreApplication):
'''Test case for duck punching new implementations of C++ virtual methods into object instances.'''
def setUp(self):
#Acquire resources
self.duck_childEvent_called = False
UsesQCoreApplication.setUp(self)
def tearDown(self):
#Release resources
del self.duck_childEvent_called
UsesQCoreApplication.tearDown(self)
def testChildEventMonkeyPatch(self):
#Test if the new childEvent injected on QObject instance is called from C++
parent = QObject()
def childEvent(obj, event):
self.duck_childEvent_called = True
QObject.childEvent(obj, event)
parent.event = types.MethodType(childEvent, parent, QObject)
child = QObject()
child.setParent(parent)
self.assert_(self.duck_childEvent_called)
def testChildEventMonkeyPatchWithInheritance(self):
#Test if the new childEvent injected on a QObject's extension class instance is called from C++
parent = Duck()
def childEvent(obj, event):
QObject.childEvent(obj, event)
self.duck_childEvent_called = True
child = QObject()
child.setParent(parent)
parent.event = types.MethodType(childEvent, parent, QObject)
child = QObject()
child.setParent(parent)
self.assert_(self.duck_childEvent_called)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
3d237a6bf3a3dff684e08496f800a8957a9e3352 | Fix pep error. | VitalPet/hr,VitalPet/hr,yelizariev/hr,feketemihai/hr,charbeljc/hr,yelizariev/hr,acsone/hr,damdam-s/hr,thinkopensolutions/hr,vrenaville/hr,Endika/hr,microcom/hr,iDTLabssl/hr,abstract-open-solutions/hr,Antiun/hr,raycarnes/hr,Vauxoo/hr,feketemihai/hr,acsone/hr,Endika/hr,iDTLabssl/hr,alanljj/oca_hr,Vauxoo/hr,raycarnes/hr,xpansa/hr,open-synergy/hr,xpansa/hr,damdam-s/hr,thinkopensolutions/hr,Eficent/hr,open-synergy/hr,alanljj/oca_hr,microcom/hr,Antiun/hr,charbeljc/hr,hbrunn/hr,rschnapka/hr,abstract-open-solutions/hr,Eficent/hr,vrenaville/hr,rschnapka/hr,hbrunn/hr | hr_contract_hourly_rate/models/hr_hourly_rate_class.py | hr_contract_hourly_rate/models/hr_hourly_rate_class.py | # -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, exceptions, _
from itertools import permutations
class hr_hourly_rate_class(models.Model):
_name = 'hr.hourly.rate.class'
_description = 'Hourly rate class'
name = fields.Char(string='Class Name', required=True, index=True)
line_ids = fields.One2many('hr.hourly.rate',
'class_id',
string='Hourly Rates')
contract_job_ids = fields.One2many('hr.contract.job',
'hourly_rate_class_id',
string='Contract Jobs')
@api.model
@api.constrains('line_ids')
def _check_overlapping_rates(self):
"""
Checks if a class has two rates that overlap in time.
"""
for hourly_rate_class in self:
for r1, r2 in permutations(hourly_rate_class.line_ids, 2):
if r1.date_end and \
(r1.date_start <= r2.date_start <= r1.date_end):
raise exceptions.Warning(
_("Error! You cannot have overlapping rates"))
elif not r1.date_end and (r1.date_start <= r2.date_start):
raise exceptions.Warning(
_("Error! You cannot have overlapping rates"))
return True
| # -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, exceptions, _
from itertools import permutations
class hr_hourly_rate_class(models.Model):
_name = 'hr.hourly.rate.class'
_description = 'Hourly rate class'
name = fields.Char(string='Class Name', required=True, index=True)
line_ids = fields.One2many('hr.hourly.rate',
'class_id',
string='Hourly Rates')
contract_job_ids = fields.One2many('hr.contract.job',
'hourly_rate_class_id',
string='Contract Jobs')
@api.model
@api.constrains('line_ids')
def _check_overlapping_rates(self):
"""
Checks if a class has two rates that overlap in time.
"""
for hourly_rate_class in self:
for r1, r2 in permutations(hourly_rate_class.line_ids, 2):
if r1.date_end and (r1.date_start <= r2.date_start <= r1.date_end):
raise exceptions.Warning(
_("Error! You cannot have overlapping rates"))
elif not r1.date_end and (r1.date_start <= r2.date_start):
raise exceptions.Warning(
_("Error! You cannot have overlapping rates"))
return True
| agpl-3.0 | Python |
c88314f935d9bf1e65c2a4f6d3eb6931fee5c4f5 | fix evaluate.py | BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH,BerlinUnited/NaoTH | Utils/py/BallDetection/RegressionNetwork/evaluate.py | Utils/py/BallDetection/RegressionNetwork/evaluate.py | #!/usr/bin/env python3
import argparse
import pickle
import tensorflow.keras as keras
import numpy as np
from pathlib import Path
DATA_DIR = Path(Path(__file__).parent.absolute() / "data").resolve()
MODEL_DIR = Path(Path(__file__).parent.absolute() / "models/best_models").resolve()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Evaluate the network given ')
parser.add_argument('-b', '--database-path', dest='imgdb_path', default=str(DATA_DIR / 'imgdb.pkl'),
help='Path to the image database containing test data.'
'Default is imgdb.pkl in the data folder.')
parser.add_argument('-m', '--model-path', dest='model_path', default=str(MODEL_DIR / 'fy1500_conf.h5'),
help='Store the trained model using this path. Default is fy1500_conf.h5.')
args = parser.parse_args()
res = {"x": 16, "y": 16}
with open(args.imgdb_path, "rb") as f:
mean = pickle.load(f)
print("mean=" + str(mean))
x = pickle.load(f)
y = pickle.load(f)
model = keras.models.load_model(args.model_path)
print(model.summary())
x = np.array(x)
y = np.array(y)
result = model.evaluate(x, y)
print("Evaluation result")
print("=================")
for idx in range(0, len(result)):
print(model.metrics_names[idx] + ":", result[idx])
| #!/usr/bin/env python3
import argparse
import pickle
import tensorflow.keras as keras
import numpy as np
parser = argparse.ArgumentParser(description='Train the network given ')
parser.add_argument('-b', '--database-path', dest='imgdb_path',
help='Path to the image database containing test data.'
'Default is img.db in current folder.')
parser.add_argument('-m', '--model-path', dest='model_path',
help='Store the trained model using this path. Default is model.h5.')
args = parser.parse_args()
imgdb_path = "img.db"
model_path = "model.h5"
res = {"x": 16, "y": 16}
if args.model_path is not None:
model_path = args.model_path
if args.imgdb_path is not None:
imgdb_path = args.imgdb_path
with open(imgdb_path, "rb") as f:
mean = pickle.load(f)
print("mean=" + str(mean))
x = pickle.load(f)
y = pickle.load(f)
model = keras.models.load_model(model_path)
print(model.summary())
x = np.array(x)
y = np.array(y)
result = model.evaluate(x, y)
print("Evaluation result")
print("=================")
for idx in range(0, len(result)):
print(model.metrics_names[idx] + ":", result[idx])
| apache-2.0 | Python |
cc7e3e5ef9d9c59b6b1ac80826445839ede73092 | Revert mast dev host change | imbasimba/astroquery,ceb8/astroquery,imbasimba/astroquery,ceb8/astroquery | astroquery/mast/__init__.py | astroquery/mast/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mast.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
MAST Query Tool
===============
This module contains various methods for querying the MAST Portal.
"""
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astroquery.mast`.
"""
server = _config.ConfigItem(
'https://mastdev.stsci.edu',
'Name of the MAST server.')
ssoserver = _config.ConfigItem(
'https://ssoportal.stsci.edu',
'MAST SSO Portal server.')
timeout = _config.ConfigItem(
600,
'Time limit for requests from the STScI server.')
pagesize = _config.ConfigItem(
50000,
'Number of results to request at once from the STScI server.')
conf = Conf()
from .core import Observations, ObservationsClass, Catalogs, CatalogsClass, Mast, MastClass
from .tesscut import TesscutClass, Tesscut
__all__ = ['Observations', 'ObservationsClass',
'Catalogs', 'CatalogsClass',
'Mast', 'MastClass',
'Tesscut', 'TesscutClass',
'Conf', 'conf',
]
| bsd-3-clause | Python |
3ef82203daebd532af2f8effebe8fa31cec11e76 | fix error message encoding | jaloren/robotframework,jaloren/robotframework,kurtdawg24/robotframework,Colorfulstan/robotframework,suvarnaraju/robotframework,ashishdeshpande/robotframework,SivagnanamCiena/robotframework,jorik041/robotframework,snyderr/robotframework,userzimmermann/robotframework,edbrannin/robotframework,joongh/robotframework,stasiek/robotframework,stasiek/robotframework,HelioGuilherme66/robotframework,nmrao/robotframework,Colorfulstan/robotframework,JackNokia/robotframework,snyderr/robotframework,userzimmermann/robotframework,snyderr/robotframework,rwarren14/robotframework,kurtdawg24/robotframework,ChrisHirsch/robotframework,jaloren/robotframework,joongh/robotframework,un33k/robotframework,edbrannin/robotframework,edbrannin/robotframework,SivagnanamCiena/robotframework,nmrao/robotframework,moto-timo/robotframework,yonglehou/robotframework,kurtdawg24/robotframework,dkentw/robotframework,ChrisHirsch/robotframework,eric-stanley/robotframework,rwarren14/robotframework,fingeronthebutton/robotframework,yahman72/robotframework,ashishdeshpande/robotframework,suvarnaraju/robotframework,wojciechtanski/robotframework,alexandrul-ci/robotframework,eric-stanley/robotframework,xiaokeng/robotframework,jaloren/robotframework,alexandrul-ci/robotframework,synsun/robotframework,ashishdeshpande/robotframework,un33k/robotframework,jorik041/robotframework,stasiek/robotframework,eric-stanley/robotframework,kyle1986/robortframe,nmrao/robotframework,yonglehou/robotframework,dkentw/robotframework,alexandrul-ci/robotframework,SivagnanamCiena/robotframework,edbrannin/robotframework,ashishdeshpande/robotframework,synsun/robotframework,ChrisHirsch/robotframework,kurtdawg24/robotframework,alexandrul-ci/robotframework,suvarnaraju/robotframework,Colorfulstan/robotframework,kyle1986/robortframe,ChrisHirsch/robotframework,rwarren14/robotframework,joongh/robotframework,yahman72/robotframework,yahman72/robotframework,robotframework/robotframework,edbrannin/robotframework,moto-timo/robotframework,dkentw/robotframework,nmrao/robotframework,kurtdawg24/robotframework,synsun/robotframework,xiaokeng/robotframework,userzimmermann/robotframework,yahman72/robotframework,jorik041/robotframework,robotframework/robotframework,wojciechtanski/robotframework,dkentw/robotframework,ashishdeshpande/robotframework,yonglehou/robotframework,joongh/robotframework,JackNokia/robotframework,wojciechtanski/robotframework,yahman72/robotframework,xiaokeng/robotframework,fingeronthebutton/robotframework,Colorfulstan/robotframework,nmrao/robotframework,SivagnanamCiena/robotframework,eric-stanley/robotframework,jorik041/robotframework,un33k/robotframework,jaloren/robotframework,fingeronthebutton/robotframework,HelioGuilherme66/robotframework,snyderr/robotframework,stasiek/robotframework,JackNokia/robotframework,rwarren14/robotframework,kyle1986/robortframe,yonglehou/robotframework,kyle1986/robortframe,suvarnaraju/robotframework,synsun/robotframework,xiaokeng/robotframework,yonglehou/robotframework,wojciechtanski/robotframework,moto-timo/robotframework,fingeronthebutton/robotframework,JackNokia/robotframework,alexandrul-ci/robotframework,un33k/robotframework,SivagnanamCiena/robotframework,xiaokeng/robotframework,kyle1986/robortframe,joongh/robotframework,jorik041/robotframework,HelioGuilherme66/robotframework,moto-timo/robotframework,suvarnaraju/robotframework,synsun/robotframework,ChrisHirsch/robotframework,snyderr/robotframework,un33k/robotframework,dkentw/robotframework,wojciechtanski/robotframework
,userzimmermann/robotframework,Colorfulstan/robotframework,userzimmermann/robotframework,robotframework/robotframework,moto-timo/robotframework,JackNokia/robotframework,stasiek/robotframework,fingeronthebutton/robotframework,rwarren14/robotframework | atest/robot/tidy/TidyLib.py | atest/robot/tidy/TidyLib.py | from __future__ import with_statement
import os
from os.path import abspath, dirname, join
from subprocess import call, STDOUT
import tempfile
from robot.utils.asserts import assert_equals
ROBOT_SRC = join(dirname(abspath(__file__)), '..', '..', '..', 'src')
class TidyLib(object):
def __init__(self, interpreter):
self._cmd = [interpreter, '-m', 'robot.tidy']
self._interpreter = interpreter
def run_tidy_and_return_output(self, options, input, command=None):
"""Runs tidy in the operating system and returns output."""
options = options.split(' ') if options else []
with tempfile.TemporaryFile() as output:
rc = call(self._cmd + options + [self._path(input)], stdout=output,
stderr=STDOUT, cwd=ROBOT_SRC, shell=os.sep=='\\')
output.seek(0)
content = output.read()
if rc:
raise RuntimeError(content)
return content
def run_tidy_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
result = self.run_tidy_and_return_output(options, input)
self._assert_result(result, open(self._path(expected)).read())
def run_tidy_as_a_script_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
cmd = [self._interpreter, join(ROBOT_SRC, 'robot', 'tidy.py')]
result = self.run_tidy_and_return_output(options, input, cmd)
self._assert_result(result, open(self._path(expected)).read())
def _path(self, path):
return path.replace('/', os.sep)
def _assert_result(self, result, expected):
result = result.decode('UTF-8')
expected = expected.decode('UTF-8')
result_lines = result.splitlines()
expected_lines = expected.splitlines()
for line1, line2 in zip(result_lines, expected_lines):
msg = "\n%s\n!=\n%s\n" % (result, expected)
assert_equals(repr(unicode(line1)), repr(unicode(line2)), msg)
| from __future__ import with_statement
import os
from os.path import abspath, dirname, join
from subprocess import call, STDOUT
import tempfile
from robot.utils.asserts import assert_equals
ROBOT_SRC = join(dirname(abspath(__file__)), '..', '..', '..', 'src')
class TidyLib(object):
def __init__(self, interpreter):
self._cmd = [interpreter, '-m', 'robot.tidy']
self._interpreter = interpreter
def run_tidy_and_return_output(self, options, input, command=None):
"""Runs tidy in the operating system and returns output."""
options = options.split(' ') if options else []
with tempfile.TemporaryFile() as output:
rc = call(self._cmd + options + [self._path(input)], stdout=output,
stderr=STDOUT, cwd=ROBOT_SRC, shell=os.sep=='\\')
output.seek(0)
content = output.read()
if rc:
raise RuntimeError(content)
return content
def run_tidy_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
result = self.run_tidy_and_return_output(options, input)
self._assert_result(result, open(self._path(expected)).read())
def run_tidy_as_a_script_and_check_result(self, options, input, expected):
"""Runs tidy and checks that output matches content of file `expected`."""
cmd = [self._interpreter, join(ROBOT_SRC, 'robot', 'tidy.py')]
result = self.run_tidy_and_return_output(options, input, cmd)
self._assert_result(result, open(self._path(expected)).read())
def _path(self, path):
return path.replace('/', os.sep)
def _assert_result(self, result, expected):
result = result.decode('UTF-8')
expected = expected.decode('UTF-8')
for line1, line2 in zip(result.splitlines(), expected.splitlines()):
msg = "\n%s\n!=\n%s\n" % (result, expected)
assert_equals(repr(unicode(line1)), repr(unicode(line2)), msg)
| apache-2.0 | Python |
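The fix in the row above decodes both byte streams to unicode before the line-by-line comparison, so the repr-based assertion never mixes str and unicode. A standalone sketch of the same pattern (Python 2 register to match the row; the helper name is an assumption):
def assert_same_lines(result, expected):
    # decode first so the comparison never mixes byte and unicode strings
    result = result.decode('UTF-8')
    expected = expected.decode('UTF-8')
    for line1, line2 in zip(result.splitlines(), expected.splitlines()):
        assert line1 == line2, u'%r != %r' % (line1, line2)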
fe67796130854d83b3dfaa085d67d9eabe35a155 | Allow getdate for Energy Point Rule condition | yashodhank/frappe,yashodhank/frappe,frappe/frappe,saurabh6790/frappe,almeidapaulopt/frappe,adityahase/frappe,almeidapaulopt/frappe,saurabh6790/frappe,almeidapaulopt/frappe,adityahase/frappe,StrellaGroup/frappe,vjFaLk/frappe,mhbu50/frappe,frappe/frappe,yashodhank/frappe,adityahase/frappe,mhbu50/frappe,vjFaLk/frappe,mhbu50/frappe,StrellaGroup/frappe,saurabh6790/frappe,yashodhank/frappe,adityahase/frappe,frappe/frappe,almeidapaulopt/frappe,mhbu50/frappe,vjFaLk/frappe,vjFaLk/frappe,saurabh6790/frappe,StrellaGroup/frappe | frappe/social/doctype/energy_point_rule/energy_point_rule.py | frappe/social/doctype/energy_point_rule/energy_point_rule.py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.utils import getdate
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert
class EnergyPointRule(Document):
def on_update(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def on_trash(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def apply(self, doc):
whitelisted_globals = {
"getdate": getdate
}
if frappe.safe_eval(self.condition, whitelisted_globals, {'doc': doc.as_dict()}):
multiplier = 1
if self.multiplier_field:
multiplier = doc.get(self.multiplier_field) or 1
points = round(self.points * multiplier)
reference_doctype = doc.doctype
reference_name = doc.name
user = doc.get(self.user_field)
rule = self.name
			# in case of zero as result after roundoff
if not points: return
# if user_field has no value
if not user or user == 'Administrator': return
try:
create_energy_points_log(reference_doctype, reference_name, {
'points': points,
'user': user,
'rule': rule
})
except Exception as e:
frappe.log_error(frappe.get_traceback(), 'apply_energy_point')
def process_energy_points(doc, state):
if (frappe.flags.in_patch
or frappe.flags.in_install
or not is_energy_point_enabled()):
return
old_doc = doc.get_doc_before_save()
# check if doc has been cancelled
if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
return revert_points_for_cancelled_doc(doc)
for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
dict(reference_doctype = doc.doctype, enabled=1)):
frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)
def revert_points_for_cancelled_doc(doc):
energy_point_logs = frappe.get_all('Energy Point Log', {
'reference_doctype': doc.doctype,
'reference_name': doc.name,
'type': 'Auto'
})
for log in energy_point_logs:
revert(log.name, _('Reference document has been cancelled'))
def get_energy_point_doctypes():
return [
d.reference_doctype for d in frappe.get_all('Energy Point Rule',
['reference_doctype'], {'enabled': 1})
]
| # -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import frappe.cache_manager
from frappe.model.document import Document
from frappe.social.doctype.energy_point_settings.energy_point_settings import is_energy_point_enabled
from frappe.social.doctype.energy_point_log.energy_point_log import create_energy_points_log, revert
class EnergyPointRule(Document):
def on_update(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def on_trash(self):
frappe.cache_manager.clear_doctype_map('Energy Point Rule', self.name)
def apply(self, doc):
if frappe.safe_eval(self.condition, None, {'doc': doc.as_dict()}):
multiplier = 1
if self.multiplier_field:
multiplier = doc.get(self.multiplier_field) or 1
points = round(self.points * multiplier)
reference_doctype = doc.doctype
reference_name = doc.name
user = doc.get(self.user_field)
rule = self.name
			# in case of zero as result after roundoff
if not points: return
# if user_field has no value
if not user or user == 'Administrator': return
try:
create_energy_points_log(reference_doctype, reference_name, {
'points': points,
'user': user,
'rule': rule
})
except Exception as e:
frappe.log_error(frappe.get_traceback(), 'apply_energy_point')
def process_energy_points(doc, state):
if (frappe.flags.in_patch
or frappe.flags.in_install
or not is_energy_point_enabled()):
return
old_doc = doc.get_doc_before_save()
# check if doc has been cancelled
if old_doc and old_doc.docstatus == 1 and doc.docstatus == 2:
return revert_points_for_cancelled_doc(doc)
for d in frappe.cache_manager.get_doctype_map('Energy Point Rule', doc.doctype,
dict(reference_doctype = doc.doctype, enabled=1)):
frappe.get_doc('Energy Point Rule', d.get('name')).apply(doc)
def revert_points_for_cancelled_doc(doc):
energy_point_logs = frappe.get_all('Energy Point Log', {
'reference_doctype': doc.doctype,
'reference_name': doc.name,
'type': 'Auto'
})
for log in energy_point_logs:
revert(log.name, _('Reference document has been cancelled'))
def get_energy_point_doctypes():
return [
d.reference_doctype for d in frappe.get_all('Energy Point Rule',
['reference_doctype'], {'enabled': 1})
]
| mit | Python |
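With getdate whitelisted into safe_eval as above, a rule's condition string can do date comparisons. A hypothetical condition (the posting_date field name is an assumption, not part of the commit):
condition = "doc.posting_date and getdate(doc.posting_date).year >= 2019"
# the rule evaluates it roughly as:
#   frappe.safe_eval(condition, {"getdate": getdate}, {"doc": doc.as_dict()})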
6a2a0667179a78e2c56dff551b0d010db6ed0150 | fix imports | toslunar/chainerrl,toslunar/chainerrl | chainerrl/initializers/__init__.py | chainerrl/initializers/__init__.py | from chainerrl.initializers.constant import VarianceScalingConstant # NOQA
from chainerrl.initializers.normal import LeCunNormal # NOQA
from chainerrl.initializers.uniform import VarianceScalingUniform # NOQA
| from chainerrl.initializers.constant import VarianceScalingConstant # NOQA
from chainerrl.initializers.normal import LeCunNormal # NOQA
| mit | Python |
fb19411797ae7ac00e022a9409459c0f42969a91 | Remove unused code | patrick91/pycon,patrick91/pycon | backend/api/helpers/i18n.py | backend/api/helpers/i18n.py | from typing import Optional
from django.conf import settings
from django.utils import translation
def make_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
return getattr(root, field_name).localize(language)
return resolver
| from typing import Optional
from django.conf import settings
from django.utils import translation
def make_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
return getattr(root, field_name).localize(language)
return resolver
def make_dict_localized_resolver(field_name: str):
def resolver(root, info, language: Optional[str] = None) -> str:
language = language or translation.get_language() or settings.LANGUAGE_CODE
field = getattr(root, field_name)
return field.get(language, field["en"])
return resolver
| mit | Python |
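A minimal sketch of the resolver factory in use; the stand-in classes below are assumptions, not project code. Note that when a language argument is passed, the or-chain short-circuits and Django settings are never touched:
class _Localized(object):
    def __init__(self, values):
        self.values = values
    def localize(self, language):
        return self.values.get(language, self.values['en'])
class _Conference(object):
    name = _Localized({'en': 'Conference', 'it': 'Conferenza'})
resolve_name = make_localized_resolver('name')
assert resolve_name(_Conference(), None, language='it') == 'Conferenza'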
4563e383962690cc196f4551f217d488501b660e | support for mysql as well | thenetcircle/dino,thenetcircle/dino,thenetcircle/dino,thenetcircle/dino | bin/count_users_in_rooms.py | bin/count_users_in_rooms.py | import sys
import os
import yaml
import redis
dino_env = sys.argv[1]
dino_home = sys.argv[2]
if dino_home is None:
raise RuntimeError('need environment variable DINO_HOME')
if dino_env is None:
raise RuntimeError('need environment variable DINO_ENVIRONMENT')
def load_secrets_file(config_dict: dict) -> dict:
from string import Template
import ast
secrets_path = dino_home + '/secrets/%s.yaml' % dino_env
    # first substitute environment variables, which take precedence over the yaml config (if it exists)
template = Template(str(config_dict))
template = template.safe_substitute(os.environ)
if os.path.isfile(secrets_path):
try:
secrets = yaml.safe_load(open(secrets_path))
except Exception as e:
raise RuntimeError("Failed to open secrets configuration {0}: {1}".format(secrets_path, str(e)))
template = Template(template)
template = template.safe_substitute(secrets)
return ast.literal_eval(template)
config = yaml.safe_load(open(dino_home + '/dino.yaml'))[dino_env]
config = load_secrets_file(config)
dbtype = config['database']['type']
the_count = 0
if dbtype == 'rdbms':
dbdriver = config['database']['driver']
dbname = config['database']['db']
dbhost = config['database']['host']
dbport = config['database']['port']
dbuser = config['database']['user']
dbpass = config['database']['password']
if dbdriver.startswith('postgres'):
import psycopg2
conn = psycopg2.connect("dbname='%s' user='%s' host='%s' port='%s' password='%s'" % (
dbname, dbuser, dbhost, dbport, dbpass)
)
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
if dbtype == 'rdbms' and dbdriver.startswith('mysql'):
import MySQLdb
conn = MySQLdb.connect(passwd=dbpass, db=dbname, user=dbuser, host=dbhost, port=dbport)
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
r_host, r_port = config['cache']['host'].split(':')
r_db = config['cache']['db']
r_server = redis.Redis(host=r_host, port=r_port, db=r_db)
r_server.set('users:online:inrooms', the_count)
| import sys
import os
import yaml
import redis
import psycopg2
dino_env = sys.argv[1]
dino_home = sys.argv[2]
if dino_home is None:
raise RuntimeError('need environment variable DINO_HOME')
if dino_env is None:
raise RuntimeError('need environment variable DINO_ENVIRONMENT')
def load_secrets_file(config_dict: dict) -> dict:
from string import Template
import ast
secrets_path = dino_home + '/secrets/%s.yaml' % dino_env
    # first substitute environment variables, which take precedence over the yaml config (if it exists)
template = Template(str(config_dict))
template = template.safe_substitute(os.environ)
if os.path.isfile(secrets_path):
try:
secrets = yaml.safe_load(open(secrets_path))
except Exception as e:
raise RuntimeError("Failed to open secrets configuration {0}: {1}".format(secrets_path, str(e)))
template = Template(template)
template = template.safe_substitute(secrets)
return ast.literal_eval(template)
config = yaml.safe_load(open(dino_home + '/dino.yaml'))[dino_env]
config = load_secrets_file(config)
dbtype = config['database']['type']
if dbtype == 'rdbms':
dbname = config['database']['db']
dbhost = config['database']['host']
dbport = config['database']['port']
dbuser = config['database']['user']
dbpass = config['database']['password']
try:
conn = psycopg2.connect("dbname='%s' user='%s' host='%s' port='%s' password='%s'" % (
dbname, dbuser, dbhost, dbport, dbpass)
)
except:
raise RuntimeError('could not connect to db')
cur = conn.cursor()
cur.execute("""select count(*) from rooms_users_association_table""")
the_count = cur.fetchone()[0]
r_host, r_port = config['cache']['host'].split(':')
r_db = config['cache']['db']
r_server = redis.Redis(host=r_host, port=r_port, db=r_db)
r_server.set('users:online:inrooms', the_count)
| apache-2.0 | Python |
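The commit above bolts a MySQL branch next to the Postgres one; note that the two drivers spell their connection keyword arguments differently. A hedged refactoring sketch of that dispatch (the function name and signature are assumptions):
def open_connection(driver, name, user, password, host, port):
    if driver.startswith('postgres'):
        import psycopg2
        return psycopg2.connect(dbname=name, user=user, password=password,
                                host=host, port=port)
    if driver.startswith('mysql'):
        import MySQLdb  # expects db=/passwd= keywords and an int port
        return MySQLdb.connect(db=name, user=user, passwd=password,
                               host=host, port=int(port))
    raise ValueError('unsupported driver: %s' % driver)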
81de62d46d7daefb2e1eef0d0cc4f5ca5c8aef2f | Use GCBV queryset to get PostGetMixin obj. | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | blog/utils.py | blog/utils.py | from django.http import Http404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
| from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
| bsd-2-clause | Python |
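The mixin above expects year, month and slug to arrive as URL keyword arguments. A minimal Django 1.8-era URL pattern sketch (PostDetail is a hypothetical view that mixes PostGetMixin into DetailView):
from django.conf.urls import url
from .views import PostDetail
urlpatterns = [
    url(r'^(?P<year>\d{4})/(?P<month>\d{1,2})/(?P<slug>[\w\-]+)/$',
        PostDetail.as_view()),
]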
2b419d499c37597094379f524d8347f35eeda57c | Fix tinycss css validator | eghuro/crawlcheck | src/checker/plugin/checkers/tinycss_css_validator_plugin.py | src/checker/plugin/checkers/tinycss_css_validator_plugin.py | from common import PluginType
import tinycss
from yapsy.IPlugin import IPlugin
import logging
class CssValidator(IPlugin):
category = PluginType.CHECKER
id = "tinycss"
def __init__(self):
self.journal = None
def setJournal(self, journal):
self.journal = journal
def check(self, transaction):
"""Pusti validator, ulozi chyby.
"""
try:
parser = tinycss.make_parser('page3')
c = transaction.getContent()
if type(c) == str:
data = c
else:
data = str(transaction.getContent(), 'utf-8')
stylesheet = parser.parse_stylesheet(data)
for error in stylesheet.errors:
self.journal.foundDefect(transaction.idno, "stylesheet", "Stylesheet error", [error.line, error.reason], 0.7)
except UnicodeDecodeError as e:
logging.getLogger(__name__).debug("Unicode decode error: "+format(e))
return
| from common import PluginType
import tinycss
from yapsy.IPlugin import IPlugin
import logging
class CssValidator(IPlugin):
category = PluginType.CHECKER
id = "tinycss"
def __init__(self):
self.journal = None
def setJournal(self, journal):
self.journal = journal
def check(self, transaction):
"""Pusti validator, ulozi chyby.
"""
try:
parser = tinycss.make_parser('page3')
data = str(transaction.getContent(), 'utf-8')
stylesheet = parser.parse_stylesheet(data)
for error in stylesheet.errors:
self.journal.foundDefect(transaction.idno, "stylesheet", "Stylesheet error", [error.line, error.reason], 0.7)
except UnicodeDecodeError as e:
logging.getLogger(__name__).debug("Unicode decode error: "+format(e))
return
| mit | Python |
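The fix above branches on whether the fetched content is already text. A minimal standalone sketch of the same decode-then-parse pattern with tinycss (Python 3 semantics assumed):
import tinycss
css = b'p { color: red }'  # e.g. raw bytes from a HTTP response
data = css if isinstance(css, str) else css.decode('utf-8')
sheet = tinycss.make_parser('page3').parse_stylesheet(data)
print(len(sheet.errors))   # 0 for this valid stylesheet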
88dd48eab612e89b956dea5600a999c78c61d5fb | fix lpproj algorithm | jakevdp/lpproj | lpproj/lpproj.py | lpproj/lpproj.py | import numpy as np
from scipy import linalg
from sklearn.neighbors import kneighbors_graph, NearestNeighbors
from sklearn.utils import check_array
from sklearn.base import BaseEstimator, TransformerMixin
class LocalityPreservingProjection(BaseEstimator, TransformerMixin):
def __init__(self, n_neighbors=5, n_components=2, eigen_solver='auto',
neighbors_algorithm='auto', kernel_width=None):
self.n_neighbors = n_neighbors
self.n_components = n_components
self.eigen_solver = eigen_solver
self.neighbors_algorithm = neighbors_algorithm
self.kernel_width = kernel_width
def fit(self, X, y=None):
X = check_array(X)
self.nbrs_ = NearestNeighbors(n_neighbors=self.n_neighbors,
algorithm=self.neighbors_algorithm)
self.nbrs_.fit(X)
self.training_data_ = self.nbrs_._fit_X
# TODO: make this more efficient
# TODO: make duplicates behave correctly
if self.kernel_width is None:
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='connectivity', include_self=True)
else:
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='distance')
W.data = np.exp(-W.data ** 2 / self.kernel_width ** 2)
W = W.toarray()
W = np.maximum(W, W.T)
D = np.diag(W.sum(1))
L = D - W
A = np.dot(X.T, np.dot(L, X))
B = np.dot(X.T, np.dot(D, X))
evals, evecs = linalg.eigh(A, B)
self.projection_ = evecs[:, :self.n_components]
return self
def transform(self, X):
X = check_array(X)
return np.dot(X, self.projection_)
| import numpy as np
from sklearn.neighbors import kneighbors_graph
from sklearn.utils import check_array
from sklearn.base import BaseEstimator, TransformerMixin
class LocalityPreservingProjection(BaseEstimator, TransformerMixin)::
def __init__(self, n_neighbors=5, n_components=2, eigen_solver='auto',
neighbors_algorithm='auto'):
self.n_neighbors = n_neighbors
self.n_components = n_components
self.eigen_solver = eigen_solver
self.neighbors_algorithm = neighbors_algorithm
def fit(self, X, y=None):
X = check_array(X)
self.nbrs_ = NearestNeighbors(n_neighbors=self.n_neighbors,
algorithm=self.neighbors_algorithm)
self.nbrs_.fit(X)
self.training_data_ = self.nbrs_._fit_X
# TODO: make this more efficient
# L = D - W
W = kneighbors_graph(self.nbrs_, self.n_neighbors,
mode='connectivity')
D = np.diag(W.sum(1))
L = D - W
evals, evecs = np.linalg.eigh(np.dot(X, np.dot(L, X.T)),
np.dot(X, np.dot(D, X.T)))
self.projection_ = evecs[:, :self.n_components]
return self
def transform(self, X):
X = check_array(X)
reutrn np.dot(self.projection_.T, X)
| bsd-3-clause | Python |
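A minimal usage sketch of the fixed estimator on random data (the shapes are chosen arbitrarily):
import numpy as np
X = np.random.RandomState(0).rand(100, 5)
lpp = LocalityPreservingProjection(n_components=2)
X2 = lpp.fit(X).transform(X)   # X2 has shape (100, 2)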
8e58d7cccb837254cc433c7533bff119cc19645d | Use json instead of django.utils.simplejson. | pozytywnie/django-javascript-settings | javascript_settings/templatetags/javascript_settings_tags.py | javascript_settings/templatetags/javascript_settings_tags.py | import json
from django import template
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + json.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| from django import template
from django.utils import simplejson
from javascript_settings.configuration_builder import \
DEFAULT_CONFIGURATION_BUILDER
register = template.Library()
@register.tag(name='javascript_settings')
def do_javascript_settings(parser, token):
"""
Returns a node with generated configuration.
"""
return JavascriptConfigurationNode()
class JavascriptConfigurationNode(template.Node):
"""
Represents a node that renders JavaScript configuration.
"""
def __init__(self):
pass
def render(self, context):
"""
Renders JS configuration.
"""
return 'var configuration = ' + simplejson.dumps(
DEFAULT_CONFIGURATION_BUILDER.get_configuration()
) + ';'
| mit | Python |
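The swap from simplejson to the stdlib json module leaves the rendered tag unchanged; a quick runnable sketch of what render() emits (the example configuration dict is an assumption):
import json
print('var configuration = ' + json.dumps({'debug': True}) + ';')
# -> var configuration = {"debug": true};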
afafb47d77fd673abf8d8ce9baa9824b985a943a | Add create_class_wrapper and class_wrapper | ryanhiebert/undecorate | undecorate.py | undecorate.py | """Allow your decorations to be un-decorated.
In some cases, such as when testing, it can be useful to access the
decorated class or function directly, so as not to use the behavior
or interface that the decorator might introduce.
Example:
>>> from functools import wraps
>>> from undecorate import unwrap, unwrappable
>>>
>>> @unwrappable
... def pack(func):
... @wraps(func)
... def wrapper(args, kwargs):
... return func(*args, **kwargs)
... return wrapper
...
>>> @pack
... def myfunc(a, b, c=None, d=None):
... return (a, b, c, d)
...
>>> myfunc('a', 'b', c='c')
Traceback (most recent call last):
...
TypeError: wrapper() got an unexpected keyword argument 'c'
>>>
>>> unwrap(myfunc)('a', 'b', c='c')
('a', 'b', 'c', None)
"""
from functools import wraps, partial
def unwrappable(decorator):
"""Make a decorator able to be un-decorated.
This meta-decorator takes a decorator, and returns a new decorator
that allows the decoration to be used by unwrap().
"""
@wraps(decorator)
def wrapper(decoration):
decorated = decorator(decoration)
decorated.__decoration__ = decoration
return decorated
return wrapper
def unwrap(wrapped):
"""Remove all wrappers from this decorated object."""
while True:
decoration = getattr(wrapped, '__decoration__', None)
if decoration is None:
return wrapped
wrapped = decoration
CLASS_WRAPPER_DELETES = ('__dict__', '__doc__', '__weakref__')
CLASS_WRAPPER_ASSIGNMENTS = ('__module__',)
def create_class_wrapper(wrapper,
wrapped,
deleted=CLASS_WRAPPER_DELETES,
assigned=CLASS_WRAPPER_ASSIGNMENTS):
"""Create a wrapper class that looks like the wrapped class.
wrapper is the class used to override the wrapped class.
    wrapped is the class whose values are overridden by the wrapper.
deleted is a tuple naming the __dict__ items to be removed from the
wrapper class (defaults to CLASS_WRAPPER_DELETES).
assigned is a tuple naming the __dict__ items to be copied directly
from the wrapped class (defaults to CLASS_WRAPPER_ASSIGNMENTS).
    A notable difference from update_wrapper is that it creates a new class
that does not appear to be exactly the same as the wrapped class, but
rather mimics the name and the module, and inherits from the original
class, relying on class inheritance to mimic the behavior.
"""
__dict__ = dict(wrapper.__dict__)
for attr in deleted:
__dict__.pop(attr)
for attr in assigned:
__dict__[attr] = getattr(wrapped, attr)
__dict__['__wrapped__'] = wrapped
# Use the metaclass of the wrapped class
return wrapped.__class__(wrapped.__name__, (wrapped,), __dict__)
def class_wraps(wrapped,
deleted=CLASS_WRAPPER_DELETES,
assigned=CLASS_WRAPPER_ASSIGNMENTS):
"""Decorator factory to apply create_class_wrapper() to a wrapper class.
Return a decorator that invokes create_class_wrapper() with the decorated
class as the wrapper argument and the arguments to class_wraps() as the
remaining arguments. Default arguments are as for create_class_wrapper().
This is a convenience function to simplify applying partial() to
create_class_wrapper().
"""
return partial(create_class_wrapper, wrapped=wrapped,
deleted=deleted, assigned=assigned)
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
doctest.testfile('README.rst', optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
| """Allow your decorations to be un-decorated.
In some cases, such as when testing, it can be useful to access the
decorated class or function directly, so as not to use the behavior
or interface that the decorator might introduce.
Example:
>>> from functools import wraps
>>> from undecorate import unwrap, unwrappable
>>>
>>> @unwrappable
... def pack(func):
... @wraps(func)
... def wrapper(args, kwargs):
... return func(*args, **kwargs)
... return wrapper
...
>>> @pack
... def myfunc(a, b, c=None, d=None):
... return (a, b, c, d)
...
>>> myfunc('a', 'b', c='c')
Traceback (most recent call last):
...
TypeError: wrapper() got an unexpected keyword argument 'c'
>>>
>>> unwrap(myfunc)('a', 'b', c='c')
('a', 'b', 'c', None)
"""
from functools import wraps
def unwrappable(decorator):
"""Make a decorator able to be un-decorated.
This meta-decorator takes a decorator, and returns a new decorator
that allows the decoration to be used by unwrap().
"""
@wraps(decorator)
def wrapper(decoration):
decorated = decorator(decoration)
decorated.__decoration__ = decoration
return decorated
return wrapper
def unwrap(wrapped):
"""Remove all wrappers from this decorated object."""
while True:
decoration = getattr(wrapped, '__decoration__', None)
if decoration is None:
return wrapped
wrapped = decoration
if __name__ == '__main__':
import doctest
doctest.testmod(optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
doctest.testfile('README.rst', optionflags=doctest.IGNORE_EXCEPTION_DETAIL)
| mit | Python |
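A minimal sketch of class_wraps in action (the Greeter classes are illustrative only):
class Greeter(object):
    def greet(self):
        return 'hello'
@class_wraps(Greeter)
class LoudGreeter(object):
    def greet(self):
        return 'HELLO'
# LoudGreeter is replaced by a class named 'Greeter' that subclasses the
# original, so isinstance(LoudGreeter(), Greeter) holds and
# LoudGreeter.__wrapped__ is Greeter.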
551c4b971f1d18e232ba193cf486300d3490224b | add log | icoxfog417/music_hack_day_onpasha,icoxfog417/music_hack_day_onpasha | api/photo2song.py | api/photo2song.py | import asyncio
from collections import Counter
from api.bluemix_vision_recognition import VisionRecognizer
from api.echonest import Echonest
from api.spotify import Spotify
from machines.machine_loader import MachineLoader
import machines.photo_mood
def convert(image_urls):
vr = VisionRecognizer()
ec = Echonest()
sp = Spotify()
photo_to_mood = MachineLoader.load(machines.photo_mood)
TARGET_LABELS = ['Boat', 'Human', 'Insect', 'Invertebrate', 'Mammal', 'Man Made Scene', 'Outdoors', 'People Activity', 'Placental Mammal', 'Vertebrate']
log = []
    # analyze mood
log.append("begin vision recognition")
moods = Counter()
matrix = vr.recognize(image_urls).to_matrix(TARGET_LABELS)
for r in matrix:
mood = photo_to_mood.predict(r)[0]
moods[int(mood)] += 1
    target_mood = moods.most_common(1)[0][0]  # take the top mood; the count is discarded
target_mood = Echonest.MOOD[target_mood]
# choose song from mood
log.append("begin search song by mood")
tracks = ec.search_songs(target_mood)
# load spotify info
@asyncio.coroutine
def load_spotify(t):
t.load_spotify(sp)
log.append("begin load song information")
tasks = [load_spotify(t) for t in tracks]
done, _ = asyncio.get_event_loop().run_until_complete(asyncio.wait(tasks))
result = {
"mood": target_mood,
"tracks": [t.__dict__ for t in tracks],
"log": log
}
return result
| import asyncio
from collections import Counter
from api.bluemix_vision_recognition import VisionRecognizer
from api.echonest import Echonest
from api.spotify import Spotify
from machines.machine_loader import MachineLoader
import machines.photo_mood
def convert(image_urls):
vr = VisionRecognizer()
ec = Echonest()
sp = Spotify()
photo_to_mood = MachineLoader.load(machines.photo_mood)
TARGET_LABELS = ['Boat', 'Human', 'Insect', 'Invertebrate', 'Mammal', 'Man Made Scene', 'Outdoors', 'People Activity', 'Placental Mammal', 'Vertebrate']
    # analyze mood
moods = Counter()
matrix = vr.recognize(image_urls).to_matrix(TARGET_LABELS)
for r in matrix:
mood = photo_to_mood.predict(r)[0]
moods[int(mood)] += 1
    target_mood = moods.most_common(1)[0][0]  # take the top mood; the count is discarded
target_mood = Echonest.MOOD[target_mood]
# choose song from mood
tracks = ec.search_songs(target_mood)
# load spotify info
@asyncio.coroutine
def load_spotify(t):
t.load_spotify(sp)
tasks = [load_spotify(t) for t in tracks]
done, _ = asyncio.get_event_loop().run_until_complete(asyncio.wait(tasks))
result = {
"mood": target_mood,
"tracks": [t.__dict__ for t in tracks]
}
return result
| mit | Python |
c59c0911c5022291b38774bf407ca83557c78cc5 | test login and logout views. | aneumeier/userprofile,aneumeier/userprofile,aneumeier/userprofile | user/tests.py | user/tests.py | from django.test import TestCase
class ViewsTest(TestCase):
"""
TestCase to test all exposed views for anonymous users.
"""
def setUp(self):
pass
def testHome(self):
response = self.client.get('/user/')
self.assertEquals(response.status_code, 200)
def testLogin(self):
response = self.client.get('/user/login/')
self.assertEquals(response.status_code, 200)
def testLogout(self):
response = self.client.get('/user/logout/')
self.assertEquals(response.status_code, 200)
| from django.test import TestCase
class ViewsTest(TestCase):
"""
TestCase to test all exposed views for anonymous users.
"""
def setUp(self):
pass
def testHome(self):
response = self.client.get('/user/')
self.assertEquals(response.status_code, 200)
def testLogin(self):
response = self.client.get('/login/')
self.assertEquals(response.status_code, 200)
def testLogout(self):
response = self.client.get('/logout/')
self.assertEquals(response.status_code, 200)
| mit | Python |
c6536da7fc1eda82922b286c096412e4371f6d4c | Bump version | jaj42/GraPhysio,jaj42/dyngraph,jaj42/GraPhysio | graphysio/__init__.py | graphysio/__init__.py | """Graphical time series visualizer and analyzer."""
__version__ = '2021.07.14.1'
__all__ = [
'algorithms',
'dialogs',
'exporter',
'legend',
'mainui',
'puplot',
'tsplot',
'utils',
'types',
'ui',
'transformations',
]
| """Graphical time series visualizer and analyzer."""
__version__ = '2021.07.14'
__all__ = [
'algorithms',
'dialogs',
'exporter',
'legend',
'mainui',
'puplot',
'tsplot',
'utils',
'types',
'ui',
'transformations',
]
| isc | Python |
23cdb0d62e44797f84aee61f1a4c2909df8221b0 | Fix settings import and add an option to DjangoAppEngineMiddleware to allow setting up of signals on init | potatolondon/djangoappengine-1-4,potatolondon/djangoappengine-1-4 | main/__init__.py | main/__init__.py | import logging
import os
from django.utils.importlib import import_module
def validate_models():
"""
Since BaseRunserverCommand is only run once, we need to call
    model validation here to ensure it is run every time the code
changes.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
logging.info("Validating models...")
s = StringIO()
num_errors = get_validation_errors(s, None)
if num_errors:
s.seek(0)
error_text = s.read()
logging.critical("One or more models did not validate:\n%s" %
error_text)
else:
logging.info("All models validated.")
from djangoappengine.utils import on_production_server
if not on_production_server:
validate_models()
class DjangoAppEngineMiddleware:
def __init__(self, app, setup_signals=False):
self.settings_module = os.environ['DJANGO_SETTINGS_MODULE']
from djangoappengine.boot import setup_env
setup_env()
from django.conf import settings
if setup_signals:
            # Import each app's models.py so that signal handlers are
            # installed and indexes are loaded where apps need them.
for app_to_import in settings.INSTALLED_APPS:
try:
import_module('%s.models' % app_to_import)
except ImportError:
pass
## In vanilla Django, staticfiles overrides runserver to use StaticFilesHandler
## if necessary. As we can't do this in our runserver (because we handover to dev_appserver)
## this has to be done here
if (not on_production_server and settings.DEBUG) and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.handlers import StaticFilesHandler
app = StaticFilesHandler(app)
if getattr(settings, 'ENABLE_APPSTATS', False):
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app = appstats_wsgi_middleware(app)
self.wrapped_app = app
def __call__(self, environ, start_response):
#Always make sure the settings module is set - AppEngine sometimes loses it!
os.environ['DJANGO_SETTINGS_MODULE'] = self.settings_module
return self.wrapped_app(environ, start_response)
| import logging
import os
def validate_models():
"""
Since BaseRunserverCommand is only run once, we need to call
    model validation here to ensure it is run every time the code
changes.
"""
from django.core.management.validation import get_validation_errors
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
logging.info("Validating models...")
s = StringIO()
num_errors = get_validation_errors(s, None)
if num_errors:
s.seek(0)
error_text = s.read()
logging.critical("One or more models did not validate:\n%s" %
error_text)
else:
logging.info("All models validated.")
from djangoappengine.utils import on_production_server
if not on_production_server:
validate_models()
from django.conf import settings
class DjangoAppEngineMiddleware:
def __init__(self, app):
self.settings_module = os.environ['DJANGO_SETTINGS_MODULE']
from djangoappengine.boot import setup_env
setup_env()
## In vanilla Django, staticfiles overrides runserver to use StaticFilesHandler
## if necessary. As we can't do this in our runserver (because we handover to dev_appserver)
## this has to be done here
if (not on_production_server and settings.DEBUG) and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
from django.contrib.staticfiles.handlers import StaticFilesHandler
app = StaticFilesHandler(app)
if getattr(settings, 'ENABLE_APPSTATS', False):
from google.appengine.ext.appstats.recording import \
appstats_wsgi_middleware
app = appstats_wsgi_middleware(app)
self.wrapped_app = app
def __call__(self, environ, start_response):
#Always make sure the settings module is set - AppEngine sometimes loses it!
os.environ['DJANGO_SETTINGS_MODULE'] = self.settings_module
return self.wrapped_app(environ, start_response)
| bsd-3-clause | Python |
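A minimal wiring sketch for the middleware above (Django 1.4-era API; enabling setup_signals is the option this commit adds):
from django.core.wsgi import get_wsgi_application
application = DjangoAppEngineMiddleware(get_wsgi_application(), setup_signals=True)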
e4ad2863236cd36e5860f1d17a06ca05e30216d5 | Store more stuff about songs in the queue | projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox,projectweekend/Pi-Jukebox | make_database.py | make_database.py | import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0,
name TEXT,
artist_name TEXT,
artist_uri TEXT,
artist_image TEXT,
album_name TEXT,
album_uri TEXT,
album_image TEXT
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| import sqlite3
CREATE_SONG_QUEUE = '''
CREATE TABLE IF NOT EXISTS
jukebox_song_queue (
spotify_uri TEXT,
has_played INTEGER DEFAULT 0
);
'''
if __name__ == '__main__':
conn = sqlite3.connect('jukebox.db')
cursor = conn.cursor()
cursor.execute(CREATE_SONG_QUEUE)
conn.commit()
conn.close()
| mit | Python |
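Once the table exists, rows can be queued with plain sqlite3 calls; a sketch with placeholder values:
conn = sqlite3.connect('jukebox.db')
conn.execute(
    "INSERT INTO jukebox_song_queue (spotify_uri, name, artist_name) "
    "VALUES (?, ?, ?)",
    ('spotify:track:xyz', 'Some Song', 'Some Artist'))
conn.commit()
conn.close()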
ed97f1cdbcc5a00c2bf597ad921b17da652b0b07 | add annotations to _pytesttester.py | pydata/bottleneck,pydata/bottleneck,kwgoodman/bottleneck,pydata/bottleneck,kwgoodman/bottleneck,kwgoodman/bottleneck | bottleneck/_pytesttester.py | bottleneck/_pytesttester.py | """
Generic test utilities.
Based on scipy._libs._testutils
"""
import os
import sys
from typing import Optional, List
__all__ = ["PytestTester"]
class PytestTester(object):
"""
Pytest test runner entry point.
"""
def __init__(self, module_name: str) -> None:
self.module_name = module_name
def __call__(
self,
label: str = "fast",
verbose: int = 1,
extra_argv: Optional[List[str]] = None,
doctests: bool = False,
coverage: bool = False,
tests: Optional[List[str]] = None,
parallel: Optional[int] = None,
) -> bool:
import pytest
module = sys.modules[self.module_name]
module_path = os.path.abspath(module.__path__[0])
pytest_args = ["-l"]
if doctests:
raise ValueError("Doctests not supported")
if extra_argv:
pytest_args += list(extra_argv)
if verbose and int(verbose) > 1:
pytest_args += ["-" + "v" * (int(verbose) - 1)]
if coverage:
pytest_args += ["--cov=" + module_path]
if label == "fast":
pytest_args += ["-m", "not slow"]
elif label != "full":
pytest_args += ["-m", label]
if tests is None:
tests = [self.module_name]
if parallel is not None and parallel > 1:
if _pytest_has_xdist():
pytest_args += ["-n", str(parallel)]
else:
import warnings
warnings.warn(
"Could not run tests in parallel because "
"pytest-xdist plugin is not available."
)
pytest_args += ["--pyargs"] + list(tests)
try:
code = pytest.main(pytest_args)
except SystemExit as exc:
code = exc.code
return code == 0
def _pytest_has_xdist() -> bool:
"""
Check if the pytest-xdist plugin is installed, providing parallel tests
"""
    # Check xdist exists without importing; otherwise pytest emits warnings
from importlib.util import find_spec
return find_spec("xdist") is not None
| """
Generic test utilities.
Based on scipy._libs._testutils
"""
from __future__ import division, print_function, absolute_import
import os
import sys
__all__ = ["PytestTester"]
class PytestTester(object):
"""
Pytest test runner entry point.
"""
def __init__(self, module_name):
self.module_name = module_name
def __call__(
self,
label="fast",
verbose=1,
extra_argv=None,
doctests=False,
coverage=False,
tests=None,
parallel=None,
):
import pytest
module = sys.modules[self.module_name]
module_path = os.path.abspath(module.__path__[0])
pytest_args = ["-l"]
if doctests:
raise ValueError("Doctests not supported")
if extra_argv:
pytest_args += list(extra_argv)
if verbose and int(verbose) > 1:
pytest_args += ["-" + "v" * (int(verbose) - 1)]
if coverage:
pytest_args += ["--cov=" + module_path]
if label == "fast":
pytest_args += ["-m", "not slow"]
elif label != "full":
pytest_args += ["-m", label]
if tests is None:
tests = [self.module_name]
if parallel is not None and parallel > 1:
if _pytest_has_xdist():
pytest_args += ["-n", str(parallel)]
else:
import warnings
warnings.warn(
"Could not run tests in parallel because "
"pytest-xdist plugin is not available."
)
pytest_args += ["--pyargs"] + list(tests)
try:
code = pytest.main(pytest_args)
except SystemExit as exc:
code = exc.code
return code == 0
def _pytest_has_xdist():
"""
Check if the pytest-xdist plugin is installed, providing parallel tests
"""
    # Check xdist exists without importing; otherwise pytest emits warnings
from importlib.util import find_spec
return find_spec("xdist") is not None
| bsd-2-clause | Python |
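Such a tester is typically exposed from the package __init__; an assumed wiring sketch:
test = PytestTester(__name__)
del PytestTester
# callers then run, e.g.:  bottleneck.test(label='full', verbose=2, parallel=4)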
075c06a6360d8b88745e3bffd4883beead36c59b | Add orders_script | g4b1nagy/hipmenu-autoorder,g4b1nagy/hipmenu-autoorder | config_example.py | config_example.py | CHROMEDRIVER_PATH = '/usr/lib/chromium-browser/chromedriver'
FACEBOOK = {
'email': '',
'password': '',
}
HIPMENU = {
'restaurant_url': 'https://www.hipmenu.ro/#p1/rg/cluj-prod/group/98254//',
}
SKYPE = {
'username': '',
'password': '',
'conversation_title': '',
}
NEXMO = {
'api_key': '',
'api_secret': '',
'phone_number': '40744444444',
}
TEST = True
orders_script = """
var orders = [];
var my_name = document.querySelector('#h-profilename').textContent;
var name_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .header-left p'));
var price_tags = Array.prototype.slice.call(document.querySelectorAll('.container-white-rounded .summary-total .value'));
if (name_tags.length > price_tags.length) {
name_tags.splice(0, 1);
}
for (var i = 0; i < name_tags.length; i++) {
orders.push({
name: name_tags[i].textContent.replace('Selecțiile mele', my_name).trim(),
price: price_tags[i].textContent.trim(),
});
}
return orders;
"""
| CHROMEDRIVER_PATH = '/usr/lib/chromium-browser/chromedriver'
FACEBOOK = {
'email': '',
'password': '',
}
HIPMENU = {
'restaurant_url': 'https://www.hipmenu.ro/#p1/rg/cluj-prod/group/98254//',
}
SKYPE = {
'username': '',
'password': '',
'conversation_title': '',
}
NEXMO = {
'api_key': '',
'api_secret': '',
'phone_number': '40744444444',
}
TEST = True
| unlicense | Python |
60890b614132a8cfd48be3e001114275752e9ac4 | fix typo | materialsvirtuallab/megnet,materialsvirtuallab/megnet,materialsvirtuallab/megnet,materialsvirtuallab/megnet,materialsvirtuallab/megnet | megnet/config.py | megnet/config.py | """Data types"""
import numpy as np
import tensorflow as tf
DTYPES = {'float32': {'numpy': np.float32, 'tf': tf.float32},
'float16': {'numpy': np.float16, 'tf': tf.float16},
'int32': {'numpy': np.int32, 'tf': tf.int32},
'int16': {'numpy': np.int16, 'tf': tf.int16}}
class DataType:
np_float = np.float32
np_int = np.int32
tf_float = tf.float32
tf_int = tf.int32
@classmethod
def set_dtype(cls, data_type: str) -> None:
"""
Class method to set the data types
Args:
data_type (str): '16' or '32'
"""
if data_type.endswith('32'):
float_key = 'float32'
int_key = 'int32'
elif data_type.endswith('16'):
float_key = 'float16'
int_key = 'int16'
else:
raise ValueError("Data type not known, choose '16' or '32'")
cls.np_float = DTYPES[float_key]['numpy']
cls.tf_float = DTYPES[float_key]['tf']
cls.np_int = DTYPES[int_key]['numpy']
cls.tf_int = DTYPES[int_key]['tf']
def set_global_dtypes(data_type) -> None:
"""
Function to set the data types
Args:
data_type (str): '16' or '32'
    Returns: None
"""
DataType.set_dtype(data_type)
| """Data types"""
import numpy as np
import tensorflow as tf
DTYPES = {'float32': {'numpy': np.float32, 'tf': tf.float32},
'float16': {'numpy': np.float16, 'tf': tf.float16},
'int32': {'numpy': np.int32, 'tf': tf.int32},
'int16': {'numpy': np.int32, 'tf': tf.int32}}
class DataType:
np_float = np.float32
np_int = np.int32
tf_float = tf.float32
tf_int = tf.int32
@classmethod
def set_dtype(cls, data_type: str) -> None:
"""
Class method to set the data types
Args:
data_type (str): '16' or '32'
"""
if data_type.endswith('32'):
float_key = 'float32'
int_key = 'int32'
elif data_type.endswith('16'):
float_key = 'float16'
int_key = 'int16'
else:
raise ValueError("Data type not known, choose '16' or '32'")
cls.np_float = DTYPES[float_key]['numpy']
cls.tf_float = DTYPES[float_key]['tf']
cls.np_int = DTYPES[int_key]['numpy']
cls.tf_int = DTYPES[int_key]['tf']
def set_global_dtypes(data_type) -> None:
"""
Function to set the data types
Args:
data_type (str): '16' or '32'
    Returns: None
"""
DataType.set_dtype(data_type)
| bsd-3-clause | Python |
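A quick sketch of switching the global dtypes at runtime (the values shown are illustrative):
set_global_dtypes('16')
x = DataType.np_float(1.5)   # numpy.float16 after the switch
set_global_dtypes('32')      # back to the float32/int32 defaults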
5eb2c6f7e1bf0cc1b73b167a08085fccf77974fe | Tidy up and doc-comment AWSInstanceEnv class | crossgovernmentservices/csd-notes,crossgovernmentservices/csd-notes,crossgovernmentservices/csd-notes | app/config/aws.py | app/config/aws.py | # -*- coding: utf-8 -*-
"""
Dictionary-like class for config settings from AWS credstash
"""
from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table=None, context=None, profile_name=None):
"""
Low level API for fetching secrets for the current instance
"""
if not table:
table = '{}-credentials'.format(self.env)
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
"""
Enable dict-like access
"""
return self.getSecret(key)
def get(self, key, default=None):
"""
Return the value, or the default if not found
"""
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| from boto import ec2, utils
import credstash
class AWSIntanceEnv(object):
def __init__(self):
metadata = utils.get_instance_metadata()
self.instance_id = metadata['instance-id']
self.region = metadata['placement']['availability-zone'][:-1]
conn = ec2.connect_to_region(self.region)
reservations = conn.get_all_instances(instance_ids=[self.instance_id])
instance = reservations[0].instances[0]
self.env = instance.tags['Environment']
self.version = instance.tags['ConfigVersion']
def getSecret(self, name, table="credential-store", context=None,
profile_name=None):
return credstash.getSecret(
name,
self.version,
region=self.region,
table=table,
context=context,
profile_name=profile_name)
def __getitem__(self, key):
return self.getSecret(key, table='{}-credentials'.format(self.env))
def get(self, key, default=None):
try:
return self.__getitem__(key)
except credstash.ItemNotFound:
return default
| mit | Python |
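A usage sketch for the class above; it only works on an EC2 instance tagged with Environment and ConfigVersion, backed by a '<env>-credentials' credstash table, and the secret key name is an assumption:
env = AWSIntanceEnv()
db_password = env.get('DATABASE_PASSWORD', '')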
9642b8f3d2f14b3a61054f68f05f4ef8eaca0803 | add validation | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | molo/core/management/commands/add_translated_pages_to_pages.py | molo/core/management/commands/add_translated_pages_to_pages.py | from __future__ import absolute_import, unicode_literals
from django.core.management.base import BaseCommand
from molo.core.models import PageTranslation, SiteLanguage, Page
class Command(BaseCommand):
def handle(self, *args, **options):
# first add all the translations to the main language Page
# and add the main language page as a translated page
# to the translated pages
main_language = SiteLanguage.objects.get(is_main_language=True)
pages = Page.objects.all().exclude(depth__in=[1, 2, 3])
for page in pages:
if page.specific.language.pk == main_language.pk:
for translation in PageTranslation.objects.filter(page=page):
if translation.page and translation.translated_page:
page.specific.translated_pages.add(
translation.translated_page.specific)
translation.translated_page.specific.translated_pages\
.add(page.specific)
page.save()
translation.translated_page.save()
else:
self.stdout.write(self.style.NOTICE(
'Translation with pk "%s"'
'is missing page/translated_page'
% (translation.pk)))
# loop through all translated_pages on the main language page and
# add all the translations to the rest of the translated pages
# except the language that it is in
for page in Page.objects.all().exclude(depth__in=[1, 2, 3]):
if page.language:
if page.specific.language.pk == main_language.pk:
for translated_page in \
page.specific.translated_pages.all():
translations = page.specific.translated_pages.all().\
exclude(language__pk=translated_page.language.pk)
for translation in translations:
translated_page.translated_pages.add(translation)
translated_page.save()
| from __future__ import absolute_import, unicode_literals
from django.core.management.base import BaseCommand
from molo.core.models import PageTranslation, SiteLanguage, Page
class Command(BaseCommand):
def handle(self, *args, **options):
# first add all the translations to the main language Page
# and add the main language page as a translated page
# to the translated pages
main_language = SiteLanguage.objects.get(is_main_language=True)
pages = Page.objects.all().exclude(depth__in=[1, 2, 3])
for page in pages:
if page.specific.language.pk == main_language.pk:
for translation in PageTranslation.objects.filter(page=page):
if translation.page and translation.translated_page:
page.specific.translated_pages.add(
translation.translated_page.specific)
translation.translated_page.specific.translated_pages\
.add(page.specific)
page.save()
translation.translated_page.save()
else:
self.stdout.write(self.style.NOTICE(
'Translation with pk "%s"'
'is missing page/translated_page'
% (translation.pk)))
# loop through all translated_pages on the main language page and
# add all the translations to the rest of the translated pages
# except the language that it is in
for page in Page.objects.all().exclude(depth__in=[1, 2, 3]):
if page.specific.language.pk == main_language.pk:
for translated_page in page.specific.translated_pages.all():
translations = page.specific.translated_pages.all().\
exclude(language__pk=translated_page.language.pk)
for translation in translations:
translated_page.translated_pages.add(translation)
translated_page.save()
| bsd-2-clause | Python |
58d7592c603509f2bb625e4e2e5cb31ada4a8194 | Change test for make_kernel(kerneltype='airy') from class to function | AustereCuriosity/astropy,astropy/astropy,lpsinger/astropy,MSeifert04/astropy,larrybradley/astropy,StuartLittlefair/astropy,dhomeier/astropy,kelle/astropy,joergdietrich/astropy,mhvk/astropy,kelle/astropy,joergdietrich/astropy,StuartLittlefair/astropy,tbabej/astropy,dhomeier/astropy,StuartLittlefair/astropy,mhvk/astropy,DougBurke/astropy,MSeifert04/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,astropy/astropy,funbaker/astropy,astropy/astropy,stargaser/astropy,stargaser/astropy,DougBurke/astropy,pllim/astropy,tbabej/astropy,pllim/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,funbaker/astropy,larrybradley/astropy,StuartLittlefair/astropy,larrybradley/astropy,larrybradley/astropy,saimn/astropy,bsipocz/astropy,saimn/astropy,stargaser/astropy,joergdietrich/astropy,tbabej/astropy,aleksandr-bakanov/astropy,pllim/astropy,saimn/astropy,lpsinger/astropy,AustereCuriosity/astropy,DougBurke/astropy,pllim/astropy,dhomeier/astropy,funbaker/astropy,pllim/astropy,tbabej/astropy,lpsinger/astropy,astropy/astropy,kelle/astropy,kelle/astropy,aleksandr-bakanov/astropy,astropy/astropy,joergdietrich/astropy,DougBurke/astropy,AustereCuriosity/astropy,funbaker/astropy,saimn/astropy,bsipocz/astropy,MSeifert04/astropy,mhvk/astropy,bsipocz/astropy,mhvk/astropy,larrybradley/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,saimn/astropy,tbabej/astropy,dhomeier/astropy,kelle/astropy,dhomeier/astropy,bsipocz/astropy,lpsinger/astropy,stargaser/astropy,joergdietrich/astropy,mhvk/astropy | astropy/nddata/convolution/tests/test_make_kernel.py | astropy/nddata/convolution/tests/test_make_kernel.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy():
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from numpy.testing import assert_allclose, assert_equal
from ....tests.helper import pytest
from ..make_kernel import make_kernel
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
class TestMakeKernel(object):
"""
Test the make_kernel function
"""
@pytest.mark.skipif('not HAS_SCIPY')
def test_airy(self):
"""
Test kerneltype airy, a.k.a. brickwall
Checks https://github.com/astropy/astropy/pull/939
"""
k1 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='airy')
k2 = make_kernel([3, 3], kernelwidth=0.5, kerneltype='brickwall')
ref = np.array([[ 0.06375119, 0.12992753, 0.06375119],
[ 0.12992753, 0.22528514, 0.12992753],
[ 0.06375119, 0.12992753, 0.06375119]])
assert_allclose(k1, ref, rtol=0, atol=1e-7)
assert_equal(k1, k2)
| bsd-3-clause | Python |
21850d8ab44981b2bb02cb50386db717aacc730b | Fix poor coverage | andela-sjames/paystack-python | paystackapi/tests/test_product.py | paystackapi/tests/test_product.py | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
class TestProduct(BaseTestCase):
@httpretty.activate
def test_product_create(self):
"""Method defined to test product creation."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Product successfully created"}',
status=201,
)
response = Product.create(
name="Product pypaystack test", description="my test description",
price=500000, currency="NGN"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_product_list(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}',
status=201,
)
response = Product.list()
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product fetch method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.fetch(5499)
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_update(self):
"""Function defined to test Product update method."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.update(product_id=5499, name="Product pypaystack test",
description="my test description", price=500000000,
currency="USD"
)
self.assertEqual(response['status'], True)
| import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.product import Product
class TestProduct(BaseTestCase):
@httpretty.activate
def test_product_create(self):
"""Method defined to test product creation."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Product successfully created"}',
status=201,
)
response = Product.create(
name="Product pypaystack test", description="my test description",
price=500000, currency="NGN"
)
self.assertTrue(response['status'])
@httpretty.activate
def test_product_list(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}], "meta":{}}',
status=201,
)
response = Product.list()
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.fetch(5499)
self.assertEqual(response['status'], True)
@httpretty.activate
def test_product_fetch(self):
"""Function defined to test Product list method."""
httpretty.register_uri(
httpretty.PUT,
self.endpoint_url("/product/5499"),
content_type='text/json',
body='{"status": true, "message": "Products retrieved", "data":[{}]}',
status=201,
)
response = Product.update(product_id=5499, name="Product pypaystack test",
description="my test description", price=500000000,
currency="USD"
)
self.assertEqual(response['status'], True)
| mit | Python |
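Aside on the fix in this row: a Python class body silently keeps only the last of two same-named methods, which is why the old file's update test never ran and coverage was poor. A dependency-free illustration (not part of the dataset row):
class Demo(object):
    def test_fetch(self):
        return "fetch"
    def test_fetch(self):  # same name: this rebinding discards the first method
        return "update"
print(Demo().test_fetch())  # prints "update" -- the first definition is gone
print(len([m for m in vars(Demo) if m == "test_fetch"]))  # 1: only one attribute survives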
69ac1eb2f125e93444c134346dca954d8c040d42 | Implement the API to get system running status | ParadropLabs/Paradrop,ParadropLabs/Paradrop,ParadropLabs/Paradrop | paradrop/src/paradrop/backend/system_status.py | paradrop/src/paradrop/backend/system_status.py | '''
Get system running status including CPU load, memory usage, network traffic.
'''
import psutil
import time
class SystemStatus(object):
def __init__(self):
self.timestamp = time.time()
self.cpu_load = []
self.mem = dict(total = 0,
available = 0,
free = 0,
cached = 0,
buffers = 0)
self.disk_partitions = {}
self.network = {}
partitions = psutil.disk_partitions()
for p in partitions:
if p.fstype == 'ext4':
usage = psutil.disk_usage(p.mountpoint)
self.disk_partitions[p.mountpoint] = {
'total': usage.total,
'used': usage.used
}
def getStatus(self):
timestamp = time.time()
if (timestamp > self.timestamp + 0.8):
self.timestamp = timestamp
self.refreshCpuLoad()
self.refreshMemoryInfo()
self.refreshDiskInfo()
self.refreshNetworkTraffic()
result = {
'cpu_load': self.cpu_load,
'mem': self.mem,
'disk': self.disk_partitions,
'network': self.network
}
return result
def refreshCpuLoad(self):
self.cpu_load = map(int, psutil.cpu_percent(percpu=True))
def refreshMemoryInfo(self):
mem = psutil.virtual_memory()
self.mem['total'] = mem.total
self.mem['available'] = mem.available
self.mem['free'] = mem.free
self.mem['cached'] = mem.cached
self.mem['buffers'] = mem.buffers
def refreshDiskInfo(self):
for key, value in self.disk_partitions.iteritems():
usage = psutil.disk_usage(key)
self.disk_partitions[key]['total'] = usage.total
self.disk_partitions[key]['used'] = usage.used
def refreshNetworkTraffic(self):
excluded_interfaces = set(["lo", 'br-lan', 'docker0', 'wlan0'])
interfaces = {}
stats = psutil.net_if_stats()
for key, value in stats.iteritems():
if key in excluded_interfaces:
continue
interfaces[key] = {
'isup': value.isup,
'speed': value.speed,
'mtu': value.mtu
}
addresses = psutil.net_if_addrs()
for key, value in addresses.iteritems():
if key in excluded_interfaces:
continue
for i in value:
if i.family == 2:
interfaces[key]['ipv4'] = i.address
interfaces[key]['netmask'] = i.netmask
elif i.family == 17:
interfaces[key]['mac'] = i.address
traffic = psutil.net_io_counters(pernic=True)
for key, value in traffic.iteritems():
if key in excluded_interfaces:
continue
interfaces[key]['bytes_sent'] = value.bytes_sent
interfaces[key]['bytes_recv'] = value.bytes_recv
interfaces[key]['packets_sent'] = value.packets_sent
interfaces[key]['packets_recv'] = value.packets_recv
interfaces[key]['errin'] = value.errin
interfaces[key]['errout'] = value.errout
interfaces[key]['dropin'] = value.dropin
interfaces[key]['dropout'] = value.dropout
self.network = interfaces
| '''
Get system running status including CPU load, memory usage, network traffic.
'''
class SystemStatus(object):
def __init__(self):
pass
def getStatus(self):
test = {
'cpuload': 10
}
return test
def refreshCpuLoad(self):
pass
def refreshMemoryInfo(self):
pass
def refreshDiskInfo(self):
pass
def refreshNetworkTraffic(self):
pass
| apache-2.0 | Python |
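A runnable sketch of the psutil calls the new SystemStatus class builds on (assumes psutil is installed; Python 3 shown, while the row itself uses Python 2 idioms such as iteritems):
import psutil
print(psutil.cpu_percent(percpu=True))             # one load percentage per core
print(psutil.virtual_memory().available)           # bytes of memory still usable
print(psutil.disk_usage('/').used)                 # bytes used on the root partition
print(list(psutil.net_io_counters(pernic=True)))   # interface names with traffic counters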
2726ec1c400a212b1cac13f20d65c1b43eb042b0 | Fix formatting in download-google-smart-card-client-library.py | googlechromelabs/chromeos_smart_card_connector,GoogleChromeLabs/chromeos_smart_card_connector,googlechromelabs/chromeos_smart_card_connector,GoogleChromeLabs/chromeos_smart_card_connector,googlechromelabs/chromeos_smart_card_connector,GoogleChromeLabs/chromeos_smart_card_connector,googlechromelabs/chromeos_smart_card_connector,GoogleChromeLabs/chromeos_smart_card_connector,GoogleChromeLabs/chromeos_smart_card_connector,GoogleChromeLabs/chromeos_smart_card_connector,googlechromelabs/chromeos_smart_card_connector,googlechromelabs/chromeos_smart_card_connector | example_js_standalone_smart_card_client_app/download-google-smart-card-client-library.py | example_js_standalone_smart_card_client_app/download-google-smart-card-client-library.py | #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downloads from GitHub the latest released version of the client library for
communicating to the Google Smart Card Connector app."""
import json
import os
import sys
import urllib2
GITHUB_REPO_OWNER = "GoogleChrome"
GITHUB_REPO = "chromeos_smart_card_connector"
CLIENT_LIBRARY_ASSET_NAME = "google-smart-card-client-library.js"
OUTPUT_FILE_NAME = "google-smart-card-client-library.js"
GITHUB_LATEST_RELEASE_URL_TEMPLATE = \
"https://api.github.com/repos/{owner}/{repo}/releases/latest"
def main():
sys.stderr.write('Accessing GitHub API...\n')
latest_release_url = GITHUB_LATEST_RELEASE_URL_TEMPLATE.format(
owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO)
latest_release_info = json.load(urllib2.urlopen(latest_release_url))
client_library_download_url = None
for asset in latest_release_info.get("assets", []):
if asset["name"] == CLIENT_LIBRARY_ASSET_NAME:
client_library_download_url = asset["browser_download_url"]
if client_library_download_url is None:
raise RuntimeError("Asset with the client library not found in the latest "
"GitHub release")
sys.stderr.write('Downloading from "{0}"...\n'.format(
client_library_download_url))
client_library = urllib2.urlopen(client_library_download_url).read()
if os.path.dirname(__file__):
output_file_path = os.path.join(
os.path.relpath(os.path.dirname(__file__)), OUTPUT_FILE_NAME)
else:
output_file_path = OUTPUT_FILE_NAME
with open(output_file_path, "wt") as f:
f.write(client_library)
sys.stderr.write(
'Successfully finished. The library is stored at "{0}".\n'.format(
output_file_path))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Downloads from GitHub the latest released version of the client library for
communicating to the Google Smart Card Connector app."""
import json
import os
import sys
import urllib2
GITHUB_REPO_OWNER = "GoogleChrome"
GITHUB_REPO = "chromeos_smart_card_connector"
CLIENT_LIBRARY_ASSET_NAME = "google-smart-card-client-library.js"
OUTPUT_FILE_NAME = "google-smart-card-client-library.js"
GITHUB_LATEST_RELEASE_URL_TEMPLATE = \
"https://api.github.com/repos/{owner}/{repo}/releases/latest"
def main():
sys.stderr.write('Accessing GitHub API...\n')
latest_release_url = GITHUB_LATEST_RELEASE_URL_TEMPLATE.format(
owner=GITHUB_REPO_OWNER, repo=GITHUB_REPO)
latest_release_info = json.load(urllib2.urlopen(latest_release_url))
client_library_download_url = None
for asset in latest_release_info.get("assets", []):
if asset["name"] == CLIENT_LIBRARY_ASSET_NAME:
client_library_download_url = asset["browser_download_url"]
if client_library_download_url is None:
raise RuntimeError("Asset with the client library not found in the latest "
"GitHub release")
sys.stderr.write('Downloading from "{0}"...\n'.format(
client_library_download_url))
client_library = urllib2.urlopen(client_library_download_url).read()
if os.path.dirname(__file__):
output_file_path = os.path.join(
os.path.relpath(os.path.dirname(__file__)), OUTPUT_FILE_NAME)
else:
output_file_path = OUTPUT_FILE_NAME
with open(output_file_path, "wt") as f:
f.write(client_library)
sys.stderr.write(
'Successfully finished. The library is stored at "{0}".\n'.format(
output_file_path))
if __name__ == '__main__':
main()
| apache-2.0 | Python |
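The script above is Python 2 (urllib2). A hedged Python 3 translation of its release lookup, standard library only (needs network access; prints the asset names the script scans):
import json
import urllib.request
url = ("https://api.github.com/repos/"
       "GoogleChrome/chromeos_smart_card_connector/releases/latest")
with urllib.request.urlopen(url) as response:
    latest = json.load(response)
print([asset["name"] for asset in latest.get("assets", [])])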
ff9e3c6ef604a47a616e111ee2a90fda77692977 | Bump version to 3.3.2 | JukeboxPipeline/jukeboxmaya,JukeboxPipeline/jukeboxmaya | src/jukeboxmaya/__init__.py | src/jukeboxmaya/__init__.py | __author__ = 'David Zuber'
__email__ = '[email protected]'
__version__ = '3.3.2'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
| __author__ = 'David Zuber'
__email__ = '[email protected]'
__version__ = '3.3.1'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
| bsd-3-clause | Python |
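Side note on version bumps such as this row's 3.3.1 -> 3.3.2: split into integer tuples they compare correctly, which plain string comparison does not guarantee:
old = tuple(int(part) for part in "3.3.1".split("."))
new = tuple(int(part) for part in "3.3.2".split("."))
print(new > old)           # True: tuples compare element by element
print("3.10.0" > "3.9.0")  # False as strings, which is why the tuple form matters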
a9c7a6e441159bdf1fd13d70bcc91617dee93f03 | revert revert. | phil65/script.module.kodi65 | lib/kodi65/selectdialog.py | lib/kodi65/selectdialog.py | # -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <[email protected]>
# This program is Free Software see LICENSE file for details
import xbmcgui
import xbmc
from kodi65 import addon
C_LIST_SIMPLE = 3
C_LIST_DETAIL = 6
C_BUTTON_GET_MORE = 5
C_LABEL_HEADER = 1
class SelectDialog(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self)
self.items = kwargs.get('listing')
self.header = kwargs.get('header')
self.detailed = kwargs.get('detailed')
self.extrabutton = kwargs.get('extrabutton')
self.listitems = [i.get_listitem() for i in self.items] if self.items else []
self.index = -1
def onInit(self):
if not self.listitems:
self.index == -1
self.close()
self.list = self.getControl(C_LIST_DETAIL)
self.getControl(C_LIST_DETAIL).setVisible(self.detailed)
self.getControl(C_LIST_SIMPLE).setVisible(not self.detailed)
self.getControl(C_BUTTON_GET_MORE).setVisible(bool(self.extrabutton))
if self.extrabutton:
self.getControl(C_BUTTON_GET_MORE).setLabel(self.extrabutton)
self.getControl(C_LABEL_HEADER).setLabel(self.header)
self.list.addItems(self.listitems)
self.setFocus(self.list)
def onClick(self, control_id):
if control_id in [C_LIST_SIMPLE, C_LIST_DETAIL]:
self.index = int(self.list.getSelectedPosition())
elif control_id == C_BUTTON_GET_MORE:
self.index = -2
self.close()
def onFocus(self, control_id):
pass
def open(listitems, header, detailed=True, extrabutton=False):
"""
open selectdialog, return index (-1 for closing, -2 for extra button)
"""
xbmc.executebuiltin("Dialog.Close(busydialog)")
w = SelectDialog('DialogSelect.xml', addon.PATH,
listing=listitems,
header=header,
detailed=detailed,
extrabutton=extrabutton)
w.doModal()
return w.index
| # -*- coding: utf8 -*-
# Copyright (C) 2015 - Philipp Temminghoff <[email protected]>
# This program is Free Software see LICENSE file for details
import xbmcgui
import xbmc
from kodi65 import addon
C_LIST_SIMPLE = 3
C_LIST_DETAIL = 6
C_BUTTON_GET_MORE = 5
C_LABEL_HEADER = 1
class SelectDialog(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
xbmcgui.WindowXMLDialog.__init__(self)
self.items = kwargs.get('listing')
self.header = kwargs.get('header')
self.detailed = kwargs.get('detailed')
self.extrabutton = kwargs.get('extrabutton')
self.listitems = [i.get_listitem() for i in self.items] if self.items else []
self.index = -1
def onInit(self):
if not self.listitems:
self.index == -1
self.close()
elif len(self.listitems) == 1:
self.index == 0
self.close()
self.list = self.getControl(C_LIST_DETAIL)
self.getControl(C_LIST_DETAIL).setVisible(self.detailed)
self.getControl(C_LIST_SIMPLE).setVisible(not self.detailed)
self.getControl(C_BUTTON_GET_MORE).setVisible(bool(self.extrabutton))
if self.extrabutton:
self.getControl(C_BUTTON_GET_MORE).setLabel(self.extrabutton)
self.getControl(C_LABEL_HEADER).setLabel(self.header)
self.list.addItems(self.listitems)
self.setFocus(self.list)
def onClick(self, control_id):
if control_id in [C_LIST_SIMPLE, C_LIST_DETAIL]:
self.index = int(self.list.getSelectedPosition())
elif control_id == C_BUTTON_GET_MORE:
self.index = -2
self.close()
def onFocus(self, control_id):
pass
def open(listitems, header, detailed=True, extrabutton=False):
"""
open selectdialog, return index (-1 for closing, -2 for extra button)
"""
xbmc.executebuiltin("Dialog.Close(busydialog)")
w = SelectDialog('DialogSelect.xml', addon.PATH,
listing=listitems,
header=header,
detailed=detailed,
extrabutton=extrabutton)
w.doModal()
return w.index
| lgpl-2.1 | Python |
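Worth noticing in the branch this revert removes: self.index == 0 is a comparison whose result is discarded, not an assignment. A tiny demonstration:
class Holder(object):
    index = -1
h = Holder()
h.index == 0    # evaluates to False and is thrown away; h.index is unchanged
print(h.index)  # -1
h.index = 0     # actual assignment
print(h.index)  # 0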
7a83a9be7e2a986979cc898c3fd3aa3bb49442cc | modify dx model | architecture-building-systems/CEAforArcGIS,architecture-building-systems/CEAforArcGIS | cea/technologies/direct_expansion_units.py | cea/technologies/direct_expansion_units.py | # -*- coding: utf-8 -*-
"""
direct expansion units
"""
from __future__ import division
from scipy.interpolate import interp1d
from math import log, ceil
import pandas as pd
import numpy as np
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
__author__ = "Shanshan Hsieh"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Shanshan Hsieh"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "[email protected]"
__status__ = "Production"
# FIXME: this model is simplified, and required update
PRICE_DX_PER_W = 1.6 #USD FIXME: to be moved to database
# operation costs
def calc_cop_DX(Q_load_W):
cop = 2.3
return cop
def calc_DX(mdot_kgpers, T_sup_K, T_re_K):
if np.isclose(mdot_kgpers, 0.0):
wdot_W = 0
else:
q_chw_W = mdot_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK * (T_re_K - T_sup_K)
cop_DX = calc_cop_DX(q_chw_W)
wdot_W = q_chw_W/cop_DX
return wdot_W
# investment and maintenance costs
def calc_Cinv_DX(Q_design_W):
"""
Assume the same cost as gas boilers.
:type Q_design_W : float
:param Q_design_W: Design Load of Boiler in [W]
:param gV: globalvar.py
:rtype InvCa : float
:returns InvCa: Annualized investment costs in CHF/a including Maintenance Cost
"""
Capex_a = 0
Opex_fixed = 0
if Q_design_W > 0:
InvC = Q_design_W * PRICE_DX_PER_W
Inv_IR = 5 / 100
Inv_LT = 25
Inv_OM = 5 / 100
Capex_a = InvC * (Inv_IR) * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1)
Opex_fixed = Capex_a * Inv_OM
return Capex_a, Opex_fixed | # -*- coding: utf-8 -*-
"""
direct expansion units
"""
from __future__ import division
from scipy.interpolate import interp1d
from math import log, ceil
import pandas as pd
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
__author__ = "Shanshan Hsieh"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Shanshan Hsieh"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "[email protected]"
__status__ = "Production"
# FIXME: this model is simplified, and required update
PRICE_DX_PER_W = 1.373 #USD FIXME: to be moved to database
# operation costs
def calc_cop_DX(Q_load_W):
cop = 2.7
return cop
def calc_DX(mdot_kgpers, T_sup_K, T_re_K):
q_chw_W = mdot_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK * (T_re_K - T_sup_K)
cop_DX = calc_cop_DX(q_chw_W)
wdot_W = q_chw_W/cop_DX
return wdot_W
# investment and maintenance costs
def calc_Cinv_DX(Q_design_W):
"""
Assume the same cost as gas boilers.
:type Q_design_W : float
:param Q_design_W: Design Load of Boiler in [W]
:param gV: globalvar.py
:rtype InvCa : float
:returns InvCa: Annualized investment costs in CHF/a including Maintenance Cost
"""
Capex_a = 0
Opex_fixed = 0
if Q_design_W > 0:
InvC = Q_design_W * PRICE_DX_PER_W
Inv_IR = 5 / 100
Inv_LT = 25
Inv_OM = 5 / 100
Capex_a = InvC * (Inv_IR) * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1)
Opex_fixed = Capex_a * Inv_OM
return Capex_a, Opex_fixed | mit | Python |
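The annualisation in calc_Cinv_DX above is the standard capital-recovery factor. A worked check with illustrative numbers (100 000 invested at the row's 5 % interest over 25 years):
InvC, Inv_IR, Inv_LT, Inv_OM = 100000.0, 0.05, 25, 0.05
crf = Inv_IR * (1 + Inv_IR) ** Inv_LT / ((1 + Inv_IR) ** Inv_LT - 1)
Capex_a = InvC * crf
print(round(crf, 6))               # ~0.070952 per year
print(round(Capex_a, 2))           # ~7095.25 annualised capital cost
print(round(Capex_a * Inv_OM, 2))  # ~354.76 fixed O&M, as Opex_fixed above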
8b359d97e59d759bfd7711c8aacf9abc657fe457 | fix demo | FederatedAI/FATE,FederatedAI/FATE,FederatedAI/FATE | pipeline/demo/pipeline-homo-data-split-demo.py | pipeline/demo/pipeline-homo-data-split-demo.py | from pipeline.component.homo_data_split import HomoDataSplit
from pipeline.backend.config import Backend
from pipeline.backend.config import WorkMode
from pipeline.backend.pipeline import PipeLine
from pipeline.component.dataio import DataIO
from pipeline.component.input import Input
from pipeline.interface.data import Data
guest = 9999
host = 10000
arbiter = 10002
guest_train_data = {"name": "breast_homo_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_homo_host", "namespace": "experiment"}
input_0 = Input(name="train_data")
print ("get input_0's init name {}".format(input_0.name))
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
dataio_0 = DataIO(name="dataio_0")
dataio_0.get_party_instance(role='guest', party_id=guest).algorithm_param(with_label=True, output_format="dense")
dataio_0.get_party_instance(role='host', party_id=host).algorithm_param(with_label=True)
homo_data_split_0 = HomoDataSplit(name="homo_data_split_0", stratified=True, test_size=0.2, validate_size=0.1)
print ("get input_0's name {}".format(input_0.name))
pipeline.add_component(dataio_0, data=Data(data=input_0.data))
pipeline.add_component(homo_data_split_0, data=Data(data=dataio_0.output.data))
pipeline.compile()
pipeline.fit(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
print (pipeline.get_component("dataio_0").get_model_param())
print (pipeline.get_component("homo_data_split_0").summary())
| from pipeline.component.homo_data_split import HomoDataSplit
from pipeline.backend.config import Backend
from pipeline.backend.config import WorkMode
from pipeline.backend.pipeline import PipeLine
from pipeline.component.dataio import DataIO
from pipeline.component.input import Input
from pipeline.interface.data import Data
guest = 9999
host = 10000
arbiter = 10002
guest_train_data = {"name": "breast_homo_guest", "namespace": "experiment"}
host_train_data = {"name": "breast_homo_host", "namespace": "experiment"}
input_0 = Input(name="train_data")
print ("get input_0's init name {}".format(input_0.name))
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host, arbiter=arbiter)
dataio_0 = DataIO(name="dataio_0")
dataio_0.get_party_instance(role='guest', party_id=guest).algorithm_param(with_label=True, output_format="dense")
dataio_0.get_party_instance(role='host', party_id=host).algorithm_param(with_label=True)
homo_data_split_0 = HomoDataSplit(name="homo_data_split_0", stratified=True, test_size=0.2, validate_size=0.1)
print ("get input_0's name {}".format(input_0.name))
pipeline.add_component(dataio_0, data=Data(data=input_0.data))
pipeline.add_component(homo_data_split_0, data=Data(data=dataio_0.output.data))
pipeline.compile()
pipeline.fit(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
# predict
pipeline.predict(backend=Backend.EGGROLL, work_mode=WorkMode.STANDALONE,
feed_dict={input_0:
{"guest": {9999: guest_train_data},
"host": {
10000: host_train_data
}
}
})
print (pipeline.get_component("dataio_0").get_model_param())
print (pipeline.get_component("homo_data_split_0").summary())
| apache-2.0 | Python |
ed48555984886ff5ade23aeb23ad5f85e77e5b69 | fix docs | yuyu2172/chainercv,yuyu2172/chainercv,chainer/chainercv,chainer/chainercv,pfnet/chainercv | chainercv/transforms/image/pca_lighting.py | chainercv/transforms/image/pca_lighting.py | import numpy
def pca_lighting(img, sigma, eigen_value=None, eigen_vector=None):
"""Alter the intensities of input image using PCA.
This is used in training of AlexNet [Krizhevsky]_.
.. [Krizhevsky] Alex Krizhevsky, Ilya Sutskever, Geoffrey E. Hinton. \
ImageNet Classification with Deep Convolutional Neural Networks. \
NIPS 2012.
Args:
image (numpy.ndarray): An image array to be augmented. This is in
CHW format.
sigma (float): Standard deviation of the Gaussian. In AlexNet
[Krizhevsky]_, this value is 10% of the range of intensity
(25.5 if the range is [0, 255]).
eigen_value: (numpy.ndarray): An array of eigen values. The shape
have to be (3,). If it is not specified, the values computed from
ImageNet are used.
eigen_vector: (numpy.ndarray): An array of eigen vectors. The shape
have to be (3, 3). If it is not specified, the vectors computed
from ImageNet are used.
Returns:
An image in CHW format.
"""
if sigma <= 0:
return img
# these values are copied from facebook/fb.resnet.torch
if eigen_value is None:
eigen_value = numpy.array((0.2175, 0.0188, 0.0045))
if eigen_vector is None:
eigen_vector = numpy.array((
(0.4009, -0.814, 0.4203),
(0.7192, -0.0045, -0.6948),
(-0.5675, -0.5808, -0.5836)))
alpha = numpy.random.normal(0, sigma, size=3)
img = img.copy()
img += eigen_vector.dot(eigen_value * alpha).reshape(-1, 1, 1)
return img
| import numpy
def pca_lighting(img, sigma, eigen_value=None, eigen_vector=None):
"""Alter the intensities of input image using PCA.
This is used in training of AlexNet [Krizhevsky]_.
.. [Krizhevsky] Alex Krizhevsky, Ilya Sutskever, Geoffrey E. Hinton. \
ImageNet Classification with Deep Convolutional Neural Networks. \
NIPS 2012.
Args:
image (numpy.ndarray): An image array to be augmented. This is in
CHW format.
sigma (float): Standard deviation of the Gaussian. In AlexNet
[Krizhevsky]_, this value is 10% of the range of intensity
(25.5 if the range is [0, 255]).
eigen_value: (numpy.ndarray): An array of eigen values. The shape
have to be (3,). If it is not specified, the values computed from
ImageNet is used.
eigen_vector: (numpy.ndarray): An array of eigen vectors. The shape
have to be (3, 3). If it is not specified, the vectors computed
from ImageNet is used.
Returns:
An image in CHW format.
"""
if sigma <= 0:
return img
# these values are copied from facebook/fb.resnet.torch
if eigen_value is None:
eigen_value = numpy.array((0.2175, 0.0188, 0.0045))
if eigen_vector is None:
eigen_vector = numpy.array((
(0.4009, -0.814, 0.4203),
(0.7192, -0.0045, -0.6948),
(-0.5675, -0.5808, -0.5836)))
alpha = numpy.random.normal(0, sigma, size=3)
img = img.copy()
img += eigen_vector.dot(eigen_value * alpha).reshape(-1, 1, 1)
return img
| mit | Python |
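A numpy-only sanity check of the perturbation above: one random RGB offset is drawn and broadcast across the whole CHW image:
import numpy
eigen_value = numpy.array((0.2175, 0.0188, 0.0045))
eigen_vector = numpy.array(((0.4009, -0.814, 0.4203),
                            (0.7192, -0.0045, -0.6948),
                            (-0.5675, -0.5808, -0.5836)))
alpha = numpy.random.normal(0, 25.5, size=3)   # sigma = 10% of a [0, 255] range
offset = eigen_vector.dot(eigen_value * alpha).reshape(-1, 1, 1)
img = numpy.zeros((3, 4, 4)) + offset
print(offset.ravel())                # the per-channel shift
print((img[0] == offset[0]).all())   # True: every pixel of a channel moves together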
6f04f1ed35635c08836f1eee67983abf9735f5db | handle more exceptions | AppEnlight/channelstream,AppEnlight/channelstream,AppEnlight/channelstream | channelstream/wsgi_views/error_handlers.py | channelstream/wsgi_views/error_handlers.py | from pyramid.view import exception_view_config
@exception_view_config(context='marshmallow.ValidationError', renderer='json')
def marshmallow_invalid_data(context, request):
request.response.status = 422
return context.messages
@exception_view_config(context='itsdangerous.BadTimeSignature', renderer='json')
@exception_view_config(context='itsdangerous.BadSignature', renderer='json')
def itsdangerous_signer_error(context, request):
request.response.status = 401
return {'request': 'Bad Signature'}
| from pyramid.view import exception_view_config
@exception_view_config(context='marshmallow.ValidationError', renderer='json')
def marshmallow_invalid_data(context, request):
request.response.status = 422
return context.messages
@exception_view_config(context='itsdangerous.BadTimeSignature', renderer='json')
def itsdangerous_signer_error(context, request):
request.response.status = 401
return {'request': 'Bad Signature'}
| bsd-3-clause | Python |
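The fix stacks a second exception_view_config on the same view. Decorator stacking is plain Python -- applied bottom-up, both registrations end at one function. A framework-free sketch (register here is a made-up stand-in for Pyramid's machinery):
handlers = {}
def register(context):
    def decorator(view):
        handlers[context] = view   # each stacked decorator adds one mapping
        return view
    return decorator
@register("BadTimeSignature")
@register("BadSignature")
def signer_error():
    return {"request": "Bad Signature"}
print(sorted(handlers))                                          # ['BadSignature', 'BadTimeSignature']
print(handlers["BadSignature"] is handlers["BadTimeSignature"])  # True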
155fd9ae952a4eba53521739589d5e3462108ed2 | remove default statement per Gunther's comment | Gustavo6046/ChatterBot,Reinaesaya/OUIRL-ChatBot,davizucon/ChatterBot,gunthercox/ChatterBot,vkosuri/ChatterBot,maclogan/VirtualPenPal,Reinaesaya/OUIRL-ChatBot | chatterbot/ext/django_chatterbot/models.py | chatterbot/ext/django_chatterbot/models.py | from django.db import models
class Statement(models.Model):
"""A short (<255) chat message, tweet, forum post, etc"""
text = models.CharField(
unique=True,
blank=False,
null=False,
max_length=255
)
def __str__(self):
if len(self.text.strip()) > 60:
return '{}...'.format(self.text[:57])
elif len(self.text.strip()) > 0:
return self.text
return '<empty>'
class Response(models.Model):
"""Connection between a response and the statement that triggered it
Comparble to a ManyToMany "through" table, but without the M2M indexing/relations.
Only the text and number of times it has occurred are currently stored.
Might be useful to store additional features like language, location(s)/region(s),
first created datetime(s), username, user full name, user gender, etc.
A the very least occurrences should be an FK to a meta-data table with this info.
"""
statement = models.ForeignKey(
'Statement',
related_name='in_response_to'
)
response = models.ForeignKey(
'Statement',
related_name='+'
)
unique_together = (('statement', 'response'),)
occurrence = models.PositiveIntegerField(default=0)
def __str__(self):
s = self.statement.text if len(self.statement.text) <= 20 else self.statement.text[:17] + '...'
s += ' => '
s += self.response.text if len(self.response.text) <= 40 else self.response.text[:37] + '...'
return s
| from django.db import models
class Statement(models.Model):
"""A short (<255) chat message, tweet, forum post, etc"""
text = models.CharField(
unique=True,
blank=False,
null=False,
default='<empty>',
max_length=255
)
def __str__(self):
if len(self.text.strip()) > 60:
return '{}...'.format(self.text[:57])
elif len(self.text.strip()) > 0:
return self.text
return '<empty>'
class Response(models.Model):
"""Connection between a response and the statement that triggered it
Comparble to a ManyToMany "through" table, but without the M2M indexing/relations.
Only the text and number of times it has occurred are currently stored.
Might be useful to store additional features like language, location(s)/region(s),
first created datetime(s), username, user full name, user gender, etc.
A the very least occurrences should be an FK to a meta-data table with this info.
"""
statement = models.ForeignKey(
'Statement',
related_name='in_response_to'
)
response = models.ForeignKey(
'Statement',
related_name='+'
)
unique_together = (('statement', 'response'),)
occurrence = models.PositiveIntegerField(default=0)
def __str__(self):
s = self.statement.text if len(self.statement.text) <= 20 else self.statement.text[:17] + '...'
s += ' => '
s += self.response.text if len(self.response.text) <= 40 else self.response.text[:37] + '...'
return s
| bsd-3-clause | Python |
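The Statement.__str__ truncation rule above, extracted so it can be exercised without Django; note the blank case already yields '<empty>', the value the removed column default duplicated:
def preview(text):
    if len(text.strip()) > 60:
        return '{}...'.format(text[:57])
    elif len(text.strip()) > 0:
        return text
    return '<empty>'
print(preview('a' * 80))       # first 57 characters plus '...'
print(len(preview('a' * 80)))  # 60
print(preview('   '))          # '<empty>'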
4d81c88627b0f71c765112b9a814fe876239bcc5 | Print stats for constant points to. | plast-lab/llvm-datalog,plast-lab/cclyzer | src/main/copper/analysis.py | src/main/copper/analysis.py | import os
from .project import ProjectManager
from .analysis_steps import *
from .analysis_stats import AnalysisStatisticsBuilder as StatBuilder
class Analysis(object):
def __init__(self, config, projects=ProjectManager()):
self.logger = logging.getLogger(__name__)
self._config = config
self._stats = None
self._pipeline = [
CleaningStep(),
FactGenerationStep(),
DatabaseCreationStep(),
SanityCheckStep(projects.SCHEMA),
LoadProjectStep(projects.SYMBOL_LOOKUP),
LoadProjectStep(projects.CALLGRAPH),
LoadProjectStep(projects.POINTS_TO),
]
@property
def pipeline(self):
return [step.check() for step in self._pipeline]
@property
def stats(self):
# Compute stats if needed
if self._stats is None:
self.compute_stats()
return self._stats
@property
def input_files(self):
return [os.path.abspath(f) for f in self._config.input_files]
@property
def output_directory(self):
return os.path.abspath(self._config.output_directory)
@property
def facts_directory(self):
return os.path.join(self.output_directory, 'facts')
@property
def database_directory(self):
return os.path.join(self.output_directory, 'db')
def load_project(self, project):
LoadProjectStep(project).apply(self)
def run(self):
# Run each step of pipeline
for step in self.pipeline:
step.apply(self)
# Compute stats
self.compute_stats()
def compute_stats(self):
self._stats = (
StatBuilder(self)
.count('instruction')
.count('reachable_function')
.count('callgraph:fn_edge', 'call-graph edges')
.count('var_points_to', 'var-points-to')
.count('constant_points_to', 'constant-points-to')
.count('ptr_points_to', 'ptr-points-to')
.build()
)
| import os
from .project import ProjectManager
from .analysis_steps import *
from .analysis_stats import AnalysisStatisticsBuilder as StatBuilder
class Analysis(object):
def __init__(self, config, projects=ProjectManager()):
self.logger = logging.getLogger(__name__)
self._config = config
self._stats = None
self._pipeline = [
CleaningStep(),
FactGenerationStep(),
DatabaseCreationStep(),
SanityCheckStep(projects.SCHEMA),
LoadProjectStep(projects.SYMBOL_LOOKUP),
LoadProjectStep(projects.CALLGRAPH),
LoadProjectStep(projects.POINTS_TO),
]
@property
def pipeline(self):
return [step.check() for step in self._pipeline]
@property
def stats(self):
# Compute stats if needed
if self._stats is None:
self.compute_stats()
return self._stats
@property
def input_files(self):
return [os.path.abspath(f) for f in self._config.input_files]
@property
def output_directory(self):
return os.path.abspath(self._config.output_directory)
@property
def facts_directory(self):
return os.path.join(self.output_directory, 'facts')
@property
def database_directory(self):
return os.path.join(self.output_directory, 'db')
def load_project(self, project):
LoadProjectStep(project).apply(self)
def run(self):
# Run each step of pipeline
for step in self.pipeline:
step.apply(self)
# Compute stats
self.compute_stats()
def compute_stats(self):
self._stats = (
StatBuilder(self)
.count('instruction')
.count('reachable_function')
.count('callgraph:fn_edge', 'call-graph edges')
.count('var_points_to', 'var-points-to')
.count('ptr_points_to', 'ptr-points-to')
.build()
)
| mit | Python |
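A reduced, runnable sketch of the fluent-builder pattern StatBuilder is used with above (illustrative stand-in, not the project's real API):
class StatsBuilder(object):
    """Illustrative stand-in for StatBuilder; not the project's real class."""
    def __init__(self):
        self._counts = {}
    def count(self, key, label=None):
        self._counts[label or key] = 0   # the real build would query the database
        return self                      # returning self is what enables chaining
    def build(self):
        return self._counts
print(StatsBuilder().count('instruction').count('var_points_to', 'var-points-to').build())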
40fe16d058d18d2384be464ecefed1028edace17 | Fix error on SASL PLAIN authentication | ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd | txircd/modules/ircv3_sasl_plain.py | txircd/modules/ircv3_sasl_plain.py | from txircd.modbase import Module
from base64 import b64decode
class SaslPlainMechanism(Module):
def authenticate(self, user, authentication):
try:
authenticationID, authorizationID, password = b64decode(authentication[0]).split("\0")
except TypeError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
except ValueError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
if "server_sasl_agent" not in self.ircd.servconfig or self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
return self.ircd.module_data_cache["sasl_agent"].authenticate(user, authenticationid=authenticationID, authorizationid=authorizationID, password=password)
# TODO: The rest of this doesn't really make sense until s2s, but we'll return false for now since it's failing
return False
def bindSaslResult(self, user, successFunction, failureFunction):
if "server_sasl_agent" not in self.ircd.servconfig or self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return
self.ircd.module_data_cache["sasl_agent"].bindSaslResult(user, successFunction, failureFunction)
# TODO: server_sasl_agent stuff when s2s
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "sasl_mechanisms" not in self.ircd.module_data_cache:
self.ircd.module_data_cache["sasl_mechanisms"] = {}
self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] = SaslPlainMechanism().hook(self.ircd)
return {}
def cleanup(self):
del self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] | from txircd.modbase import Module
from base64 import b64decode
class SaslPlainMechanism(Module):
def authenticate(self, user, authentication):
try:
authenticationID, authorizationID, password = b64decode(authentication[0]).split("\0")
except TypeError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
except ValueError:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
if self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return False
return self.ircd.module_data_cache["sasl_agent"].authenticate(user, authenticationid=authenticationID, authorizationid=authorizationID, password=password)
# TODO: The rest of this doesn't really make sense until s2s, but we'll return false for now since it's failing
return False
def bindSaslResult(self, user, successFunction, failureFunction):
if self.ircd.servconfig["server_sasl_agent"] == "":
if "sasl_agent" not in self.ircd.module_data_cache:
user.sendMessage(irc.ERR_SASLFAILED, ":SASL authentication failed")
return
self.ircd.module_data_cache["sasl_agent"].bindSaslResult(user, successFunction, failureFunction)
# TODO: server_sasl_agent stuff when s2s
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
if "sasl_mechanisms" not in self.ircd.module_data_cache:
self.ircd.module_data_cache["sasl_mechanisms"] = {}
self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] = SaslPlainMechanism().hook(self.ircd)
return {}
def cleanup(self):
del self.ircd.module_data_cache["sasl_mechanisms"]["PLAIN"] | bsd-3-clause | Python |
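The payload shape parsed by the PLAIN mechanism above: base64 over three NUL-separated fields (variable names follow the row's code; RFC 4616 orders the fields authzid, authcid, passwd). A standalone round trip in Python 3:
from base64 import b64decode, b64encode
payload = b64encode(b"alice\0alice\0hunter2")
first, second, password = b64decode(payload).split(b"\0")
print(first, second, password)  # b'alice' b'alice' b'hunter2'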
026db0e635f0c82e1b24884cb768d53b7fadfc0c | use lots of connections for the pool | SergioChan/Stream-Framework,izhan/Stream-Framework,izhan/Stream-Framework,nikolay-saskovets/Feedly,smuser90/Stream-Framework,Anislav/Stream-Framework,nikolay-saskovets/Feedly,izhan/Stream-Framework,SergioChan/Stream-Framework,smuser90/Stream-Framework,turbolabtech/Stream-Framework,nikolay-saskovets/Feedly,Architizer/Feedly,Anislav/Stream-Framework,turbolabtech/Stream-Framework,SergioChan/Stream-Framework,izhan/Stream-Framework,turbolabtech/Stream-Framework,smuser90/Stream-Framework,Architizer/Feedly,Anislav/Stream-Framework,smuser90/Stream-Framework,turbolabtech/Stream-Framework,nikolay-saskovets/Feedly,Architizer/Feedly,Anislav/Stream-Framework,SergioChan/Stream-Framework | feedly/storage/cassandra/connection.py | feedly/storage/cassandra/connection.py | from pycassa.pool import ConnectionPool
def get_cassandra_connection(keyspace_name, hosts):
if get_cassandra_connection._connection is None:
get_cassandra_connection._connection = ConnectionPool(
keyspace_name, hosts, pool_size=len(hosts)*24,
prefill=False, timeout=10)
return get_cassandra_connection._connection
get_cassandra_connection._connection = None
| from pycassa.pool import ConnectionPool
def get_cassandra_connection(keyspace_name, hosts):
if get_cassandra_connection._connection is None:
get_cassandra_connection._connection = ConnectionPool(
keyspace_name, hosts)
return get_cassandra_connection._connection
get_cassandra_connection._connection = None
| bsd-3-clause | Python |
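The caching above is the function-attribute memoisation idiom; a dependency-free reduction showing the pool is built once (a tuple stands in for pycassa's ConnectionPool):
def get_conn(hosts):
    if get_conn._connection is None:
        get_conn._connection = tuple(hosts)  # constructed on the first call only
    return get_conn._connection
get_conn._connection = None
first = get_conn(['host1', 'host2'])
second = get_conn(['host3'])    # arguments are ignored once the cache is filled
print(first is second, first)   # True ('host1', 'host2')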
fb4b9e4570c4053204304fc934d0fe816d4c056d | add new split dictionary and dependencies | pagarme/pagarme-python | tests/resources/dictionaries/transaction_dictionary.py | tests/resources/dictionaries/transaction_dictionary.py | # -*- coding: utf-8 -*-
from tests.resources.dictionaries import card_dictionary
from tests.resources.dictionaries import customer_dictionary
from tests.resources.dictionaries import recipient_dictionary
from tests.resources import pagarme_test
from pagarme import recipient
BOLETO_TRANSACTION = {'amount': '10000', 'payment_method': 'boleto'}
CALCULATE_INTALLMENTS_AMOUNT = {'amount': '10000', 'free_installments': "1", 'interest_rate': '13',
'max_installments': '12'}
PAY_BOLETO = {'status': 'paid'}
REFUNDED_OR_CAPTURE_TRANSACTION = {'amount': '10000'}
RECIPIENT = recipient.create(recipient_dictionary.RECIPIENT_DICTIONARY)
SPLIT_RULE_PERCENTAGE = {'recipient_id': RECIPIENT['id'], 'percentage': 100, 'liable': 'true',
'charge_processing_fee': 'true'}
BOLETO_TRANSACTION_SPLIT = {'amount': BOLETO_TRANSACTION['amount'], 'payment_method': BOLETO_TRANSACTION['payment_method'],
'split_rules':[SPLIT_RULE_PERCENTAGE]}
INVALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount': '10000',
'card_number': card_dictionary.INVALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.INVALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.INVALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.INVALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_CAPTURE_FALSE_DICTIONARY = {'amount': '10000', 'capture': 'false',
'card_number': card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount': '10000',
'card_number': card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION__WITH_POSTBACK_DICTIONARY = {'amount': '10000',
'card_number': card_dictionary.VALID_CARD_DICTIONARY['card_number'], 'postback_url': pagarme_test.create_postback_url(),
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv': card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date': card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
| # -*- coding: utf-8 -*-
from tests.resources.dictionaries import card_dictionary
from tests.resources.dictionaries import customer_dictionary
from tests.resources import pagarme_test
BOLETO_TRANSACTION = {'amount': '10000', 'payment_method': 'boleto'}
CALCULATE_INTALLMENTS_AMOUNT = {'amount': '10000', 'free_installments': "1", 'interest_rate': '13',
'max_installments': '12'}
PAY_BOLETO = {'status':'paid'}
REFUNDED_OR_CAPTURE_TRANSACTION = {'amount':'10000'}
INVALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount':'10000',
'card_number':card_dictionary.INVALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.INVALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.INVALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.INVALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_CAPTURE_FALSE_DICTIONARY = {'amount':'10000', 'capture':'false',
'card_number':card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name':card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION_DICTIONARY = {'amount':'10000',
'card_number':card_dictionary.VALID_CARD_DICTIONARY['card_number'],
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY}
VALID_CREDIT_CARD_TRANSACTION__WITH_POSTBACK_DICTIONARY = {'amount':'10000',
'card_number':card_dictionary.VALID_CARD_DICTIONARY['card_number'], 'postback_url':pagarme_test.create_postback_url(),
'card_holder_name': card_dictionary.VALID_CARD_DICTIONARY['card_holder_name'],
'card_cvv':card_dictionary.VALID_CARD_DICTIONARY['card_cvv'],
'card_expiration_date':card_dictionary.VALID_CARD_DICTIONARY['card_expiration_date'],
'customer': customer_dictionary.CUSTOMER_DICTIONARY} | mit | Python |
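One property of the dictionary composition above worth knowing: embedding a dict (or a list of split rules) stores a reference, not a copy. A small check with made-up values:
CARD = {'card_number': '4111111111111111'}
TRANSACTION = {'amount': '10000', 'card_number': CARD['card_number']}
SPLIT_RULE = {'percentage': 100, 'liable': 'true'}
TRANSACTION_SPLIT = dict(TRANSACTION, split_rules=[SPLIT_RULE])
SPLIT_RULE['percentage'] = 50               # mutating the shared rule...
print(TRANSACTION_SPLIT['split_rules'][0])  # ...is visible through the transaction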
1bccf48e6e142e6c62374dd9d7dc94330f15c650 | Update ipc_lista1.3.py | any1m1c/ipc20161 | lista1/ipc_lista1.3.py | lista1/ipc_lista1.3.py | #ipc_lista1.3
#Professor: Jucimar Junior
#Any Mendes Carvalho - 161531004
#
#
#
#
#Faça um programa que peça dois números e imprima a soma.
number1 = input("Digite o primeiro: ")
number2 = input("Digite o segundo número: ")
print(number1+number2)
| #ipc_lista1.3
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que peça dois números e imprima a soma.
number1 = input("Digite o primeiro: ")
number2 = input("Digite o segundo número: ")
print(number1+number2)
| apache-2.0 | Python |
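Note on this exercise row: it is Python 2, where input() evaluates the typed text, so number1+number2 is numeric addition; a Python 3 version must convert explicitly (stand-in literals replace the interactive input calls):
number1 = int("4")    # stand-in for int(input("Digite o primeiro: "))
number2 = int("38")   # stand-in for int(input("Digite o segundo número: "))
print(number1 + number2)  # 42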
4e6fc94fde8eace1b461eba59dc4a56611664877 | Update ipc_lista1.7.py | any1m1c/ipc20161 | lista1/ipc_lista1.7.py | lista1/ipc_lista1.7.py | #ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: ")
| #ipc_lista1.7
#Professor: Jucimar Junior
#Any Mendes Carvalho
#
#
#
#
#Faça um programa que calcule a área de um quadrado, em seguida mostre o dobro desta área para o #usuário.
altura = input("Digite a altura do quadrado em metros: "
| apache-2.0 | Python |
950e9f82be8b3a02ce96db47061cf828da231be9 | Update ipc_lista1.8.py | any1m1c/ipc20161 | lista1/ipc_lista1.8.py | lista1/ipc_lista1.8.py | #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input("Entre
| #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input
| apache-2.0 | Python |
26c781807937038ec2c4fbfd4413ae2c60decd1b | add stdint.h for c++ default header include. | aceway/cppite,aceway/cppite,aceway/cppite | src/py/cpp_fragment_tmpl.py | src/py/cpp_fragment_tmpl.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
hpp_tmpl="""#ifndef __FRAGMENT_HPP__
#define __FRAGMENT_HPP__
#include <string>
#include <vector>
#include <map>
#include <list>
// linux int type define; should be remore/add by system dependent in the future version.
#include <stdint.h>
{includes}
void fragment_container();
#endif
"""
cpp_tmpl="""#include "{head_file}"
#include <iostream>
#include <stdio.h>
void fragment_container()
{{
// tmp code begin
{tmp_cpp}
// tmp code end
}}
"""
| #!/usr/bin/env python
# -*- coding:utf-8 -*-
hpp_tmpl="""#ifndef __FRAGMENT_HPP__
#define __FRAGMENT_HPP__
#include <string>
#include <vector>
#include <map>
#include <list>
{includes}
void fragment_container();
#endif
"""
cpp_tmpl="""#include "{head_file}"
#include <iostream>
#include <stdio.h>
void fragment_container()
{{
// tmp code begin
{tmp_cpp}
// tmp code end
}}
"""
| mit | Python |
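How templates like the ones above are meant to be filled: str.format treats doubled braces as literals, so the C++ block syntax survives substitution. A minimal demonstration using the row's own placeholders:
cpp_demo = ('#include "{head_file}"\n'
            'void fragment_container()\n'
            '{{\n'
            '{tmp_cpp}\n'
            '}}\n')
print(cpp_demo.format(head_file='fragment.hpp', tmp_cpp='    printf("hi");'))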
5e21c7d0fa46e2b290368533cc6dc741b1d366e2 | correct src path in settings | Victory/clicker-me-bliss,Victory/clicker-me-bliss,Victory/clicker-me-bliss,Victory/clicker-me-bliss | functional-tests/clickerft/settings.py | functional-tests/clickerft/settings.py | from os.path import dirname, realpath
BASEDIR = dirname(dirname(dirname(realpath(__file__))))
HOME = "file://" + BASEDIR + "/src/"
| import os
BASEDIR = os.path.dirname(os.getcwd())
HOME = "file://" + BASEDIR + "/src/"
| mit | Python |
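What the new settings line computes: each dirname strips one path component, so three of them climb from the settings file to the repository root (illustrative path in place of realpath(__file__)):
from os.path import dirname
p = '/repo/functional-tests/clickerft/settings.py'   # hypothetical realpath(__file__)
print(dirname(p))                     # /repo/functional-tests/clickerft
print(dirname(dirname(p)))            # /repo/functional-tests
print(dirname(dirname(dirname(p))))   # /repo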
54563933a265a7c70adce3996d0a31eb9c915203 | Use kwarg normally in piratepad.controllers.Form | hifly/OpenUpgrade,ingadhoc/odoo,MarcosCommunity/odoo,NeovaHealth/odoo,odootr/odoo,rahuldhote/odoo,Ernesto99/odoo,OpenPymeMx/OCB,kittiu/odoo,syci/OCB,nhomar/odoo-mirror,QianBIG/odoo,fossoult/odoo,Codefans-fan/odoo,Anislav/Stream-Framework and further repositories as listed in the source row (the field below is reproduced as extracted, joined across the original hard-wrapped lines): eino-makitalo/odoo,Anislav/Stream-Framework,nikolay-saskovets/Feedly,smuser90/Stream-Framework,izhan/Stream-Framework,SergioChan/Stream-Framework,turbolabtech/Stream-Framework,Architizer/Feedly,... hifly/OpenUpgrade,guewen/OpenUpgrade,sebalix/OpenUpgrade,pedrobaeza/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,csrocha/OpenUpgrade,florian-dacosta/OpenUpgrade,bealdav/OpenUpgrade,hbrunn/OpenUpgrade,blaggacao/OpenUpgrade,damdam-s/OpenUpgrade,bwrsandman/OpenUpgrade,savoirfairelinux/OpenUpgrade,grap/OpenUpgrade,ccomb/OpenUpgrade,bguillot/OpenUpgrade,arthru/OpenUpgrade,Endika/OpenUpgrade,kirca/OpenUpgrade,mvaled/OpenUpgrade,0k/OpenUpgrade,florentx/OpenUpgrade,alexcuellar/odoo,nhomar/odoo,pedrobaeza/odoo,ingadhoc/odoo,kittiu/odoo,nitinitprof/odoo,ovnicraft/odoo,JGarcia-Panach/odoo,syci/OCB,credativUK/OCB,abstract-open-solutions/OCB,grap/OCB,factorlibre/OCB,markeTIC/OCB,sysadminmatmoz/OCB,OpenPymeMx/OCB,addition-it-solutions/project-all,Elico-Corp/odoo_OCB,bplancher/odoo,highco-groupe/odoo,storm-computers/odoo,cdrooom/odoo,odoousers2014/odoo,jeasoft/odoo,JCA-Developpement/Odoo,TRESCLOUD/odoopub,jolevq/odoopub,OSSESAC/odoopubarquiluz,nhomar/odoo-mirror,cpyou/odoo,srimai/odoo,jaxkodex/odoo,cysnake4713/odoo,FlorianLudwig/odoo,funkring/fdoo,sadleader/odoo,gsmartway/odoo,shingonoide/odoo,joariasl/odoo,hip-odoo/odoo,ojengwa/odoo,dalegregory/odoo,takis/odoo,steedos/odoo,apanju/GMIO_Odoo,apanju/odoo,papouso/odoo,dfang/odoo,elmerdpadilla/iv,oliverhr/odoo,mkieszek/odoo,VitalPet/odoo,CopeX/odoo,rgeleta/odoo,omprakasha/odoo,Daniel-CA/odoo,avoinsystems/odoo,deKupini/erp,agrista/odoo-saas,laslabs/odoo,luiseduardohdbackup/odoo,mlaitinen/odoo,fuhongliang/odoo,idncom/odoo,goliveirab/odoo,havt/odoo,mszewczy/odoo,javierTerry/odoo,Ichag/odoo,gavin-feng/odoo,windedge/odoo,tvibliani/odoo,minhtuancn/odoo,juanalfonsopr/odoo,leoliujie/odoo,stonegithubs/odoo,shaufi/odoo,shaufi10/odoo,x111ong/odoo,AuyaJackie/odoo,xzYue/odoo,jusdng/odoo,sinbazhou/odoo,srsman/odoo,waytai/odoo,xujb/odoo,spadae22/odoo,n0m4dz/odoo,hubsaysnuaa/odoo,alqfahad/odoo,ramitalat/odoo,lgscofield/odoo,salaria/odoo,fjbatresv/odoo,dgzurita/odoo,RafaelTorrealba/odoo,zchking/odoo,hoatle/odoo,wangjun/odoo,jiachenning/odoo,jiangzhixiao/odoo,chiragjogi/odoo,gorjuce/odoo,ecosoft-odoo/odoo,slevenhagen/odoo,slevenhagen/odoo-npg,feroda/odoo,lsinfo/odoo,erkrishna9/odoo,odootr/odoo,naousse/odoo,kifcaliph/odoo,matrixise/odoo,rdeheele/odoo,CubicERP/odoo,MarcosCommunity/odoo,Endika/odoo,Antiun/odoo,Danisan/odoo-1,sergio-incaser/odoo,sve-odoo/odoo,tvtsoft/odoo8,tvibliani/odoo,tvibli (field truncated in the source extract)
ani/odoo,ubic135/odoo-design,christophlsa/odoo,takis/odoo,ubic135/odoo-design,kittiu/odoo,SAM-IT-SA/odoo,leorochael/odoo,credativUK/OCB,matrixise/odoo,vnsofthe/odoo,Grirrane/odoo,savoirfairelinux/OpenUpgrade,bwrsandman/OpenUpgrade,nagyistoce/odoo-dev-odoo,osvalr/odoo,fuhongliang/odoo,oihane/odoo,hubsaysnuaa/odoo,odootr/odoo,OpenUpgrade/OpenUpgrade,abstract-open-solutions/OCB,diagramsoftware/odoo,PongPi/isl-odoo,funkring/fdoo,NeovaHealth/odoo,rubencabrera/odoo,frouty/odoo_oph,oihane/odoo,funkring/fdoo,aviciimaxwell/odoo,idncom/odoo,glovebx/odoo,camptocamp/ngo-addons-backport,sv-dev1/odoo,Maspear/odoo,incaser/odoo-odoo,BT-rmartin/odoo,joariasl/odoo,OpenUpgrade-dev/OpenUpgrade,diagramsoftware/odoo,janocat/odoo,frouty/odoogoeen,sv-dev1/odoo,luistorresm/odoo,ShineFan/odoo,hbrunn/OpenUpgrade,steedos/odoo,bkirui/odoo,naousse/odoo,Antiun/odoo,jpshort/odoo,windedge/odoo,shaufi/odoo,dkubiak789/odoo,oliverhr/odoo,savoirfairelinux/OpenUpgrade,fgesora/odoo,grap/OCB,avoinsystems/odoo,tarzan0820/odoo,BT-fgarbely/odoo,Daniel-CA/odoo,salaria/odoo,brijeshkesariya/odoo,nuuuboo/odoo,credativUK/OCB,hassoon3/odoo,Daniel-CA/odoo,fuselock/odoo,bguillot/OpenUpgrade,steedos/odoo,juanalfonsopr/odoo,highco-groupe/odoo,papouso/odoo,apanju/odoo,VielSoft/odoo,KontorConsulting/odoo,odoo-turkiye/odoo,slevenhagen/odoo,odoo-turkiye/odoo,srsman/odoo,bplancher/odoo,alexteodor/odoo,hubsaysnuaa/odoo,abenzbiria/clients_odoo,OpenUpgrade-dev/OpenUpgrade,csrocha/OpenUpgrade,mvaled/OpenUpgrade,Endika/OpenUpgrade,kirca/OpenUpgrade,christophlsa/odoo,Endika/OpenUpgrade,Eric-Zhong/odoo,pplatek/odoo,papouso/odoo,realsaiko/odoo,QianBIG/odoo,florentx/OpenUpgrade,vrenaville/ngo-addons-backport,Eric-Zhong/odoo,poljeff/odoo,jesramirez/odoo,jfpla/odoo,hifly/OpenUpgrade,Daniel-CA/odoo,numerigraphe/odoo,tvtsoft/odoo8,CatsAndDogsbvba/odoo,srimai/odoo,rschnapka/odoo,PongPi/isl-odoo,abenzbiria/clients_odoo,alexteodor/odoo,ehirt/odoo,optima-ict/odoo,ramitalat/odoo,ThinkOpen-Solutions/odoo,thanhacun/odoo,colinnewell/odoo,KontorConsulting/odoo,hopeall/odoo,nitinitprof/odoo,jolevq/odoopub,bplancher/odoo,oasiswork/odoo,leoliujie/odoo,Endika/odoo,xujb/odoo,lombritz/odoo,lsinfo/odoo,Drooids/odoo,apocalypsebg/odoo,naousse/odoo,nagyistoce/odoo-dev-odoo,feroda/odoo,alexteodor/odoo,tinkhaven-organization/odoo,hifly/OpenUpgrade,AuyaJackie/odoo,csrocha/OpenUpgrade,virgree/odoo,lsinfo/odoo,xujb/odoo,klunwebale/odoo,vnsofthe/odoo,draugiskisprendimai/odoo,BT-astauder/odoo,grap/OpenUpgrade,ThinkOpen-Solutions/odoo,poljeff/odoo,virgree/odoo,sinbazhou/odoo,ShineFan/odoo,laslabs/odoo,wangjun/odoo,savoirfairelinux/odoo,sebalix/OpenUpgrade,apanju/odoo,jiachenning/odoo,Elico-Corp/odoo_OCB,hifly/OpenUpgrade,virgree/odoo,gsmartway/odoo,gsmartway/odoo,FlorianLudwig/odoo,ClearCorp-dev/odoo,jiachenning/odoo,bealdav/OpenUpgrade,SerpentCS/odoo,massot/odoo,Nowheresly/odoo,deKupini/erp,AuyaJackie/odoo,waytai/odoo,salaria/odoo,florian-dacosta/OpenUpgrade,slevenhagen/odoo-npg,steedos/odoo,sysadminmatmoz/OCB,gdgellatly/OCB1,rgeleta/odoo,oasiswork/odoo,KontorConsulting/odoo,sinbazhou/odoo,hoatle/odoo,inspyration/odoo,kittiu/odoo,cloud9UG/odoo,tarzan0820/odoo,markeTIC/OCB,rowemoore/odoo,abenzbiria/clients_odoo,FlorianLudwig/odoo,hanicker/odoo,markeTIC/OCB,tvtsoft/odoo8,sinbazhou/odoo,syci/OCB,luistorresm/odoo,JCA-Developpement/Odoo,csrocha/OpenUpgrade,rschnapka/odoo,frouty/odoo_oph,tangyiyong/odoo,rgeleta/odoo,ramitalat/odoo,xzYue/odoo,ujjwalwahi/odoo,Ernesto99/odoo,collex100/odoo,matrixise/odoo,OpusVL/odoo,doomsterinc/odoo,odooindia/odoo,guewen/OpenUpgrade,damdam-s/OpenUpgrade,dariemp
/odoo,JCA-Developpement/Odoo,cloud9UG/odoo,AuyaJackie/odoo,datenbetrieb/odoo,tvtsoft/odoo8,bakhtout/odoo-educ,frouty/odoo_oph,realsaiko/odoo,collex100/odoo,avoinsystems/odoo,NL66278/OCB,Endika/odoo,christophlsa/odoo,nhomar/odoo-mirror,rschnapka/odoo,n0m4dz/odoo,Ernesto99/odoo,fjbatresv/odoo,cloud9UG/odoo,colinnewell/odoo,QianBIG/odoo,doomsterinc/odoo,pedrobaeza/OpenUpgrade,credativUK/OCB,fossoult/odoo,sve-odoo/odoo,alexcuellar/odoo,collex100/odoo,storm-computers/odoo,waytai/odoo,fevxie/odoo,ygol/odoo,lightcn/odoo,CopeX/odoo,shaufi10/odoo,srsman/odoo,mustafat/odoo-1,fuselock/odoo,Bachaco-ve/odoo,jusdng/odoo,slevenhagen/odoo-npg,lsinfo/odoo,damdam-s/OpenUpgrade,Adel-Magebinary/odoo,naousse/odoo,vrenaville/ngo-addons-backport,Danisan/odoo-1,sebalix/OpenUpgrade,dezynetechnologies/odoo,waytai/odoo,patmcb/odoo,rowemoore/odoo,oliverhr/odoo,lightcn/odoo,n0m4dz/odoo,tinkhaven-organization/odoo,tarzan0820/odoo,srsman/odoo,hubsaysnuaa/odoo,leorochael/odoo,laslabs/odoo,charbeljc/OCB,numerigraphe/odoo,janocat/odoo,cpyou/odoo,credativUK/OCB,rowemoore/odoo,havt/odoo,cpyou/odoo,draugiskisprendimai/odoo,wangjun/odoo,feroda/odoo,OpenPymeMx/OCB,draugiskisprendimai/odoo,abstract-open-solutions/OCB,slevenhagen/odoo-npg,Ernesto99/odoo,grap/OCB,tangyiyong/odoo,lgscofield/odoo,hanicker/odoo,sadleader/odoo,blaggacao/OpenUpgrade,diagramsoftware/odoo,ygol/odoo,funkring/fdoo,leoliujie/odoo,jolevq/odoopub,kittiu/odoo,jfpla/odoo,jaxkodex/odoo,juanalfonsopr/odoo,fuselock/odoo,ingadhoc/odoo,Adel-Magebinary/odoo,Codefans-fan/odoo,Nick-OpusVL/odoo,fjbatresv/odoo,fossoult/odoo,tarzan0820/odoo,jaxkodex/odoo,bakhtout/odoo-educ,factorlibre/OCB,hanicker/odoo,vrenaville/ngo-addons-backport,dsfsdgsbngfggb/odoo,minhtuancn/odoo,takis/odoo,numerigraphe/odoo,Antiun/odoo,microcom/odoo,gvb/odoo,ThinkOpen-Solutions/odoo,CatsAndDogsbvba/odoo,ccomb/OpenUpgrade,ApuliaSoftware/odoo,luiseduardohdbackup/odoo,cedk/odoo,dkubiak789/odoo,jiangzhixiao/odoo,ClearCorp-dev/odoo,zchking/odoo,osvalr/odoo,provaleks/o8,aviciimaxwell/odoo,chiragjogi/odoo,xujb/odoo,BT-ojossen/odoo,tinkerthaler/odoo,mustafat/odoo-1,incaser/odoo-odoo,frouty/odoogoeen,ubic135/odoo-design,mvaled/OpenUpgrade,rubencabrera/odoo,hubsaysnuaa/odoo,christophlsa/odoo,florentx/OpenUpgrade,goliveirab/odoo,RafaelTorrealba/odoo,dkubiak789/odoo,joariasl/odoo,synconics/odoo,ramadhane/odoo,takis/odoo,provaleks/o8,JGarcia-Panach/odoo,colinnewell/odoo,makinacorpus/odoo,dllsf/odootest,TRESCLOUD/odoopub,cpyou/odoo,microcom/odoo,Kilhog/odoo,arthru/OpenUpgrade,nuuuboo/odoo,rdeheele/odoo,mszewczy/odoo,guewen/OpenUpgrade,doomsterinc/odoo,storm-computers/odoo,lsinfo/odoo,nuuuboo/odoo,bguillot/OpenUpgrade,joariasl/odoo,mvaled/OpenUpgrade,goliveirab/odoo,damdam-s/OpenUpgrade,sadleader/odoo,ccomb/OpenUpgrade,numerigraphe/odoo,hoatle/odoo,xzYue/odoo,fossoult/odoo,frouty/odoo_oph,bobisme/odoo,cysnake4713/odoo,nexiles/odoo,oihane/odoo,QianBIG/odoo,lombritz/odoo,incaser/odoo-odoo,ApuliaSoftware/odoo,luistorresm/odoo,eino-makitalo/odoo,Maspear/odoo,bwrsandman/OpenUpgrade,0k/odoo,erkrishna9/odoo,oasiswork/odoo,eino-makitalo/odoo,goliveirab/odoo,salaria/odoo,Endika/odoo,demon-ru/iml-crm,kybriainfotech/iSocioCRM,GauravSahu/odoo,ojengwa/odoo,CubicERP/odoo,charbeljc/OCB,ihsanudin/odoo,chiragjogi/odoo,nexiles/odoo,collex100/odoo,slevenhagen/odoo,mustafat/odoo-1,lgscofield/odoo,xujb/odoo,Grirrane/odoo,Gitlab11/odoo,christophlsa/odoo,incaser/odoo-odoo,Maspear/odoo,mkieszek/odoo,poljeff/odoo,alqfahad/odoo,BT-ojossen/odoo,chiragjogi/odoo,inspyration/odoo,n0m4dz/odoo,luiseduardohdbackup/odoo,vrenaville/ngo-addons-backpo
rt,MarcosCommunity/odoo,rdeheele/odoo,oihane/odoo,jiangzhixiao/odoo,provaleks/o8,glovebx/odoo,oasiswork/odoo,Drooids/odoo,lombritz/odoo,JonathanStein/odoo,Nick-OpusVL/odoo,leorochael/odoo,hanicker/odoo,tvtsoft/odoo8,lgscofield/odoo,OpenUpgrade/OpenUpgrade,cysnake4713/odoo,tinkerthaler/odoo,guerrerocarlos/odoo,feroda/odoo,ihsanudin/odoo,prospwro/odoo,andreparames/odoo,bakhtout/odoo-educ,kybriainfotech/iSocioCRM,gorjuce/odoo,dllsf/odootest,CubicERP/odoo,hoatle/odoo,Nowheresly/odoo,omprakasha/odoo,doomsterinc/odoo,christophlsa/odoo,shaufi10/odoo,optima-ict/odoo,ehirt/odoo,spadae22/odoo,aviciimaxwell/odoo,dalegregory/odoo,jiachenning/odoo,luistorresm/odoo,joariasl/odoo,optima-ict/odoo,arthru/OpenUpgrade,dezynetechnologies/odoo,fevxie/odoo,sinbazhou/odoo,apanju/GMIO_Odoo,brijeshkesariya/odoo,fevxie/odoo,odootr/odoo,OpenUpgrade/OpenUpgrade,VielSoft/odoo,dgzurita/odoo,ChanduERP/odoo,rubencabrera/odoo,Elico-Corp/odoo_OCB,abenzbiria/clients_odoo,demon-ru/iml-crm,sadleader/odoo,ShineFan/odoo,dalegregory/odoo,naousse/odoo,rschnapka/odoo,javierTerry/odoo,stephen144/odoo,bplancher/odoo,storm-computers/odoo,elmerdpadilla/iv,Danisan/odoo-1,lightcn/odoo,agrista/odoo-saas,sysadminmatmoz/OCB,Eric-Zhong/odoo,gdgellatly/OCB1,mvaled/OpenUpgrade,nhomar/odoo-mirror,avoinsystems/odoo,mmbtba/odoo,factorlibre/OCB,andreparames/odoo,guerrerocarlos/odoo,abdellatifkarroum/odoo,juanalfonsopr/odoo,draugiskisprendimai/odoo,jeasoft/odoo,guerrerocarlos/odoo,VielSoft/odoo,brijeshkesariya/odoo,doomsterinc/odoo,cdrooom/odoo,bakhtout/odoo-educ,ehirt/odoo,ramadhane/odoo,takis/odoo,sve-odoo/odoo,doomsterinc/odoo,CopeX/odoo,shaufi10/odoo,thanhacun/odoo,factorlibre/OCB,VitalPet/odoo,glovebx/odoo,Eric-Zhong/odoo,stephen144/odoo,Gitlab11/odoo,andreparames/odoo,jpshort/odoo,KontorConsulting/odoo,rahuldhote/odoo,bealdav/OpenUpgrade,wangjun/odoo,cedk/odoo,ecosoft-odoo/odoo,stephen144/odoo,RafaelTorrealba/odoo,ygol/odoo,dariemp/odoo,sergio-incaser/odoo,rahuldhote/odoo,srsman/odoo,datenbetrieb/odoo,tinkerthaler/odoo,tarzan0820/odoo,jeasoft/odoo,hifly/OpenUpgrade,x111ong/odoo,OpenUpgrade/OpenUpgrade,BT-ojossen/odoo,fgesora/odoo,CubicERP/odoo,matrixise/odoo,gorjuce/odoo,nhomar/odoo-mirror,massot/odoo,podemos-info/odoo,andreparames/odoo,dllsf/odootest,elmerdpadilla/iv,hoatle/odoo,savoirfairelinux/OpenUpgrade,charbeljc/OCB,havt/odoo,0k/OpenUpgrade,nuncjo/odoo,guerrerocarlos/odoo,xzYue/odoo,BT-ojossen/odoo,shingonoide/odoo,patmcb/odoo,jesramirez/odoo,Nowheresly/odoo,gorjuce/odoo,Kilhog/odoo,rubencabrera/odoo,cloud9UG/odoo,dsfsdgsbngfggb/odoo,tinkhaven-organization/odoo,slevenhagen/odoo,cedk/odoo,jpshort/odoo,vnsofthe/odoo,JCA-Developpement/Odoo,Danisan/odoo-1,abdellatifkarroum/odoo,mvaled/OpenUpgrade,OpusVL/odoo,brijeshkesariya/odoo,funkring/fdoo,gdgellatly/OCB1,VitalPet/odoo,Bachaco-ve/odoo,grap/OpenUpgrade,n0m4dz/odoo,mustafat/odoo-1,markeTIC/OCB,feroda/odoo,demon-ru/iml-crm,apanju/GMIO_Odoo,Adel-Magebinary/odoo,gdgellatly/OCB1,hifly/OpenUpgrade,credativUK/OCB,BT-rmartin/odoo,dgzurita/odoo,markeTIC/OCB,highco-groupe/odoo,fgesora/odoo,QianBIG/odoo,mmbtba/odoo,shivam1111/odoo,mustafat/odoo-1,ClearCorp-dev/odoo,numerigraphe/odoo,Antiun/odoo,janocat/odoo,windedge/odoo,Nowheresly/odoo,agrista/odoo-saas,csrocha/OpenUpgrade,inspyration/odoo,ThinkOpen-Solutions/odoo,wangjun/odoo,funkring/fdoo,camptocamp/ngo-addons-backport,salaria/odoo,PongPi/isl-odoo,Endika/odoo,dariemp/odoo,nuncjo/odoo,codekaki/odoo,mlaitinen/odoo,ihsanudin/odoo,BT-astauder/odoo,waytai/odoo,camptocamp/ngo-addons-backport,acshan/odoo,tvibliani/odoo,thanhacun/odoo,tangyiyong/odoo,abd
ellatifkarroum/odoo,prospwro/odoo,JGarcia-Panach/odoo,ccomb/OpenUpgrade,juanalfonsopr/odoo,jusdng/odoo,dsfsdgsbngfggb/odoo,mmbtba/odoo,elmerdpadilla/iv,blaggacao/OpenUpgrade,rowemoore/odoo,PongPi/isl-odoo,lightcn/odoo,0k/OpenUpgrade,dgzurita/odoo,lsinfo/odoo,leorochael/odoo,tinkhaven-organization/odoo,incaser/odoo-odoo,lightcn/odoo,syci/OCB,KontorConsulting/odoo,minhtuancn/odoo,leoliujie/odoo,kifcaliph/odoo,joshuajan/odoo,jiangzhixiao/odoo,storm-computers/odoo,Danisan/odoo-1,dariemp/odoo,csrocha/OpenUpgrade,diagramsoftware/odoo,OSSESAC/odoopubarquiluz,ecosoft-odoo/odoo,TRESCLOUD/odoopub,ojengwa/odoo,omprakasha/odoo,Gitlab11/odoo,JonathanStein/odoo,x111ong/odoo,TRESCLOUD/odoopub,dariemp/odoo,optima-ict/odoo,dezynetechnologies/odoo,ubic135/odoo-design,sve-odoo/odoo,dgzurita/odoo,gsmartway/odoo,RafaelTorrealba/odoo,jfpla/odoo,jolevq/odoopub,cedk/odoo,xzYue/odoo,ujjwalwahi/odoo,dfang/odoo,stephen144/odoo,tarzan0820/odoo,ovnicraft/odoo,Codefans-fan/odoo,ThinkOpen-Solutions/odoo,oasiswork/odoo,codekaki/odoo,podemos-info/odoo,xzYue/odoo,odoousers2014/odoo,makinacorpus/odoo,provaleks/o8,shaufi/odoo,frouty/odoo_oph,leoliujie/odoo,alqfahad/odoo,makinacorpus/odoo,ramitalat/odoo,alqfahad/odoo,glovebx/odoo,bguillot/OpenUpgrade,ojengwa/odoo,tvibliani/odoo,rowemoore/odoo,tinkerthaler/odoo,bealdav/OpenUpgrade,Gitlab11/odoo,pedrobaeza/OpenUpgrade,sebalix/OpenUpgrade,grap/OCB,dgzurita/odoo,nexiles/odoo,tangyiyong/odoo,steedos/odoo,alhashash/odoo,apanju/odoo,thanhacun/odoo,KontorConsulting/odoo,ShineFan/odoo,sv-dev1/odoo,Nowheresly/odoo,fuselock/odoo,grap/OCB,colinnewell/odoo,lombritz/odoo,massot/odoo,diagramsoftware/odoo,rubencabrera/odoo,hip-odoo/odoo,jiangzhixiao/odoo,OpusVL/odoo,dsfsdgsbngfggb/odoo,leoliujie/odoo,savoirfairelinux/odoo,Kilhog/odoo,acshan/odoo,florentx/OpenUpgrade,fdvarela/odoo8,hip-odoo/odoo,nitinitprof/odoo,Kilhog/odoo,codekaki/odoo,xzYue/odoo,luiseduardohdbackup/odoo,fuhongliang/odoo,grap/OCB,dfang/odoo,idncom/odoo,CopeX/odoo,FlorianLudwig/odoo,alqfahad/odoo,ApuliaSoftware/odoo,Drooids/odoo,mmbtba/odoo,apanju/GMIO_Odoo,podemos-info/odoo,cedk/odoo,gvb/odoo,jiangzhixiao/odoo,SAM-IT-SA/odoo,andreparames/odoo,Eric-Zhong/odoo,OSSESAC/odoopubarquiluz,vrenaville/ngo-addons-backport,lsinfo/odoo,dsfsdgsbngfggb/odoo,fjbatresv/odoo,CopeX/odoo,OSSESAC/odoopubarquiluz,ovnicraft/odoo,realsaiko/odoo,datenbetrieb/odoo,funkring/fdoo,prospwro/odoo | addons/piratepad/controllers.py | addons/piratepad/controllers.py | from openobject.tools import expose
from openerp.controllers import form
from openerp.utils import rpc, TinyDict
import cherrypy
class Form(form.Form):
_cp_path = "/piratepad/form"
@expose('json', methods=('POST',))
def save(self, pad_name):
params, data = TinyDict.split(cherrypy.session['params'])
ctx = dict(rpc.session.context,
default_res_model=params.model, default_res_id=params.id,
active_id=False, active_ids=[])
pad_link = "http://piratepad.net/"+'-'.join(pad_name.split())
attachment_id = rpc.RPCProxy('ir.attachment').create({
'name': pad_name,
'url': pad_link,
}, ctx)
return {'id': attachment_id, 'name': pad_name, 'url': pad_link}
| from openobject.tools import expose
from openerp.controllers import form
from openerp.utils import rpc, common, TinyDict
import cherrypy
class Form(form.Form):
_cp_path = "/piratepad/form"
@expose('json', methods=('POST',))
def save(self, **kwargs):
params, data = TinyDict.split(cherrypy.session['params'])
pad_name=kwargs.get('pad_name')
ctx = dict(rpc.session.context,
default_res_model=params.model, default_res_id=params.id,
active_id=False, active_ids=[])
pad_link = "http://piratepad.net/"+'-'.join(pad_name.split())
attachment_id = rpc.RPCProxy('ir.attachment').create({
'name': pad_name,
'url': pad_link,
}, ctx)
return {'id': attachment_id, 'name': pad_name, 'url': pad_link}
| agpl-3.0 | Python |
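The fix above narrows the handler signature from `**kwargs` to an explicit `pad_name` argument; the pad URL itself is built by hyphen-joining the whitespace-split name. A minimal standalone sketch of that slug rule (the helper name `pad_url` is illustrative, not part of the addon):

```python
# Illustrative slug rule only; `pad_url` is a hypothetical helper name.
def pad_url(pad_name):
    # Collapse whitespace runs and join the words with hyphens, mirroring
    # "http://piratepad.net/" + '-'.join(pad_name.split()) in the controller.
    return "http://piratepad.net/" + "-".join(pad_name.split())

assert pad_url("project kickoff  notes") == "http://piratepad.net/project-kickoff-notes"
```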
31aa44ef336c497be9f545c9bd4af64aac250748 | Fix remote coverage execution | xfournet/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,allotria/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,semonte/intellij-community,ibinti/intellij-community,da1z/intellij-community,suncycheng/intellij-community,allotria/intellij-community,apixandru/intellij-community,apixandru/intellij-community,da1z/intellij-community,xfournet/intellij-community,signed/intellij-community,da1z/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,FHannes/intellij-community,da1z/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,da1z/intellij-community,apixandru/intellij-community,apixandru/intellij-community,apixandru/intellij-community,FHannes/intellij-community,apixandru/intellij-community,allotria/intellij-community,semonte/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,signed/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,semonte/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,allotria/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,asedunov/intellij-community,da1z/intellij-community,semonte/intellij-community,allotria/intellij-community,asedunov/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,FHannes/intellij-community,ibinti/intellij-community,xfournet/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,ibinti/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,signed/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,ibinti/intellij-community,signed/intellij-community,ibinti/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,da1z/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,asedunov/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,semonte/intellij-community,ibinti/intellij-community,allotria/intellij-community,allotria/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,allotria/intell
ij-community,xfournet/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,allotria/intellij-community,signed/intellij-community,signed/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,signed/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,asedunov/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,signed/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,mglukhikh/intellij-community,signed/intellij-community,allotria/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,allotria/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,signed/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,apixandru/intellij-community,allotria/intellij-community,ibinti/intellij-community,asedunov/intellij-community,signed/intellij-community,xfournet/intellij-community,da1z/intellij-community,vvv1559/intellij-community,semonte/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community | python/helpers/coverage_runner/run_coverage.py | python/helpers/coverage_runner/run_coverage.py | """Coverage.py's main entrypoint."""
import os
import sys
bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH')
if bundled_coverage_path:
sys_path_backup = sys.path
sys.path = [p for p in sys.path if p != bundled_coverage_path]
from coverage.cmdline import main
sys.path = sys_path_backup
else:
from coverage.cmdline import main
coverage_file = os.getenv('PYCHARM_COVERAGE_FILE')
coverage_file = coverage_file[0:-len(".coverage")]
run_cov = os.getenv('PYCHARM_RUN_COVERAGE')
if os.getenv('CREATE_TEMP_COVERAGE_FILE'):
line = 'LOG: PyCharm: File mapping:%s\t%s\n'
import tempfile
(h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage')
print(line%(coverage_file + ".coverage", new_cov_file + ".coverage"))
print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt'))
print(line%(coverage_file + '.xml', new_cov_file + '.xml'))
coverage_file = new_cov_file
if coverage_file:
os.environ['COVERAGE_FILE'] = coverage_file + ".coverage"
if run_cov:
a_file = open(coverage_file + '.syspath.txt', mode='w')
a_file.write(os.getcwd()+"\n")
for path in sys.path: a_file.write(path + "\n")
a_file.close()
argv = []
for arg in sys.argv:
if arg.startswith('-m'):
argv.append('-m')
argv.append(arg[2:])
else:
argv.append(arg)
sys.argv = argv
cwd = os.getcwd()
try:
main()
finally:
if run_cov:
os.chdir(cwd)
main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"]) | """Coverage.py's main entrypoint."""
import os
import sys
bundled_coverage_path = os.getenv('BUNDLED_COVERAGE_PATH')
if bundled_coverage_path:
sys_path_backup = sys.path
sys.path = [p for p in sys.path if p != bundled_coverage_path]
from coverage.cmdline import main
sys.path = sys_path_backup
else:
from coverage.cmdline import main
coverage_file = os.getenv('PYCHARM_COVERAGE_FILE')
coverage_file = coverage_file[0:-len(".coverage")]
run_cov = os.getenv('PYCHARM_RUN_COVERAGE')
if os.getenv('CREATE_TEMP_COVERAGE_FILE'):
line = 'LOG: PyCharm: File mapping:%s\t%s\n'
import tempfile
(h, new_cov_file) = tempfile.mkstemp(prefix='pycharm-coverage')
print(line%(coverage_file + ".coverage", new_cov_file + ".coverage"))
print(line%(coverage_file + '.syspath.txt', new_cov_file + '.syspath.txt'))
print(line%(coverage_file + '.xml', new_cov_file + '.xml'))
coverage_file = new_cov_file + ".cov"
if coverage_file:
os.environ['COVERAGE_FILE'] = coverage_file + ".coverage"
if run_cov:
a_file = open(coverage_file + '.syspath.txt', mode='w')
a_file.write(os.getcwd()+"\n")
for path in sys.path: a_file.write(path + "\n")
a_file.close()
argv = []
for arg in sys.argv:
if arg.startswith('-m'):
argv.append('-m')
argv.append(arg[2:])
else:
argv.append(arg)
sys.argv = argv
cwd = os.getcwd()
try:
main()
finally:
if run_cov:
os.chdir(cwd)
main(["xml", "-o", coverage_file + ".xml", "--ignore-errors"]) | apache-2.0 | Python |
c109728986a3a583fe037780c88bdaa458e663c4 | Bump 2.1.1 | appium/python-client,appium/python-client | appium/version.py | appium/version.py | version = '2.1.1'
| version = '2.1.0'
| apache-2.0 | Python |
05140304c1ef08e7e291eec92de4091320bdfc0e | Add acceleration to example | SpotlightKid/micropython-stm-lib | encoder/examples/encoder_lcd.py | encoder/examples/encoder_lcd.py | # -*- coding: utf-8 -*-
"""Read encoder and print position value to LCD."""
from machine import sleep_ms
from pyb_encoder import Encoder
from hd44780 import HD44780
class STM_LCDShield(HD44780):
_default_pins = ('PD2','PD1','PD6','PD5','PD4','PD3')
def main():
lcd.set_string("Value: ")
lastval = 0
while True:
val = enc.value
if lastval != val:
lastpos = val
lcd.set_cursor(6, 0)
for c in "%3i" % val:
lcd.send_byte(c)
enc.cur_accel = max(0, enc.cur_accel - enc.accel)
sleep_ms(50)
if __name__ == '__main__':
lcd = STM_LCDShield()
enc = Encoder('A0', 'A1', max_value=999, accel=5)
main()
| # -*- coding: utf-8 -*-
"""Read encoder and print position value to LCD."""
from machine import sleep_ms
from pyb_encoder import Encoder
from hd44780 import HD44780
class STM_LCDShield(HD44780):
_default_pins = ('PD2','PD1','PD6','PD5','PD4','PD3')
def main():
lcd.set_string("Value: ")
lastval = 0
while True:
val = enc.value
if lastval != val:
lastpos = val
lcd.set_cursor(6, 0)
for c in "%3i" % val:
lcd.send_byte(c)
sleep_ms(50)
if __name__ == '__main__':
lcd = STM_LCDShield()
enc = Encoder('A0', 'A1', max_value=999)
main()
| mit | Python |
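The new `accel=5` encoder and the `max(0, enc.cur_accel - enc.accel)` line implement a simple linear decay of the accumulated acceleration on every 50 ms poll. A standalone illustration of just that decay, assuming the driver bumps `cur_accel` on each detent and scales the step size by it:

```python
# Models only the decay applied on each 50 ms tick; the assumption is that
# the Encoder driver raises `cur_accel` on every detent, so fast turning
# keeps it high while idling drains it back to zero.
cur_accel, accel = 25, 5
for tick in range(7):
    print(tick, cur_accel)
    cur_accel = max(0, cur_accel - accel)  # same expression as in main()
```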
2268ebdc47b1d9221c06622a7b1992cae14013c2 | Test endpoint for the web server | datasciencebr/whistleblower | web/server.py | web/server.py | import http.client
import os
from flask import Flask
from pymongo import MongoClient
MONGO_URL = os.environ.get('MONGO_URL', 'mongodb://mongo:27017/')
MONGO_DATABASE = os.environ.get('MONGO_DATABASE', 'whistleblower')
DATABASE = MongoClient(MONGO_URL)[MONGO_DATABASE]
app = Flask(__name__)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/facebook_webhook', methods=['POST'])
def facebook_webhook():
DATABASE.facebook_webhook.insert(request.form)
return ('', http.client.NO_CONTENT)
| import http.client
import os
from flask import Flask
from pymongo import MongoClient
MONGO_URL = os.environ.get('MONGO_URL', 'mongodb://mongo:27017/')
MONGO_DATABASE = os.environ.get('MONGO_DATABASE', 'whistleblower')
DATABASE = MongoClient(MONGO_URL)[MONGO_DATABASE]
app = Flask(__name__)
@app.route('/facebook_webhook', methods=['POST'])
def facebook_webhook():
DATABASE.facebook_webhook.insert(request.form)
return ('', http.client.NO_CONTENT)
| unlicense | Python |
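With the new `/` route in place, the server can be smoke-tested without deploying it. A sketch using Flask's built-in test client, assuming the module is importable as `web.server`; the MongoClient created at import time connects lazily, so no database is needed to hit `/`:

```python
# Assumes the module is importable as `web.server` (an assumption about the
# project layout); MongoDB is only required by the webhook route.
from web.server import app

with app.test_client() as client:
    response = client.get("/")
    print(response.status_code, response.data)  # 200 b'Hello, World!'
```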
db40a42c2825b157017e6730a2b5c95371bbe598 | Allow user to adjust nyquist freq and freq spacing in cp_utils.py | farr/arfit | arfit/cp_utils.py | arfit/cp_utils.py | import carmcmc as cm
from gatspy.periodic import LombScargleFast
import matplotlib.pyplot as plt
import numpy as np
def csample_from_files(datafile, chainfile, p, q):
data = np.loadtxt(datafile)
times, tind = np.unique(data[:,0], return_index=True)
data = data[tind, :]
chain = np.loadtxt(chainfile)
assert chain.shape[1] == p + q + 5, 'dimension mismatch'
return cm.CarmaSample(data[:,0], data[:,1], data[:,2], None, q=q, trace=chain[:,:-2], loglike=chain[:,-2], logpost=chain[:,-1])
def normalised_lombscargle(ts, ys, dys, oversampling=5, nyquist_factor=3):
model = LombScargleFast().fit(ts, ys, dys)
pers, pows = model.periodogram_auto(oversampling=oversampling, nyquist_factor=nyquist_factor)
fs = 1.0/pers
T = np.max(ts) - np.min(ts)
mu = 1/T*np.trapz(ys, ts)
s2 = 1/T*np.trapz(np.square(ys-mu), ts)
return fs, s2*pows/np.trapz(pows, fs)
def plot_psd_sample_data(sample, oversampling=5, nyquist_factor=3):
psd_low, psd_high, psd_med, fs = sample.plot_power_spectrum(doShow=False)
plt.clf()
plt.loglog(fs, psd_med, '-b', alpha=0.33)
plt.fill_between(fs, psd_low, psd_high, color='b', alpha=0.17)
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
bw = fs[-1] - fs[0]
T = sample.time[-1] - sample.time[0]
s2 = 1/T*np.trapz(np.square(sample.ysig), sample.time)
noise_level = s2/bw
levels = noise_level*np.sqrt(sample.get_samples('measerr_scale'))
plt.axhline(np.median(levels), color='g', alpha=0.33)
plt.fill_between(fs, np.percentile(levels, 84)+0*fs, np.percentile(levels, 16)+0*fs, color='g', alpha=0.17)
plt.loglog(fs, psd, '-r', alpha=0.33)
def plot_psd_sample_draw(sample, loc='upper left', oversampling=5, nyquist_factor=3):
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
ys_draw = sample.predict(sample.time, bestfit='random')[0]
fs, dpsd = normalised_lombscargle(sample.time, ys_draw, sample.ysig, oversampling=oversampling, nyquist_factor=nyquist_factor)
plt.loglog(fs, psd, '-k', label='Data', alpha=0.5)
plt.loglog(fs, dpsd, '-b', label='Prediction', alpha=0.5)
plt.legend(loc=loc)
| import carmcmc as cm
from gatspy.periodic import LombScargleFast
import matplotlib.pyplot as plt
import numpy as np
def csample_from_files(datafile, chainfile, p, q):
data = np.loadtxt(datafile)
times, tind = np.unique(data[:,0], return_index=True)
data = data[tind, :]
chain = np.loadtxt(chainfile)
assert chain.shape[1] == p + q + 5, 'dimension mismatch'
return cm.CarmaSample(data[:,0], data[:,1], data[:,2], None, q=q, trace=chain[:,:-2], loglike=chain[:,-2], logpost=chain[:,-1])
def normalised_lombscargle(ts, ys, dys):
model = LombScargleFast().fit(ts, ys, dys)
T = np.max(ts)-np.min(ts)
dts = np.diff(np.sort(ts))
fny = 1.0/(2.0*np.min(dts))
df = 1.0/T
N = fny/df
fs = np.linspace(df, fny, N)
pows = model.score_frequency_grid(df, df, N)
mu = 1.0/T*np.trapz(ys, ts)
s2 = 1.0/T*np.trapz(np.square(ys-mu), ts)
return fs, s2*pows/np.trapz(pows, fs)
def plot_psd_sample_data(sample):
psd_low, psd_high, psd_med, fs = sample.plot_power_spectrum(doShow=False)
plt.clf()
plt.loglog(fs, psd_med, '-b', alpha=0.33)
plt.fill_between(fs, psd_low, psd_high, color='b', alpha=0.17)
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig)
bw = fs[-1] - fs[0]
T = sample.time[-1] - sample.time[0]
s2 = 1/T*np.trapz(np.square(sample.ysig), sample.time)
noise_level = s2/bw
levels = noise_level*np.sqrt(sample.get_samples('measerr_scale'))
plt.axhline(np.median(levels), color='g', alpha=0.33)
plt.fill_between(fs, np.percentile(levels, 84)+0*fs, np.percentile(levels, 16)+0*fs, color='g', alpha=0.17)
plt.loglog(fs, psd, '-r', alpha=0.33)
def plot_psd_sample_draw(sample, loc='upper left'):
fs, psd = normalised_lombscargle(sample.time, sample.y, sample.ysig)
ys_draw = sample.predict(sample.time, bestfit='random')[0]
fs, dpsd = normalised_lombscargle(sample.time, ys_draw, sample.ysig)
plt.loglog(fs, psd, '-k', label='Data', alpha=0.5)
plt.loglog(fs, dpsd, '-b', label='Prediction', alpha=0.5)
plt.legend(loc=loc)
| mit | Python |
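The reworked `normalised_lombscargle` lets callers pass `oversampling` and `nyquist_factor` straight through to gatspy's `periodogram_auto`, while keeping the convention that the returned PSD integrates (trapezoidally) to the time-series variance. A sketch checking that convention on synthetic data, assuming `arfit` and its dependencies (gatspy, carmcmc) are importable:

```python
# Synthetic check of the normalisation convention: trapz(psd, fs) should
# match the trapezoidal variance estimate of the series.
import numpy as np
from arfit.cp_utils import normalised_lombscargle  # assumes arfit is importable

ts = np.sort(np.random.uniform(0, 100, 500))
ys = np.sin(2 * np.pi * 0.3 * ts) + 0.1 * np.random.randn(ts.size)
fs, psd = normalised_lombscargle(ts, ys, 0.1 * np.ones_like(ts),
                                 oversampling=5, nyquist_factor=3)
T = ts[-1] - ts[0]
mu = np.trapz(ys, ts) / T
s2 = np.trapz(np.square(ys - mu), ts) / T
print(np.trapz(psd, fs), s2)  # the two numbers should agree
```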
4fd80a9a593a4f9100899e96a383782c68a41af1 | Fix to subtract USDT withdrawals from balance | BenjiLee/PoloniexAnalyzer | poloniex_apis/api_models/deposit_withdrawal_history.py | poloniex_apis/api_models/deposit_withdrawal_history.py | from collections import defaultdict
from poloniex_apis.api_models.ticker_price import TickerData
class DWHistory:
def __init__(self, history):
self.withdrawals = defaultdict(float)
self.deposits = defaultdict(float)
self.history = history
def get_dw_history(self):
for deposit in self.history['deposits']:
if deposit['currency'] in self.deposits:
self.deposits[deposit['currency']] += float(deposit['amount'])
else:
self.deposits[deposit['currency']] = float(deposit['amount'])
for withdrawal in self.history['withdrawals']:
if withdrawal['currency'] in self.withdrawals:
self.withdrawals[withdrawal['currency']] += float(withdrawal['amount'])
else:
self.withdrawals[withdrawal['currency']] = float(withdrawal['amount'])
return self.deposits, self.withdrawals
def get_btc_balance(self, ticker):
balance = 0
for deposit_symbol, amount in self.deposits.items():
if deposit_symbol == u"USDT":
balance += amount * ticker.get_price("USDT_BTC")
if deposit_symbol != u'BTC':
balance += amount * ticker.get_price("BTC_" + deposit_symbol)
else:
balance += amount
for withdrawal_symbol, amount in self.withdrawals.items():
if withdrawal_symbol == u"USDT":
balance -= amount * ticker.get_price("USDT_BTC")
if withdrawal_symbol != u'BTC':
balance -= amount * ticker.get_price("BTC_" + withdrawal_symbol)
else:
balance -= amount
return balance
| from collections import defaultdict
from poloniex_apis.api_models.ticker_price import TickerData
class DWHistory:
def __init__(self, history):
self.withdrawals = defaultdict(float)
self.deposits = defaultdict(float)
self.history = history
def get_dw_history(self):
for deposit in self.history['deposits']:
if deposit['currency'] in self.deposits:
self.deposits[deposit['currency']] += float(deposit['amount'])
else:
self.deposits[deposit['currency']] = float(deposit['amount'])
for withdrawal in self.history['withdrawals']:
if withdrawal['currency'] in self.withdrawals:
self.withdrawals[withdrawal['currency']] += float(withdrawal['amount'])
else:
self.withdrawals[withdrawal['currency']] = float(withdrawal['amount'])
return self.deposits, self.withdrawals
def get_btc_balance(self, ticker):
balance = 0
for deposit_symbol, amount in self.deposits.items():
if deposit_symbol == u"USDT":
balance += amount * ticker.get_price("USDT_BTC")
if deposit_symbol != u'BTC':
balance += amount * ticker.get_price("BTC_" + deposit_symbol)
else:
balance += amount
for withdrawal_symbol, amount in self.withdrawals.items():
if withdrawal_symbol == u"USDT":
balance += amount * ticker.get_price("USDT_BTC")
if withdrawal_symbol != u'BTC':
balance -= amount * ticker.get_price("BTC_" + withdrawal_symbol)
else:
balance -= amount
return balance
| mit | Python |
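The one-character fix flips `+=` to `-=` for USDT withdrawals, so an equal deposit and withdrawal now cancel. A self-contained restatement of that invariant with a stand-in ticker (`FakeTicker` is illustrative, not the real ticker API):

```python
# `FakeTicker` stands in for the real ticker object; it is assumed only to
# expose get_price(pair), as used by get_btc_balance above.
class FakeTicker:
    def get_price(self, pair):
        return 0.0001 if pair == "USDT_BTC" else 1.0

ticker = FakeTicker()
deposits = {"USDT": 1000.0}
withdrawals = {"USDT": 1000.0}

balance = 0.0
for symbol, amount in deposits.items():
    balance += amount * ticker.get_price("USDT_BTC")
for symbol, amount in withdrawals.items():
    balance -= amount * ticker.get_price("USDT_BTC")  # was '+=' before the fix
print(balance)  # 0.0 -- equal in/out flows cancel after the sign fix
```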
1e16048c7ceb50377fdfdda3a39ef9910d2021bb | Bump version to 0.2 | Temeez/wagtail-simple-gallery,Temeez/wagtail-simple-gallery | wagtail_simple_gallery/__init__.py | wagtail_simple_gallery/__init__.py | __version__ = '0.2' | __version__ = '0.1' | mit | Python |
7e9d3b3d2c4e46c2b16595b7acc6aa670ece9e6e | use correct API to save to bucket. | astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin | astrobin/tasks.py | astrobin/tasks.py | from django.conf import settings
from celery.decorators import task
from celery.task.sets import subtask
from PIL import Image as PILImage
from subprocess import call
import StringIO
import os
import os.path
from image_utils import *
from s3 import *
from notifications import *
@task()
def solve_image(image, callback=None):
# Solve
path = settings.UPLOADS_DIRECTORY
uid = image.filename
original_ext = image.original_ext
solved = False
command = ['nice', '-n', '5', '/usr/local/astrometry/bin/solve-field', path + uid + original_ext]
call(command)
solved_filename = settings.UPLOADS_DIRECTORY + image.filename + '-ngc.png'
if os.path.exists(settings.UPLOADS_DIRECTORY + image.filename + '.solved'):
solved = True
solved_file = open(solved_filename)
solved_data = StringIO.StringIO(solved_file.read())
solved_image = PILImage.open(solved_data)
(w, h) = solved_image.size
(w, h) = scale_dimensions(w, h, settings.RESIZED_IMAGE_SIZE)
solved_resizedImage = solved_image.resize((w, h), PILImage.ANTIALIAS)
# Then save to bucket
solved_resizedFile = StringIO.StringIO()
solved_resizedImage.save(solved_resizedFile, 'PNG')
save_to_bucket(uid + '_solved.png', solved_resizedFile.getvalue())
if solved:
push_notification([image.user], 'image_solved',
{'object_url':image.get_absolute_url() + '?mod=solved'})
else:
push_notification([image.user], 'image_not_solved',
{'object_url':image.get_absolute_url()})
if callback:
callback(image, solved, '%s%s*' % (path, uid))
@task()
def store_image(image, solve, callback=None):
try:
store_image_in_s3(settings.UPLOADS_DIRECTORY, image.filename, image.original_ext)
except S3CreateError, exc:
store_image.retry(exc=exc)
push_notification([image.user], 'image_ready', {'object_url':image.get_absolute_url()})
if callback:
callback(image, True, solve)
@task
def delete_image(filename, ext):
delete_image_from_s3(filename, ext)
| from django.conf import settings
from celery.decorators import task
from celery.task.sets import subtask
from PIL import Image as PILImage
from subprocess import call
import StringIO
import os
import os.path
from image_utils import *
from s3 import *
from notifications import *
@task()
def solve_image(image, callback=None):
# Solve
path = settings.UPLOADS_DIRECTORY
uid = image.filename
original_ext = image.original_ext
solved = False
command = ['nice', '-n', '5', '/usr/local/astrometry/bin/solve-field', path + uid + original_ext]
call(command)
solved_filename = settings.UPLOADS_DIRECTORY + image.filename + '-ngc.png'
if os.path.exists(settings.UPLOADS_DIRECTORY + image.filename + '.solved'):
solved = True
solved_file = open(solved_filename)
solved_data = StringIO.StringIO(solved_file.read())
solved_image = PILImage.open(solved_data)
(w, h) = solved_image.size
(w, h) = scale_dimensions(w, h, settings.RESIZED_IMAGE_SIZE)
solved_resizedImage = solved_image.resize((w, h), PILImage.ANTIALIAS)
# Then save to bucket
solved_resizedFile = StringIO.StringIO()
solved_resizedImage.save(solved_resizedFile, 'PNG')
save_to_bucket(solved_resizedFile.getvalue(),
'image/png',
settings.S3_SOLVED_BUCKET,
uid,
'.png')
if solved:
push_notification([image.user], 'image_solved',
{'object_url':image.get_absolute_url() + '?mod=solved'})
else:
push_notification([image.user], 'image_not_solved',
{'object_url':image.get_absolute_url()})
if callback:
callback(image, solved, '%s%s*' % (path, uid))
@task()
def store_image(image, solve, callback=None):
try:
store_image_in_s3(settings.UPLOADS_DIRECTORY, image.filename, image.original_ext)
except S3CreateError, exc:
store_image.retry(exc=exc)
push_notification([image.user], 'image_ready', {'object_url':image.get_absolute_url()})
if callback:
callback(image, True, solve)
@task
def delete_image(filename, ext):
delete_image_from_s3(filename, ext)
| agpl-3.0 | Python |
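The corrected call passes the key name and raw bytes: `save_to_bucket(uid + '_solved.png', solved_resizedFile.getvalue())`. A hypothetical helper compatible with that two-argument shape, written against the classic boto S3 API as an assumption; the real implementation lives in astrobin's `s3` module and may differ:

```python
# Hypothetical two-argument helper matching the corrected call shape; the
# real `save_to_bucket` may differ. The boto API and bucket name are assumptions.
from boto.s3.connection import S3Connection

BUCKET_NAME = 'astrobin-images'  # placeholder bucket name

def save_to_bucket(filename, content):
    connection = S3Connection()  # credentials taken from the environment
    bucket = connection.get_bucket(BUCKET_NAME)
    key = bucket.new_key(filename)
    key.set_contents_from_string(content)
```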
70f137998b2cc3b9c873a57e17a435c6ca181192 | improve code for getting the pricelist | OCA/multi-company,OCA/multi-company | product_supplier_intercompany/models/purchase_order.py | product_supplier_intercompany/models/purchase_order.py | # Copyright 2021 Akretion (https://www.akretion.com).
# @author Sébastien BEAU <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _, fields, models
from odoo.exceptions import UserError
class PurchaseOrder(models.Model):
_inherit = "purchase.order"
def _get_intercompany_pricelist(self, partner, dest_company):
if partner.property_product_pricelist.is_intercompany_supplier:
return partner.property_product_pricelist
else:
pricelist = self.env["product.pricelist"].search(
[
("company_id", "=", dest_company.id),
("is_intercompany_supplier", "=", True),
]
)
if len(pricelist) == 0:
raise UserError(
_(
(
"The Company {} do not have an intercompany pricelist "
"configured.\nPlease contact them and ask them to "
"active the option on the pricelist"
).format(dest_company.name)
)
)
else:
# Note in case that there is several pricelist that match we take
# the first one and the user will change it manually if needed
return fields.first(pricelist)
def _prepare_sale_order_data(
self, name, partner, dest_company, direct_delivery_address
):
res = super()._prepare_sale_order_data(
name, partner, dest_company, direct_delivery_address
)
res["pricelist_id"] = self._get_intercompany_pricelist(partner, dest_company).id
return res
| # Copyright 2021 Akretion (https://www.akretion.com).
# @author Sébastien BEAU <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import _, models
from odoo.exceptions import UserError
class PurchaseOrder(models.Model):
_inherit = "purchase.order"
def _prepare_sale_order_data(
self, name, partner, dest_company, direct_delivery_address
):
res = super()._prepare_sale_order_data(
name, partner, dest_company, direct_delivery_address
)
pricelist = self.env["product.pricelist"].search(
[
("company_id", "=", dest_company.id),
("is_intercompany_supplier", "=", True),
]
)
if not len(pricelist) == 1:
raise UserError(
_("Company %s do not have an intercompany pricelist configured"),
dest_company.name,
)
else:
res["pricelist_id"] = pricelist.id
return res
| agpl-3.0 | Python |
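The refactor prefers the partner's own pricelist when it is flagged intercompany, errors only when the destination company has none at all, and otherwise takes the first match via `fields.first`. A plain-Python restatement of that lookup order (the names are illustrative stand-ins, not Odoo records):

```python
# Restates only the branch order of _get_intercompany_pricelist above;
# strings stand in for Odoo recordsets.
def pick_pricelist(partner_pricelist_is_intercompany, company_pricelists):
    # 1) prefer the partner's own pricelist when it is flagged intercompany
    if partner_pricelist_is_intercompany:
        return "partner pricelist"
    # 2) otherwise error only when the destination company has none ...
    if not company_pricelists:
        raise ValueError("no intercompany pricelist configured")
    # 3) ... and silently take the first match, as fields.first does
    return company_pricelists[0]

print(pick_pricelist(False, ["PL-A", "PL-B"]))  # -> 'PL-A'
```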
590ba3c9d645f6eac41687bee9f12f7c914858d6 | revert to http for loading clusters | jdfekete/progressivis,jdfekete/progressivis,jdfekete/progressivis,jdfekete/progressivis,jdfekete/progressivis | progressivis/datasets/__init__.py | progressivis/datasets/__init__.py | import os
from progressivis import ProgressiveError
from .random import generate_random_csv
from .wget import wget_file
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../data'))
def get_dataset(name, **kwds):
if not os.path.isdir(DATA_DIR):
os.mkdir(DATA_DIR)
if name == 'bigfile':
return generate_random_csv('%s/bigfile.csv'%DATA_DIR, 1000000, 30)
if name == 'smallfile':
return generate_random_csv('%s/smallfile.csv'%DATA_DIR, 30000, 10)
if name == 'warlogs':
return wget_file(filename='%s/warlogs.vec.bz2'%DATA_DIR,
url='http://www.cs.ubc.ca/labs/imager/video/2014/QSNE/warlogs.vec.bz2',
**kwds)
if name.startswith('cluster:'):
fname = name[len('cluster:'):] + ".txt"
return wget_file(filename='%s/%s'%(DATA_DIR, fname),
url='http://cs.joensuu.fi/sipu/datasets/%s'%fname)
    raise ProgressiveError('Unknown dataset %s'%name)
__all__ = ['get_dataset',
'generate_random_csv']
| import os
from progressivis import ProgressiveError
from .random import generate_random_csv
from .wget import wget_file
DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../data'))
def get_dataset(name, **kwds):
if not os.path.isdir(DATA_DIR):
os.mkdir(DATA_DIR)
if name == 'bigfile':
return generate_random_csv('%s/bigfile.csv'%DATA_DIR, 1000000, 30)
if name == 'smallfile':
return generate_random_csv('%s/smallfile.csv'%DATA_DIR, 30000, 10)
if name == 'warlogs':
return wget_file(filename='%s/warlogs.vec.bz2'%DATA_DIR,
url='http://www.cs.ubc.ca/labs/imager/video/2014/QSNE/warlogs.vec.bz2',
**kwds)
if name.startswith('cluster:'):
fname = name[len('cluster:'):] + ".txt"
return wget_file(filename='%s/%s'%(DATA_DIR, fname),
url='https://cs.joensuu.fi/sipu/datasets/%s'%fname)
    raise ProgressiveError('Unknown dataset %s'%name)
__all__ = ['get_dataset',
'generate_random_csv']
| bsd-2-clause | Python |
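After the revert, `cluster:` datasets resolve to plain-HTTP URLs again. A usage sketch, assuming network access and that `s1` is one of the SIPU clustering datasets (the file is downloaded once and cached under the data directory):

```python
# Assumes network access; this would fetch
# http://cs.joensuu.fi/sipu/datasets/s1.txt on first use, then reuse the cache.
from progressivis.datasets import get_dataset

path = get_dataset('cluster:s1')
print(path)  # e.g. .../data/s1.txt
```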
0658934a7a7a1581c6f1d871c192f49b42144b09 | fix issue with ControlPlayer on mac | UmSenhorQualquer/pyforms | pyforms/gui/Controls/ControlPlayer/VideoQt5GLWidget.py | pyforms/gui/Controls/ControlPlayer/VideoQt5GLWidget.py | from pyforms.gui.Controls.ControlPlayer.AbstractGLWidget import AbstractGLWidget
from PyQt5 import QtGui
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5 import QtCore
class VideoQt5GLWidget(AbstractGLWidget, QOpenGLWidget):
def initializeGL(self):
self.gl = self.context().versionFunctions()
self.gl.initializeOpenGLFunctions()
'''
Sets up the OpenGL rendering context, defines display lists, etc.
Gets called once before the first time resizeGL() or paintGL() is called.
'''
self.gl.glClearDepth(1.0)
self.gl.glClearColor(0, 0, 0, 1.0)
self.gl.glEnable(self.gl.GL_DEPTH_TEST)
def perspective(self, fovy, aspect, zNear, zFar):
ymax = zNear * math.tan( fovy * math.pi / 360.0 );
ymin = -ymax;
xmin = ymin * aspect;
xmax = ymax * aspect;
self.gl.glFrustum( xmin, xmax, ymin, ymax, zNear, zFar )
def resizeGL(self, width, height):
self.setupViewport(width, height)
def setupViewport(self, width, height):
side = min(width, height)
self.gl.glViewport((width - side) // 2, (height - side) // 2, side,
side)
self.gl.glMatrixMode(self.gl.GL_PROJECTION)
self.gl.glLoadIdentity()
#self.gl.glOrtho(-0.5, +0.5, +0.5, -0.5, 4.0, 15.0)
self.perspective(40.0, float(width) / float(height), 0.01, 10.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW) | from pyforms.gui.Controls.ControlPlayer.AbstractGLWidget import AbstractGLWidget
from PyQt5 import QtGui
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5 import QtCore
class VideoQt5GLWidget(AbstractGLWidget, QOpenGLWidget):
def initializeGL(self):
self.gl = self.context().versionFunctions()
self.gl.initializeOpenGLFunctions()
'''
Sets up the OpenGL rendering context, defines display lists, etc.
Gets called once before the first time resizeGL() or paintGL() is called.
'''
self.gl.glClearDepth(1.0)
self.gl.glClearColor(0, 0, 0, 1.0)
self.gl.glEnable(GL.GL_DEPTH_TEST)
def perspective(self, fovy, aspect, zNear, zFar):
ymax = zNear * math.tan( fovy * math.pi / 360.0 );
ymin = -ymax;
xmin = ymin * aspect;
xmax = ymax * aspect;
self.gl.glFrustum( xmin, xmax, ymin, ymax, zNear, zFar )
def resizeGL(self, width, height):
self.setupViewport(width, height)
def setupViewport(self, width, height):
side = min(width, height)
self.gl.glViewport((width - side) // 2, (height - side) // 2, side,
side)
self.gl.glMatrixMode(self.gl.GL_PROJECTION)
self.gl.glLoadIdentity()
#self.gl.glOrtho(-0.5, +0.5, +0.5, -0.5, 4.0, 15.0)
self.perspective(40.0, float(width) / float(height), 0.01, 10.0)
self.gl.glMatrixMode(self.gl.GL_MODELVIEW) | mit | Python |
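The mac fix replaces the bare `GL.GL_DEPTH_TEST` (a NameError, since no `GL` module is imported) with the constant on the context's version-functions object. A minimal sketch of that PyQt5 pattern; the widget and application setup are illustrative and omitted:

```python
# After initializeGL, both the GL entry points and the GL constants live on
# the versionFunctions() object, so no global `GL` module is required.
from PyQt5.QtWidgets import QOpenGLWidget

class DepthTestWidget(QOpenGLWidget):
    def initializeGL(self):
        self.gl = self.context().versionFunctions()
        self.gl.initializeOpenGLFunctions()
        self.gl.glEnable(self.gl.GL_DEPTH_TEST)  # constant from self.gl, not GL
```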
d0ed8aeb2126a4b14b8413bd8c6d54952451e890 | Update version number. | aviweit/libcloud,vongazman/libcloud,mgogoulos/libcloud,DimensionDataCBUSydney/libcloud,aviweit/libcloud,wuyuewen/libcloud,lochiiconnectivity/libcloud,dcorbacho/libcloud,mistio/libcloud,andrewsomething/libcloud,Cloud-Elasticity-Services/as-libcloud,t-tran/libcloud,ZuluPro/libcloud,apache/libcloud,sgammon/libcloud,sahildua2305/libcloud,watermelo/libcloud,briancurtin/libcloud,iPlantCollaborativeOpenSource/libcloud,apache/libcloud,iPlantCollaborativeOpenSource/libcloud,ClusterHQ/libcloud,munkiat/libcloud,JamesGuthrie/libcloud,iPlantCollaborativeOpenSource/libcloud,erjohnso/libcloud,Cloud-Elasticity-Services/as-libcloud,Scalr/libcloud,dcorbacho/libcloud,mgogoulos/libcloud,niteoweb/libcloud,ByteInternet/libcloud,DimensionDataCBUSydney/libcloud,sahildua2305/libcloud,cryptickp/libcloud,samuelchong/libcloud,jimbobhickville/libcloud,carletes/libcloud,sahildua2305/libcloud,cloudControl/libcloud,Jc2k/libcloud,watermelo/libcloud,sergiorua/libcloud,Verizon/libcloud,JamesGuthrie/libcloud,curoverse/libcloud,samuelchong/libcloud,kater169/libcloud,Itxaka/libcloud,mbrukman/libcloud,carletes/libcloud,thesquelched/libcloud,samuelchong/libcloud,pquentin/libcloud,cryptickp/libcloud,smaffulli/libcloud,NexusIS/libcloud,jerryblakley/libcloud,sfriesel/libcloud,techhat/libcloud,Kami/libcloud,pantheon-systems/libcloud,Verizon/libcloud,schaubl/libcloud,andrewsomething/libcloud,StackPointCloud/libcloud,erjohnso/libcloud,aviweit/libcloud,kater169/libcloud,curoverse/libcloud,atsaki/libcloud,mbrukman/libcloud,wrigri/libcloud,marcinzaremba/libcloud,mathspace/libcloud,thesquelched/libcloud,mtekel/libcloud,mistio/libcloud,mathspace/libcloud,carletes/libcloud,pquentin/libcloud,kater169/libcloud,techhat/libcloud,smaffulli/libcloud,vongazman/libcloud,NexusIS/libcloud,ZuluPro/libcloud,MrBasset/libcloud,apache/libcloud,Cloud-Elasticity-Services/as-libcloud,pantheon-systems/libcloud,briancurtin/libcloud,jerryblakley/libcloud,cryptickp/libcloud,Jc2k/libcloud,briancurtin/libcloud,watermelo/libcloud,marcinzaremba/libcloud,Kami/libcloud,sgammon/libcloud,t-tran/libcloud,wrigri/libcloud,niteoweb/libcloud,wrigri/libcloud,SecurityCompass/libcloud,cloudControl/libcloud,illfelder/libcloud,mtekel/libcloud,ninefold/libcloud,niteoweb/libcloud,mtekel/libcloud,NexusIS/libcloud,vongazman/libcloud,SecurityCompass/libcloud,pantheon-systems/libcloud,Itxaka/libcloud,mbrukman/libcloud,Scalr/libcloud,techhat/libcloud,ClusterHQ/libcloud,jerryblakley/libcloud,lochiiconnectivity/libcloud,ByteInternet/libcloud,sergiorua/libcloud,t-tran/libcloud,wido/libcloud,MrBasset/libcloud,DimensionDataCBUSydney/libcloud,ninefold/libcloud,sergiorua/libcloud,illfelder/libcloud,ByteInternet/libcloud,Keisuke69/libcloud,supertom/libcloud,munkiat/libcloud,Keisuke69/libcloud,supertom/libcloud,supertom/libcloud,aleGpereira/libcloud,dcorbacho/libcloud,aleGpereira/libcloud,MrBasset/libcloud,atsaki/libcloud,munkiat/libcloud,aleGpereira/libcloud,schaubl/libcloud,lochiiconnectivity/libcloud,mathspace/libcloud,mistio/libcloud,schaubl/libcloud,cloudControl/libcloud,ZuluPro/libcloud,smaffulli/libcloud,sfriesel/libcloud,StackPointCloud/libcloud,wido/libcloud,wuyuewen/libcloud,atsaki/libcloud,erjohnso/libcloud,jimbobhickville/libcloud,thesquelched/libcloud,StackPointCloud/libcloud,sfriesel/libcloud,jimbobhickville/libcloud,mgogoulos/libcloud,illfelder/libcloud,andrewsomething/libcloud,SecurityCompass/libcloud,pquentin/libcloud,JamesGuthrie/libcloud,Kami/libcloud,Verizon/libcloud,wuyuewen/libcloud,wido/libcloud,Itxaka/libc
loud,curoverse/libcloud,Scalr/libcloud,marcinzaremba/libcloud | libcloud/__init__.py | libcloud/__init__.py | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
@var __version__: Current version of libcloud
"""
__all__ = ["__version__", "enable_debug"]
__version__ = "0.5.0-dev"
def enable_debug(fo):
"""
Enable library wide debugging to a file-like object.
@param fo: Where to append debugging information
@type fo: File like object, only write operations are used.
"""
from libcloud.base import ConnectionKey, LoggingHTTPConnection, LoggingHTTPSConnection
LoggingHTTPSConnection.log = fo
LoggingHTTPConnection.log = fo
ConnectionKey.conn_classes = (LoggingHTTPConnection, LoggingHTTPSConnection)
def _init_once():
"""
    Utility function that is run once on Library import.
    This checks for the LIBCLOUD_DEBUG environment variable, which if it exists
is where we will log debug information about the provider transports.
If LIBCLOUD_DEBUG is not a path, C{/tmp/libcloud_debug.log} is used by
default.
"""
import os
d = os.getenv("LIBCLOUD_DEBUG")
if d:
if d.isdigit():
d = "/tmp/libcloud_debug.log"
fo = open(d, "a")
enable_debug(fo)
_init_once()
| # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
libcloud provides a unified interface to the cloud computing resources.
@var __version__: Current version of libcloud
"""
__all__ = ["__version__", "enable_debug"]
__version__ = "0.4.3-dev"
def enable_debug(fo):
"""
Enable library wide debugging to a file-like object.
@param fo: Where to append debugging information
@type fo: File like object, only write operations are used.
"""
from libcloud.base import ConnectionKey, LoggingHTTPConnection, LoggingHTTPSConnection
LoggingHTTPSConnection.log = fo
LoggingHTTPConnection.log = fo
ConnectionKey.conn_classes = (LoggingHTTPConnection, LoggingHTTPSConnection)
def _init_once():
"""
    Utility function that is run once on Library import.
    This checks for the LIBCLOUD_DEBUG environment variable, which if it exists
is where we will log debug information about the provider transports.
If LIBCLOUD_DEBUG is not a path, C{/tmp/libcloud_debug.log} is used by
default.
"""
import os
d = os.getenv("LIBCLOUD_DEBUG")
if d:
if d.isdigit():
d = "/tmp/libcloud_debug.log"
fo = open(d, "a")
enable_debug(fo)
_init_once()
| apache-2.0 | Python |
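The docstrings above describe two ways to turn on wire-level debugging. A usage sketch calling `enable_debug` directly instead of setting the `LIBCLOUD_DEBUG` environment variable (writing to `sys.stderr` is an arbitrary choice):

```python
# Any writable file-like object works, exactly as the enable_debug
# docstring above states; stderr is just a convenient sink.
import sys
import libcloud

libcloud.enable_debug(sys.stderr)
# Drivers created from here on log their HTTP(S) traffic to stderr.
```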
b999240903bb71e14818fb3f2d8eb12bda75ada2 | Bump tensorflow to 2.1.0 (#721) | tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io,tensorflow/io | tensorflow_io/core/python/ops/version_ops.py | tensorflow_io/core/python/ops/version_ops.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""version_ops"""
package = 'tensorflow==2.1.0'
version = '0.11.0'
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""version_ops"""
package = 'tensorflow>=2.1.0rc2'
version = '0.11.0'
| apache-2.0 | Python |
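The change replaces the open-ended `tensorflow>=2.1.0rc2` pin with an exact `tensorflow==2.1.0`. A sketch of how such a pin typically flows into packaging metadata; the `setup()` wiring is an assumption, not code from the tensorflow-io repository:

```python
# The setup() wiring is an assumption; it only shows where an exact pin
# like the one in version_ops usually ends up.
from setuptools import setup

package = 'tensorflow==2.1.0'  # exact pin, as in version_ops above
version = '0.11.0'

setup(
    name='tensorflow-io',
    version=version,
    install_requires=[package],  # consumers now get exactly TF 2.1.0
)
```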
a9cd7d6eaa7ea70e962cf4d1c9e4aa53a2845968 | Bump version number | looplab/lillebror | lillebror/version.py | lillebror/version.py | # Copyright 2012 Loop Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__title__ = 'lillebror'
__version__ = '0.2.0'
__author__ = 'Max Persson'
__license__ = 'Apache License 2.0'
__copyright__ = 'Copyright 2013 Max Persson'
__project_url__ = 'https://github.com/looplab/lillebror'
| # Copyright 2012 Loop Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__title__ = 'lillebror'
__version__ = '0.1.1'
__author__ = 'Max Persson'
__license__ = 'Apache License 2.0'
__copyright__ = 'Copyright 2013 Max Persson'
__project_url__ = 'https://github.com/looplab/lillebror'
| apache-2.0 | Python |
3fea814461d2a51e0cc13c4981fa6f4cdfca75e9 | Correct broken import, this could never have worked. | EmilStenstrom/nephele | providers/moviedata/filmtipset.py | providers/moviedata/filmtipset.py | from providers.moviedata.provider import MoviedataProvider
from urllib import urlencode
from access_keys import ACCESS_KEYS
from application import APPLICATION as APP
IDENTIFIER = "Filmtipset"
class Provider(MoviedataProvider):
def get_url(self, movie):
options = {
"action": "search",
"id": movie["name"],
"returntype": "json",
"accesskey": ACCESS_KEYS[IDENTIFIER]["ACCESS_KEY"],
"usernr": ACCESS_KEYS[IDENTIFIER]["USER_KEY"],
}
return "http://www.filmtipset.se/api/api.cgi?" + urlencode(options)
def get_movie_data(self, movie):
url = self.get_url(movie)
APP.debug("Fetching url: %s" % url)
data = self.parse_json(url, path="0.data.0.hits")
data = self.find_movie_matching_year(data, movie["year"])
if not data:
return None, {}
data = self.transform_data(data)
return data["id"], data
def find_movie_matching_year(self, data, year):
if not year:
return self.traverse_json(data, path="0.movie")
for i in range(5):
            # Use a local name so the original payload stays intact for
            # the fallback lookup below.
            candidate = self.traverse_json(data, "%s.movie" % i)
            if candidate.get("year", None) == year:
                return candidate
return self.traverse_json(data, path="0.movie")
def get_data_mapping(self):
return {
"id": lambda data: "tt" + data["imdb"],
"title": "orgname",
"title_swe": "name",
"country": "country",
"director": "director",
"year": "year",
"filmtipset_my_grade": "grade.value",
"filmtipset_my_grade_type": "grade.type",
"filmtipset_avg_grade": "filmtipsetgrade.value",
"filmtipset_url": "url",
"filmtipset_id": "id",
}
| from providers.moviedata.provider import MoviedataProvider
from urllib import urlencode
from settings import ACCESS_KEYS
from application import APPLICATION as APP
IDENTIFIER = "Filmtipset"
class Provider(MoviedataProvider):
def get_url(self, movie):
options = {
"action": "search",
"id": movie["name"],
"returntype": "json",
"accesskey": ACCESS_KEYS[IDENTIFIER]["ACCESS_KEY"],
"usernr": ACCESS_KEYS[IDENTIFIER]["USER_KEY"],
}
return "http://www.filmtipset.se/api/api.cgi?" + urlencode(options)
def get_movie_data(self, movie):
url = self.get_url(movie)
APP.debug("Fetching url: %s" % url)
data = self.parse_json(url, path="0.data.0.hits")
data = self.find_movie_matching_year(data, movie["year"])
if not data:
return None, {}
data = self.transform_data(data)
return data["id"], data
def find_movie_matching_year(self, data, year):
if not year:
return self.traverse_json(data, path="0.movie")
for i in range(5):
            # Use a local name so the original payload stays intact for
            # the fallback lookup below.
            candidate = self.traverse_json(data, "%s.movie" % i)
            if candidate.get("year", None) == year:
                return candidate
return self.traverse_json(data, path="0.movie")
def get_data_mapping(self):
return {
"id": lambda data: "tt" + data["imdb"],
"title": "orgname",
"title_swe": "name",
"country": "country",
"director": "director",
"year": "year",
"filmtipset_my_grade": "grade.value",
"filmtipset_my_grade_type": "grade.type",
"filmtipset_avg_grade": "filmtipsetgrade.value",
"filmtipset_url": "url",
"filmtipset_id": "id",
}
| mit | Python |
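For reference, the query-string construction in `get_url` boils down to a single `urlencode` call; a standalone sketch with dummy values (shown in Python 3 syntax, while the file above targets Python 2):

```python
# Dummy credentials throughout; in Python 2 the import was
# `from urllib import urlencode`, as in the provider above.
from urllib.parse import urlencode

options = {
    "action": "search",
    "id": "Alien",             # hypothetical movie title
    "returntype": "json",
    "accesskey": "DUMMY-KEY",  # placeholder
    "usernr": "0",             # placeholder
}
print("http://www.filmtipset.se/api/api.cgi?" + urlencode(options))
```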
b8241c2ff0cff4a0bc96e6d229c80029cdbcb71c | Change contact email. | tryolabs/luminoth,tryolabs/luminoth,tryolabs/luminoth | luminoth/__init__.py | luminoth/__init__.py | from .cli import cli # noqa
__version__ = '0.0.1.dev0'
__title__ = 'Luminoth'
__description__ = 'Computer vision toolkit based on TensorFlow'
__uri__ = 'http://luminoth.ai'
__doc__ = __description__ + ' <' + __uri__ + '>'
__author__ = 'Tryolabs'
__email__ = '[email protected]'
__license__ = 'BSD 3-Clause License'
__copyright__ = 'Copyright (c) 2017 Tryolabs S.A.'
| from .cli import cli # noqa
__version__ = '0.0.1.dev0'
__title__ = 'Luminoth'
__description__ = 'Computer vision toolkit based on TensorFlow'
__uri__ = 'http://luminoth.ai'
__doc__ = __description__ + ' <' + __uri__ + '>'
__author__ = 'Tryolabs'
__email__ = '[email protected]'
__license__ = 'BSD 3-Clause License'
__copyright__ = 'Copyright (c) 2017 Tryolabs S.A.'
| bsd-3-clause | Python |
df3441a2c98fffbb18c11d3660acb86d2e31e5fa | Fix main run | UltrosBot/Ultros3K,UltrosBot/Ultros3K | src/ultros/core/__main__.py | src/ultros/core/__main__.py | # coding=utf-8
import argparse
import asyncio
from ultros.core.ultros import Ultros
"""
Ultros - Module runnable
"""
__author__ = "Gareth Coles"
__version__ = "0.0.1"
def start(arguments):
u = Ultros(arguments.config, arguments.data)
u.start()
def init(arguments):
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="ultros")
parser.add_argument(
"--version", action="version", version="Ultros {}".format(__version__)
)
parser.add_argument(
"--config", help="specify a directory containing configuration files",
default="./config"
)
parser.add_argument(
"--data", help="specify a directory to store data files",
default="./data"
)
subparsers = parser.add_subparsers()
parser_init = subparsers.add_parser(
"init", help="Create a default directory structure with example files"
)
parser_init.set_defaults(func=init)
parser_start = subparsers.add_parser("start", help="Start Ultros")
parser_start.set_defaults(func=start)
args = parser.parse_args()
if hasattr(args, "func"):
args.func(args)
else:
parser.print_usage()
| # coding=utf-8
import argparse
import asyncio
from ultros.core.ultros import Ultros
"""
Ultros - Module runnable
"""
__author__ = "Gareth Coles"
__version__ = "0.0.1"
def start(args):
u = Ultros(args.config, args.data)
# Gonna have to be a coroutine if we're AIO-based. Probably.
asyncio.get_event_loop().run_until_complete(u.start)
def init(args):
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="ultros")
parser.add_argument(
"--version", action="version", version="Ultros {}".format(__version__)
)
parser.add_argument(
"--config", help="specify a directory containing configuration files",
default="./config"
)
parser.add_argument(
"--data", help="specify a directory to store data files",
default="./data"
)
subparsers = parser.add_subparsers()
parser_init = subparsers.add_parser(
"init", help="Create a default directory structure with example files"
)
parser_init.set_defaults(func=init)
parser_start = subparsers.add_parser("start", help="Start Ultros")
parser_start.set_defaults(func=start)
args = parser.parse_args()
if hasattr(args, "func"):
args.func(args)
else:
parser.print_usage()
| artistic-2.0 | Python |
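The `set_defaults(func=...)` idiom above routes each subcommand through a single dispatch attribute; the same pattern in miniature (standalone sketch, names unrelated to Ultros):

```python
# Condensed subcommand dispatch, mirroring the structure above.
import argparse

parser = argparse.ArgumentParser(prog="demo")
subparsers = parser.add_subparsers()

hello = subparsers.add_parser("hello", help="Say hello")
hello.set_defaults(func=lambda args: print("hello"))

args = parser.parse_args(["hello"])
if hasattr(args, "func"):
    args.func(args)
else:
    parser.print_usage()
```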
5229bf4a16d468a3a337db65c478671409d6d898 | Update summery.py | tinypony/testing-utils,tinypony/testing-utils,tinypony/testing-utils | metric-consumer/summary.py | metric-consumer/summary.py | #!/usr/bin/python
import os
import argparse
import re
def cumulative_moving_average(new_value, old_mean, total_items):
return old_mean + (new_value - old_mean) / total_items
def print_file_summary(path):
cma = 0
n = 0
with open(path, 'r') as csv_file:
all_lines = csv_file.readlines()
for line in all_lines[1:]:
try:
values = line.split(',')
#latency,1467792005016000000,3,False,338,False,256.0,1.467791983851e+12
receive_time = float(values[1])
send_time = float(values[7])
                receive_time = receive_time/1000000 # convert nanoseconds to milliseconds
travel_time = receive_time - send_time
cma = cumulative_moving_average(travel_time, cma, n+1)
n = n+1
except:
continue
print '{} = mean {}'.format(path, cma)
parser = argparse.ArgumentParser(description='Traverse all csv files in given dir and print mean travel time')
parser.add_argument('--dir', dest='dir', type=str, help='Root directory')
parser.set_defaults(dir='.')
args = parser.parse_args()
csv_pattern = re.compile(".*\.csv$")
for root, dirs, files in os.walk(args.dir):
for f in files:
if(csv_pattern.match(f)):
print_file_summary('{}/{}'.format(root, f))
| #!/usr/bin/python
import os
import argparse
import re
def cumulative_moving_average(new_value, old_mean, total_items):
return old_mean + (new_value - old_mean) / total_items
def print_file_summary(path):
cma = 0
n = 0
with open(path, 'r') as csv_file:
all_lines = csv_file.readlines()
for line in all_lines[1:]:
try:
values = line.split(',')
#latency,1467792005016000000,3,False,338,False,256.0,1.467791983851e+12
receive_time = int(values[1])
send_time = int(float(values[7]))
                receive_time = receive_time/1000000 # convert nanoseconds to milliseconds
travel_time = receive_time - send_time
cma = cumulative_moving_average(travel_time, cma, n+1)
n = n+1
except:
continue
print '{} = mean {}'.format(path, cma)
parser = argparse.ArgumentParser(description='Traverse all csv files in given dir and print mean travel time')
parser.add_argument('--dir', dest='dir', type=str, help='Root directory')
parser.set_defaults(dir='.')
args = parser.parse_args()
csv_pattern = re.compile(".*\.csv$")
for root, dirs, files in os.walk(args.dir):
for f in files:
if(csv_pattern.match(f)):
print_file_summary('{}/{}'.format(root, f))
| mit | Python |
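The `cumulative_moving_average` update is just an incremental form of the arithmetic mean; a quick standalone check of that property (not part of the commit):

```python
# Verifies that the incremental update equals the plain mean.
def cumulative_moving_average(new_value, old_mean, total_items):
    return old_mean + (new_value - old_mean) / total_items

values = [256.0, 338.0, 120.0, 97.5]
cma = 0.0
for n, value in enumerate(values, start=1):
    cma = cumulative_moving_average(value, cma, n)

assert abs(cma - sum(values) / len(values)) < 1e-9
```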
4eb71abf71823a5a065d1b593ca8b624d17a35c9 | prepare for 1.6 | antidot/Pyckson | src/pyckson/__init__.py | src/pyckson/__init__.py | from pyckson.decorators import *
from pyckson.json import *
from pyckson.parser import parse
from pyckson.parsers.base import Parser
from pyckson.serializer import serialize
from pyckson.serializers.base import Serializer
from pyckson.dates.helpers import configure_date_formatter
__version__ = '1.6'
| from pyckson.decorators import *
from pyckson.json import *
from pyckson.parser import parse
from pyckson.parsers.base import Parser
from pyckson.serializer import serialize
from pyckson.serializers.base import Serializer
from pyckson.dates.helpers import configure_date_formatter
__version__ = '1.5'
| lgpl-2.1 | Python |
521c71c38d4e6edc242afb76daf330d9aec8e9ff | remove ipdb | haoyuchen1992/osf.io,samanehsan/osf.io,mattclark/osf.io,kwierman/osf.io,KAsante95/osf.io,saradbowman/osf.io,jmcarp/osf.io,rdhyee/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,mfraezz/osf.io,aaxelb/osf.io,emetsger/osf.io,chennan47/osf.io,ckc6cz/osf.io,hmoco/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,cslzchen/osf.io,ZobairAlijan/osf.io,lyndsysimon/osf.io,TomHeatwole/osf.io,abought/osf.io,caseyrollins/osf.io,erinspace/osf.io,icereval/osf.io,zachjanicki/osf.io,cwisecarver/osf.io,mattclark/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,njantrania/osf.io,mluo613/osf.io,HarryRybacki/osf.io,TomHeatwole/osf.io,ticklemepierce/osf.io,sloria/osf.io,danielneis/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,amyshi188/osf.io,mfraezz/osf.io,caseyrygt/osf.io,jmcarp/osf.io,binoculars/osf.io,mluke93/osf.io,TomBaxter/osf.io,lyndsysimon/osf.io,RomanZWang/osf.io,arpitar/osf.io,mluke93/osf.io,jmcarp/osf.io,mluo613/osf.io,cosenal/osf.io,alexschiller/osf.io,kch8qx/osf.io,crcresearch/osf.io,pattisdr/osf.io,emetsger/osf.io,acshi/osf.io,ZobairAlijan/osf.io,chennan47/osf.io,Nesiehr/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,sloria/osf.io,HalcyonChimera/osf.io,Ghalko/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,samanehsan/osf.io,bdyetton/prettychart,kch8qx/osf.io,monikagrabowska/osf.io,jolene-esposito/osf.io,haoyuchen1992/osf.io,TomHeatwole/osf.io,haoyuchen1992/osf.io,leb2dg/osf.io,arpitar/osf.io,acshi/osf.io,samanehsan/osf.io,KAsante95/osf.io,mluo613/osf.io,cslzchen/osf.io,dplorimer/osf,hmoco/osf.io,MerlinZhang/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,abought/osf.io,laurenrevere/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,petermalcolm/osf.io,lyndsysimon/osf.io,doublebits/osf.io,TomBaxter/osf.io,acshi/osf.io,kch8qx/osf.io,danielneis/osf.io,njantrania/osf.io,njantrania/osf.io,kch8qx/osf.io,zachjanicki/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,erinspace/osf.io,zamattiac/osf.io,mluo613/osf.io,icereval/osf.io,aaxelb/osf.io,jnayak1/osf.io,Ghalko/osf.io,adlius/osf.io,KAsante95/osf.io,mluke93/osf.io,GageGaskins/osf.io,emetsger/osf.io,cldershem/osf.io,samchrisinger/osf.io,brianjgeiger/osf.io,reinaH/osf.io,mfraezz/osf.io,pattisdr/osf.io,GageGaskins/osf.io,ckc6cz/osf.io,brandonPurvis/osf.io,HarryRybacki/osf.io,TomBaxter/osf.io,chennan47/osf.io,acshi/osf.io,caneruguz/osf.io,jnayak1/osf.io,dplorimer/osf,jolene-esposito/osf.io,chrisseto/osf.io,SSJohns/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,MerlinZhang/osf.io,saradbowman/osf.io,jolene-esposito/osf.io,doublebits/osf.io,hmoco/osf.io,jolene-esposito/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,kch8qx/osf.io,amyshi188/osf.io,Nesiehr/osf.io,jnayak1/osf.io,crcresearch/osf.io,wearpants/osf.io,leb2dg/osf.io,Nesiehr/osf.io,caneruguz/osf.io,doublebits/osf.io,sbt9uc/osf.io,wearpants/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,billyhunt/osf.io,kwierman/osf.io,mattclark/osf.io,Ghalko/osf.io,wearpants/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,ckc6cz/osf.io,reinaH/osf.io,ckc6cz/osf.io,binoculars/osf.io,doublebits/osf.io,petermalcolm/osf.io,zamattiac/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,abought/osf.io,billyhunt/osf.io,TomHeatwole/osf.io,adlius/osf.io,rdhyee/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,MerlinZhang/osf.i
o,cslzchen/osf.io,HarryRybacki/osf.io,HalcyonChimera/osf.io,njantrania/osf.io,GageGaskins/osf.io,sloria/osf.io,petermalcolm/osf.io,baylee-d/osf.io,RomanZWang/osf.io,HalcyonChimera/osf.io,ticklemepierce/osf.io,felliott/osf.io,cldershem/osf.io,chrisseto/osf.io,kwierman/osf.io,doublebits/osf.io,arpitar/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,dplorimer/osf,icereval/osf.io,cldershem/osf.io,SSJohns/osf.io,amyshi188/osf.io,sbt9uc/osf.io,adlius/osf.io,acshi/osf.io,caneruguz/osf.io,billyhunt/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,rdhyee/osf.io,ticklemepierce/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,cosenal/osf.io,bdyetton/prettychart,chrisseto/osf.io,brandonPurvis/osf.io,sbt9uc/osf.io,cosenal/osf.io,MerlinZhang/osf.io,alexschiller/osf.io,bdyetton/prettychart,arpitar/osf.io,brianjgeiger/osf.io,erinspace/osf.io,asanfilippo7/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,abought/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,felliott/osf.io,zachjanicki/osf.io,kwierman/osf.io,SSJohns/osf.io,alexschiller/osf.io,hmoco/osf.io,billyhunt/osf.io,samchrisinger/osf.io,cslzchen/osf.io,danielneis/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,adlius/osf.io,KAsante95/osf.io,pattisdr/osf.io,lyndsysimon/osf.io,GageGaskins/osf.io,mluo613/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,dplorimer/osf,HarryRybacki/osf.io,rdhyee/osf.io,reinaH/osf.io,Johnetordoff/osf.io,jmcarp/osf.io,aaxelb/osf.io,alexschiller/osf.io,crcresearch/osf.io,wearpants/osf.io,mfraezz/osf.io,mluke93/osf.io,felliott/osf.io,zachjanicki/osf.io,cldershem/osf.io,reinaH/osf.io,cosenal/osf.io,baylee-d/osf.io,cwisecarver/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,caseyrygt/osf.io | scripts/dataverse/connect_external_accounts.py | scripts/dataverse/connect_external_accounts.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.dataverse.model import AddonDataverseNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in AddonDataverseNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
if not user_addon.external_accounts:
logger.warning('User {0} has no dataverse external account'.format(user_addon.owner._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
from modularodm import Q
from website.app import init_app
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
from website.addons.dataverse.model import AddonDataverseNodeSettings
logger = logging.getLogger(__name__)
def do_migration():
for node_addon in AddonDataverseNodeSettings.find(Q('foreign_user_settings', 'ne', None)):
user_addon = node_addon.foreign_user_settings
# import ipdb; ipdb.set_trace()
if not user_addon.external_accounts:
logger.warning('User {0} has no dataverse external account'.format(user_addon.owner._id))
continue
account = user_addon.external_accounts[0]
node_addon.set_auth(account, user_addon.owner)
logger.info('Added external account {0} to node {1}'.format(
account._id, node_addon.owner._id,
))
def main(dry=True):
init_app(set_backends=True, routes=False) # Sets the storage backends on all models
with TokuTransaction():
do_migration()
if dry:
raise Exception('Abort Transaction - Dry Run')
if __name__ == '__main__':
dry = 'dry' in sys.argv
if not dry:
script_utils.add_file_logger(logger, __file__)
main(dry=dry)
| apache-2.0 | Python |
9cdd86499013c1deac7caeb8320c34294789f716 | Add _kill_and_join to async actor stub | clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage | py/garage/garage/asyncs/actors.py | py/garage/garage/asyncs/actors.py | """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
async def _kill_and_join(self, graceful=True):
self._kill(graceful=graceful)
await self._get_future().result()
| """Asynchronous support for garage.threads.actors."""
__all__ = [
'StubAdapter',
]
from garage.asyncs import futures
class StubAdapter:
"""Wrap all method calls, adding FutureAdapter on their result.
While this simple adapter does not work for all corner cases, for
common cases, it should work fine.
"""
def __init__(self, stub):
super().__setattr__('_stub', stub)
def __getattr__(self, name):
method = getattr(self._stub, name)
# Simple foolproof detection of non-message-sending access
if name.startswith('_'):
return method
return lambda *args, **kwargs: \
futures.FutureAdapter(method(*args, **kwargs))
def _get_future(self):
return futures.FutureAdapter(self._stub._get_future())
def _send_message(self, func, args, kwargs):
"""Enqueue a message into actor's message queue.
Since this does not block, it may raise Full when the message
queue is full.
"""
future = self._stub._send_message(func, args, kwargs, block=False)
return futures.FutureAdapter(future)
| mit | Python |
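The adapter above hinges on `__getattr__` interception plus `super().__setattr__` to avoid recursion; a dependency-free toy version of the same pattern (unrelated to garage's real stub API):

```python
# Toy illustration of the wrapping pattern used by StubAdapter.
class Inner:
    def greet(self, name):
        return 'hello ' + name

class Adapter:
    def __init__(self, inner):
        # Bypass __getattr__/__setattr__ machinery for our own field.
        super().__setattr__('_inner', inner)

    def __getattr__(self, name):
        method = getattr(self._inner, name)
        if name.startswith('_'):
            return method  # private access passes through untouched
        # Public methods get their results post-processed.
        return lambda *args, **kwargs: method(*args, **kwargs).upper()

print(Adapter(Inner()).greet('world'))  # -> HELLO WORLD
```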
16381d4fafe743c3feb1de7ec27b6cbf95f617f1 | Add state and conf to interactive namespace by default | kinverarity1/pyexperiment,DeercoderResearch/pyexperiment,DeercoderResearch/pyexperiment,kinverarity1/pyexperiment,DeercoderResearch/pyexperiment,kinverarity1/pyexperiment,shaunstanislaus/pyexperiment,duerrp/pyexperiment,shaunstanislaus/pyexperiment,DeercoderResearch/pyexperiment,shaunstanislaus/pyexperiment,duerrp/pyexperiment,shaunstanislaus/pyexperiment,duerrp/pyexperiment,kinverarity1/pyexperiment | pyexperiment/utils/interactive.py | pyexperiment/utils/interactive.py | """Provides helper functions for interactive prompts
Written by Peter Duerr
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from pyexperiment import state
from pyexperiment import conf
def embed_interactive(**kwargs):
"""Embed an interactive terminal into a running python process
"""
    if 'state' not in kwargs:
        kwargs['state'] = state
    if 'conf' not in kwargs:
        kwargs['conf'] = conf
try:
import IPython
ipython_config = IPython.Config()
ipython_config.TerminalInteractiveShell.confirm_exit = False
if IPython.__version__ == '1.2.1':
IPython.embed(config=ipython_config,
banner1='',
user_ns=kwargs)
else:
IPython.embed(config=ipython_config,
banner1='',
local_ns=kwargs)
except ImportError:
import readline # pylint: disable=unused-variable
import code
code.InteractiveConsole(kwargs).interact()
| """Provides helper functions for interactive prompts
Written by Peter Duerr
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
def embed_interactive(**kwargs):
"""Embed an interactive terminal into a running python process
"""
try:
import IPython
ipython_config = IPython.Config()
ipython_config.TerminalInteractiveShell.confirm_exit = False
if IPython.__version__ == '1.2.1':
IPython.embed(config=ipython_config,
banner1='',
user_ns=kwargs)
else:
IPython.embed(config=ipython_config,
banner1='',
local_ns=kwargs)
except ImportError:
import readline # pylint: disable=unused-variable
import code
code.InteractiveConsole(kwargs).interact()
| mit | Python |
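With this change the embedded shell always receives `state` and `conf`; callers only pass whatever extra locals they want inspected, as in this hypothetical call site:

```python
# Hypothetical call site; `results` is a made-up local of interest.
results = [1, 2, 3]
embed_interactive(results=results)
# At the prompt, `state`, `conf` and `results` are all in scope.
```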
6cd34697334ddd8ada1daeee9a2c8b9522257487 | Remove unused function | jackfirth/pyramda | pyramda/iterable/for_each_test.py | pyramda/iterable/for_each_test.py | try:
# Python 3
from unittest import mock
except ImportError:
# Python 2
import mock
from .for_each import for_each
def test_for_each_nocurry_returns_the_original_iterable():
assert for_each(mock.MagicMock(), [1, 2, 3]) == [1, 2, 3]
def test_for_each_curry_returns_the_original_iterable():
assert for_each(mock.MagicMock())([1, 2, 3]) == [1, 2, 3]
def test_for_each_no_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
    assert for_each(m, [1, 2, 3]) == [1, 2, 3]
assert len(m.mock_calls) == 3
def test_for_each_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
    assert for_each(m)([1, 2, 3]) == [1, 2, 3]
assert len(m.mock_calls) == 3
|
try:
# Python 3
from unittest import mock
except ImportError:
# Python 2
import mock
from .for_each import for_each
def print_x_plus_5(x):
print(x + 5)
def test_for_each_nocurry_returns_the_original_iterable():
assert for_each(mock.MagicMock(), [1, 2, 3]) == [1, 2, 3]
def test_for_each_curry_returns_the_original_iterable():
assert for_each(mock.MagicMock())([1, 2, 3]) == [1, 2, 3]
def test_for_each_no_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
    assert for_each(m, [1, 2, 3]) == [1, 2, 3]
assert len(m.mock_calls) == 3
def test_for_each_curry_executed_function_for_each_item_in_the_iterable():
m = mock.MagicMock()
    assert for_each(m)([1, 2, 3]) == [1, 2, 3]
assert len(m.mock_calls) == 3
| mit | Python |
4f2b7e5601e9f241868f86743eacb0e432be7495 | fix settings of cache in UT | patochectp/navitia,kinnou02/navitia,pbougue/navitia,Tisseo/navitia,xlqian/navitia,xlqian/navitia,TeXitoi/navitia,VincentCATILLON/navitia,is06/navitia,kinnou02/navitia,lrocheWB/navitia,ballouche/navitia,xlqian/navitia,datanel/navitia,francois-vincent/navitia,CanalTP/navitia,kadhikari/navitia,is06/navitia,TeXitoi/navitia,thiphariel/navitia,thiphariel/navitia,francois-vincent/navitia,francois-vincent/navitia,fueghan/navitia,Tisseo/navitia,ballouche/navitia,fueghan/navitia,lrocheWB/navitia,djludo/navitia,datanel/navitia,patochectp/navitia,antoine-de/navitia,prhod/navitia,is06/navitia,Tisseo/navitia,stifoon/navitia,kinnou02/navitia,CanalTP/navitia,VincentCATILLON/navitia,patochectp/navitia,stifoon/navitia,prhod/navitia,TeXitoi/navitia,frodrigo/navitia,xlqian/navitia,kadhikari/navitia,datanel/navitia,francois-vincent/navitia,thiphariel/navitia,is06/navitia,frodrigo/navitia,datanel/navitia,CanalTP/navitia,stifoon/navitia,antoine-de/navitia,CanalTP/navitia,lrocheWB/navitia,stifoon/navitia,xlqian/navitia,Tisseo/navitia,VincentCATILLON/navitia,djludo/navitia,kadhikari/navitia,pbougue/navitia,antoine-de/navitia,frodrigo/navitia,TeXitoi/navitia,djludo/navitia,frodrigo/navitia,VincentCATILLON/navitia,kadhikari/navitia,djludo/navitia,antoine-de/navitia,CanalTP/navitia,pbougue/navitia,Tisseo/navitia,lrocheWB/navitia,patochectp/navitia,kinnou02/navitia,pbougue/navitia,ballouche/navitia,ballouche/navitia,prhod/navitia,fueghan/navitia,thiphariel/navitia,prhod/navitia,fueghan/navitia | source/jormungandr/tests/integration_tests_settings.py | source/jormungandr/tests/integration_tests_settings.py | # encoding: utf-8
START_MONITORING_THREAD = False
SAVE_STAT = True
# authentication disabled
PUBLIC = True
LOGGER = {
'version': 1,
'disable_existing_loggers': False,
'formatters':{
'default': {
'format': '[%(asctime)s] [%(levelname)5s] [%(process)5s] [%(name)10s] %(message)s',
},
},
'handlers': {
'default': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'default',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'INFO',
'propagate': True
},
}
}
CACHE_CONFIGURATION = {
'CACHE_TYPE': 'null'
}
| # encoding: utf-8
START_MONITORING_THREAD = False
SAVE_STAT = True
# authentication disabled
PUBLIC = True
LOGGER = {
'version': 1,
'disable_existing_loggers': False,
'formatters':{
'default': {
'format': '[%(asctime)s] [%(levelname)5s] [%(process)5s] [%(name)10s] %(message)s',
},
},
'handlers': {
'default': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'default',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'INFO',
'propagate': True
},
}
}
| agpl-3.0 | Python |
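The new `CACHE_TYPE: 'null'` makes caching a no-op during integration tests. For context, a Flask-Caching-style sketch of how such a config is typically consumed; this is an assumption about the mechanism, not jormungandr's actual wiring (newer Flask-Caching spells the backend `'NullCache'`):

```python
# Sketch: a 'null' cache stores nothing, so memoized functions are
# re-evaluated on every call, which is exactly what tests usually want.
from flask import Flask
from flask_caching import Cache

app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'null'})

@cache.memoize()
def lookup(key):
    return key.upper()
```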
26c4effd8741d2511bb0b3bd46cca12d37b0e01b | Add file magic | ceefour/opencog,printedheart/opencog,eddiemonroe/atomspace,yantrabuddhi/opencog,Selameab/atomspace,anitzkin/opencog,gavrieltal/opencog,virneo/opencog,inflector/opencog,MarcosPividori/atomspace,cosmoharrigan/atomspace,sanuj/opencog,gaapt/opencog,rohit12/atomspace,eddiemonroe/opencog,anitzkin/opencog,gaapt/opencog,tim777z/opencog,TheNameIsNigel/opencog,misgeatgit/opencog,rohit12/atomspace,ArvinPan/atomspace,rohit12/atomspace,rTreutlein/atomspace,MarcosPividori/atomspace,rodsol/opencog,ceefour/opencog,eddiemonroe/opencog,misgeatgit/opencog,rodsol/atomspace,williampma/opencog,sumitsourabh/opencog,Selameab/atomspace,shujingke/opencog,rohit12/opencog,yantrabuddhi/atomspace,sumitsourabh/opencog,UIKit0/atomspace,printedheart/opencog,AmeBel/opencog,jswiergo/atomspace,MarcosPividori/atomspace,Selameab/opencog,virneo/opencog,sanuj/opencog,Tiggels/opencog,TheNameIsNigel/opencog,sumitsourabh/opencog,shujingke/opencog,TheNameIsNigel/opencog,andre-senna/opencog,Allend575/opencog,ArvinPan/opencog,printedheart/atomspace,cosmoharrigan/atomspace,inflector/atomspace,prateeksaxena2809/opencog,yantrabuddhi/atomspace,jswiergo/atomspace,ArvinPan/opencog,sumitsourabh/opencog,ceefour/atomspace,kim135797531/opencog,andre-senna/opencog,rohit12/opencog,ArvinPan/atomspace,rodsol/atomspace,eddiemonroe/opencog,ruiting/opencog,inflector/opencog,williampma/opencog,Tiggels/opencog,williampma/opencog,gaapt/opencog,yantrabuddhi/atomspace,kinoc/opencog,tim777z/opencog,ArvinPan/opencog,iAMr00t/opencog,inflector/atomspace,cosmoharrigan/atomspace,sumitsourabh/opencog,anitzkin/opencog,printedheart/atomspace,tim777z/opencog,cosmoharrigan/opencog,Selameab/atomspace,shujingke/opencog,Allend575/opencog,virneo/opencog,ArvinPan/atomspace,UIKit0/atomspace,yantrabuddhi/opencog,ruiting/opencog,rodsol/atomspace,Allend575/opencog,AmeBel/opencog,misgeatgit/atomspace,yantrabuddhi/opencog,gavrieltal/opencog,andre-senna/opencog,iAMr00t/opencog,misgeatgit/atomspace,kinoc/opencog,cosmoharrigan/opencog,rohit12/opencog,prateeksaxena2809/opencog,misgeatgit/atomspace,rodsol/opencog,rTreutlein/atomspace,misgeatgit/opencog,Allend575/opencog,sanuj/opencog,shujingke/opencog,jlegendary/opencog,sumitsourabh/opencog,AmeBel/atomspace,sumitsourabh/opencog,gaapt/opencog,cosmoharrigan/opencog,UIKit0/atomspace,ruiting/opencog,ruiting/opencog,jlegendary/opencog,andre-senna/opencog,misgeatgit/atomspace,williampma/opencog,printedheart/opencog,ceefour/opencog,virneo/atomspace,Selameab/opencog,prateeksaxena2809/opencog,kim135797531/opencog,virneo/opencog,UIKit0/atomspace,Selameab/atomspace,kim135797531/opencog,eddiemonroe/opencog,shujingke/opencog,AmeBel/atomspace,AmeBel/atomspace,roselleebarle04/opencog,misgeatgit/opencog,tim777z/opencog,williampma/opencog,iAMr00t/opencog,printedheart/atomspace,kim135797531/opencog,gavrieltal/opencog,virneo/opencog,printedheart/opencog,inflector/atomspace,inflector/atomspace,rohit12/opencog,prateeksaxena2809/opencog,yantrabuddhi/opencog,virneo/atomspace,inflector/atomspace,cosmoharrigan/atomspace,williampma/atomspace,anitzkin/opencog,ceefour/opencog,kinoc/opencog,ceefour/atomspace,roselleebarle04/opencog,cosmoharrigan/opencog,Tiggels/opencog,virneo/atomspace,gaapt/opencog,cosmoharrigan/opencog,AmeBel/opencog,Tiggels/opencog,ArvinPan/opencog,rTreutlein/atomspace,ceefour/atomspace,yantrabuddhi/opencog,kim135797531/opencog,Allend575/opencog,Tiggels/opencog,ArvinPan/opencog,yantrabuddhi/atomspace,rohit12/opencog,roselleebarle04/opencog,ruiting/opencog,ArvinPan/atomspace,gaapt/ope
ncog,inflector/opencog,williampma/atomspace,rodsol/opencog,eddiemonroe/atomspace,inflector/opencog,yantrabuddhi/opencog,misgeatgit/atomspace,misgeatgit/opencog,inflector/opencog,williampma/atomspace,Allend575/opencog,kinoc/opencog,gavrieltal/opencog,ArvinPan/opencog,misgeatgit/opencog,sanuj/opencog,sanuj/opencog,roselleebarle04/opencog,virneo/opencog,printedheart/atomspace,gavrieltal/opencog,jlegendary/opencog,kinoc/opencog,rodsol/opencog,shujingke/opencog,gaapt/opencog,williampma/opencog,inflector/opencog,gavrieltal/opencog,AmeBel/atomspace,rohit12/atomspace,andre-senna/opencog,ceefour/opencog,ceefour/opencog,jlegendary/opencog,prateeksaxena2809/opencog,rodsol/opencog,TheNameIsNigel/opencog,printedheart/opencog,roselleebarle04/opencog,iAMr00t/opencog,jlegendary/opencog,eddiemonroe/atomspace,AmeBel/opencog,prateeksaxena2809/opencog,jlegendary/opencog,misgeatgit/opencog,inflector/opencog,kim135797531/opencog,TheNameIsNigel/opencog,rodsol/opencog,misgeatgit/opencog,eddiemonroe/atomspace,ruiting/opencog,AmeBel/atomspace,ceefour/atomspace,yantrabuddhi/opencog,AmeBel/opencog,anitzkin/opencog,jlegendary/opencog,inflector/opencog,TheNameIsNigel/opencog,printedheart/opencog,iAMr00t/opencog,ruiting/opencog,rohit12/opencog,AmeBel/opencog,AmeBel/opencog,sanuj/opencog,ceefour/opencog,jswiergo/atomspace,Selameab/opencog,shujingke/opencog,Tiggels/opencog,Selameab/opencog,rTreutlein/atomspace,eddiemonroe/opencog,iAMr00t/opencog,williampma/atomspace,jswiergo/atomspace,MarcosPividori/atomspace,tim777z/opencog,misgeatgit/opencog,andre-senna/opencog,roselleebarle04/opencog,kim135797531/opencog,anitzkin/opencog,eddiemonroe/opencog,Allend575/opencog,kinoc/opencog,kinoc/opencog,prateeksaxena2809/opencog,gavrieltal/opencog,yantrabuddhi/atomspace,virneo/atomspace,roselleebarle04/opencog,anitzkin/opencog,rodsol/atomspace,virneo/opencog,eddiemonroe/atomspace,Selameab/opencog,eddiemonroe/opencog,cosmoharrigan/opencog,rTreutlein/atomspace,tim777z/opencog,Selameab/opencog,andre-senna/opencog | examples/python/scheme_timer.py | examples/python/scheme_timer.py | #! /usr/bin/env python
"""
Checks the execution time of repeated calls to the Scheme API from Python
Runs an empty Scheme command NUMBER_OF_ITERATIONS times and displays the
total execution time
"""
__author__ = 'Cosmo Harrigan'
NUMBER_OF_ITERATIONS = 100
from opencog.atomspace import AtomSpace, TruthValue, types, get_type_name
from opencog.scheme_wrapper import load_scm, scheme_eval, scheme_eval_h, __init__
atomspace = AtomSpace()
__init__(atomspace)
data = ["opencog/atomspace/core_types.scm",
"opencog/scm/utilities.scm"]
for item in data:
load_scm(atomspace, item)
def test_operation():
for i in range(NUMBER_OF_ITERATIONS):
scheme_eval_h(atomspace, '()')
import timeit
elapsed = timeit.timeit("test_operation()",
setup="from __main__ import test_operation",
number=1)
print "{0} seconds elapsed performing {1} repeated calls = {2} calls / sec".\
format(elapsed, NUMBER_OF_ITERATIONS, NUMBER_OF_ITERATIONS / elapsed)
| """
Checks the execution time of repeated calls to the Scheme API from Python
Runs an empty Scheme command NUMBER_OF_ITERATIONS times and displays the
total execution time
"""
__author__ = 'Cosmo Harrigan'
NUMBER_OF_ITERATIONS = 100
from opencog.atomspace import AtomSpace, TruthValue, types, get_type_name
from opencog.scheme_wrapper import load_scm, scheme_eval, scheme_eval_h, __init__
atomspace = AtomSpace()
__init__(atomspace)
data = ["opencog/atomspace/core_types.scm",
"opencog/scm/utilities.scm"]
for item in data:
load_scm(atomspace, item)
def test_operation():
for i in range(NUMBER_OF_ITERATIONS):
scheme_eval_h(atomspace, '()')
import timeit
elapsed = timeit.timeit("test_operation()",
setup="from __main__ import test_operation",
number=1)
print "{0} seconds elapsed performing {1} repeated calls = {2} calls / sec".\
format(elapsed, NUMBER_OF_ITERATIONS, NUMBER_OF_ITERATIONS / elapsed)
| agpl-3.0 | Python |
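The benchmark above is the standard `timeit` pattern: a setup string importing the target, one timed run, then a calls-per-second figure. The same skeleton in isolation (Python 3 print shown; the record above targets Python 2):

```python
# Generic version of the timing harness used in scheme_timer.py.
import timeit

NUMBER_OF_ITERATIONS = 100

def test_operation():
    for _ in range(NUMBER_OF_ITERATIONS):
        sum(range(1000))  # stand-in for the real call under test

elapsed = timeit.timeit("test_operation()",
                        setup="from __main__ import test_operation",
                        number=1)
print("%f seconds, %f calls / sec"
      % (elapsed, NUMBER_OF_ITERATIONS / elapsed))
```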
9f1a4977e34dc01a0489655df826b63b84f7d3be | Use SunPy sample data for Solar Cycle example. | Alex-Ian-Hamilton/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy,dpshelio/sunpy,Alex-Ian-Hamilton/sunpy,dpshelio/sunpy | examples/solar_cycle_example.py | examples/solar_cycle_example.py | """
===============
The Solar Cycle
===============
This example shows the current and possible next solar cycle.
"""
import datetime
import matplotlib.pyplot as plt
import sunpy.lightcurve as lc
from sunpy.data.sample import NOAAINDICES_LIGHTCURVE, NOAAPREDICT_LIGHTCURVE
###############################################################################
# For this example we will use the SunPy sample data, if you want the current
# data, delete the argument to the ``create`` function. i.e.
# ``noaa = lc.NOAAIndicesLightCurve.create()``
noaa = lc.NOAAIndicesLightCurve.create(NOAAINDICES_LIGHTCURVE)
noaa_predict = lc.NOAAPredictIndicesLightCurve.create(NOAAPREDICT_LIGHTCURVE)
###############################################################################
# Next lets grab the data again to create a new data structure that we will
# shift by 12 years to simulate the next solar cycle. We will truncate the
# data to only plot what is necessary.
noaa2 = lc.NOAAIndicesLightCurve.create(NOAAINDICES_LIGHTCURVE)
noaa2.data = noaa2.data.shift(2, freq=datetime.timedelta(days=365*12))
noaa2 = noaa2.truncate('2021/04/01', '2030/01/01')
###############################################################################
# Finally lets plot everything together with some arbitrary range for the
# strength of the next solar cycle.
plt.plot(noaa.data.index, noaa.data['sunspot RI'], label='Sunspot Number')
plt.plot(noaa_predict.data.index, noaa_predict.data['sunspot'],
color='grey', label='Near-term Prediction')
plt.fill_between(noaa_predict.data.index, noaa_predict.data['sunspot low'],
noaa_predict.data['sunspot high'], alpha=0.3, color='grey')
plt.fill_between(noaa2.data.index, noaa2.data['sunspot RI smooth']*0.4,
noaa2.data['sunspot RI smooth']*1.3, alpha=0.3, color='grey',
label='Next Cycle Predict')
plt.ylim(0)
plt.text('2011-01-01', 120, 'Cycle 24', fontsize=16)
plt.text('2024-01-01', 120, 'Cycle 25', fontsize=16)
plt.ylabel('Sunspot Number')
plt.xlabel('Year')
plt.legend(loc=2, framealpha=0.5)
plt.show()
| """
===============
The Solar Cycle
===============
This example shows the current and possible next solar cycle.
"""
import datetime
import matplotlib.pyplot as plt
import sunpy.lightcurve as lc
###############################################################################
# Let's download the latest data from NOAA.
noaa = lc.NOAAIndicesLightCurve.create()
noaa_predict = lc.NOAAPredictIndicesLightCurve.create()
###############################################################################
# Next lets grab the data again to create a new data structure that we will
# shift by 12 years to simulate the next solar cycle. We will truncate the
# data to only plot what is necessary.
noaa2 = lc.NOAAIndicesLightCurve.create()
noaa2.data = noaa2.data.shift(2, freq=datetime.timedelta(days = 365*12))
noaa2 = noaa2.truncate('2021/04/01', '2030/01/01')
###############################################################################
# Finally lets plot everything together with some arbitrary range for the strength
# of the next solar cycle.
plt.plot(noaa.data.index, noaa.data['sunspot RI'], label='Sunspot Number')
plt.plot(noaa_predict.data.index,noaa_predict.data['sunspot'],color='grey', label='Near-term Prediction')
plt.fill_between(noaa_predict.data.index, noaa_predict.data['sunspot low'], noaa_predict.data['sunspot high'],
alpha = 0.3, color='grey')
plt.fill_between(noaa2.data.index, noaa2.data['sunspot RI smooth']*0.4, noaa2.data['sunspot RI smooth']*1.3,
alpha = 0.3, color='grey', label='Next Cycle Predict')
plt.ylim(0)
plt.text('2011-01-01', 120,'Cycle 24',fontsize=16)
plt.text('2024-01-01', 120,'Cycle 25',fontsize=16)
plt.ylabel('Sunspot Number')
plt.xlabel('Year')
plt.legend(loc=2, framealpha=0.5)
plt.show()
| bsd-2-clause | Python |
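The cycle simulation hinges on pandas index shifting: `shift(periods, freq=...)` moves timestamps by `periods * freq` while leaving values untouched. A toy illustration with synthetic data, not solar observations:

```python
# Synthetic series; mirrors the noaa2.data.shift(...) call above.
import datetime
import pandas as pd

s = pd.Series([10, 20, 30],
              index=pd.date_range("2000-01-01", periods=3, freq="365D"))
# Index moves forward by 2 x (12 x 365 days); values stay the same.
shifted = s.shift(2, freq=datetime.timedelta(days=365 * 12))
print(shifted.iloc[0], shifted.index[0])  # 10, roughly 24 years later
```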
ca57e29c15ad02dee3cdad0d2159cbe33c15d6e0 | fix expire cache | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/app_manager/signals.py | corehq/apps/app_manager/signals.py | from __future__ import absolute_import
from __future__ import unicode_literals
from django.dispatch.dispatcher import Signal
from corehq.apps.callcenter.app_parser import get_call_center_config_from_app
from corehq.apps.domain.models import Domain
from dimagi.utils.logging import notify_exception
def create_app_structure_repeat_records(sender, application, **kwargs):
from corehq.motech.repeaters.models import AppStructureRepeater
domain = application.domain
if domain:
repeaters = AppStructureRepeater.by_domain(domain)
for repeater in repeaters:
repeater.register(application)
def update_callcenter_config(sender, application, **kwargs):
if not application.copy_of:
return
try:
domain = Domain.get_by_name(application.domain)
cc_config = domain.call_center_config
if not cc_config or not (cc_config.fixtures_are_active() and cc_config.config_is_valid()):
return
app_config = get_call_center_config_from_app(application)
save = cc_config.update_from_app_config(app_config)
if save:
cc_config.save()
except Exception:
notify_exception(None, "Error updating CallCenter config for app build")
def expire_latest_enabled_build_profiles(sender, application, **kwargs):
from corehq.apps.app_manager.util import get_latest_enabled_build_for_profile
from corehq.apps.app_manager.util import get_enabled_build_profiles_for_version
if application.copy_of:
for build_profile_id in application.build_profiles:
get_latest_enabled_build_for_profile.clear(application.domain, build_profile_id)
get_enabled_build_profiles_for_version.clear(application.get_id, application.version)
app_post_save = Signal(providing_args=['application'])
app_post_save.connect(create_app_structure_repeat_records)
app_post_save.connect(update_callcenter_config)
app_post_save.connect(expire_latest_enabled_build_profiles)
app_post_release = Signal(providing_args=['application'])
| from __future__ import absolute_import
from __future__ import unicode_literals
from django.dispatch.dispatcher import Signal
from corehq.apps.callcenter.app_parser import get_call_center_config_from_app
from corehq.apps.domain.models import Domain
from dimagi.utils.logging import notify_exception
def create_app_structure_repeat_records(sender, application, **kwargs):
from corehq.motech.repeaters.models import AppStructureRepeater
domain = application.domain
if domain:
repeaters = AppStructureRepeater.by_domain(domain)
for repeater in repeaters:
repeater.register(application)
def update_callcenter_config(sender, application, **kwargs):
if not application.copy_of:
return
try:
domain = Domain.get_by_name(application.domain)
cc_config = domain.call_center_config
if not cc_config or not (cc_config.fixtures_are_active() and cc_config.config_is_valid()):
return
app_config = get_call_center_config_from_app(application)
save = cc_config.update_from_app_config(app_config)
if save:
cc_config.save()
except Exception:
notify_exception(None, "Error updating CallCenter config for app build")
def expire_latest_enabled_build_profiles(sender, application, **kwargs):
from corehq.apps.app_manager.util import get_latest_enabled_build_for_profile
from corehq.apps.app_manager.util import get_enabled_build_profiles_for_version
if application.copy_of:
for build_profile_id in application.build_profiles:
get_latest_enabled_build_for_profile.clear(application.domain, build_profile_id)
get_enabled_build_profiles_for_version(application.get_id, application.version)
app_post_save = Signal(providing_args=['application'])
app_post_save.connect(create_app_structure_repeat_records)
app_post_save.connect(update_callcenter_config)
app_post_save.connect(expire_latest_enabled_build_profiles)
app_post_release = Signal(providing_args=['application'])
| bsd-3-clause | Python |
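For orientation, the connect/send shape of the Django signals used above, reduced to a self-contained sketch (receiver and payload are made up; `providing_args` matches the older Django this module targets):

```python
# Minimal round trip mirroring app_post_save's usage.
from django.dispatch import Signal

demo_post_save = Signal(providing_args=['application'])

def log_save(sender, application, **kwargs):
    print('application saved:', application)

demo_post_save.connect(log_save)
demo_post_save.send(sender=None, application='demo-app')
```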
a7b9c9a120aebe270ea200f3be0b2d3468f911cf | Bump version | ckirby/django-modelqueryform | modelqueryform/__init__.py | modelqueryform/__init__.py | __version__ = "2.2"
| __version__ = "2.1"
| bsd-2-clause | Python |
4d40e9db4bd6b58787557e8d5547f69eb67c9b96 | Add additional coverage to author build list | wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes | tests/changes/api/test_author_build_index.py | tests/changes/api/test_author_build_index.py | from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email=self.default_user.email, name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
self.login(self.default_user)
path = '/api/0/authors/me/builds/'
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| from uuid import uuid4
from changes.config import db
from changes.models import Author
from changes.testutils import APITestCase
class AuthorBuildListTest(APITestCase):
def test_simple(self):
fake_author_id = uuid4()
self.create_build(self.project)
path = '/api/0/authors/{0}/builds/'.format(fake_author_id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 0
author = Author(email='[email protected]', name='Foo Bar')
db.session.add(author)
build = self.create_build(self.project, author=author)
path = '/api/0/authors/{0}/builds/'.format(author.id.hex)
resp = self.client.get(path)
assert resp.status_code == 200
data = self.unserialize(resp)
assert len(data) == 1
assert data[0]['id'] == build.id.hex
| apache-2.0 | Python |
03aebd7eff51be1847866d9920b8520cee72348f | fix failure in test_global_pinger_memo | jtrobec/pants,foursquare/pants,UnrememberMe/pants,cevaris/pants,15Dkatz/pants,lahosken/pants,baroquebobcat/pants,twitter/pants,benjyw/pants,foursquare/pants,baroquebobcat/pants,ericzundel/pants,dbentley/pants,dbentley/pants,ericzundel/pants,lahosken/pants,foursquare/pants,jsirois/pants,qma/pants,megaserg/pants,cevaris/pants,cevaris/pants,qma/pants,dbentley/pants,15Dkatz/pants,twitter/pants,peiyuwang/pants,cevaris/pants,peiyuwang/pants,15Dkatz/pants,pombredanne/pants,mateor/pants,landism/pants,fkorotkov/pants,tdyas/pants,jessrosenfield/pants,peiyuwang/pants,landism/pants,jtrobec/pants,tdyas/pants,UnrememberMe/pants,cevaris/pants,pombredanne/pants,megaserg/pants,benjyw/pants,lahosken/pants,jtrobec/pants,mateor/pants,pantsbuild/pants,dturner-tw/pants,jtrobec/pants,wisechengyi/pants,dturner-tw/pants,kwlzn/pants,dturner-tw/pants,peiyuwang/pants,dturner-tw/pants,fkorotkov/pants,peiyuwang/pants,twitter/pants,mateor/pants,ericzundel/pants,benjyw/pants,pantsbuild/pants,pantsbuild/pants,manasapte/pants,fkorotkov/pants,lahosken/pants,benjyw/pants,kwlzn/pants,peiyuwang/pants,fkorotkov/pants,gmalmquist/pants,UnrememberMe/pants,ity/pants,gmalmquist/pants,tdyas/pants,15Dkatz/pants,qma/pants,gmalmquist/pants,wisechengyi/pants,foursquare/pants,fkorotkov/pants,dbentley/pants,wisechengyi/pants,wisechengyi/pants,landism/pants,lahosken/pants,jessrosenfield/pants,mateor/pants,jessrosenfield/pants,manasapte/pants,tdyas/pants,lahosken/pants,ericzundel/pants,pantsbuild/pants,mateor/pants,mateor/pants,15Dkatz/pants,UnrememberMe/pants,twitter/pants,peiyuwang/pants,jsirois/pants,baroquebobcat/pants,tdyas/pants,kwlzn/pants,lahosken/pants,15Dkatz/pants,UnrememberMe/pants,cevaris/pants,ericzundel/pants,tdyas/pants,dturner-tw/pants,mateor/pants,jessrosenfield/pants,jessrosenfield/pants,twitter/pants,ericzundel/pants,wisechengyi/pants,dturner-tw/pants,lahosken/pants,baroquebobcat/pants,jtrobec/pants,jsirois/pants,ity/pants,baroquebobcat/pants,kwlzn/pants,foursquare/pants,dbentley/pants,qma/pants,qma/pants,gmalmquist/pants,fkorotkov/pants,dturner-tw/pants,tdyas/pants,jtrobec/pants,landism/pants,landism/pants,15Dkatz/pants,manasapte/pants,ity/pants,megaserg/pants,pombredanne/pants,pombredanne/pants,mateor/pants,qma/pants,pantsbuild/pants,jtrobec/pants,ity/pants,gmalmquist/pants,baroquebobcat/pants,manasapte/pants,manasapte/pants,foursquare/pants,benjyw/pants,benjyw/pants,wisechengyi/pants,pombredanne/pants,pombredanne/pants,twitter/pants,twitter/pants,tdyas/pants,peiyuwang/pants,landism/pants,pombredanne/pants,wisechengyi/pants,fkorotkov/pants,manasapte/pants,landism/pants,wisechengyi/pants,jessrosenfield/pants,megaserg/pants,UnrememberMe/pants,twitter/pants,megaserg/pants,dbentley/pants,qma/pants,benjyw/pants,pantsbuild/pants,kwlzn/pants,ericzundel/pants,cevaris/pants,ity/pants,landism/pants,UnrememberMe/pants,twitter/pants,jessrosenfield/pants,ity/pants,gmalmquist/pants,dbentley/pants,UnrememberMe/pants,ity/pants,kwlzn/pants,manasapte/pants,15Dkatz/pants,fkorotkov/pants,gmalmquist/pants,UnrememberMe/pants,baroquebobcat/pants,ericzundel/pants,pantsbuild/pants,baroquebobcat/pants,foursquare/pants,foursquare/pants,megaserg/pants,wisechengyi/pants,foursquare/pants,baroquebobcat/pants,kwlzn/pants,tdyas/pants,megaserg/pants | tests/python/pants_test/cache/test_pinger.py | tests/python/pants_test/cache/test_pinger.py | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import threading
import time
from six.moves import SimpleHTTPServer, socketserver
from pants.cache.pinger import Pinger
from pants_test.base_test import BaseTest
def get_delayed_handler(delay):
class DelayResponseHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_HEAD(self):
time.sleep(delay)
self.send_response(200)
self.end_headers()
return DelayResponseHandler
class TestPinger(BaseTest):
timeout_seconds = .6
slow_seconds = .05
fast_seconds = 0
def setup_delayed_server(self, delay):
server = socketserver.TCPServer(("", 0), get_delayed_handler(delay))
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
return server
def setUp(self):
timeout = self.setup_delayed_server(self.timeout_seconds)
slow = self.setup_delayed_server(self.slow_seconds)
fast = self.setup_delayed_server(self.fast_seconds)
self.servers = [timeout, slow, fast]
self.fast_netloc = 'localhost:{}'.format(fast.socket.getsockname()[1])
self.slow_netloc = 'localhost:{}'.format(slow.socket.getsockname()[1])
self.timeout_netloc = 'localhost:{}'.format(timeout.socket.getsockname()[1])
def test_pinger_times_correct(self):
test = Pinger(timeout=.5, tries=2)
netlocs = [self.fast_netloc, self.slow_netloc, self.timeout_netloc]
ping_results = dict(test.pings(netlocs))
self.assertLess(ping_results[self.fast_netloc], ping_results[self.slow_netloc])
self.assertEqual(ping_results[self.timeout_netloc], Pinger.UNREACHABLE)
def test_pinger_timeout_config(self):
test = Pinger(timeout=self.slow_seconds - .01, tries=2)
netlocs = [self.fast_netloc, self.slow_netloc]
ping_results = dict(test.pings(netlocs))
self.assertLess(ping_results[self.fast_netloc], 1)
self.assertEqual(ping_results[self.slow_netloc], Pinger.UNREACHABLE)
def test_global_pinger_memo(self):
fast_pinger = Pinger(timeout=self.slow_seconds - .01, tries=2)
slow_pinger = Pinger(timeout=self.timeout_seconds, tries=2)
self.assertEqual(fast_pinger.pings([self.slow_netloc])[0][1], Pinger.UNREACHABLE)
self.assertNotEqual(slow_pinger.pings([self.slow_netloc])[0][1], Pinger.UNREACHABLE)
def tearDown(self):
for server in self.servers:
server.shutdown()
| # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import threading
import time
from six.moves import SimpleHTTPServer, socketserver
from pants.cache.pinger import Pinger
from pants_test.base_test import BaseTest
def get_delayed_handler(delay):
class DelayResponseHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_HEAD(self):
time.sleep(delay)
self.send_response(200)
self.end_headers()
return DelayResponseHandler
class TestPinger(BaseTest):
timeout_seconds = .6
slow_seconds = .05
fast_seconds = 0
def setup_delayed_server(self, delay):
server = socketserver.TCPServer(("", 0), get_delayed_handler(delay))
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
return server
def setUp(self):
timeout = self.setup_delayed_server(self.timeout_seconds)
slow = self.setup_delayed_server(self.slow_seconds)
fast = self.setup_delayed_server(self.fast_seconds)
self.servers = [timeout, slow, fast]
self.fast_netloc = 'localhost:{}'.format(fast.socket.getsockname()[1])
self.slow_netloc = 'localhost:{}'.format(slow.socket.getsockname()[1])
self.timeout_netloc = 'localhost:{}'.format(timeout.socket.getsockname()[1])
def test_pinger_times_correct(self):
test = Pinger(timeout=.5, tries=2)
netlocs = [self.fast_netloc, self.slow_netloc, self.timeout_netloc]
ping_results = dict(test.pings(netlocs))
self.assertLess(ping_results[self.fast_netloc], ping_results[self.slow_netloc])
self.assertEqual(ping_results[self.timeout_netloc], Pinger.UNREACHABLE)
def test_pinger_timeout_config(self):
test = Pinger(timeout=self.slow_seconds - .01, tries=2)
netlocs = [self.fast_netloc, self.slow_netloc]
ping_results = dict(test.pings(netlocs))
self.assertLess(ping_results[self.fast_netloc], 1)
self.assertEqual(ping_results[self.slow_netloc], Pinger.UNREACHABLE)
def test_global_pinger_memo(self):
fast_pinger = Pinger(timeout=self.slow_seconds, tries=2)
slow_pinger = Pinger(timeout=self.timeout_seconds, tries=2)
self.assertEqual(fast_pinger.pings([self.slow_netloc])[0][1], Pinger.UNREACHABLE)
self.assertNotEqual(slow_pinger.pings([self.slow_netloc])[0][1], Pinger.UNREACHABLE)
def tearDown(self):
for server in self.servers:
server.shutdown()
| apache-2.0 | Python |
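The fixture above builds throwaway HTTP servers whose `HEAD` handler sleeps for a chosen delay, a handy trick for timeout tests. The core of it, stripped of the pants specifics (Python 3 module names; the original uses `six.moves`):

```python
# Minimal delayed-response server for exercising client timeouts.
import http.server
import socketserver
import threading
import time

def get_delayed_handler(delay):
    class Handler(http.server.SimpleHTTPRequestHandler):
        def do_HEAD(self):
            time.sleep(delay)
            self.send_response(200)
            self.end_headers()
    return Handler

server = socketserver.TCPServer(("", 0), get_delayed_handler(0.05))
threading.Thread(target=server.serve_forever, daemon=True).start()
print("listening on port", server.socket.getsockname()[1])
server.shutdown()
```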
932fccc77fb10ece61c3feeb47a28225216c7c0d | add two more authors for gemeinfrei_2021.py | the-it/WS_THEbotIT,the-it/WS_THEbotIT | service/ws_re/scanner/tasks/gemeinfrei_2021.py | service/ws_re/scanner/tasks/gemeinfrei_2021.py | import pywikibot
from service.ws_re.register.authors import Authors
from service.ws_re.scanner.tasks.base_task import ReScannerTask
from service.ws_re.template.article import Article
from tools.bots.pi import WikiLogger
class GF21Task(ReScannerTask):
def __init__(self, wiki: pywikibot.Site, logger: WikiLogger, debug: bool = True):
super().__init__(wiki, logger, debug)
self.authors = Authors()
def task(self):
for re_article in self.re_page:
if isinstance(re_article, Article):
authors = self.authors.get_author_by_mapping(re_article.author[0], re_article["BAND"].value)
for author in authors:
author_string = f"{author.first_name} {author.last_name}"
if author_string in ("Arthur Stein", "Hugo Willrich", "Edward Capps", "Kurt Witte",
"August Hug", "Max Radin", "Werner Schur", "Percy Neville Ure",
"Herbert Bannert", "Adolf Wilhelm", "Wilhelm Schmid"):
if re_article["KEINE_SCHÖPFUNGSHÖHE"].value:
re_article["TODESJAHR"].value = ""
re_article["KEINE_SCHÖPFUNGSHÖHE"].value = False
| import pywikibot
from service.ws_re.register.authors import Authors
from service.ws_re.scanner.tasks.base_task import ReScannerTask
from service.ws_re.template.article import Article
from tools.bots.pi import WikiLogger
class GF21Task(ReScannerTask):
def __init__(self, wiki: pywikibot.Site, logger: WikiLogger, debug: bool = True):
super().__init__(wiki, logger, debug)
self.authors = Authors()
def task(self):
for re_article in self.re_page:
if isinstance(re_article, Article):
authors = self.authors.get_author_by_mapping(re_article.author[0], re_article["BAND"].value)
for author in authors:
author_string = f"{author.first_name} {author.last_name}"
if author_string in ("Arthur Stein", "Hugo Willrich", "Edward Capps", "Kurt Witte",
"August Hug", "Max Radin", "Werner Schur", "Percy Neville Ure",
"Herbert Bannert"):
if re_article["KEINE_SCHÖPFUNGSHÖHE"].value:
re_article["TODESJAHR"].value = ""
re_article["KEINE_SCHÖPFUNGSHÖHE"].value = False
| mit | Python |
d56382a87068e7d43b3333b6ea3dc2fd0a80d929 | Use dict instead of list | dustalov/watset,dustalov/watset | 10-disambiguate.py | 10-disambiguate.py | #!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import csv
import gc
import sys
from collections import defaultdict
from sklearn.feature_extraction import DictVectorizer
from sklearn.metrics.pairwise import cosine_similarity as sim
from operator import itemgetter
from multiprocessing import Pool, cpu_count
wsi = defaultdict(lambda: dict())
v = DictVectorizer()
D = []
with open('03-cw-wsi.txt') as f:
reader = csv.reader(f, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word, sid, _, words = row
try:
words = {k: float(v) for record in words.split(' ') for k, v in (record.rsplit(':', 1),)}
except ValueError:
print('Skipping misformatted string: %s.' % words, file=sys.stderr)
continue
wsi[word][int(sid)] = words
D.append(words)
X = v.fit_transform(D)
def emit(word):
sneighbours = {}
for sid, words in wsi[word].items():
sense = '%s#%d' % (word, sid)
features = words.copy()
features.update({word: 1.})
vector = v.transform(features)
sneighbours[sense] = {}
for neighbour, weight in words.items():
neighbours = wsi[neighbour]
candidates = {nsid: sim(vector, v.transform(neighbours[nsid])).item(0) for nsid in neighbours}
_, cosine = max(candidates.items(), key=itemgetter(1))
if cosine > 0:
nsense = '%s#%d' % (neighbour, nsid)
sneighbours[sense][nsense] = weight
return sneighbours
with Pool(cpu_count()) as pool:
for sneighbours in pool.imap_unordered(emit, wsi):
for sense, neighbours in sneighbours.items():
for nsense, weight in neighbours.items():
print('%s\t%s\t%f' % (sense, nsense, weight))
| #!/usr/bin/env python
from signal import signal, SIGPIPE, SIG_DFL
signal(SIGPIPE, SIG_DFL)
import csv
import gc
import sys
from collections import defaultdict
from sklearn.feature_extraction import DictVectorizer
from sklearn.metrics.pairwise import cosine_similarity as sim
from operator import itemgetter
from multiprocessing import Pool, cpu_count
wsi = defaultdict(lambda: dict())
v = DictVectorizer()
D = []
with open('03-cw-wsi.txt') as f:
reader = csv.reader(f, delimiter='\t', quoting=csv.QUOTE_NONE)
for row in reader:
word, sid, _, words = row
try:
words = {k: float(v) for record in words.split(' ') for k, v in (record.rsplit(':', 1),)}
except ValueError:
print('Skipping misformatted string: %s.' % words, file=sys.stderr)
continue
wsi[word][int(sid)] = words
D.append(words)
X = v.fit_transform(D)
def emit(word):
sneighbours = {}
for sid, words in wsi[word].items():
sense = '%s#%d' % (word, sid)
features = words.copy()
features.update({word: 1.})
vector = v.transform(features)
sneighbours[sense] = {}
for neighbour, weight in words.items():
neighbours = wsi[neighbour]
candidates = [(nsid, sim(vector, v.transform(neighbours[nsid])).item(0)) for nsid in neighbours]
nsid, cosine = max(candidates, key=itemgetter(1))
if cosine > 0:
nsense = '%s#%d' % (neighbour, nsid)
sneighbours[sense][nsense] = weight
return sneighbours
with Pool(cpu_count()) as pool:
for sneighbours in pool.imap_unordered(emit, wsi):
for sense, neighbours in sneighbours.items():
for nsense, weight in neighbours.items():
print('%s\t%s\t%f' % (sense, nsense, weight))
| mit | Python |
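A note on the new_contents above: the refactor keeps the same argmax-over-cosines pattern, but in Python 3 a comprehension variable such as `nsid` does not leak out of the dict comprehension, so after `_, cosine = max(...)` the winning key that the later `'%s#%d' % (neighbour, nsid)` still references is undefined. A minimal sketch of the pattern with the key kept, using hypothetical scores in place of the cosine similarities:

from operator import itemgetter

neighbours = {0: 0.12, 1: 0.87, 2: 0.40}   # hypothetical sense-id -> cosine map

candidates = {nsid: neighbours[nsid] for nsid in neighbours}
best_nsid, best_cosine = max(candidates.items(), key=itemgetter(1))
if best_cosine > 0:
    print('%s#%d' % ('neighbour_word', best_nsid))   # keep the winning key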
72df22e62806e64e05b3bbb6eca0efd958c7c8bb | make btcnet_wrapper fail in a more instructive manner | c00w/bitHopper,c00w/bitHopper | btcnet_wrapper.py | btcnet_wrapper.py | from git import Repo
try:
repo = Repo("btcnet_info")
except:
repo = Repo.init("btcnet_info")
repo = repo.clone("git://github.com/c00w/btcnet_info.git")
origin = repo.create_remote('origin', 'git://github.com/c00w/btcnet_info.git')
origin = repo.remotes.origin
origin.fetch()
origin.pull('master')
try:
import btcnet_info
except:
print 'Install pythongit! See the readme for detailed instructions'
import os
os._exit(2)
| from git import Repo
try:
repo = Repo("btcnet_info")
except:
repo = Repo.init("btcnet_info")
repo = repo.clone("git://github.com/c00w/btcnet_info.git")
origin = repo.create_remote('origin', 'git://github.com/c00w/btcnet_info.git')
origin = repo.remotes.origin
origin.fetch()
origin.pull('master')
import btcnet_info
| mit | Python |
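The change above wraps the bare `import btcnet_info` so a missing dependency produces install guidance and a non-zero exit instead of a raw traceback. A minimal sketch of that fail-fast import guard, with a hypothetical module name and the except narrowed to ImportError:

import sys

try:
    import some_optional_dep   # hypothetical dependency
except ImportError:
    print('Install some_optional_dep! See the readme for detailed instructions')
    sys.exit(2)   # non-zero exit so callers and CI notice the failure

The original reaches for `os._exit(2)`, which terminates without interpreter cleanup; `sys.exit(2)` is the gentler default unless atexit handlers must be bypassed.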
1ac423e9127631eeb78868c47cf6fee12bf36a12 | Fix bug in handling get/post, should work now | acdha/django-test-utils,frac/django-test-utils,acdha/django-test-utils,ericholscher/django-test-utils,frac/django-test-utils,ericholscher/django-test-utils | test_utils/middleware/testmaker.py | test_utils/middleware/testmaker.py | from django.conf import settings
from django.test import Client
from django.test.utils import setup_test_environment
import logging, re
from django.utils.encoding import force_unicode
log = logging.getLogger('testmaker')
print "Loaded Testmaker Middleware"
#Remove at your own peril
debug = getattr(settings, 'DEBUG', False)
if not debug:
print "THIS CODE IS NOT MEANT FOR USE IN PRODUCTION"
#return
class TestMakerMiddleware(object):
def process_request(self, request):
if 'test_client_true' not in request.REQUEST:
log_request(request)
if request.method.lower() == "get":
setup_test_environment()
c = Client()
getdict = request.GET.copy()
getdict['test_client_true'] = 'yes' #avoid recursion
r = c.get(request.path, getdict)
log_status(request.path, r)
if r.context and r.status_code != 404:
con = get_user_context(r.context)
output_user_context(con)
def log_request(request):
log.info('\n\tdef %s(self): ' % 'test_path')
method = request.method.lower()
request_str = "'%s', {" % request.path
for dikt in request.REQUEST.dicts:
for arg in dikt:
request_str += "'%s': '%s'" % (arg, request.REQUEST[arg])
request_str += "}"
log.info("\t\tr = c.%s(%s)" % (method, request_str))
def log_status(path, request):
log.info("\t\tself.assertEqual(r.status_code, %s)" % request.status_code)
def get_user_context(context_list):
#Ugly Hack. Needs to be a better way
if isinstance(context_list, list):
context_list = context_list[-1] #Last context rendered
ret = context_list.dicts[-1]
if ret == {}:
ret = context_list.dicts[0]
return ret
else:
return context_list
def output_user_context(context):
for var in context:
try:
if not re.search("0x\w+", force_unicode(context[var])): #Avoid memory addy's which will change.
log.info(u'\t\tself.assertEqual(unicode(r.context[-1]["%s"]), u"%s")' % (var, unicode(context[var])))
except UnicodeDecodeError, e:
#FIXME: This might blow up on odd encoding
pass
| from django.conf import settings
from django.test import Client
from django.test.utils import setup_test_environment
import logging, re
from django.utils.encoding import force_unicode
log = logging.getLogger('testmaker')
print "Loaded Testmaker Middleware"
#Remove at your own peril
debug = getattr(settings, 'DEBUG', False)
if not debug:
print "THIS CODE IS NOT MEANT FOR USE IN PRODUCTION"
#return
class TestMakerMiddleware(object):
def process_request(self, request):
if 'test_client_true' not in request.REQUEST:
log_request(request)
if request.method.lower() == "get":
setup_test_environment()
c = Client()
getdict = request.GET.copy()
getdict['test_client_true'] = 'yes' #avoid recursion
r = c.get(request.path, getdict)
log_status(request.path, r)
if r.context:
con = get_user_context(r.context)
output_user_context(con)
def log_request(request):
log.info('\n\tdef %s(self): ' % 'test_path')
method = request.method.lower()
request_str = "'%s', {" % request.path
for dict in request.REQUEST.dicts:
for arg in dict:
request_str += "'%s': '%s', " % arg, request.REQUEST[arg]
request_str += "}"
log.info("\t\tr = c.%s(%s)" % (method, request_str))
def log_status(path, request):
log.info("\t\tself.assertEqual(r.status_code, %s)" % request.status_code)
def get_user_context(context_list):
#Ugly Hack. Needs to be a better way
if isinstance(context_list, list):
context_list = context_list[-1] #Last context rendered
ret = context_list.dicts[-1]
if ret == {}:
ret = context_list.dicts[0]
return ret
else:
return context_list
def output_user_context(context):
for var in context:
try:
if not re.search("0x\w+", force_unicode(context[var])): #Avoid memory addy's which will change.
log.info(u'\t\tself.assertEqual(unicode(r.context[-1]["%s"]), u"%s")' % (var, unicode(context[var])))
except Exception, e:
#FIXME: This might blow up on odd encoding or 404s.
pass
| mit | Python |
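The core fix in this diff is an operator-precedence bug: `%` binds tighter than the comma, so the old `request_str += "'%s': '%s', " % arg, request.REQUEST[arg]` evaluates `"'%s': '%s', " % arg` first, which raises TypeError (two placeholders, one value), and the comma would have built a tuple rather than a string in any case. With hypothetical values:

arg, value = 'q', '42'

# Broken shape: the % is applied before the comma builds a tuple.
#   "'%s': '%s', " % arg, value   ->  TypeError from the % step
fixed = "'%s': '%s'" % (arg, value)   # explicit tuple pairs both placeholders
print(fixed)   # 'q': '42'

The diff also renames the shadowing loop variable `dict` to `dikt`, narrows the bare `except Exception` to `UnicodeDecodeError`, and skips context assertions on 404 responses.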
e4a5dd51829df198a07232afc06afdff6089ae6c | fix wmt datatype checking (#1259) | facebookresearch/ParlAI,facebookresearch/ParlAI,facebookresearch/ParlAI,facebookresearch/ParlAI,facebookresearch/ParlAI | parlai/tasks/wmt/agents.py | parlai/tasks/wmt/agents.py | #!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from parlai.core.teachers import FbDialogTeacher
from .build import build
import copy
import os
def _path(task, opt, dt):
# Build the data if it doesn't exist.
build(opt)
return os.path.join(opt['datapath'], 'wmt',
'{task}_{type}.txt'.format(task=task, type=dt))
class EnDeTeacher(FbDialogTeacher):
def __init__(self, opt, shared=None):
opt = copy.deepcopy(opt)
self.task_name = 'en_de'
dt = opt['datatype'].split(':')[0]
opt['datafile'] = _path(self.task_name, opt, dt)
super().__init__(opt, shared)
class DefaultTeacher(EnDeTeacher):
pass
| #!/usr/bin/env python3
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from parlai.core.teachers import FbDialogTeacher
from .build import build
import copy
import os
def _path(task, opt, dt=''):
# Build the data if it doesn't exist.
build(opt)
if dt == '':
dt = opt['datatype'].split(':')[0]
return os.path.join(opt['datapath'], 'wmt',
'{task}_{type}.txt'.format(task=task, type=dt))
class EnDeTeacher(FbDialogTeacher):
def __init__(self, opt, shared=None):
opt = copy.deepcopy(opt)
task = opt.get('task', 'wmt:en_de')
self.task_name = task.split(':')[1] if ':' in task else 'en_de'
opt['datafile'] = _path(self.task_name, opt, opt['datatype'])
super().__init__(opt, shared)
class DefaultTeacher(EnDeTeacher):
pass
| mit | Python |
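The fix above stops threading the raw `opt['datatype']` into `_path` and always reduces it to the base split, since a datatype string can carry a suffix after a colon (both versions split on ':') while the data file is named by the base split alone. With hypothetical values:

datatype = 'train:stream'            # hypothetical suffixed datatype
dt = datatype.split(':')[0]          # -> 'train'
datafile = 'wmt/{task}_{type}.txt'.format(task='en_de', type=dt)
print(datafile)                      # wmt/en_de_train.txt

It also replaces the task-name parsing from `opt['task']` with a fixed 'en_de'.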
226b27ad6e66c7d512ce6cad300b7f96de5ccfa7 | Introduce cache feature to GoogleDrive base logic. | supistar/Botnyan | model/googledrive.py | model/googledrive.py | # -*- encoding:utf8 -*-
import os
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.discovery import build
from model.cache import Cache
class GoogleDrive(object):
@classmethod
def retrieve_content(cls, **kwargs):
document_id = kwargs.get('document_id')
export_type = kwargs.get('export_type')
if not document_id:
print("There is no documentID")
return None
if not export_type:
print("There is no exportType")
return None
# Check document cache exists
content = Cache().get(document_id)
if content:
return content
try:
private_key = os.environ['GOOGLE_PRIVATE_KEY']
if not private_key:
return None
credential_args = (
os.environ['GOOGLE_CLIENT_EMAIL'],
private_key,
'https://www.googleapis.com/auth/drive'
)
credential_kwargs = {
'sub': os.environ.get('GOOGLE_OWNER_EMAIL')
}
credentials = SignedJwtAssertionCredentials(*credential_args, **credential_kwargs)
http = httplib2.Http()
credentials.authorize(http)
service = build('drive', 'v2', http=http)
f = service.files().get(fileId=document_id).execute()
if 'exportLinks' in f and export_type in f['exportLinks']:
download = f['exportLinks'][export_type]
resp, content = service._http.request(download)
# Set document cache
Cache().set(document_id, content)
else:
content = '読み込みに失敗したにゃー'
except Exception as e:
content = '読み込みに失敗したにゃーー : ' + str(e) + ' / ' + str(e.message)
return content
| # -*- encoding:utf8 -*-
import os
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.discovery import build
class GoogleDrive(object):
@classmethod
def retrieve_content(cls, **kwargs):
document_id = kwargs.get('document_id')
export_type = kwargs.get('export_type')
if not document_id:
print("There is no documentID")
return None
if not export_type:
print("There is no exportType")
return None
try:
private_key = os.environ['GOOGLE_PRIVATE_KEY']
if not private_key:
return None
credential_args = (
os.environ['GOOGLE_CLIENT_EMAIL'],
private_key,
'https://www.googleapis.com/auth/drive'
)
credential_kwargs = {
'sub': os.environ.get('GOOGLE_OWNER_EMAIL')
}
credentials = SignedJwtAssertionCredentials(*credential_args, **credential_kwargs)
http = httplib2.Http()
credentials.authorize(http)
service = build('drive', 'v2', http=http)
f = service.files().get(fileId=document_id).execute()
if 'exportLinks' in f and export_type in f['exportLinks']:
download = f['exportLinks'][export_type]
resp, content = service._http.request(download)
else:
content = '読み込みに失敗したにゃー'
except Exception as e:
content = '読み込みに失敗したにゃーー : ' + str(e) + ' / ' + str(e.message)
return content
| mit | Python |
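The added `Cache().get(...)` / `Cache().set(...)` pair above is a cache-aside lookup: return on a hit, otherwise perform the Drive export once and store the result. A minimal sketch with a hypothetical in-memory cache standing in for `model.cache.Cache`:

class Cache(object):
    _store = {}                        # hypothetical in-memory backend

    def get(self, key):
        return self._store.get(key)

    def set(self, key, value):
        self._store[key] = value

def export_document(document_id):
    return 'exported body of %s' % document_id   # stands in for the Drive export

def retrieve_content(document_id):
    content = Cache().get(document_id)
    if content:                        # hit: skip the network round trip
        return content
    content = export_document(document_id)
    Cache().set(document_id, content)
    return content

Note the truthiness check: an empty export would be treated as a miss and re-fetched on every call.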
4c4499dcb86ae16a7d3822feab4390adca89d348 | Bump version to 0.12.1 | thombashi/pingparsing,thombashi/pingparsing | pingparsing/__version__.py | pingparsing/__version__.py | # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright {}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.12.1"
__maintainer__ = __author__
__email__ = "[email protected]"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright {}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.12.0"
__maintainer__ = __author__
__email__ = "[email protected]"
| mit | Python |
12995be9490bde60c92e6f962b748832c083fe45 | use API and HTTP HEAD instead | sammdot/circa | modules/subreddit.py | modules/subreddit.py | import re
import urllib.request as req
import urllib.error as err
class SubredditModule:
subre = re.compile(r"^(?:.* )?/r/([A-Za-z0-9][A-Za-z0-9_]{2,20})")
def __init__(self, circa):
self.circa = circa
self.events = {
"message": [self.findsub]
}
def findsub(self, fr, to, msg, m):
for sub in self.subre.findall(msg):
try:
r = req.Request("http://api.reddit.com/r/" + sub + ".json")
r.get_method = lambda: "HEAD"
req.urlopen(r)
self.circa.say(to, "http://www.reddit.com/r/" + sub)
except err.HTTPError as e:
pass
module = SubredditModule
| import re
import urllib.request as req
class SubredditModule:
subre = re.compile(r"^(?:.* )?/r/([A-Za-z0-9][A-Za-z0-9_]{2,20})")
def __init__(self, circa):
self.circa = circa
self.events = {
"message": [self.findsub]
}
def findsub(self, fr, to, msg, m):
for sub in self.subre.findall(msg):
url = "http://www.reddit.com/r/" + sub
try:
req.urlopen(url)
self.circa.say(to, url)
except:
pass
module = SubredditModule
| bsd-3-clause | Python |
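The rewrite above probes subreddit existence against the JSON API with an HTTP HEAD rather than downloading the full page. Before `urllib.request.Request` accepted a `method=` argument (Python 3.3), overriding `get_method` was the usual way to change the verb; the idiom in isolation:

import urllib.request as req
import urllib.error as err

r = req.Request('http://api.reddit.com/r/python.json')
r.get_method = lambda: 'HEAD'          # ask for headers only, no body
try:
    resp = req.urlopen(r)
    print(resp.getcode())              # 200 -> the subreddit exists
except err.HTTPError:
    pass                               # 404 and friends -> treat as missing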
c1e84bd196f28c35b032a609a3edb5f596216f71 | fix for document.iter | mylokin/mongoext | mongoext/document.py | mongoext/document.py | from __future__ import absolute_import
import mongoext.collection
import mongoext.scheme
import mongoext.exc
class MetaDocument(type):
def __new__(cls, name, bases, attrs):
fields = {}
for base in bases:
for name, obj in vars(base).iteritems():
if issubclass(type(obj), mongoext.scheme.Field):
fields[name] = obj
for name, obj in attrs.iteritems():
if issubclass(type(obj), mongoext.scheme.Field):
fields[name] = obj
attrs['__scheme__'] = mongoext.scheme.Scheme(fields)
return super(MetaDocument, cls).__new__(cls, name, bases, attrs)
class Document(object):
__metaclass__ = MetaDocument
__scheme__ = None
_id = mongoext.scheme.Field()
def __init__(self, **data):
for name, value in data.items():
if name not in self.__scheme__:
raise mongoext.exc.SchemeError(name)
setattr(self, name, value)
def __contains__(self, name):
return name in self.__scheme__
def __len__(self):
return len(self.__scheme__)
def __iter__(self):
for name in self.__scheme__:
yield name, getattr(self, name, None)
def __hash__(self):
return super(object, self).__hash__()
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self._id)
| from __future__ import absolute_import
import collections
import mongoext.collection
import mongoext.scheme
import mongoext.exc
class MetaDocument(type):
def __new__(cls, name, bases, attrs):
fields = {}
for base in bases:
for name, obj in vars(base).iteritems():
if issubclass(type(obj), mongoext.scheme.Field):
fields[name] = obj
for name, obj in attrs.iteritems():
if issubclass(type(obj), mongoext.scheme.Field):
fields[name] = obj
attrs['__scheme__'] = mongoext.scheme.Scheme(fields)
return super(MetaDocument, cls).__new__(cls, name, bases, attrs)
class Document(object):
__metaclass__ = MetaDocument
__scheme__ = None
_id = mongoext.scheme.Field()
def __init__(self, **data):
for name, value in data.items():
if name not in self.__scheme__:
raise mongoext.exc.SchemeError(name)
setattr(self, name, value)
def __contains__(self, name):
return name in self.__scheme__
def __len__(self):
return len(self.__scheme__)
def __iter__(self):
for name in self.__scheme__:
yield name, getattr(self, name)
def __hash__(self):
return super(object, self).__hash__()
def __repr__(self):
return '<{}: {}>'.format(type(self).__name__, self._id)
| mit | Python |
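The one-line fix above supplies a `None` default to `getattr` inside `__iter__`, so iterating a document whose scheme fields were never assigned yields `(name, None)` instead of raising AttributeError; the diff also drops the now-unused `import collections`. The fix reduced to its essence:

class Doc(object):
    _fields = ('title', 'body')        # hypothetical stand-in for the scheme

    def __iter__(self):
        for name in self._fields:
            yield name, getattr(self, name, None)   # default avoids AttributeError

d = Doc()
d.title = 'hello'
print(list(d))   # [('title', 'hello'), ('body', None)]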