repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
---|---|---|---|---|---|
kalefranz/auxlib | tests/test_path.py | 1 | 2148 | # # -*- coding: utf-8 -*-
# import logging
# from unittest import TestCase
#
# from auxlib import logz
# from auxlib.path import PackageFile, find_file_in_site_packages, open_package_file
#
# log = logging.getLogger(__name__)
#
#
# class PackageFileTests(TestCase):
#
# @classmethod
# def setUpClass(cls):
# logz.set_root_level(logging.INFO)
# logz.attach_stderr(logging.DEBUG)
# assert not logz.attach_stderr()
#
# @classmethod
# def tearDownClass(self):
# logz.detach_stderr()
# assert not logz.detach_stderr()
#
# def test_find_python_file_in_package(self):
# with PackageFile('path.py', 'auxlib') as fh:
# lines = fh.readlines()
# assert any(line.startswith(b'class PackageFile(object):') for line in lines)
#
# def test_find_python_file_in_package_subdirectory(self):
# with PackageFile('_vendor/five.py', 'auxlib') as fh:
# lines = fh.readlines()
# assert any(line.startswith(b'PY3 = sys.version_info[0] == 3') for line in lines)
#
# def test_package_resources_paths(self):
# with PackageFile('AES.py', 'Crypto.Cipher') as fh:
# lines = fh.readlines()
# assert any(line.startswith(b'class AESCipher') for line in lines)
#
# def test_package_resources_paths_subdirectory(self):
# with PackageFile('Cipher/AES.py', 'Crypto') as fh:
# lines = fh.readlines()
# assert any(line.startswith(b'class AESCipher') for line in lines)
#
# def test_site_packages_paths(self):
# with open(find_file_in_site_packages('AES.py', 'Crypto.Cipher')) as fh:
# lines = fh.readlines()
# assert any(line.startswith('class AESCipher') for line in lines)
#
# def test_site_packages_paths_subdirectory(self):
# with open(find_file_in_site_packages('Cipher/AES.py', 'Crypto')) as fh:
# lines = fh.readlines()
# assert any(line.startswith('class AESCipher') for line in lines)
#
# def test_no_file_found(self):
# self.assertRaises(IOError, open_package_file, 'not-a-file.txt', 'auxlib')
| isc |
cwelton/incubator-hawq | tools/bin/lib/gpgetconfig.py | 12 | 3572 | #!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
gpgetconfig -- obtain gp_configuration
Usage: gpgetconfig [-f] [-u user] -d master_data_directory
-f : if necessary, force start up and shutdown of DB to obtain configuration
Exit: 0 - no error
1 - misc error
2 - unable to connect to database
'''
import os, sys
# Force a local (Unix-socket) connection in utility mode against template1.
# These must be exported before any psql/gpstart/gpstop child is spawned
# via os.popen further down.
os.putenv('PGHOST', '')
os.putenv("PGOPTIONS", '-c gp_session_role=utility')
os.putenv('PGDATABASE', 'template1')


class __globals__:
    # Parsed command-line switches, keyed by switch string ('-u', '-d', '-f').
    opt = {}
    for o in 'ud': opt['-'+o] = ''      # value-taking switches default to ''
    for o in 'f': opt['-'+o] = False    # boolean flags default to False


# Single shared holder for this script's global state.
GV = __globals__()
############
def usage(exitarg):
    # Print the module docstring (the usage text) and terminate with exitarg.
    # NOTE(review): parseCommandLine passes a *string* here in one case;
    # sys.exit(str) prints the string to stderr and exits with status 1,
    # which still matches the "1 - misc error" contract in the docstring.
    print __doc__
    sys.exit(exitarg)
############
def parseCommandLine():
    """Parse sys.argv into GV.opt; print usage and exit on any error."""
    import getopt
    try:
        (options, args) = getopt.getopt(sys.argv[1:], '?fu:d:')
    except Exception, e:
        # getopt.GetoptError for unknown/incomplete switches.
        sys.stderr.write('Error: %s\n' % str(e))
        usage(1)
    for (switch, val) in options:
        if switch == '-?': usage(0)
        elif switch[1] in 'ud': GV.opt[switch] = val   # -u user, -d datadir
        elif switch[1] in 'f': GV.opt[switch] = True   # -f force bounce
    if not GV.opt['-d']:
        # -d is mandatory; usage() forwards this string to sys.exit().
        usage('Error: missing -d param')
############
def setPort():
    """Read 'port' from <master_data_dir>/postgresql.conf and export PGPORT.

    Exits with status 1 if the port cannot be determined.
    """
    port = 0
    f = None
    try:
        f = open(os.path.join(GV.opt['-d'], 'postgresql.conf'))
        lines = f.readlines()
        # Split every line on '=' and keep only 'port = NNNN' style entries.
        lines = map(lambda x: x.strip().split('='), lines)
        lines = filter(lambda x: len(x) and x[0] == 'port', lines)
        # First whitespace-separated token after '=' (drops trailing comments).
        port = int( (lines[0][1].split()) [0])
    except Exception, e:
        # Any I/O or parse failure leaves port == 0; reported uniformly below.
        pass
    finally:
        if f: f.close()
    if port == 0:
        sys.stderr.write('Error: unable to read port number from %s/postgresql.conf' %
                         GV.opt['-d'])
        sys.exit(1)
    os.putenv('PGPORT', str(port))
############
def getConfiguration():
    """Query gp_segment_configuration via psql.

    Returns (rc, out): rc is the os.popen close status (None on success),
    out is the raw psql output, one string per segment row.
    """
    # NOTE: the backslash-newline inside the triple-quoted string is a line
    # continuation, producing the single column name 'fselocation' — the
    # continuation line must stay unindented.
    CMD = """psql -At -q -c "select content, preferred_role='p' as definedprimary, dbid, role = 'p' as isprimary, 't' as valid, hostname, port, fse\
location as datadir from gp_segment_configuration join pg_filespace_entry on (dbid = fsedbid) where fsefsoid = 3052" 2> /dev/null"""
    p = os.popen(CMD)
    out = p.readlines()
    rc = p.close()
    return (rc, out)
############
def main():
    """Entry point: export connection env vars, fetch the configuration
    (optionally bouncing the master with -f), and print one
    '[gpgetconfig] <row>' line per segment."""
    parseCommandLine()
    if GV.opt['-u']:
        os.putenv('PGUSER', GV.opt['-u'])
    os.putenv('MASTER_DATA_DIRECTORY', GV.opt['-d'])
    setPort()
    (rc, out) = getConfiguration()
    if rc:
        # psql failed; without -f we give up with exit code 2.
        if not GV.opt['-f']:
            sys.stderr.write('Error: psql unable to connect\n')
            sys.exit(2)
        # -f given: start the master in maintenance (master-only) mode,
        # retry the query, then stop the master again.
        os.putenv('GPSTART_INTERNAL_MASTER_ONLY', '1')
        p = os.popen("gpstart -m")
        p.readlines()
        p.close()
        (rc, out) = getConfiguration()
        p = os.popen("gpstop -m")
        p.readlines()
        p.close()
        if rc:
            sys.stderr.write('Error: psql still unable to connect after bouncing\n')
            sys.exit(1)
    # Drop empty lines and emit each row tagged for the caller to parse.
    out = filter(lambda x: x, map(lambda x: x.strip(), out))
    for line in out:
        print '[gpgetconfig]',line
if __name__ == '__main__':
    main()
| apache-2.0 |
marco-lancini/Showcase | django/db/models/loading.py | 308 | 8745 | "Utilities for loading models and the modules that contain them."
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.datastructures import SortedDict
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
import imp
import sys
import os
import threading
__all__ = ('get_apps', 'get_app', 'get_models', 'get_model', 'register_models',
'load_app', 'app_cache_ready')
class AppCache(object):
    """
    A cache that stores installed applications and their models. Used to
    provide reverse-relations and for app introspection (e.g. admin).
    """
    # Use the Borg pattern to share state between all instances. Details at
    # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66531.
    __shared_state = dict(
        # Keys of app_store are the model modules for each application.
        app_store = SortedDict(),
        # Mapping of app_labels to a dictionary of model names to model code.
        app_models = SortedDict(),
        # Mapping of app_labels to errors raised when trying to import the app.
        app_errors = {},
        # -- Everything below here is only used when populating the cache --
        loaded = False,
        handled = {},
        postponed = [],
        nesting_level = 0,
        write_lock = threading.RLock(),
        _get_models_cache = {},
    )

    def __init__(self):
        # Borg: every instance shares the same __dict__, hence the same cache.
        self.__dict__ = self.__shared_state

    def _populate(self):
        """
        Fill in all the cache information. This method is threadsafe, in the
        sense that every caller will see the same state upon return, and if the
        cache is already initialised, it does no work.
        """
        if self.loaded:
            return
        self.write_lock.acquire()
        try:
            # Re-check under the lock: another thread may have finished
            # populating while we were waiting (double-checked locking).
            if self.loaded:
                return
            for app_name in settings.INSTALLED_APPS:
                if app_name in self.handled:
                    continue
                self.load_app(app_name, True)
            if not self.nesting_level:
                # Only the outermost call retries postponed apps and marks
                # the cache as fully loaded.
                for app_name in self.postponed:
                    self.load_app(app_name)
                self.loaded = True
        finally:
            self.write_lock.release()

    def load_app(self, app_name, can_postpone=False):
        """
        Loads the app with the provided fully qualified name, and returns the
        model module.
        """
        self.handled[app_name] = None
        self.nesting_level += 1
        app_module = import_module(app_name)
        try:
            models = import_module('.models', app_name)
        except ImportError:
            self.nesting_level -= 1
            # If the app doesn't have a models module, we can just ignore the
            # ImportError and return no models for it.
            if not module_has_submodule(app_module, 'models'):
                return None
            # But if the app does have a models module, we need to figure out
            # whether to suppress or propagate the error. If can_postpone is
            # True then it may be that the package is still being imported by
            # Python and the models module isn't available yet. So we add the
            # app to the postponed list and we'll try it again after all the
            # recursion has finished (in populate). If can_postpone is False
            # then it's time to raise the ImportError.
            else:
                if can_postpone:
                    self.postponed.append(app_name)
                    return None
                else:
                    raise
        self.nesting_level -= 1
        if models not in self.app_store:
            self.app_store[models] = len(self.app_store)
        return models

    def app_cache_ready(self):
        """
        Returns true if the model cache is fully populated.
        Useful for code that wants to cache the results of get_models() for
        themselves once it is safe to do so.
        """
        return self.loaded

    def get_apps(self):
        "Returns a list of all installed modules that contain models."
        self._populate()
        # Ensure the returned list is always in the same order (with new apps
        # added at the end). This avoids unstable ordering on the admin app
        # list page, for example.
        apps = [(v, k) for k, v in self.app_store.items()]
        apps.sort()
        return [elt[1] for elt in apps]

    def get_app(self, app_label, emptyOK=False):
        """
        Returns the module containing the models for the given app_label. If
        the app has no models in it and 'emptyOK' is True, returns None.

        Raises ImproperlyConfigured when no installed app matches app_label.
        """
        self._populate()
        self.write_lock.acquire()
        try:
            for app_name in settings.INSTALLED_APPS:
                if app_label == app_name.split('.')[-1]:
                    mod = self.load_app(app_name, False)
                    if mod is None:
                        if emptyOK:
                            return None
                    else:
                        return mod
            raise ImproperlyConfigured("App with label %s could not be found" % app_label)
        finally:
            self.write_lock.release()

    def get_app_errors(self):
        "Returns the map of known problems with the INSTALLED_APPS."
        self._populate()
        return self.app_errors

    def get_models(self, app_mod=None, include_auto_created=False, include_deferred=False):
        """
        Given a module containing models, returns a list of the models.
        Otherwise returns a list of all installed models.
        By default, auto-created models (i.e., m2m models without an
        explicit intermediate table) are not included. However, if you
        specify include_auto_created=True, they will be.
        By default, models created to satisfy deferred attribute
        queries are *not* included in the list of models. However, if
        you specify include_deferred, they will be.
        """
        # Results are memoised per (module, flags) triple; the cache is
        # invalidated by register_models().
        cache_key = (app_mod, include_auto_created, include_deferred)
        try:
            return self._get_models_cache[cache_key]
        except KeyError:
            pass
        self._populate()
        if app_mod:
            app_list = [self.app_models.get(app_mod.__name__.split('.')[-2], SortedDict())]
        else:
            app_list = self.app_models.itervalues()
        model_list = []
        for app in app_list:
            model_list.extend(
                model for model in app.values()
                if ((not model._deferred or include_deferred)
                    and (not model._meta.auto_created or include_auto_created))
            )
        self._get_models_cache[cache_key] = model_list
        return model_list

    def get_model(self, app_label, model_name, seed_cache=True):
        """
        Returns the model matching the given app_label and case-insensitive
        model_name.
        Returns None if no model is found.
        """
        if seed_cache:
            self._populate()
        return self.app_models.get(app_label, SortedDict()).get(model_name.lower())

    def register_models(self, app_label, *models):
        """
        Register a set of models as belonging to an app.
        """
        for model in models:
            # Store as 'name: model' pair in a dictionary
            # in the app_models dictionary
            model_name = model._meta.object_name.lower()
            model_dict = self.app_models.setdefault(app_label, SortedDict())
            if model_name in model_dict:
                # The same model may be imported via different paths (e.g.
                # appname.models and project.appname.models). We use the source
                # filename as a means to detect identity.
                fname1 = os.path.abspath(sys.modules[model.__module__].__file__)
                fname2 = os.path.abspath(sys.modules[model_dict[model_name].__module__].__file__)
                # Since the filename extension could be .py the first time and
                # .pyc or .pyo the second time, ignore the extension when
                # comparing.
                if os.path.splitext(fname1)[0] == os.path.splitext(fname2)[0]:
                    continue
            model_dict[model_name] = model
        self._get_models_cache.clear()
# The shared cache instance (Borg state: any AppCache() sees the same data).
cache = AppCache()

# These methods were always module level, so are kept that way for backwards
# compatibility.
get_apps = cache.get_apps
get_app = cache.get_app
get_app_errors = cache.get_app_errors
get_models = cache.get_models
get_model = cache.get_model
register_models = cache.register_models
load_app = cache.load_app
app_cache_ready = cache.app_cache_ready
| mit |
AMOSus/amos-ss16-proj6 | DataProcessing/test_dataProcessing.py | 1 | 3169 | #!/usr/bin/env python
# This file is part of Rogue Vision.
#
# Copyright (C) 2016 Daniel Reischl, Rene Rathmann, Peter Tan,
# Tobias Dorsch, Shefali Shukla, Vignesh Govindarajulu,
# Aleksander Penew, Abhinav Puri
#
# Rogue Vision is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rogue Vision is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Rogue Vision. If not, see <http://www.gnu.org/licenses/>.
# This files includes all tests for dataProcessing
import unittest
import os
import dataProcessingFunctions
# This class tests if all necessary folders exist
# This class tests if all necessary folders exist
class testFolderExistence(unittest.TestCase):
    """Each test asserts that one directory required by the
    data-processing pipeline exists under /srv/DataProcessing."""

    def test_FolderInitialData(self):
        # Folder InitialData
        self.assertTrue(os.path.isdir("/srv/DataProcessing/InitialData"))

    def test_FolderInitialDataArchive(self):
        # Folder InitialDataArchive
        self.assertTrue(os.path.isdir("/srv/DataProcessing/InitialDataArchive"))

    def test_FolderCarrierData(self):
        # Folder CarrierData
        self.assertTrue(os.path.isdir("/srv/DataProcessing/CarrierData"))

    def test_FolderCarrierDataArchive(self):
        # Folder CarrierDataArchive
        self.assertTrue(os.path.isdir('/srv/DataProcessing/CarrierDataArchive'))
# Checks if all files are existing
# Checks if all files are existing
class testFileExistence(unittest.TestCase):
    """Each test asserts that one script/config file of the
    data-processing pipeline exists under /srv/DataProcessing."""

    def test_CompressIntitialData(self):
        # compressInitialData.py
        self.assertTrue(os.path.exists('/srv/DataProcessing/compressInitialData.py'))

    def test_WriteDataToDatabase(self):
        # writeCarrierDataToDataBase.py
        self.assertTrue(os.path.exists('/srv/DataProcessing/writeCarrierDataToDataBase.py'))

    def test_configFile(self):
        # settings.cfg
        self.assertTrue(os.path.exists('/srv/DataProcessing/settings.cfg'))

    def test_dataProcessingFunctions(self):
        # dataProcessingFunctions.py
        self.assertTrue(os.path.exists('/srv/DataProcessing/dataProcessingFunctions.py'))
class testRunningFile(unittest.TestCase):
    """Tests creation and deletion of the Running.txt marker file."""

    def test_CreationOfRunningFile(self):
        # createRunningFile() must leave Running.txt behind.
        dataProcessingFunctions.createRunningFile()
        self.assertTrue(os.path.exists('/srv/DataProcessing/Running.txt'))

    def test_DeleteOfRunningFile(self):
        # deleteRunningFile() must remove Running.txt again.
        dataProcessingFunctions.deleteRunningFile()
        self.assertFalse(os.path.exists('/srv/DataProcessing/Running.txt'))


if __name__ == '__main__':
    unittest.main()
1844144/django-blog-zinnia | zinnia/migrations/0009_change_mptt_field.py | 4 | 8003 | from south.db import db
from south.v2 import SchemaMigration
from zinnia.migrations import user_name
from zinnia.migrations import user_table
from zinnia.migrations import user_orm_label
from zinnia.migrations import user_model_label
class Migration(SchemaMigration):
    """South schema migration 0009: change Category.parent from a plain
    ForeignKey to an mptt TreeForeignKey (and back on rollback)."""

    def forwards(self, orm):
        # Changing field 'Category.parent'
        db.alter_column('zinnia_category', 'parent_id', self.gf('mptt.fields.TreeForeignKey')(null=True, to=orm['zinnia.Category']))

    def backwards(self, orm):
        # Changing field 'Category.parent'
        db.alter_column('zinnia_category', 'parent_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['zinnia.Category']))

    # Frozen ORM definitions used by South to build the `orm` object passed
    # to forwards()/backwards(). Auto-generated data: do not hand-edit.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        user_model_label: {
            'Meta': {'object_name': user_name, 'db_table': "'%s'" % user_table},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 5, 11, 10, 16, 27, 936575)'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 5, 11, 10, 16, 27, 936424)'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'sites.site': {
            'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
            'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'zinnia.category': {
            'Meta': {'ordering': "['title']", 'object_name': 'Category'},
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['zinnia.Category']"}),
            'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        },
        'zinnia.entry': {
            'Meta': {'ordering': "['-creation_date']", 'object_name': 'Entry'},
            'authors': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'entries'", 'blank': 'True', 'to': "orm['%s']" % user_orm_label}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'entries'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['zinnia.Category']"}),
            'comment_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'content': ('django.db.models.fields.TextField', [], {}),
            'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'end_publication': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2042, 3, 15, 0, 0)'}),
            'excerpt': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'last_update': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'pingback_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'related': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'related_rel_+'", 'null': 'True', 'to': "orm['zinnia.Entry']"}),
            'sites': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'entries'", 'symmetrical': 'False', 'to': "orm['sites.Site']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
            'start_publication': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'tags': ('tagging.fields.TagField', [], {}),
            'template': ('django.db.models.fields.CharField', [], {'default': "'entry_detail.html'", 'max_length': '250'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }

    complete_apps = ['zinnia']
| bsd-3-clause |
Lezval/horizon | django-openstack/django_openstack/tests/view_tests/dash/security_groups_tests.py | 5 | 13975 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2011 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import http
from django.contrib import messages
from django.core.urlresolvers import reverse
from django_openstack import api
from django_openstack.tests.view_tests import base
from glance.common import exception as glance_exception
from openstackx.api import exceptions as api_exceptions
from novaclient import exceptions as novaclient_exceptions
from mox import IgnoreArg, IsA
class SecurityGroupsViewTests(base.BaseViewTests):
def setUp(self):
super(SecurityGroupsViewTests, self).setUp()
security_group = self.mox.CreateMock(api.SecurityGroup)
security_group.name = 'default'
self.security_groups = (security_group,)
def test_index(self):
self.mox.StubOutWithMock(api, 'security_group_list')
api.security_group_list(IsA(http.HttpRequest)).\
AndReturn(self.security_groups)
self.mox.ReplayAll()
res = self.client.get(reverse('dash_security_groups',
args=[self.TEST_TENANT]))
self.assertTemplateUsed(res,
'django_openstack/dash/security_groups/index.html')
self.assertItemsEqual(res.context['security_groups'],
self.security_groups)
self.mox.VerifyAll()
def test_index_exception(self):
exception = novaclient_exceptions.ClientException('ClientException',
message='ClientException')
self.mox.StubOutWithMock(api, 'security_group_list')
api.security_group_list(IsA(http.HttpRequest)).AndRaise(exception)
self.mox.StubOutWithMock(messages, 'error')
messages.error(IsA(http.HttpRequest), IsA(basestring))
self.mox.ReplayAll()
res = self.client.get(reverse('dash_security_groups',
args=[self.TEST_TENANT]))
self.assertTemplateUsed(res,
'django_openstack/dash/security_groups/index.html')
self.assertEqual(len(res.context['security_groups']), 0)
self.mox.VerifyAll()
def test_create_security_groups_get(self):
res = self.client.get(reverse('dash_security_groups_create',
args=[self.TEST_TENANT]))
self.assertTemplateUsed(res,
'django_openstack/dash/security_groups/create.html')
def test_create_security_groups_post(self):
SECGROUP_NAME = 'fakegroup'
SECGROUP_DESC = 'fakegroup_desc'
new_group = self.mox.CreateMock(api.SecurityGroup)
new_group.name = SECGROUP_NAME
formData = {'method': 'CreateGroup',
'tenant_id': self.TEST_TENANT,
'name': SECGROUP_NAME,
'description': SECGROUP_DESC,
}
self.mox.StubOutWithMock(api, 'security_group_create')
api.security_group_create(IsA(http.HttpRequest),
SECGROUP_NAME, SECGROUP_DESC).AndReturn(new_group)
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups_create',
args=[self.TEST_TENANT]),
formData)
self.assertRedirectsNoFollow(res, reverse('dash_security_groups',
args=[self.TEST_TENANT]))
self.mox.VerifyAll()
def test_create_security_groups_post_exception(self):
SECGROUP_NAME = 'fakegroup'
SECGROUP_DESC = 'fakegroup_desc'
exception = novaclient_exceptions.ClientException('ClientException',
message='ClientException')
formData = {'method': 'CreateGroup',
'tenant_id': self.TEST_TENANT,
'name': SECGROUP_NAME,
'description': SECGROUP_DESC,
}
self.mox.StubOutWithMock(api, 'security_group_create')
api.security_group_create(IsA(http.HttpRequest),
SECGROUP_NAME, SECGROUP_DESC).AndRaise(exception)
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups_create',
args=[self.TEST_TENANT]),
formData)
self.assertTemplateUsed(res,
'django_openstack/dash/security_groups/create.html')
self.mox.VerifyAll()
def test_edit_rules_get(self):
SECGROUP_ID = '1'
self.mox.StubOutWithMock(api, 'security_group_get')
api.security_group_get(IsA(http.HttpRequest), SECGROUP_ID).AndReturn(
self.security_groups[0])
self.mox.ReplayAll()
res = self.client.get(reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]))
self.assertTemplateUsed(res,
'django_openstack/dash/security_groups/edit_rules.html')
self.assertItemsEqual(res.context['security_group'].name,
self.security_groups[0].name)
self.mox.VerifyAll()
def test_edit_rules_get_exception(self):
SECGROUP_ID = '1'
exception = novaclient_exceptions.ClientException('ClientException',
message='ClientException')
self.mox.StubOutWithMock(api, 'security_group_get')
api.security_group_get(IsA(http.HttpRequest), SECGROUP_ID).AndRaise(
exception)
self.mox.ReplayAll()
res = self.client.get(reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]))
self.assertRedirectsNoFollow(res, reverse('dash_security_groups',
args=[self.TEST_TENANT]))
self.mox.VerifyAll()
def test_edit_rules_add_rule(self):
SECGROUP_ID = '1'
RULE_ID = '1'
FROM_PORT = '-1'
TO_PORT = '-1'
IP_PROTOCOL = 'icmp'
CIDR = '0.0.0.0/0'
new_rule = self.mox.CreateMock(api.SecurityGroup)
new_rule.from_port = FROM_PORT
new_rule.to_port = TO_PORT
new_rule.ip_protocol = IP_PROTOCOL
new_rule.cidr = CIDR
new_rule.security_group_id = SECGROUP_ID
new_rule.id = RULE_ID
formData = {'method': 'AddRule',
'tenant_id': self.TEST_TENANT,
'security_group_id': SECGROUP_ID,
'from_port': FROM_PORT,
'to_port': TO_PORT,
'ip_protocol': IP_PROTOCOL,
'cidr': CIDR}
self.mox.StubOutWithMock(api, 'security_group_rule_create')
api.security_group_rule_create(IsA(http.HttpRequest),
SECGROUP_ID, IP_PROTOCOL, FROM_PORT, TO_PORT, CIDR)\
.AndReturn(new_rule)
self.mox.StubOutWithMock(messages, 'info')
messages.info(IsA(http.HttpRequest), IsA(basestring))
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]),
formData)
self.assertRedirectsNoFollow(res,
reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]))
self.mox.VerifyAll()
def test_edit_rules_add_rule_exception(self):
exception = novaclient_exceptions.ClientException('ClientException',
message='ClientException')
SECGROUP_ID = '1'
RULE_ID = '1'
FROM_PORT = '-1'
TO_PORT = '-1'
IP_PROTOCOL = 'icmp'
CIDR = '0.0.0.0/0'
formData = {'method': 'AddRule',
'tenant_id': self.TEST_TENANT,
'security_group_id': SECGROUP_ID,
'from_port': FROM_PORT,
'to_port': TO_PORT,
'ip_protocol': IP_PROTOCOL,
'cidr': CIDR}
self.mox.StubOutWithMock(api, 'security_group_rule_create')
api.security_group_rule_create(IsA(http.HttpRequest),
SECGROUP_ID, IP_PROTOCOL, FROM_PORT,
TO_PORT, CIDR).AndRaise(exception)
self.mox.StubOutWithMock(messages, 'error')
messages.error(IsA(http.HttpRequest), IsA(basestring))
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]),
formData)
self.assertRedirectsNoFollow(res,
reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]))
self.mox.VerifyAll()
def test_edit_rules_delete_rule(self):
SECGROUP_ID = '1'
RULE_ID = '1'
formData = {'method': 'DeleteRule',
'tenant_id': self.TEST_TENANT,
'security_group_rule_id': RULE_ID,
}
self.mox.StubOutWithMock(api, 'security_group_rule_delete')
api.security_group_rule_delete(IsA(http.HttpRequest), RULE_ID)
self.mox.StubOutWithMock(messages, 'info')
messages.info(IsA(http.HttpRequest), IsA(unicode))
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]),
formData)
self.assertRedirectsNoFollow(res,
reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]))
self.mox.VerifyAll()
def test_edit_rules_delete_rule_exception(self):
exception = novaclient_exceptions.ClientException('ClientException',
message='ClientException')
SECGROUP_ID = '1'
RULE_ID = '1'
formData = {'method': 'DeleteRule',
'tenant_id': self.TEST_TENANT,
'security_group_rule_id': RULE_ID,
}
self.mox.StubOutWithMock(api, 'security_group_rule_delete')
api.security_group_rule_delete(IsA(http.HttpRequest), RULE_ID).\
AndRaise(exception)
self.mox.StubOutWithMock(messages, 'error')
messages.error(IsA(http.HttpRequest), IsA(basestring))
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]),
formData)
self.assertRedirectsNoFollow(res,
reverse('dash_security_groups_edit_rules',
args=[self.TEST_TENANT, SECGROUP_ID]))
self.mox.VerifyAll()
def test_delete_group(self):
SECGROUP_ID = '1'
formData = {'method': 'DeleteGroup',
'tenant_id': self.TEST_TENANT,
'security_group_id': SECGROUP_ID,
}
self.mox.StubOutWithMock(api, 'security_group_delete')
api.security_group_delete(IsA(http.HttpRequest), SECGROUP_ID)
self.mox.StubOutWithMock(messages, 'info')
messages.info(IsA(http.HttpRequest), IsA(unicode))
self.mox.ReplayAll()
res = self.client.post(reverse('dash_security_groups',
args=[self.TEST_TENANT]),
formData)
self.assertRedirectsNoFollow(res, reverse('dash_security_groups',
args=[self.TEST_TENANT]))
self.mox.VerifyAll()
def test_delete_group_exception(self):
    # When deleting a security group fails with a ClientException, the
    # view should show an error message and redirect to the group list
    # rather than propagating the exception.
    exception = novaclient_exceptions.ClientException('ClientException',
                                                      message='ClientException')
    SECGROUP_ID = '1'
    formData = {'method': 'DeleteGroup',
                'tenant_id': self.TEST_TENANT,
                'security_group_id': SECGROUP_ID,
                }
    # Stub the API layer so the delete call raises.
    self.mox.StubOutWithMock(api, 'security_group_delete')
    api.security_group_delete(IsA(http.HttpRequest), SECGROUP_ID).\
                              AndRaise(exception)
    # The failure must be reported through messages.error.
    self.mox.StubOutWithMock(messages, 'error')
    messages.error(IsA(http.HttpRequest), IsA(basestring))

    self.mox.ReplayAll()

    res = self.client.post(reverse('dash_security_groups',
                                   args=[self.TEST_TENANT]),
                           formData)

    self.assertRedirectsNoFollow(res, reverse('dash_security_groups',
                                              args=[self.TEST_TENANT]))
    self.mox.VerifyAll()
| apache-2.0 |
usersource/anno | tools/copytool3/oauth2client/django_orm.py | 261 | 3833 | # Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utilities for Django.
Utilities for using OAuth 2.0 in conjunction with
the Django datastore.
"""
__author__ = '[email protected] (Joe Gregorio)'
import oauth2client
import base64
import pickle
from django.db import models
from oauth2client.client import Storage as BaseStorage
class CredentialsField(models.Field):
    """Django model field holding an oauth2client Credentials object.

    Values are pickled and base64-encoded into a text column on the way
    in, and transparently decoded back into Credentials on the way out.
    """

    __metaclass__ = models.SubfieldBase

    def __init__(self, *args, **kwargs):
        # Credentials columns are nullable unless the caller overrides it.
        kwargs.setdefault('null', True)
        super(CredentialsField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "TextField"

    def to_python(self, value):
        # NULLs and already-deserialized Credentials pass through untouched.
        if value is None or isinstance(value, oauth2client.client.Credentials):
            return value
        return pickle.loads(base64.b64decode(value))

    def get_db_prep_value(self, value, connection, prepared=False):
        # Serialize for storage: pickle, then base64 so it fits a text column.
        return None if value is None else base64.b64encode(pickle.dumps(value))
class FlowField(models.Field):
    """Django model field holding an oauth2client Flow object.

    Same storage scheme as CredentialsField: pickled and base64-encoded
    into a text column, decoded transparently when read back.
    """

    __metaclass__ = models.SubfieldBase

    def __init__(self, *args, **kwargs):
        # Flow columns are nullable unless the caller overrides it.
        kwargs.setdefault('null', True)
        super(FlowField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "TextField"

    def to_python(self, value):
        # NULLs and already-deserialized Flow objects pass through untouched.
        if value is None or isinstance(value, oauth2client.client.Flow):
            return value
        return pickle.loads(base64.b64decode(value))

    def get_db_prep_value(self, value, connection, prepared=False):
        # Serialize for storage: pickle, then base64 so it fits a text column.
        return None if value is None else base64.b64encode(pickle.dumps(value))
class Storage(BaseStorage):
    """Store and retrieve a single credential to and from the datastore.

    This Storage helper presumes the Credentials have been stored as a
    CredentialsField on a db model class.
    """

    def __init__(self, model_class, key_name, key_value, property_name):
        """Constructor for Storage.

        Args:
          model_class: db.Model, model class
          key_name: string, key name for the entity that has the credentials
          key_value: string, key value for the entity that has the credentials
          property_name: string, name of the property that is a CredentialsField
        """
        self.model_class = model_class
        self.key_name = key_name
        self.key_value = key_value
        self.property_name = property_name

    def locked_get(self):
        """Retrieve Credential from datastore.

        Returns:
          oauth2client.Credentials, or None when no matching entity exists.
        """
        credential = None
        query = {self.key_name: self.key_value}
        entities = self.model_class.objects.filter(**query)
        if len(entities) > 0:
            credential = getattr(entities[0], self.property_name)
            # Attach this store so refreshed tokens get written back.
            if credential and hasattr(credential, 'set_store'):
                credential.set_store(self)
        return credential

    def locked_put(self, credentials):
        """Write a Credentials to the datastore.

        Args:
          credentials: Credentials, the credentials to store.

        Note: this always creates a new entity keyed by key_value; callers
        rely on the key lookup in locked_get returning the stored row.
        """
        args = {self.key_name: self.key_value}
        entity = self.model_class(**args)
        setattr(entity, self.property_name, credentials)
        entity.save()

    def locked_delete(self):
        """Delete Credentials from the datastore."""
        query = {self.key_name: self.key_value}
        # delete() operates on the queryset in bulk; the original bound its
        # return value to an unused local, which has been removed.
        self.model_class.objects.filter(**query).delete()
| mpl-2.0 |
listamilton/supermilton.repository | plugin.video.traquinas/resources/lib/libraries/f4mproxy/utils/openssl_tripledes.py | 202 | 1788 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""OpenSSL/M2Crypto 3DES implementation."""
from .cryptomath import *
from .tripledes import *
if m2cryptoLoaded:

    def new(key, mode, IV):
        # Factory function; matches the constructor signature used by the
        # other 3DES back ends so callers can pick an implementation blindly.
        return OpenSSL_TripleDES(key, mode, IV)

    class OpenSSL_TripleDES(TripleDES):
        # 3DES-CBC implementation backed by OpenSSL through M2Crypto's
        # low-level `m2` bindings.

        def __init__(self, key, mode, IV):
            TripleDES.__init__(self, key, mode, IV, "openssl")
            self.key = key
            self.IV = IV

        def _createContext(self, encrypt):
            # Build a fresh cipher context for one operation.
            # `encrypt` is 1 for encryption, 0 for decryption.
            context = m2.cipher_ctx_new()
            cipherType = m2.des_ede3_cbc()
            m2.cipher_init(context, cipherType, self.key, self.IV, encrypt)
            return context

        def encrypt(self, plaintext):
            # Base-class call performs the shared argument checks.
            TripleDES.encrypt(self, plaintext)
            context = self._createContext(1)
            ciphertext = m2.cipher_update(context, plaintext)
            m2.cipher_ctx_free(context)
            # CBC chaining across calls: last ciphertext block is the next IV.
            self.IV = ciphertext[-self.block_size:]
            return bytearray(ciphertext)

        def decrypt(self, ciphertext):
            TripleDES.decrypt(self, ciphertext)
            context = self._createContext(0)
            #I think M2Crypto has a bug - it fails to decrypt and return the last block passed in.
            #To work around this, we append sixteen zeros to the string, below:
            plaintext = m2.cipher_update(context, ciphertext+('\0'*16))

            #If this bug is ever fixed, then plaintext will end up having a garbage
            #plaintext block on the end.  That's okay - the below code will ignore it.
            plaintext = plaintext[:len(ciphertext)]
            m2.cipher_ctx_free(context)
            self.IV = ciphertext[-self.block_size:]
            return bytearray(plaintext)
zooniverse/aggregation | experimental/penguins/newCluster.py | 2 | 11987 | #!/usr/bin/env python
__author__ = 'greg'
from sklearn.cluster import DBSCAN
from sklearn.cluster import AffinityPropagation
import numpy as np
import matplotlib.pyplot as plt
import csv
import sys
import os
import pymongo
import matplotlib.cbook as cbook
import cPickle as pickle
import shutil
import urllib
import math
def dist(c1, c2):
    """Euclidean distance between two 2-D points given as (x, y) pairs.

    Uses math.hypot instead of a hand-rolled sqrt-of-squares; the result
    is identical but hypot is more robust to intermediate over/underflow.
    """
    return math.hypot(c1[0] - c2[0], c1[1] - c2[1])
def adaptiveDBSCAN(XYpts,user_ids):
    """Cluster volunteer markings with DBSCAN, adapting epsilon.

    XYpts    -- list of (x, y) marking coordinates
    user_ids -- parallel list: the user (ip) who made each marking

    Grows epsilon until closely spaced clusters made by disjoint user
    sets stop appearing (merge phase), then shrinks epsilon inside any
    cluster that contains repeated users (split phase).  Returns a list
    of (x, y) cluster centres.
    """
    if XYpts == []:
        return []

    pts_in_each_cluster = []
    users_in_each_cluster = []
    cluster_centers = []

    #increase the epsilon until we don't have any nearby clusters corresponding to non-overlapping
    #sets of users
    X = np.array(XYpts)
    for epsilon in [5,10,15,20,25,30]:
        db = DBSCAN(eps=epsilon, min_samples=2).fit(X)
        labels = db.labels_
        pts_in_each_cluster = []
        users_in_each_cluster = []
        cluster_centers = []

        for k in sorted(set(labels)):
            if k == -1:
                # DBSCAN labels noise points -1; skip them.
                continue

            class_member_mask = (labels == k)
            pts_in_cluster = list(X[class_member_mask])
            xSet,ySet = zip(*pts_in_cluster)
            # Cluster centre = centroid of its member points.
            cluster_centers.append((np.mean(xSet),np.mean(ySet)))
            pts_in_each_cluster.append(pts_in_cluster[:])
            users_in_each_cluster.append([u for u,l in zip(user_ids,labels) if l == k])

        #do we have any adjacent clusters with non-overlapping sets of users
        #if so, we should merge them by increasing the epsilon value
        cluster_compare = []
        # NOTE(review): the inner enumerate re-binds cluster_index, shadowing
        # the outer loop variable; only c2/users2 are used in the inner body,
        # so all pairs are still visited, but the shadowing is fragile.
        for cluster_index, (c1,users) in enumerate(zip(cluster_centers,users_in_each_cluster)):
            for cluster_index, (c2,users2) in enumerate(zip(cluster_centers[cluster_index+1:],users_in_each_cluster[cluster_index+1:])):
                overlappingUsers = [u for u in users if u in users2]
                cluster_compare.append((dist(c1,c2),overlappingUsers))

        cluster_compare.sort(key = lambda x:x[0])
        # Merge is needed when one of the ten closest cluster pairs shares
        # no users (the same animal was probably split into two clusters).
        needToMerge = [] in [c[1] for c in cluster_compare[:10]]
        if not(needToMerge):
            break

    print epsilon
    print [c[1] for c in cluster_compare[:10]]

    centers_to_return = []

    #do we need to split any clusters?
    for cluster_index in range(len(cluster_centers)):
        print "splitting"
        # A repeated user inside one cluster means two animals were merged.
        needToSplit = (sorted(users_in_each_cluster[cluster_index]) != sorted(list(set(users_in_each_cluster[cluster_index]))))
        if needToSplit:
            subcluster_centers = []
            X = np.array(pts_in_each_cluster[cluster_index])
            # Shrink epsilon until every subcluster has distinct users.
            for epsilon in [30,25,20,15,10,5,1,0.1,0.01]:
                db = DBSCAN(eps=epsilon, min_samples=2).fit(X)
                labels = db.labels_
                subcluster_centers = []

                needToSplit = False

                for k in sorted(set(labels)):
                    if k == -1:
                        continue

                    class_member_mask = (labels == k)
                    users_in_subcluster = [u for u,l in zip(users_in_each_cluster[cluster_index],labels) if l == k]
                    needToSplit = (sorted(users_in_subcluster) != sorted(list(set(users_in_subcluster))))
                    if needToSplit:
                        break

                    pts_in_cluster = list(X[class_member_mask])
                    xSet,ySet = zip(*pts_in_cluster)
                    subcluster_centers.append((np.mean(xSet),np.mean(ySet)))

                if not(needToSplit):
                    break

            assert not(needToSplit)
            centers_to_return.extend(subcluster_centers)

            #if needToSplit:
            #    print pts_in_each_cluster[cluster_index]
            #    print users_in_each_cluster[cluster_index]
            #else:
        else:
            centers_to_return.append(cluster_centers[cluster_index])

    return centers_to_return
# def cluster(XYpts,user_ids):
# if XYpts == []:
# return []
#
# #find out which points are noise - don't care about the actual clusters
# needToSplit = False
# X = np.array(XYpts)
#
#
# #X = np.array([XYpts[i] for i in signal_pts])
# #user_ids = [user_ids[i] for i in signal_pts]
# oldCenters = None
#
# needToMerge = False
# needToSplit = False
#
# cluster_list = []
# usersInCluster = []
# centers = []
#
# for pref in [0,-100,-200,-400,-800,-1200,-2000,-2200,-2400,-2700,-3000,-3500,-4000,-5000,-6000,-10000]:
# #now run affinity propagation to find the actual clusters
# af = AffinityPropagation(preference=pref).fit(X)
# #cluster_centers_indices = af.cluster_centers_indices_
# labels = af.labels_
#
#
#
# unique_labels = set(labels)
#
# usersInCluster = []
# centers = []
# cluster_list = []
# for k in sorted(unique_labels):
# assert(k != -1)
# #print k
# usersInCluster.append([u for u,l in zip(user_ids,labels) if l == k])
# #print XYpts
# #print user_ids
#
# class_member_mask = (labels == k)
# pts_in_cluster = list(X[class_member_mask])
# xSet,ySet = zip(*pts_in_cluster)
# centers.append((np.mean(xSet),np.mean(ySet)))
# cluster_list.append(pts_in_cluster[:])
#
# compare = []
# for cluster_index, (c1,users) in enumerate(zip(centers,usersInCluster)):
# for cluster_index, (c2,users2) in enumerate(zip(centers[cluster_index+1:],usersInCluster[cluster_index+1:])):
# overlappingUsers = [u for u in users if u in users2]
# compare.append((dist(c1,c2),overlappingUsers))
#
# #needToSplit = False
# #for users in usersInCluster:
# # needToSplit = (sorted(users) != sorted(list(set(users))))
# # if needToSplit:
# # break
#
# compare.sort(key = lambda x:x[0])
#
# needToMerge = ([] in [c[1] for c in compare[:3]]) and (compare[-1][0] <= 200)
#
# #if needToSplit:
# # assert(oldCenters != None)
# # return oldCenters
# if not(needToMerge):
# break
#
# oldCenters = centers[:]
#
# if needToMerge:
# print compare[0:3]
# assert not(needToMerge)
#
# centers_to_return = []
# for cluster_index in range(len(cluster_list)):
# if len(list(set(usersInCluster[cluster_index]))) == 1:
# continue
# #split any individual cluster
# needToSplit = (sorted(usersInCluster[cluster_index]) != sorted(list(set(usersInCluster[cluster_index]))))
# if needToSplit:
# #print cluster_list[cluster_index]
# X = np.array(cluster_list[cluster_index])
# sub_center_list = []
# for pref in [-2400,-2200,-2000,-1200,-800,-400,-200,-100,-75,-50,-30,0]:
# af = AffinityPropagation(preference=pref).fit(X)
# #cluster_centers_indices = af.cluster_centers_indices_
# labels = af.labels_
# try:
# unique_labels = set(labels)
# except TypeError:
# print pref
# print X
# print usersInCluster[cluster_index]
# print labels
# raise
# #get the new "sub"clusters and check to see if we need to split even more
# for k in sorted(unique_labels):
# users = [u for u,l in zip(usersInCluster[cluster_index],labels) if l == k]
# needToSplit = (sorted(users) != sorted(list(set(users))))
#
# if needToSplit:
# break
#
# #add this new sub-cluster onto the list
# class_member_mask = (labels == k)
# pts_in_cluster = list(X[class_member_mask])
# xSet,ySet = zip(*pts_in_cluster)
# sub_center_list.append((np.mean(xSet),np.mean(ySet)))
#
# if not(needToSplit):
# break
#
# #if pref == 0:
# # print sub_center_list
# assert not(needToSplit)
# #print pref
# centers_to_return.extend([c for c in sub_center_list if len(c) > 1])
#
#
#
# else:
# centers_to_return.append(centers[cluster_index])
#
# assert not(needToSplit)
# return centers
# Connect to the local MongoDB dump of Penguin Watch classifications.
client = pymongo.MongoClient()
db = client['penguin_2014-09-19']
collection = db["penguin_classifications"]
collection2 = db["penguin_subjects"]

# Per-subject accumulators, keyed by zooniverse_id.
images = {}
pts = {}         # subject_id -> list of (x, y) marking coordinates
ids = {}         # subject_id -> user ip for each point (parallel to pts)
userCount = {}   # subject_id -> number of classifications kept
errorCount = 0
total = 0

at_5 = {}
at_10 = {}
center_5 = {}
center_10 = {}

# Classification-count checkpoints at which clustering is evaluated.
step_1 = 5
step_2 = 8

toSkip = ["APZ0002uw3","APZ0001v9f","APZ00010ww","APZ0000p99","APZ0002jc3","APZ00014t4","APZ0000v0n","APZ0000ifx","APZ0002pch","APZ0003kls","APZ0001iv3","APZ0003auc","APZ0002ezn"]
mainSubject = "APZ0003fgt" #APZ0001jre
toPlot = None
numClassifications = []
# NOTE(review): leading indentation was lost in this file; the nesting
# below is reconstructed from the control flow and should be verified
# against the original repository.
for r in collection.find():
    subject_id = r["subjects"][0]["zooniverse_id"]
    total += 1
    # Only analyse one hand-picked subject for this experiment.
    if subject_id != "APZ0003kls":# in toSkip:
        continue

    if not(subject_id in pts):
        pts[subject_id] = []
        userCount[subject_id] = 0
        ids[subject_id] = []

    userCount[subject_id] += 1

    animalsPresent = r["annotations"][0]["value"] == "yes"
    #print animalsPresent
    if animalsPresent:
        c = 0
        for marking_index in r["annotations"][1]["value"]:
            try:
                marking = r["annotations"][1]["value"][marking_index]
                if True: # marking["value"] == "adult":
                    x = float(marking["x"])
                    y = float(marking["y"])
                    ip = r["user_ip"]

                    # Drop exact-duplicate markings from the same user.
                    alreadyInList = False
                    try:
                        index = pts[subject_id].index((x,y))
                        if ids[subject_id][index] == ip:
                            alreadyInList = True
                    except ValueError:
                        pass

                    if not(alreadyInList):
                        pts[subject_id].append((x,y))
                        ids[subject_id].append(ip)
                    c += 1
            except TypeError:
                # Malformed annotation payload: discard this classification.
                errorCount += 1
                userCount[subject_id] += -1
                break
            except ValueError:
                errorCount += 1
                continue

        numClassifications.append(c)

    # Once enough classifications have accumulated, run the clusterer.
    if userCount[subject_id] in [step_2]:
        cluster_center = adaptiveDBSCAN(pts[subject_id],ids[subject_id])
        mainSubject = subject_id
        if cluster_center != []:
            break

        # NOTE(review): placement of this branch inside the step_2 check is
        # an assumption (cluster_center is only defined here) -- confirm.
        if userCount[subject_id] == step_1:
            pass
            #at_5[subject_id] = len(cluster_center)
        else:
            at_10[subject_id] = len(cluster_center)
# inBoth = [subject_id for subject_id in at_10 if (subject_id in at_5)]
# # print len(inBoth)
# x = [at_5[subject_id] for subject_id in inBoth]
# y = [at_10[subject_id] for subject_id in inBoth]
# print zip(inBoth,zip(x,y))
# plt.plot((0,100),(0,100),'--')
# # #print x
# # #print y
# plt.plot(x,y,'.')
# plt.show()
# print userCount
# print numClassifications
#
#
# Fetch the subject image (cached locally) and overlay cluster centres.
print mainSubject
r2 = collection2.find_one({"zooniverse_id":mainSubject})
url = r2["location"]["standard"]
if not(os.path.isfile("/home/greg/Databases/penguins/images/"+mainSubject+".JPG")):
    # Download once; subsequent runs reuse the cached file.
    urllib.urlretrieve (url, "/home/greg/Databases/penguins/images/"+mainSubject+".JPG")

image_file = cbook.get_sample_data("/home/greg/Databases/penguins/images/"+mainSubject+".JPG")
image = plt.imread(image_file)

fig, ax = plt.subplots()
im = ax.imshow(image)
#plt.show()
#
# Plot the cluster centres on top of the subject image.
if cluster_center != []:
    x,y = zip(*cluster_center)
    plt.plot(x,y,'.',color='blue')
#
# x,y = zip(*center_5[mainSubject])
# plt.plot(x,y,'.',color='red')
# x,y = zip(*center_10[mainSubject])
# plt.plot(x,y,'.',color='green')
plt.show()
stadtgestalten/stadtgestalten | docs/deployment/settings.py | 1 | 2052 | # grouprise settings file
# see https://docs.djangoproject.com/en/2.1/ref/settings/
import os
import subprocess
from stadt.settings.default import *
from grouprise.core.assets import add_javascript_reference, add_javascript_inline, add_csp_directive, add_meta
# see https://www.miniwebtool.com/django-secret-key-generator/
SECRET_KEY = 'CHANGE THIS!'

# Hostnames this instance will answer for.
ALLOWED_HOSTS = ['yourhostname.org', 'localhost']

# This is set by nginx (TLS termination happens in the reverse proxy).
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# SECURE_HSTS_SECONDS = 31536000
# SECURE_HSTS_INCLUDE_SUBDOMAINS = True
# SECURE_CONTENT_TYPE_NOSNIFF = True
# SECURE_BROWSER_XSS_FILTER = True
# SECURE_SSL_REDIRECT = False
# SESSION_COOKIE_SECURE = True
# CSRF_COOKIE_SECURE = True
# CSRF_COOKIE_HTTPONLY = False
# X_FRAME_OPTIONS = 'DENY'

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'HOST': 'localhost',
        'NAME': 'grouprise',
        'USER': 'grouprise',
        'PASSWORD': 'xxxxx',
    }
}

# Recipients of error notification mails.
ADMINS = [
    ('Admins', '[email protected]'),
]

# Sender addresses; {slug}/{reply_key} placeholders are filled at send time.
DEFAULT_FROM_EMAIL = 'noreply@localhost'
FROM_EMAIL_WITH_SLUG = 'noreply+{slug}@localhost'
ANSWERABLE_FROM_EMAIL = 'noreply@localhost'
DEFAULT_REPLY_TO_EMAIL = 'reply+{reply_key}@localhost'
STADTGESTALTEN_BOT_EMAIL = 'grouprise-bot@localhost'
SERVER_EMAIL = 'grouprise <noreply@localhost>'
GROUPRISE_POSTMASTER_EMAIL = 'postmaster@localhost'

# Primary keys of special entities in this installation.
OPERATOR_GROUP_ID = 1
STADTGESTALTEN_FEEDS_IMPORTER_USER_ID = 1
GROUPRISE_FEEDS_IMPORTER_GESTALT_ID = 1
GROUPRISE_UNKNOWN_GESTALT_ID = 1

ACCOUNT_DEFAULT_HTTP_PROTOCOL = 'https'

# ENTITY_SLUG_BLACKLIST = [ 'all', 'alle', 'antwort', 'crew', 'facebook', 'gbr', 'info', 'kontakt', 'mail', 'noreply', 'postmaster', 'presse', 'reply', 'stadt', 'unknown', 'webmaster', 'www']

# set debug mode to false
DEBUG = False

# increase session cookie time to 1 year
SESSION_COOKIE_AGE = 60 * 60 * 24 * 365

STADTGESTALTEN_CLAIMS = [
    'your claim 1',
    'your claim 2',
    # ...
]

# HAYSTACK_CONNECTIONS['default']['PATH'] = os.path.join(DATA_DIR, 'xapian_index')
| agpl-3.0 |
shl198/Pipeline | Modules/PacBioEDA/PacBio_Productivity.py | 3 | 2900 | #!/usr/bin/env python
# Copyright (C) 2011 Genome Research Limited -- See full notice at end
# of module.
# Create a plot of ZMW productivity by x/y position on the
# SMRTcell. First parameter is input .bas.h5 file. Output png file is
# optional command line parameter, defaulting to productivity.png.
import sys
import optparse
import numpy as np
import h5py
from tt_log import logger
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
DEF_OUTPUT = 'productivity.png'
def main ():
    """Scatter-plot ZMW productivity by x/y hole position on a SMRTcell.

    Reads hole status/position/productivity from the .bas.h5 file named
    on the command line and writes a png (default productivity.png).
    """
    logger.debug("%s starting" % sys.argv[0])

    opt, args = getParms()

    infile_name = args[0]

    infile = h5py.File (infile_name, 'r')

    # Index 0/1/2 of these tuples corresponds to the three hole classes
    # built below: non-sequencing, productivity-0, productivity-1.
    colours = ('grey', 'red', 'green')
    legends = ('non-seq', 'prod-0', 'prod-1')

    top = h5py.Group (infile, '/')

    ZMW = top["PulseData/BaseCalls/ZMW"]
    ZMWMetrics = top["PulseData/BaseCalls/ZMWMetrics"]

    holeStatus = ZMW["HoleStatus"]
    holeXY = ZMW["HoleXY"]
    holeProd = ZMWMetrics["Productivity"]

    # Boolean masks over all holes, partitioning them into three classes.
    nonseqHoles = holeStatus[:]!=0     # ZMWs other than sequencing
    prod0Holes = np.logical_and(holeProd[:]==0, np.logical_not(nonseqHoles))
    prod1Holes = np.logical_and(holeProd[:]==1, np.logical_not(nonseqHoles))

    holesByType = (nonseqHoles, prod0Holes, prod1Holes)

    for which in xrange(len(holesByType)):
        whichHoles = holesByType[which]
        howMany = sum(whichHoles)
        logger.debug("%5d %s" % (howMany, legends[which]));

        # Skip empty classes: scatter with zero points would still add a
        # legend entry.
        if howMany > 0:
            plt.scatter (holeXY[whichHoles,0], holeXY[whichHoles,1], \
                         s=1, c=colours[which], edgecolor='face', \
                         label="%5d %s" % (howMany, legends[which]))

    plt.axis ('equal')
    plt.legend (scatterpoints=3, prop={'size':8})
    plt.savefig (opt.output)

    infile.close()

    logger.debug("complete")
def getParms ():                       # use default input sys.argv[1:]
    """Parse command-line arguments; returns (options, positional args)."""
    parser = optparse.OptionParser(usage='%prog [options] <bas_file>')

    # Supplying the default directly to add_option is equivalent to a
    # separate set_defaults call; '%default' in help still expands.
    parser.add_option ('--output',
                       default=DEF_OUTPUT,
                       help='Output file name (def: %default)')

    opt, args = parser.parse_args()

    return opt, args
# Script entry point.
if __name__ == "__main__":
    main()
# Copyright (C) 2011 Genome Research Limited
#
# This library is free software. You can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
| mit |
chbrown/pi | pi/commands/publish.py | 1 | 1402 | import os
from subprocess import check_call
from pi.dist import read_script
def publish(execute=True, verbose=False, script_name='setup.py'):
    """Build and upload the package described by *script_name* to PyPI.

    Steps: convert README.md to reST (PyPI requirement), install locally
    in develop mode, register the package, then build and upload an sdist.
    When execute is False, each step is printed but not run (dry run).

    NOTE(review): the verbose parameter is accepted (cli() passes it
    through) but never read here -- confirm whether it should gate output.
    """
    dist = read_script(script_name)
    name = dist.get_name()
    version = dist.get_version()

    if os.path.exists('README.md'):
        print 'Converting README.md to reStructuredText, because PyPI requires reStructuredText'
        if execute:
            check_call(['pandoc', 'README.md', '-o', 'README.rst'])

    # print 'Tagging current version in git'
    ## e.g., git tag -a v1.2.3 -m 1.2.3
    # subprocessor('git', 'tag', '-a', 'v' + pi.__version__, '-m', pi.__version__)
    # subprocessor('git', 'push')

    print 'Installing locally in develop mode (version=%s)' % version
    if execute:
        dist.run_command('develop')

    # python setup.py --help register
    print 'Registering on PyPI: https://pypi.python.org/pypi/%s' % name
    if execute:
        dist.run_command('register')

    # python setup.py --help sdist upload
    print 'Uploading source distribution: https://pypi.python.org/simple/%s' % name
    if execute:
        dist.run_command('sdist')
        dist.run_command('upload')
def cli(parser):
    """Command-line entry point: add the --dry-run flag, parse, and publish.

    NOTE(review): opts.verbose is read below, so the caller's parser is
    assumed to define a --verbose option -- verify against the caller.
    """
    parser.add_argument('-n', '--dry-run', action='store_true', help='Print publish sequence without running')
    opts = parser.parse_args()
    publish(execute=not opts.dry_run, verbose=opts.verbose or opts.dry_run)
| mit |
zfrenchee/pandas | doc/sphinxext/ipython_sphinxext/ipython_directive.py | 1 | 37812 | # -*- coding: utf-8 -*-
"""
Sphinx directive to support embedded IPython code.
This directive allows pasting of entire interactive IPython sessions, prompts
and all, and their code will actually get re-executed at doc build time, with
all prompts renumbered sequentially. It also allows you to input code as a pure
python input by giving the argument python to the directive. The output looks
like an interactive ipython section.
To enable this directive, simply list it in your Sphinx ``conf.py`` file
(making sure the directory where you placed it is visible to sphinx, as is
needed for all Sphinx directives). For example, to enable syntax highlighting
and the IPython directive::
extensions = ['IPython.sphinxext.ipython_console_highlighting',
'IPython.sphinxext.ipython_directive']
The IPython directive outputs code-blocks with the language 'ipython'. So
if you do not have the syntax highlighting extension enabled as well, then
all rendered code-blocks will be uncolored. By default this directive assumes
that your prompts are unchanged IPython ones, but this can be customized.
The configurable options that can be placed in conf.py are:
ipython_savefig_dir:
The directory in which to save the figures. This is relative to the
Sphinx source directory. The default is `html_static_path`.
ipython_rgxin:
The compiled regular expression to denote the start of IPython input
lines. The default is re.compile('In \[(\d+)\]:\s?(.*)\s*'). You
shouldn't need to change this.
ipython_rgxout:
The compiled regular expression to denote the start of IPython output
lines. The default is re.compile('Out\[(\d+)\]:\s?(.*)\s*'). You
shouldn't need to change this.
ipython_promptin:
The string to represent the IPython input prompt in the generated ReST.
The default is 'In [%d]:'. This expects that the line numbers are used
in the prompt.
ipython_promptout:
The string to represent the IPython prompt in the generated ReST. The
default is 'Out [%d]:'. This expects that the line numbers are used
in the prompt.
ipython_mplbackend:
The string which specifies if the embedded Sphinx shell should import
Matplotlib and set the backend. The value specifies a backend that is
passed to `matplotlib.use()` before any lines in `ipython_execlines` are
executed. If not specified in conf.py, then the default value of 'agg' is
used. To use the IPython directive without matplotlib as a dependency, set
the value to `None`. It may end up that matplotlib is still imported
if the user specifies so in `ipython_execlines` or makes use of the
@savefig pseudo decorator.
ipython_execlines:
A list of strings to be exec'd in the embedded Sphinx shell. Typical
usage is to make certain packages always available. Set this to an empty
list if you wish to have no imports always available. If specified in
conf.py as `None`, then it has the effect of making no imports available.
If omitted from conf.py altogether, then the default value of
['import numpy as np', 'import matplotlib.pyplot as plt'] is used.
ipython_holdcount
When the @suppress pseudo-decorator is used, the execution count can be
incremented or not. The default behavior is to hold the execution count,
corresponding to a value of `True`. Set this to `False` to increment
the execution count after each suppressed command.
As an example, to use the IPython directive when `matplotlib` is not available,
one sets the backend to `None`::
ipython_mplbackend = None
An example usage of the directive is:
.. code-block:: rst
.. ipython::
In [1]: x = 1
In [2]: y = x**2
In [3]: print(y)
See http://matplotlib.org/sampledoc/ipython_directive.html for additional
documentation.
ToDo
----
- Turn the ad-hoc test() function into a real test suite.
- Break up ipython-specific functionality from matplotlib stuff into better
separated code.
Authors
-------
- John D Hunter: original author.
- Fernando Perez: refactoring, documentation, cleanups, port to 0.11.
- VáclavŠmilauer <eudoxos-AT-arcig.cz>: Prompt generalizations.
- Skipper Seabold, refactoring, cleanups, pure python addition
"""
from __future__ import print_function
from __future__ import unicode_literals
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Stdlib
import os
import re
import sys
import tempfile
import ast
from pandas.compat import zip, range, map, lmap, u, text_type, cStringIO as StringIO
import warnings
# To keep compatibility with various python versions
try:
from hashlib import md5
except ImportError:
from md5 import md5
# Third-party
import sphinx
from docutils.parsers.rst import directives
from docutils import nodes
from sphinx.util.compat import Directive
# Our own
try:
from traitlets.config import Config
except ImportError:
from IPython import Config
from IPython import InteractiveShell
from IPython.core.profiledir import ProfileDir
from IPython.utils import io
from IPython.utils.py3compat import PY3
if PY3:
from io import StringIO
else:
from StringIO import StringIO
#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------
# for tokenizing blocks
COMMENT, INPUT, OUTPUT = range(3)
#-----------------------------------------------------------------------------
# Functions and class declarations
#-----------------------------------------------------------------------------
def block_parser(part, rgxin, rgxout, fmtin, fmtout):
    """
    part is a string of ipython text, comprised of at most one
    input, one output, comments, and blank lines.  The block parser
    parses the text into a list of::

      blocks = [ (TOKEN0, data0), (TOKEN1, data1), ...]

    where TOKEN is one of [COMMENT | INPUT | OUTPUT ] and
    data is, depending on the type of token::

      COMMENT : the comment string

      INPUT: the (DECORATOR, INPUT_LINE, REST) where
         DECORATOR: the input decorator (or None)
         INPUT_LINE: the input as string (possibly multi-line)
         REST : any stdout generated by the input line (not OUTPUT)

      OUTPUT: the output string, possibly multi-line
    """
    block = []
    lines = part.split('\n')
    N = len(lines)
    i = 0
    decorator = None
    while 1:

        if i==N:
            # nothing left to parse -- the last line
            break

        line = lines[i]
        i += 1
        line_stripped = line.strip()
        if line_stripped.startswith('#'):
            block.append((COMMENT, line))
            continue

        if line_stripped.startswith('@'):
            # we're assuming at most one decorator -- may need to
            # rethink
            decorator = line_stripped
            continue

        # does this look like an input line?
        matchin = rgxin.match(line)
        if matchin:
            lineno, inputline = int(matchin.group(1)), matchin.group(2)

            # the ....: continuation string
            # NOTE(review): upstream IPython uses three leading spaces in
            # this literal so it aligns under 'In [N]:'; verify the spacing
            # here was not whitespace-mangled.
            continuation = ' %s:'%''.join(['.']*(len(str(lineno))+2))
            Nc = len(continuation)
            # input lines can continue on for more than one line, if
            # we have a '\' line continuation char or a function call
            # echo line 'print'.  The input line can only be
            # terminated by the end of the block or an output line, so
            # we parse out the rest of the input line if it is
            # multiline as well as any echo text

            rest = []
            while i<N:
                # look ahead; if the next line is blank, or a comment, or
                # an output line, we're done

                nextline = lines[i]
                matchout = rgxout.match(nextline)
                #print "nextline=%s, continuation=%s, starts=%s"%(nextline, continuation, nextline.startswith(continuation))
                if matchout or nextline.startswith('#'):
                    break
                elif nextline.startswith(continuation):
                    # strip the continuation marker (and one alignment space)
                    # before gluing the line onto the logical input.
                    nextline = nextline[Nc:]
                    if nextline and nextline[0] == ' ':
                        nextline = nextline[1:]

                    inputline += '\n' + nextline
                else:
                    # anything else is stdout echoed by the input line
                    rest.append(nextline)
                i+= 1

            block.append((INPUT, (decorator, inputline, '\n'.join(rest))))
            continue

        # if it looks like an output line grab all the text to the end
        # of the block
        matchout = rgxout.match(line)
        if matchout:
            lineno, output = int(matchout.group(1)), matchout.group(2)
            if i<N-1:
                output = '\n'.join([output] + lines[i:])

            block.append((OUTPUT, output))
            break

    return block
class DecodingStringIO(StringIO, object):
    """StringIO subclass that decodes byte strings on write.

    Each write of a non-unicode value is tried against the configured
    encodings in order; if none succeeds, the data is decoded as UTF-8
    with replacement characters so a bad byte can never abort a write.
    """

    def __init__(self, buf='', encodings=('utf8',), *args, **kwds):
        super(DecodingStringIO, self).__init__(buf, *args, **kwds)
        self.set_encodings(encodings)

    def set_encodings(self, encodings):
        """Replace the ordered sequence of candidate encodings."""
        self.encodings = encodings

    def write(self, data):
        if isinstance(data, text_type):
            return super(DecodingStringIO, self).write(data)
        else:
            for enc in self.encodings:
                try:
                    data = data.decode(enc)
                    return super(DecodingStringIO, self).write(data)
                # Catch only decode-related failures (bad bytes, unknown
                # codec name). The original bare `except:` also swallowed
                # KeyboardInterrupt/SystemExit, which could hide real bugs.
                except (UnicodeError, LookupError):
                    pass
            # default to brute utf8 if no encoding succeeded
            return super(DecodingStringIO, self).write(data.decode('utf8', 'replace'))
class EmbeddedSphinxShell(object):
    """An embedded IPython instance to run inside Sphinx"""
    def __init__(self, exec_lines=None,state=None):
        """Create the embedded shell.

        exec_lines -- optional list of source lines executed immediately to
            pre-populate the namespace (e.g. ``import numpy as np``).
        state -- the docutils state object of the directive being processed,
            kept for error reporting context.
        """
        # Captures everything the shell writes; decoded per-block via
        # set_encodings() before each input is processed.
        self.cout = DecodingStringIO(u'')
        if exec_lines is None:
            exec_lines = []
        self.state = state
        # Create config object for IPython
        config = Config()
        config.InteractiveShell.autocall = False
        config.InteractiveShell.autoindent = False
        config.InteractiveShell.colors = 'NoColor'
        # create a profile so instance history isn't saved
        tmp_profile_dir = tempfile.mkdtemp(prefix='profile_')
        profname = 'auto_profile_sphinx_build'
        pdir = os.path.join(tmp_profile_dir,profname)
        profile = ProfileDir.create_profile_dir(pdir)
        # Create and initialize global ipython, but don't start its mainloop.
        # This will persist across different EmbededSphinxShell instances.
        IP = InteractiveShell.instance(config=config, profile_dir=profile)
        # io.stdout redirect must be done after instantiating InteractiveShell
        io.stdout = self.cout
        io.stderr = self.cout
        # For debugging, so we can see normal output, use this:
        #from IPython.utils.io import Tee
        #io.stdout = Tee(self.cout, channel='stdout') # dbg
        #io.stderr = Tee(self.cout, channel='stderr') # dbg
        # Store a few parts of IPython we'll need.
        self.IP = IP
        self.user_ns = self.IP.user_ns
        self.user_global_ns = self.IP.user_global_ns
        self.input = ''
        self.output = ''
        # Per-directive option flags; reset by IPythonDirective.run().
        self.is_verbatim = False
        self.is_doctest = False
        self.is_suppress = False
        # Optionally, provide more detailed information to shell.
        self.directive = None
        # on the first call to the savefig decorator, we'll import
        # pyplot as plt so we can make a call to the plt.gcf().savefig
        self._pyplot_imported = False
        # Prepopulate the namespace.
        for line in exec_lines:
            self.process_input_line(line, store_history=False)
    def clear_cout(self):
        """Discard everything captured so far in the output buffer."""
        self.cout.seek(0)
        self.cout.truncate(0)
    def process_input_line(self, line, store_history=True):
        """process the input, capturing stdout"""
        stdout = sys.stdout
        splitter = self.IP.input_splitter
        try:
            sys.stdout = self.cout
            splitter.push(line)
            more = splitter.push_accepts_more()
            # Only execute once the splitter says the statement is complete
            # (handles multi-line constructs fed one line at a time).
            if not more:
                try:
                    source_raw = splitter.source_raw_reset()[1]
                except:
                    # recent ipython #4504
                    source_raw = splitter.raw_reset()
                self.IP.run_cell(source_raw, store_history=store_history)
        finally:
            # Always restore stdout even if run_cell raises.
            sys.stdout = stdout
    def process_image(self, decorator):
        """
        # build out an image directive like
        # .. image:: somefile.png
        #  :width 4in
        #
        # from an input like
        # savefig somefile.png width=4in
        """
        savefig_dir = self.savefig_dir
        source_dir = self.source_dir
        saveargs = decorator.split(' ')
        filename = saveargs[1]
        # insert relative path to image file in source
        outfile = os.path.relpath(os.path.join(savefig_dir,filename),
                    source_dir)
        imagerows = ['.. image:: %s'%outfile]
        # Remaining args are reST image options, e.g. "width=4in".
        for kwarg in saveargs[2:]:
            arg, val = kwarg.split('=')
            arg = arg.strip()
            val = val.strip()
            imagerows.append('   :%s: %s'%(arg, val))
        image_file = os.path.basename(outfile) # only return file name
        image_directive = '\n'.join(imagerows)
        return image_file, image_directive
    # Callbacks for each type of token
    def process_input(self, data, input_prompt, lineno):
        """
        Process data block for INPUT token.

        Executes the input (unless verbatim), captures its output, and
        returns a tuple ``(ret, input_lines, output, is_doctest, decorator,
        image_file, image_directive)`` used by process_block().
        """
        decorator, input, rest = data
        image_file = None
        image_directive = None
        is_verbatim = decorator=='@verbatim' or self.is_verbatim
        is_doctest = (decorator is not None and \
                      decorator.startswith('@doctest')) or self.is_doctest
        is_suppress = decorator=='@suppress' or self.is_suppress
        is_okexcept = decorator=='@okexcept' or self.is_okexcept
        is_okwarning = decorator=='@okwarning' or self.is_okwarning
        is_savefig = decorator is not None and \
                     decorator.startswith('@savefig')
        # set the encodings to be used by DecodingStringIO
        # to convert the execution output into unicode if
        # needed. this attrib is set by IpythonDirective.run()
        # based on the specified block options, defaulting to ['utf8']
        self.cout.set_encodings(self.output_encoding)
        input_lines = input.split('\n')
        if len(input_lines) > 1:
            if input_lines[-1] != "":
                input_lines.append('') # make sure there's a blank line
                                       # so splitter buffer gets reset
        continuation = '   %s:'%''.join(['.']*(len(str(lineno))+2))
        if is_savefig:
            image_file, image_directive = self.process_image(decorator)
        ret = []
        is_semicolon = False
        # Hold the execution count, if requested to do so.
        if is_suppress and self.hold_count:
            store_history = False
        else:
            store_history = True
        # Note: catch_warnings is not thread safe
        with warnings.catch_warnings(record=True) as ws:
            for i, line in enumerate(input_lines):
                # A trailing semicolon suppresses the Out[] echo below.
                if line.endswith(';'):
                    is_semicolon = True
                if i == 0:
                    # process the first input line
                    if is_verbatim:
                        self.process_input_line('')
                        self.IP.execution_count += 1 # increment it anyway
                    else:
                        # only submit the line in non-verbatim mode
                        self.process_input_line(line, store_history=store_history)
                    formatted_line = '%s %s'%(input_prompt, line)
                else:
                    # process a continuation line
                    if not is_verbatim:
                        self.process_input_line(line, store_history=store_history)
                    formatted_line = '%s %s'%(continuation, line)
                if not is_suppress:
                    ret.append(formatted_line)
        if not is_suppress and len(rest.strip()) and is_verbatim:
            # the "rest" is the standard output of the
            # input, which needs to be added in
            # verbatim mode
            ret.append(rest)
        self.cout.seek(0)
        output = self.cout.read()
        if not is_suppress and not is_semicolon:
            ret.append(output)
        elif is_semicolon: # get spacing right
            ret.append('')
        # context information
        filename = self.state.document.current_source
        lineno = self.state.document.current_line
        # output any exceptions raised during execution to stdout
        # unless :okexcept: has been specified.
        if not is_okexcept and "Traceback" in output:
            s =  "\nException in %s at block ending on line %s\n" % (filename, lineno)
            s += "Specify :okexcept: as an option in the ipython:: block to suppress this message\n"
            sys.stdout.write('\n\n>>>' + ('-' * 73))
            sys.stdout.write(s)
            sys.stdout.write(output)
            sys.stdout.write('<<<' + ('-' * 73) + '\n\n')
        # output any warning raised during execution to stdout
        # unless :okwarning: has been specified.
        if not is_okwarning:
            for w in ws:
                s =  "\nWarning in %s at block ending on line %s\n" % (filename, lineno)
                s += "Specify :okwarning: as an option in the ipython:: block to suppress this message\n"
                sys.stdout.write('\n\n>>>' + ('-' * 73))
                sys.stdout.write(s)
                sys.stdout.write('-' * 76 + '\n')
                s=warnings.formatwarning(w.message, w.category,
                                         w.filename, w.lineno, w.line)
                sys.stdout.write(s)
                sys.stdout.write('<<<' + ('-' * 73) + '\n')
        self.cout.truncate(0)
        return (ret, input_lines, output, is_doctest, decorator, image_file,
                    image_directive)
    def process_output(self, data, output_prompt,
                       input_lines, output, is_doctest, decorator, image_file):
        """
        Process data block for OUTPUT token.

        When in doctest mode, compares the captured execution ``output``
        against the expected ``data`` and raises RuntimeError on mismatch.
        """
        TAB = ' ' * 4
        if is_doctest and output is not None:
            found = output
            found = found.strip()
            submitted = data.strip()
            if self.directive is None:
                source = 'Unavailable'
                content = 'Unavailable'
            else:
                source = self.directive.state.document.current_source
                content = self.directive.content
                # Add tabs and join into a single string.
                content = '\n'.join(TAB + line for line in content)
            # Make sure the output contains the output prompt.
            ind = found.find(output_prompt)
            if ind < 0:
                e = ('output does not contain output prompt\n\n'
                     'Document source: {0}\n\n'
                     'Raw content: \n{1}\n\n'
                     'Input line(s):\n{TAB}{2}\n\n'
                     'Output line(s):\n{TAB}{3}\n\n')
                e = e.format(source, content, '\n'.join(input_lines),
                             repr(found), TAB=TAB)
                raise RuntimeError(e)
            found = found[len(output_prompt):].strip()
            # Handle the actual doctest comparison.
            if decorator.strip() == '@doctest':
                # Standard doctest
                if found != submitted:
                    e = ('doctest failure\n\n'
                         'Document source: {0}\n\n'
                         'Raw content: \n{1}\n\n'
                         'On input line(s):\n{TAB}{2}\n\n'
                         'we found output:\n{TAB}{3}\n\n'
                         'instead of the expected:\n{TAB}{4}\n\n')
                    e = e.format(source, content, '\n'.join(input_lines),
                                 repr(found), repr(submitted), TAB=TAB)
                    raise RuntimeError(e)
            else:
                # e.g. "@doctest float" delegates to a registered handler.
                self.custom_doctest(decorator, input_lines, found, submitted)
    def process_comment(self, data):
        """Process data block for COMMENT token."""
        if not self.is_suppress:
            return [data]
    def save_image(self, image_file):
        """
        Saves the image file to disk.
        """
        self.ensure_pyplot()
        command = ('plt.gcf().savefig("%s", bbox_inches="tight", '
                   'dpi=100)' % image_file)
        #print 'SAVEFIG', command # dbg
        # Bookmark the current directory, cd into the savefig dir, save,
        # then restore; keeps relative paths in user code working.
        self.process_input_line('bookmark ipy_thisdir', store_history=False)
        self.process_input_line('cd -b ipy_savedir', store_history=False)
        self.process_input_line(command, store_history=False)
        self.process_input_line('cd -b ipy_thisdir', store_history=False)
        self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
        self.clear_cout()
    def process_block(self, block):
        """
        process block from the block_parser and return a list of processed lines
        """
        ret = []
        output = None
        input_lines = None
        lineno = self.IP.execution_count
        input_prompt = self.promptin % lineno
        output_prompt = self.promptout % lineno
        image_file = None
        image_directive = None
        for token, data in block:
            if token == COMMENT:
                out_data = self.process_comment(data)
            elif token == INPUT:
                (out_data, input_lines, output, is_doctest, decorator,
                    image_file, image_directive) = \
                          self.process_input(data, input_prompt, lineno)
            elif token == OUTPUT:
                # OUTPUT relies on state produced by the preceding INPUT
                # token (input_lines/output/is_doctest/decorator).
                out_data = \
                    self.process_output(data, output_prompt,
                                        input_lines, output, is_doctest,
                                        decorator, image_file)
            if out_data:
                ret.extend(out_data)
        # save the image files
        if image_file is not None:
            self.save_image(image_file)
        return ret, image_directive
    def ensure_pyplot(self):
        """
        Ensures that pyplot has been imported into the embedded IPython shell.

        Also, makes sure to set the backend appropriately if not set already.

        """
        # We are here if the @figure pseudo decorator was used. Thus, it's
        # possible that we could be here even if python_mplbackend were set to
        # `None`. That's also strange and perhaps worthy of raising an
        # exception, but for now, we just set the backend to 'agg'.
        if not self._pyplot_imported:
            if 'matplotlib.backends' not in sys.modules:
                # Then ipython_matplotlib was set to None but there was a
                # call to the @figure decorator (and ipython_execlines did
                # not set a backend).
                #raise Exception("No backend was set, but @figure was used!")
                import matplotlib
                matplotlib.use('agg')
            # Always import pyplot into embedded shell.
            self.process_input_line('import matplotlib.pyplot as plt',
                                    store_history=False)
            self._pyplot_imported = True
    def process_pure_python(self, content):
        """
        content is a list of strings. it is unedited directive content

        This runs it line by line in the InteractiveShell, prepends
        prompts as needed capturing stderr and stdout, then returns
        the content as a list as if it were ipython code

        """
        output = []
        savefig = False # keep up with this to clear figure
        multiline = False # to handle line continuation
        multiline_start = None
        fmtin = self.promptin
        # ct tracks the synthetic In[]/Out[] counter for generated prompts.
        ct = 0
        for lineno, line in enumerate(content):
            line_stripped = line.strip()
            if not len(line):
                output.append(line)
                continue
            # handle decorators
            if line_stripped.startswith('@'):
                output.extend([line])
                if 'savefig' in line:
                    savefig = True # and need to clear figure
                continue
            # handle comments
            if line_stripped.startswith('#'):
                output.extend([line])
                continue
            # deal with lines checking for multiline
            continuation = u'   %s:'% ''.join(['.']*(len(str(ct))+2))
            if not multiline:
                modified = u"%s %s" % (fmtin % ct, line_stripped)
                output.append(modified)
                ct += 1
                try:
                    # If the line parses on its own it is a complete
                    # statement; otherwise assume a multiline construct began.
                    ast.parse(line_stripped)
                    output.append(u'')
                except Exception: # on a multiline
                    multiline = True
                    multiline_start = lineno
            else: # still on a multiline
                modified = u'%s %s' % (continuation, line)
                output.append(modified)
                # if the next line is indented, it should be part of multiline
                if len(content) > lineno + 1:
                    nextline = content[lineno + 1]
                    if len(nextline) - len(nextline.lstrip()) > 3:
                        continue
                try:
                    mod = ast.parse(
                            '\n'.join(content[multiline_start:lineno+1]))
                    if isinstance(mod.body[0], ast.FunctionDef):
                        # check to see if we have the whole function
                        for element in mod.body[0].body:
                            if isinstance(element, ast.Return):
                                multiline = False
                    else:
                        output.append(u'')
                        multiline = False
                except Exception:
                    pass
            if savefig: # clear figure if plotted
                self.ensure_pyplot()
                self.process_input_line('plt.clf()', store_history=False)
                self.clear_cout()
                savefig = False
        return output
    def custom_doctest(self, decorator, input_lines, found, submitted):
        """
        Perform a specialized doctest.

        ``decorator`` is e.g. "@doctest float"; the second word selects a
        handler from custom_doctests.doctests.
        """
        from .custom_doctests import doctests
        args = decorator.split()
        doctest_type = args[1]
        if doctest_type in doctests:
            doctests[doctest_type](self, args, input_lines, found, submitted)
        else:
            e = "Invalid option to @doctest: {0}".format(doctest_type)
            raise Exception(e)
class IPythonDirective(Directive):
    """Sphinx ``ipython::`` directive.

    Executes the directive content in an embedded IPython shell and inserts
    the resulting transcript (and any saved figures) into the document.
    """
    has_content = True
    required_arguments = 0
    optional_arguments = 4 # python, suppress, verbatim, doctest
    # BUG FIX: this attribute was misspelled 'final_argumuent_whitespace',
    # so docutils never saw it and final_argument_whitespace stayed at its
    # default (False).
    final_argument_whitespace = True
    option_spec = { 'python': directives.unchanged,
                    'suppress' : directives.flag,
                    'verbatim' : directives.flag,
                    'doctest' : directives.flag,
                    'okexcept': directives.flag,
                    'okwarning': directives.flag,
                    'output_encoding': directives.unchanged_required
                    }
    # Class-level state shared by every directive instance in one build:
    # a single embedded shell and the set of documents already processed.
    shell = None
    seen_docs = set()
    def get_config_options(self):
        """Read the ipython_* values from the Sphinx build configuration.

        Returns a tuple ``(savefig_dir, source_dir, rgxin, rgxout, promptin,
        promptout, mplbackend, exec_lines, hold_count)``.
        """
        # contains sphinx configuration variables
        config = self.state.document.settings.env.config
        # get config variables to set figure output directory
        confdir = self.state.document.settings.env.app.confdir
        savefig_dir = config.ipython_savefig_dir
        source_dir = os.path.dirname(self.state.document.current_source)
        if savefig_dir is None:
            # Fall back to the first html_static_path entry.
            savefig_dir = config.html_static_path
        if isinstance(savefig_dir, list):
            savefig_dir = savefig_dir[0] # safe to assume only one path?
        savefig_dir = os.path.join(confdir, savefig_dir)
        # get regex and prompt stuff
        rgxin = config.ipython_rgxin
        rgxout = config.ipython_rgxout
        promptin = config.ipython_promptin
        promptout = config.ipython_promptout
        mplbackend = config.ipython_mplbackend
        exec_lines = config.ipython_execlines
        hold_count = config.ipython_holdcount
        return (savefig_dir, source_dir, rgxin, rgxout,
                promptin, promptout, mplbackend, exec_lines, hold_count)
    def setup(self):
        """Create (once) and configure the shared EmbeddedSphinxShell.

        Returns ``(rgxin, rgxout, promptin, promptout)`` for run().
        """
        # Get configuration values.
        (savefig_dir, source_dir, rgxin, rgxout, promptin, promptout,
         mplbackend, exec_lines, hold_count) = self.get_config_options()
        if self.shell is None:
            # We will be here many times.  However, when the
            # EmbeddedSphinxShell is created, its interactive shell member
            # is the same for each instance.
            if mplbackend and 'matplotlib.backends' not in sys.modules:
                import matplotlib
                # Repeated calls to use() will not hurt us since `mplbackend`
                # is the same each time.
                matplotlib.use(mplbackend)
            # Must be called after (potentially) importing matplotlib and
            # setting its backend since exec_lines might import pylab.
            self.shell = EmbeddedSphinxShell(exec_lines, self.state)
            # Store IPython directive to enable better error messages
            self.shell.directive = self
        # reset the execution count if we haven't processed this doc
        #NOTE: this may be borked if there are multiple seen_doc tmp files
        #check time stamp?
        if self.state.document.current_source not in self.seen_docs:
            self.shell.IP.history_manager.reset()
            self.shell.IP.execution_count = 1
            try:
                self.shell.IP.prompt_manager.width = 0
            except AttributeError:
                # GH14003: class promptManager has removed after IPython 5.x
                pass
            self.seen_docs.add(self.state.document.current_source)
        # and attach to shell so we don't have to pass them around
        self.shell.rgxin = rgxin
        self.shell.rgxout = rgxout
        self.shell.promptin = promptin
        self.shell.promptout = promptout
        self.shell.savefig_dir = savefig_dir
        self.shell.source_dir = source_dir
        self.shell.hold_count = hold_count
        # setup bookmark for saving figures directory
        self.shell.process_input_line('bookmark ipy_savedir %s'%savefig_dir,
                                      store_history=False)
        self.shell.clear_cout()
        return rgxin, rgxout, promptin, promptout
    def teardown(self):
        """Remove the savefig bookmark created in setup()."""
        # delete last bookmark
        self.shell.process_input_line('bookmark -d ipy_savedir',
                                      store_history=False)
        self.shell.clear_cout()
    def run(self):
        """Process the directive body and return the generated nodes ([])."""
        debug = False
        #TODO, any reason block_parser can't be a method of embeddable shell
        # then we wouldn't have to carry these around
        rgxin, rgxout, promptin, promptout = self.setup()
        options = self.options
        # Propagate the per-block options onto the shared shell.
        self.shell.is_suppress = 'suppress' in options
        self.shell.is_doctest = 'doctest' in options
        self.shell.is_verbatim = 'verbatim' in options
        self.shell.is_okexcept = 'okexcept' in options
        self.shell.is_okwarning = 'okwarning' in options
        self.shell.output_encoding = [options.get('output_encoding', 'utf8')]
        # handle pure python code
        if 'python' in self.arguments:
            content = self.content
            self.content = self.shell.process_pure_python(content)
        parts = '\n'.join(self.content).split('\n\n')
        lines = ['.. code-block:: ipython', '']
        figures = []
        for part in parts:
            block = block_parser(part, rgxin, rgxout, promptin, promptout)
            if len(block):
                rows, figure = self.shell.process_block(block)
                for row in rows:
                    lines.extend(['   %s'%line for line in row.split('\n')])
                if figure is not None:
                    figures.append(figure)
        for figure in figures:
            lines.append('')
            lines.extend(figure.split('\n'))
            lines.append('')
        if len(lines)>2:
            if debug:
                print('\n'.join(lines))
            else:
                # This has to do with input, not output. But if we comment
                # these lines out, then no IPython code will appear in the
                # final output.
                self.state_machine.insert_input(
                    lines, self.state_machine.input_lines.source(0))
        # cleanup
        self.teardown()
        return []
# Enable as a proper Sphinx directive
def setup(app):
    """Sphinx extension entry point: register the directive and config values.

    ``app`` is the Sphinx application object supplied by Sphinx when it
    loads this module as an extension.
    """
    setup.app = app
    app.add_directive('ipython', IPythonDirective)
    app.add_config_value('ipython_savefig_dir', None, 'env')
    # Raw strings so that \[, \d and \s are regex escapes, not (invalid)
    # Python string escape sequences that warn on modern interpreters.
    app.add_config_value('ipython_rgxin',
                         re.compile(r'In \[(\d+)\]:\s?(.*)\s*'), 'env')
    app.add_config_value('ipython_rgxout',
                         re.compile(r'Out\[(\d+)\]:\s?(.*)\s*'), 'env')
    app.add_config_value('ipython_promptin', 'In [%d]:', 'env')
    app.add_config_value('ipython_promptout', 'Out[%d]:', 'env')
    # We could just let matplotlib pick whatever is specified as the default
    # backend in the matplotlibrc file, but this would cause issues if the
    # backend didn't work in headless environments. For this reason, 'agg'
    # is a good default backend choice.
    app.add_config_value('ipython_mplbackend', 'agg', 'env')
    # If the user sets this config value to `None`, then EmbeddedSphinxShell's
    # __init__ method will treat it as [].
    execlines = ['import numpy as np', 'import matplotlib.pyplot as plt']
    app.add_config_value('ipython_execlines', execlines, 'env')
    app.add_config_value('ipython_holdcount', True, 'env')
# Simple smoke test, needs to be converted to a proper automatic test.
def test():
    """Run the directive by hand over a set of representative transcripts.

    Each entry in ``examples`` is one raw ipython:: block exercising a
    feature (@savefig, @doctest, @verbatim, @suppress, semicolon output
    suppression, custom float doctests).  The string contents are runtime
    fixtures and must not be reformatted.
    """
    examples = [
        r"""
    In [9]: pwd
    Out[9]: '/home/jdhunter/py4science/book'
    In [10]: cd bookdata/
    /home/jdhunter/py4science/book/bookdata
    In [2]: from pylab import *
    In [2]: ion()
    In [3]: im = imread('stinkbug.png')
    @savefig mystinkbug.png width=4in
    In [4]: imshow(im)
    Out[4]: <matplotlib.image.AxesImage object at 0x39ea850>
    """,
        r"""
    In [1]: x = 'hello world'
    # string methods can be
    # used to alter the string
    @doctest
    In [2]: x.upper()
    Out[2]: 'HELLO WORLD'
    @verbatim
    In [3]: x.st<TAB>
    x.startswith  x.strip
    """,
        r"""
    In [130]: url = 'http://ichart.finance.yahoo.com/table.csv?s=CROX\
       .....: &d=9&e=22&f=2009&g=d&a=1&br=8&c=2006&ignore=.csv'
    In [131]: print url.split('&')
    ['http://ichart.finance.yahoo.com/table.csv?s=CROX', 'd=9', 'e=22', 'f=2009', 'g=d', 'a=1', 'b=8', 'c=2006', 'ignore=.csv']
    In [60]: import urllib
    """,
        r"""\
    In [133]: import numpy.random
    @suppress
    In [134]: numpy.random.seed(2358)
    @doctest
    In [135]: numpy.random.rand(10,2)
    Out[135]:
    array([[ 0.64524308,  0.59943846],
           [ 0.47102322,  0.8715456 ],
           [ 0.29370834,  0.74776844],
           [ 0.99539577,  0.1313423 ],
           [ 0.16250302,  0.21103583],
           [ 0.81626524,  0.1312433 ],
           [ 0.67338089,  0.72302393],
           [ 0.7566368 ,  0.07033696],
           [ 0.22591016,  0.77731835],
           [ 0.0072729 ,  0.34273127]])
    """,
        r"""
    In [106]: print x
    jdh
    In [109]: for i in range(10):
       .....:     print i
       .....:
       .....:
    0
    1
    2
    3
    4
    5
    6
    7
    8
    9
    """,
        r"""
    In [144]: from pylab import *
    In [145]: ion()
    # use a semicolon to suppress the output
    @savefig test_hist.png width=4in
    In [151]: hist(np.random.randn(10000), 100);
    @savefig test_plot.png width=4in
    In [151]: plot(np.random.randn(10000), 'o');
    """,
        r"""
    # use a semicolon to suppress the output
    In [151]: plt.clf()
    @savefig plot_simple.png width=4in
    In [151]: plot([1,2,3])
    @savefig hist_simple.png width=4in
    In [151]: hist(np.random.randn(10000), 100);
    """,
        r"""
    # update the current fig
    In [151]: ylabel('number')
    In [152]: title('normal distribution')
    @savefig hist_with_text.png
    In [153]: grid(True)
    @doctest float
    In [154]: 0.1 + 0.2
    Out[154]: 0.3
    @doctest float
    In [155]: np.arange(16).reshape(4,4)
    Out[155]:
    array([[ 0,  1,  2,  3],
           [ 4,  5,  6,  7],
           [ 8,  9, 10, 11],
           [12, 13, 14, 15]])
    In [1]: x = np.arange(16, dtype=float).reshape(4,4)
    In [2]: x[0,0] = np.inf
    In [3]: x[0,1] = np.nan
    @doctest float
    In [4]: x
    Out[4]:
    array([[ inf,  nan,   2.,   3.],
           [  4.,   5.,   6.,   7.],
           [  8.,   9.,  10.,  11.],
           [ 12.,  13.,  14.,  15.]])
    """,
        ]
    # skip local-file depending first example:
    examples = examples[1:]
    #ipython_directive.DEBUG = True  # dbg
    #options = dict(suppress=True)  # dbg
    options = dict()
    for example in examples:
        content = example.split('\n')
        # Instantiate the directive directly with dummy docutils arguments;
        # only the content/options matter for this smoke test.
        IPythonDirective('debug', arguments=None, options=options,
                         content=content, lineno=0,
                         content_offset=None, block_text=None,
                         state=None, state_machine=None,
                         )
# Run test suite as a script
if __name__=='__main__':
    # The examples save figures into _static/; make sure it exists first.
    if not os.path.isdir('_static'):
        os.mkdir('_static')
    test()
    print('All OK? Check figures in _static/')
| bsd-3-clause |
Alwnikrotikz/jythonconsole | tip.py | 9 | 1701 | from java.awt import Color, Dimension
from javax.swing import JWindow, JTextArea, JScrollPane
__author__ = "Don Coleman <[email protected]>"
__cvsid__ = "$Id: tip.py,v 1.3 2003/05/01 03:43:53 dcoleman Exp $"
class Tip(JWindow):
    """
    Window which provides the user with information about the method.
    For Python, this shows arguments, and the documentation
    For Java, this shows the signature(s) and return type
    """
    # Hard caps on the popup size (pixels); larger content scrolls.
    MAX_HEIGHT = 300
    MAX_WIDTH = 400
    def __init__(self, frame):
        """Build the (initially hidden) tip window owned by ``frame``."""
        JWindow.__init__(self, frame)
        self.textarea = JTextArea()
        # TODO put this color with all the other colors
        self.textarea.setBackground(Color(225,255,255))
        self.textarea.setEditable(0)
        self.jscrollpane = JScrollPane(self.textarea)
        self.getContentPane().add(self.jscrollpane)
    def setText(self, tip):
        """Replace the displayed text and resize the window to fit it."""
        self.textarea.setText(tip)
        # Scroll back to the top so long tips start at the beginning.
        self.textarea.setCaretPosition(0)
        #print >> sys.stderr, self.textarea.getPreferredScrollableViewportSize()
        self.setSize(self.getPreferredSize())
    def getPreferredSize(self):
        """Return the viewport size padded slightly, clamped to MAX_*."""
        # need to add a magic amount to the size to avoid scrollbars
        # I'm sure there's a better way to do this
        MAGIC = 20
        size = self.textarea.getPreferredScrollableViewportSize()
        height = size.height + MAGIC
        width = size.width + MAGIC
        if height > Tip.MAX_HEIGHT:
            height = Tip.MAX_HEIGHT
        if width > Tip.MAX_WIDTH:
            width = Tip.MAX_WIDTH
        return Dimension(width, height)
    def showTip(self, tip, displayPoint):
        """Position the window at ``displayPoint``, set ``tip`` and show it."""
        self.setLocation(displayPoint)
        self.setText(tip)
        self.show()
| lgpl-2.1 |
coursemdetw/2015cdb | static/Brython3.1.0-20150301-090019/Lib/site-packages/pygame/pkgdata.py | 603 | 2146 | """pkgdata is a simple, extensible way for a package to acquire data file
resources.
The getResource function is equivalent to the standard idioms, such as
the following minimal implementation::
import sys, os
def getResource(identifier, pkgname=__name__):
pkgpath = os.path.dirname(sys.modules[pkgname].__file__)
path = os.path.join(pkgpath, identifier)
return file(os.path.normpath(path), mode='rb')
When a __loader__ is present on the module given by __name__, it will defer
getResource to its get_data implementation and return it as a file-like
object (such as StringIO).
"""
__all__ = ['getResource']
import sys
import os
#from cStringIO import StringIO
from io import StringIO
try:
# Try to use setuptools if available.
from pkg_resources import resource_stream
_have_resource_stream = True
except ImportError:
_have_resource_stream = False
def getResource(identifier, pkgname=__name__):
    """Acquire a readable object for a given package name and identifier.
    An IOError will be raised if the resource can not be found.

    For example::
        mydata = getResource('mypkgdata.jpg').read()

    Note that the package name must be fully qualified, if given, such
    that it would be found in sys.modules.

    In some cases, getResource will return a real file object.  In that
    case, it may be useful to use its name attribute to get the path
    rather than use it as a file-like object.  For example, you may
    be handing data off to a C API.
    """
    from io import BytesIO
    # Prefer setuptools
    if _have_resource_stream:
        return resource_stream(pkgname, identifier)
    mod = sys.modules[pkgname]
    fn = getattr(mod, '__file__', None)
    if fn is None:
        # BUG FIX: the message previously lacked "% pkgname", so a literal
        # '%r' was raised instead of the offending package name.
        raise IOError("%r has no __file__!" % pkgname)
    path = os.path.join(os.path.dirname(fn), identifier)
    loader = getattr(mod, '__loader__', None)
    if loader is not None:
        try:
            data = loader.get_data(path)
        except IOError:
            # Best effort: fall through to opening the file directly.
            pass
        else:
            # BUG FIX: get_data() returns bytes; StringIO(bytes) raises
            # TypeError on Python 3.  BytesIO also matches the binary
            # ('rb') file object returned by the fallback below.
            return BytesIO(data)
    #return file(os.path.normpath(path), 'rb')
    return open(os.path.normpath(path), 'rb')
| gpl-3.0 |
sdague/home-assistant | homeassistant/components/scsgate/light.py | 12 | 3131 | """Support for SCSGate lights."""
import logging
from scsgate.tasks import ToggleStatusTask
import voluptuous as vol
from homeassistant.components.light import PLATFORM_SCHEMA, LightEntity
from homeassistant.const import ATTR_ENTITY_ID, ATTR_STATE, CONF_DEVICES, CONF_NAME
import homeassistant.helpers.config_validation as cv
from . import CONF_SCS_ID, DOMAIN, SCSGATE_SCHEMA
# Extend the base light platform schema: require a `devices` mapping whose
# keys are slugs and whose values follow the shared SCSGATE_SCHEMA.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {vol.Required(CONF_DEVICES): cv.schema_with_slug_keys(SCSGATE_SCHEMA)}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the SCSGate lights."""
    # NOTE: docstring previously said "switches" — copy-paste from the
    # switch platform; this module sets up lights.
    devices = config.get(CONF_DEVICES)
    lights = []
    logger = logging.getLogger(__name__)
    scsgate = hass.data[DOMAIN]
    if devices:
        for entity_info in devices.values():
            # Skip devices that another platform already registered.
            if entity_info[CONF_SCS_ID] in scsgate.devices:
                continue
            name = entity_info[CONF_NAME]
            scs_id = entity_info[CONF_SCS_ID]
            logger.info("Adding %s scsgate.light", name)
            light = SCSGateLight(
                name=name, scs_id=scs_id, logger=logger, scsgate=scsgate
            )
            lights.append(light)
    add_entities(lights)
    # Register the entities with the gateway so it can route bus messages
    # back to them via process_event().
    scsgate.add_devices_to_register(lights)
class SCSGateLight(LightEntity):
    """Representation of a SCSGate light."""
    def __init__(self, scs_id, name, logger, scsgate):
        """Initialize the light."""
        self._name = name
        self._scs_id = scs_id
        # Local cache of the on/off state, updated optimistically on
        # turn_on/turn_off and corrected by process_event().
        self._toggled = False
        self._logger = logger
        self._scsgate = scsgate
    @property
    def scs_id(self):
        """Return the SCS ID."""
        return self._scs_id
    @property
    def should_poll(self):
        """No polling needed for a SCSGate light."""
        # State changes are pushed from the bus via process_event().
        return False
    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name
    @property
    def is_on(self):
        """Return true if light is on."""
        return self._toggled
    def turn_on(self, **kwargs):
        """Turn the device on."""
        self._scsgate.append_task(ToggleStatusTask(target=self._scs_id, toggled=True))
        self._toggled = True
        self.schedule_update_ha_state()
    def turn_off(self, **kwargs):
        """Turn the device off."""
        self._scsgate.append_task(ToggleStatusTask(target=self._scs_id, toggled=False))
        self._toggled = False
        self.schedule_update_ha_state()
    def process_event(self, message):
        """Handle a SCSGate message related with this light."""
        if self._toggled == message.toggled:
            self._logger.info(
                "Light %s, ignoring message %s because state already active",
                self._scs_id,
                message,
            )
            # Nothing changed, ignoring
            return
        self._toggled = message.toggled
        self.schedule_update_ha_state()
        command = "off"
        if self._toggled:
            command = "on"
        # Mirror the physical button press onto the HA event bus so
        # automations can react to it.
        self.hass.bus.fire(
            "button_pressed", {ATTR_ENTITY_ID: self._scs_id, ATTR_STATE: command}
        )
| apache-2.0 |
amanikamail/flexx | docs/scripts/genexamples.py | 19 | 3162 | """ Generate docs for examples.
"""
import os
from types import ModuleType
from flexx import ui, app
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
DOC_DIR = os.path.abspath(os.path.join(THIS_DIR, '..'))
EXAMPLES_DIR = os.path.abspath(os.path.join(DOC_DIR, '..', 'examples'))
OUTPUT_DIR = os.path.join(DOC_DIR, 'examples')
created_files = []
def main():
    """Generate one reST page per example script plus per-submodule indexes.

    Walks EXAMPLES_DIR, wraps each ``*.py`` example in a code-block page
    under OUTPUT_DIR, and writes an ``examples.rst`` listing into each
    matching documentation subdirectory.  Every path created is recorded
    in ``created_files`` so that clean() can remove it afterwards.

    BUG FIX: all file handles are now opened via ``with`` blocks; the
    original used ``open(...).read()`` / ``open(...).write(...)`` and
    leaked the handles.
    """
    # Collect examples
    examples = {}
    for sub in os.listdir(EXAMPLES_DIR):
        dirname = os.path.join(EXAMPLES_DIR, sub)
        if os.path.isdir(dirname):
            examples[sub] = {}
            for fname in os.listdir(dirname):
                filename = os.path.join(dirname, fname)
                if os.path.isfile(filename) and fname.endswith('.py'):
                    # Create example content
                    with open(filename, 'rt') as f:
                        code = f.read()
                    text = ':orphan:\n\n'  # avoid toctree warning
                    text += '.. _%s:\n\n' % fname
                    text += '%s\n%s\n\n' % (fname, '=' * len(fname))
                    text += '.. code-block:: py\n :linenos:\n\n'
                    text += '\n ' + code.replace('\n', '\n ').rstrip() + '\n'
                    examples[sub][fname] = text
            if not examples[sub]:
                del examples[sub]
    # Write all examples
    created_files.append(OUTPUT_DIR)
    if not os.path.isdir(OUTPUT_DIR):
        os.mkdir(OUTPUT_DIR)
    for sub in list(examples.keys()):
        dirname = os.path.join(OUTPUT_DIR, sub)
        created_files.append(dirname)
        if not os.path.isdir(dirname):
            os.mkdir(dirname)
        for name in examples[sub]:
            filename = os.path.join(dirname, name + '.rst')
            created_files.append(filename)
            with open(filename, 'wt') as f:
                f.write(examples[sub][name])
    # # Create example index page
    # docs = 'Examples'
    # docs += '\n' + '=' * len(docs) + '\n\n'
    # for sub in sorted(examples):
    #     docs += '\n' + sub + ':\n\n'
    #     for name in sorted(examples[sub]):
    #         docs += '* :ref:`%s`\n' % name
    # # Write
    # filename = os.path.join(DOC_DIR, 'examples.rst')
    # created_files.append(filename)
    # open(filename, 'wt').write(docs)
    better_names = {'pyscript': 'PyScript'}
    # Create example pages per submodule
    for sub in examples:
        dirname = os.path.join(DOC_DIR, sub)
        if os.path.isdir(dirname):
            docs = better_names.get(sub, sub.capitalize()) + ' examples'
            docs += '\n%s\n\n' % (len(docs) * '=')
            for name in sorted(examples[sub]):
                docs += '* :ref:`%s`\n' % name
            # Write
            filename = os.path.join(DOC_DIR, sub, 'examples.rst')
            created_files.append(filename)
            with open(filename, 'wt') as f:
                f.write(docs)
    # Generator expression avoids building a throwaway list just to sum.
    print('  generated %i examples' % sum(len(x) for x in examples.values()))
def clean():
    """Undo main(): delete the recorded files, then their (empty) dirs.

    Consumes ``created_files`` in LIFO order so files are removed before
    the directories that contain them; non-empty directories are left
    untouched.
    """
    while created_files:
        path = created_files.pop()
        if os.path.isfile(path):
            os.remove(path)
            continue
        if os.path.isdir(path) and not os.listdir(path):
            os.rmdir(path)
| bsd-2-clause |
tengqm/senlin-container | senlin/tests/unit/api/common/test_util.py | 1 | 3858 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from webob import exc
from senlin.api.common import util
from senlin.api.common import wsgi
from senlin.common import context
from senlin.common import policy
from senlin.tests.unit.common import base
class TestGetAllowedParams(base.SenlinTestCase):
    """Unit tests for util.get_allowed_params filtering behavior."""
    def setUp(self):
        """Start each test with one 'foo' param and a single-value whitelist."""
        super(TestGetAllowedParams, self).setUp()
        req = wsgi.Request({})
        self.params = req.params.copy()
        self.params.add('foo', 'foo value')
        self.whitelist = {'foo': 'single'}
    def test_returns_empty_dict(self):
        # An empty whitelist filters out everything.
        self.whitelist = {}
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertEqual({}, result)
    def test_only_adds_whitelisted_params_if_param_exists(self):
        # Whitelisted keys absent from the request do not appear in result.
        self.whitelist = {'foo': 'single'}
        self.params.clear()
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertNotIn('foo', result)
    def test_returns_only_whitelisted_params(self):
        self.params.add('bar', 'bar value')
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertIn('foo', result)
        self.assertNotIn('bar', result)
    def test_handles_single_value_params(self):
        # 'single' mode returns a scalar, not a list.
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertEqual('foo value', result['foo'])
    def test_handles_multiple_value_params(self):
        # 'multi' mode always returns all values as a collection.
        self.whitelist = {'foo': 'multi'}
        self.params.add('foo', 'foo value 2')
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertEqual(2, len(result['foo']))
        self.assertIn('foo value', result['foo'])
        self.assertIn('foo value 2', result['foo'])
    def test_handles_mixed_value_param_with_multiple_entries(self):
        # 'mixed' mode returns a collection when there are several values...
        self.whitelist = {'foo': 'mixed'}
        self.params.add('foo', 'foo value 2')
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertEqual(2, len(result['foo']))
        self.assertIn('foo value', result['foo'])
        self.assertIn('foo value 2', result['foo'])
    def test_handles_mixed_value_param_with_single_entry(self):
        # ...but collapses to a scalar for a single value.
        self.whitelist = {'foo': 'mixed'}
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertEqual('foo value', result['foo'])
    def test_ignores_bogus_whitelist_items(self):
        # Unknown whitelist modes drop the parameter entirely.
        self.whitelist = {'foo': 'blah'}
        result = util.get_allowed_params(self.params, self.whitelist)
        self.assertNotIn('foo', result)
class TestPolicyEnforce(base.SenlinTestCase):
    """Unit tests for the util.policy_enforce decorator."""
    def setUp(self):
        """Build a non-admin request and a controller with one guarded action."""
        super(TestPolicyEnforce, self).setUp()
        self.req = wsgi.Request({})
        self.req.context = context.RequestContext(project='foo',
                                                  is_admin=False)
        class DummyController(object):
            # Scope name the decorator checks the policy against.
            REQUEST_SCOPE = 'test'
            @util.policy_enforce
            def an_action(self, req):
                return 'woot'
        self.controller = DummyController()
    @mock.patch.object(policy, 'enforce')
    def test_policy_enforce_policy_deny(self, mock_enforce):
        # When policy.enforce denies, the decorator must raise HTTPForbidden
        # instead of invoking the wrapped action.
        mock_enforce.return_value = False
        self.assertRaises(exc.HTTPForbidden,
                          self.controller.an_action,
                          self.req, tenant_id='foo')
| apache-2.0 |
TeslaProject/external_chromium_org | tools/telemetry/telemetry/core/backends/chrome/tab_list_backend.py | 46 | 2790 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import urllib2
from telemetry.core import tab
from telemetry.core import util
from telemetry.core.backends.chrome import inspector_backend_list
class TabListBackend(inspector_backend_list.InspectorBackendList):
  """A dynamic sequence of tab.Tabs in UI order."""
  def __init__(self, browser_backend):
    super(TabListBackend, self).__init__(browser_backend,
                                         backend_wrapper=tab.Tab)
  def New(self, timeout):
    # Ask the browser to open a new tab; the new tab appears at the end of
    # the list, hence self[-1].
    assert self._browser_backend.supports_tab_control
    self._browser_backend.Request('new', timeout=timeout)
    return self[-1]
  def CloseTab(self, debugger_url, timeout=None):
    """Close the tab identified by its DevTools debugger URL.

    Raises an Exception when the browser does not know the tab id (the
    /close endpoint returns an HTTP error).
    """
    assert self._browser_backend.supports_tab_control
    tab_id = inspector_backend_list.DebuggerUrlToId(debugger_url)
    # TODO(dtu): crbug.com/160946, allow closing the last tab on some platforms.
    # For now, just create a new tab before closing the last tab.
    if len(self) <= 1:
      self.New(timeout)
    try:
      response = self._browser_backend.Request('close/%s' % tab_id,
                                               timeout=timeout,
                                               throw_network_exception=True)
    except urllib2.HTTPError:
      raise Exception('Unable to close tab, tab id not found: %s' % tab_id)
    assert response == 'Target is closing'
    # Wait until the browser has actually dropped the tab from its list.
    util.WaitFor(lambda: tab_id not in self, timeout=5)
  def ActivateTab(self, debugger_url, timeout=None):
    """Bring the tab identified by its debugger URL to the foreground."""
    assert self._browser_backend.supports_tab_control
    tab_id = inspector_backend_list.DebuggerUrlToId(debugger_url)
    assert tab_id in self
    try:
      response = self._browser_backend.Request('activate/%s' % tab_id,
                                               timeout=timeout,
                                               throw_network_exception=True)
    except urllib2.HTTPError:
      raise Exception('Unable to activate tab, tab id not found: %s' % tab_id)
    assert response == 'Target activated'
  def GetTabUrl(self, debugger_url):
    # Look up the tab's current URL from its DevTools context info.
    tab_id = inspector_backend_list.DebuggerUrlToId(debugger_url)
    tab_info = self.GetContextInfo(tab_id)
    assert tab_info is not None
    return tab_info['url']
  def Get(self, index, ret):
    """Returns self[index] if it exists, or ret if index is out of bounds."""
    if len(self) <= index:
      return ret
    return self[index]
  def ShouldIncludeContext(self, context):
    # Only DevTools contexts of type 'page' correspond to tabs.
    if 'type' in context:
      return context['type'] == 'page'
    # TODO: For compatibility with Chrome before r177683.
    # This check is not completely correct, see crbug.com/190592.
    return not context['url'].startswith('chrome-extension://')
| bsd-3-clause |
jgsogo/neutron | webapp/synthetic/forms.py | 1 | 1384 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from neutron.models import Word
from .models import AlternateData
class AlternateDataForm(forms.ModelForm):
    """ModelForm for AlternateData that accepts the word as free text.

    The ``word`` CharField shadows the model's relation; ``clean()`` resolves
    (creating it if necessary) the ``Word`` instance from the typed string.
    """
    word = forms.CharField()
    class Meta:
        model = AlternateData
        fields = '__all__'
    def __init__(self, *args, **kwargs):
        # Pre-populate the text field from the related Word when editing an
        # existing instance.
        instance = kwargs.get('instance', None)
        initial = kwargs.pop('initial', {})
        if instance:
            initial.update({'word': instance.word.word })
        super(AlternateDataForm, self).__init__(initial=initial, *args, **kwargs)
    def clean(self):
        # NOTE(review): super().clean() is not called, so parent ModelForm
        # validation is skipped -- confirm this is intentional.
        cleaned_data = self.cleaned_data
        cleaned_data['word'], _ = Word.objects.get_or_create(word=cleaned_data['word'])
        return cleaned_data
class WordCoarseDataForm(forms.ModelForm):
    """ModelForm accepting the word as free text, resolved in ``clean()``.

    NOTE(review): ``Meta.model`` is ``AlternateData`` although the class name
    suggests a WordCoarseData model -- this looks like a copy-paste from
    ``AlternateDataForm`` above; TODO confirm the intended model.
    """
    word = forms.CharField()
    class Meta:
        model = AlternateData
        fields = '__all__'
    def __init__(self, *args, **kwargs):
        # Pre-populate the text field from the related Word when editing.
        instance = kwargs.get('instance', None)
        initial = kwargs.pop('initial', {})
        if instance:
            initial.update({'word': instance.word.word })
        super(WordCoarseDataForm, self).__init__(initial=initial, *args, **kwargs)
    def clean(self):
        # NOTE(review): super().clean() is not called -- confirm intentional.
        cleaned_data = self.cleaned_data
        cleaned_data['word'], _ = Word.objects.get_or_create(word=cleaned_data['word'])
        return cleaned_data
| gpl-2.0 |
shurihell/testasia | lms/djangoapps/certificates/tests/factories.py | 16 | 3695 | # Factories are self documenting
# pylint: disable=missing-docstring
import factory
from uuid import uuid4
from django.core.files.base import ContentFile
from factory.django import DjangoModelFactory, ImageField
from student.models import LinkedInAddToProfileConfiguration
from certificates.models import (
GeneratedCertificate, CertificateStatuses, CertificateHtmlViewConfiguration, CertificateWhitelist, BadgeAssertion,
BadgeImageConfiguration,
)
class GeneratedCertificateFactory(DjangoModelFactory):
    """Factory for GeneratedCertificate rows.

    Each generated certificate gets its own ``verify_uuid``.
    """
    class Meta(object):
        model = GeneratedCertificate
    course_id = None
    status = CertificateStatuses.unavailable
    mode = GeneratedCertificate.MODES.honor
    name = ''
    # Fixed: the original `verify_uuid = uuid4().hex` was evaluated once at
    # class-definition time, so every certificate built by this factory shared
    # the same UUID. LazyAttribute defers evaluation to build time, yielding a
    # fresh UUID per instance.
    verify_uuid = factory.LazyAttribute(lambda _: uuid4().hex)
class CertificateWhitelistFactory(DjangoModelFactory):
    """Factory for CertificateWhitelist entries (whitelisted by default)."""
    class Meta(object):
        model = CertificateWhitelist
    course_id = None
    whitelist = True
    notes = 'Test Notes'
class BadgeAssertionFactory(DjangoModelFactory):
    """Factory for BadgeAssertion with a canned Open Badges ``data`` payload."""
    class Meta(object):
        model = BadgeAssertion
    mode = 'honor'
    # Minimal assertion payload: badge image, assertion JSON and issuer URLs.
    data = {
        'image': 'http://www.example.com/image.png',
        'json': {'id': 'http://www.example.com/assertion.json'},
        'issuer': 'http://www.example.com/issuer.json',
    }
class BadgeImageConfigurationFactory(DjangoModelFactory):
    """Factory for BadgeImageConfiguration with a generated 50x50 blue PNG."""
    class Meta(object):
        model = BadgeImageConfiguration
    mode = 'honor'
    # Build an in-memory PNG using factory_boy's ImageField helper; the
    # protected _make_data() is used because no public equivalent exists.
    icon = factory.LazyAttribute(
        lambda _: ContentFile(
            ImageField()._make_data(  # pylint: disable=protected-access
                {'color': 'blue', 'width': 50, 'height': 50, 'format': 'PNG'}
            ), 'test.png'
        )
    )
class CertificateHtmlViewConfigurationFactory(DjangoModelFactory):
    """Factory for CertificateHtmlViewConfiguration with a full JSON config.

    The ``configuration`` blob provides defaults plus per-certificate-type
    (honor/verified/xseries) and per-microsite overrides.
    """
    class Meta(object):
        model = CertificateHtmlViewConfiguration
    enabled = True
    configuration = """{
            "default": {
                "accomplishment_class_append": "accomplishment-certificate",
                "platform_name": "edX",
                "company_about_url": "http://www.edx.org/about-us",
                "company_privacy_url": "http://www.edx.org/edx-privacy-policy",
                "company_tos_url": "http://www.edx.org/edx-terms-service",
                "company_verified_certificate_url": "http://www.edx.org/verified-certificate",
                "document_stylesheet_url_application": "/static/certificates/sass/main-ltr.css",
                "logo_src": "/static/certificates/images/logo-edx.png",
                "logo_url": "http://www.edx.org"
            },
            "honor": {
                "certificate_type": "Honor Code",
                "certificate_title": "Certificate of Achievement",
                "logo_url": "http://www.edx.org/honor_logo.png"
            },
            "verified": {
                "certificate_type": "Verified",
                "certificate_title": "Verified Certificate of Achievement"
            },
            "xseries": {
                "certificate_title": "XSeries Certificate of Achievement",
                "certificate_type": "XSeries"
            },
            "microsites": {
                "testmicrosite": {
                    "company_about_url": "http://www.testmicrosite.org/about-us",
                    "company_privacy_url": "http://www.testmicrosite.org/edx-privacy-policy",
                    "company_tos_url": "http://www.testmicrosite.org/edx-terms-service"
                }
            }
        }"""
class LinkedInAddToProfileConfigurationFactory(DjangoModelFactory):
    """Factory enabling the LinkedIn 'Add to Profile' configuration."""
    class Meta(object):
        model = LinkedInAddToProfileConfiguration
    enabled = True
    # Test-only LinkedIn partner identifiers.
    company_identifier = "0_0dPSPyS070e0HsE9HNz_13_d11_"
    trk_partner_name = 'unittest'
| agpl-3.0 |
viz-dev/viz | qa/rpc-tests/importprunedfunds.py | 55 | 5061 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ImportPrunedFundsTest(BitcoinTestFramework):
    """Functional test for the importprunedfunds/removeprunedfunds RPCs."""
    def __init__(self):
        super().__init__()
        self.setup_clean_chain = True
        self.num_nodes = 2
    def setup_network(self, split=False):
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        connect_nodes_bi(self.nodes,0,1)
        self.is_network_split=False
        self.sync_all()
    def run_test(self):
        """Import funds on node 1 with no key, a watch-only address, and a
        private key; then remove them again, checking balances each step."""
        print("Mining blocks...")
        self.nodes[0].generate(101)
        self.sync_all()
        # address
        address1 = self.nodes[0].getnewaddress()
        # pubkey
        address2 = self.nodes[0].getnewaddress()
        address2_pubkey = self.nodes[0].validateaddress(address2)['pubkey']  # Using pubkey
        # privkey
        address3 = self.nodes[0].getnewaddress()
        address3_privkey = self.nodes[0].dumpprivkey(address3)  # Using privkey
        #Check only one address
        address_info = self.nodes[0].validateaddress(address1)
        assert_equal(address_info['ismine'], True)
        self.sync_all()
        #Node 1 sync test
        assert_equal(self.nodes[1].getblockcount(),101)
        #Address Test - before import
        address_info = self.nodes[1].validateaddress(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address2)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        #Send funds to self
        txnid1 = self.nodes[0].sendtoaddress(address1, 0.1)
        self.nodes[0].generate(1)
        rawtxn1 = self.nodes[0].gettransaction(txnid1)['hex']
        proof1 = self.nodes[0].gettxoutproof([txnid1])
        txnid2 = self.nodes[0].sendtoaddress(address2, 0.05)
        self.nodes[0].generate(1)
        rawtxn2 = self.nodes[0].gettransaction(txnid2)['hex']
        proof2 = self.nodes[0].gettxoutproof([txnid2])
        txnid3 = self.nodes[0].sendtoaddress(address3, 0.025)
        self.nodes[0].generate(1)
        rawtxn3 = self.nodes[0].gettransaction(txnid3)['hex']
        proof3 = self.nodes[0].gettxoutproof([txnid3])
        self.sync_all()
        #Import with no affiliated address
        # Expected to fail: node 1 knows no address in the transaction.
        try:
            self.nodes[1].importprunedfunds(rawtxn1, proof1)
        except JSONRPCException as e:
            assert('No addresses' in e.error['message'])
        else:
            assert(False)
        balance1 = self.nodes[1].getbalance("", 0, True)
        assert_equal(balance1, Decimal(0))
        #Import with affiliated address with no rescan
        self.nodes[1].importaddress(address2, "add2", False)
        result2 = self.nodes[1].importprunedfunds(rawtxn2, proof2)  # NOTE: return value unused
        balance2 = self.nodes[1].getbalance("add2", 0, True)
        assert_equal(balance2, Decimal('0.05'))
        #Import with private key with no rescan
        self.nodes[1].importprivkey(address3_privkey, "add3", False)
        result3 = self.nodes[1].importprunedfunds(rawtxn3, proof3)  # NOTE: return value unused
        balance3 = self.nodes[1].getbalance("add3", 0, False)
        assert_equal(balance3, Decimal('0.025'))
        balance3 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance3, Decimal('0.075'))
        #Addresses Test - after import
        address_info = self.nodes[1].validateaddress(address1)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address2)
        assert_equal(address_info['iswatchonly'], True)
        assert_equal(address_info['ismine'], False)
        address_info = self.nodes[1].validateaddress(address3)
        assert_equal(address_info['iswatchonly'], False)
        assert_equal(address_info['ismine'], True)
        #Remove transactions
        # Removing a txid that was never imported must fail.
        try:
            self.nodes[1].removeprunedfunds(txnid1)
        except JSONRPCException as e:
            assert('does not exist' in e.error['message'])
        else:
            assert(False)
        balance1 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance1, Decimal('0.075'))
        self.nodes[1].removeprunedfunds(txnid2)
        balance2 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance2, Decimal('0.025'))
        self.nodes[1].removeprunedfunds(txnid3)
        balance3 = self.nodes[1].getbalance("*", 0, True)
        assert_equal(balance3, Decimal('0.0'))
# Allow running this functional test directly, outside the test runner.
if __name__ == '__main__':
    ImportPrunedFundsTest().main()
| mit |
nadley/Sick-Beard | sickbeard/search_queue.py | 29 | 9175 | # Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import datetime
import time
import sickbeard
from sickbeard import db, logger, common, exceptions, helpers
from sickbeard import generic_queue
from sickbeard import search
from sickbeard import ui
# Action-type identifiers for search queue items (passed to QueueItem as the
# action id; distinct values distinguish backlog, RSS and manual searches).
BACKLOG_SEARCH = 10
RSS_SEARCH = 20
MANUAL_SEARCH = 30
class SearchQueue(generic_queue.GenericQueue):
    """Queue coordinating backlog, RSS and manual episode searches."""

    def __init__(self):
        generic_queue.GenericQueue.__init__(self)
        self.queue_name = "SEARCHQUEUE"

    def is_in_queue(self, show, segment):
        """Return True if a backlog item for this show/segment is queued."""
        return any(isinstance(item, BacklogQueueItem)
                   and item.show == show and item.segment == segment
                   for item in self.queue)

    def is_ep_in_queue(self, ep_obj):
        """Return True if a manual-search item for this episode is queued."""
        return any(isinstance(item, ManualSearchQueueItem) and item.ep_obj == ep_obj
                   for item in self.queue)

    def pause_backlog(self):
        # Raising the minimum priority leaves LOW-priority backlog items waiting.
        self.min_priority = generic_queue.QueuePriorities.HIGH

    def unpause_backlog(self):
        self.min_priority = 0

    def is_backlog_paused(self):
        # backlog priorities are NORMAL, this should be done properly somewhere
        return self.min_priority >= generic_queue.QueuePriorities.NORMAL

    def is_backlog_in_progress(self):
        """Return True if a backlog item is queued or currently running."""
        return any(isinstance(item, BacklogQueueItem)
                   for item in self.queue + [self.currentItem])

    def add_item(self, item):
        """Add an item to the queue, skipping duplicate backlog/manual items."""
        if isinstance(item, RSSSearchQueueItem):
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, ManualSearchQueueItem) and not self.is_ep_in_queue(item.ep_obj):
            generic_queue.GenericQueue.add_item(self, item)
        else:
            logger.log(u"Not adding item, it's already in the queue", logger.DEBUG)
class ManualSearchQueueItem(generic_queue.QueueItem):
    """Queue item performing a user-triggered search for a single episode."""

    def __init__(self, ep_obj):
        generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.ep_obj = ep_obj
        # None while pending, then True/False once the snatch attempt finishes.
        self.success = None

    def execute(self):
        """Search providers for the episode and snatch the first result."""
        generic_queue.QueueItem.execute(self)
        logger.log("Searching for download for " + self.ep_obj.prettyName())
        foundEpisode = search.findEpisode(self.ep_obj, manualSearch=True)
        result = False
        if not foundEpisode:
            ui.notifications.message('No downloads were found', "Couldn't find a download for <i>%s</i>" % self.ep_obj.prettyName())
            logger.log(u"Unable to find a download for "+self.ep_obj.prettyName())
        else:
            # just use the first result for now
            logger.log(u"Downloading episode from " + foundEpisode.url)
            result = search.snatchEpisode(foundEpisode)
            providerModule = foundEpisode.provider
            if not result:
                ui.notifications.error('Error while attempting to snatch '+foundEpisode.name+', check your logs')
            elif providerModule is None:  # fixed: identity comparison (was `== None`)
                ui.notifications.error('Provider is configured incorrectly, unable to download')
        self.success = result

    def finish(self):
        # don't let this linger if something goes wrong
        if self.success is None:  # fixed: identity comparison (was `== None`)
            self.success = False
        generic_queue.QueueItem.finish(self)
class RSSSearchQueueItem(generic_queue.QueueItem):
    """Queue item performing the periodic RSS search for needed episodes."""

    def __init__(self):
        generic_queue.QueueItem.__init__(self, 'RSS Search', RSS_SEARCH)

    def execute(self):
        """Promote overdue episodes to WANTED, then search and snatch results."""
        generic_queue.QueueItem.execute(self)
        self._changeMissingEpisodes()
        logger.log(u"Beginning search for new episodes on RSS")
        foundResults = search.searchForNeededEpisodes()
        if not foundResults:  # fixed idiom: was `if not len(foundResults):`
            logger.log(u"No needed episodes found on the RSS feeds")
        else:
            for curResult in foundResults:
                search.snatchEpisode(curResult)
                # brief pause between snatches to avoid hammering providers
                time.sleep(2)
        generic_queue.QueueItem.finish(self)

    def _changeMissingEpisodes(self):
        """Flip UNAIRED episodes whose airdate has passed to WANTED (or
        SKIPPED when the show is paused)."""
        logger.log(u"Changing all old missing episodes to status WANTED")
        curDate = datetime.date.today().toordinal()
        myDB = db.DBConnection()
        sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE status = ? AND airdate < ?", [common.UNAIRED, curDate])
        for sqlEp in sqlResults:
            try:
                show = helpers.findCertainShow(sickbeard.showList, int(sqlEp["showid"]))
            except exceptions.MultipleShowObjectsException:
                logger.log(u"ERROR: expected to find a single show matching " + sqlEp["showid"])
                return None
            if show is None:  # fixed: identity comparison (was `== None`)
                logger.log(u"Unable to find the show with ID "+str(sqlEp["showid"])+" in your show list! DB value was "+str(sqlEp), logger.ERROR)
                return None
            ep = show.getEpisode(sqlEp["season"], sqlEp["episode"])
            with ep.lock:
                if ep.show.paused:
                    ep.status = common.SKIPPED
                else:
                    ep.status = common.WANTED
                ep.saveToDB()
class BacklogQueueItem(generic_queue.QueueItem):
    """Queue item searching a whole season (or a month for air-by-date shows)."""
    def __init__(self, show, segment):
        generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
        self.priority = generic_queue.QueuePriorities.LOW
        self.thread_name = 'BACKLOG-'+str(show.tvdbid)
        self.show = show
        self.segment = segment
        logger.log(u"Seeing if we need any episodes from "+self.show.name+" season "+str(self.segment))
        myDB = db.DBConnection()
        # see if there is anything in this season worth searching for
        if not self.show.air_by_date:
            statusResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ?", [self.show.tvdbid, self.segment])
        else:
            # air-by-date shows use a "YYYY-MM" segment; turn it into a date range
            segment_year, segment_month = map(int, self.segment.split('-'))
            min_date = datetime.date(segment_year, segment_month, 1)
            # it's easier to just hard code this than to worry about rolling the year over or making a month length map
            if segment_month == 12:
                max_date = datetime.date(segment_year, 12, 31)
            else:
                max_date = datetime.date(segment_year, segment_month+1, 1) - datetime.timedelta(days=1)
            statusResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND airdate >= ? AND airdate <= ?",
                                        [self.show.tvdbid, min_date.toordinal(), max_date.toordinal()])
        anyQualities, bestQualities = common.Quality.splitQuality(self.show.quality) #@UnusedVariable
        # cached decision on whether this segment needs searching at all
        self.wantSeason = self._need_any_episodes(statusResults, bestQualities)
    def execute(self):
        generic_queue.QueueItem.execute(self)
        results = search.findSeason(self.show, self.segment)
        # download whatever we find
        if results:
            for curResult in results:
                search.snatchEpisode(curResult)
                time.sleep(5)
        self.finish()
    def _need_any_episodes(self, statusResults, bestQualities):
        """Return True if any episode in the segment is WANTED or could be
        upgraded to a better quality than what was already snatched/downloaded."""
        wantSeason = False
        # check through the list of statuses to see if we want any
        for curStatusResult in statusResults:
            curCompositeStatus = int(curStatusResult["status"])
            curStatus, curQuality = common.Quality.splitCompositeStatus(curCompositeStatus)
            if bestQualities:
                highestBestQuality = max(bestQualities)
            else:
                highestBestQuality = 0
            # if we need a better one then say yes
            if (curStatus in (common.DOWNLOADED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_FRENCH) and curQuality < highestBestQuality) or curStatus == common.WANTED:
                wantSeason = True
                break
        return wantSeason
| gpl-3.0 |
bihealth/vcfpy | tests/test_header.py | 1 | 13943 | # -*- coding: utf-8 -*-
"""Tests for vcfpy.header
"""
import sys
import vcfpy
from vcfpy import header
import pytest
def test_header_field_info():
    """FieldInfo: equality, hashing and string representation."""
    a = header.FieldInfo("Integer", 1, "Some description")
    b = header.FieldInfo("Integer", 1, "Some description")
    c = header.FieldInfo("Integer", ".", "Some description")
    assert a == b
    assert a != c
    assert hash(a) == hash(b)
    expected = "FieldInfo('Integer', 1, 'Some description', None)"
    assert str(a) == expected
    assert repr(a) == expected
def test_sample_infos():
    """SamplesInfos: equality, unhashability and string representation."""
    a = header.SamplesInfos(["one", "two", "three"])
    b = header.SamplesInfos(["one", "two", "three"])
    c = header.SamplesInfos(["one", "two", "four"])
    assert a == b
    assert a != c
    with pytest.raises(TypeError):
        hash(a)
    expected = (
        "SamplesInfos(names=['one', 'two', 'three'], "
        "name_to_idx={'one': 0, 'three': 2, 'two': 1})"
    )
    assert str(a) == expected
    assert repr(a) == expected
def test_header_header():
    """Header: equality, repr and unhashability."""

    def make(first_key):
        # Build a two-line header with a fixed sample list.
        lines = [header.HeaderLine(first_key, "bar"), header.HeaderLine("foo2", "bar2")]
        samples = header.SamplesInfos(["one", "two", "three"])
        return header.Header(lines, samples)

    hdr1 = make("foo")
    hdr2 = make("foo")
    hdr3 = make("foo3")
    assert hdr1 == hdr2
    assert hdr1 != hdr3
    EXPECTED = (
        "Header(lines=[HeaderLine('foo', 'bar'), HeaderLine('foo2', 'bar2')], "
        "samples=SamplesInfos(names=['one', 'two', 'three'], "
        "name_to_idx={'one': 0, 'three': 2, 'two': 1}))"
    )
    assert str(hdr1) == EXPECTED
    with pytest.raises(TypeError):
        hash(hdr1)
def test_header_without_lines():
    """header_without_lines() drops exactly the requested header lines."""
    hdr = header.Header(
        [header.HeaderLine("foo", "bar"), header.HeaderLine("foo2", "bar2")],
        header.SamplesInfos(["one", "two", "three"]),
    )
    hdr.add_filter_line(vcfpy.OrderedDict([("ID", "PASS")]))
    hdr.add_filter_line(vcfpy.OrderedDict([("ID", "q30")]))
    assert len(hdr.lines) == 4
    stripped = header.header_without_lines(hdr, [("foo", "bar"), ("FILTER", "q30")])
    assert len(stripped.lines) == 2
    assert stripped.samples == hdr.samples
def test_header_header_line():
    """HeaderLine: equality, repr, value and serialization."""
    line1 = header.HeaderLine("key", "value")
    line2 = header.HeaderLine("key", "value")
    line3 = header.HeaderLine("key2", "value")
    assert line1 == line2
    assert line1 != line3
    expected = "HeaderLine('key', 'value')"
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == "value"
    assert line1.serialize() == "##key=value"
    with pytest.raises(TypeError):
        hash(line1)
def test_header_alt_allele_header_line():
    """AltAlleleHeaderLine: equality, repr and serialization."""

    def make(alt_id, desc):
        return header.AltAlleleHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", alt_id), ("Description", desc)])
        )

    line1 = make("DEL", "deletion")
    line2 = make("DEL", "deletion")
    line3 = make("DUP", "duplication")
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = (
            """AltAlleleHeaderLine('ALT', '<ID=DEL,Description="deletion">', """
            """OrderedDict([('ID', 'DEL'), ('Description', 'deletion')]))"""
        )
    else:
        expected = (
            "AltAlleleHeaderLine('ALT', '<ID=DEL,Description=\"deletion\">', "
            "{'ID': 'DEL', 'Description': 'deletion'})"
        )
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == '<ID=DEL,Description="deletion">'
    assert line1.serialize() == '##ALT=<ID=DEL,Description="deletion">'
    with pytest.raises(TypeError):
        hash(line1)
def test_header_contig_header_line():
    """ContigHeaderLine: equality, repr and serialization."""

    def make(contig_id, length):
        return header.ContigHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", contig_id), ("length", length)])
        )

    line1 = make("1", 234)
    line2 = make("1", 234)
    line3 = make("2", 123)
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = (
            "ContigHeaderLine('contig', '<ID=1,length=234>', OrderedDict([('ID', '1'), ('length', 234)]))"
        )
    else:
        expected = (
            "ContigHeaderLine('contig', '<ID=1,length=234>', {'ID': '1', 'length': 234})"
        )
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == "<ID=1,length=234>"
    assert line1.serialize() == "##contig=<ID=1,length=234>"
    with pytest.raises(TypeError):
        hash(line1)
def test_header_filter_header_line():
    """FilterHeaderLine: equality, repr and serialization."""

    def make(filter_id, desc):
        return header.FilterHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", filter_id), ("Description", desc)])
        )

    line1 = make("PASS", "All filters passed")
    line2 = make("PASS", "All filters passed")
    line3 = make("q30", "Phred score <30")
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = (
            "FilterHeaderLine('FILTER', '<ID=PASS,Description=\"All filters passed\">', "
            "OrderedDict([('ID', 'PASS'), ('Description', 'All filters passed')]))"
        )
    else:
        expected = (
            "FilterHeaderLine('FILTER', '<ID=PASS,Description=\"All filters passed\">', "
            "{'ID': 'PASS', 'Description': 'All filters passed'})"
        )
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == '<ID=PASS,Description="All filters passed">'
    assert line1.serialize() == '##FILTER=<ID=PASS,Description="All filters passed">'
    with pytest.raises(TypeError):
        hash(line1)
def test_header_pedigree_header_line():
    """PedigreeHeaderLine: equality, repr and serialization."""

    def make(pairs):
        return header.PedigreeHeaderLine.from_mapping(vcfpy.OrderedDict(pairs))

    line1 = make([("ID", "child"), ("Father", "father")])
    line2 = make([("ID", "child"), ("Father", "father")])
    line3 = make([("ID", "father")])
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = (
            "PedigreeHeaderLine('PEDIGREE', '<ID=child,Father=father>', "
            "OrderedDict([('ID', 'child'), ('Father', 'father')]))"
        )
    else:
        expected = (
            "PedigreeHeaderLine('PEDIGREE', '<ID=child,Father=father>', {'ID': 'child', 'Father': 'father'})"
        )
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == "<ID=child,Father=father>"
    assert line1.serialize() == "##PEDIGREE=<ID=child,Father=father>"
    with pytest.raises(TypeError):
        hash(line1)
def test_header_sample_header_line():
    """SampleHeaderLine: equality, repr and serialization."""

    def make(sample_id):
        return header.SampleHeaderLine.from_mapping(vcfpy.OrderedDict([("ID", sample_id)]))

    line1 = make("sample1")
    line2 = make("sample1")
    line3 = make("sample2")
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = "SampleHeaderLine('SAMPLE', '<ID=sample1>', OrderedDict([('ID', 'sample1')]))"
    else:
        expected = "SampleHeaderLine('SAMPLE', '<ID=sample1>', {'ID': 'sample1'})"
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == "<ID=sample1>"
    assert line1.serialize() == "##SAMPLE=<ID=sample1>"
    with pytest.raises(TypeError):
        hash(line1)
def test_header_info_header_line():
    """InfoHeaderLine: equality, repr and serialization."""

    def make(info_id, number, type_):
        return header.InfoHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", info_id), ("Number", number), ("Type", type_)])
        )

    line1 = make("SVTYPE", 1, "String")
    line2 = make("SVTYPE", 1, "String")
    line3 = make("END", 1, "Integer")
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = (
            "InfoHeaderLine('INFO', '<ID=SVTYPE,Number=1,Type=String>', "
            "OrderedDict([('ID', 'SVTYPE'), ('Number', 1), ('Type', 'String')]))"
        )
    else:
        expected = (
            "InfoHeaderLine('INFO', '<ID=SVTYPE,Number=1,Type=String>', "
            "{'ID': 'SVTYPE', 'Number': 1, 'Type': 'String'})"
        )
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == "<ID=SVTYPE,Number=1,Type=String>"
    assert line1.serialize() == "##INFO=<ID=SVTYPE,Number=1,Type=String>"
    with pytest.raises(TypeError):
        hash(line1)
def test_header_format_header_line():
    """FormatHeaderLine: equality, repr and serialization."""

    def make(fmt_id, number, type_):
        return header.FormatHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", fmt_id), ("Number", number), ("Type", type_)])
        )

    line1 = make("AD", "R", "Integer")
    line2 = make("AD", "R", "Integer")
    line3 = make("DP", 1, "Integer")
    assert line1 == line2
    assert line1 != line3
    # dict repr changed in Python 3.6 (compact literal vs OrderedDict).
    if sys.version_info < (3, 6):
        expected = (
            "FormatHeaderLine('FORMAT', '<ID=AD,Number=R,Type=Integer>', "
            "OrderedDict([('ID', 'AD'), ('Number', 'R'), ('Type', 'Integer')]))"
        )
    else:
        expected = (
            "FormatHeaderLine('FORMAT', '<ID=AD,Number=R,Type=Integer>', "
            "{'ID': 'AD', 'Number': 'R', 'Type': 'Integer'})"
        )
    assert str(line1) == expected
    assert repr(line1) == expected
    assert line1.value == "<ID=AD,Number=R,Type=Integer>"
    assert line1.serialize() == "##FORMAT=<ID=AD,Number=R,Type=Integer>"
    with pytest.raises(TypeError):
        hash(line1)
def test_header_has_header_line_positive():
    """has_header_line() finds FORMAT, INFO, FILTER and contig lines."""
    lines = [
        header.FormatHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", "DP"), ("Number", "R"), ("Type", "Integer")])
        ),
        header.InfoHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", "AD"), ("Number", "R"), ("Type", "Integer")])
        ),
        header.FilterHeaderLine.from_mapping(
            vcfpy.OrderedDict([("ID", "PASS"), ("Description", "All filters passed")])
        ),
        header.ContigHeaderLine.from_mapping(vcfpy.OrderedDict([("ID", "1"), ("length", 234)])),
    ]
    hdr = header.Header(lines, header.SamplesInfos(["one", "two", "three"]))
    for key, line_id in (("FORMAT", "DP"), ("INFO", "AD"), ("FILTER", "PASS"), ("contig", "1")):
        assert hdr.has_header_line(key, line_id)
def test_header_has_header_line_positive_no_samples():
    """has_header_line() is False for every key on a header with no lines.

    NOTE(review): despite ``positive`` in the name, this checks the negative
    case (no header lines present); the name looks copy-pasted from the test
    above -- consider renaming.
    """
    lines = []
    samples = header.SamplesInfos(["one", "two", "three"])
    hdr = header.Header(lines, samples)
    assert not hdr.has_header_line("FORMAT", "DP")
    assert not hdr.has_header_line("INFO", "AD")
    assert not hdr.has_header_line("FILTER", "PASS")
    assert not hdr.has_header_line("contig", "1")
| mit |
Tehsmash/ironic | ironic/drivers/modules/snmp.py | 2 | 24149 | # Copyright 2013,2014 Cray Inc
#
# Authors: David Hewson <[email protected]>
# Stig Telfer <[email protected]>
# Mark Goddard <[email protected]>
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Ironic SNMP power manager.
Provides basic power control using an SNMP-enabled smart power controller.
Uses a pluggable driver model to support devices with different SNMP object
models.
"""
import abc
from oslo.config import cfg
from oslo.utils import importutils
import six
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LW
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers import base
from ironic.openstack.common import log as logging
from ironic.openstack.common import loopingcall
pysnmp = importutils.try_import('pysnmp')
if pysnmp:
from pysnmp.entity.rfc3413.oneliner import cmdgen
from pysnmp import error as snmp_error
from pysnmp.proto import rfc1902
else:
cmdgen = None
snmp_error = None
rfc1902 = None
# Configuration options registered under the [snmp] group.
opts = [
    cfg.IntOpt('power_timeout',
               default=10,
               help='Seconds to wait for power action to be completed')
]

LOG = logging.getLogger(__name__)

CONF = cfg.CONF
CONF.register_opts(opts, group='snmp')

# Accepted values for the 'snmp_version' driver_info property.
SNMP_V1 = '1'
SNMP_V2C = '2c'
SNMP_V3 = '3'
# Default SNMP UDP port (IANA standard).
SNMP_PORT = 161

# driver_info properties every node using this driver must supply.
REQUIRED_PROPERTIES = {
    'snmp_driver': _("PDU manufacturer driver. Required."),
    'snmp_address': _("PDU IPv4 address or hostname. Required."),
    'snmp_outlet': _("PDU power outlet index (1-based). Required."),
}
# driver_info properties that are optional or conditionally required
# (community for v1/v2c, security for v3).
OPTIONAL_PROPERTIES = {
    'snmp_version':
        _("SNMP protocol version: %(v1)s, %(v2c)s, %(v3)s "
          "(optional, default %(v1)s)")
        % {"v1": SNMP_V1, "v2c": SNMP_V2C, "v3": SNMP_V3},
    'snmp_port':
        _("SNMP port, default %(port)d") % {"port": SNMP_PORT},
    'snmp_community':
        _("SNMP community. Required for versions %(v1)s, %(v2c)s")
        % {"v1": SNMP_V1, "v2c": SNMP_V2C},
    'snmp_security':
        _("SNMP security name. Required for version %(v3)s")
        % {"v3": SNMP_V3},
}
# Union of the two dicts above, reported by SNMPPower.get_properties().
COMMON_PROPERTIES = REQUIRED_PROPERTIES.copy()
COMMON_PROPERTIES.update(OPTIONAL_PROPERTIES)
class SNMPClient(object):
    """SNMP client object.

    Wraps the PySNMP one-liner command generator so that the rest of the
    driver never touches PySNMP directly, which keeps dynamic importing and
    unit testing simple. Only scalar GET and SET operations are supported.
    """

    def __init__(self, address, port, version, community=None, security=None):
        self.address = address
        self.port = port
        self.version = version
        if self.version == SNMP_V3:
            # SNMPv3 authenticates with a security name.
            self.security = security
        else:
            # SNMPv1/v2c authenticate with a community string.
            self.community = community
        self.cmd_gen = cmdgen.CommandGenerator()

    def _get_auth(self):
        """Return the authorization data for an SNMP request.

        :returns: A
            :class:`pysnmp.entity.rfc3413.oneliner.cmdgen.CommunityData`
            object.
        """
        if self.version == SNMP_V3:
            # Handling auth/encryption credentials is not (yet) supported.
            # This version supports a security name analogous to community.
            return cmdgen.UsmUserData(self.security)
        # mpModel 0 selects SNMPv1, 1 selects SNMPv2c.
        return cmdgen.CommunityData(
            self.community, mpModel=1 if self.version == SNMP_V2C else 0)

    def _get_transport(self):
        """Return the transport target for an SNMP request.

        :returns: A :class:
            `pysnmp.entity.rfc3413.oneliner.cmdgen.UdpTransportTarget` object.
        :raises: snmp_error.PySnmpError if the transport address is bad.
        """
        # The transport target accepts timeout and retries parameters, which
        # default to 1 (second) and 5 respectively. These are deemed sensible
        # enough to allow for an unreliable network or slow device.
        return cmdgen.UdpTransportTarget((self.address, self.port))

    @staticmethod
    def _check_response(operation, results):
        """Raise SNMPFailure if a PySNMP response reports an error.

        :param operation: "GET" or "SET", used in the failure message.
        :param results: tuple returned by getCmd/setCmd.
        :returns: the response variable bindings.
        :raises: SNMPFailure on an engine-level or PDU-level error.
        """
        error_indication, error_status, _error_index, var_binds = results
        if error_indication:
            # SNMP engine-level error.
            raise exception.SNMPFailure(operation=operation,
                                        error=error_indication)
        if error_status:
            # SNMP PDU error.
            raise exception.SNMPFailure(operation=operation,
                                        error=error_status.prettyPrint())
        return var_binds

    def get(self, oid):
        """Use PySNMP to perform an SNMP GET operation on a single object.

        :param oid: The OID of the object to get.
        :raises: SNMPFailure if an SNMP request fails.
        :returns: The value of the requested object.
        """
        try:
            results = self.cmd_gen.getCmd(self._get_auth(),
                                          self._get_transport(),
                                          oid)
        except snmp_error.PySnmpError as e:
            raise exception.SNMPFailure(operation="GET", error=e)
        var_binds = self._check_response("GET", results)
        # We only expect a single value back for a scalar GET.
        return var_binds[0][1]

    def set(self, oid, value):
        """Use PySNMP to perform an SNMP SET operation on a single object.

        :param oid: The OID of the object to set.
        :param value: The value of the object to set.
        :raises: SNMPFailure if an SNMP request fails.
        """
        try:
            results = self.cmd_gen.setCmd(self._get_auth(),
                                          self._get_transport(),
                                          (oid, value))
        except snmp_error.PySnmpError as e:
            raise exception.SNMPFailure(operation="SET", error=e)
        self._check_response("SET", results)
def _get_client(snmp_info):
    """Create and return an SNMP client object.

    :param snmp_info: SNMP driver info.
    :returns: A :class:`SNMPClient` object.
    """
    # community/security may legitimately be absent depending on version.
    return SNMPClient(address=snmp_info["address"],
                      port=snmp_info["port"],
                      version=snmp_info["version"],
                      community=snmp_info.get("community"),
                      security=snmp_info.get("security"))
@six.add_metaclass(abc.ABCMeta)
class SNMPDriverBase(object):
    """SNMP power driver base class.

    The SNMPDriver class hierarchy implements manufacturer-specific MIB
    actions over SNMP to interface with different smart power controller
    products.
    """

    # All vendor MIBs used here hang off the standard 'enterprises' arc.
    oid_enterprise = (1, 3, 6, 1, 4, 1)
    # Seconds between successive power-state polls while waiting for a
    # requested transition to complete.
    retry_interval = 1

    def __init__(self, snmp_info):
        # snmp_info: validated dict produced by _parse_driver_info().
        self.snmp_info = snmp_info
        self.client = _get_client(snmp_info)

    @abc.abstractmethod
    def _snmp_power_state(self):
        """Perform the SNMP request required to get the current power state.

        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`.
        """

    @abc.abstractmethod
    def _snmp_power_on(self):
        """Perform the SNMP request required to set the power on.

        :raises: SNMPFailure if an SNMP request fails.
        """

    @abc.abstractmethod
    def _snmp_power_off(self):
        """Perform the SNMP request required to set the power off.

        :raises: SNMPFailure if an SNMP request fails.
        """

    def _snmp_wait_for_state(self, goal_state):
        """Wait for the power state of the PDU outlet to change.

        Polls the device every ``retry_interval`` seconds until it reports
        ``goal_state`` or the configured [snmp]power_timeout elapses.

        :param goal_state: The power state to wait for, one of
            :class:`ironic.common.states`.
        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`
            (ERROR on timeout).
        """
        def _poll_for_state(mutable):
            """Called at an interval until the node's power is consistent.

            :param mutable: dict object containing "state" and "next_time"
            :raises: SNMPFailure if an SNMP request fails.
            """
            mutable["state"] = self._snmp_power_state()
            if mutable["state"] == goal_state:
                raise loopingcall.LoopingCallDone()
            # Accumulate elapsed time; give up with ERROR once the
            # configured timeout is reached.
            mutable["next_time"] += self.retry_interval
            if mutable["next_time"] >= CONF.snmp.power_timeout:
                mutable["state"] = states.ERROR
                raise loopingcall.LoopingCallDone()

        # Pass state to the looped function call in a mutable form.
        state = {"state": None, "next_time": 0}
        timer = loopingcall.FixedIntervalLoopingCall(_poll_for_state,
                                                     state)
        timer.start(interval=self.retry_interval).wait()
        LOG.debug("power state '%s'", state["state"])
        return state["state"]

    def power_state(self):
        """Returns a node's current power state.

        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`.
        """
        return self._snmp_power_state()

    def power_on(self):
        """Set the power state to this node to ON.

        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`.
        """
        self._snmp_power_on()
        return self._snmp_wait_for_state(states.POWER_ON)

    def power_off(self):
        """Set the power state to this node to OFF.

        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`.
        """
        self._snmp_power_off()
        return self._snmp_wait_for_state(states.POWER_OFF)

    def power_reset(self):
        """Reset the power to this node.

        Powers the node off and back on, waiting for each transition to
        complete; bails out with ERROR if either step fails.

        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`.
        """
        power_result = self.power_off()
        if power_result != states.POWER_OFF:
            return states.ERROR
        power_result = self.power_on()
        if power_result != states.POWER_ON:
            return states.ERROR
        return power_result
class SNMPDriverSimple(SNMPDriverBase):
    """SNMP driver base class for simple PDU devices.

    Here, simple refers to devices which provide a single SNMP object for
    controlling the power state of an outlet.

    The default OID of the power state object is of the form
    <enterprise OID>.<device OID>.<outlet ID>. A different OID may be
    specified by overriding the _snmp_oid method in a subclass.
    """

    def __init__(self, *args, **kwargs):
        super(SNMPDriverSimple, self).__init__(*args, **kwargs)
        # The outlet OID never changes for a given node, so compute it once.
        self.oid = self._snmp_oid()

    @abc.abstractproperty
    def oid_device(self):
        """Device dependent portion of the power state object OID."""

    @abc.abstractproperty
    def value_power_on(self):
        """Value representing power on state."""

    @abc.abstractproperty
    def value_power_off(self):
        """Value representing power off state."""

    def _snmp_oid(self):
        """Return the OID of the power state object.

        :returns: Power state object OID as a tuple of integers.
        """
        index = (int(self.snmp_info['outlet']),)
        return self.oid_enterprise + self.oid_device + index

    def _snmp_power_state(self):
        current = self.client.get(self.oid)

        # Translate the device value into an Ironic power state.
        if current == self.value_power_on:
            return states.POWER_ON
        if current == self.value_power_off:
            return states.POWER_OFF

        LOG.warning(_LW("SNMP PDU %(addr)s outlet %(outlet)s: "
                        "unrecognised power state %(state)s."),
                    {'addr': self.snmp_info['address'],
                     'outlet': self.snmp_info['outlet'],
                     'state': current})
        return states.ERROR

    def _snmp_power_on(self):
        self.client.set(self.oid, rfc1902.Integer(self.value_power_on))

    def _snmp_power_off(self):
        self.client.set(self.oid, rfc1902.Integer(self.value_power_off))
class SNMPDriverAten(SNMPDriverSimple):
    """SNMP driver class for Aten PDU devices.

    SNMP objects for Aten PDU:
    1.3.6.1.4.1.21317.1.3.2.2.2.2 Outlet Power
    Values: 1=Off, 2=On, 3=Pending, 4=Reset
    """

    oid_device = (21317, 1, 3, 2, 2, 2, 2)
    value_power_on = 2
    value_power_off = 1

    def _snmp_oid(self):
        """Return the OID of the power state object.

        :returns: Power state object OID as a tuple of integers.
        """
        # Aten appends a trailing scalar suffix of 0 after the outlet index.
        index = (int(self.snmp_info['outlet']), 0)
        return self.oid_enterprise + self.oid_device + index
class SNMPDriverAPC(SNMPDriverSimple):
    """SNMP driver class for APC PDU devices.

    SNMP objects for APC PDU:
    1.3.6.1.4.1.318.1.1.4.4.2.1.3 sPDUOutletCtl
    Values: 1=On, 2=Off, 3=PowerCycle, [...more options follow]
    """

    # Enterprise-relative OID of sPDUOutletCtl; the outlet index is appended
    # by SNMPDriverSimple._snmp_oid.
    oid_device = (318, 1, 1, 4, 4, 2, 1, 3)
    value_power_on = 1
    value_power_off = 2
class SNMPDriverCyberPower(SNMPDriverSimple):
    """SNMP driver class for CyberPower PDU devices.

    SNMP objects for CyberPower PDU:
    1.3.6.1.4.1.3808.1.1.3.3.3.1.1.4 ePDUOutletControlOutletCommand
    Values: 1=On, 2=Off, 3=PowerCycle, [...more options follow]
    """

    # NOTE(mgoddard): This device driver is currently untested, this driver has
    #                 been implemented based upon its published MIB
    #                 documentation.

    # Enterprise-relative OID of ePDUOutletControlOutletCommand; the outlet
    # index is appended by SNMPDriverSimple._snmp_oid.
    oid_device = (3808, 1, 1, 3, 3, 3, 1, 1, 4)
    value_power_on = 1
    value_power_off = 2
class SNMPDriverTeltronix(SNMPDriverSimple):
    """SNMP driver class for Teltronix PDU devices.

    SNMP objects for Teltronix PDU:
    1.3.6.1.4.1.23620.1.2.2.1.4   Outlet Power
    Values: 1=Off, 2=On
    """

    # Enterprise-relative OID of the outlet power object; the outlet index is
    # appended by SNMPDriverSimple._snmp_oid.
    oid_device = (23620, 1, 2, 2, 1, 4)
    value_power_on = 2
    value_power_off = 1
class SNMPDriverEatonPower(SNMPDriverBase):
    """SNMP driver class for Eaton Power PDU.

    The Eaton power PDU does not follow the model of SNMPDriverSimple as it
    uses multiple SNMP objects.

    SNMP objects for Eaton Power PDU
    1.3.6.1.4.1.534.6.6.7.6.6.1.2.<outlet ID> outletControlStatus
    Read 0=off, 1=on, 2=pending off, 3=pending on
    1.3.6.1.4.1.534.6.6.7.6.6.1.3.<outlet ID> outletControlOffCmd
    Write 0 for immediate power off
    1.3.6.1.4.1.534.6.6.7.6.6.1.4.<outlet ID> outletControlOnCmd
    Write 0 for immediate power on
    """

    # NOTE(mgoddard): This device driver is currently untested, this driver has
    #                 been implemented based upon its published MIB
    #                 documentation.

    oid_device = (534, 6, 6, 7, 6, 6, 1)
    # Action-specific OID components, appended to the common base below.
    oid_status = (2,)
    oid_poweron = (3,)
    oid_poweroff = (4,)

    # Values read back from outletControlStatus.
    status_off = 0
    status_on = 1
    status_pending_off = 2
    status_pending_on = 3

    # Both command objects take a write of 0 for "immediate".
    value_power_on = 0
    value_power_off = 0

    def __init__(self, *args, **kwargs):
        super(SNMPDriverEatonPower, self).__init__(*args, **kwargs)
        # Due to its use of different OIDs for different actions, we only form
        # an OID that holds the common substring of the OIDs for power
        # operations.
        self.oid_base = self.oid_enterprise + self.oid_device

    def _snmp_oid(self, oid):
        """Return the OID for one of the outlet control objects.

        :param oid: The action-dependent portion of the OID, as a tuple of
            integers.
        :returns: The full OID as a tuple of integers.
        """
        index = (int(self.snmp_info['outlet']),)
        return self.oid_base + oid + index

    def _snmp_power_state(self):
        current = self.client.get(self._snmp_oid(self.oid_status))

        # Map the device status (including transitional states) onto an
        # Ironic power state.
        if current in (self.status_on, self.status_pending_off):
            return states.POWER_ON
        if current in (self.status_off, self.status_pending_on):
            return states.POWER_OFF

        LOG.warning(_LW("Eaton Power SNMP PDU %(addr)s outlet %(outlet)s: "
                        "unrecognised power state %(state)s."),
                    {'addr': self.snmp_info['address'],
                     'outlet': self.snmp_info['outlet'],
                     'state': current})
        return states.ERROR

    def _snmp_power_on(self):
        self.client.set(self._snmp_oid(self.oid_poweron),
                        rfc1902.Integer(self.value_power_on))

    def _snmp_power_off(self):
        self.client.set(self._snmp_oid(self.oid_poweroff),
                        rfc1902.Integer(self.value_power_off))
# A dictionary of supported drivers keyed by snmp_driver attribute
# (the value a node supplies in driver_info['snmp_driver']).
DRIVER_CLASSES = {
    'apc': SNMPDriverAPC,
    'aten': SNMPDriverAten,
    'cyberpower': SNMPDriverCyberPower,
    'eatonpower': SNMPDriverEatonPower,
    'teltronix': SNMPDriverTeltronix
}
def _parse_driver_info(node):
    """Parse a node's driver_info values.

    Return a dictionary of validated driver information, usable for
    SNMPDriver object creation.

    :param node: An Ironic node object.
    :returns: SNMP driver info.
    :raises: MissingParameterValue if any required parameters are missing.
    :raises: InvalidParameterValue if any parameters are invalid.
    """
    info = node.driver_info or {}
    missing_info = [key for key in REQUIRED_PROPERTIES if not info.get(key)]
    if missing_info:
        raise exception.MissingParameterValue(_(
            "SNMP driver requires the following parameters to be set in "
            "node's driver_info: %s.") % missing_info)

    snmp_info = {}

    # Validate PDU driver type
    snmp_info['driver'] = info.get('snmp_driver')
    if snmp_info['driver'] not in DRIVER_CLASSES:
        raise exception.InvalidParameterValue(_(
            "SNMPPowerDriver: unknown driver: '%s'") % snmp_info['driver'])

    # In absence of a version, default to SNMPv1
    snmp_info['version'] = info.get('snmp_version', SNMP_V1)
    if snmp_info['version'] not in (SNMP_V1, SNMP_V2C, SNMP_V3):
        raise exception.InvalidParameterValue(_(
            "SNMPPowerDriver: unknown SNMP version: '%s'") %
            snmp_info['version'])

    # In absence of a configured UDP port, default to the standard port
    port_str = info.get('snmp_port', SNMP_PORT)
    try:
        snmp_info['port'] = int(port_str)
    except ValueError:
        raise exception.InvalidParameterValue(_(
            "SNMPPowerDriver: SNMP UDP port must be numeric: %s") % port_str)

    if snmp_info['port'] < 1 or snmp_info['port'] > 65535:
        raise exception.InvalidParameterValue(_(
            "SNMPPowerDriver: SNMP UDP port out of range: %d")
            % snmp_info['port'])

    # Extract version-dependent required parameters
    if snmp_info['version'] in (SNMP_V1, SNMP_V2C):
        if 'snmp_community' not in info:
            raise exception.MissingParameterValue(_(
                "SNMP driver requires snmp_community to be set for version "
                "%s.") % snmp_info['version'])
        snmp_info['community'] = info.get('snmp_community')
    elif snmp_info['version'] == SNMP_V3:
        if 'snmp_security' not in info:
            raise exception.MissingParameterValue(_(
                "SNMP driver requires snmp_security to be set for version %s.")
                % (SNMP_V3))
        snmp_info['security'] = info.get('snmp_security')

    # Target PDU IP address and power outlet identification
    snmp_info['address'] = info.get('snmp_address')
    outlet = info.get('snmp_outlet')
    # Validate the outlet index up front: the device drivers call int() on
    # this value when building OIDs, so a non-numeric outlet would otherwise
    # surface as an unhandled ValueError at power-action time rather than as
    # a validation error here, breaking this function's documented contract.
    try:
        int(outlet)
    except (TypeError, ValueError):
        raise exception.InvalidParameterValue(_(
            "SNMPPowerDriver: PDU power outlet index must be numeric: %s")
            % outlet)
    snmp_info['outlet'] = outlet

    return snmp_info
def _get_driver(node):
    """Return a new SNMP driver object of the correct type for `node`.

    :param node: Single node object.
    :raises: InvalidParameterValue if node power config is incomplete or
        invalid.
    :returns: SNMP driver object.
    """
    # _parse_driver_info guarantees 'driver' is a valid DRIVER_CLASSES key.
    snmp_info = _parse_driver_info(node)
    return DRIVER_CLASSES[snmp_info['driver']](snmp_info)
class SNMPPower(base.PowerInterface):
    """SNMP Power Interface.

    This PowerInterface class provides a mechanism for controlling the power
    state of a physical device using an SNMP-enabled smart power controller.
    """

    def get_properties(self):
        """Return the properties of the interface.

        :returns: dictionary of <property name>:<property description> entries.
        """
        return COMMON_PROPERTIES

    def validate(self, task):
        """Check that node.driver_info contains the requisite fields.

        :raises: MissingParameterValue if required SNMP parameters are missing.
        :raises: InvalidParameterValue if SNMP parameters are invalid.
        """
        _parse_driver_info(task.node)

    def get_power_state(self, task):
        """Get the current power state.

        Poll the SNMP device for the current power state of the node.

        :param task: A instance of `ironic.manager.task_manager.TaskManager`.
        :raises: MissingParameterValue if required SNMP parameters are missing.
        :raises: InvalidParameterValue if SNMP parameters are invalid.
        :raises: SNMPFailure if an SNMP request fails.
        :returns: power state. One of :class:`ironic.common.states`.
        """
        return _get_driver(task.node).power_state()

    @task_manager.require_exclusive_lock
    def set_power_state(self, task, pstate):
        """Turn the power on or off.

        Set the power state of a node.

        :param task: A instance of `ironic.manager.task_manager.TaskManager`.
        :param pstate: Either POWER_ON or POWER_OFF from :class:
            `ironic.common.states`.
        :raises: MissingParameterValue if required SNMP parameters are missing.
        :raises: InvalidParameterValue if SNMP parameters are invalid or
            `pstate` is invalid.
        :raises: PowerStateFailure if the final power state of the node is not
            as requested after the timeout.
        :raises: SNMPFailure if an SNMP request fails.
        """
        driver = _get_driver(task.node)
        # Dispatch on the requested state; anything else is a caller error.
        actions = {states.POWER_ON: driver.power_on,
                   states.POWER_OFF: driver.power_off}
        if pstate not in actions:
            raise exception.InvalidParameterValue(_("set_power_state called "
                                                    "with invalid power "
                                                    "state %s.") % str(pstate))
        if actions[pstate]() != pstate:
            raise exception.PowerStateFailure(pstate=pstate)

    @task_manager.require_exclusive_lock
    def reboot(self, task):
        """Cycles the power to a node.

        :param task: A instance of `ironic.manager.task_manager.TaskManager`.
        :raises: MissingParameterValue if required SNMP parameters are missing.
        :raises: InvalidParameterValue if SNMP parameters are invalid.
        :raises: PowerStateFailure if the final power state of the node is not
            POWER_ON after the timeout.
        :raises: SNMPFailure if an SNMP request fails.
        """
        if _get_driver(task.node).power_reset() != states.POWER_ON:
            raise exception.PowerStateFailure(pstate=states.POWER_ON)
| apache-2.0 |
lamby/pkg-rst2pdf | rst2pdf/tests/input/sphinx-issue172/conf.py | 9 | 7199 | # -*- coding: utf-8 -*-
#
# Sphinx markup documentation build configuration file, created by
# sphinx-quickstart on Tue Aug 18 22:54:33 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['rst2pdf.pdfbuilder']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sphinx'
copyright = u'2009, RA'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'test'
# The full version, including alpha/beta/rc tags.
release = 'test'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'sphinx'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'sphinx.tex', u'sphinx Documentation',
u'RA', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# -- Options for PDF output --------------------------------------------------

# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author).
pdf_documents = [
    ('index', u'MyProject', u'My Project', u'Author Name'),
]

# A list of custom rst2pdf stylesheet names to apply, in order. Example:
pdf_stylesheets = ['sphinx']

# Create a compressed PDF
# Use True/False or 1/0
# Example: compressed=True
#pdf_compressed=False

# A colon-separated list of folders to search for fonts. Example:
# pdf_font_path=['/usr/share/fonts', '/usr/share/texmf-dist/fonts/']

# Language to be used for hyphenation support
pdf_language="en_US"

# If false, no index is generated.
pdf_use_index = True

# If false, no modindex is generated.
pdf_use_modindex = True

# If false, no coverpage is generated.
pdf_use_coverpage = False

# Section level at which to start a fresh PDF page (1 = break on top-level
# sections); see the rst2pdf manual for details.
pdf_break_level = 1

# rst2pdf verbosity; 0 keeps the build quiet.
pdf_verbosity=0

# Produce output without timestamps/metadata variation so successive test
# builds can be compared byte-for-byte.
pdf_invariant = True
| mit |
brainelectronics/towerdefense | tests/text/EMPTY.py | 28 | 1136 | #!/usr/bin/env python
'''Test that an empty document doesn't break.
'''
__docformat__ = 'restructuredtext'
__noninteractive = True
import unittest
from pyglet import gl
from pyglet import graphics
from pyglet.text import document
from pyglet.text import layout
from pyglet import window
class TestWindow(window.Window):
    """Window hosting an incremental text layout over a freshly built document."""

    def __init__(self, doctype, *args, **kwargs):
        super(TestWindow, self).__init__(*args, **kwargs)
        self.batch = graphics.Batch()
        self.document = doctype()
        # Lay the (empty) document out across the whole window area.
        self.layout = layout.IncrementalTextLayout(
            self.document, self.width, self.height, batch=self.batch)

    def on_draw(self):
        # White background, then draw whatever the batch holds.
        gl.glClearColor(1, 1, 1, 1)
        self.clear()
        self.batch.draw()
class TestCase(unittest.TestCase):
    """Check that an empty document of either kind can be laid out."""

    def _open_and_close(self, doctype):
        # Creating the window triggers layout of the empty document; any
        # breakage surfaces as an exception here.
        self.window = TestWindow(doctype)
        self.window.dispatch_events()
        self.window.close()

    def testUnformatted(self):
        self._open_and_close(document.UnformattedDocument)

    def testFormatted(self):
        self._open_and_close(document.FormattedDocument)
if __name__ == '__main__':
    # Discover and run the test cases above when executed directly.
    unittest.main()
| bsd-3-clause |
benesch/adspygoogle.dfp | adspygoogle/common/Client.py | 3 | 10125 | #!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Interface for accessing all other services."""
__author__ = '[email protected] (Stan Grinberg)'
import datetime
import os
import pickle
import warnings
from adspygoogle.common import PYXML
from adspygoogle.common import SanityCheck
from adspygoogle.common import Utils
from adspygoogle.common.Errors import ValidationError
# The values in _DEFAULT_CONFIG will be used to populate a user's configuration
# if any of these keys was not provided. Boolean-ish options are stored as
# 'y'/'n' string flags.
_DEFAULT_CONFIG = {
    'proxy': None,
    'xml_parser': PYXML,
    'debug': 'n',
    'raw_debug': 'n',
    'xml_log': 'y',
    'request_log': 'y',
    'raw_response': 'n',
    'strict': 'y',
    'auth_token_epoch': 0,
    'auth_type': '',
    'pretty_xml': 'y',
    'compress': 'y',
    'access': '',
    'wrap_in_tuple': 'y'
}

# The _OAUTH_2_AUTH_KEYS are the keys in the authentication dictionary that are
# used to construct an OAuth 2.0 credential.
_OAUTH_2_AUTH_KEYS = set(['clientId', 'clientSecret', 'refreshToken'])

# The web address for generating OAuth 2.0 credentials at Google.
_GOOGLE_OAUTH2_ENDPOINT = 'https://accounts.google.com/o/oauth2/token'
class Client(object):
"""Provides entry point to all web services.
Allows instantiation of all web services.
"""
home = os.getcwd()
auth_pkl = ''
config_pkl = ''
def __init__(self, headers=None, config=None, path=None):
"""Inits Client.
Args:
[optional]
headers: dict Object with populated authentication credentials.
config: dict Object with client configuration values.
path: str Relative or absolute path to home directory (i.e. location of
pickles and logs/).
"""
self._headers = headers or {}
self._config = config or self._SetMissingDefaultConfigValues()
def _LoadAuthCredentials(self):
"""Load existing authentication credentials from auth.pkl.
Returns:
dict Dictionary object with populated authentication credentials.
Raises:
ValidationError: if authentication data is missing.
"""
auth = {}
if os.path.exists(self.__class__.auth_pkl):
fh = open(self.__class__.auth_pkl, 'r')
try:
auth = pickle.load(fh)
finally:
fh.close()
if not auth:
msg = 'Authentication data is missing.'
raise ValidationError(msg)
if _OAUTH_2_AUTH_KEYS.issubset(set(auth.keys())):
from oauth2client.client import OAuth2Credentials
auth['oauth2credentials'] = OAuth2Credentials(
None, auth['clientId'], auth['clientSecret'], auth['refreshToken'],
datetime.datetime(1980, 1, 1, 12), _GOOGLE_OAUTH2_ENDPOINT,
'Google Ads* Python Client Library')
for auth_key in _OAUTH_2_AUTH_KEYS:
del auth[auth_key]
return auth
def _WriteUpdatedAuthValue(self, key, new_value):
"""Write updated authentication value for a key in auth.pkl.
Args:
key: str Key to update.
new_value: str New value to update the key with.
"""
auth = self._LoadAuthCredentials()
auth[key] = new_value
# Only write to an existing pickle.
if os.path.exists(self.__class__.auth_pkl):
fh = open(self.__class__.auth_pkl, 'w')
try:
pickle.dump(auth, fh)
finally:
fh.close()
def _LoadConfigValues(self):
"""Load existing configuration values from config.pkl.
Returns:
dict Dictionary object with populated configuration values.
"""
config = {}
if os.path.exists(self.__class__.config_pkl):
fh = open(self.__class__.config_pkl, 'r')
try:
config = pickle.load(fh)
finally:
fh.close()
if not config:
# Proceed to set default config values.
pass
return config
def _SetMissingDefaultConfigValues(self, config=None):
"""Set default configuration values for missing elements in the config dict.
Args:
config: dict Object with client configuration values.
Returns:
dict Given config dictionary with default values added in.
"""
if config is None: config = {}
for key in _DEFAULT_CONFIG:
if key not in config:
config[key] = _DEFAULT_CONFIG[key]
return config
  def GetAuthCredentials(self):
    """Return authentication credentials.

    Returns:
      dict Authentication credentials.
    """
    return self._headers

  def GetConfigValues(self):
    """Return configuration values.

    Returns:
      dict Configuration values.
    """
    return self._config

  def SetDebug(self, new_state):
    """Temporarily change debug mode for a given Client instance.

    Args:
      new_state: bool New state of the debug mode.
    """
    self._config['debug'] = Utils.BoolTypeConvert(new_state, str)
  def __GetDebug(self):
    """Return current state of the debug mode.

    Returns:
      str State of the debug mode, stored as a string flag (default 'n').
    """
    return self._config['debug']

  def __SetDebug(self, new_state):
    """Temporarily change debug mode for a given Client instance.

    Args:
      new_state: bool New state of the debug mode.
    """
    self._config['debug'] = Utils.BoolTypeConvert(new_state, str)

  # Property exposing the debug flag, e.g. client.debug = True.
  debug = property(__GetDebug, __SetDebug)
  def __GetRawDebug(self):
    """Return current state of the raw debug mode.

    Returns:
      str State of the raw debug mode, stored as a string flag (default 'n').
    """
    return self._config['raw_debug']

  def __SetRawDebug(self, new_state):
    """Temporarily change raw debug mode for a given Client instance.

    Args:
      new_state: bool New state of the raw debug mode.
    """
    self._config['raw_debug'] = Utils.BoolTypeConvert(new_state, str)

  # Property exposing the raw debug flag, e.g. client.raw_debug = True.
  raw_debug = property(__GetRawDebug, __SetRawDebug)
def __GetUseStrict(self):
    """Getter backing the ``strict`` property.

    Returns:
      str Current state of the strictness mode.
    """
    return self._config['strict']

def __SetUseStrict(self, new_state):
    """Setter backing the ``strict`` property.

    Args:
      new_state: bool New state of the strictness mode.
    """
    self._config['strict'] = Utils.BoolTypeConvert(new_state, str)

# Expose strictness mode as an attribute-style property.
strict = property(__GetUseStrict, __SetUseStrict)
def __GetXmlParser(self):
    """Getter backing the ``xml_parser`` property.

    Returns:
      bool Identifier of the XML parser currently in use.
    """
    return self._config['xml_parser']

def __SetXmlParser(self, new_state):
    """Setter backing the ``xml_parser`` property.

    Args:
      new_state: bool Identifier of the XML parser to use.
    """
    # Reject unknown parser identifiers before storing the value.
    SanityCheck.ValidateConfigXmlParser(new_state)
    self._config['xml_parser'] = new_state

# Expose the XML parser choice as an attribute-style property.
xml_parser = property(__GetXmlParser, __SetXmlParser)
def CallRawMethod(self, soap_message, url, http_proxy):
    """Call an API method directly, using a raw SOAP message.

    Outgoing data sent through this entry point is not run through the
    library's validation logic.

    Args:
      soap_message: str SOAP XML message.
      url: str URL of the API service for the method to call.
      http_proxy: str HTTP proxy to use for this API call.

    Returns:
      tuple Response from the API method (SOAP XML response message).
    """
    # No-op here; presumably overridden by concrete clients — verify in
    # subclasses.
    pass
def __SetOAuth2Credentials(self, credentials):
    """Setter backing the ``oauth2credentials`` property.

    Args:
      credentials: object OAuth2 credentials to store in the headers.
    """
    self._headers['oauth2credentials'] = credentials

def __GetOAuth2Credentials(self):
    """Getter backing the ``oauth2credentials`` property.

    Returns:
      object The stored OAuth2 credentials.
    """
    return self._headers['oauth2credentials']

# Expose the OAuth2 credentials as an attribute-style property.
oauth2credentials = property(__GetOAuth2Credentials, __SetOAuth2Credentials)
def __SetCaCertsFile(self, ca_certs_file):
    """Setter backing the ``ca_certs`` property.

    WARNING: using this feature monkey-patches a new HTTPS class into
    httplib; every other user of httplib in the process is affected.

    Args:
      ca_certs_file: string Path to a file storing trusted certificates.
          A false-y value restores the original httplib.HTTPS class and
          disables certificate validation.
    """
    try:
        from https import Https
        if not ca_certs_file:
            ca_certs_file = None
        Https.MonkeyPatchHttplib(ca_certs_file)
    except ImportError:
        warnings.warn('Your Python installation does not support SSL certificate validation!')

def __GetCaCertsFile(self):
    """Getter backing the ``ca_certs`` property: current trusted-certs path."""
    try:
        from https import Https
        return Https.GetCurrentCertsFile()
    except ImportError:
        warnings.warn('Your Python installation does not support SSL certificate validation!')

# Expose the trusted-certificates file as an attribute-style property.
ca_certs = property(__GetCaCertsFile, __SetCaCertsFile)
def __SetUsingCompression(self, is_using):
    """Setter backing the ``compress`` property.

    Args:
      is_using: boolean Whether the client should use HTTP compression.
    """
    self._config['compress'] = is_using

def __GetUsingCompression(self):
    """Getter backing the ``compress`` property.

    Returns:
      boolean Whether this client is using HTTP compression.
    """
    return self._config['compress']

# Expose the HTTP-compression flag as an attribute-style property.
compress = property(__GetUsingCompression, __SetUsingCompression)
| apache-2.0 |
mszewczy/odoo | addons/portal/tests/test_portal.py | 198 | 14169 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.mail.tests.common import TestMail
from openerp.exceptions import AccessError
from openerp.osv.orm import except_orm
from openerp.tools.misc import mute_logger
class test_portal(TestMail):
    """Access-right, invitation and notification tests for portal users."""

    @classmethod
    def setUpClass(cls):
        super(test_portal, cls).setUpClass()
        cr, uid = cls.cr, cls.uid

        # Find Portal group
        cls.group_portal_id = cls.env.ref('base.group_portal').id

        # Create Chell (portal user)
        cls.user_chell_id = cls.res_users.create(cr, uid, {
            'name': 'Chell Gladys',
            'login': 'chell',
            'email': '[email protected]',
            'groups_id': [(6, 0, [cls.group_portal_id])]
        }, {'no_reset_password': True})
        cls.user_chell = cls.res_users.browse(cr, uid, cls.user_chell_id)
        cls.partner_chell_id = cls.user_chell.partner_id.id

        # Create a PigsPortal group (visible to the portal group only)
        cls.group_port_id = cls.mail_group.create(cr, uid,
            {'name': 'PigsPortal', 'public': 'groups', 'group_public_id': cls.group_portal_id},
            {'mail_create_nolog': True})

        # Set an email address for the user running the tests, used as Sender for outgoing mails
        cls.res_users.write(cr, uid, uid, {'email': 'test@localhost'})

    @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
    def test_00_mail_access_rights(self):
        """ Test basic mail_message and mail_group access rights for portal users. """
        cr, uid = self.cr, self.uid
        mail_compose = self.registry('mail.compose.message')

        # Prepare group: Pigs and PigsPortal
        pigs_msg_id = self.mail_group.message_post(cr, uid, self.group_pigs_id, body='Message')
        port_msg_id = self.mail_group.message_post(cr, uid, self.group_port_id, body='Message')

        # Do: Chell browses Pigs -> ko, employee group
        chell_pigs = self.mail_group.browse(cr, self.user_chell_id, self.group_pigs_id)
        with self.assertRaises(except_orm):
            trigger_read = chell_pigs.name
        # Do: Chell posts a message on Pigs, crash because can not write on group or is not in the followers
        with self.assertRaises(AccessError):
            self.mail_group.message_post(cr, self.user_chell_id, self.group_pigs_id, body='Message')
        # Do: Chell is added into Pigs followers and browse it -> ok for messages, ko for partners (no read permission)
        self.mail_group.message_subscribe_users(cr, uid, [self.group_pigs_id], [self.user_chell_id])
        chell_pigs = self.mail_group.browse(cr, self.user_chell_id, self.group_pigs_id)
        trigger_read = chell_pigs.name
        for message in chell_pigs.message_ids:
            trigger_read = message.subject
        for partner in chell_pigs.message_follower_ids:
            if partner.id == self.partner_chell_id:
                # Chell can read her own partner record
                continue
            with self.assertRaises(except_orm):
                trigger_read = partner.name

        # Do: Chell comments Pigs, ok because he is now in the followers
        self.mail_group.message_post(cr, self.user_chell_id, self.group_pigs_id, body='I love Pigs')
        # Do: Chell creates a mail.compose.message record on Pigs, because he uses the wizard
        compose_id = mail_compose.create(cr, self.user_chell_id,
            {'subject': 'Subject', 'body': 'Body text', 'partner_ids': []},
            {'default_composition_mode': 'comment', 'default_model': 'mail.group', 'default_res_id': self.group_pigs_id})
        mail_compose.send_mail(cr, self.user_chell_id, [compose_id])
        # Do: Chell replies to a Pigs message using the composer
        compose_id = mail_compose.create(cr, self.user_chell_id,
            {'subject': 'Subject', 'body': 'Body text'},
            {'default_composition_mode': 'comment', 'default_parent_id': pigs_msg_id})
        mail_compose.send_mail(cr, self.user_chell_id, [compose_id])

        # Do: Chell browses PigsPortal -> ok because groups security, ko for partners (no read permission)
        chell_port = self.mail_group.browse(cr, self.user_chell_id, self.group_port_id)
        trigger_read = chell_port.name
        for message in chell_port.message_ids:
            trigger_read = message.subject
        for partner in chell_port.message_follower_ids:
            with self.assertRaises(except_orm):
                trigger_read = partner.name

    def test_10_mail_invite(self):
        """Invite a partner by email; check followers, signup URL and sent mail."""
        cr, uid = self.cr, self.uid
        mail_invite = self.registry('mail.wizard.invite')
        base_url = self.registry('ir.config_parameter').get_param(cr, uid, 'web.base.url', default='')
        # Carine Poilvache, with email, should receive emails for comments and emails
        partner_carine_id = self.res_partner.create(cr, uid, {'name': 'Carine Poilvache', 'email': 'c@c'})

        # Do: create a mail_wizard_invite, validate it
        self._init_mock_build_email()
        context = {'default_res_model': 'mail.group', 'default_res_id': self.group_pigs_id}
        mail_invite_id = mail_invite.create(cr, uid, {'partner_ids': [(4, partner_carine_id)], 'send_mail': True}, context)
        mail_invite.add_followers(cr, uid, [mail_invite_id])

        # Test: Pigs followers should contain Admin and Bert
        group_pigs = self.mail_group.browse(cr, uid, self.group_pigs_id)
        follower_ids = [follower.id for follower in group_pigs.message_follower_ids]
        self.assertEqual(set(follower_ids), set([self.partner_admin_id, partner_carine_id]), 'Pigs followers after invite is incorrect')
        # Test: partner must have been prepared for signup
        partner_carine = self.res_partner.browse(cr, uid, partner_carine_id)
        self.assertTrue(partner_carine.signup_valid, 'partner has not been prepared for signup')
        self.assertTrue(base_url in partner_carine.signup_url, 'signup url is incorrect')
        self.assertTrue(cr.dbname in partner_carine.signup_url, 'signup url is incorrect')
        self.assertTrue(partner_carine.signup_token in partner_carine.signup_url, 'signup url is incorrect')

        # Test: (pretend to) send email and check subject, body
        self.assertEqual(len(self._build_email_kwargs_list), 1, 'sent email number incorrect, should be only for Bert')
        for sent_email in self._build_email_kwargs_list:
            self.assertEqual(sent_email.get('subject'), 'Invitation to follow Discussion group: Pigs',
                             'invite: subject of invitation email is incorrect')
            self.assertIn('Administrator invited you to follow Discussion group document: Pigs', sent_email.get('body'),
                          'invite: body of invitation email is incorrect')
            self.assertIn(partner_carine.signup_token, sent_email.get('body'),
                          'invite: body of invitation email does not contain signup token')

    def test_20_notification_url(self):
        """ Tests designed to test the URL added in notification emails. """
        cr, uid, group_pigs = self.cr, self.uid, self.group_pigs

        # Partner data
        partner_raoul = self.res_partner.browse(cr, uid, self.partner_raoul_id)
        partner_bert_id = self.res_partner.create(cr, uid, {'name': 'bert'})
        partner_bert = self.res_partner.browse(cr, uid, partner_bert_id)
        # Mail data
        mail_mail_id = self.mail_mail.create(cr, uid, {'state': 'exception'})
        mail = self.mail_mail.browse(cr, uid, mail_mail_id)

        # Test: link for nobody -> None
        url = self.mail_mail._get_partner_access_link(cr, uid, mail)
        self.assertEqual(url, None,
                         'notification email: mails not send to a specific partner should not have any URL')
        # Test: link for partner -> signup URL
        url = self.mail_mail._get_partner_access_link(cr, uid, mail, partner=partner_bert)
        self.assertIn(partner_bert.signup_token, url,
                      'notification email: mails send to a not-user partner should contain the signup token')
        # Test: link for user -> signin
        url = self.mail_mail._get_partner_access_link(cr, uid, mail, partner=partner_raoul)
        self.assertIn('action=mail.action_mail_redirect', url,
                      'notification email: link should contain the redirect action')
        self.assertIn('login=%s' % partner_raoul.user_ids[0].login, url,
                      'notification email: link should contain the user login')

    @mute_logger('openerp.addons.mail.mail_thread', 'openerp.models')
    def test_21_inbox_redirection(self):
        """ Tests designed to test the inbox redirection of emails notification URLs. """
        cr, uid, user_admin, group_pigs = self.cr, self.uid, self.user_admin, self.group_pigs
        model, act_id = self.ir_model_data.get_object_reference(cr, uid, 'mail', 'action_mail_inbox_feeds')
        model, port_act_id = self.ir_model_data.get_object_reference(cr, uid, 'portal', 'action_mail_inbox_feeds_portal')
        # Data: post a message on pigs
        msg_id = self.group_pigs.message_post(body='My body', partner_ids=[self.partner_bert_id, self.partner_chell_id], type='comment', subtype='mail.mt_comment')

        # No specific parameters -> should redirect to Inbox
        action = self.mail_thread.message_redirect_action(cr, self.user_raoul_id, {'params': {}})
        self.assertEqual(action.get('type'), 'ir.actions.client',
                         'URL redirection: action without parameters should redirect to client action Inbox')
        self.assertEqual(action.get('id'), act_id,
                         'URL redirection: action without parameters should redirect to client action Inbox')

        # Bert has read access to Pigs -> should redirect to form view of Pigs
        action = self.mail_thread.message_redirect_action(cr, self.user_raoul_id, {'params': {'message_id': msg_id}})
        self.assertEqual(action.get('type'), 'ir.actions.act_window',
                         'URL redirection: action with message_id for read-accredited user should redirect to Pigs')
        self.assertEqual(action.get('res_id'), group_pigs.id,
                         'URL redirection: action with message_id for read-accredited user should redirect to Pigs')

        # Bert has no read access to Pigs -> should redirect to Inbox
        action = self.mail_thread.message_redirect_action(cr, self.user_bert_id, {'params': {'message_id': msg_id}})
        self.assertEqual(action.get('type'), 'ir.actions.client',
                         'URL redirection: action without parameters should redirect to client action Inbox')
        self.assertEqual(action.get('id'), act_id,
                         'URL redirection: action without parameters should redirect to client action Inbox')

        # Chell has no read access to pigs -> should redirect to Portal Inbox
        action = self.mail_thread.message_redirect_action(cr, self.user_chell_id, {'params': {'message_id': msg_id}})
        self.assertEqual(action.get('type'), 'ir.actions.client',
                         'URL redirection: action without parameters should redirect to client action Inbox')
        self.assertEqual(action.get('id'), port_act_id,
                         'URL redirection: action without parameters should redirect to client action Inbox')

    def test_30_message_read(self):
        """Portal user must not see internal notes (comments without subtype)."""
        cr, uid, group_port_id = self.cr, self.uid, self.group_port_id

        # Data: custom subtypes
        mt_group_public_id = self.mail_message_subtype.create(cr, uid, {'name': 'group_public', 'description': 'Group changed'})
        self.ir_model_data.create(cr, uid, {'name': 'mt_group_public', 'model': 'mail.message.subtype', 'module': 'mail', 'res_id': mt_group_public_id})

        # Data: post messages with various subtypes
        msg1_id = self.mail_group.message_post(cr, uid, group_port_id, body='Body1', type='comment', subtype='mail.mt_comment')
        msg2_id = self.mail_group.message_post(cr, uid, group_port_id, body='Body2', type='comment', subtype='mail.mt_group_public')
        msg3_id = self.mail_group.message_post(cr, uid, group_port_id, body='Body3', type='comment', subtype='mail.mt_comment')
        msg4_id = self.mail_group.message_post(cr, uid, group_port_id, body='Body4', type='comment')
        # msg5_id = self.mail_group.message_post(cr, uid, group_port_id, body='Body5', type='notification')

        # Do: Chell search messages: should not see internal notes (comment without subtype)
        msg_ids = self.mail_message.search(cr, self.user_chell_id, [('model', '=', 'mail.group'), ('res_id', '=', group_port_id)])
        self.assertEqual(set(msg_ids), set([msg1_id, msg2_id, msg3_id]),
                         'mail_message: portal user has access to messages he should not read')
        # Do: Chell read messages she can read
        self.mail_message.read(cr, self.user_chell_id, msg_ids, ['body', 'type', 'subtype_id'])
        # Do: Chell read a message she should not be able to read
        with self.assertRaises(except_orm):
            self.mail_message.read(cr, self.user_chell_id, [msg4_id], ['body', 'type', 'subtype_id'])
| agpl-3.0 |
hackerkid/zulip | zerver/management/commands/merge_streams.py | 3 | 3556 | from argparse import ArgumentParser
from typing import Any, List
from zerver.lib.actions import (
bulk_add_subscriptions,
bulk_remove_subscriptions,
do_deactivate_stream,
)
from zerver.lib.cache import cache_delete_many, to_dict_cache_key_id
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Message, Subscription, get_stream
def bulk_delete_cache_keys(message_ids_to_clear: List[int]) -> None:
    """Delete the message to-dict cache entries for the given message IDs.

    Works in batches of 5000 so no single cache_delete_many call receives an
    unboundedly large key list.
    """
    # PERF: the old loop re-sliced (copied) the remaining list on every
    # iteration, which is quadratic overall; iterating over batch offsets
    # avoids the repeated copies while issuing the same calls.
    batch_size = 5000
    for start in range(0, len(message_ids_to_clear), batch_size):
        batch = message_ids_to_clear[start:start + batch_size]
        keys_to_delete = [to_dict_cache_key_id(message_id) for message_id in batch]
        cache_delete_many(keys_to_delete)
class Command(ZulipBaseCommand):
    help = """Merge two streams."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        """Register the two positional stream names and the realm option."""
        parser.add_argument("stream_to_keep", help="name of stream to keep")
        parser.add_argument(
            "stream_to_destroy", help="name of stream to merge into the stream being kept"
        )
        self.add_realm_args(parser, True)

    def handle(self, *args: Any, **options: str) -> None:
        """Merge stream_to_destroy into stream_to_keep, then deactivate it."""
        realm = self.get_realm(options)
        assert realm is not None  # Should be ensured by parser
        stream_to_keep = get_stream(options["stream_to_keep"], realm)
        stream_to_destroy = get_stream(options["stream_to_destroy"], realm)
        recipient_to_destroy = stream_to_destroy.recipient
        recipient_to_keep = stream_to_keep.recipient

        # The high-level approach here is to move all the messages to
        # the surviving stream, deactivate all the subscriptions on
        # the stream to be removed and deactivate the stream, and add
        # new subscriptions to the stream to keep for any users who
        # were only on the now-deactivated stream.

        # Move the messages, and delete the old copies from caches.
        message_ids_to_clear = list(
            Message.objects.filter(recipient=recipient_to_destroy).values_list("id", flat=True)
        )
        count = Message.objects.filter(recipient=recipient_to_destroy).update(
            recipient=recipient_to_keep
        )
        print(f"Moved {count} messages")
        bulk_delete_cache_keys(message_ids_to_clear)

        # Move the Subscription objects. This algorithm doesn't
        # preserve any stream settings/colors/etc. from the stream
        # being destroyed, but it's convenient.
        existing_subs = Subscription.objects.filter(recipient=recipient_to_keep)
        users_already_subscribed = {sub.user_profile_id: sub.active for sub in existing_subs}

        subs_to_deactivate = Subscription.objects.filter(
            recipient=recipient_to_destroy, active=True
        )
        # Users subscribed only to the doomed stream must be re-added to the
        # surviving one after their old subscription is removed.
        users_to_activate = [
            sub.user_profile
            for sub in subs_to_deactivate
            if not users_already_subscribed.get(sub.user_profile_id, False)
        ]

        if len(subs_to_deactivate) > 0:
            print(f"Deactivating {len(subs_to_deactivate)} subscriptions")
            bulk_remove_subscriptions(
                [sub.user_profile for sub in subs_to_deactivate],
                [stream_to_destroy],
                self.get_client(),
                acting_user=None,
            )
        do_deactivate_stream(stream_to_destroy, acting_user=None)

        if len(users_to_activate) > 0:
            print(f"Adding {len(users_to_activate)} subscriptions")
            bulk_add_subscriptions(realm, [stream_to_keep], users_to_activate, acting_user=None)
| apache-2.0 |
QianBIG/odoo | openerp/addons/base/tests/test_ir_values.py | 462 | 6705 | import unittest2
import openerp.tests.common as common
class test_ir_values(common.TransactionCase):
    """Exercise ir.values default-value storage and action bindings."""

    def test_00(self):
        # Create some default value for some (non-existing) model, for all users.
        ir_values = self.registry('ir.values')

        # use the old API
        ir_values.set(self.cr, self.uid, 'default', False, 'my_test_field',
                      ['unexisting_model'], 'global value')

        # use the new API
        ir_values.set_default(self.cr, self.uid, 'other_unexisting_model',
                              'my_other_test_field', 'conditional value', condition='foo=bar')

        # Retrieve them.
        ir_values = self.registry('ir.values')

        # d is a list of triplets (id, name, value)
        # Old API
        d = ir_values.get(self.cr, self.uid, 'default', False, ['unexisting_model'])
        assert len(d) == 1, "Only one single value should be retrieved for this model"
        assert d[0][1] == 'my_test_field', "Can't retrieve the created default value. (1)"
        assert d[0][2] == 'global value', "Can't retrieve the created default value. (2)"

        # New API, Conditional version
        d = ir_values.get_defaults(self.cr, self.uid, 'other_unexisting_model')
        assert len(d) == 0, "No value should be retrieved, the condition is not met"
        d = ir_values.get_defaults(self.cr, self.uid, 'other_unexisting_model', condition="foo=eggs")
        assert len(d) == 0, 'Condition is not met either, no defaults should be returned'
        d = ir_values.get_defaults(self.cr, self.uid, 'other_unexisting_model', condition="foo=bar")
        assert len(d) == 1, "Only one single value should be retrieved"
        assert d[0][1] == 'my_other_test_field', "Can't retrieve the created default value. (5)"
        assert d[0][2] == 'conditional value', "Can't retrieve the created default value. (6)"

        # Do it again but for a specific user.
        ir_values = self.registry('ir.values')
        ir_values.set(self.cr, self.uid, 'default', False, 'my_test_field', ['unexisting_model'], 'specific value', preserve_user=True)

        # Retrieve it and check it is the one for the current user.
        ir_values = self.registry('ir.values')
        d = ir_values.get(self.cr, self.uid, 'default', False, ['unexisting_model'])
        assert len(d) == 1, "Only one default must be returned per field"
        assert d[0][1] == 'my_test_field', "Can't retrieve the created default value."
        assert d[0][2] == 'specific value', "Can't retrieve the created default value."

        # Create some action bindings for a non-existing model.
        act_id_1 = self.ref('base.act_values_form_action')
        act_id_2 = self.ref('base.act_values_form_defaults')
        act_id_3 = self.ref('base.action_res_company_form')
        act_id_4 = self.ref('base.action_res_company_tree')
        ir_values = self.registry('ir.values')
        ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_1, isobject=True)
        ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action 2', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_2, isobject=True)
        ir_values.set(self.cr, self.uid, 'action', 'client_action_multi', 'Side Wizard', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_3, isobject=True)
        report_ids = self.registry('ir.actions.report.xml').search(self.cr, self.uid, [], {})
        reports = self.registry('ir.actions.report.xml').browse(self.cr, self.uid, report_ids, {})
        report_id = [report.id for report in reports if not report.groups_id][0]  # assume at least one
        ir_values.set(self.cr, self.uid, 'action', 'client_print_multi', 'Nice Report', ['unexisting_model'], 'ir.actions.report.xml,%d' % report_id, isobject=True)
        ir_values.set(self.cr, self.uid, 'action', 'client_action_relate', 'Related Stuff', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_4, isobject=True)

        # Replace one action binding to set a new name.
        ir_values = self.registry('ir.values')
        ir_values.set(self.cr, self.uid, 'action', 'tree_but_open', 'OnDblClick Action New', ['unexisting_model'], 'ir.actions.act_window,%d' % act_id_1, isobject=True)

        # Retrieve the action bindings and check they're correct
        ir_values = self.registry('ir.values')
        actions = ir_values.get(self.cr, self.uid, 'action', 'tree_but_open', ['unexisting_model'])
        assert len(actions) == 2, "Mismatching number of bound actions"
        # first action
        assert len(actions[0]) == 3, "Malformed action definition"
        assert actions[0][1] == 'OnDblClick Action 2', 'Bound action does not match definition'
        assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == act_id_2, 'Bound action does not match definition'
        # second action - this ones comes last because it was re-created with a different name
        assert len(actions[1]) == 3, "Malformed action definition"
        assert actions[1][1] == 'OnDblClick Action New', 'Re-Registering an action should replace it'
        assert isinstance(actions[1][2], dict) and actions[1][2]['id'] == act_id_1, 'Bound action does not match definition'

        actions = ir_values.get(self.cr, self.uid, 'action', 'client_action_multi', ['unexisting_model'])
        assert len(actions) == 1, "Mismatching number of bound actions"
        assert len(actions[0]) == 3, "Malformed action definition"
        assert actions[0][1] == 'Side Wizard', 'Bound action does not match definition'
        assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == act_id_3, 'Bound action does not match definition'

        actions = ir_values.get(self.cr, self.uid, 'action', 'client_print_multi', ['unexisting_model'])
        assert len(actions) == 1, "Mismatching number of bound actions"
        assert len(actions[0]) == 3, "Malformed action definition"
        assert actions[0][1] == 'Nice Report', 'Bound action does not match definition'
        assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == report_id, 'Bound action does not match definition'

        actions = ir_values.get(self.cr, self.uid, 'action', 'client_action_relate', ['unexisting_model'])
        assert len(actions) == 1, "Mismatching number of bound actions"
        assert len(actions[0]) == 3, "Malformed action definition"
        assert actions[0][1] == 'Related Stuff', 'Bound action does not match definition'
        assert isinstance(actions[0][2], dict) and actions[0][2]['id'] == act_id_4, 'Bound action does not match definition'
if __name__ == '__main__':
    # Allow running this test module directly with the unittest2 CLI runner.
    unittest2.main()
| agpl-3.0 |
orezpraw/partycrasher | partycrasher/more_like_this_response.py | 2 | 5735 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Copyright (C) 2016, 2017 Joshua Charles Campbell
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from __future__ import print_function
from operator import itemgetter
import re
import logging
# Module logger plus terse aliases used throughout this module.
logger = logging.getLogger(__name__)
error = logger.error
# BUG FIX: Logger.warn is a deprecated alias of Logger.warning; bind the
# canonical method instead (call sites using warn(...) are unaffected).
warn = logger.warning
info = logger.info
debug = logger.debug
from partycrasher.bucket import Buckets, Bucket, TopMatch
from partycrasher.threshold import Threshold
from partycrasher.pc_exceptions import MissingBucketError
from partycrasher.es.bucket import ESBuckets
from partycrasher.pc_encoder import pretty
class MoreLikeThisHit(object):
    """One hit from an Elasticsearch more-like-this query response.

    Wraps the raw hit dictionary and exposes accessors for the score, the
    originating report, its bucket assignments, and the relevance
    explanation returned by Elasticsearch.
    """

    def __init__(self, raw_hit):
        self.raw_hit = raw_hit
        self.score = raw_hit['_score']
        assert isinstance(self.score, (float, int))
        if '_source' in raw_hit:
            # Hits without a _source (e.g. filtered responses) simply do not
            # get these attributes.
            self.database_id = raw_hit['_source']['database_id']
            self.project = raw_hit['_source']['project']

    @property
    def prec_top_match(self):
        """Top match among this hit's bucket assignments."""
        return self.buckets.top_match

    @property
    def buckets(self):
        """Bucket assignments of this crash.

        Raises:
            MissingBucketError: when the crash document carries no bucket
                field yet.
        """
        # TODO: cache this?
        crash = self.raw_hit['_source']
        try:
            buckets = crash['buckets']
        except KeyError:
            # We couldn't find the bucket field. ASSUME that this means that
            # its bucket assignment has not yet propagated to whatever shard
            # returned the results.
            message = ('Bucket field {!r} not found in crash: '
                       '{!r}'.format('buckets', crash))
            raise MissingBucketError(message)
        return ESBuckets(buckets)

    @property
    def explanation(self):
        """Raw Elasticsearch explanation details for this hit."""
        try:
            return self.raw_hit['_explanation']['details']
        except KeyError:
            # BUG FIX: the previous bare `except:` handler referenced
            # undefined names (body, response, json, ESCrashEncoder) and so
            # raised a confusing NameError instead of the real problem.
            # Log the offending hit and re-raise the original error.
            error('hit has no _explanation field: %r', self.raw_hit)
            raise

    @property
    def explanation_summary(self):
        """Flattened per-term weights extracted from the explanation,
        sorted by descending value."""
        explanation = self.explanation
        # NOTE(review): debugging artifact — dumps the raw explanation to a
        # file named 'explained' in the current directory on every call.
        with open('explained', 'w') as debug_file:
            print(pretty(self.raw_hit['_explanation']), file=debug_file)

        def flatten(explanation):
            # Keep only the leaf "weight(field:term ...)" entries.
            flattened = []
            for subexplanation in explanation:
                if subexplanation["description"].startswith("weight"):
                    flattened.append(subexplanation)
                else:
                    if "details" in subexplanation:
                        flattened.extend(flatten(subexplanation["details"]))
            return flattened

        explanation = flatten(explanation)
        explanation = sorted(explanation, key=itemgetter('value'), reverse=True)
        summary = []
        for entry in explanation:
            match = re.match(r'^weight\(([^\s:]+):([^\s]+) in .*$', entry['description'])
            if match is not None:
                summary.append({'field': match.group(1), 'term': match.group(2), 'value': entry['value']})
        return summary

    def as_top_match(self):
        """Convert this hit into a TopMatch record."""
        return TopMatch(report_id=self.database_id,
                        score=self.score,
                        project=self.project)
class MoreLikeThisResponse(object):
    """Wraps the raw dictionary returned by a more-like-this query.

    Expected JSON shape::

        matches['hits']['hits'] ~> [
            {"_score": 8.9,
             "_source": {"buckets": {"1.0": "...", "9.0": "..."}}},
            ...
        ]
    """

    def __init__(self, response_dict):
        self.response_dict = response_dict
        self.raw_hits = self.response_dict['hits']['hits']
        self.hits = [MoreLikeThisHit(raw) for raw in self.raw_hits]
        self.top_match = self.hits[0] if self.hits else None

    @property
    def explanation(self):
        """Explanation of the best hit, or None when there were no hits."""
        if not self.hits:
            return None
        return self.hits[0].explanation

    @property
    def explanation_summary(self):
        """Per-term weights summed across all hits, sorted by value."""
        totals = {}
        for hit in self.hits:
            for term_info in hit.explanation_summary:
                per_field = totals.setdefault(term_info['field'], {})
                per_field[term_info['term']] = (
                    per_field.get(term_info['term'], 0.0) + term_info['value'])
        flattened = [
            {'field': field, 'term': term, 'value': value}
            for field, terms in totals.items()
            for term, value in terms.items()
        ]
        return sorted(flattened, key=itemgetter('value'), reverse=True)
| gpl-3.0 |
tareqalayan/ansible | lib/ansible/modules/network/cumulus/_cl_img_install.py | 25 | 3558 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Cumulus Networks <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cl_img_install
version_added: "2.1"
author: "Cumulus Networks (@CumulusNetworks)"
short_description: Install a different Cumulus Linux version.
deprecated:
removed_in: "2.5"
why: The image slot system no longer exists in Cumulus Linux.
alternative: n/a
description:
- install a different version of Cumulus Linux in the inactive slot. For
more details go the Image Management User Guide at
U(http://docs.cumulusnetworks.com/).
options:
src:
description:
- The full path to the Cumulus Linux binary image. Can be a local path,
http or https URL. If the code version is in the name of the file,
the module will assume this is the version of code you wish to
install.
required: true
version:
description:
- Inform the module of the exact version one is installing. This
overrides the automatic check of version in the file name. For
example, if the binary file name is called CumulusLinux-2.2.3.bin,
and version is set to '2.5.0', then the module will assume it is
installing '2.5.0' not '2.2.3'. If version is not included, then
the module will assume '2.2.3' is the version to install.
switch_slot:
description:
- Switch slots after installing the image.
To run the installed code, reboot the switch.
type: bool
requirements: ["Cumulus Linux OS"]
'''
EXAMPLES = '''
## Download and install the image from a webserver.
- name: Install image using using http url. Switch slots so the subsequent will load the new version
cl_img_install:
version: 2.0.1
src: http://10.1.1.1/CumulusLinux-2.0.1.bin
switch_slot: yes
## Copy the software from the ansible server to the switch.
## The module will get the code version from the filename
## The code will be installed in the alternate slot but the slot will not be primary
## A subsequent reload will not run the new code
- name: Download cumulus linux to local system
get_url:
src: ftp://cumuluslinux.bin
dest: /root/CumulusLinux-2.0.1.bin
- name: Install image from local filesystem. Get version from the filename.
cl_img_install:
src: /root/CumulusLinux-2.0.1.bin
## If the image name has been changed from the original name, use the `version` option
## to inform the module exactly what code version is been installed
- name: Download cumulus linux to local system
get_url:
src: ftp://CumulusLinux-2.0.1.bin
dest: /root/image.bin
- name: install image and switch slots. Only reboot needed
cl_img_install:
version: 2.0.1
src: /root/image.bin
switch_slot: yes
'''
RETURN = '''
changed:
description: whether the interface was changed
returned: changed
type: bool
sample: True
msg:
description: human-readable report of success or failure
returned: always
type: string
sample: "interface bond0 config updated"
'''
# This module has been removed from Ansible core; the stub below makes any
# remaining playbook reference fail with the standard "removed module" error
# instead of an import error.
from ansible.module_utils.common.removed import removed_module

if __name__ == '__main__':
    removed_module()
| gpl-3.0 |
rdeheele/odoo | addons/event/wizard/event_confirm.py | 339 | 1387 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
class event_confirm(models.TransientModel):
    """Transient wizard asking the user to confirm a selection of events."""
    _name = "event.confirm"

    @api.multi
    def confirm(self):
        """Confirm the events passed via the context, then close the wizard."""
        event_ids = self._context.get('event_ids', [])
        selected_events = self.env['event.event'].browse(event_ids)
        selected_events.do_confirm()
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dorileo/soletta | data/scripts/template.py | 9 | 5158 | #!/usr/bin/env python3
# This file is part of the Soletta (TM) Project
#
# Copyright (C) 2015 Intel Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import configparser
import os
import re
import stat
class TemplateFragment:
    """One piece of a parsed template: literal text or a ``{{...}}`` region.

    Instances of this class are also bound as ``st`` inside executed template
    code, so the emit-style methods below are the API available to templates.
    Output accumulates (newline-terminated) in ``self.subst``.
    """

    def __init__(self, tpl_global, context, verbatim, expr):
        self.tpl_global = tpl_global
        self.context = context
        self.verbatim = verbatim
        self.expr = expr
        self.subst = ""

    def __emit(self, text):
        # Append one newline-terminated line to the substitution output.
        self.subst += "%s\n" % text

    def value_of(self, k):
        """Emit the context value of ``k`` when it is set and truthy."""
        v = self.context.get(k)
        if v:
            self.__emit(v)

    def on_value(self, k, v, ontrue, onfalse):
        """Emit ``ontrue`` when context[k.lower()] equals ``v``, else ``onfalse``."""
        current = self.context.get(k.lower())
        self.__emit(ontrue if current and current == v else onfalse)

    def on_set(self, k, ontrue, onfalse):
        """Emit ``ontrue`` when ``k.lower()`` is set (truthy), else ``onfalse``."""
        self.__emit(ontrue if self.context.get(k.lower()) else onfalse)

    def println(self, ln):
        """Emit ``ln`` verbatim."""
        self.__emit(ln)

    def include(self, template):
        """Render another template file, relative to the root template's dir."""
        base_dir = os.path.dirname(self.tpl_global["root_tpl"])
        path = os.path.join(base_dir, template)
        try:
            f = open(path)
        except:
            print("Could not open include file: %s" % path)
            return
        # Pass self as `nested` so the included template writes into this
        # fragment's output.
        self.__emit(run_template(f.read(), self.tpl_global, self.context, self))
def parse_template(raw, tpl_global, context):
    # Split raw template text into a list of TemplateFragment objects.
    # Fragments alternate between literal text (expr=False) and the content
    # of "{{ ... }}" regions (expr=True).  The scanner is character based:
    # `prev` holds the previous character so the two-character delimiters
    # "{{" and "}}" can be detected.
    result = []
    curr = prev = ""
    expr = False
    for ch in raw:
        if ch == "{" and prev == "{":
            # Opening "{{": flush any pending literal text.  `curr` still
            # carries the first "{" of the delimiter here; literal fragments
            # are never re-inserted into the output (see run_template), so
            # the stray brace is harmless.
            if curr:
                fragment = TemplateFragment(tpl_global, context, curr, expr)
                result.append(fragment)
            curr = ""
            expr = True
        elif ch == "}" and prev == "}":
            # Closing "}}": `curr` ends with the first "}" of the delimiter,
            # so drop that last character before storing the expression text.
            if curr:
                fragment = TemplateFragment(tpl_global, context, curr[:len(curr) - 1], expr)
                result.append(fragment)
            curr = ""
            expr = False
        else:
            curr += ch
        prev = ch
    if curr:
        # Trailing text after the final "}}" (or the whole input when no
        # delimiters were present at all).
        fragment = TemplateFragment(tpl_global, context, curr, expr)
        result.append(fragment)
    return result
def load_context(files):
    """Build the template context from kconfig-style files plus the environment.

    Each file holds ``key=value`` (also ``?=`` / ``:=``) pairs.  Later files
    override earlier ones, and environment variables (keys lower-cased)
    override everything read from the files.
    """
    context = {}
    for config_file in files:
        # configparser requires a section header, so fabricate one.
        payload = "[context]\n%s" % config_file.read()
        parser = configparser.ConfigParser(delimiters=('=', '?=', ':='))
        parser.read_string(payload)
        context.update(parser["context"])
    # also consider env vars in the context
    for key, value in os.environ.items():
        context[key.lower()] = value
    return context
def try_subst(verbatim):
    """Return ``verbatim`` with every "@" removed when it is an "@NAME@"-style
    substitution token, otherwise ``None``.

    Fix: the original pattern was written in a non-raw string with a
    backslash before each "@", which is an invalid escape sequence
    (DeprecationWarning, and a SyntaxWarning on Python 3.12+).  "@" needs no
    escaping in a regex, so a raw string with plain "@" is equivalent.
    """
    # The whole string must start and end with "@" (length >= 2).
    if not re.match(r"^@.*@$", verbatim):
        return None
    # Strip *all* "@" characters, not only the two delimiters (matches the
    # original behavior for tokens such as "@a@b@").
    return verbatim.replace("@", "")
def run_template(raw, tpl_global, context, nested=None):
    # Render one template string.  Each "{{...}}" region is either an
    # "@KEY@" substitution (replaced by the context value, double quotes
    # stripped) or Python code executed with `st` bound to the fragment
    # (or to `nested` when rendering an include on behalf of a parent
    # fragment) and `context` bound to the context dict.
    fragments = parse_template(raw, tpl_global, context)
    for frag in fragments:
        if frag.expr:
            subst = try_subst(frag.verbatim)
            if subst:
                # "@KEY@" style: case-insensitive lookup; a missing key
                # renders as the empty string.
                subst = context.get(subst.lower(), "")
                subst = subst.replace("\"","")
                frag.subst = subst
            else:
                # Python code fragment: it produces output by calling the
                # TemplateFragment emit methods, which fill `subst`.
                if nested:
                    tpl_global["st"] = nested
                else:
                    tpl_global["st"] = frag
                tpl_global["context"] = context
                exec(frag.verbatim, tpl_global)
        # Literal fragments keep subst == "" and their "{{...}}"-wrapped
        # form never occurs in `raw`, so this replace only rewrites real
        # expression regions.
        raw = raw.replace("{{%s}}" % frag.verbatim, frag.subst)
    return raw
if __name__ == "__main__":
    # Command-line front-end: render --template, using the context assembled
    # from --context-files plus the environment, into --output, preserving
    # the template's file permission bits (e.g. the executable bit).
    parser = argparse.ArgumentParser()
    parser.add_argument("--context-files",
                        help=("The context files path. A context file"
                              "is a file containing key=value pairs, like"
                              "the kconfig's .config file"),
                        type=argparse.FileType("r"), nargs="+",
                        required=True)
    parser.add_argument("--template", help="The template file path",
                        type=argparse.FileType("r"), required=True)
    # Fix: this help string used to read "The template file path" — an
    # obvious copy/paste slip from the option above.
    parser.add_argument("--output", help="The output file path",
                        type=argparse.FileType("w"), required=True)
    args = parser.parse_args()

    tpl_global = {"root_tpl": os.path.realpath(args.template.name)}
    context = load_context(args.context_files)
    output = run_template(args.template.read(), tpl_global, context)
    args.output.write(output)

    # Propagate the template's mode bits to the generated file.
    st = os.fstat(args.template.fileno())
    os.fchmod(args.output.fileno(), st.st_mode)
| apache-2.0 |
calfonso/ansible | lib/ansible/modules/cloud/amazon/ec2_elb_facts.py | 24 | 8629 | #!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_elb_facts
short_description: Gather facts about EC2 Elastic Load Balancers in AWS
description:
- Gather facts about EC2 Elastic Load Balancers in AWS
version_added: "2.0"
author:
- "Michael Schultz (github.com/mjschultz)"
- "Fernando Jose Pando (@nand0p)"
options:
names:
description:
- List of ELB names to gather facts about. Pass this option to gather facts about a set of ELBs, otherwise, all ELBs are returned.
aliases: ['elb_ids', 'ec2_elbs']
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Output format tries to match ec2_elb_lb module input parameters
# Gather facts about all ELBs
- action:
module: ec2_elb_facts
register: elb_facts
- action:
module: debug
msg: "{{ item.dns_name }}"
with_items: "{{ elb_facts.elbs }}"
# Gather facts about a particular ELB
- action:
module: ec2_elb_facts
names: frontend-prod-elb
register: elb_facts
- action:
module: debug
msg: "{{ elb_facts.elbs.0.dns_name }}"
# Gather facts about a set of ELBs
- action:
module: ec2_elb_facts
names:
- frontend-prod-elb
- backend-prod-elb
register: elb_facts
- action:
module: debug
msg: "{{ item.dns_name }}"
with_items: "{{ elb_facts.elbs }}"
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (
AWSRetry,
connect_to_aws,
ec2_argument_spec,
get_aws_connection_info,
)
# boto is an optional dependency: record its availability here and report a
# friendly error from main() instead of failing at import time.
try:
    import boto.ec2.elb
    from boto.ec2.tag import Tag
    from boto.exception import BotoServerError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
class ElbInformation(object):
    """Collects EC2-classic ELB information via boto and formats it so that
    the output roughly matches the ``ec2_elb_lb`` module's input parameters.
    """

    def __init__(self,
                 module,
                 names,
                 region,
                 **aws_connect_params):
        # module: the AnsibleModule (kept for error reporting by callers)
        # names: ELB names to filter on; empty means "all ELBs"
        self.module = module
        self.names = names
        self.region = region
        self.aws_connect_params = aws_connect_params
        self.connection = self._get_elb_connection()

    def _get_tags(self, elbname):
        # DescribeTags is not wrapped by boto's ELB connection object, so
        # issue the raw API call and flatten the Tag objects into a dict.
        params = {'LoadBalancerNames.member.1': elbname}
        elb_tags = self.connection.get_list('DescribeTags', params, [('member', Tag)])
        return dict((tag.Key, tag.Value) for tag in elb_tags if hasattr(tag, 'Key'))

    @AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
    def _get_elb_connection(self):
        # Retried with exponential backoff to ride out AWS API throttling.
        return connect_to_aws(boto.ec2.elb, self.region, **self.aws_connect_params)

    def _get_elb_listeners(self, listeners):
        # Convert boto listener tuples into dicts.  Tuple layout (boto):
        # (lb_port, instance_port, protocol[, instance_protocol,
        #  ssl_certificate_id]) — assumed from the indexing below; the
        # certificate id, when present, is element 4.
        listener_list = []

        for listener in listeners:
            listener_dict = {
                'load_balancer_port': listener[0],
                'instance_port': listener[1],
                'protocol': listener[2],
            }

            try:
                ssl_certificate_id = listener[4]
            except IndexError:
                pass  # no certificate attached to this listener
            else:
                if ssl_certificate_id:
                    listener_dict['ssl_certificate_id'] = ssl_certificate_id

            listener_list.append(listener_dict)

        return listener_list

    def _get_health_check(self, health_check):
        # health_check.target looks like "HTTP:80/index.html" or "TCP:22";
        # split into protocol, port and an optional path.
        protocol, port_path = health_check.target.split(':')
        try:
            port, path = port_path.split('/', 1)
            path = '/{0}'.format(path)
        except ValueError:
            # No "/" present: the whole remainder is the port (TCP/SSL checks).
            port = port_path
            path = None

        health_check_dict = {
            'ping_protocol': protocol.lower(),
            'ping_port': int(port),
            'response_timeout': health_check.timeout,
            'interval': health_check.interval,
            'unhealthy_threshold': health_check.unhealthy_threshold,
            'healthy_threshold': health_check.healthy_threshold,
        }

        if path:
            health_check_dict['ping_path'] = path
        return health_check_dict

    @AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
    def _get_elb_info(self, elb):
        # Flatten one boto ELB object into the fact dict returned to Ansible.
        elb_info = {
            'name': elb.name,
            'zones': elb.availability_zones,
            'dns_name': elb.dns_name,
            'canonical_hosted_zone_name': elb.canonical_hosted_zone_name,
            'canonical_hosted_zone_name_id': elb.canonical_hosted_zone_name_id,
            # hosted_zone_* are aliases of the canonical_* values above.
            'hosted_zone_name': elb.canonical_hosted_zone_name,
            'hosted_zone_id': elb.canonical_hosted_zone_name_id,
            'instances': [instance.id for instance in elb.instances],
            'listeners': self._get_elb_listeners(elb.listeners),
            'scheme': elb.scheme,
            'security_groups': elb.security_groups,
            'health_check': self._get_health_check(elb.health_check),
            'subnets': elb.subnets,
            # Health fields default to empty/zero and are filled in below
            # only when the ELB has registered instances.
            'instances_inservice': [],
            'instances_inservice_count': 0,
            'instances_outofservice': [],
            'instances_outofservice_count': 0,
            'instances_inservice_percent': 0.0,
            'tags': self._get_tags(elb.name)
        }

        if elb.vpc_id:
            elb_info['vpc_id'] = elb.vpc_id

        if elb.instances:
            instance_health = self.connection.describe_instance_health(elb.name)
            elb_info['instances_inservice'] = [inst.instance_id for inst in instance_health if inst.state == 'InService']
            elb_info['instances_inservice_count'] = len(elb_info['instances_inservice'])
            elb_info['instances_outofservice'] = [inst.instance_id for inst in instance_health if inst.state == 'OutOfService']
            elb_info['instances_outofservice_count'] = len(elb_info['instances_outofservice'])
            try:
                elb_info['instances_inservice_percent'] = (
                    float(elb_info['instances_inservice_count']) /
                    float(elb_info['instances_inservice_count'] + elb_info['instances_outofservice_count'])
                ) * 100.
            except ZeroDivisionError:
                # All instances in states other than InService/OutOfService.
                elb_info['instances_inservice_percent'] = 0.
        return elb_info

    def list_elbs(self):
        """Return fact dicts for all (or the named) load balancers."""
        elb_array, token = [], None
        # get_all_load_balancers is paginated via next_marker; wrap it in
        # backoff to survive throttling on accounts with many ELBs.
        get_elb_with_backoff = AWSRetry.backoff(tries=5, delay=5, backoff=2.0)(self.connection.get_all_load_balancers)
        while True:
            all_elbs = get_elb_with_backoff(marker=token)
            token = all_elbs.next_marker

            if all_elbs:
                if self.names:
                    for existing_lb in all_elbs:
                        if existing_lb.name in self.names:
                            elb_array.append(existing_lb)
                else:
                    elb_array.extend(all_elbs)
            else:
                break

            if token is None:
                break

        return list(map(self._get_elb_info, elb_array))
def main():
    """Ansible entry point: gather ELB facts and report them via exit_json."""
    spec = ec2_argument_spec()
    spec.update({'names': {'default': [], 'type': 'list'}})
    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    try:
        region, ec2_url, aws_connect_params = get_aws_connection_info(module)
        if not region:
            module.fail_json(msg="region must be specified")

        collector = ElbInformation(module, module.params['names'], region,
                                   **aws_connect_params)
        facts = dict(changed=False, elbs=collector.list_elbs())
    except BotoServerError as err:
        module.fail_json(msg="{0}: {1}".format(err.error_code, err.error_message),
                         exception=traceback.format_exc())

    module.exit_json(**facts)


if __name__ == '__main__':
    main()
| gpl-3.0 |
xuewei4d/scikit-learn | asv_benchmarks/benchmarks/decomposition.py | 12 | 2754 | from sklearn.decomposition import (PCA, DictionaryLearning,
MiniBatchDictionaryLearning)
from .common import Benchmark, Estimator, Transformer
from .datasets import _olivetti_faces_dataset, _mnist_dataset
from .utils import make_pca_scorers, make_dict_learning_scorers
class PCABenchmark(Transformer, Estimator, Benchmark):
    """asv benchmark suite for PCA."""

    param_names = ['svd_solver']
    params = (['full', 'arpack', 'randomized'],)

    def setup_cache(self):
        super().setup_cache()

    def make_data(self, params):
        # PCA is benchmarked on the MNIST dataset.
        return _mnist_dataset()

    def make_estimator(self, params):
        (svd_solver,) = params
        return PCA(n_components=32, svd_solver=svd_solver, random_state=0)

    def make_scorers(self):
        make_pca_scorers(self)
class DictionaryLearningBenchmark(Transformer, Estimator, Benchmark):
    """asv benchmark suite for DictionaryLearning."""

    param_names = ['fit_algorithm', 'n_jobs']
    params = (['lars', 'cd'], Benchmark.n_jobs_vals)

    def setup_cache(self):
        super().setup_cache()

    def make_data(self, params):
        # Dictionary learning is benchmarked on the Olivetti faces.
        return _olivetti_faces_dataset()

    def make_estimator(self, params):
        fit_algorithm, n_jobs = params
        return DictionaryLearning(n_components=15,
                                  fit_algorithm=fit_algorithm,
                                  alpha=0.1,
                                  max_iter=20,
                                  tol=1e-16,
                                  random_state=0,
                                  n_jobs=n_jobs)

    def make_scorers(self):
        make_dict_learning_scorers(self)
class MiniBatchDictionaryLearningBenchmark(Transformer, Estimator, Benchmark):
    """asv benchmark suite for MiniBatchDictionaryLearning."""

    param_names = ['fit_algorithm', 'n_jobs']
    params = (['lars', 'cd'], Benchmark.n_jobs_vals)

    def setup_cache(self):
        super().setup_cache()

    def make_data(self, params):
        # The mini-batch variant is benchmarked on the Olivetti faces too.
        return _olivetti_faces_dataset()

    def make_estimator(self, params):
        fit_algorithm, n_jobs = params
        return MiniBatchDictionaryLearning(n_components=15,
                                           fit_algorithm=fit_algorithm,
                                           alpha=0.1,
                                           batch_size=3,
                                           random_state=0,
                                           n_jobs=n_jobs)

    def make_scorers(self):
        make_dict_learning_scorers(self)
| bsd-3-clause |
DeltaEpsilon-HackFMI2/FMICalendar-REST | venv/lib/python2.7/site-packages/rest_framework/tests/test_relations_pk.py | 21 | 22294 | from __future__ import unicode_literals
from django.db import models
from django.test import TestCase
from rest_framework import serializers
from rest_framework.tests.models import (
BlogPost, ManyToManyTarget, ManyToManySource, ForeignKeyTarget, ForeignKeySource,
NullableForeignKeySource, OneToOneTarget, NullableOneToOneSource,
)
from rest_framework.compat import six
# ManyToMany
class ManyToManyTargetSerializer(serializers.ModelSerializer):
    """Target side; reverse relation 'sources' serialized as a PK list."""
    class Meta:
        model = ManyToManyTarget
        fields = ('id', 'name', 'sources')


class ManyToManySourceSerializer(serializers.ModelSerializer):
    """Source side; forward relation 'targets' serialized as a PK list."""
    class Meta:
        model = ManyToManySource
        fields = ('id', 'name', 'targets')


# ForeignKey
class ForeignKeyTargetSerializer(serializers.ModelSerializer):
    """Target side; reverse FK relation 'sources' serialized as a PK list."""
    class Meta:
        model = ForeignKeyTarget
        fields = ('id', 'name', 'sources')


class ForeignKeySourceSerializer(serializers.ModelSerializer):
    """Source side; forward FK 'target' serialized as a single PK."""
    class Meta:
        model = ForeignKeySource
        fields = ('id', 'name', 'target')


# Nullable ForeignKey
class NullableForeignKeySourceSerializer(serializers.ModelSerializer):
    """Like ForeignKeySourceSerializer, but 'target' may be null."""
    class Meta:
        model = NullableForeignKeySource
        fields = ('id', 'name', 'target')


# Nullable OneToOne
class NullableOneToOneTargetSerializer(serializers.ModelSerializer):
    """Target side of a nullable one-to-one; 'nullable_source' is a PK."""
    class Meta:
        model = OneToOneTarget
        fields = ('id', 'name', 'nullable_source')
# TODO: Add test that .data cannot be accessed prior to .is_valid
class PKManyToManyTests(TestCase):
    """PrimaryKeyRelatedField round-trips over a plain ManyToManyField.

    Fixture (built in setUp): source-N is related to targets 1..N, i.e.
    source-1 -> [1], source-2 -> [1, 2], source-3 -> [1, 2, 3].
    """

    def setUp(self):
        for idx in range(1, 4):
            target = ManyToManyTarget(name='target-%d' % idx)
            target.save()
            source = ManyToManySource(name='source-%d' % idx)
            source.save()
            # Link the new source to every target created so far.
            for target in ManyToManyTarget.objects.all():
                source.targets.add(target)

    def test_many_to_many_retrieve(self):
        queryset = ManyToManySource.objects.all()
        serializer = ManyToManySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'targets': [1]},
            {'id': 2, 'name': 'source-2', 'targets': [1, 2]},
            {'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]}
        ]
        self.assertEqual(serializer.data, expected)

    def test_reverse_many_to_many_retrieve(self):
        queryset = ManyToManyTarget.objects.all()
        serializer = ManyToManyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
            {'id': 2, 'name': 'target-2', 'sources': [2, 3]},
            {'id': 3, 'name': 'target-3', 'sources': [3]}
        ]
        self.assertEqual(serializer.data, expected)

    def test_many_to_many_update(self):
        data = {'id': 1, 'name': 'source-1', 'targets': [1, 2, 3]}
        instance = ManyToManySource.objects.get(pk=1)
        serializer = ManyToManySourceSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        serializer.save()
        self.assertEqual(serializer.data, data)

        # Ensure source 1 is updated, and everything else is as expected
        queryset = ManyToManySource.objects.all()
        serializer = ManyToManySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'targets': [1, 2, 3]},
            {'id': 2, 'name': 'source-2', 'targets': [1, 2]},
            {'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]}
        ]
        self.assertEqual(serializer.data, expected)

    def test_reverse_many_to_many_update(self):
        data = {'id': 1, 'name': 'target-1', 'sources': [1]}
        instance = ManyToManyTarget.objects.get(pk=1)
        serializer = ManyToManyTargetSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        serializer.save()
        self.assertEqual(serializer.data, data)

        # Ensure target 1 is updated, and everything else is as expected
        queryset = ManyToManyTarget.objects.all()
        serializer = ManyToManyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [1]},
            {'id': 2, 'name': 'target-2', 'sources': [2, 3]},
            {'id': 3, 'name': 'target-3', 'sources': [3]}
        ]
        self.assertEqual(serializer.data, expected)

    def test_many_to_many_create(self):
        data = {'id': 4, 'name': 'source-4', 'targets': [1, 3]}
        serializer = ManyToManySourceSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(serializer.data, data)
        self.assertEqual(obj.name, 'source-4')

        # Ensure source 4 is added, and everything else is as expected
        queryset = ManyToManySource.objects.all()
        serializer = ManyToManySourceSerializer(queryset, many=True)
        self.assertFalse(serializer.fields['targets'].read_only)
        expected = [
            {'id': 1, 'name': 'source-1', 'targets': [1]},
            {'id': 2, 'name': 'source-2', 'targets': [1, 2]},
            {'id': 3, 'name': 'source-3', 'targets': [1, 2, 3]},
            {'id': 4, 'name': 'source-4', 'targets': [1, 3]},
        ]
        self.assertEqual(serializer.data, expected)

    def test_reverse_many_to_many_create(self):
        data = {'id': 4, 'name': 'target-4', 'sources': [1, 3]}
        serializer = ManyToManyTargetSerializer(data=data)
        self.assertFalse(serializer.fields['sources'].read_only)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(serializer.data, data)
        self.assertEqual(obj.name, 'target-4')

        # Ensure target 4 is added, and everything else is as expected
        queryset = ManyToManyTarget.objects.all()
        serializer = ManyToManyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
            {'id': 2, 'name': 'target-2', 'sources': [2, 3]},
            {'id': 3, 'name': 'target-3', 'sources': [3]},
            {'id': 4, 'name': 'target-4', 'sources': [1, 3]}
        ]
        self.assertEqual(serializer.data, expected)
class PKForeignKeyTests(TestCase):
    """PrimaryKeyRelatedField round-trips over a ForeignKey and its reverse.

    Fixture (built in setUp): target-1 owns sources 1..3; target-2 is empty.
    """

    def setUp(self):
        target = ForeignKeyTarget(name='target-1')
        target.save()
        new_target = ForeignKeyTarget(name='target-2')
        new_target.save()
        for idx in range(1, 4):
            source = ForeignKeySource(name='source-%d' % idx, target=target)
            source.save()

    def test_foreign_key_retrieve(self):
        queryset = ForeignKeySource.objects.all()
        serializer = ForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': 1}
        ]
        self.assertEqual(serializer.data, expected)

    def test_reverse_foreign_key_retrieve(self):
        queryset = ForeignKeyTarget.objects.all()
        serializer = ForeignKeyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
            {'id': 2, 'name': 'target-2', 'sources': []},
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_update(self):
        data = {'id': 1, 'name': 'source-1', 'target': 2}
        instance = ForeignKeySource.objects.get(pk=1)
        serializer = ForeignKeySourceSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(serializer.data, data)
        serializer.save()

        # Ensure source 1 is updated, and everything else is as expected
        queryset = ForeignKeySource.objects.all()
        serializer = ForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 2},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': 1}
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_update_incorrect_type(self):
        # A non-PK value for the relation must produce a validation error,
        # not an exception.
        data = {'id': 1, 'name': 'source-1', 'target': 'foo'}
        instance = ForeignKeySource.objects.get(pk=1)
        serializer = ForeignKeySourceSerializer(instance, data=data)
        self.assertFalse(serializer.is_valid())
        self.assertEqual(serializer.errors, {'target': ['Incorrect type. Expected pk value, received %s.' % six.text_type.__name__]})

    def test_reverse_foreign_key_update(self):
        data = {'id': 2, 'name': 'target-2', 'sources': [1, 3]}
        instance = ForeignKeyTarget.objects.get(pk=2)
        serializer = ForeignKeyTargetSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        # We shouldn't have saved anything to the db yet since save
        # hasn't been called.
        queryset = ForeignKeyTarget.objects.all()
        new_serializer = ForeignKeyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [1, 2, 3]},
            {'id': 2, 'name': 'target-2', 'sources': []},
        ]
        self.assertEqual(new_serializer.data, expected)

        serializer.save()
        self.assertEqual(serializer.data, data)

        # Ensure target 2 is update, and everything else is as expected
        queryset = ForeignKeyTarget.objects.all()
        serializer = ForeignKeyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [2]},
            {'id': 2, 'name': 'target-2', 'sources': [1, 3]},
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_create(self):
        data = {'id': 4, 'name': 'source-4', 'target': 2}
        serializer = ForeignKeySourceSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(serializer.data, data)
        self.assertEqual(obj.name, 'source-4')

        # Ensure source 4 is added, and everything else is as expected
        queryset = ForeignKeySource.objects.all()
        serializer = ForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': 1},
            {'id': 4, 'name': 'source-4', 'target': 2},
        ]
        self.assertEqual(serializer.data, expected)

    def test_reverse_foreign_key_create(self):
        data = {'id': 3, 'name': 'target-3', 'sources': [1, 3]}
        serializer = ForeignKeyTargetSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(serializer.data, data)
        self.assertEqual(obj.name, 'target-3')

        # Ensure target 3 is added, and everything else is as expected
        queryset = ForeignKeyTarget.objects.all()
        serializer = ForeignKeyTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'sources': [2]},
            {'id': 2, 'name': 'target-2', 'sources': []},
            {'id': 3, 'name': 'target-3', 'sources': [1, 3]},
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_update_with_invalid_null(self):
        # The FK on this model is NOT nullable, so null must be rejected.
        data = {'id': 1, 'name': 'source-1', 'target': None}
        instance = ForeignKeySource.objects.get(pk=1)
        serializer = ForeignKeySourceSerializer(instance, data=data)
        self.assertFalse(serializer.is_valid())
        self.assertEqual(serializer.errors, {'target': ['This field is required.']})

    def test_foreign_key_with_empty(self):
        """
        Regression test for #1072

        https://github.com/tomchristie/django-rest-framework/issues/1072
        """
        serializer = NullableForeignKeySourceSerializer()
        self.assertEqual(serializer.data['target'], None)
class PKNullableForeignKeyTests(TestCase):
    """Null and empty-string handling for a *nullable* ForeignKey.

    Fixture (built in setUp): sources 1 and 2 point at target-1; source-3
    has a null target.
    """

    def setUp(self):
        target = ForeignKeyTarget(name='target-1')
        target.save()
        for idx in range(1, 4):
            if idx == 3:
                target = None  # source-3 is created without a target
            source = NullableForeignKeySource(name='source-%d' % idx, target=target)
            source.save()

    def test_foreign_key_retrieve_with_null(self):
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None},
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_create_with_valid_null(self):
        data = {'id': 4, 'name': 'source-4', 'target': None}
        serializer = NullableForeignKeySourceSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(serializer.data, data)
        self.assertEqual(obj.name, 'source-4')

        # Ensure source 4 is created, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None},
            {'id': 4, 'name': 'source-4', 'target': None}
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_create_with_valid_emptystring(self):
        """
        The emptystring should be interpreted as null in the context
        of relationships.
        """
        data = {'id': 4, 'name': 'source-4', 'target': ''}
        expected_data = {'id': 4, 'name': 'source-4', 'target': None}
        serializer = NullableForeignKeySourceSerializer(data=data)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(serializer.data, expected_data)
        self.assertEqual(obj.name, 'source-4')

        # Ensure source 4 is created, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': 1},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None},
            {'id': 4, 'name': 'source-4', 'target': None}
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_update_with_valid_null(self):
        data = {'id': 1, 'name': 'source-1', 'target': None}
        instance = NullableForeignKeySource.objects.get(pk=1)
        serializer = NullableForeignKeySourceSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(serializer.data, data)
        serializer.save()

        # Ensure source 1 is updated, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': None},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None}
        ]
        self.assertEqual(serializer.data, expected)

    def test_foreign_key_update_with_valid_emptystring(self):
        """
        The emptystring should be interpreted as null in the context
        of relationships.
        """
        data = {'id': 1, 'name': 'source-1', 'target': ''}
        expected_data = {'id': 1, 'name': 'source-1', 'target': None}
        instance = NullableForeignKeySource.objects.get(pk=1)
        serializer = NullableForeignKeySourceSerializer(instance, data=data)
        self.assertTrue(serializer.is_valid())
        self.assertEqual(serializer.data, expected_data)
        serializer.save()

        # Ensure source 1 is updated, and everything else is as expected
        queryset = NullableForeignKeySource.objects.all()
        serializer = NullableForeignKeySourceSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'source-1', 'target': None},
            {'id': 2, 'name': 'source-2', 'target': 1},
            {'id': 3, 'name': 'source-3', 'target': None}
        ]
        self.assertEqual(serializer.data, expected)

    # reverse foreign keys MUST be read_only
    # In the general case they do not provide .remove() or .clear()
    # and cannot be arbitrarily set.

    # def test_reverse_foreign_key_update(self):
    #     data = {'id': 1, 'name': 'target-1', 'sources': [1]}
    #     instance = ForeignKeyTarget.objects.get(pk=1)
    #     serializer = ForeignKeyTargetSerializer(instance, data=data)
    #     self.assertTrue(serializer.is_valid())
    #     self.assertEqual(serializer.data, data)
    #     serializer.save()
    #
    #     # Ensure target 1 is updated, and everything else is as expected
    #     queryset = ForeignKeyTarget.objects.all()
    #     serializer = ForeignKeyTargetSerializer(queryset, many=True)
    #     expected = [
    #         {'id': 1, 'name': 'target-1', 'sources': [1]},
    #         {'id': 2, 'name': 'target-2', 'sources': []},
    #     ]
    #     self.assertEqual(serializer.data, expected)
class PKNullableOneToOneTests(TestCase):
    """Reverse serialization of a nullable one-to-one relation."""

    def setUp(self):
        # target-1 has no source attached; target-2 owns source-1.
        target = OneToOneTarget(name='target-1')
        target.save()
        new_target = OneToOneTarget(name='target-2')
        new_target.save()
        source = NullableOneToOneSource(name='source-1', target=new_target)
        source.save()

    def test_reverse_foreign_key_retrieve_with_null(self):
        queryset = OneToOneTarget.objects.all()
        serializer = NullableOneToOneTargetSerializer(queryset, many=True)
        expected = [
            {'id': 1, 'name': 'target-1', 'nullable_source': None},
            {'id': 2, 'name': 'target-2', 'nullable_source': 1},
        ]
        self.assertEqual(serializer.data, expected)
# The below models and tests ensure that serializer fields corresponding
# to a ManyToManyField field with a user-specified ``through`` model are
# set to read only
class ManyToManyThroughTarget(models.Model):
    # Target side of an m2m with an explicit `through` model.
    name = models.CharField(max_length=100)


class ManyToManyThrough(models.Model):
    # Explicit join table; its presence makes the m2m field read-only in
    # the serializers below.
    # NOTE(review): no on_delete on these FKs -- this code predates Django
    # 2.0, where on_delete became mandatory.
    source = models.ForeignKey('ManyToManyThroughSource')
    target = models.ForeignKey(ManyToManyThroughTarget)


class ManyToManyThroughSource(models.Model):
    name = models.CharField(max_length=100)
    targets = models.ManyToManyField(ManyToManyThroughTarget,
                                     related_name='sources',
                                     through='ManyToManyThrough')


class ManyToManyThroughTargetSerializer(serializers.ModelSerializer):
    class Meta:
        model = ManyToManyThroughTarget
        fields = ('id', 'name', 'sources')


class ManyToManyThroughSourceSerializer(serializers.ModelSerializer):
    class Meta:
        model = ManyToManyThroughSource
        fields = ('id', 'name', 'targets')
class PKManyToManyThroughTests(TestCase):
    """m2m fields with a user-specified ``through`` model must be read-only."""

    def setUp(self):
        self.source = ManyToManyThroughSource.objects.create(
            name='through-source-1')
        self.target = ManyToManyThroughTarget.objects.create(
            name='through-target-1')

    def test_many_to_many_create(self):
        data = {'id': 2, 'name': 'source-2', 'targets': [self.target.pk]}
        serializer = ManyToManyThroughSourceSerializer(data=data)
        self.assertTrue(serializer.fields['targets'].read_only)
        self.assertTrue(serializer.is_valid())
        obj = serializer.save()
        self.assertEqual(obj.name, 'source-2')
        # Read-only relation: the posted targets must be ignored.
        self.assertEqual(obj.targets.count(), 0)

    def test_many_to_many_reverse_create(self):
        data = {'id': 2, 'name': 'target-2', 'sources': [self.source.pk]}
        serializer = ManyToManyThroughTargetSerializer(data=data)
        self.assertTrue(serializer.fields['sources'].read_only)
        self.assertTrue(serializer.is_valid())
        # Fix: save() was accidentally called twice in a row here
        # (creating the instance and then immediately re-saving it);
        # a single call is sufficient and mirrors the forward test above.
        obj = serializer.save()
        self.assertEqual(obj.name, 'target-2')
        # Read-only relation: the posted sources must be ignored.
        self.assertEqual(obj.sources.count(), 0)
# Regression tests for #694 (`source` attribute on related fields)
class PrimaryKeyRelatedFieldSourceTests(TestCase):
    """Regression tests for #694: `source` may resolve to manager-returning
    methods, queryset-returning methods, or dotted attribute paths."""

    def test_related_manager_source(self):
        """
        Relational fields should be able to use manager-returning methods as their source.
        """
        BlogPost.objects.create(title='blah')
        field = serializers.PrimaryKeyRelatedField(many=True, source='get_blogposts_manager')

        class ClassWithManagerMethod(object):
            def get_blogposts_manager(self):
                return BlogPost.objects

        obj = ClassWithManagerMethod()
        value = field.field_to_native(obj, 'field_name')
        self.assertEqual(value, [1])

    def test_related_queryset_source(self):
        """
        Relational fields should be able to use queryset-returning methods as their source.
        """
        BlogPost.objects.create(title='blah')
        field = serializers.PrimaryKeyRelatedField(many=True, source='get_blogposts_queryset')

        class ClassWithQuerysetMethod(object):
            def get_blogposts_queryset(self):
                return BlogPost.objects.all()

        obj = ClassWithQuerysetMethod()
        value = field.field_to_native(obj, 'field_name')
        self.assertEqual(value, [1])

    def test_dotted_source(self):
        """
        Source argument should support dotted.source notation.
        """
        BlogPost.objects.create(title='blah')
        field = serializers.PrimaryKeyRelatedField(many=True, source='a.b.c')

        class ClassWithQuerysetMethod(object):
            # Nested plain dicts: the dotted source traverses dictionary keys.
            a = {
                'b': {
                    'c': BlogPost.objects.all()
                }
            }

        obj = ClassWithQuerysetMethod()
        value = field.field_to_native(obj, 'field_name')
        self.assertEqual(value, [1])
| mit |
lbdreyer/iris | lib/iris/tests/unit/aux_factory/test_AuxCoordFactory.py | 5 | 6412 | # Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
Unit tests for `iris.aux_factory.AuxCoordFactory`.
"""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
import iris
from iris._lazy_data import as_lazy_data, is_lazy_data
from iris.aux_factory import AuxCoordFactory
from iris.coords import AuxCoord
class Test__nd_points(tests.IrisTest):
    """Unit tests for `AuxCoordFactory._nd_points`.

    Checks that coordinate points are broadcast/transposed to the requested
    dimensionality, for both real (numpy) and lazy arrays.
    """

    def test_numpy_scalar_coord__zero_ndim(self):
        # A scalar coordinate mapped into a 0-dim target still yields a
        # 1-element array.
        points = np.array(1)
        coord = AuxCoord(points)
        result = AuxCoordFactory._nd_points(coord, (), 0)
        expected = np.array([1])
        self.assertArrayEqual(result, expected)

    def test_numpy_scalar_coord(self):
        # A scalar coordinate in a 2-dim target is reshaped to (1, 1).
        value = 1
        points = np.array(value)
        coord = AuxCoord(points)
        result = AuxCoordFactory._nd_points(coord, (), 2)
        expected = np.array(value).reshape(1, 1)
        self.assertArrayEqual(result, expected)

    def test_numpy_simple(self):
        # Dims already in natural order: points pass through unchanged.
        points = np.arange(12).reshape(4, 3)
        coord = AuxCoord(points)
        result = AuxCoordFactory._nd_points(coord, (0, 1), 2)
        expected = points
        self.assertArrayEqual(result, expected)

    def test_numpy_complex(self):
        # Dims given in reversed order within a 5-dim target: transposed and
        # padded with length-1 axes.
        points = np.arange(12).reshape(4, 3)
        coord = AuxCoord(points)
        result = AuxCoordFactory._nd_points(coord, (3, 2), 5)
        expected = points.T[np.newaxis, np.newaxis, ..., np.newaxis]
        self.assertArrayEqual(result, expected)

    def test_lazy_simple(self):
        raw_points = np.arange(12).reshape(4, 3)
        points = as_lazy_data(raw_points, raw_points.shape)
        coord = AuxCoord(points)
        self.assertTrue(is_lazy_data(coord.core_points()))
        result = AuxCoordFactory._nd_points(coord, (0, 1), 2)
        # Check we haven't triggered the loading of the coordinate values.
        self.assertTrue(is_lazy_data(coord.core_points()))
        self.assertTrue(is_lazy_data(result))
        expected = raw_points
        self.assertArrayEqual(result, expected)

    def test_lazy_complex(self):
        raw_points = np.arange(12).reshape(4, 3)
        points = as_lazy_data(raw_points, raw_points.shape)
        coord = AuxCoord(points)
        self.assertTrue(is_lazy_data(coord.core_points()))
        result = AuxCoordFactory._nd_points(coord, (3, 2), 5)
        # Check we haven't triggered the loading of the coordinate values.
        self.assertTrue(is_lazy_data(coord.core_points()))
        self.assertTrue(is_lazy_data(result))
        expected = raw_points.T[np.newaxis, np.newaxis, ..., np.newaxis]
        self.assertArrayEqual(result, expected)
class Test__nd_bounds(tests.IrisTest):
    """Unit tests for `AuxCoordFactory._nd_bounds`.

    Same broadcasting behaviour as `_nd_points`, but the trailing bounds
    axis (length 2 here) must always stay last.
    """

    def test_numpy_scalar_coord__zero_ndim(self):
        points = np.array(0.5)
        bounds = np.arange(2)
        coord = AuxCoord(points, bounds=bounds)
        result = AuxCoordFactory._nd_bounds(coord, (), 0)
        expected = bounds
        self.assertArrayEqual(result, expected)

    def test_numpy_scalar_coord(self):
        points = np.array(0.5)
        bounds = np.arange(2).reshape(1, 2)
        coord = AuxCoord(points, bounds=bounds)
        result = AuxCoordFactory._nd_bounds(coord, (), 2)
        expected = bounds[np.newaxis]
        self.assertArrayEqual(result, expected)

    def test_numpy_simple(self):
        points = np.arange(12).reshape(4, 3)
        bounds = np.arange(24).reshape(4, 3, 2)
        coord = AuxCoord(points, bounds=bounds)
        result = AuxCoordFactory._nd_bounds(coord, (0, 1), 2)
        expected = bounds
        self.assertArrayEqual(result, expected)

    def test_numpy_complex(self):
        # Reversed dims: data axes transposed, bounds axis kept last.
        points = np.arange(12).reshape(4, 3)
        bounds = np.arange(24).reshape(4, 3, 2)
        coord = AuxCoord(points, bounds=bounds)
        result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5)
        expected = bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2)
        self.assertArrayEqual(result, expected)

    def test_lazy_simple(self):
        raw_points = np.arange(12).reshape(4, 3)
        points = as_lazy_data(raw_points, raw_points.shape)
        raw_bounds = np.arange(24).reshape(4, 3, 2)
        bounds = as_lazy_data(raw_bounds, raw_bounds.shape)
        coord = AuxCoord(points, bounds=bounds)
        self.assertTrue(is_lazy_data(coord.core_bounds()))
        result = AuxCoordFactory._nd_bounds(coord, (0, 1), 2)
        # Check we haven't triggered the loading of the coordinate values.
        self.assertTrue(is_lazy_data(coord.core_bounds()))
        self.assertTrue(is_lazy_data(result))
        expected = raw_bounds
        self.assertArrayEqual(result, expected)

    def test_lazy_complex(self):
        raw_points = np.arange(12).reshape(4, 3)
        points = as_lazy_data(raw_points, raw_points.shape)
        raw_bounds = np.arange(24).reshape(4, 3, 2)
        bounds = as_lazy_data(raw_bounds, raw_bounds.shape)
        coord = AuxCoord(points, bounds=bounds)
        self.assertTrue(is_lazy_data(coord.core_bounds()))
        result = AuxCoordFactory._nd_bounds(coord, (3, 2), 5)
        # Check we haven't triggered the loading of the coordinate values.
        self.assertTrue(is_lazy_data(coord.core_bounds()))
        self.assertTrue(is_lazy_data(result))
        expected = raw_bounds.transpose((1, 0, 2)).reshape(1, 1, 3, 4, 1, 2)
        self.assertArrayEqual(result, expected)
@tests.skip_data
class Test_lazy_aux_coords(tests.IrisTest):
    """Checks that a loaded cube's aux/derived coordinates stay lazy."""

    def setUp(self):
        path = tests.get_data_path(
            ["NetCDF", "testing", "small_theta_colpex.nc"]
        )
        self.cube = iris.load_cube(path, "air_potential_temperature")

    def _check_lazy(self):
        # Every aux and derived coordinate must still hold lazy points, and
        # lazy bounds where bounds exist.
        coords = self.cube.aux_coords + self.cube.derived_coords
        for coord in coords:
            self.assertTrue(coord.has_lazy_points())
            if coord.has_bounds():
                self.assertTrue(coord.has_lazy_bounds())

    def test_lazy_coord_loading(self):
        # Test that points and bounds arrays stay lazy upon cube loading.
        self._check_lazy()

    def test_lazy_coord_printing(self):
        # Test that points and bounds arrays stay lazy after cube printing.
        _ = str(self.cube)
        self._check_lazy()
if __name__ == "__main__":
tests.main()
| lgpl-3.0 |
gymnasium/edx-platform | openedx/core/djangoapps/catalog/management/commands/tests/test_create_catalog_integrations.py | 13 | 4190 | """
Test cases for catalog_integrations command.
"""
from django.test import TestCase
from django.core.management import call_command, CommandError
from openedx.core.djangoapps.catalog.models import CatalogIntegration
from openedx.core.djangoapps.catalog.tests.mixins import CatalogIntegrationMixin
class TestCreateCatalogIntegrations(CatalogIntegrationMixin, TestCase):
    """ Test the create_catalog_integrations command """

    def test_without_required(self):
        ''' Test that required values are supplied '''
        # test without service_username
        with self.assertRaises(CommandError):
            call_command(
                "create_catalog_integrations",
                "--internal_api_url", self.catalog_integration_defaults['internal_api_url'],
            )
        # test without internal_api_url
        with self.assertRaises(CommandError):
            call_command(
                "create_catalog_integrations",
                "--service_username", self.catalog_integration_defaults['service_username'],
            )

    def test_with_required(self):
        ''' Test with required arguments supplied'''
        initial = CatalogIntegration.current()

        # test with both required args
        call_command(
            "create_catalog_integrations",
            "--internal_api_url", self.catalog_integration_defaults['internal_api_url'],
            "--service_username", self.catalog_integration_defaults['service_username']
        )

        current = CatalogIntegration.current()

        # assert current has changed
        self.assertNotEqual(
            initial,
            current
        )
        # enabled defaults to False when --enabled is not passed
        self.assertEqual(
            current.enabled,
            False
        )
        self.assertEqual(
            current.internal_api_url,
            self.catalog_integration_defaults['internal_api_url']
        )
        self.assertEqual(
            current.service_username,
            self.catalog_integration_defaults['service_username']
        )

    def test_with_optional(self):
        ''' Test with optionals arguments supplied'''
        initial = CatalogIntegration.current()

        # test --enabled
        call_command(
            "create_catalog_integrations",
            "--internal_api_url", self.catalog_integration_defaults['internal_api_url'],
            "--service_username", self.catalog_integration_defaults['service_username'],
            "--enabled"
        )

        current = CatalogIntegration.current()

        # assert current has changed
        self.assertNotEqual(
            initial,
            current
        )
        self.assertEqual(
            current.enabled,
            True
        )
        self.assertEqual(
            current.internal_api_url,
            self.catalog_integration_defaults['internal_api_url']
        )
        self.assertEqual(
            current.service_username,
            self.catalog_integration_defaults['service_username']
        )

        # test with all args
        call_command(
            "create_catalog_integrations",
            "--internal_api_url", self.catalog_integration_defaults['internal_api_url'],
            "--service_username", self.catalog_integration_defaults['service_username'],
            "--enabled",
            "--cache_ttl", 500,
            "--long_term_cache_ttl", 500,
            "--page_size", 500
        )

        current = CatalogIntegration.current()

        # assert current has changed
        self.assertNotEqual(
            initial,
            current
        )
        self.assertEqual(
            current.enabled,
            True
        )
        self.assertEqual(
            current.internal_api_url,
            self.catalog_integration_defaults['internal_api_url']
        )
        self.assertEqual(
            current.service_username,
            self.catalog_integration_defaults['service_username']
        )
        self.assertEqual(
            current.cache_ttl,
            500
        )
        self.assertEqual(
            current.long_term_cache_ttl,
            500
        )
        self.assertEqual(
            current.page_size,
            500
        )
| agpl-3.0 |
Seklfreak/Robyul-Red-DiscordBot | cogs/mirror.py | 2 | 6343 | import discord
from discord.ext import commands
from __main__ import send_cmd_help
import os
from .utils.dataIO import dataIO
from .utils import checks
import re
import aiohttp
import json
from .utils.chat_formatting import pagify
import asyncio
__author__ = "Sebastian Winkler <[email protected]>"
__version__ = "1.0"
class Mirror:
    """Mirrors discord chats between servers!"""

    def __init__(self, bot):
        self.bot = bot
        self.mirrored_channels_file_path = "data/mirror/mirrored_channels.json"
        # List of mirror entries; each entry holds a "mode" plus a list of
        # {"channel_id", "webhook_id", "webhook_token"} channel records.
        self.mirrored_channels = dataIO.load_json(self.mirrored_channels_file_path)

    @commands.group(pass_context=True, no_pm=True, name="mirror")
    @checks.mod_or_permissions(administrator=True)
    async def _mirror(self, context):
        """Manages mirrored channels"""
        if context.invoked_subcommand is None:
            await send_cmd_help(context)

    @_mirror.command(pass_context=True, name="list")
    @checks.mod_or_permissions(administrator=True)
    async def _list(self, context):
        """List active channel mirrors"""
        message = ""
        i = 0
        for mirrored_channel_entry in self.mirrored_channels:
            message += ":satellite: `#{0}`: `mode={1[mode]}`, connected channels:\n".format(i, mirrored_channel_entry)
            for channel_entry in mirrored_channel_entry["channels"]:
                mirrored_channel = self.bot.get_channel(channel_entry["channel_id"])
                message += "`#{1.name} ({1.id})` on `{1.server.name} ({1.server.id})` (`webhook {0[webhook_id]}`): {1.mention}\n".format(channel_entry, mirrored_channel)
            i += 1
        # Paginate so long listings stay under discord's message size limit.
        for page in pagify(message, delims=["\n"]):
            await self.bot.say(page)

    async def mirror_message(self, message):
        """Event listener: repost qualifying media links to linked channels."""
        author = message.author
        channel = message.channel
        # Ignore DMs, our own messages, other bots, and bot commands.
        if message.server is None:
            return
        if message.channel.is_private:
            return
        if author == self.bot.user:
            return
        if author.bot:
            return
        if self._is_command(message.content):
            return
        for mirrored_channel_entry in self.mirrored_channels:
            channel_gets_mirrored = any(
                channel.id == mirrored_channel["channel_id"]
                for mirrored_channel in mirrored_channel_entry["channels"])
            if not channel_gets_mirrored:
                continue
            if mirrored_channel_entry["mode"] == "media":
                links = self._collect_links(message)
                if not links:
                    continue
                # Mirror to every connected channel except the source one.
                channels_to_mirror_to = [
                    c for c in mirrored_channel_entry["channels"]
                    if channel.id != c["channel_id"]]
                for target_channel_data in channels_to_mirror_to:
                    for link in links:
                        # Fixed: previously looked up the stale
                        # `mirrored_channel` loop variable instead of the
                        # actual target channel.
                        target_channel = self.bot.get_channel(target_channel_data["channel_id"])
                        if target_channel is not None:
                            mirror_text = "posted {0} in `#{1.name}` on the `{1.server.name}` server ({1.mention})".format(link, channel)
                            await self._post_mirrored_message(mirror_text, author, channel, target_channel_data["webhook_id"], target_channel_data["webhook_token"])

    def _collect_links(self, message):
        """Return attachment URLs plus non-`<...>`-escaped links in `message`."""
        links = []
        for attachment in message.attachments:
            links.append(attachment["url"])
        if len(message.content) > 0 and "http" in message.content:
            for item in message.content.split(" "):
                for linkFound in re.findall("(?P<url><?https?://[^\s]+>?)", item):
                    # Links fully wrapped in <...> suppress embeds: skip them.
                    if linkFound[0] == "<" and linkFound[-1] == ">":
                        continue
                    if linkFound[0] == "<":
                        links.append(linkFound[1:])
                    else:
                        links.append(linkFound)
        return links

    async def _post_mirrored_message(self, message, author, source_channel, target_webhook_id, target_webhook_token):
        """Post `message` through the target webhook, impersonating `author`.

        Retries once after the indicated delay when discord rate-limits us.
        """
        headers = {"user-agent": "Red-cog-Mirror/"+__version__, "content-type": "application/json"}
        # use webhook
        conn = aiohttp.TCPConnector(verify_ssl=False)
        session = aiohttp.ClientSession(connector=conn)
        url = "https://discordapp.com/api/webhooks/{0}/{1}".format(target_webhook_id, target_webhook_token)
        payload = {"username": author.name, "avatar_url": author.avatar_url, "content": message}
        try:
            async with session.post(url, data=json.dumps(payload), headers=headers) as r:
                result = await r.json()
        finally:
            # Always release the connection, even when the request raises.
            session.close()
        if result is not None:
            # A successful webhook execution has no body; anything else is
            # unexpected (most commonly a rate-limit response).
            print("mirroring message webhook unexpected result:", result)
            if "retry_after" in result and result["retry_after"] != "":
                retry_delay = int(result["retry_after"])
                print("Will retry in", retry_delay, "seconds")
                await asyncio.sleep(retry_delay)
                await self._post_mirrored_message(message, author, source_channel, target_webhook_id, target_webhook_token)

    def _is_command(self, msg):
        """True if the message starts with any of the bot's command prefixes."""
        for p in self.bot.settings.prefixes:
            if msg.startswith(p):
                return True
        return False
def check_folders():
    """Ensure the data directories used by the Mirror cog exist."""
    for folder in ("data", "data/mirror/"):
        if os.path.exists(folder):
            continue
        print("Creating " + folder + " folder...")
        os.makedirs(folder)
def check_files():
    """Create an empty mirrored_channels.json config when it is missing."""
    if os.path.isfile("data/mirror/mirrored_channels.json"):
        return
    print("Creating empty mirrored_channels.json, please fill in details...")
    dataIO.save_json("data/mirror/mirrored_channels.json", [])
def setup(bot):
    # Entry point called by Red when loading the cog: make sure the data
    # folder and config file exist, then register the cog and its listener.
    check_folders()
    check_files()
    n = Mirror(bot)
    # Mirror every incoming message via the on_message event.
    bot.add_listener(n.mirror_message, "on_message")
    bot.add_cog(n)
ZhangXinNan/tensorflow | tensorflow/contrib/kfac/examples/convnet_mnist_multi_tower_main.py | 15 | 1573 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Train a ConvNet on MNIST using K-FAC.
Multi tower training mode. See `convnet.train_mnist_multitower` for details.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import tensorflow as tf
from tensorflow.contrib.kfac.examples import convnet
FLAGS = flags.FLAGS
flags.DEFINE_string("data_dir", "/tmp/multitower_1/mnist", "local mnist dir")
flags.DEFINE_integer("num_towers", 2,
"Number of towers for multi tower training.")
def main(unused_argv):
    """Train the MNIST ConvNet with K-FAC, one tower per GPU."""
    _ = unused_argv
    # Multi-tower mode only makes sense with at least two towers.
    assert FLAGS.num_towers > 1
    devices = ["/gpu:{}".format(tower_id) for tower_id in range(FLAGS.num_towers)]
    convnet.train_mnist_multitower(
        FLAGS.data_dir,
        num_epochs=200,
        num_towers=FLAGS.num_towers,
        devices=devices)
if __name__ == "__main__":
tf.app.run(main=main)
| apache-2.0 |
maximus0/thrift | test/py/TestSyntax.py | 99 | 1318 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys, glob
from optparse import OptionParser
# Locate the generated thrift python code via --genpydir.
parser = OptionParser()
parser.add_option('--genpydir', type='string', dest='genpydir', default='gen-py')
options, args = parser.parse_args()
del sys.argv[1:] # clean up hack so unittest doesn't complain
# Make both the generated code and the locally-built thrift library importable.
sys.path.insert(0, options.genpydir)
sys.path.insert(0, glob.glob('../../lib/py/build/lib.*')[0])
# Just import these generated files to make sure they are syntactically valid
from DebugProtoTest import EmptyService
from DebugProtoTest import Inherited
| apache-2.0 |
neumerance/cloudloon2 | openstack_dashboard/dashboards/admin/images/forms.py | 10 | 1025 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.dashboards.project.images_and_snapshots \
.images import forms
class AdminCreateImageForm(forms.CreateImageForm):
    # The admin panel reuses the project-level create-image form unchanged.
    pass
class AdminUpdateImageForm(forms.UpdateImageForm):
    # The admin panel reuses the project-level update-image form unchanged.
    pass
| apache-2.0 |
PercyLau/oonSIM | NFD/.waf-tools/websocket.py | 18 | 3039 | # encoding: utf-8
from waflib import Options, Logs, Errors
from waflib.Configure import conf
import re
def addWebsocketOptions(self, opt):
    # Register the --without-websocket flag (WebSocket support is on by
    # default; the flag stores False into options.with_websocket).
    opt.add_option('--without-websocket', action='store_false', default=True,
                   dest='with_websocket',
                   help='Disable WebSocket face support')

# Expose the helper as a method on waf's OptionsContext so wscripts can call
# opt.addWebsocketOptions(...).
setattr(Options.OptionsContext, "addWebsocketOptions", addWebsocketOptions)
@conf
def checkWebsocket(self, **kw):
    """Locate the bundled websocketpp headers and record their version.

    On success sets WEBSOCKET_VERSION, INCLUDES_WEBSOCKET and HAVE_WEBSOCKET
    in the configuration environment.  On failure the configuration aborts
    when the check is mandatory (the default), otherwise the error is only
    reported.
    """
    if not self.options.with_websocket:
        return

    isMandatory = kw.get('mandatory', True)
    self.start_msg('Checking for WebSocket includes')

    try:
        websocketDir = self.path.find_dir('websocketpp/websocketpp')
        if not websocketDir:
            raise Errors.WafError('Not found')

        versionFile = websocketDir.find_node('version.hpp')
        # Fixed: this previously re-tested `websocketDir`, so a missing
        # version.hpp slipped through and crashed on the read() below.
        if not versionFile:
            raise Errors.WafError('Corrupted: WebSocket version file not found')

        try:
            txt = versionFile.read()
        except (OSError, IOError):
            raise Errors.WafError('Corrupted: cannot read WebSocket version file')

        # Looking for the following:
        # static int const major_version = 0;
        # static int const minor_version = 5;
        # static int const patch_version = 1;
        version = [None, None, None]

        majorVersion = re.compile('^static int const major_version = (\\d+);$', re.M)
        version[0] = majorVersion.search(txt)

        minorVersion = re.compile('^static int const minor_version = (\\d+);$', re.M)
        version[1] = minorVersion.search(txt)

        patchVersion = re.compile('^static int const patch_version = (\\d+);$', re.M)
        version[2] = patchVersion.search(txt)

        if not version[0] or not version[1] or not version[2]:
            raise Errors.WafError('Corrupted: cannot detect websocket version')

        self.env['WEBSOCKET_VERSION'] = [i.group(1) for i in version]

        # todo: version checking, if necessary

        self.end_msg('.'.join(self.env['WEBSOCKET_VERSION']))

        self.env['INCLUDES_WEBSOCKET'] = websocketDir.parent.abspath()
        self.env['HAVE_WEBSOCKET'] = True
        self.define('HAVE_WEBSOCKET', 1)
        self.define('_WEBSOCKETPP_CPP11_STL_', 1)
    except Errors.WafError as error:
        if isMandatory:
            self.end_msg(str(error), color='RED')
            Logs.warn('If you are using git NFD repository, checkout websocketpp submodule: ')
            Logs.warn(' git submodule init && git submodule update')
            Logs.warn('Otherwise, manually download and extract websocketpp library:')
            Logs.warn(' mkdir websocketpp')
            Logs.warn(' curl -L https://github.com/zaphoyd/websocketpp/archive/0.5.1.tar.gz > websocket.tar.gz')
            Logs.warn(' tar zxf websocket.tar.gz -C websocketpp/ --strip 1')
            Logs.warn('Alternatively, WebSocket support can be disabled with --without-websocket')
            self.fatal("The configuration failed")
        else:
            self.end_msg(str(error))
joshpelkey/cmap-parse | cmap_parse.py | 1 | 11702 | ##
#
# cmap_parse.py
# An attempt to parse concept maps, exported from cmap tools...take one
#
# Copyright 2016 Josh Pelkey
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing permissions and limitations under the
# License.
#
##
import glob
import re
import os
import itertools
import networkx as nx
def CxlConversion (file):
    """Parse a CXL (cmap XML) export into concept-link-concept triples.

    Scans `file` (any iterable of lines) for concept, linking-phrase and
    connection elements, then joins each linking phrase's incoming and
    outgoing connections into [from-concept, linking-phrase, to-concept]
    triples.

    Fixed: the function previously ignored its `file` parameter and read the
    module-level global `f` instead; the only caller (CmapParse) passes the
    same open file, so reading the parameter is behaviour-compatible.
    """
    # get the concepts, linking phrases, and connections
    concepts = {}
    linking_phrases = {}
    connections = []
    concepts_linked = []
    for line in file:
        # concepts: element id -> label
        if "concept id=" in line:
            concept = re.findall (r'"([^"]*)"', line)
            concepts[concept[0]] = concept[1]
        # get the linking phrases: element id -> label
        if "linking-phrase id=" in line:
            linking_phrase = re.findall (r'"([^"]*)"', line)
            linking_phrases[linking_phrase[0]] = linking_phrase[1]
        # get the connections as [id, from-id, to-id]
        if "connection id=" in line:
            connections.append (re.findall (r'"([^"]*)"', line))

    # cycle through the linking phrase list, find all lines matching from-id
    # and to-id; edges are combinations of from-id and to-id
    for key in linking_phrases:
        from_links = []
        to_links = []
        for connection in connections:
            # if linking phrase is in the from-id (linking phrase at beginning)
            if key == connection[1]:
                from_links.append ([linking_phrases[key], concepts[connection[2]]])
            # if linking phrase is in the to-id (linking phrase at the end)
            if key == connection[2]:
                to_links.append ([concepts[connection[1]], linking_phrases[key]])

        # now combine the lists, to_links to from_links
        for to_link in to_links:
            for from_link in from_links:
                concepts_linked.append ([to_link[0], to_link[1], from_link[1]])

    return concepts_linked
def CmapParse (cmap_files, result, filenames, root_concept, export_concepts):
    """Parse each concept map and write summary metrics to the `result` file.

    For every input map this writes: number of concepts, number of
    first-level hierarchies under `root_concept`, the highest hierarchy
    (longest simple path from the root), and the number of crosslinks
    (edges joining concepts labelled with different first-level
    hierarchies).  When `export_concepts` is true, all concept labels are
    additionally written out as one column per input file.

    NOTE(review): written for Python 2 (the `map(...)` result is indexed
    below) and requires the third-party `networkx` package.
    """
    # store all concepts to print later
    all_concepts = []

    # open the result file to write output
    rfile = open(result, 'w')
    rfile.write('Filename\t Num Concepts\t Num Hierarchies\t Highest Hierarchy\t Num Crosslinks\t\n\n')

    # iterate over all the files and start doing stuffs
    for index, cmap_file in enumerate(cmap_files):

        # create an empty Multi-directed graph
        G = nx.MultiDiGraph ()

        # open a cmap text file and begin writing results
        # (the module-level global `f` is shared with CxlConversion)
        global f
        f = open (cmap_file)
        rfile.write(filenames[index] + '\t')

        # if file extension cxl, do this fun conversion
        textFormatCorrect = True
        if os.path.splitext(filenames[index])[1][1:] == "cxl":
            concepts_linked = CxlConversion(f)
            for edge in concepts_linked:
                G.add_edge (edge[0].lower(), edge[2].lower(), link=edge[1].lower())
        else:
            # split the lines in to a list
            lines = ((f.read ()).splitlines ())

            # iterate over the list and split each line
            # in to individual lists, delimited by tab
            for line in lines:
                edge = line.split ('\t')
                # break if not 3 items per line
                if len(edge) != 3:
                    rfile.write('>> Text file not formatted correctly.\n')
                    textFormatCorrect = False
                    break
                G.add_edge (edge[0].lower(), edge[2].lower(), link=edge[1].lower())

        # if the file had a line without 3 items, break completely
        if not textFormatCorrect:
            continue

        # if the root concept isn't a concept in the map, fail
        if root_concept.lower() not in G:
            rfile.write('>> ' + root_concept.lower() + ' not a concept in the map.\n')
            continue

        # store first-level hierarchy concepts
        hierarchy_list = G.successors (root_concept.lower())

        # iterate through the main graph and set hierarchy to zero for now
        for x in G:
            G.node[x]['hier'] = 0

        # iterate through the top hierarchy in the main graph and set these
        # first-level hierarchy concepts to an incrementing integer
        hierIter = 1
        for x in hierarchy_list:
            G.node[x]['hier'] = hierIter
            hierIter += 1

        # number of concepts is the number of nodes
        # minus the root node
        num_concepts = G.order () - 1

        # hierarchy is the out degree of the root node
        hierarchy = G.out_degree (root_concept.lower())

        # look at all paths from the root to all
        # other nodes. no repeated nodes (cycles)
        paths_list = []
        for n in G.nodes ():
            for path in nx.all_simple_paths (G, source=root_concept.lower(), target=n):
                paths_list.append (path)

        # highest hierarchy defined here as the max path length
        # this is a bit different than how it's done manually
        highest_hier = max (len (x) for x in paths_list) - 1

        # make subgraphs of all hierarchies; NOTE(review): subgraph_list is
        # built but never used afterwards — confirm before removing.
        subgraph_list = []
        for x in hierarchy_list:
            subgraph = nx.MultiDiGraph ()
            connected_nodes = []
            for y in G.nodes ():
                if nx.has_path (G, x, y):
                    connected_nodes.append (y)
            subgraph = G.subgraph(connected_nodes).copy ()
            subgraph.graph['name'] = x
            subgraph_list.append (subgraph)

        # for node not in first-level hierarchy, check which
        # of the first-level concepts is closest (shortest path)
        # and then label it with that hierarchy
        fail = False
        for n in G.nodes ():
            shortest_path = 0
            assoc_hier = ''
            # NOTE(review): this tests membership in a 2-tuple containing the
            # list object itself, so it is effectively always true; likely
            # intended `n not in hierarchy_list and n != root_concept.lower()`
            # — confirm before changing.
            if n not in (hierarchy_list, root_concept.lower ()):
                path_list = []
                for y in hierarchy_list:
                    if nx.has_path (G, y, n):
                        path_list = nx.shortest_path (G, y, n)
                        if shortest_path == 0:
                            # first reachable first-level concept found
                            assoc_hier = y
                            shortest_path = len (path_list)
                        else:
                            if (len (path_list) < shortest_path):
                                assoc_hier = y
                                shortest_path = len (path_list)
                if assoc_hier:
                    G.node[n]['hier'] = G.node[assoc_hier]['hier']
                else:
                    fail = True
                    rfile.write('>> One or more concepts not connected to first-level hierarchy. \n')
                    break

        # if exporting concepts, store the concepts
        if export_concepts:
            all_concepts.append(G.nodes())

        # a concept was not connected to a first-level hierarchy
        # move on to the next concept map
        if fail:
            continue

        # now i need to find all edges that have
        # two hier node attributes that don't match.
        # these are crosslinks
        total_crosslinks = 0
        for x in G.edges():
            if ((G.node[x[0]]['hier']) != 0) and ((G.node[x[1]]['hier']) != 0):
                if G.node[x[0]]['hier'] != G.node[x[1]]['hier']:
                    total_crosslinks += 1

        # print out the stuffs
        rfile.write(str (num_concepts) + '\t')
        rfile.write(str (hierarchy) + '\t')
        rfile.write(str (highest_hier) + '\t')
        rfile.write(str (total_crosslinks) + '\t')

        # make it pretty
        rfile.write('\n')

        # show me cycles
        #print ('>> Cycles: ' + str (nx.simple_cycles (G)))

        # close up the cmap file
        f.close()

    # if exporting concepts, print them out
    rfile.write('\n')
    if export_concepts:
        rfile.write('Filename\t')
        for filename in filenames:
            rfile.write(filename + '\t')
        rfile.write('\n')
        rfile.write('Concepts')

        # transpose to columns and write
        transposed_all_concepts = map(lambda *row: list(row), *all_concepts)
        for x, concepts in enumerate(transposed_all_concepts):
            rfile.write('\t')
            for concept in transposed_all_concepts[x]:
                if concept:
                    # stripping these &#xa; characters, some cxl files seem
                    # to have them for some reason
                    # NOTE(review): this literal was mangled by encoding in
                    # the copy reviewed; reconstructed as the XML newline
                    # entity '&#xa;' — confirm against the original file.
                    rfile.write(concept.replace('&#xa;', ' ') + '\t')
                else:
                    rfile.write('\t')
            rfile.write('\n')

    # close the result file
    rfile.close()
# eof.zomg
| apache-2.0 |
loop1024/pymo-global | android/pgs4a-0.9.6/python-install/lib/python2.7/zipfile.py | 4 | 54030 | """
Read and write ZIP files.
"""
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
    """Raised when a file is not a valid ZIP archive."""
    pass
class LargeZipFile(Exception):
    """
    Raised when writing a zipfile, the zipfile requires ZIP64 extensions
    and those extensions are disabled.
    """
error = BadZipfile # The exception raised by this module
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = 1 << 16
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
    """Return True if *fp* contains a readable end-of-archive record."""
    try:
        endrec = _EndRecData(fp)
    except IOError:
        return False
    # A truthy end record means the magic number was found.
    return bool(endrec)
def is_zipfile(filename):
    """Quickly see if a file is a ZIP file by checking the magic number.

    The filename argument may be a file or file-like object too.
    """
    try:
        if hasattr(filename, "read"):
            # Already a file-like object; probe it in place.
            return _check_zipfile(fp=filename)
        with open(filename, "rb") as fp:
            return _check_zipfile(fp)
    except IOError:
        return False
def _EndRecData64(fpin, offset, endrec):
    """
    Read the ZIP64 end-of-archive records and use that to update endrec

    fpin: seekable file object.
    offset: negative offset (from end of file) of the plain end-of-archive
            record that was already found.
    endrec: the mutable list built by _EndRecData; updated in place when a
            ZIP64 record is present, and returned either way.
    """
    try:
        fpin.seek(offset - sizeEndCentDir64Locator, 2)
    except IOError:
        # If the seek fails, the file is not large enough to contain a ZIP64
        # end-of-archive record, so just return the end record we were given.
        return endrec

    data = fpin.read(sizeEndCentDir64Locator)
    sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
    if sig != stringEndArchive64Locator:
        # No ZIP64 locator present: this is a plain (non-ZIP64) archive.
        return endrec

    if diskno != 0 or disks != 1:
        raise BadZipfile("zipfiles that span multiple disks are not supported")

    # Assume no 'zip64 extensible data'
    fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
    data = fpin.read(sizeEndCentDir64)
    sig, sz, create_version, read_version, disk_num, disk_dir, \
            dircount, dircount2, dirsize, diroffset = \
            struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        # Locator pointed somewhere without a valid ZIP64 record signature.
        return endrec

    # Update the original endrec using data from the ZIP64 record
    endrec[_ECD_SIGNATURE] = sig
    endrec[_ECD_DISK_NUMBER] = disk_num
    endrec[_ECD_DISK_START] = disk_dir
    endrec[_ECD_ENTRIES_THIS_DISK] = dircount
    endrec[_ECD_ENTRIES_TOTAL] = dircount2
    endrec[_ECD_SIZE] = dirsize
    endrec[_ECD_OFFSET] = diroffset
    return endrec
def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.

    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the file seek offset of this record."""

    # Determine file size
    fpin.seek(0, 2)
    filesize = fpin.tell()

    # Check to see if this is ZIP file with no archive comment (the
    # "end of central directory" structure should be the last item in the
    # file if this is the case).
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except IOError:
        # File is smaller than an end-of-archive record: not a ZIP file.
        return None
    data = fpin.read()
    if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
        # the signature is correct and there's no comment, unpack structure
        endrec = struct.unpack(structEndArchive, data)
        endrec=list(endrec)

        # Append a blank comment and record start offset
        endrec.append("")
        endrec.append(filesize - sizeEndCentDir)

        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)

    # Either this is not a ZIP file, or it is a ZIP file with an archive
    # comment.  Search the end of the file for the "end of central directory"
    # record signature. The comment is the last item in the ZIP file and may be
    # up to 64K long.  It is assumed that the "end of central directory" magic
    # number does not appear in the comment.
    maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read()
    start = data.rfind(stringEndArchive)
    if start >= 0:
        # found the magic number; attempt to unpack and interpret
        recData = data[start:start+sizeEndCentDir]
        endrec = list(struct.unpack(structEndArchive, recData))
        comment = data[start+sizeEndCentDir:]
        # check that comment length is correct
        if endrec[_ECD_COMMENT_SIZE] == len(comment):
            # Append the archive comment and start offset
            endrec.append(comment)
            endrec.append(maxCommentStart + start)

            # Try to read the "Zip64 end of central directory" structure
            return _EndRecData64(fpin, maxCommentStart + start - filesize,
                                 endrec)

    # Unable to find a valid end of central directory structure
    return
class ZipInfo (object):
    """Class with attributes describing each file in the ZIP archive."""

    # __slots__ keeps per-instance memory low: archives can hold many members.
    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
    )

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        """Initialize header fields to spec-mandated defaults.

        filename: member name as stored in the archive.
        date_time: 6-tuple (year, month, day, hour, min, sec); DOS format
                   cannot represent years before 1980.
        """
        self.orig_filename = filename   # Original file name in archive

        # Terminate the file name at the first null byte.  Null bytes in file
        # names are used as tricks by viruses in archives.
        null_byte = filename.find(chr(0))
        if null_byte >= 0:
            filename = filename[0:null_byte]
        # This is used to ensure paths in generated ZIP files always use
        # forward slashes as the directory separator, as required by the
        # ZIP format specification.
        if os.sep != "/" and os.sep in filename:
            filename = filename.replace(os.sep, "/")

        self.filename = filename        # Normalized file name
        self.date_time = date_time      # year, month, day, hour, min, sec

        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self.comment = ""               # Comment for each file
        self.extra = ""                 # ZIP extra data
        if sys.platform == 'win32':
            self.create_system = 0          # System which created ZIP archive
        else:
            # Assume everything else is unix-y
            self.create_system = 3          # System which created ZIP archive
        self.create_version = 20        # Version which created ZIP archive
        self.extract_version = 20       # Version needed to extract archive
        self.reserved = 0               # Must be zero
        self.flag_bits = 0              # ZIP flag bits
        self.volume = 0                 # Volume number of file header
        self.internal_attr = 0          # Internal attributes
        self.external_attr = 0          # External file attributes
        # Other attributes are set by class ZipFile:
        # header_offset         Byte offset to the file header
        # CRC                   CRC-32 of the uncompressed file
        # compress_size         Size of the compressed file
        # file_size             Size of the uncompressed file

    def FileHeader(self):
        """Return the per-file header as a string."""
        # Pack the modification time into the 16-bit DOS date/time fields
        # (2-second resolution for seconds, years offset from 1980).
        dt = self.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size

        extra = self.extra

        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            # File is larger than what fits into a 4 byte integer,
            # fall back to the ZIP64 extension
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                    1, struct.calcsize(fmt)-4, file_size, compress_size)
            file_size = 0xffffffff
            compress_size = 0xffffffff
            self.extract_version = max(45, self.extract_version)
            # NOTE(review): clamps create_version from extract_version, not
            # create_version -- matches upstream CPython 2.x, but looks odd;
            # confirm intent before changing.
            self.create_version = max(45, self.extract_version)

        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                 self.extract_version, self.reserved, flag_bits,
                 self.compress_type, dostime, dosdate, CRC,
                 compress_size, file_size,
                 len(filename), len(extra))
        return header + filename + extra

    def _encodeFilenameFlags(self):
        # Return (encoded filename, flag bits).  Non-ASCII unicode names are
        # stored UTF-8 with the 0x800 "language encoding" flag set.
        if isinstance(self.filename, unicode):
            try:
                return self.filename.encode('ascii'), self.flag_bits
            except UnicodeEncodeError:
                return self.filename.encode('utf-8'), self.flag_bits | 0x800
        else:
            return self.filename, self.flag_bits

    def _decodeFilename(self):
        # Inverse of _encodeFilenameFlags: honor the UTF-8 flag on read.
        if self.flag_bits & 0x800:
            return self.filename.decode('utf-8')
        else:
            return self.filename

    def _decodeExtra(self):
        # Try to decode the extra field.
        extra = self.extra
        unpack = struct.unpack
        while len(extra) >= 4:
            # Each extra-field record is <header-id><data-size><data>.
            tp, ln = unpack('<HH', extra[:4])
            if tp == 1:
                # Header ID 1 is the ZIP64 extended information record; it
                # carries only the fields whose 32-bit slots overflowed, in a
                # fixed order consumed below via idx.
                if ln >= 24:
                    counts = unpack('<QQQ', extra[4:28])
                elif ln == 16:
                    counts = unpack('<QQ', extra[4:20])
                elif ln == 8:
                    counts = unpack('<Q', extra[4:12])
                elif ln == 0:
                    counts = ()
                else:
                    raise RuntimeError, "Corrupt extra field %s"%(ln,)

                idx = 0

                # ZIP64 extension (large files and/or large archives)
                if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
                    self.file_size = counts[idx]
                    idx += 1

                if self.compress_size == 0xFFFFFFFFL:
                    self.compress_size = counts[idx]
                    idx += 1

                if self.header_offset == 0xffffffffL:
                    # NOTE(review): 'old' is assigned but never used -- kept
                    # from upstream.
                    old = self.header_offset
                    self.header_offset = counts[idx]
                    idx+=1

            extra = extra[ln+4:]
class _ZipDecrypter:
"""Class to handle decryption of files stored within a ZIP archive.
ZIP supports a password-based form of encryption. Even though known
plaintext attacks have been found against it, it is still useful
to be able to get data out of such a file.
Usage:
zd = _ZipDecrypter(mypwd)
plain_char = zd(cypher_char)
plain_text = map(zd, cypher_text)
"""
def _GenerateCRCTable():
"""Generate a CRC-32 table.
ZIP encryption uses the CRC32 one-byte primitive for scrambling some
internal keys. We noticed that a direct implementation is faster than
relying on binascii.crc32().
"""
poly = 0xedb88320
table = [0] * 256
for i in range(256):
crc = i
for j in range(8):
if crc & 1:
crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
else:
crc = ((crc >> 1) & 0x7FFFFFFF)
table[i] = crc
return table
crctable = _GenerateCRCTable()
def _crc32(self, ch, crc):
"""Compute the CRC32 primitive on one byte."""
return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]
def __init__(self, pwd):
self.key0 = 305419896
self.key1 = 591751049
self.key2 = 878082192
for p in pwd:
self._UpdateKeys(p)
def _UpdateKeys(self, c):
self.key0 = self._crc32(c, self.key0)
self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
self.key1 = (self.key1 * 134775813 + 1) & 4294967295
self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)
def __call__(self, c):
"""Decrypt a single character."""
c = ord(c)
k = self.key2 | 2
c = c ^ (((k * (k^1)) >> 8) & 255)
c = chr(c)
self._UpdateKeys(c)
return c
class ZipExtFile(io.BufferedIOBase):
    """File-like object for reading an archive member.
       Is returned by ZipFile.open().
    """

    # Max size supported by decompressor.
    # NOTE(review): '-' binds tighter than '<<', so this evaluates to
    # 1 << 30, not (1 << 31) - 1.  Matches upstream CPython; confirm
    # before "fixing".
    MAX_N = 1 << 31 - 1

    # Read from compressed files in 4k blocks.
    MIN_READ_SIZE = 4096

    # Search for universal newlines or line chunks.
    PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')

    def __init__(self, fileobj, mode, zipinfo, decrypter=None):
        """Wrap *fileobj*, positioned at the member's data, for reading.

        zipinfo supplies sizes/compression type; decrypter (optional) is a
        _ZipDecrypter already primed with the password.
        """
        self._fileobj = fileobj
        self._decrypter = decrypter

        self._compress_type = zipinfo.compress_type
        self._compress_size = zipinfo.compress_size
        self._compress_left = zipinfo.compress_size

        if self._compress_type == ZIP_DEFLATED:
            # -15: raw deflate stream, no zlib header/trailer.
            self._decompressor = zlib.decompressobj(-15)
        self._unconsumed = ''

        self._readbuffer = ''
        self._offset = 0

        self._universal = 'U' in mode
        self.newlines = None

        # Adjust read size for encrypted files since the first 12 bytes
        # are for the encryption/password information.
        if self._decrypter is not None:
            self._compress_left -= 12

        self.mode = mode
        self.name = zipinfo.filename

        if hasattr(zipinfo, 'CRC'):
            self._expected_crc = zipinfo.CRC
            self._running_crc = crc32(b'') & 0xffffffff
        else:
            self._expected_crc = None

    def readline(self, limit=-1):
        """Read and return a line from the stream.

        If limit is specified, at most limit bytes will be read.
        """

        if not self._universal and limit < 0:
            # Shortcut common case - newline found in buffer.
            i = self._readbuffer.find('\n', self._offset) + 1
            if i > 0:
                line = self._readbuffer[self._offset: i]
                self._offset = i
                return line

        if not self._universal:
            return io.BufferedIOBase.readline(self, limit)

        line = ''
        while limit < 0 or len(line) < limit:
            readahead = self.peek(2)
            if readahead == '':
                return line

            #
            # Search for universal newlines or line chunks.
            #
            # The pattern returns either a line chunk or a newline, but not
            # both. Combined with peek(2), we are assured that the sequence
            # '\r\n' is always retrieved completely and never split into
            # separate newlines - '\r', '\n' due to coincidental readaheads.
            #
            match = self.PATTERN.search(readahead)
            newline = match.group('newline')
            if newline is not None:
                if self.newlines is None:
                    self.newlines = []
                if newline not in self.newlines:
                    self.newlines.append(newline)
                self._offset += len(newline)
                return line + '\n'

            chunk = match.group('chunk')
            if limit >= 0:
                chunk = chunk[: limit - len(line)]

            self._offset += len(chunk)
            line += chunk

        return line

    def peek(self, n=1):
        """Returns buffered bytes without advancing the position."""
        if n > len(self._readbuffer) - self._offset:
            chunk = self.read(n)
            # read() consumed from the buffer; rewind so the bytes stay.
            self._offset -= len(chunk)

        # Return up to 512 bytes to reduce allocation overhead for tight loops.
        return self._readbuffer[self._offset: self._offset + 512]

    def readable(self):
        # Required io.BufferedIOBase hook; this object is read-only.
        return True

    def read(self, n=-1):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
        """
        buf = ''
        if n is None:
            n = -1
        while True:
            if n < 0:
                data = self.read1(n)
            elif n > len(buf):
                data = self.read1(n - len(buf))
            else:
                return buf
            if len(data) == 0:
                return buf
            buf += data

    def _update_crc(self, newdata, eof):
        # Update the CRC using the given data.
        if self._expected_crc is None:
            # No need to compute the CRC if we don't have a reference value
            return
        self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
        # Check the CRC if we're at the end of the file
        if eof and self._running_crc != self._expected_crc:
            raise BadZipfile("Bad CRC-32 for file %r" % self.name)

    def read1(self, n):
        """Read up to n bytes with at most one read() system call."""

        # Simplify algorithm (branching) by transforming negative n to large n.
        if n < 0 or n is None:
            n = self.MAX_N

        # Bytes available in read buffer.
        len_readbuffer = len(self._readbuffer) - self._offset

        # Read from file.
        if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed):
            nbytes = n - len_readbuffer - len(self._unconsumed)
            nbytes = max(nbytes, self.MIN_READ_SIZE)
            nbytes = min(nbytes, self._compress_left)

            data = self._fileobj.read(nbytes)
            self._compress_left -= len(data)

            if data and self._decrypter is not None:
                # Decrypt byte-by-byte through the stream cipher.
                data = ''.join(map(self._decrypter, data))

            if self._compress_type == ZIP_STORED:
                self._update_crc(data, eof=(self._compress_left==0))
                self._readbuffer = self._readbuffer[self._offset:] + data
                self._offset = 0
            else:
                # Prepare deflated bytes for decompression.
                self._unconsumed += data

        # Handle unconsumed data.
        if (len(self._unconsumed) > 0 and n > len_readbuffer and
            self._compress_type == ZIP_DEFLATED):
            data = self._decompressor.decompress(
                self._unconsumed,
                max(n - len_readbuffer, self.MIN_READ_SIZE)
            )

            self._unconsumed = self._decompressor.unconsumed_tail
            eof = len(self._unconsumed) == 0 and self._compress_left == 0
            if eof:
                data += self._decompressor.flush()

            self._update_crc(data, eof=eof)
            self._readbuffer = self._readbuffer[self._offset:] + data
            self._offset = 0

        # Read from buffer.
        data = self._readbuffer[self._offset: self._offset + n]
        self._offset += len(data)
        return data
class ZipFile:
    """ Class with methods to open, read, write, close, list zip files.

    z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)

    file: Either the path to the file, or a file-like object.
          If it is a path, the file will be opened and closed by ZipFile.
    mode: The mode can be either read "r", write "w" or append "a".
    compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
    allowZip64: if True ZipFile will create files with ZIP64 extensions when
                needed, otherwise it will raise an exception when this would
                be necessary.
    """

    fp = None                   # Set here since __del__ checks it

    def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
        """Open the ZIP file with mode read "r", write "w" or append "a"."""
        if mode not in ("r", "w", "a"):
            raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')

        if compression == ZIP_STORED:
            pass
        elif compression == ZIP_DEFLATED:
            if not zlib:
                raise RuntimeError,\
                      "Compression requires the (missing) zlib module"
        else:
            raise RuntimeError, "That compression method is not supported"

        self._allowZip64 = allowZip64
        self._didModify = False
        self.debug = 0  # Level of printing: 0 through 3
        self.NameToInfo = {}    # Find file info given name
        self.filelist = []      # List of ZipInfo instances for archive
        self.compression = compression  # Method of compression
        # 'key' is mode with any 'b' stripped, reduced to its first char.
        self.mode = key = mode.replace('b', '')[0]
        self.pwd = None
        self.comment = ''

        # Check if we were passed a file-like object
        if isinstance(file, basestring):
            self._filePassed = 0
            self.filename = file
            modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
            try:
                self.fp = open(file, modeDict[mode])
            except IOError:
                if mode == 'a':
                    # Append to a nonexistent file: fall back to write mode.
                    mode = key = 'w'
                    self.fp = open(file, modeDict[mode])
                else:
                    raise
        else:
            self._filePassed = 1
            self.fp = file
            self.filename = getattr(file, 'name', None)

        if key == 'r':
            self._GetContents()
        elif key == 'w':
            # set the modified flag so central directory gets written
            # even if no files are added to the archive
            self._didModify = True
        elif key == 'a':
            try:
                # See if file is a zip file
                self._RealGetContents()
                # seek to start of directory and overwrite
                self.fp.seek(self.start_dir, 0)
            except BadZipfile:
                # file is not a zip file, just append
                self.fp.seek(0, 2)

                # set the modified flag so central directory gets written
                # even if no files are added to the archive
                self._didModify = True
        else:
            if not self._filePassed:
                self.fp.close()
                self.fp = None
            raise RuntimeError, 'Mode must be "r", "w" or "a"'

    def __enter__(self):
        # Context-manager support: "with ZipFile(...) as z".
        return self

    def __exit__(self, type, value, traceback):
        # Always close on context exit, even when an exception is active.
        self.close()

    def _GetContents(self):
        """Read the directory, making sure we close the file if the format
        is bad."""
        try:
            self._RealGetContents()
        except BadZipfile:
            if not self._filePassed:
                self.fp.close()
                self.fp = None
            raise

    def _RealGetContents(self):
        """Read in the table of contents for the ZIP file."""
        fp = self.fp
        try:
            endrec = _EndRecData(fp)
        except IOError:
            raise BadZipfile("File is not a zip file")
        if not endrec:
            raise BadZipfile, "File is not a zip file"
        if self.debug > 1:
            print endrec
        size_cd = endrec[_ECD_SIZE]             # bytes in central directory
        offset_cd = endrec[_ECD_OFFSET]         # offset of central directory
        self.comment = endrec[_ECD_COMMENT]     # archive comment

        # "concat" is zero, unless zip was concatenated to another file
        concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
        if endrec[_ECD_SIGNATURE] == stringEndArchive64:
            # If Zip64 extension structures are present, account for them
            concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)

        if self.debug > 2:
            inferred = concat + offset_cd
            print "given, inferred, offset", offset_cd, inferred, concat
        # self.start_dir:  Position of start of central directory
        self.start_dir = offset_cd + concat
        fp.seek(self.start_dir, 0)
        # Slurp the whole central directory and parse it from memory.
        data = fp.read(size_cd)
        fp = cStringIO.StringIO(data)
        total = 0
        while total < size_cd:
            centdir = fp.read(sizeCentralDir)
            if centdir[0:4] != stringCentralDir:
                raise BadZipfile, "Bad magic number for central directory"
            centdir = struct.unpack(structCentralDir, centdir)
            if self.debug > 2:
                print centdir
            filename = fp.read(centdir[_CD_FILENAME_LENGTH])
            # Create ZipInfo instance to store file information
            x = ZipInfo(filename)
            x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
            (x.create_version, x.create_system, x.extract_version, x.reserved,
                x.flag_bits, x.compress_type, t, d,
                x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            x.volume, x.internal_attr, x.external_attr = centdir[15:18]
            # Convert date/time code to (year, month, day, hour, min, sec)
            x._raw_time = t
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                                     t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )

            x._decodeExtra()
            # Adjust for any data prepended to the archive (self-extractors).
            x.header_offset = x.header_offset + concat
            x.filename = x._decodeFilename()
            self.filelist.append(x)
            self.NameToInfo[x.filename] = x

            # update total bytes read from central directory
            total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
                     + centdir[_CD_EXTRA_FIELD_LENGTH]
                     + centdir[_CD_COMMENT_LENGTH])

            if self.debug > 2:
                print "total", total

    def namelist(self):
        """Return a list of file names in the archive."""
        l = []
        for data in self.filelist:
            l.append(data.filename)
        return l

    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive."""
        return self.filelist

    def printdir(self):
        """Print a table of contents for the zip file."""
        print "%-46s %19s %12s" % ("File Name", "Modified    ", "Size")
        for zinfo in self.filelist:
            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
            print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)

    def testzip(self):
        """Read all the files and check the CRC.

        Returns the name of the first bad file, or None when all members
        read cleanly (CRC checking happens inside ZipExtFile.read).
        """
        chunk_size = 2 ** 20
        for zinfo in self.filelist:
            try:
                # Read by chunks, to avoid an OverflowError or a
                # MemoryError with very large embedded files.
                f = self.open(zinfo.filename, "r")
                while f.read(chunk_size):     # Check CRC-32
                    pass
            except BadZipfile:
                return zinfo.filename

    def getinfo(self, name):
        """Return the instance of ZipInfo given 'name'."""
        info = self.NameToInfo.get(name)
        if info is None:
            raise KeyError(
                'There is no item named %r in the archive' % name)

        return info

    def setpassword(self, pwd):
        """Set default password for encrypted files."""
        self.pwd = pwd

    def read(self, name, pwd=None):
        """Return file bytes (as a string) for name."""
        return self.open(name, "r", pwd).read()

    def open(self, name, mode="r", pwd=None):
        """Return file-like object for 'name'."""
        if mode not in ("r", "U", "rU"):
            raise RuntimeError, 'open() requires mode "r", "U", or "rU"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to read ZIP archive that was already closed"

        # Only open a new file for instances where we were not
        # given a file object in the constructor
        if self._filePassed:
            zef_file = self.fp
        else:
            zef_file = open(self.filename, 'rb')

        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        else:
            # Get info object for name
            zinfo = self.getinfo(name)

        zef_file.seek(zinfo.header_offset, 0)

        # Skip the file header:
        fheader = zef_file.read(sizeFileHeader)
        if fheader[0:4] != stringFileHeader:
            raise BadZipfile, "Bad magic number for file header"

        fheader = struct.unpack(structFileHeader, fheader)
        fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
        if fheader[_FH_EXTRA_FIELD_LENGTH]:
            zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])

        # Sanity check: local header name must match the central directory.
        if fname != zinfo.orig_filename:
            raise BadZipfile, \
                      'File name in directory "%s" and header "%s" differ.' % (
                          zinfo.orig_filename, fname)

        # check for encrypted flag & handle password
        is_encrypted = zinfo.flag_bits & 0x1
        zd = None
        if is_encrypted:
            if not pwd:
                pwd = self.pwd
            if not pwd:
                raise RuntimeError, "File %s is encrypted, " \
                      "password required for extraction" % name

            zd = _ZipDecrypter(pwd)
            # The first 12 bytes in the cypher stream is an encryption header
            #  used to strengthen the algorithm. The first 11 bytes are
            #  completely random, while the 12th contains the MSB of the CRC,
            #  or the MSB of the file time depending on the header type
            #  and is used to check the correctness of the password.
            bytes = zef_file.read(12)
            h = map(zd, bytes[0:12])
            if zinfo.flag_bits & 0x8:
                # compare against the file type from extended local headers
                check_byte = (zinfo._raw_time >> 8) & 0xff
            else:
                # compare against the CRC otherwise
                check_byte = (zinfo.CRC >> 24) & 0xff
            if ord(h[11]) != check_byte:
                raise RuntimeError("Bad password for file", name)

        return ZipExtFile(zef_file, mode, zinfo, zd)

    def extract(self, member, path=None, pwd=None):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a ZipInfo object. You can
           specify a different directory using `path'.
        """
        if not isinstance(member, ZipInfo):
            member = self.getinfo(member)

        if path is None:
            path = os.getcwd()

        return self._extract_member(member, path, pwd)

    def extractall(self, path=None, members=None, pwd=None):
        """Extract all members from the archive to the current working
           directory. `path' specifies a different directory to extract to.
           `members' is optional and must be a subset of the list returned
           by namelist().
        """
        if members is None:
            members = self.namelist()

        for zipinfo in members:
            self.extract(zipinfo, path, pwd)

    def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
           file on the path targetpath.
        """
        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        # Strip trailing path separator, unless it represents the root.
        if (targetpath[-1:] in (os.path.sep, os.path.altsep)
            and len(os.path.splitdrive(targetpath)[1]) > 1):
            targetpath = targetpath[:-1]

        # don't include leading "/" from file name if present
        if member.filename[0] == '/':
            targetpath = os.path.join(targetpath, member.filename[1:])
        else:
            targetpath = os.path.join(targetpath, member.filename)

        targetpath = os.path.normpath(targetpath)

        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)

        if member.filename[-1] == '/':
            # Directory entry: just ensure the directory exists.
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath

        source = self.open(member, pwd=pwd)
        target = file(targetpath, "wb")
        shutil.copyfileobj(source, target)
        source.close()
        target.close()

        return targetpath

    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive."""
        if zinfo.filename in self.NameToInfo:
            if self.debug:      # Warning for duplicate names
                print "Duplicate name:", zinfo.filename
        if self.mode not in ("w", "a"):
            raise RuntimeError, 'write() requires mode "w" or "a"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to write ZIP archive that was already closed"
        if zinfo.compress_type == ZIP_DEFLATED and not zlib:
            raise RuntimeError, \
                  "Compression requires the (missing) zlib module"
        if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
            raise RuntimeError, \
                  "That compression method is not supported"
        if zinfo.file_size > ZIP64_LIMIT:
            if not self._allowZip64:
                raise LargeZipFile("Filesize would require ZIP64 extensions")
        if zinfo.header_offset > ZIP64_LIMIT:
            if not self._allowZip64:
                raise LargeZipFile("Zipfile size would require ZIP64 extensions")

    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname."""
        if not self.fp:
            raise RuntimeError(
                  "Attempt to write to ZIP archive that was already closed")

        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        # Strip leading separators so archive names are always relative.
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L      # Unix attributes
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type

        zinfo.file_size = st.st_size
        zinfo.flag_bits = 0x00
        zinfo.header_offset = self.fp.tell()    # Start of header bytes

        self._writecheck(zinfo)
        self._didModify = True

        if isdir:
            # Directory entries carry no data; write the header only.
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader())
            return

        with open(filename, "rb") as fp:
            # Must overwrite CRC and sizes with correct data later
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            zinfo.file_size = file_size = 0
            self.fp.write(zinfo.FileHeader())
            if zinfo.compress_type == ZIP_DEFLATED:
                cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                     zlib.DEFLATED, -15)
            else:
                cmpr = None
            # Stream the file through in 8 KiB chunks, tracking CRC/sizes.
            while 1:
                buf = fp.read(1024 * 8)
                if not buf:
                    break
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size = compress_size + len(buf)
                self.fp.write(buf)
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        # Seek backwards and write CRC and file sizes
        position = self.fp.tell()       # Preserve current position in file
        # +14 = offset of the CRC field within the local file header.
        self.fp.seek(zinfo.header_offset + 14, 0)
        self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
              zinfo.file_size))
        self.fp.seek(position, 0)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo

    def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
        """Write a file into the archive.  The contents is the string
        'bytes'.  'zinfo_or_arcname' is either a ZipInfo instance or
        the name of the file in the archive."""
        if not isinstance(zinfo_or_arcname, ZipInfo):
            zinfo = ZipInfo(filename=zinfo_or_arcname,
                            date_time=time.localtime(time.time())[:6])

            zinfo.compress_type = self.compression
            zinfo.external_attr = 0600 << 16
        else:
            zinfo = zinfo_or_arcname

        if not self.fp:
            raise RuntimeError(
                  "Attempt to write to ZIP archive that was already closed")

        if compress_type is not None:
            zinfo.compress_type = compress_type

        zinfo.file_size = len(bytes)            # Uncompressed size
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self._writecheck(zinfo)
        self._didModify = True
        zinfo.CRC = crc32(bytes) & 0xffffffff       # CRC-32 checksum
        if zinfo.compress_type == ZIP_DEFLATED:
            co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                 zlib.DEFLATED, -15)
            bytes = co.compress(bytes) + co.flush()
            zinfo.compress_size = len(bytes)    # Compressed size
        else:
            zinfo.compress_size = zinfo.file_size
        # NOTE(review): header_offset was already set above and fp has not
        # moved since; this second assignment appears redundant -- kept
        # from upstream.
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self.fp.write(zinfo.FileHeader())
        self.fp.write(bytes)
        self.fp.flush()
        if zinfo.flag_bits & 0x08:
            # Write CRC and file sizes after the file data
            self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
                  zinfo.file_size))
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo

    def __del__(self):
        """Call the "close()" method in case the user forgot."""
        self.close()

    def close(self):
        """Close the file, and for mode "w" and "a" write the ending
        records."""
        if self.fp is None:
            return

        if self.mode in ("w", "a") and self._didModify: # write ending records
            count = 0
            pos1 = self.fp.tell()
            for zinfo in self.filelist:         # write central directory
                count = count + 1
                dt = zinfo.date_time
                dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
                dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
                extra = []
                if zinfo.file_size > ZIP64_LIMIT \
                        or zinfo.compress_size > ZIP64_LIMIT:
                    extra.append(zinfo.file_size)
                    extra.append(zinfo.compress_size)
                    file_size = 0xffffffff
                    compress_size = 0xffffffff
                else:
                    file_size = zinfo.file_size
                    compress_size = zinfo.compress_size

                if zinfo.header_offset > ZIP64_LIMIT:
                    extra.append(zinfo.header_offset)
                    header_offset = 0xffffffffL
                else:
                    header_offset = zinfo.header_offset

                extra_data = zinfo.extra
                if extra:
                    # Append a ZIP64 field to the extra's
                    extra_data = struct.pack(
                            '<HH' + 'Q'*len(extra),
                            1, 8*len(extra), *extra) + extra_data

                    # ZIP64 entries need extract/create version >= 4.5.
                    extract_version = max(45, zinfo.extract_version)
                    create_version = max(45, zinfo.create_version)
                else:
                    extract_version = zinfo.extract_version
                    create_version = zinfo.create_version

                try:
                    filename, flag_bits = zinfo._encodeFilenameFlags()
                    centdir = struct.pack(structCentralDir,
                     stringCentralDir, create_version,
                     zinfo.create_system, extract_version, zinfo.reserved,
                     flag_bits, zinfo.compress_type, dostime, dosdate,
                     zinfo.CRC, compress_size, file_size,
                     len(filename), len(extra_data), len(zinfo.comment),
                     0, zinfo.internal_attr, zinfo.external_attr,
                     header_offset)
                except DeprecationWarning:
                    # Dump the offending values before re-raising, to aid
                    # debugging of out-of-range struct fields.
                    print >>sys.stderr, (structCentralDir,
                     stringCentralDir, create_version,
                     zinfo.create_system, extract_version, zinfo.reserved,
                     zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
                     zinfo.CRC, compress_size, file_size,
                     len(zinfo.filename), len(extra_data), len(zinfo.comment),
                     0, zinfo.internal_attr, zinfo.external_attr,
                     header_offset)
                    raise
                self.fp.write(centdir)
                self.fp.write(filename)
                self.fp.write(extra_data)
                self.fp.write(zinfo.comment)

            pos2 = self.fp.tell()
            # Write end-of-zip-archive record
            centDirCount = count
            centDirSize = pos2 - pos1
            centDirOffset = pos1
            if (centDirCount >= ZIP_FILECOUNT_LIMIT or
                centDirOffset > ZIP64_LIMIT or
                centDirSize > ZIP64_LIMIT):
                # Need to write the ZIP64 end-of-archive records
                zip64endrec = struct.pack(
                        structEndArchive64, stringEndArchive64,
                        44, 45, 45, 0, 0, centDirCount, centDirCount,
                        centDirSize, centDirOffset)
                self.fp.write(zip64endrec)

                zip64locrec = struct.pack(
                        structEndArchive64Locator,
                        stringEndArchive64Locator, 0, pos2, 1)
                self.fp.write(zip64locrec)
                # The plain end record then carries saturated 16/32-bit values.
                centDirCount = min(centDirCount, 0xFFFF)
                centDirSize = min(centDirSize, 0xFFFFFFFF)
                centDirOffset = min(centDirOffset, 0xFFFFFFFF)

            # check for valid comment length
            if len(self.comment) >= ZIP_MAX_COMMENT:
                if self.debug > 0:
                    # NOTE(review): msg is built but never printed/logged --
                    # kept from upstream.
                    msg = 'Archive comment is too long; truncating to %d bytes' \
                          % ZIP_MAX_COMMENT
                self.comment = self.comment[:ZIP_MAX_COMMENT]

            endrec = struct.pack(structEndArchive, stringEndArchive,
                                 0, 0, centDirCount, centDirCount,
                                 centDirSize, centDirOffset, len(self.comment))
            self.fp.write(endrec)
            self.fp.write(self.comment)
            self.fp.flush()

        if not self._filePassed:
            self.fp.close()
        self.fp = None
class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""
    def writepy(self, pathname, basename = ""):
        """Add all files from "pathname" to the ZIP archive.
        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive. If pathname is a plain
        directory, listdir *.py and enter all modules. Else, pathname
        must be a Python *.py file and the module will be put into the
        archive. Added modules are always module.pyo or module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
        """
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # This is a package directory, add it
                if basename:
                    basename = "%s/%s" % (basename, name)
                else:
                    basename = name
                if self.debug:
                    print "Adding package in", pathname, "as", basename
                # Strip the trailing ".py"; _get_codename appends .pyc/.pyo.
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print "Adding", arcname
                self.write(fname, arcname)
                dirlist = os.listdir(pathname)
                # __init__.py was already written above.
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
                            self.writepy(path, basename)  # Recursive call
                    elif ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print "Adding", arcname
                        self.write(fname, arcname)
            else:
                # This is NOT a package directory, add its files at top level
                if self.debug:
                    print "Adding files from directory", pathname
                for filename in os.listdir(pathname):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                                            basename)
                        if self.debug:
                            print "Adding", arcname
                        self.write(fname, arcname)
        else:
            # A single file: it must be a .py source file.
            if pathname[-3:] != ".py":
                raise RuntimeError, \
                      'Files added with writepy() must end with ".py"'
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print "Adding file", arcname
            self.write(fname, arcname)
    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.
        Given a module name path, return the correct file path and
        archive name, compiling if necessary. For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        file_pyo = pathname + ".pyo"
        # Prefer an up-to-date .pyo; otherwise fall back to the .pyc,
        # recompiling it first if it is missing or older than the source.
        if os.path.isfile(file_pyo) and \
                            os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
            fname = file_pyo    # Use .pyo file
        elif not os.path.isfile(file_pyc) or \
             os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
            import py_compile
            if self.debug:
                print "Compiling", file_py
            try:
                # doraise=True so failures surface as PyCompileError.
                py_compile.compile(file_py, file_pyc, None, True)
            except py_compile.PyCompileError,err:
                # Report the compile error but still add the (stale) .pyc.
                print err.msg
            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
def main(args = None):
    """Command-line interface: list (-l), test (-t), extract (-e) or
    create (-c) a ZIP archive. Exits with status 1 on usage errors."""
    import textwrap
    USAGE=textwrap.dedent("""\
        Usage:
            zipfile.py -l zipfile.zip        # Show listing of a zipfile
            zipfile.py -t zipfile.zip        # Test if a zipfile is valid
            zipfile.py -e zipfile.zip target # Extract zipfile into target dir
            zipfile.py -c zipfile.zip src ... # Create zipfile from sources
        """)
    if args is None:
        args = sys.argv[1:]
    if not args or args[0] not in ('-l', '-c', '-e', '-t'):
        print USAGE
        sys.exit(1)
    if args[0] == '-l':
        # List the archive's contents.
        if len(args) != 2:
            print USAGE
            sys.exit(1)
        zf = ZipFile(args[1], 'r')
        zf.printdir()
        zf.close()
    elif args[0] == '-t':
        # Check the CRCs of all members; report the first bad one.
        if len(args) != 2:
            print USAGE
            sys.exit(1)
        zf = ZipFile(args[1], 'r')
        badfile = zf.testzip()
        if badfile:
            print("The following enclosed file is corrupted: {!r}".format(badfile))
        print "Done testing"
    elif args[0] == '-e':
        # Extract every member beneath the target directory.
        if len(args) != 3:
            print USAGE
            sys.exit(1)
        zf = ZipFile(args[1], 'r')
        out = args[2]
        for path in zf.namelist():
            if path.startswith('./'):
                tgt = os.path.join(out, path[2:])
            else:
                tgt = os.path.join(out, path)
            # NOTE(review): member names are joined onto the target without
            # rejecting absolute paths or '..' components -- only extract
            # trusted archives with this tool.
            tgtdir = os.path.dirname(tgt)
            if not os.path.exists(tgtdir):
                os.makedirs(tgtdir)
            with open(tgt, 'wb') as fp:
                fp.write(zf.read(path))
        zf.close()
    elif args[0] == '-c':
        # Create a new archive from the listed files/directories.
        if len(args) < 3:
            print USAGE
            sys.exit(1)
        def addToZip(zf, path, zippath):
            # Add a file, or recurse into a directory; anything else
            # (sockets, missing paths, ...) is silently skipped.
            if os.path.isfile(path):
                zf.write(path, zippath, ZIP_DEFLATED)
            elif os.path.isdir(path):
                for nm in os.listdir(path):
                    addToZip(zf,
                            os.path.join(path, nm), os.path.join(zippath, nm))
            # else: ignore
        zf = ZipFile(args[1], 'w', allowZip64=True)
        for src in args[2:]:
            addToZip(zf, src, os.path.basename(src))
        zf.close()
if __name__ == "__main__":
    main()
| mit |
nazo/ansible | lib/ansible/modules/windows/win_robocopy.py | 72 | 4833 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Corwin Brown <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_robocopy
version_added: "2.2"
short_description: Synchronizes the contents of two directories using Robocopy.
description:
- Synchronizes the contents of two directories on the remote machine. Under the hood this just calls out to RoboCopy, since that should be available
on most modern Windows Systems.
options:
src:
description:
- Source file/directory to sync.
required: true
dest:
description:
- Destination file/directory to sync (Will receive contents of src).
required: true
recurse:
description:
- Includes all subdirectories (Toggles the `/e` flag to RoboCopy). If "flags" is set, this will be ignored.
choices:
- true
- false
default: false
required: false
purge:
description:
- Deletes any files/directories found in the destination that do not exist in the source (Toggles the `/purge` flag to RoboCopy). If "flags" is
set, this will be ignored.
choices:
- true
- false
default: false
required: false
flags:
description:
- Directly supply Robocopy flags. If set, purge and recurse will be ignored.
default: None
required: false
author: Corwin Brown (@blakfeld)
notes:
- This is not a complete port of the "synchronize" module. Unlike the "synchronize" module this only performs the sync/copy on the remote machine,
not from the master to the remote machine.
- This module does not currently support all Robocopy flags.
- Works on Windows 7, Windows 8, Windows Server 2k8, and Windows Server 2k12
'''
EXAMPLES = r'''
- name: Sync the contents of one directory to another
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
- name: Sync the contents of one directory to another, including subdirectories
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
recurse: True
- name: Sync the contents of one directory to another, and remove any files/directories found in destination that do not exist in the source
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
purge: True
- name: Sync content in recursive mode, removing any files/directories found in destination that do not exist in the source
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
recurse: True
purge: True
- name: Sync Two Directories in recursive and purging mode, specifying additional special flags
win_robocopy:
src: C:\DirectoryOne
dest: C:\DirectoryTwo
flags: /E /PURGE /XD SOME_DIR /XF SOME_FILE /MT:32
'''
# Documented return values for this module's result (Ansible RETURN block).
# Fixes two typos in the user-visible documentation: "retuned" -> "returned"
# and "intrepreted" -> "interpreted".
RETURN = r'''
src:
    description: The Source file/directory of the sync.
    returned: always
    type: string
    sample: c:\Some\Path
dest:
    description: The Destination file/directory of the sync.
    returned: always
    type: string
    sample: c:\Some\Path
recurse:
    description: Whether or not the recurse flag was toggled.
    returned: always
    type: bool
    sample: False
purge:
    description: Whether or not the purge flag was toggled.
    returned: always
    type: bool
    sample: False
flags:
    description: Any flags passed in by the user.
    returned: always
    type: string
    sample: "/e /purge"
rc:
    description: The return code returned by robocopy.
    returned: success
    type: int
    sample: 1
output:
    description: The output of running the robocopy command.
    returned: success
    type: string
    sample: "----------------------------------------\n   ROBOCOPY     ::     Robust File Copy for Windows                              \n----------------------------------------\n"
msg:
    description: Output interpreted into a concise message.
    returned: always
    type: string
    sample: No files copied!
changed:
    description: Whether or not any changes were made.
    returned: always
    type: bool
    sample: False
'''
| gpl-3.0 |
bis12/pushmanager | tests/test_template_push.py | 3 | 7070 | import time
import testing as T
class PushTemplateTest(T.TemplateTestCase):
    """Rendering tests for the push page and its push-status partial."""
    authenticated = True
    push_page = 'push.html'
    push_status_page = 'push-status.html'
    # Request sections shown while a push is in the 'accepting' state.
    accepting_push_sections = ['blessed', 'verified', 'staged', 'added', 'pickme', 'requested']
    now = time.time()
    # Minimal push record used as template input by every test.
    basic_push = {
        'id': 0,
        'user': 'pushmaster',
        'title': 'fake_push',
        'branch': 'deploy-fake-branch',
        'state': 'accepting',
        'pushtype': 'Regular',
        'created': now,
        'modified': now,
        'extra_pings': None,
    }
    # Baseline keyword arguments for rendering the push page.
    basic_kwargs = {
        'page_title': 'fake_push_title',
        'push_contents': {},
        'available_requests': [],
        'fullrepo': 'not/a/repo',
        'override': False,
        'push_survey_url': None
    }
    # Minimal push-request record; 'user' is 'testuser'.
    basic_request = {
        'id': 0,
        'repo': 'non-existent',
        'branch': 'non-existent',
        'user': 'testuser',
        'reviewid': 0,
        'title': 'some title',
        'tags': None,
        'revision': '0' * 40,
        'state': 'requested',
        'created': now,
        'modified': now,
        'description': 'nondescript',
        'comments': 'nocomment',
        'watchers': None,
    }
    def test_include_push_status_when_accepting(self):
        """An 'accepting' push renders one status header per section."""
        tree = self.render_etree(
            self.push_page,
            push_info=self.basic_push,
            **self.basic_kwargs)
        found_h3 = []
        for h3 in tree.iter('h3'):
            T.assert_equal('status-header', h3.attrib['class'])
            T.assert_in(h3.attrib['section'], self.accepting_push_sections)
            found_h3.append(h3)
        T.assert_equal(len(self.accepting_push_sections), len(found_h3))
    def test_include_push_status_when_done(self):
        """A 'live' push collapses the status display to a single header."""
        push = dict(self.basic_push)
        push['state'] = 'live'
        tree = self.render_etree(
            self.push_page,
            push_info=push,
            **self.basic_kwargs)
        found_h3 = []
        for h3 in tree.iter('h3'):
            T.assert_equal('status-header', h3.attrib['class'])
            found_h3.append(h3)
        T.assert_equal(1, len(found_h3))
    def generate_push_contents(self, requests):
        """Map every accepting section name to the given request list.

        NOTE(review): the dict.fromkeys() initialization is immediately
        overwritten by the loop below, and every section ends up
        referencing the same `requests` list object.
        """
        push_contents = dict.fromkeys(self.accepting_push_sections, [])
        for section in self.accepting_push_sections:
            push_contents[section] = requests
        return push_contents
    def test_no_mine_on_requests_as_random_user(self):
        """Requests are not flagged 'mine' for an unrelated viewer."""
        kwargs = dict(self.basic_kwargs)
        kwargs['push_contents'] = self.generate_push_contents([self.basic_request])
        kwargs['current_user'] = 'random_user'
        with self.no_ui_modules():
            tree = self.render_etree(
                self.push_status_page,
                push_info=self.basic_push,
                **kwargs)
        found_mockreq = []
        for mockreq in tree.iter('mock'):
            # No 'class' attribute means no 'mine' highlight was applied.
            T.assert_not_in('class', mockreq.getparent().attrib.keys())
            found_mockreq.append(mockreq)
        T.assert_equal(5, len(found_mockreq))
    def test_mine_on_requests_as_request_user(self):
        """Only requests owned by the current user get the 'mine' class."""
        request = dict(self.basic_request)
        request['user'] = 'notme'
        push_contents = {}
        section_id = []
        for section in self.accepting_push_sections:
            # One owned request and one foreign request per section.
            push_contents[section] = [self.basic_request, request]
            section_id.append('%s-items' % section)
        kwargs = dict(self.basic_kwargs)
        kwargs['push_contents'] = push_contents
        kwargs['current_user'] = 'testuser'
        with self.no_ui_modules():
            tree = self.render_etree(
                self.push_status_page,
                push_info=self.basic_push,
                **kwargs)
        found_li = []
        found_mockreq = []
        for mockreq in tree.iter('mock'):
            if 'class' in mockreq.getparent().attrib:
                T.assert_equal('mine', mockreq.getparent().attrib['class'])
                found_li.append(mockreq)
            found_mockreq.append(mockreq)
        T.assert_equal(5, len(found_li))
        T.assert_equal(10, len(found_mockreq))
    def test_mine_on_requests_as_watcher(self):
        """Requests watched by the current user also get the 'mine' class."""
        request = dict(self.basic_request)
        request['watchers'] = 'watcher1'
        push_contents = {}
        section_id = []
        for section in self.accepting_push_sections:
            push_contents[section] = [request, self.basic_request]
            section_id.append('%s-items' % section)
        kwargs = dict(self.basic_kwargs)
        kwargs['push_contents'] = push_contents
        kwargs['current_user'] = 'watcher1'
        with self.no_ui_modules():
            tree = self.render_etree(
                self.push_status_page,
                push_info=self.basic_push,
                **kwargs)
        found_li = []
        found_mockreq = []
        for mockreq in tree.iter('mock'):
            if 'class' in mockreq.getparent().attrib:
                T.assert_equal('mine', mockreq.getparent().attrib['class'])
                found_li.append(mockreq)
            found_mockreq.append(mockreq)
        T.assert_equal(5, len(found_li))
        T.assert_equal(10, len(found_mockreq))
    def test_mine_on_requests_as_pushmaster(self):
        """With no current_user set, nothing is flagged as 'mine'."""
        push_contents = {}
        section_id = []
        for section in self.accepting_push_sections:
            push_contents[section] = [self.basic_request]
            section_id.append('%s-items' % section)
        kwargs = dict(self.basic_kwargs)
        kwargs['push_contents'] = push_contents
        with self.no_ui_modules():
            tree = self.render_etree(
                self.push_status_page,
                push_info=self.basic_push,
                **kwargs)
        found_mockreq = []
        for mockreq in tree.iter('mock'):
            T.assert_not_in('class', mockreq.getparent().attrib.keys())
            found_mockreq.append(mockreq)
        T.assert_equal(5, len(found_mockreq))
    def test_include_push_survey_exists(self):
        """A live push with a survey URL embeds that URL in a script tag."""
        push = dict(self.basic_push)
        push['state'] = 'live'
        kwargs = dict(**self.basic_kwargs)
        kwargs['push_survey_url'] = 'http://sometestsurvey'
        tree = self.render_etree(
            self.push_page,
            push_info=push,
            **kwargs)
        for script in tree.iter('script'):
            if script.text and kwargs['push_survey_url'] in script.text:
                break
        else:
            assert False, 'push_survey_url not found'
    def test_include_new_request_form(self):
        """The push page instantiates the NewRequestDialog UI module once."""
        with self.no_ui_modules():
            tree = self.render_etree(
                self.push_page,
                push_info=self.basic_push,
                **self.basic_kwargs)
        T.assert_exactly_one(
            *[mock.attrib['name'] for mock in tree.iter('mock')],
            truthy_fxn=lambda name: name == 'mock.NewRequestDialog()')
if __name__ == '__main__':
    # Allow running this test module directly via the project's test runner.
    T.run()
| apache-2.0 |
t794104/ansible | lib/ansible/modules/network/f5/bigip_gtm_virtual_server.py | 38 | 40714 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_gtm_virtual_server
short_description: Manages F5 BIG-IP GTM virtual servers
description:
- Manages F5 BIG-IP GTM virtual servers. A GTM server can have many virtual servers
associated with it. They are arranged in much the same way that pool members are
to pools.
version_added: 2.6
options:
name:
description:
- Specifies the name of the virtual server.
type: str
version_added: 2.6
server_name:
description:
- Specifies the name of the server that the virtual server is associated with.
type: str
version_added: 2.6
address:
description:
- Specifies the IP Address of the virtual server.
- When creating a new GTM virtual server, this parameter is required.
type: str
version_added: 2.6
port:
description:
- Specifies the service port number for the virtual server or pool member. For example,
the HTTP service is typically port 80.
- To specify all ports, use an C(*).
- When creating a new GTM virtual server, if this parameter is not specified, a
default of C(*) will be used.
type: int
translation_address:
description:
- Specifies the translation IP address for the virtual server.
- To unset this parameter, provide an empty string (C("")) as a value.
- When creating a new GTM virtual server, if this parameter is not specified, a
default of C(::) will be used.
type: str
version_added: 2.6
translation_port:
description:
- Specifies the translation port number or service name for the virtual server.
- To specify all ports, use an C(*).
- When creating a new GTM virtual server, if this parameter is not specified, a
default of C(*) will be used.
type: str
version_added: 2.6
availability_requirements:
description:
- Specifies, if you activate more than one health monitor, the number of health
monitors that must receive successful responses in order for the link to be
considered available.
type: dict
suboptions:
type:
description:
- Monitor rule type when C(monitors) is specified.
- When creating a new virtual, if this value is not specified, the default of 'all' will be used.
type: str
choices:
- all
- at_least
- require
at_least:
description:
- Specifies the minimum number of active health monitors that must be successful
before the link is considered up.
- This parameter is only relevant when a C(type) of C(at_least) is used.
- This parameter will be ignored if a type of either C(all) or C(require) is used.
type: int
number_of_probes:
description:
- Specifies the minimum number of probes that must succeed for this server to be declared up.
- When creating a new virtual server, if this parameter is specified, then the C(number_of_probers)
parameter must also be specified.
- The value of this parameter should always be B(lower) than, or B(equal to), the value of C(number_of_probers).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
type: int
number_of_probers:
description:
- Specifies the number of probers that should be used when running probes.
- When creating a new virtual server, if this parameter is specified, then the C(number_of_probes)
parameter must also be specified.
- The value of this parameter should always be B(higher) than, or B(equal to), the value of C(number_of_probers).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
type: int
version_added: 2.6
monitors:
description:
- Specifies the health monitors that the system currently uses to monitor this resource.
- When C(availability_requirements.type) is C(require), you may only have a single monitor in the
C(monitors) list.
type: list
version_added: 2.6
virtual_server_dependencies:
description:
- Specifies the virtual servers on which the current virtual server depends.
- If any of the specified servers are unavailable, the current virtual server is also listed as unavailable.
type: list
suboptions:
server:
description:
- Server which the dependant virtual server is part of.
type: str
required: True
virtual_server:
description:
- Virtual server to depend on.
type: str
required: True
version_added: 2.6
link:
description:
- Specifies a link to assign to the server or virtual server.
type: str
version_added: 2.6
limits:
description:
- Specifies resource thresholds or limit requirements at the server level.
- When you enable one or more limit settings, the system then uses that data to take servers in and out
of service.
- You can define limits for any or all of the limit settings. However, when a server does not meet the resource
threshold limit requirement, the system marks the entire server as unavailable and directs load-balancing
traffic to another resource.
- The limit settings available depend on the type of server.
type: dict
suboptions:
bits_enabled:
description:
- Whether the bits limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
packets_enabled:
description:
- Whether the packets limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
connections_enabled:
description:
- Whether the current connections limit is enabled or not.
- This parameter allows you to switch on or off the effect of the limit.
type: bool
bits_limit:
description:
- Specifies the maximum allowable data throughput rate, in bits per second, for the virtual servers on the server.
- If the network traffic volume exceeds this limit, the system marks the server as unavailable.
type: int
packets_limit:
description:
- Specifies the maximum allowable data transfer rate, in packets per second, for the virtual servers on the server.
- If the network traffic volume exceeds this limit, the system marks the server as unavailable.
type: int
connections_limit:
description:
- Specifies the maximum number of concurrent connections, combined, for all of the virtual servers on the server.
- If the connections exceed this limit, the system marks the server as unavailable.
type: int
version_added: 2.6
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
version_added: 2.6
state:
description:
- When C(present), ensures that the resource exists.
- When C(absent), ensures the resource is removed.
type: str
choices:
- present
- absent
- enabled
- disabled
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Enable virtual server
bigip_gtm_virtual_server:
server_name: server1
name: my-virtual-server
state: enabled
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
'''
RETURN = r'''
server_name:
description: The server name associated with the virtual server.
returned: changed
type: str
sample: /Common/my-gtm-server
address:
description: The new address of the resource.
returned: changed
type: str
sample: 1.2.3.4
port:
description: The new port of the resource.
returned: changed
type: int
sample: 500
translation_address:
description: The new translation address of the resource.
returned: changed
type: int
sample: 500
translation_port:
description: The new translation port of the resource.
returned: changed
type: int
sample: 500
availability_requirements:
description: The new availability requirement configurations for the resource.
returned: changed
type: dict
sample: {'type': 'all'}
monitors:
description: The new list of monitors for the resource.
returned: changed
type: list
sample: ['/Common/monitor1', '/Common/monitor2']
virtual_server_dependencies:
description: The new list of virtual server dependencies for the resource
returned: changed
type: list
sample: ['/Common/vs1', '/Common/vs2']
link:
description: The new link value for the resource.
returned: changed
type: str
sample: /Common/my-link
limits:
description: The new limit configurations for the resource.
returned: changed
type: dict
sample: { 'bits_enabled': true, 'bits_limit': 100 }
'''
import os
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.compat.ipaddress import ip_address
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.compare import compare_complex_list
from library.module_utils.network.f5.icontrol import module_provisioned
from library.module_utils.network.f5.ipaddress import is_valid_ip
from library.module_utils.network.f5.ipaddress import validate_ip_v6_address
except ImportError:
from ansible.module_utils.compat.ipaddress import ip_address
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.compare import compare_complex_list
from ansible.module_utils.network.f5.icontrol import module_provisioned
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
from ansible.module_utils.network.f5.ipaddress import validate_ip_v6_address
class Parameters(AnsibleF5Parameters):
    """Shared parameter definitions for the GTM virtual server module."""
    # Maps BIG-IP REST API attribute names (keys) to this module's
    # parameter names (values).
    api_map = {
        'limitMaxBps': 'bits_limit',
        'limitMaxBpsStatus': 'bits_enabled',
        'limitMaxConnections': 'connections_limit',
        'limitMaxConnectionsStatus': 'connections_enabled',
        'limitMaxPps': 'packets_limit',
        'limitMaxPpsStatus': 'packets_enabled',
        'translationAddress': 'translation_address',
        'translationPort': 'translation_port',
        'dependsOn': 'virtual_server_dependencies',
        'explicitLinkName': 'link',
        'monitor': 'monitors'
    }
    # Attribute names read from / written to the REST API payload.
    api_attributes = [
        'dependsOn',
        'destination',
        'disabled',
        'enabled',
        'explicitLinkName',
        'limitMaxBps',
        'limitMaxBpsStatus',
        'limitMaxConnections',
        'limitMaxConnectionsStatus',
        'limitMaxPps',
        'limitMaxPpsStatus',
        'translationAddress',
        'translationPort',
        'monitor',
    ]
    # Values reported back to the user in the module result.
    returnables = [
        'bits_enabled',
        'bits_limit',
        'connections_enabled',
        'connections_limit',
        'destination',
        'disabled',
        'enabled',
        'link',
        'monitors',
        'packets_enabled',
        'packets_limit',
        'translation_address',
        'translation_port',
        'virtual_server_dependencies',
        'availability_requirements',
    ]
    # Parameters compared between current and desired state to decide
    # whether an update is needed.
    updatables = [
        'bits_enabled',
        'bits_limit',
        'connections_enabled',
        'connections_limit',
        'destination',
        'enabled',
        'link',
        'monitors',
        'packets_limit',
        'packets_enabled',
        'translation_address',
        'translation_port',
        'virtual_server_dependencies',
    ]
class ApiParameters(Parameters):
    """Parameter view over values as returned by the BIG-IP REST API.

    Decodes the API's packed representations -- the ``destination``
    address/port string, the monitor rule string, and the ``dependsOn``
    list -- into the discrete values the module compares against the
    user-supplied parameters.
    """

    @property
    def address(self):
        """Return the IP portion of the API ``destination`` string.

        Raises:
            F5ModuleError: If the decoded value is not a valid IP address.
        """
        # GTM encodes IPv4 destinations as "addr:port" and IPv6 ones as
        # "addr.port" (an IPv6 address itself contains colons), so the
        # separator is chosen by counting colons.
        if self._values['destination'].count(':') >= 2:
            # IPv6
            parts = self._values['destination'].split('.')
        else:
            # IPv4
            parts = self._values['destination'].split(':')
        if is_valid_ip(parts[0]):
            return str(parts[0])
        raise F5ModuleError(
            "'address' parameter from API was not an IP address."
        )

    @property
    def port(self):
        """Return the port portion of ``destination`` as an int."""
        if self._values['destination'].count(':') >= 2:
            # IPv6
            parts = self._values['destination'].split('.')
            # Bug fix: cast to int here as well. The IPv6 branch used to
            # return a string while the IPv4 branch returned an int, so
            # comparisons against the user's (integer) port always
            # reported a difference for IPv6 virtual servers.
            return int(parts[1])
        # IPv4
        parts = self._values['destination'].split(':')
        return int(parts[1])

    @property
    def virtual_server_dependencies(self):
        """Decode ``dependsOn`` entries into server/virtual_server dicts.

        Each API entry name has the form ``<server>:<virtual_server>``.
        The result is sorted by server name so comparisons are stable.
        """
        if self._values['virtual_server_dependencies'] is None:
            return None
        results = []
        for dependency in self._values['virtual_server_dependencies']:
            parts = dependency['name'].split(':')
            result = dict(
                server=parts[0],
                virtual_server=parts[1],
            )
            results.append(result)
        if results:
            results = sorted(results, key=lambda k: k['server'])
        return results

    @property
    def enabled(self):
        # The API reports state via the presence of an 'enabled' or
        # 'disabled' key rather than a boolean value.
        if 'enabled' in self._values:
            return True
        else:
            return False

    @property
    def disabled(self):
        if 'disabled' in self._values:
            return True
        return False

    @property
    def availability_requirement_type(self):
        """Classify the monitor rule: 'at_least', 'require' or 'all'."""
        if self._values['monitors'] is None:
            return None
        if 'min ' in self._values['monitors']:
            return 'at_least'
        elif 'require ' in self._values['monitors']:
            return 'require'
        else:
            return 'all'

    @property
    def monitors_list(self):
        """Return the sorted, fully-qualified monitor names in the rule."""
        if self._values['monitors'] is None:
            return []
        try:
            # Monitor names look like '/Partition/name'.
            result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
            result.sort()
            return result
        except Exception:
            return self._values['monitors']

    @property
    def monitors(self):
        """Re-assemble the canonical monitor rule string for comparison."""
        if self._values['monitors'] is None:
            return None
        monitors = [fq_name(self.partition, x) for x in self.monitors_list]
        if self.availability_requirement_type == 'at_least':
            monitors = ' '.join(monitors)
            result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
        elif self.availability_requirement_type == 'require':
            monitors = ' '.join(monitors)
            result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
        else:
            result = ' and '.join(monitors).strip()
        return result

    @property
    def number_of_probes(self):
        """Return the probes value from the monitor string.

        The monitor string for a Require monitor looks like this:

            require 1 from 2 { /Common/tcp }

        This method parses out the first of the numeric values; it
        represents the "probes" value that can be updated in the module.

        Returns:
            str: The probes value if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'require\s+(?P<probes>\d+)\s+from'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return matches.group('probes')

    @property
    def number_of_probers(self):
        """Return the probers value from the monitor string.

        The monitor string for a Require monitor looks like this:

            require 1 from 2 { /Common/tcp }

        This method parses out the second of the numeric values; it
        represents the "probers" value that can be updated in the module.

        Returns:
            str: The probers value if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return matches.group('probers')

    @property
    def at_least(self):
        """Return the 'at least' value from the monitor string.

        The monitor string for an at_least monitor looks like this:

            min 1 of { /Common/gateway_icmp }

        This method parses out the numeric value; it represents the
        "at_least" value that can be updated in the module.

        Returns:
            str: The at_least value if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'min\s+(?P<least>\d+)\s+of\s+'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return matches.group('least')
class ModuleParameters(Parameters):
def _get_limit_value(self, type):
if self._values['limits'] is None:
return None
if self._values['limits'][type] is None:
return None
return int(self._values['limits'][type])
def _get_availability_value(self, type):
if self._values['availability_requirements'] is None:
return None
if self._values['availability_requirements'][type] is None:
return None
return int(self._values['availability_requirements'][type])
def _get_limit_status(self, type):
if self._values['limits'] is None:
return None
if self._values['limits'][type] is None:
return None
if self._values['limits'][type]:
return 'enabled'
return 'disabled'
@property
def address(self):
if self._values['address'] is None:
return None
if is_valid_ip(self._values['address']):
ip = str(ip_address(u'{0}'.format(self._values['address'])))
return ip
raise F5ModuleError(
"Specified 'address' is not an IP address."
)
@property
def port(self):
if self._values['port'] is None:
return None
if self._values['port'] == '*':
return 0
return int(self._values['port'])
@property
def destination(self):
if self.address is None:
return None
if self.port is None:
return None
if validate_ip_v6_address(self.address):
result = '{0}.{1}'.format(self.address, self.port)
else:
result = '{0}:{1}'.format(self.address, self.port)
return result
@property
def link(self):
if self._values['link'] is None:
return None
return fq_name(self.partition, self._values['link'])
@property
def bits_limit(self):
return self._get_limit_value('bits_limit')
@property
def packets_limit(self):
return self._get_limit_value('packets_limit')
@property
def connections_limit(self):
return self._get_limit_value('connections_limit')
@property
def bits_enabled(self):
return self._get_limit_status('bits_enabled')
@property
def packets_enabled(self):
return self._get_limit_status('packets_enabled')
@property
def connections_enabled(self):
return self._get_limit_status('connections_enabled')
@property
def translation_address(self):
    """Return the translation address; empty string maps to literal 'none'."""
    raw = self._values['translation_address']
    if raw is None:
        return None
    return 'none' if raw == '' else raw

@property
def translation_port(self):
    """Return the translation port as an int; '*' and '' both mean 0."""
    raw = self._values['translation_port']
    if raw is None:
        return None
    if raw in ('*', ''):
        return 0
    return int(raw)
@property
def virtual_server_dependencies(self):
    """Normalize dependency entries.

    Servers are made fully qualified with the partition, virtual server
    names are reduced to their basename, and the result is sorted by
    server name for stable comparison.
    """
    dependencies = self._values['virtual_server_dependencies']
    if dependencies is None:
        return None
    results = [
        dict(
            server=fq_name(self.partition, dependency['server']),
            virtual_server=os.path.basename(dependency['virtual_server'])
        )
        for dependency in dependencies
    ]
    if results:
        results = sorted(results, key=lambda item: item['server'])
    return results
@property
def enabled(self):
    """True when state is 'enabled', False when 'disabled', else None."""
    state = self._values['state']
    if state == 'enabled':
        return True
    if state == 'disabled':
        return False
    return None

@property
def disabled(self):
    """True when state is 'disabled', False when 'enabled', else None."""
    state = self._values['state']
    if state == 'disabled':
        return True
    if state == 'enabled':
        return False
    return None
@property
def monitors_list(self):
    """Extract the sorted list of '/Partition/name' monitors from the raw string."""
    raw = self._values['monitors']
    if raw is None:
        return []
    try:
        # Monitor names are pulled out of any surrounding
        # 'min N of { ... }' / 'require N from M { ... }' syntax.
        found = re.findall(r'/\w+/[^\s}]+', raw)
        found.sort()
        return found
    except Exception:
        # Non-string input: hand the raw value back unchanged.
        return raw
@property
def monitors(self):
    """Build the BIG-IP monitor rule string from the configured monitors.

    Depending on availability_requirement_type the result is
    'min N of { ... }' (at_least), 'require N from M { ... }' (require),
    or a plain 'a and b' conjunction (all).

    Raises F5ModuleError when at_least exceeds the monitor count, or when
    number_of_probes exceeds number_of_probers.
    """
    if self._values['monitors'] is None:
        return None
    monitors = [fq_name(self.partition, x) for x in self.monitors_list]
    if self.availability_requirement_type == 'at_least':
        if self.at_least > len(self.monitors_list):
            raise F5ModuleError(
                "The 'at_least' value must not exceed the number of 'monitors'."
            )
        monitors = ' '.join(monitors)
        result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
    elif self.availability_requirement_type == 'require':
        monitors = ' '.join(monitors)
        if self.number_of_probes > self.number_of_probers:
            raise F5ModuleError(
                "The 'number_of_probes' must not exceed the 'number_of_probers'."
            )
        result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
    else:
        result = ' and '.join(monitors).strip()
    return result
@property
def availability_requirement_type(self):
    """The 'type' suboption of availability_requirements, or None."""
    if self._values['availability_requirements'] is None:
        return None
    return self._values['availability_requirements']['type']

@property
def number_of_probes(self):
    # Int-converted 'number_of_probes' suboption, or None when unset.
    return self._get_availability_value('number_of_probes')

@property
def number_of_probers(self):
    # Int-converted 'number_of_probers' suboption, or None when unset.
    return self._get_availability_value('number_of_probers')

@property
def at_least(self):
    # Int-converted 'at_least' suboption, or None when unset.
    return self._get_availability_value('at_least')
class Changes(Parameters):
    """Base container for tracked parameter changes."""

    def to_return(self):
        """Return a filtered dict of all returnable attribute values.

        Any exception while collecting values is swallowed and whatever
        was gathered so far is returned (best-effort reporting).
        """
        result = {}
        try:
            for key in self.returnables:
                result[key] = getattr(self, key)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    """Changes shaped for submission to the BIG-IP REST API."""

    @property
    def virtual_server_dependencies(self):
        """Collapse dependency dicts into API 'server:virtual_server' names."""
        if self._values['virtual_server_dependencies'] is None:
            return None
        results = []
        for depend in self._values['virtual_server_dependencies']:
            name = '{0}:{1}'.format(depend['server'], depend['virtual_server'])
            results.append(dict(name=name))
        return results

    @property
    def monitors(self):
        """Strip the '{ ... }' grouping syntax from a monitor string."""
        monitor_string = self._values['monitors']
        if monitor_string is None:
            return None
        # Bug fix: the original condition was `'{' in monitor_string and '}'`,
        # where the bare literal '}' is always truthy; both braces must
        # actually be present before the grouping syntax is stripped.
        if '{' in monitor_string and '}' in monitor_string:
            tmp = monitor_string.strip('}').split('{')
            monitor = ''.join(tmp).rstrip()
            return monitor
        return monitor_string
class ReportableChanges(Changes):
    """Changes re-parsed from API form back into module-facing values."""

    @property
    def monitors(self):
        """Return the sorted list of '/Partition/name' monitors, or []."""
        if self._values['monitors'] is None:
            return []
        try:
            result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
            result.sort()
            return result
        except Exception:
            # Non-string input: return the raw value unchanged.
            return self._values['monitors']

    @property
    def availability_requirement_type(self):
        """Infer the requirement type from the monitor string's prefix."""
        if self._values['monitors'] is None:
            return None
        if 'min ' in self._values['monitors']:
            return 'at_least'
        elif 'require ' in self._values['monitors']:
            return 'require'
        else:
            return 'all'

    @property
    def number_of_probes(self):
        """Returns the probes value from the monitor string.

        The monitor string for a Require monitor looks like this.

            require 1 from 2 { /Common/tcp }

        This method parses out the first of the numeric values. This value
        represents the "probes" value that can be updated in the module.

        Returns:
             int: The probes value if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'require\s+(?P<probes>\d+)\s+from'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return int(matches.group('probes'))

    @property
    def number_of_probers(self):
        """Returns the probers value from the monitor string.

        The monitor string for a Require monitor looks like this.

            require 1 from 2 { /Common/tcp }

        This method parses out the second of the numeric values. This value
        represents the "probers" value that can be updated in the module.

        Returns:
             int: The probers value if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return int(matches.group('probers'))

    @property
    def at_least(self):
        """Returns the 'at least' value from the monitor string.

        The monitor string for a Require monitor looks like this.

            min 1 of { /Common/gateway_icmp }

        This method parses out the first of the numeric values. This value
        represents the "at_least" value that can be updated in the module.

        Returns:
             int: The at_least value if found. None otherwise.
        """
        if self._values['monitors'] is None:
            return None
        pattern = r'min\s+(?P<least>\d+)\s+of\s+'
        matches = re.search(pattern, self._values['monitors'])
        if matches is None:
            return None
        return int(matches.group('least'))

    @property
    def availability_requirements(self):
        """Reassemble the availability_requirements dict from the parsers above."""
        if self._values['monitors'] is None:
            return None
        result = dict()
        result['type'] = self.availability_requirement_type
        result['at_least'] = self.at_least
        result['number_of_probers'] = self.number_of_probers
        result['number_of_probes'] = self.number_of_probes
        return result
class Difference(object):
    """Compute which of the desired ('want') values differ from 'have'."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for ``param``, or None when unchanged.

        Named properties on this class override the generic comparison.
        """
        try:
            result = getattr(self, param)
            return result
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        """Generic comparison: report 'want' when it differs from 'have'."""
        attr1 = getattr(self.want, param)
        try:
            attr2 = getattr(self.have, param)
            if attr1 != attr2:
                return attr1
        except AttributeError:
            # 'have' does not carry the attribute; treat 'want' as a change.
            return attr1

    @property
    def destination(self):
        """Compare destinations, backfilling missing want port/address from have."""
        if self.want.port is None:
            self.want.update({'port': self.have.port})
        if self.want.address is None:
            self.want.update({'address': self.have.address})
        if self.want.destination != self.have.destination:
            return self.want.destination

    @property
    def virtual_server_dependencies(self):
        if self.have.virtual_server_dependencies is None:
            return self.want.virtual_server_dependencies
        # 'have' is known non-None here; the original carried an unreachable
        # want-and-have-both-None branch at this point, removed as dead code.
        if self.want.virtual_server_dependencies is None:
            return None
        result = compare_complex_list(self.want.virtual_server_dependencies, self.have.virtual_server_dependencies)
        return result

    @property
    def enabled(self):
        """Translate a state transition into the enabled/disabled flag pair."""
        if self.want.state == 'enabled' and self.have.disabled:
            result = dict(
                enabled=True,
                disabled=False
            )
            return result
        elif self.want.state == 'disabled' and self.have.enabled:
            result = dict(
                enabled=False,
                disabled=True
            )
            return result

    @property
    def monitors(self):
        if self.have.monitors is None:
            return self.want.monitors
        if self.have.monitors != self.want.monitors:
            return self.want.monitors
class ModuleManager(object):
    """Orchestrate create/update/delete of a GTM virtual server via iControl REST."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()

    def _set_changed_options(self):
        """Seed self.changes with every user-supplied returnable value (create path)."""
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)

    def _update_changed_options(self):
        """Diff want against have; record differences. Return True if any changed."""
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                # Dict results (e.g. enabled/disabled pairs) are merged flat.
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def should_update(self):
        """Return True when the diff detected at least one change."""
        result = self._update_changed_options()
        if result:
            return True
        return False

    def exec_module(self):
        """Main dispatch: require GTM provisioning, then apply desired state."""
        if not module_provisioned(self.client, 'gtm'):
            raise F5ModuleError(
                "GTM must be provisioned to use this module."
            )
        changed = False
        result = dict()
        state = self.want.state

        if state in ['present', 'enabled', 'disabled']:
            changed = self.present()
        elif state == 'absent':
            changed = self.absent()

        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        """Forward any collected deprecation warnings to Ansible."""
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def present(self):
        """Ensure the virtual server exists (create or update as needed)."""
        if self.exists():
            return self.update()
        else:
            return self.create()

    def absent(self):
        """Ensure the virtual server does not exist."""
        if self.exists():
            return self.remove()
        return False

    def update(self):
        """Apply changes to an existing virtual server; honor check mode."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True

    def remove(self):
        """Delete the virtual server and verify it is gone; honor check mode."""
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the resource.")
        return True

    def create(self):
        """Create the virtual server, applying BIG-IP wildcard defaults."""
        # Unset port/translation values default to the '*'/'::' wildcards
        # the API expects.
        if self.want.port in [None, ""]:
            self.want.update({'port': '*'})
        if self.want.translation_port in [None, ""]:
            self.want.update({'translation_port': '*'})
        if self.want.translation_address in [None, ""]:
            self.want.update({'translation_address': '::'})

        self._set_changed_options()

        if self.want.address is None:
            raise F5ModuleError(
                "You must supply an 'address' when creating a new virtual server."
            )
        if self.want.availability_requirement_type == 'require' and len(self.want.monitors_list) > 1:
            raise F5ModuleError(
                "Only one monitor may be specified when using an availability_requirement type of 'require'"
            )
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True

    def exists(self):
        """Return True when the virtual server currently exists on the device."""
        uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}/virtual-servers/{3}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.server_name),
            transform_name(name=self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError:
            return False
        # 'and' binds tighter than 'or': 404 status, or a JSON body carrying
        # code == 404, both mean "not found".
        if resp.status == 404 or 'code' in response and response['code'] == 404:
            return False
        return True

    def create_on_device(self):
        """POST the accumulated changes to create the virtual server."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}/virtual-servers/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.server_name)
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403, 404]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def update_on_device(self):
        """PATCH the changed properties onto the existing virtual server."""
        params = self.changes.api_params()
        uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}/virtual-servers/{3}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.server_name),
            transform_name(name=self.want.name)
        )
        resp = self.client.api.patch(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)

    def remove_from_device(self):
        """DELETE the virtual server from the device."""
        uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}/virtual-servers/{3}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.server_name),
            transform_name(name=self.want.name)
        )
        response = self.client.api.delete(uri)
        if response.status == 200:
            return True
        raise F5ModuleError(response.content)

    def read_current_from_device(self):
        """GET the current device state and wrap it in ApiParameters."""
        uri = "https://{0}:{1}/mgmt/tm/gtm/server/{2}/virtual-servers/{3}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            transform_name(self.want.partition, self.want.server_name),
            transform_name(name=self.want.name)
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return ApiParameters(params=response)
class ArgumentSpec(object):
    """Declarative Ansible argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(required=True),
            server_name=dict(required=True),
            address=dict(),
            port=dict(type='int'),
            translation_address=dict(),
            translation_port=dict(),
            availability_requirements=dict(
                type='dict',
                options=dict(
                    type=dict(
                        choices=['all', 'at_least', 'require'],
                        required=True
                    ),
                    at_least=dict(type='int'),
                    number_of_probes=dict(type='int'),
                    number_of_probers=dict(type='int')
                ),
                # 'at_least' belongs to the at_least type; probes/probers to
                # the require type — they cannot be combined.
                mutually_exclusive=[
                    ['at_least', 'number_of_probes'],
                    ['at_least', 'number_of_probers'],
                ],
                required_if=[
                    ['type', 'at_least', ['at_least']],
                    ['type', 'require', ['number_of_probes', 'number_of_probers']]
                ]
            ),
            monitors=dict(type='list'),
            virtual_server_dependencies=dict(
                type='list',
                options=dict(
                    server=dict(required=True),
                    virtual_server=dict(required=True)
                )
            ),
            link=dict(),
            limits=dict(
                type='dict',
                options=dict(
                    bits_enabled=dict(type='bool'),
                    packets_enabled=dict(type='bool'),
                    connections_enabled=dict(type='bool'),
                    bits_limit=dict(type='int'),
                    packets_limit=dict(type='int'),
                    connections_limit=dict(type='int')
                )
            ),
            state=dict(
                default='present',
                choices=['present', 'absent', 'disabled', 'enabled']
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        # Merge the module-specific spec on top of the common F5 arguments.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: build the spec, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
    )

    try:
        manager = ModuleManager(module=module)
        results = manager.exec_module()
        module.exit_json(**results)
    except F5ModuleError as ex:
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
| gpl-3.0 |
dischinator/pyload | module/plugins/hoster/PornhubCom.py | 5 | 2547 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.Hoster import Hoster
class PornhubCom(Hoster):
    __name__ = "PornhubCom"
    __type__ = "hoster"
    __version__ = "0.55"
    __status__ = "testing"

    __pattern__ = r'http://(?:www\.)?pornhub\.com/view_video\.php\?viewkey=\w+'
    __config__ = [("activated", "bool", "Activated", True)]

    __description__ = """Pornhub.com hoster plugin"""
    __license__ = "GPLv3"
    __authors__ = [("jeix", "[email protected]")]

    def process(self, pyfile):
        """Plugin entry point: fetch the page, resolve name and video URL."""
        self.download_html()
        if not self.file_exists():
            self.offline()

        pyfile.name = self.get_file_name()
        self.download(self.get_file_url())

    def download_html(self):
        """Load the video page HTML into self.data."""
        url = self.pyfile.url
        self.data = self.load(url)

    def get_file_url(self):
        """
        Returns the absolute downloadable filepath
        """
        if not self.data:
            self.download_html()

        url = "http://www.pornhub.com//gateway.php"
        video_id = self.pyfile.url.split('=')[-1]
        #: Thanks to jD team for this one v
        # Hand-crafted AMF request body (binary protocol); the video id is
        # spliced in with a one-byte length prefix.  NOTE(review): this is
        # Python-2 style byte-string handling — on Python 3 these would need
        # to be bytes literals.
        post_data = "\x00\x03\x00\x00\x00\x01\x00\x0c\x70\x6c\x61\x79\x65\x72\x43\x6f\x6e\x66\x69\x67\x00\x02\x2f\x31\x00\x00\x00\x44\x0a\x00\x00\x00\x03\x02\x00"
        post_data += chr(len(video_id))
        post_data += video_id
        post_data += "\x02\x00\x02\x2d\x31\x02\x00\x20"
        post_data += "add299463d4410c6d1b1c418868225f7"

        content = self.load(url, post=str(post_data))

        # Replace non-printable bytes so the regex below can scan the
        # binary AMF response as text.
        new_content = ""
        for x in content:
            if ord(x) < 32 or ord(x) > 176:
                new_content += '#'
            else:
                new_content += x

        content = new_content

        # NOTE(review): raises AttributeError if the flv_url marker is
        # absent from the response — TODO confirm upstream behaviour.
        return re.search(r'flv_url.*(http.*?)##post_roll', content).group(1)

    def get_file_name(self):
        """Derive the file name from the page title (or first <h1>)."""
        if not self.data:
            self.download_html()

        m = re.search(r'<title.+?>(.+?) - ', self.data)
        if m is not None:
            name = m.group(1)
        else:
            matches = re.findall('<h1>(.*?)</h1>', self.data)
            # The first <h1> is usually the site header; prefer the second.
            if len(matches) > 1:
                name = matches[1]
            else:
                name = matches[0]

        return name + '.flv'

    def file_exists(self):
        """
        Returns True or False
        """
        if not self.data:
            self.download_html()

        if re.search(r'This video is no longer in our database or is in conversion', self.data):
            return False
        else:
            return True
| gpl-3.0 |
happyleavesaoc/home-assistant | homeassistant/helpers/event.py | 4 | 11145 | """Helpers for listening to events."""
import functools as ft
from homeassistant.helpers.sun import get_astral_event_next
from ..core import HomeAssistant, callback
from ..const import (
ATTR_NOW, EVENT_STATE_CHANGED, EVENT_TIME_CHANGED, MATCH_ALL)
from ..util import dt as dt_util
from ..util.async import run_callback_threadsafe
# PyLint does not like the use of threaded_listener_factory
# pylint: disable=invalid-name
def threaded_listener_factory(async_factory):
    """Convert an async event helper to a thread-safe synchronous one."""
    @ft.wraps(async_factory)
    def factory(*args, **kwargs):
        """Run the wrapped async helper from a worker thread."""
        hass = args[0]

        if not isinstance(hass, HomeAssistant):
            raise TypeError('First parameter needs to be a hass instance')

        unsubscribe = run_callback_threadsafe(
            hass.loop, ft.partial(async_factory, *args, **kwargs)).result()

        def remove():
            """Remove the listener, callable from any thread."""
            run_callback_threadsafe(hass.loop, unsubscribe).result()

        return remove

    return factory
@callback
def async_track_state_change(hass, entity_ids, action, from_state=None,
                             to_state=None):
    """Track specific state changes.

    entity_ids, from_state and to_state can be string or list.
    Use list to match multiple.

    Returns a function that can be called to remove the listener.

    Must be run within the event loop.
    """
    from_state = _process_state_match(from_state)
    to_state = _process_state_match(to_state)

    # Ensure it is a lowercase list with entity ids we want to match on
    if entity_ids == MATCH_ALL:
        pass
    elif isinstance(entity_ids, str):
        entity_ids = (entity_ids.lower(),)
    else:
        entity_ids = tuple(entity_id.lower() for entity_id in entity_ids)

    @callback
    def state_change_listener(event):
        """Handle specific state changes."""
        if entity_ids != MATCH_ALL and \
                event.data.get('entity_id') not in entity_ids:
            return

        # Extract the plain state strings (or None) for pattern matching;
        # the full State objects are passed on to the action unchanged.
        if event.data.get('old_state') is not None:
            old_state = event.data['old_state'].state
        else:
            old_state = None

        if event.data.get('new_state') is not None:
            new_state = event.data['new_state'].state
        else:
            new_state = None

        if _matcher(old_state, from_state) and _matcher(new_state, to_state):
            hass.async_run_job(action, event.data.get('entity_id'),
                               event.data.get('old_state'),
                               event.data.get('new_state'))

    return hass.bus.async_listen(EVENT_STATE_CHANGED, state_change_listener)


track_state_change = threaded_listener_factory(async_track_state_change)
@callback
def async_track_template(hass, template, action, variables=None):
    """Add a listener that fires when a template condition becomes true.

    The action is edge-triggered: it runs once when the template result
    turns true and is re-armed only after the template turns false again.
    """
    from . import condition

    # Local variable to keep track of if the action has already been triggered
    already_triggered = False

    @callback
    def template_condition_listener(entity_id, from_s, to_s):
        """Check if condition is correct and run action."""
        nonlocal already_triggered
        template_result = condition.async_template(hass, template, variables)

        # Check to see if template returns true
        if template_result and not already_triggered:
            already_triggered = True
            hass.async_run_job(action, entity_id, from_s, to_s)
        elif not template_result:
            # Re-arm so the next false -> true transition fires again.
            already_triggered = False

    return async_track_state_change(
        hass, template.extract_entities(), template_condition_listener)


track_template = threaded_listener_factory(async_track_template)
@callback
def async_track_point_in_time(hass, action, point_in_time):
    """Add a listener that fires once after a specific point in time.

    Wraps the UTC variant, handing the action a localized timestamp.
    """
    @callback
    def localized_action(utc_now):
        """Invoke the action with the time converted back to local."""
        hass.async_run_job(action, dt_util.as_local(utc_now))

    return async_track_point_in_utc_time(
        hass, localized_action, dt_util.as_utc(point_in_time))


track_point_in_time = threaded_listener_factory(async_track_point_in_time)
@callback
def async_track_point_in_utc_time(hass, action, point_in_time):
    """Add a listener that fires once after a specific point in UTC time."""
    # Ensure point_in_time is UTC
    point_in_time = dt_util.as_utc(point_in_time)

    @callback
    def point_in_time_listener(event):
        """Listen for matching time_changed events."""
        now = event.data[ATTR_NOW]

        if now < point_in_time or hasattr(point_in_time_listener, 'run'):
            return

        # Set variable so that we will never run twice.
        # Because the event bus might have to wait till a thread comes
        # available to execute this listener it might occur that the
        # listener gets lined up twice to be executed. This will make
        # sure the second time it does nothing.
        point_in_time_listener.run = True
        async_unsub()

        hass.async_run_job(action, now)

    # Unsubscribe handle is captured by the listener closure above so it
    # can detach itself on first fire.
    async_unsub = hass.bus.async_listen(EVENT_TIME_CHANGED,
                                        point_in_time_listener)

    return async_unsub


track_point_in_utc_time = threaded_listener_factory(
    async_track_point_in_utc_time)
@callback
def async_track_time_interval(hass, action, interval):
    """Add a listener that fires repetitively at every timedelta interval."""
    remove = None

    def next_interval():
        """Return the next interval."""
        return dt_util.utcnow() + interval

    @callback
    def interval_listener(now):
        """Handle elapsed intervals."""
        nonlocal remove
        # Re-arm the one-shot point-in-time listener before running the
        # action so the schedule keeps going even if the action raises.
        remove = async_track_point_in_utc_time(
            hass, interval_listener, next_interval())
        hass.async_run_job(action, now)

    remove = async_track_point_in_utc_time(
        hass, interval_listener, next_interval())

    def remove_listener():
        """Remove interval listener."""
        # Always calls the most recently armed one-shot listener.
        remove()

    return remove_listener


track_time_interval = threaded_listener_factory(async_track_time_interval)
@callback
def async_track_sunrise(hass, action, offset=None):
    """Add a listener that will fire a specified offset from sunrise daily."""
    remove = None

    @callback
    def sunrise_automation_listener(now):
        """Handle points in time to execute actions."""
        nonlocal remove
        # Re-schedule for the next sunrise before running the action.
        remove = async_track_point_in_utc_time(
            hass, sunrise_automation_listener, get_astral_event_next(
                hass, 'sunrise', offset=offset))
        hass.async_run_job(action)

    remove = async_track_point_in_utc_time(
        hass, sunrise_automation_listener, get_astral_event_next(
            hass, 'sunrise', offset=offset))

    def remove_listener():
        """Remove sunrise listener."""
        remove()

    return remove_listener


track_sunrise = threaded_listener_factory(async_track_sunrise)
@callback
def async_track_sunset(hass, action, offset=None):
    """Add a listener that will fire a specified offset from sunset daily."""
    remove = None

    @callback
    def sunset_automation_listener(now):
        """Handle points in time to execute actions."""
        nonlocal remove
        # Re-schedule for the next sunset before running the action.
        remove = async_track_point_in_utc_time(
            hass, sunset_automation_listener, get_astral_event_next(
                hass, 'sunset', offset=offset))
        hass.async_run_job(action)

    remove = async_track_point_in_utc_time(
        hass, sunset_automation_listener, get_astral_event_next(
            hass, 'sunset', offset=offset))

    def remove_listener():
        """Remove sunset listener."""
        remove()

    return remove_listener


track_sunset = threaded_listener_factory(async_track_sunset)
@callback
def async_track_utc_time_change(hass, action, year=None, month=None, day=None,
                                hour=None, minute=None, second=None,
                                local=False):
    """Add a listener that will fire if time matches a pattern.

    Each field may be None (match any), a value/list of values, or a
    '/N' string meaning "every multiple of N" (see _matcher).
    """
    # We do not have to wrap the function with time pattern matching logic
    # if no pattern given
    if all(val is None for val in (year, month, day, hour, minute, second)):
        @callback
        def time_change_listener(event):
            """Fire every time event that comes in."""
            hass.async_run_job(action, event.data[ATTR_NOW])

        return hass.bus.async_listen(EVENT_TIME_CHANGED, time_change_listener)

    pmp = _process_time_match
    year, month, day = pmp(year), pmp(month), pmp(day)
    hour, minute, second = pmp(hour), pmp(minute), pmp(second)

    @callback
    def pattern_time_change_listener(event):
        """Listen for matching time_changed events."""
        now = event.data[ATTR_NOW]

        if local:
            now = dt_util.as_local(now)
        mat = _matcher

        # pylint: disable=too-many-boolean-expressions
        if mat(now.year, year) and \
                mat(now.month, month) and \
                mat(now.day, day) and \
                mat(now.hour, hour) and \
                mat(now.minute, minute) and \
                mat(now.second, second):

            hass.async_run_job(action, now)

    return hass.bus.async_listen(EVENT_TIME_CHANGED,
                                 pattern_time_change_listener)


track_utc_time_change = threaded_listener_factory(async_track_utc_time_change)
@callback
def async_track_time_change(hass, action, year=None, month=None, day=None,
                            hour=None, minute=None, second=None):
    """Add a listener that will fire if local time matches a pattern."""
    # Thin wrapper: same as async_track_utc_time_change but matched
    # against local time (local=True).
    return async_track_utc_time_change(hass, action, year, month, day, hour,
                                       minute, second, local=True)


track_time_change = threaded_listener_factory(async_track_time_change)
def _process_state_match(parameter):
    """Wrap parameter in a tuple if it is not one and return it."""
    if parameter is None or parameter == MATCH_ALL:
        return MATCH_ALL
    if isinstance(parameter, str) or not hasattr(parameter, '__iter__'):
        return (parameter,)
    return tuple(parameter)
def _process_time_match(parameter):
    """Wrap parameter in a tuple if it is not one and return it.

    A string starting with '/' is kept as-is: it is the "every multiple
    of N" pattern understood by _matcher.
    """
    if parameter is None or parameter == MATCH_ALL:
        return MATCH_ALL
    if isinstance(parameter, str) and parameter.startswith('/'):
        return parameter
    if isinstance(parameter, str) or not hasattr(parameter, '__iter__'):
        return (parameter,)
    return tuple(parameter)
def _matcher(subject, pattern):
"""Return True if subject matches the pattern.
Pattern is either a tuple of allowed subjects or a `MATCH_ALL`.
"""
if isinstance(pattern, str) and pattern.startswith('/'):
try:
return subject % float(pattern.lstrip('/')) == 0
except ValueError:
return False
return MATCH_ALL == pattern or subject in pattern
| apache-2.0 |
pywikibot-catfiles/file-metadata | setupdeps.py | 2 | 16766 | # -*- coding: utf-8 -*-
"""
Various dependencies that are required for file-metadata which need some
special handling.
"""
from __future__ import (division, absolute_import, unicode_literals,
print_function)
import ctypes.util
import hashlib
import os
import subprocess
import sys
from distutils import sysconfig
from distutils.errors import DistutilsSetupError
try:
from urllib.request import urlopen
except ImportError: # Python 2
from urllib2 import urlopen
PROJECT_PATH = os.path.abspath(os.path.dirname(__file__))
def data_path():
    """Return the package's datafiles directory, creating it if needed."""
    path = os.path.join(PROJECT_PATH, 'file_metadata', 'datafiles')
    if not os.path.exists(path):
        os.makedirs(path)
    return path
def which(cmd):
    """Locate ``cmd`` on PATH; return its path, or None when not found."""
    try:
        from shutil import which as _shutil_which
        return _shutil_which(cmd)
    except ImportError:  # For python 3.2 and lower
        # Fall back to the external `which` utility.
        try:
            output = subprocess.check_output(["which", cmd],
                                             stderr=subprocess.STDOUT)
        except (OSError, subprocess.CalledProcessError):
            return None
        return output.decode(sys.getfilesystemencoding()).strip()
def setup_install(packages):
    """
    Install packages using pip to the current folder. Useful to import
    packages during setup itself.

    :param packages: Iterable of pip requirement strings.
    :return: True when installation succeeded (or nothing to install),
        False when pip exited with a non-zero status.
    """
    packages = list(packages)
    if not packages:
        return True
    try:
        # Bug fix: the original used subprocess.call, which returns the exit
        # code and never raises CalledProcessError, so failures silently
        # returned True. check_call raises on non-zero exit.
        subprocess.check_call([sys.executable, "-m", "pip", "install",
                               "-t", PROJECT_PATH] + packages)
        return True
    except subprocess.CalledProcessError:
        return False
def download(url, filename, overwrite=False, sha1=None):
    """
    Download the given URL to the given filename. If the file exists,
    it won't be downloaded unless asked to overwrite. Both, text data
    like html, txt, etc. or binary data like images, audio, etc. are
    acceptable.

    :param url: A URL to download.
    :param filename: The file to store the downloaded file to.
    :param overwrite: Set to True if the file should be downloaded even if it
        already exists.
    :param sha1: The sha1 checksum to verify the file using.
    :return: True when the sha1 digest of the (existing or downloaded)
        file equals ``sha1``.
    """
    blocksize = 16 * 1024
    _hash = hashlib.sha1()
    if os.path.exists(filename) and not overwrite:
        # Do a pass for the hash if it already exists
        with open(filename, "rb") as downloaded_file:
            while True:
                block = downloaded_file.read(blocksize)
                if not block:
                    break
                _hash.update(block)
    else:
        # If it doesn't exist, or overwrite=True, find hash while downloading
        response = urlopen(url)
        try:
            with open(filename, 'wb') as out_file:
                while True:
                    block = response.read(blocksize)
                    if not block:
                        break
                    out_file.write(block)
                    _hash.update(block)
        finally:
            # Bug fix: close the connection explicitly; the original leaked
            # it, and urlopen results are not context managers on Python 2.
            response.close()
    return _hash.hexdigest() == sha1
class CheckFailed(Exception):
    """Raised when a ``SetupPackage.check()`` fails."""
class SetupPackage(object):
    """Base class describing one external dependency of the project."""

    name = None
    optional = False
    # Per-package-manager package names. A value may be a plain name, a
    # (name, note) tuple, or None meaning "not installable via this manager".
    pkg_names = {
        "apt-get": None,
        "yum": None,
        "dnf": None,
        "pacman": None,
        "zypper": None,
        "brew": None,
        "port": None,
        "windows_url": None
    }

    def check(self):
        """
        Check whether the dependencies are met. Should raise a ``CheckFailed``
        exception if the dependency was not found.
        """
        pass

    def get_install_requires(self):
        """
        Return a list of Python packages that are required by the package.
        pip / easy_install will attempt to download and install this
        package if it is not installed.
        """
        return []

    def get_setup_requires(self):
        """
        Return a list of Python packages that are required by the setup.py
        itself. pip / easy_install will attempt to download and install this
        package if it is not installed on top of the setup.py script.
        """
        return []

    def get_data_files(self):
        """
        Perform required actions to add the data files into the directory
        given by ``data_path()``.
        """
        pass

    def install_help_msg(self):
        """
        The help message to show if the package is not installed. The help
        message shown depends on whether some class variables are present.
        """
        def _try_managers(*managers):
            # Return an install hint for the first manager that both has a
            # package name configured and is available on this system.
            for manager in managers:
                pkg_name = self.pkg_names.get(manager, None)
                if pkg_name and which(manager) is not None:
                    pkg_note = None
                    if isinstance(pkg_name, (tuple, list)):
                        pkg_name, pkg_note = pkg_name
                    msg = ('Try installing {0} with `{1} install {2}`.'
                           .format(self.name, manager, pkg_name))
                    if pkg_note:
                        msg += ' Note: ' + pkg_note
                    return msg

        message = ""
        if sys.platform == "win32":
            url = self.pkg_names.get("windows_url", None)
            if url:
                return ('Please check {0} for instructions to install {1}'
                        .format(url, self.name))
        elif sys.platform == "darwin":
            manager_message = _try_managers("brew", "port")
            return manager_message or message
        elif sys.platform.startswith("linux"):
            try:
                import distro
            except ImportError:
                setup_install(['distro'])
                import distro
            release = distro.id()
            if release in ('debian', 'ubuntu', 'linuxmint', 'raspbian'):
                manager_message = _try_managers('apt-get')
                if manager_message:
                    return manager_message
            elif release in ('centos', 'rhel', 'redhat', 'fedora',
                             'scientific', 'amazon', ):
                manager_message = _try_managers('dnf', 'yum')
                if manager_message:
                    return manager_message
            elif release in ('sles', 'opensuse'):
                manager_message = _try_managers('zypper')
                if manager_message:
                    return manager_message
            # Bug fix: the original wrote `release in ('arch')`, a substring
            # test against the string 'arch' (missing tuple comma); it only
            # worked by accident and also matched e.g. 'ar' and 'arc'.
            elif release in ('arch',):
                manager_message = _try_managers('pacman')
                if manager_message:
                    return manager_message
        return message
class PkgConfig(SetupPackage):
    """
    This is a class for communicating with pkg-config.

    On win32, pkg-config is assumed to be unavailable.  Elsewhere the
    constructor probes the binary (honouring the PKG_CONFIG environment
    variable) and raises DistutilsSetupError if it cannot be executed.
    """
    name = "pkg-config"
    pkg_names = {
        "apt-get": 'pkg-config',
        "yum": None,
        "dnf": None,
        "pacman": None,
        "zypper": None,
        "brew": 'pkg-config',
        "port": None,
        "windows_url": None
    }
    def __init__(self):
        if sys.platform == 'win32':
            self.has_pkgconfig = False
        else:
            self.pkg_config = os.environ.get('PKG_CONFIG', 'pkg-config')
            self.set_pkgconfig_path()
            try:
                # BUGFIX: the null device must be opened for *writing* to be
                # usable as the child's stdout/stderr; it was opened in the
                # default read-only mode before.
                with open(os.devnull, 'w') as nul:
                    subprocess.check_call([self.pkg_config, "--help"],
                                          stdout=nul, stderr=nul)
                self.has_pkgconfig = True
            except (subprocess.CalledProcessError, OSError):
                self.has_pkgconfig = False
                raise DistutilsSetupError("pkg-config is not installed. "
                                          "Please install it to continue.\n" +
                                          self.install_help_msg())
    def set_pkgconfig_path(self):
        # Expose the Python installation's own pkgconfig directory (if any)
        # to pkg-config via PKG_CONFIG_PATH.
        pkgconfig_path = sysconfig.get_config_var('LIBDIR')
        if pkgconfig_path is None:
            return
        pkgconfig_path = os.path.join(pkgconfig_path, 'pkgconfig')
        if not os.path.isdir(pkgconfig_path):
            return
        os.environ['PKG_CONFIG_PATH'] = ':'.join(
            [os.environ.get('PKG_CONFIG_PATH', ""), pkgconfig_path])
    def get_version(self, package):
        """
        Get the version of the package from pkg-config.

        Returns None when pkg-config is unavailable or the package is
        unknown to it.
        """
        if not self.has_pkgconfig:
            return None
        try:
            output = subprocess.check_output(
                [self.pkg_config, package, "--modversion"],
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            return None
        else:
            output = output.decode(sys.getfilesystemencoding())
            return output.strip()
# The PkgConfig class should be used through this singleton
# (instantiating it probes the system's pkg-config binary once at import).
pkg_config = PkgConfig()
class Distro(SetupPackage):
    """Setup requirement: the ``distro`` distribution-detection package."""

    name = "distro"

    def check(self):
        # Pure-Python dependency; pip can always satisfy it.
        return 'Will be installed with pip.'

    def get_setup_requires(self):
        # Only request the package when it is not already importable.
        try:
            import distro  # noqa (unused import)
        except ImportError:
            return ['distro']
        return []
class SetupTools(SetupPackage):
    """Setup requirement: ``setuptools`` itself."""

    name = 'setuptools'

    def check(self):
        # Pure-Python dependency; pip can always satisfy it.
        return 'Will be installed with pip.'

    def get_setup_requires(self):
        # Only request the package when it is not already importable.
        try:
            import setuptools  # noqa (unused import)
        except ImportError:
            return ['setuptools']
        return []
class PathLib(SetupPackage):
    """Install requirement: pathlib (pathlib2 backport before Python 3.4)."""

    name = 'pathlib'

    def check(self):
        if sys.version_info >= (3, 4):
            # The stdlib module exists from 3.4 onwards.
            return 'Already installed in python 3.4+'
        return 'Backported pathlib2 will be installed with pip.'

    def get_install_requires(self):
        return ['pathlib2'] if sys.version_info < (3, 4) else []
class AppDirs(SetupPackage):
    """Install requirement: the ``appdirs`` platform-directories package."""
    name = 'appdirs'
    def check(self):
        # Pure-Python dependency; pip can always install it.
        return 'Will be installed with pip.'
    def get_install_requires(self):
        return ['appdirs']
class LibMagic(SetupPackage):
    """System requirement: libmagic, detected via the ``file`` utility."""

    name = 'libmagic'
    pkg_names = {
        "apt-get": 'libmagic-dev',
        "yum": 'file',
        "dnf": 'file',
        "pacman": None,
        "zypper": None,
        "brew": 'libmagic',
        "port": None,
        "windows_url": None
    }

    def check(self):
        location = which('file')
        if location is None:
            raise CheckFailed('Needs to be installed manually.')
        return 'Found "file" utility at {0}.'.format(location)
class PythonMagic(SetupPackage):
    """Install requirement: the ``python-magic`` libmagic bindings."""
    name = 'python-magic'
    def check(self):
        # Pure-Python dependency; pip can always install it.
        return 'Will be installed with pip.'
    def get_install_requires(self):
        return ['python-magic']
class Six(SetupPackage):
    """Install requirement: the ``six`` py2/py3 compatibility layer."""
    name = 'six'
    def check(self):
        return 'Will be installed with pip.'
    def get_install_requires(self):
        # 1.8.0 is the minimum version required here.
        return ['six>=1.8.0']
class ExifTool(SetupPackage):
    """System requirement: the ``exiftool`` metadata utility."""

    name = 'exiftool'
    pkg_names = {
        "apt-get": 'exiftool',
        "yum": 'perl-Image-ExifTool',
        "dnf": 'perl-Image-ExifTool',
        "pacman": None,
        "zypper": None,
        "brew": 'exiftool',
        "port": 'p5-image-exiftool',
        "windows_url": 'http://www.sno.phy.queensu.ca/~phil/exiftool/'
    }

    def check(self):
        tool = which('exiftool')
        if tool is None:
            raise CheckFailed('Needs to be installed manually.')
        return 'Found at {0}.'.format(tool)
class Pillow(SetupPackage):
    """Install requirement: the Pillow imaging library."""
    name = 'pillow'
    def check(self):
        return 'Will be installed with pip.'
    def get_install_requires(self):
        # 2.5.0 is the minimum version required here.
        return ['pillow>=2.5.0']
class Numpy(SetupPackage):
    """Install requirement: numpy."""
    name = 'numpy'
    def check(self):
        return 'Will be installed with pip.'
    def get_install_requires(self):
        # 1.7.2 is the minimum version required here.
        return ['numpy>=1.7.2']
class Dlib(SetupPackage):
    """Install requirement: the dlib machine-learning toolkit."""
    name = 'dlib'
    def check(self):
        # Note: pip builds dlib from source, which needs a C++ toolchain.
        return 'Will be installed with pip.'
    def get_install_requires(self):
        return ['dlib']
class ScikitImage(SetupPackage):
    """Install requirement: scikit-image plus dependencies pip may miss."""

    name = 'scikit-image'

    def check(self):
        return 'Will be installed with pip.'

    def get_install_requires(self):
        # scipy and matplotlib are listed explicitly because pip does not
        # always pull them in for scikit-image:
        # https://github.com/scikit-image/scikit-image/issues/2155
        return ['scipy', 'matplotlib', 'scikit-image>=0.12']
class MagickWand(SetupPackage):
    """System requirement: the MagickWand (ImageMagick) C library."""

    name = 'magickwand'
    pkg_names = {
        "apt-get": 'libmagickwand-dev',
        "yum": 'ImageMagick-devel',
        "dnf": 'ImageMagick-devel',
        "pacman": None,
        "zypper": None,
        "brew": 'imagemagick',
        "port": 'imagemagick',
        "windows_url": ("http://docs.wand-py.org/en/latest/guide/"
                        "install.html#install-imagemagick-on-windows")
    }

    def check(self):
        # `wand` only verifies MagickWand at import time, not at install
        # time.  See https://github.com/dahlia/wand/issues/293
        version = pkg_config.get_version("MagickWand")
        if version is None:
            raise CheckFailed('Needs to be installed manually.')
        return 'Found with pkg-config.'
class Wand(SetupPackage):
    """Install requirement: the ``wand`` ImageMagick bindings."""
    name = 'wand'
    def check(self):
        return 'Will be installed with pip.'
    def get_install_requires(self):
        return ['wand']
class PyColorName(SetupPackage):
    """Install requirement: the ``pycolorname`` package."""
    name = 'pycolorname'
    def check(self):
        return 'Will be installed with pip.'
    def get_install_requires(self):
        return ['pycolorname']
class LibZBar(SetupPackage):
    """System requirement: the zbar barcode-scanning shared library."""

    name = 'libzbar'
    pkg_names = {
        "apt-get": 'libzbar-dev',
        "yum": 'zbar-devel',
        "dnf": 'zbar-devel',
        "pacman": None,
        "zypper": None,
        "brew": 'zbar',
        "port": None,
        "windows_url": None
    }

    def check(self):
        library = ctypes.util.find_library('zbar')
        if library is None:
            raise CheckFailed('Needs to be installed manually.')
        return 'Found {0}.'.format(library)
class ZBar(SetupPackage):
    """Install requirement: the ``zbar`` Python bindings."""
    name = 'zbar'
    def check(self):
        return 'Will be installed with pip.'
    def get_install_requires(self):
        return ['zbar']
class JavaJRE(SetupPackage):
    """System requirement: a Java runtime (needed to run the zxing jars)."""

    name = 'java'
    pkg_names = {
        "apt-get": 'default-jre',
        "yum": 'java',
        "dnf": 'java',
        "pacman": None,
        "zypper": None,
        "brew": None,
        "port": None,
        "windows_url": "https://java.com/download/"
    }

    def check(self):
        runtime = which('java')
        if runtime is None:
            raise CheckFailed('Needs to be installed manually.')
        return 'Found at {0}.'.format(runtime)
class ZXing(SetupPackage):
    # Fetches the ZXing barcode-reading jars (and jcommander) from Maven
    # Central into the package's data folder.
    name = 'zxing'
    def check(self):
        return 'Will be downloaded from their maven repositories.'
    @staticmethod
    def download_jar(data_folder, path, name, ver, **kwargs):
        """Download ``<name>-<ver>.jar`` from Maven Central into
        *data_folder* and return the local file name.

        Extra keyword arguments (e.g. ``sha1=``) are forwarded to the
        project-level ``download`` helper, which presumably verifies the
        checksum and raises on failure — TODO confirm against its
        definition elsewhere in this file.
        """
        data = {'name': name, 'ver': ver, 'path': path}
        fname = os.path.join(data_folder, '{name}-{ver}.jar'.format(**data))
        url = ('http://central.maven.org/maven2/{path}/{name}/{ver}/'
               '{name}-{ver}.jar'.format(**data))
        download(url, fname, **kwargs)
        return fname
    def get_data_files(self):
        """Download the three jars and return a status message.

        NOTE(review): ``download_jar`` always returns a non-empty path, so
        the falsy checks below appear unreachable unless ``download``
        raises — verify the intended failure contract.
        """
        msg = 'Unable to download "{0}" correctly.'
        if not self.download_jar(
                data_path(), 'com/google/zxing', 'core', '3.2.1',
                sha1='2287494d4f5f9f3a9a2bb6980e3f32053721b315'):
            return msg.format('zxing-core')
        if not self.download_jar(
                data_path(), 'com/google/zxing', 'javase', '3.2.1',
                sha1='78e98099b87b4737203af1fcfb514954c4f479d9'):
            return msg.format('zxing-javase')
        if not self.download_jar(
                data_path(), 'com/beust', 'jcommander', '1.48',
                sha1='bfcb96281ea3b59d626704f74bc6d625ff51cbce'):
            return msg.format('jcommander')
        return 'Successfully downloaded zxing-javase, zxing-core, jcommander.'
class FFProbe(SetupPackage):
    """System requirement: ffprobe (or libav's avprobe equivalent)."""

    name = 'ffprobe'
    pkg_names = {
        "apt-get": 'libav-tools',
        "yum": ('ffmpeg', 'This requires the RPMFusion repo to be enabled.'),
        "dnf": ('ffmpeg', 'This requires the RPMFusion repo to be enabled.'),
        "pacman": None,
        "zypper": None,
        "brew": 'ffmpeg',
        "port": None,
        "windows_url": None
    }

    def check(self):
        # Accept either the ffmpeg or the libav flavour of the tool.
        probe = which('ffprobe') or which('avprobe')
        if probe is None:
            raise CheckFailed('Needs to be installed manually.')
        return 'Found at {0}.'.format(probe)
| mit |
elektito/pybtracker | pybtracker/client.py | 1 | 13320 | import asyncio
import os
import struct
import logging
import random
import cmd
import argparse
from urllib.parse import urlparse
from collections import defaultdict
from ipaddress import ip_address
from datetime import datetime, timedelta
from version import __version__
class ServerError(Exception):
    """Raised when the tracker replies with an error action (action == 3)."""
    pass
class UdpTrackerClientProto(asyncio.Protocol):
    """asyncio datagram protocol implementing the client side of the UDP
    tracker protocol (BEP 15): connect and announce exchanges with
    retransmission and exponential backoff.

    Replies are matched to requests via a 32-bit transaction id; each
    in-flight id maps to an asyncio.Event in ``sent_msgs``.
    """
    def __init__(self, client):
        self.client = client
        self.received_msg = None        # (action, tid, payload) of last reply
        self.sent_msgs = {}             # transaction id -> asyncio.Event
        self.logger = self.client.logger
        self.connection_lost_received = asyncio.Event()
    def connection_made(self, transport):
        self.transport = transport
    def connection_lost(self, exc):
        self.connection_lost_received.set()
    def datagram_received(self, data, addr):
        # Every reply carries at least an action and the transaction id.
        if len(data) < 8:
            self.logger.warning('Invalid datagram received.')
            return
        action, tid = struct.unpack('!II', data[:8])
        if tid in self.sent_msgs:
            self.received_msg = (action, tid, data[8:])
            self.sent_msgs[tid].set()
        else:
            self.logger.warning('Invalid transaction ID received.')
    def error_received(self, exc):
        self.logger.info('UDP client transmision error: {}'.format(exc))
    def get_tid(self):
        """Allocate a transaction id not currently in flight and register an
        event that is set when its reply arrives."""
        tid = random.randint(0, 0xffffffff)
        while tid in self.sent_msgs:
            tid = random.randint(0, 0xffffffff)
        self.sent_msgs[tid] = asyncio.Event()
        return tid
    async def send_msg(self, msg, tid):
        """Send *msg* and wait for its reply, retransmitting with exponential
        backoff (15 * 2**n seconds) up to ``max_retransmissions`` attempts.

        Raises TimeoutError when every attempt times out.
        """
        n = 0
        timeout = 15
        for i in range(self.client.max_retransmissions):
            try:
                self.transport.sendto(msg)
                await asyncio.wait_for(
                    self.sent_msgs[tid].wait(),
                    timeout=timeout)
                del self.sent_msgs[tid]
            except asyncio.TimeoutError:
                if n >= self.client.max_retransmissions - 1:
                    del self.sent_msgs[tid]
                    raise TimeoutError('Tracker server timeout.')
                action = int.from_bytes(msg[8:12], byteorder='big')
                if action != 0:  # if not CONNECT
                    # Re-connect if our connection id may have expired while
                    # we were waiting.
                    delta = timedelta(seconds=self.client.connid_valid_period)
                    if self.client.connid_timestamp < datetime.now() - delta:
                        await self.connect()
                n += 1
                timeout = 15 * 2 ** n
                self.logger.info(
                    'Request timeout. Retransmitting. '
                    '(try #{}, next timeout {} seconds)'.format(n, timeout))
            else:
                return
    async def connect(self):
        """Perform the CONNECT exchange and store the connection id (and its
        timestamp) on the client; raises ServerError on an error reply."""
        self.logger.info('Sending connect message.')
        tid = self.get_tid()
        msg = struct.pack('!QII', 0x41727101980, 0, tid)
        await self.send_msg(msg, tid)
        if self.received_msg:
            action, tid, data = self.received_msg
            if action == 3:
                # BUGFIX: Logger.warn is a deprecated alias; use warning()
                # consistently with the rest of this class.
                self.logger.warning('An error was received in reply to connect: {}'
                                    .format(data.decode()))
                self.client.connid = None
                raise ServerError(
                    'An error was received in reply to connect: {}'
                    .format(data.decode()))
            else:
                self.client.callback('connected')
                self.client.connid = int.from_bytes(data, byteorder='big')
                self.client.connid_timestamp = datetime.now()
                self.received_msg = None
        else:
            self.logger.info('No reply received.')
    async def announce(self, infohash, num_want, downloaded, left, uploaded,
                       event=0, ip=0):
        """Perform the ANNOUNCE exchange for *infohash*.

        Returns a list of (ip, port) peers, or None when no usable reply
        was received; raises ServerError on an error reply.
        """
        if not self.client.interval or not self.client.connid or \
                datetime.now() > self.client.connid_timestamp + \
                timedelta(seconds=self.client.connid_valid_period):
            # get a connection id first
            await self.connect()
            if not self.client.connid:
                self.logger.info('No reply to connect message.')
                return
        self.logger.info('Sending announce message.')
        action = 1
        tid = self.get_tid()
        port = self.transport._sock.getsockname()[1]
        key = random.randint(0, 0xffffffff)
        ip = int.from_bytes(ip_address(ip).packed, byteorder='big')
        msg = struct.pack('!QII20s20sQQQIIIIH', self.client.connid, action, tid,
                          infohash, self.client.peerid, downloaded, left,
                          uploaded, event, ip, key, num_want, port)
        await self.send_msg(msg, tid)
        if self.received_msg:
            action, tid, data = self.received_msg
            if action == 3:
                self.logger.warning('An error was received in reply to announce: {}'
                                    .format(data.decode()))
                raise ServerError(
                    'An error was received in reply to announce: {}'
                    .format(data.decode()))
            else:
                # Reply: interval, leechers, seeders, then 6 bytes per peer
                # (4-byte IPv4 address + 2-byte port, network order).
                if len(data) < 12:
                    self.logger.warning('Invalid announce reply received. Too short.')
                    return None
                self.client.interval, leechers, seeders = struct.unpack('!III', data[:12])
                self.received_msg = None
                data = data[12:]
                if len(data) % 6 != 0:
                    self.logger.warning(
                        'Invalid announce reply received. Invalid length.')
                    return None
                peers = [data[i:i+6] for i in range(0, len(data), 6)]
                peers = [(str(ip_address(p[:4])), int.from_bytes(p[4:], byteorder='big'))
                         for p in peers]
                self.client.callback('announced', infohash, peers)
        else:
            peers = None
            self.logger.info('No reply received to announce message.')
        return peers
class TrackerClient:
    """High-level UDP BitTorrent tracker client.

    Parses a ``udp://host[:port]`` announce URI (port defaults to 80),
    generates a random 20-byte peer id, and delegates the wire protocol to
    UdpTrackerClientProto.  Supports 'connected' and 'announced' callbacks.
    """
    def __init__(self,
                 announce_uri,
                 max_retransmissions=8,
                 loop=None):
        self.logger = logging.getLogger(__name__)
        scheme, netloc, _, _, _, _ = urlparse(announce_uri)
        if scheme != 'udp':
            raise ValueError('Tracker scheme not supported: {}'.format(scheme))
        if ':' not in netloc:
            self.logger.info('Port not specified in announce URI. Assuming 80.')
            tracker_host, tracker_port = netloc, 80
        else:
            tracker_host, tracker_port = netloc.split(':')
            tracker_port = int(tracker_port)
        self.server_addr = tracker_host, tracker_port
        self.max_retransmissions = max_retransmissions
        if loop:
            self.loop = loop
        else:
            self.loop = asyncio.get_event_loop()
        self.allowed_callbacks = ['connected', 'announced']
        self.connid_valid_period = 60
        self.callbacks = defaultdict(list)
        self.connid = None
        self.connid_timestamp = None
        self.interval = None
        self.peerid = os.urandom(20)
    def callback(self, cb, *args):
        """Invoke every callback registered under *cb* with *args*."""
        if cb not in self.allowed_callbacks:
            raise ValueError('Invalid callback: {}'.format(cb))
        for c in self.callbacks[cb]:
            c(*args)
    def add_callback(self, name, func):
        """Register *func* under the callback *name*."""
        if name not in self.allowed_callbacks:
            # BUGFIX: the message referenced an undefined variable ``cb``,
            # which turned the intended ValueError into a NameError.
            raise ValueError('Invalid callback: {}'.format(name))
        self.callbacks[name].append(func)
    def rm_callback(self, name, func):
        """Unregister *func* from the callback *name*."""
        if name not in self.allowed_callbacks:
            # BUGFIX: same undefined ``cb`` reference as add_callback.
            raise ValueError('Invalid callback: {}'.format(name))
        self.callbacks[name].remove(func)
    async def start(self):
        """Create the UDP endpoint towards the tracker."""
        self.transport, self.proto = await self.loop.create_datagram_endpoint(
            lambda: UdpTrackerClientProto(self),
            remote_addr=self.server_addr)
    async def stop(self):
        """Close the UDP endpoint and wait until the transport is gone."""
        self.transport.close()
        await self.proto.connection_lost_received.wait()
    async def announce(self, infohash, downloaded, left, uploaded, event,
                       num_want=160):
        """Announce to the tracker; returns the peer list (see proto)."""
        return await self.proto.announce(
            infohash, num_want, downloaded, left, uploaded, event)
    async def connect(self):
        """Obtain (or refresh) a connection id from the tracker."""
        return await self.proto.connect()
def hex_encoded_infohash(v):
    """argparse type: decode a 40-hex-digit string into a 20-byte infohash.

    Raises ValueError on malformed hex or a wrong-length digest.
    """
    decoded = bytes.fromhex(v)
    if len(decoded) != 20:
        raise ValueError
    return decoded
class NiceArgumentParser(argparse.ArgumentParser):
    """ArgumentParser that raises ArgumentError instead of exiting the
    process — suitable for use inside an interactive shell."""

    def error(self, message):
        self.print_usage()
        notice = '{}: error: {}'.format(self.prog, message)
        print(notice)
        raise argparse.ArgumentError(None, message)
class ClientShell(cmd.Cmd):
    """Interactive command shell around TrackerClient.

    NOTE: the one-line docstrings of the ``do_*`` methods double as the
    runtime help text shown by the ``cmd`` framework.
    """
    intro = 'BitTorrent tracker client. Type help or ? to list commands.\n'
    prompt = '(btrc) '
    file = None
    def __init__(self, args):
        super().__init__()
        # The shell drives the asyncio client synchronously via
        # run_until_complete on the default event loop.
        self.loop = asyncio.get_event_loop()
        self.client = TrackerClient(args.tracker_uri)
        self.loop.run_until_complete(self.client.start())
        self.is_closed = False
    def do_connect(self, arg):
        'Obtain a connection ID from the tracker.'
        self.loop.run_until_complete(self.client.connect())
        if self.client.connid:
            print('Connection ID:', self.client.connid)
        else:
            print('No connection ID.')
    def do_announce(self, arg):
        'Announce an event to the tracker.'
        # Sub-arguments of the announce command are parsed with a parser
        # that raises instead of exiting (NiceArgumentParser).
        parser = NiceArgumentParser(description='Announce to tracker.')
        parser.add_argument(
            'infohash', type=hex_encoded_infohash,
            help='The infohash of the torrent to announce in hex-encoded '
            'format.')
        parser.add_argument(
            'downloaded', type=int,
            help='Downloaded bytes to announce.')
        parser.add_argument(
            'left', type=int,
            help='Left bytes to announce.')
        parser.add_argument(
            'uploaded', type=int,
            help='Uploaded bytes to announce.')
        parser.add_argument(
            '--num-want', '-n', type=int, default=160,
            help='Maximum number of peers to peers to request. '
            'Defaults to 160.')
        parser.add_argument(
            '--event', '-e', default='none',
            choices=['none', 'completed', 'started', 'stopped'],
            help='The event to announce. Defaults to "none".')
        try:
            args = parser.parse_args(arg.split())
        except argparse.ArgumentError:
            return
        # Map the event name to its BEP 15 numeric code (position == code).
        args.event = [
            'none',
            'completed',
            'started',
            'stopped'
        ].index(args.event)
        try:
            ret = self.loop.run_until_complete(self.client.announce(
                args.infohash,
                args.downloaded,
                args.left,
                args.uploaded,
                args.event,
                args.num_want))
            if ret:
                print('Received {} peer(s) from the tracker:'.format(len(ret)))
                for host, port in ret:
                    print('   {}:{}'.format(host, port))
            else:
                print('No peers received from the tracker.')
        except ServerError as e:
            print(e)
        except TimeoutError:
            print('Request timed out.')
    def do_EOF(self, arg):
        'Quit the shell.'
        print()
        self.close()
        return True
    def do_quit(self, arg):
        'Quit the shell.'
        self.close()
        return True
    def close(self):
        # Shut down the UDP endpoint and the event loop exactly once.
        self.loop.run_until_complete(self.client.stop())
        self.loop.close()
        self.is_closed = True
def setup_logging(args):
    """Configure the module logger from parsed command line options.

    Honours ``args.log_to_stdout``, ``args.log_file`` and
    ``args.log_level`` (one of debug/info/warning/error/critical).
    """
    import sys

    logger = logging.getLogger(__name__)
    formatter = logging.Formatter(
        '%(asctime) -15s - %(levelname) -8s - %(message)s')
    level = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
    }[args.log_level]

    def attach(handler):
        # Shared setup for every destination handler.
        handler.setFormatter(formatter)
        handler.setLevel(level)
        logger.addHandler(handler)

    if args.log_to_stdout:
        attach(logging.StreamHandler(sys.stdout))
    if args.log_file:
        attach(logging.FileHandler(args.log_file))
    logger.setLevel(level)
def main():
    """Command line entry point: parse arguments, configure logging and run
    the interactive client shell until EOF/quit or Ctrl-C."""
    parser = argparse.ArgumentParser(description='UDP tracker.')
    parser.add_argument(
        'tracker_uri', metavar='URI',
        help='The tracker URI.')
    parser.add_argument(
        '--log-to-stdout', '-O', action='store_true', default=False,
        help='Log to standard output.')
    parser.add_argument('--log-file', '-l', help='Log to the specified file.')
    parser.add_argument(
        '--log-level', '-L', default='info',
        choices=['debug', 'info', 'warning', 'error', 'critical'],
        help='Set log level. Defaults to "info".')
    parser.add_argument(
        '--version', '-V', action='version',
        version='pybtracker v' + __version__)
    args = parser.parse_args()
    setup_logging(args)
    shell = ClientShell(args)
    try:
        shell.cmdloop()
    except KeyboardInterrupt:
        print()
    finally:
        # Always release the UDP endpoint and event loop, even after ^C.
        if not shell.is_closed:
            shell.close()
# Script entry point.
if __name__ == '__main__':
    main()
| mit |
bmhatfield/Diamond | src/collectors/openstackswift/openstackswift.py | 31 | 3996 | # coding=utf-8
"""
Openstack swift collector.
#### Dependencies
* swift-dispersion-report commandline tool (for dispersion report)
if using this, make sure swift.conf and dispersion.conf are readable by
diamond also get an idea of the runtime of a swift-dispersion-report call
and make sure the collect interval is high enough to avoid contention.
* swift commandline tool (for container_metrics)
both of these should come installed with swift
"""
import diamond.collector
from subprocess import Popen, PIPE
try:
import json
except ImportError:
import simplejson as json
class OpenstackSwiftCollector(diamond.collector.Collector):
    # Diamond collector for OpenStack Swift: publishes dispersion-report
    # metrics and per-container stats gathered via the swift CLI tools.
    # NOTE(review): Popen output is handled as text — this is Python 2 era
    # code; under Python 3 `stdout.split('\n')` would need decoding first.
    def get_default_config_help(self):
        config_help = super(OpenstackSwiftCollector,
                            self).get_default_config_help()
        config_help.update({
            'enable_dispersion_report': 'gather swift-dispersion-report ' +
                                        'metrics (default False)',
            'enable_container_metrics': 'gather containers metrics ' +
                                        '(# objects, bytes used, ' +
                                        'x_timestamp. default True)',
            'auth_url': 'authentication url (for enable_container_metrics)',
            'account': 'swift auth account (for enable_container_metrics)',
            'user': 'swift auth user (for enable_container_metrics)',
            'password': 'swift auth password (for enable_container_metrics)',
            'containers': 'containers on which to count number of objects, ' +
                          'space separated list (for enable_container_metrics)'
        })
        return config_help
    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(OpenstackSwiftCollector, self).get_default_config()
        config.update({
            'path': 'openstackswift',
            'enable_dispersion_report': False,
            'enable_container_metrics': True,
            # don't use the threaded model with this one.
            # for some reason it crashes.
            'interval': 1200,  # by default, every 20 minutes
        })
        return config
    def collect(self):
        # dispersion report. this can take easily >60s. beware!
        if (self.config['enable_dispersion_report']):
            p = Popen(
                ['swift-dispersion-report', '-j'],
                stdout=PIPE,
                stderr=PIPE)
            stdout, stderr = p.communicate()
            # Each stderr line is counted as one error.
            self.publish('dispersion.errors', len(stderr.split('\n')) - 1)
            data = json.loads(stdout)
            for t in ('object', 'container'):
                for (k, v) in data[t].items():
                    self.publish('dispersion.%s.%s' % (t, k), v)
        # container metrics returned by stat <container>
        if(self.config['enable_container_metrics']):
            account = '%s:%s' % (self.config['account'], self.config['user'])
            for container in self.config['containers'].split(','):
                cmd = ['swift', '-A', self.config['auth_url'],
                       '-U', account,
                       '-K', self.config['password'],
                       'stat', container]
                p = Popen(cmd, stdout=PIPE, stderr=PIPE)
                stdout, stderr = p.communicate()
                stats = {}
                # stdout is some lines in 'key : val' format
                for line in stdout.split('\n'):
                    if line:
                        # NOTE(review): maxsplit=2 keeps only the first value
                        # segment when a value itself contains ':' — confirm
                        # no monitored header value needs the remainder.
                        line = line.split(':', 2)
                        stats[line[0].strip()] = line[1].strip()
                key = 'container_metrics.%s.%s' % (self.config['account'],
                                                   container)
                self.publish('%s.objects' % key, stats['Objects'])
                self.publish('%s.bytes' % key, stats['Bytes'])
                self.publish('%s.x_timestamp' % key, stats['X-Timestamp'])
| mit |
haad/ansible | lib/ansible/modules/network/f5/bigip_monitor_http.py | 3 | 18788 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata consumed by ansible-doc tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_http
short_description: Manages F5 BIG-IP LTM http monitors
description: Manages F5 BIG-IP LTM http monitors.
version_added: "2.5"
options:
name:
description:
- Monitor name.
required: True
aliases:
- monitor
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(http)
parent on the C(Common) partition.
default: "/Common/http"
send:
description:
- The send string for the monitor call. When creating a new monitor, if
this value is not provided, the default C(GET /\r\n) will be used.
receive:
description:
- The receive string for the monitor call.
receive_disable:
description:
- This setting works like C(receive), except that the system marks the node
or pool member disabled when its response matches the C(receive_disable)
string but not C(receive). To use this setting, you must specify both
C(receive_disable) and C(receive).
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
must be specified.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
target_username:
description:
- Specifies the user name, if the monitored target requires authentication.
target_password:
description:
- Specifies the password, if the monitored target requires authentication.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create HTTP Monitor
bigip_monitor_http:
state: present
ip: 10.10.10.10
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
delegate_to: localhost
- name: Remove HTTP Monitor
bigip_monitor_http:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
delegate_to: localhost
- name: Include a username and password in the HTTP monitor
bigip_monitor_http:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_http_monitor
target_username: monitor_user
target_password: monitor_pass
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: http
ip:
description: The new IP of IP/port definition.
returned: changed
type: string
sample: 10.12.13.14
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
HAS_DEVEL_IMPORTS = False
try:
# Sideband repository used for dev
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fqdn_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
HAS_DEVEL_IMPORTS = True
except ImportError:
# Upstream Ansible
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fqdn_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
    """Maps module options to/from the F5 REST API representation for the
    http monitor resource."""

    # API attribute name -> module option name.
    api_map = {
        'timeUntilUp': 'time_until_up',
        'defaultsFrom': 'parent',
        'recv': 'receive'
    }
    # Attributes sent to the API on create/modify.
    api_attributes = [
        'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'recv', 'send',
        'destination', 'username', 'password'
    ]
    # Values reported back to the user in the module result.
    returnables = [
        'parent', 'send', 'receive', 'ip', 'port', 'interval', 'timeout',
        'time_until_up'
    ]
    # Properties compared by Difference to decide whether an update is needed.
    updatables = [
        'destination', 'send', 'receive', 'interval', 'timeout', 'time_until_up',
        'target_username', 'target_password'
    ]

    def _fqdn_name(self, value):
        # Prefix bare names with the partition, e.g. "http" -> "/Common/http".
        if value is not None and not value.startswith('/'):
            return '/{0}/{1}'.format(self.partition, value)
        return value

    def to_return(self):
        """Return a dict of returnable values (best effort)."""
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            # Best effort: return whatever was collected so far.
            pass
        return result

    @property
    def destination(self):
        # Combined "ip:port" form used by the BIG-IP API.
        if self.ip is None and self.port is None:
            return None
        destination = '{0}:{1}'.format(self.ip, self.port)
        return destination

    @destination.setter
    def destination(self, value):
        ip, port = value.split(':')
        self._values['ip'] = ip
        self._values['port'] = port

    @property
    def interval(self):
        if self._values['interval'] is None:
            return None
        interval = int(self._values['interval'])
        # Per BZ617284, the BIG-IP UI does not raise a warning about this.
        # So I do.
        # BUGFIX: the original chained comparison ``1 > x > 86400`` can never
        # be true, so out-of-range intervals were silently accepted.
        if interval < 1 or interval > 86400:
            raise F5ModuleError(
                "Interval value must be between 1 and 86400"
            )
        return interval

    @property
    def timeout(self):
        if self._values['timeout'] is None:
            return None
        return int(self._values['timeout'])

    @property
    def ip(self):
        if self._values['ip'] is None:
            return None
        try:
            # '*' / 0.0.0.0 mean "any address" on the BIG-IP.
            if self._values['ip'] in ['*', '0.0.0.0']:
                return '*'
            result = str(netaddr.IPAddress(self._values['ip']))
            return result
        except netaddr.core.AddrFormatError:
            raise F5ModuleError(
                "The provided 'ip' parameter is not an IP address."
            )

    @property
    def port(self):
        if self._values['port'] is None:
            return None
        elif self._values['port'] == '*':
            return '*'
        return int(self._values['port'])

    @property
    def time_until_up(self):
        if self._values['time_until_up'] is None:
            return None
        return int(self._values['time_until_up'])

    @property
    def parent(self):
        if self._values['parent'] is None:
            return None
        result = self._fqdn_name(self._values['parent'])
        return result

    @property
    def type(self):
        # Monitor type is fixed for this module.
        return 'http'

    @property
    def username(self):
        # API name for the monitored target's auth user.
        return self._values['target_username']

    @property
    def password(self):
        # API name for the monitored target's auth password.
        return self._values['target_password']
class Changes(Parameters):
    """Parameters subclass holding only the values that changed."""
    pass
class Difference(object):
    """Computes, property by property, the difference between the desired
    (*want*) and current (*have*) parameters."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for *param*, or None when unchanged."""
        try:
            return getattr(self, param)
        except AttributeError:
            # No specialized comparator: fall back to simple inequality.
            return self.__default(param)

    @property
    def parent(self):
        # The parent template is immutable on the BIG-IP.
        if self.want.parent != self.have.parent:
            raise F5ModuleError(
                "The parent monitor cannot be changed"
            )

    @property
    def destination(self):
        if self.want.ip is None and self.want.port is None:
            return None
        # Fill in whichever half the user omitted from the current value.
        if self.want.port is None:
            self.want.update({'port': self.have.port})
        if self.want.ip is None:
            self.want.update({'ip': self.have.ip})
        if self.want.port in [None, '*'] and self.want.ip != '*':
            raise F5ModuleError(
                "Specifying an IP address requires that a port number be specified"
            )
        if self.want.destination != self.have.destination:
            return self.want.destination

    @property
    def interval(self):
        # Validate the interval/timeout relationship using whichever side
        # of each pair the user supplied.
        if self.want.timeout is not None and self.want.interval is not None:
            if self.want.interval >= self.want.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        elif self.want.timeout is not None:
            if self.have.interval >= self.want.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        elif self.want.interval is not None:
            if self.want.interval >= self.have.timeout:
                raise F5ModuleError(
                    "Parameter 'interval' must be less than 'timeout'."
                )
        if self.want.interval != self.have.interval:
            return self.want.interval

    def __default(self, param):
        want_value = getattr(self.want, param)
        try:
            if want_value != getattr(self.have, param):
                return want_value
        except AttributeError:
            return want_value
class ModuleManager(object):
    # Orchestrates the module run: computes changes, talks to the BIG-IP
    # through the F5 SDK client, and reports results back to Ansible.
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.have = None                       # current device state
        self.want = Parameters(params=self.module.params)
        self.changes = Changes()               # accumulated differences
    def _set_changed_options(self):
        # On create: every user-supplied returnable counts as a change.
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Changes(params=changed)
    def _update_changed_options(self):
        # On update: diff want vs have over the updatable properties.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = Changes(params=changed)
            return True
        return False
    def _announce_deprecations(self):
        # Surface any deprecation warnings gathered by the parameter layer.
        warnings = []
        if self.want:
            warnings += self.want._values.get('__warnings', [])
        if self.have:
            warnings += self.have._values.get('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def exec_module(self):
        """Run the module and return the Ansible result dict."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state == "present":
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations()
        return result
    def present(self):
        # Ensure the monitor exists with the desired settings.
        if self.exists():
            return self.update()
        else:
            return self.create()
    def create(self):
        self._set_changed_options()
        # Apply the documented defaults for anything the user omitted.
        if self.want.timeout is None:
            self.want.update({'timeout': 16})
        if self.want.interval is None:
            self.want.update({'interval': 5})
        if self.want.time_until_up is None:
            self.want.update({'time_until_up': 0})
        if self.want.ip is None:
            self.want.update({'ip': '*'})
        if self.want.port is None:
            self.want.update({'port': '*'})
        if self.want.send is None:
            self.want.update({'send': 'GET /\r\n'})
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True
    def absent(self):
        # Ensure the monitor does not exist.
        if self.exists():
            return self.remove()
        return False
    def remove(self):
        if self.module.check_mode:
            return True
        self.remove_from_device()
        # Verify the deletion actually took effect.
        if self.exists():
            raise F5ModuleError("Failed to delete the monitor.")
        return True
    def read_current_from_device(self):
        """Load the monitor's current attributes from the BIG-IP."""
        resource = self.client.api.tm.ltm.monitor.https.http.load(
            name=self.want.name,
            partition=self.want.partition
        )
        result = resource.attrs
        return Parameters(params=result)
    def exists(self):
        result = self.client.api.tm.ltm.monitor.https.http.exists(
            name=self.want.name,
            partition=self.want.partition
        )
        return result
    def update_on_device(self):
        params = self.want.api_params()
        result = self.client.api.tm.ltm.monitor.https.http.load(
            name=self.want.name,
            partition=self.want.partition
        )
        result.modify(**params)
    def create_on_device(self):
        params = self.want.api_params()
        self.client.api.tm.ltm.monitor.https.http.create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )
    def remove_from_device(self):
        result = self.client.api.tm.ltm.monitor.https.http.load(
            name=self.want.name,
            partition=self.want.partition
        )
        if result:
            result.delete()
class ArgumentSpec(object):
    """Assembles the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        # Module-specific options, merged on top of the shared F5 spec below.
        monitor_spec = dict(
            name=dict(required=True),
            parent=dict(default='/Common/http'),
            send=dict(),
            receive=dict(),
            receive_disable=dict(required=False),
            ip=dict(),
            port=dict(type='int'),
            interval=dict(type='int'),
            timeout=dict(type='int'),
            time_until_up=dict(type='int'),
            target_username=dict(),
            target_password=dict(no_log=True),
            state=dict(
                default='present',
                choices=['present', 'absent']
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(monitor_spec)
def main():
    """Module entry point: validate prerequisites, run the manager, exit.

    Exits via ``module.exit_json`` on success or ``module.fail_json`` on any
    :class:`F5ModuleError` raised while talking to the device.
    """
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")
    if not HAS_NETADDR:
        module.fail_json(msg="The python netaddr module is required")

    # BUG FIX: ``client`` must be bound before the try block.  Previously,
    # if F5Client() itself raised F5ModuleError, the except clause's
    # cleanup_tokens(client) hit a NameError and masked the real failure.
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        # Only release tokens when a client was actually created.
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
| gpl-3.0 |
flavour/cedarbluff | modules/s3/fontmap/SazanamiMincho.py | 58 | 68515 | #!/usr/bin/env python
SazanamiMincho_map = [
(0, 1),
(32, 127),
(160, 384),
(402, 403),
(461, 477),
(501, 502),
(506, 512),
(592, 681),
(710, 712),
(713, 716),
(728, 734),
(884, 886),
(890, 891),
(894, 895),
(900, 907),
(908, 909),
(910, 930),
(931, 975),
(976, 978),
(981, 982),
(1013, 1014),
(1025, 1037),
(1038, 1104),
(1105, 1117),
(1118, 1159),
(1168, 1221),
(1223, 1225),
(1227, 1229),
(1232, 1260),
(1262, 1270),
(1272, 1274),
(7808, 7814),
(7922, 7924),
(8192, 8239),
(8240, 8263),
(8304, 8305),
(8308, 8335),
(8352, 8363),
(8364, 8365),
(8448, 8505),
(8531, 8579),
(8592, 8683),
(8704, 8946),
(8962, 8963),
(8967, 8968),
(8976, 8977),
(8978, 8979),
(8992, 8994),
(9312, 9451),
(9472, 9622),
(9632, 9712),
(9728, 9748),
(9754, 9840),
(9985, 9989),
(9990, 9994),
(9996, 10024),
(10025, 10060),
(10061, 10062),
(10063, 10067),
(10070, 10071),
(10072, 10079),
(10081, 10088),
(10102, 10133),
(10136, 10160),
(10161, 10175),
(12288, 12320),
(12336, 12337),
(12339, 12342),
(12353, 12439),
(12443, 12448),
(12449, 12543),
(12849, 12851),
(12857, 12858),
(12964, 12969),
(13059, 13060),
(13069, 13070),
(13076, 13077),
(13080, 13081),
(13090, 13092),
(13094, 13096),
(13099, 13100),
(13110, 13111),
(13115, 13116),
(13129, 13131),
(13133, 13134),
(13137, 13138),
(13143, 13144),
(13179, 13183),
(13198, 13200),
(13212, 13215),
(13217, 13218),
(13252, 13253),
(13259, 13260),
(13261, 13262),
(19968, 19974),
(19975, 19983),
(19984, 19987),
(19988, 19994),
(19998, 20000),
(20001, 20002),
(20003, 20005),
(20006, 20007),
(20008, 20009),
(20010, 20012),
(20013, 20019),
(20021, 20023),
(20024, 20026),
(20027, 20029),
(20031, 20038),
(20039, 20040),
(20043, 20044),
(20045, 20048),
(20049, 20050),
(20053, 20059),
(20060, 20064),
(20066, 20068),
(20072, 20074),
(20081, 20082),
(20083, 20086),
(20089, 20090),
(20094, 20097),
(20098, 20099),
(20101, 20103),
(20104, 20111),
(20113, 20115),
(20116, 20122),
(20123, 20131),
(20132, 20135),
(20136, 20137),
(20139, 20145),
(20147, 20148),
(20150, 20151),
(20153, 20155),
(20160, 20165),
(20166, 20168),
(20170, 20172),
(20173, 20177),
(20180, 20188),
(20189, 20198),
(20200, 20201),
(20205, 20212),
(20213, 20216),
(20219, 20228),
(20232, 20243),
(20245, 20248),
(20249, 20251),
(20252, 20254),
(20270, 20274),
(20275, 20287),
(20288, 20289),
(20290, 20292),
(20294, 20298),
(20299, 20321),
(20323, 20324),
(20329, 20331),
(20332, 20333),
(20334, 20338),
(20339, 20340),
(20341, 20352),
(20353, 20359),
(20360, 20373),
(20374, 20380),
(20381, 20386),
(20395, 20396),
(20397, 20400),
(20402, 20403),
(20405, 20408),
(20409, 20410),
(20411, 20423),
(20424, 20435),
(20436, 20437),
(20439, 20441),
(20442, 20446),
(20447, 20454),
(20462, 20465),
(20466, 20468),
(20469, 20471),
(20472, 20473),
(20474, 20475),
(20476, 20482),
(20484, 20488),
(20489, 20501),
(20502, 20512),
(20513, 20527),
(20528, 20529),
(20530, 20532),
(20533, 20535),
(20537, 20538),
(20539, 20540),
(20544, 20548),
(20549, 20555),
(20556, 20557),
(20558, 20564),
(20565, 20568),
(20569, 20571),
(20572, 20573),
(20575, 20577),
(20578, 20580),
(20581, 20584),
(20586, 20587),
(20588, 20590),
(20592, 20595),
(20596, 20599),
(20600, 20601),
(20605, 20606),
(20608, 20610),
(20611, 20615),
(20618, 20619),
(20621, 20629),
(20630, 20631),
(20632, 20637),
(20638, 20643),
(20650, 20651),
(20652, 20654),
(20655, 20657),
(20658, 20662),
(20663, 20664),
(20665, 20667),
(20669, 20671),
(20672, 20673),
(20674, 20678),
(20679, 20680),
(20681, 20683),
(20684, 20690),
(20691, 20695),
(20696, 20697),
(20698, 20699),
(20700, 20704),
(20706, 20714),
(20717, 20720),
(20721, 20723),
(20724, 20727),
(20729, 20732),
(20734, 20735),
(20736, 20741),
(20742, 20746),
(20747, 20751),
(20752, 20753),
(20754, 20755),
(20756, 20768),
(20769, 20770),
(20771, 20772),
(20775, 20777),
(20778, 20779),
(20780, 20782),
(20783, 20784),
(20785, 20790),
(20791, 20797),
(20799, 20817),
(20818, 20822),
(20823, 20825),
(20826, 20827),
(20828, 20829),
(20831, 20832),
(20834, 20835),
(20836, 20839),
(20840, 20847),
(20849, 20850),
(20853, 20857),
(20860, 20861),
(20862, 20863),
(20864, 20865),
(20866, 20871),
(20873, 20884),
(20885, 20890),
(20893, 20894),
(20896, 20903),
(20904, 20910),
(20912, 20921),
(20922, 20923),
(20924, 20928),
(20930, 20931),
(20932, 20935),
(20936, 20938),
(20939, 20942),
(20943, 20944),
(20945, 20948),
(20949, 20951),
(20952, 20953),
(20955, 20959),
(20960, 20963),
(20965, 20968),
(20969, 20971),
(20972, 20975),
(20976, 20987),
(20989, 20991),
(20992, 21001),
(21002, 21004),
(21006, 21007),
(21009, 21017),
(21021, 21022),
(21026, 21027),
(21028, 21030),
(21031, 21035),
(21038, 21039),
(21040, 21044),
(21045, 21053),
(21059, 21062),
(21063, 21064),
(21065, 21070),
(21071, 21072),
(21076, 21081),
(21082, 21085),
(21086, 21090),
(21091, 21095),
(21097, 21099),
(21102, 21110),
(21111, 21114),
(21117, 21118),
(21119, 21121),
(21122, 21124),
(21125, 21126),
(21127, 21131),
(21132, 21134),
(21137, 21145),
(21146, 21149),
(21151, 21153),
(21155, 21160),
(21161, 21166),
(21167, 21170),
(21172, 21183),
(21184, 21186),
(21187, 21194),
(21196, 21198),
(21199, 21200),
(21201, 21203),
(21204, 21210),
(21211, 21227),
(21228, 21229),
(21232, 21243),
(21246, 21252),
(21253, 21257),
(21258, 21262),
(21263, 21266),
(21267, 21268),
(21269, 21282),
(21283, 21286),
(21287, 21294),
(21295, 21300),
(21301, 21302),
(21304, 21316),
(21317, 21326),
(21329, 21333),
(21335, 21341),
(21342, 21343),
(21344, 21346),
(21347, 21348),
(21349, 21351),
(21353, 21354),
(21356, 21366),
(21367, 21370),
(21371, 21372),
(21374, 21376),
(21378, 21381),
(21383, 21385),
(21390, 21391),
(21395, 21397),
(21398, 21399),
(21400, 21403),
(21405, 21406),
(21407, 21410),
(21412, 21415),
(21416, 21420),
(21421, 21425),
(21426, 21433),
(21434, 21436),
(21437, 21438),
(21440, 21441),
(21442, 21444),
(21445, 21446),
(21448, 21456),
(21458, 21464),
(21465, 21468),
(21469, 21492),
(21493, 21497),
(21498, 21499),
(21505, 21509),
(21512, 21522),
(21523, 21524),
(21530, 21532),
(21533, 21534),
(21535, 21538),
(21542, 21552),
(21553, 21554),
(21556, 21559),
(21560, 21562),
(21563, 21567),
(21568, 21569),
(21570, 21573),
(21574, 21579),
(21581, 21584),
(21585, 21586),
(21598, 21600),
(21602, 21603),
(21604, 21605),
(21606, 21612),
(21613, 21615),
(21616, 21618),
(21619, 21624),
(21627, 21630),
(21631, 21634),
(21635, 21639),
(21640, 21651),
(21653, 21655),
(21660, 21661),
(21663, 21664),
(21665, 21667),
(21668, 21680),
(21681, 21684),
(21687, 21699),
(21700, 21701),
(21702, 21707),
(21709, 21711),
(21720, 21721),
(21728, 21731),
(21733, 21735),
(21736, 21739),
(21740, 21744),
(21746, 21747),
(21750, 21751),
(21754, 21755),
(21756, 21762),
(21764, 21770),
(21772, 21777),
(21780, 21783),
(21802, 21804),
(21806, 21808),
(21809, 21812),
(21813, 21815),
(21816, 21818),
(21819, 21823),
(21824, 21826),
(21828, 21832),
(21833, 21835),
(21836, 21838),
(21839, 21842),
(21843, 21844),
(21846, 21849),
(21850, 21855),
(21856, 21858),
(21859, 21861),
(21862, 21863),
(21883, 21885),
(21886, 21893),
(21894, 21900),
(21902, 21904),
(21905, 21909),
(21911, 21915),
(21916, 21920),
(21923, 21925),
(21927, 21935),
(21936, 21937),
(21938, 21939),
(21942, 21943),
(21951, 21952),
(21953, 21954),
(21955, 21960),
(21961, 21962),
(21963, 21965),
(21966, 21967),
(21969, 21973),
(21975, 21977),
(21978, 21981),
(21982, 21984),
(21986, 21989),
(21993, 21994),
(22006, 22008),
(22009, 22010),
(22013, 22016),
(22021, 22023),
(22024, 22027),
(22029, 22035),
(22036, 22037),
(22038, 22042),
(22043, 22044),
(22057, 22058),
(22060, 22061),
(22063, 22074),
(22075, 22078),
(22079, 22085),
(22086, 22087),
(22089, 22090),
(22091, 22097),
(22100, 22101),
(22107, 22108),
(22110, 22111),
(22112, 22117),
(22118, 22119),
(22120, 22126),
(22127, 22128),
(22129, 22131),
(22132, 22134),
(22136, 22137),
(22138, 22139),
(22144, 22145),
(22148, 22153),
(22154, 22157),
(22159, 22160),
(22164, 22166),
(22169, 22171),
(22173, 22177),
(22178, 22179),
(22181, 22186),
(22187, 22191),
(22193, 22194),
(22195, 22197),
(22198, 22200),
(22204, 22205),
(22206, 22207),
(22208, 22212),
(22213, 22214),
(22216, 22226),
(22227, 22228),
(22231, 22242),
(22243, 22249),
(22251, 22252),
(22253, 22255),
(22256, 22260),
(22262, 22264),
(22265, 22267),
(22269, 22270),
(22271, 22277),
(22279, 22286),
(22287, 22288),
(22289, 22292),
(22293, 22295),
(22296, 22297),
(22298, 22302),
(22303, 22305),
(22306, 22315),
(22316, 22321),
(22323, 22325),
(22327, 22329),
(22331, 22332),
(22333, 22337),
(22338, 22339),
(22341, 22344),
(22346, 22347),
(22348, 22355),
(22361, 22362),
(22369, 22371),
(22372, 22380),
(22381, 22386),
(22387, 22390),
(22391, 22392),
(22393, 22397),
(22398, 22400),
(22401, 22404),
(22408, 22410),
(22411, 22413),
(22419, 22422),
(22423, 22424),
(22425, 22427),
(22428, 22437),
(22439, 22443),
(22444, 22445),
(22448, 22449),
(22451, 22452),
(22456, 22457),
(22461, 22462),
(22464, 22465),
(22467, 22468),
(22470, 22473),
(22475, 22477),
(22478, 22480),
(22482, 22487),
(22492, 22498),
(22499, 22501),
(22502, 22504),
(22505, 22506),
(22509, 22510),
(22512, 22513),
(22516, 22523),
(22524, 22529),
(22530, 22535),
(22536, 22542),
(22549, 22550),
(22553, 22554),
(22555, 22556),
(22557, 22562),
(22564, 22565),
(22566, 22568),
(22570, 22571),
(22573, 22574),
(22575, 22579),
(22580, 22582),
(22585, 22587),
(22589, 22590),
(22591, 22594),
(22601, 22606),
(22607, 22611),
(22612, 22614),
(22615, 22619),
(22622, 22624),
(22625, 22627),
(22628, 22629),
(22631, 22634),
(22635, 22636),
(22640, 22641),
(22642, 22643),
(22645, 22646),
(22648, 22650),
(22652, 22653),
(22654, 22658),
(22659, 22660),
(22661, 22662),
(22663, 22667),
(22668, 22670),
(22671, 22673),
(22675, 22677),
(22678, 22680),
(22684, 22691),
(22694, 22695),
(22696, 22698),
(22699, 22700),
(22702, 22703),
(22705, 22708),
(22712, 22717),
(22718, 22719),
(22721, 22723),
(22724, 22726),
(22727, 22729),
(22730, 22731),
(22732, 22735),
(22736, 22747),
(22748, 22752),
(22753, 22755),
(22756, 22758),
(22761, 22762),
(22763, 22765),
(22766, 22772),
(22775, 22776),
(22777, 22782),
(22786, 22787),
(22789, 22791),
(22793, 22797),
(22799, 22801),
(22802, 22807),
(22808, 22814),
(22817, 22822),
(22823, 22836),
(22837, 22841),
(22846, 22848),
(22851, 22853),
(22854, 22858),
(22862, 22870),
(22871, 22876),
(22877, 22884),
(22885, 22886),
(22887, 22896),
(22898, 22903),
(22904, 22906),
(22907, 22910),
(22913, 22917),
(22922, 22927),
(22930, 22932),
(22933, 22936),
(22937, 22938),
(22939, 22940),
(22941, 22942),
(22943, 22944),
(22947, 22950),
(22951, 22953),
(22956, 22961),
(22962, 22964),
(22967, 22968),
(22969, 22973),
(22974, 22975),
(22977, 22978),
(22979, 22981),
(22982, 22983),
(22984, 22988),
(22989, 22990),
(22992, 22997),
(23001, 23003),
(23004, 23008),
(23011, 23017),
(23018, 23020),
(23022, 23024),
(23025, 23027),
(23028, 23029),
(23030, 23032),
(23035, 23036),
(23039, 23042),
(23043, 23045),
(23049, 23050),
(23052, 23055),
(23057, 23060),
(23064, 23065),
(23066, 23067),
(23068, 23069),
(23070, 23073),
(23075, 23078),
(23079, 23083),
(23085, 23086),
(23087, 23089),
(23093, 23095),
(23100, 23101),
(23104, 23106),
(23108, 23114),
(23116, 23117),
(23120, 23121),
(23125, 23126),
(23130, 23131),
(23134, 23135),
(23138, 23140),
(23141, 23144),
(23146, 23147),
(23148, 23150),
(23159, 23160),
(23162, 23164),
(23166, 23168),
(23179, 23180),
(23184, 23185),
(23186, 23188),
(23190, 23191),
(23193, 23197),
(23198, 23201),
(23202, 23203),
(23207, 23208),
(23212, 23213),
(23217, 23220),
(23221, 23222),
(23224, 23225),
(23226, 23232),
(23233, 23235),
(23236, 23237),
(23238, 23239),
(23240, 23242),
(23243, 23245),
(23247, 23249),
(23254, 23256),
(23258, 23259),
(23260, 23261),
(23264, 23266),
(23267, 23268),
(23269, 23271),
(23273, 23275),
(23278, 23279),
(23285, 23287),
(23290, 23292),
(23293, 23294),
(23296, 23298),
(23304, 23306),
(23307, 23309),
(23318, 23320),
(23321, 23322),
(23323, 23324),
(23325, 23326),
(23329, 23331),
(23333, 23334),
(23338, 23339),
(23340, 23342),
(23344, 23345),
(23346, 23347),
(23348, 23349),
(23350, 23351),
(23352, 23353),
(23358, 23359),
(23360, 23362),
(23363, 23364),
(23365, 23366),
(23371, 23373),
(23376, 23379),
(23380, 23385),
(23386, 23392),
(23395, 23399),
(23400, 23402),
(23403, 23404),
(23406, 23410),
(23411, 23412),
(23413, 23414),
(23416, 23417),
(23418, 23419),
(23420, 23426),
(23427, 23442),
(23443, 23454),
(23455, 23456),
(23458, 23463),
(23464, 23466),
(23468, 23483),
(23484, 23485),
(23487, 23496),
(23497, 23498),
(23500, 23502),
(23503, 23505),
(23506, 23509),
(23510, 23516),
(23517, 23523),
(23524, 23530),
(23531, 23533),
(23534, 23538),
(23539, 23543),
(23544, 23545),
(23546, 23547),
(23549, 23552),
(23553, 23555),
(23556, 23568),
(23569, 23570),
(23571, 23572),
(23574, 23576),
(23578, 23579),
(23582, 23585),
(23586, 23589),
(23590, 23591),
(23592, 23594),
(23595, 23599),
(23600, 23603),
(23605, 23607),
(23608, 23618),
(23621, 23623),
(23624, 23625),
(23626, 23628),
(23629, 23634),
(23635, 23636),
(23637, 23638),
(23641, 23643),
(23644, 23645),
(23646, 23647),
(23648, 23654),
(23655, 23658),
(23660, 23666),
(23668, 23671),
(23673, 23678),
(23687, 23689),
(23690, 23691),
(23692, 23693),
(23695, 23699),
(23700, 23701),
(23709, 23710),
(23711, 23716),
(23718, 23719),
(23720, 23725),
(23729, 23737),
(23738, 23741),
(23742, 23743),
(23749, 23750),
(23751, 23752),
(23753, 23754),
(23755, 23756),
(23762, 23763),
(23767, 23768),
(23769, 23770),
(23773, 23774),
(23776, 23778),
(23784, 23787),
(23789, 23795),
(23796, 23799),
(23802, 23804),
(23805, 23806),
(23809, 23810),
(23814, 23816),
(23819, 23820),
(23821, 23823),
(23825, 23827),
(23828, 23836),
(23839, 23840),
(23842, 23845),
(23846, 23848),
(23849, 23850),
(23851, 23852),
(23857, 23858),
(23860, 23861),
(23865, 23866),
(23869, 23870),
(23871, 23872),
(23874, 23876),
(23878, 23879),
(23880, 23881),
(23882, 23885),
(23886, 23887),
(23888, 23891),
(23893, 23894),
(23897, 23898),
(23900, 23901),
(23903, 23907),
(23908, 23909),
(23913, 23915),
(23916, 23918),
(23919, 23921),
(23923, 23924),
(23926, 23927),
(23929, 23931),
(23934, 23936),
(23937, 23941),
(23943, 23945),
(23946, 23949),
(23952, 23953),
(23954, 23958),
(23961, 23962),
(23963, 23964),
(23965, 23966),
(23967, 23969),
(23970, 23971),
(23975, 23976),
(23979, 23981),
(23982, 23983),
(23984, 23985),
(23986, 23987),
(23988, 23989),
(23991, 23995),
(23996, 23998),
(24003, 24004),
(24007, 24008),
(24009, 24010),
(24011, 24015),
(24016, 24017),
(24018, 24020),
(24022, 24023),
(24024, 24026),
(24027, 24028),
(24029, 24031),
(24032, 24034),
(24035, 24042),
(24043, 24044),
(24046, 24047),
(24049, 24054),
(24055, 24058),
(24059, 24060),
(24061, 24063),
(24064, 24065),
(24066, 24068),
(24070, 24072),
(24075, 24078),
(24081, 24083),
(24084, 24087),
(24088, 24092),
(24093, 24094),
(24095, 24097),
(24101, 24102),
(24104, 24105),
(24107, 24108),
(24109, 24113),
(24114, 24116),
(24117, 24121),
(24125, 24127),
(24128, 24129),
(24131, 24134),
(24135, 24136),
(24137, 24138),
(24139, 24141),
(24142, 24143),
(24144, 24146),
(24148, 24153),
(24155, 24157),
(24158, 24160),
(24161, 24165),
(24168, 24169),
(24170, 24175),
(24176, 24177),
(24178, 24183),
(24184, 24194),
(24195, 24197),
(24199, 24200),
(24202, 24204),
(24206, 24208),
(24213, 24216),
(24218, 24219),
(24220, 24221),
(24224, 24225),
(24226, 24227),
(24228, 24233),
(24234, 24238),
(24241, 24242),
(24243, 24244),
(24245, 24249),
(24253, 24256),
(24257, 24260),
(24262, 24263),
(24264, 24269),
(24270, 24279),
(24282, 24292),
(24293, 24294),
(24296, 24298),
(24299, 24301),
(24304, 24306),
(24307, 24309),
(24310, 24313),
(24314, 24317),
(24318, 24320),
(24321, 24325),
(24326, 24338),
(24339, 24346),
(24347, 24350),
(24351, 24352),
(24353, 24362),
(24363, 24370),
(24372, 24377),
(24379, 24386),
(24388, 24390),
(24391, 24393),
(24394, 24395),
(24396, 24399),
(24400, 24402),
(24403, 24405),
(24406, 24410),
(24411, 24414),
(24416, 24421),
(24422, 24424),
(24425, 24430),
(24431, 24438),
(24439, 24443),
(24444, 24454),
(24455, 24462),
(24463, 24468),
(24470, 24474),
(24476, 24479),
(24480, 24483),
(24484, 24485),
(24487, 24498),
(24499, 24501),
(24503, 24506),
(24508, 24510),
(24515, 24518),
(24519, 24522),
(24523, 24526),
(24528, 24533),
(24534, 24538),
(24540, 24543),
(24544, 24547),
(24548, 24549),
(24552, 24564),
(24565, 24567),
(24568, 24569),
(24570, 24574),
(24575, 24576),
(24583, 24584),
(24586, 24587),
(24589, 24593),
(24594, 24606),
(24607, 24610),
(24612, 24620),
(24621, 24622),
(24623, 24624),
(24625, 24626),
(24627, 24628),
(24629, 24630),
(24634, 24635),
(24640, 24644),
(24646, 24654),
(24656, 24659),
(24660, 24664),
(24665, 24667),
(24669, 24670),
(24671, 24678),
(24679, 24686),
(24687, 24690),
(24693, 24694),
(24695, 24696),
(24702, 24704),
(24705, 24711),
(24712, 24719),
(24721, 24729),
(24730, 24732),
(24733, 24737),
(24738, 24747),
(24752, 24761),
(24763, 24767),
(24770, 24771),
(24772, 24780),
(24782, 24784),
(24785, 24786),
(24787, 24790),
(24792, 24804),
(24805, 24806),
(24807, 24809),
(24816, 24830),
(24832, 24836),
(24838, 24843),
(24844, 24856),
(24857, 24861),
(24862, 24867),
(24871, 24873),
(24874, 24877),
(24880, 24882),
(24884, 24888),
(24889, 24890),
(24892, 24896),
(24897, 24899),
(24900, 24911),
(24915, 24916),
(24917, 24918),
(24920, 24923),
(24925, 24929),
(24930, 24932),
(24933, 24934),
(24935, 24937),
(24939, 24941),
(24942, 24953),
(24955, 24957),
(24958, 24965),
(24967, 24968),
(24970, 24972),
(24973, 24975),
(24976, 24981),
(24982, 24987),
(24988, 24990),
(24991, 24993),
(24996, 24998),
(24999, 25007),
(25010, 25011),
(25014, 25015),
(25016, 25019),
(25020, 25021),
(25022, 25023),
(25024, 25028),
(25030, 25041),
(25045, 25046),
(25052, 25056),
(25057, 25060),
(25061, 25064),
(25065, 25066),
(25068, 25070),
(25071, 25072),
(25074, 25075),
(25076, 25077),
(25078, 25081),
(25082, 25083),
(25084, 25090),
(25091, 25093),
(25095, 25099),
(25100, 25103),
(25104, 25111),
(25114, 25124),
(25126, 25128),
(25129, 25132),
(25134, 25137),
(25138, 25141),
(25144, 25146),
(25147, 25148),
(25149, 25150),
(25151, 25157),
(25158, 25162),
(25163, 25167),
(25168, 25175),
(25176, 25177),
(25178, 25181),
(25182, 25183),
(25184, 25185),
(25187, 25189),
(25192, 25193),
(25197, 25200),
(25201, 25202),
(25203, 25204),
(25206, 25207),
(25209, 25211),
(25212, 25217),
(25218, 25221),
(25225, 25227),
(25229, 25241),
(25243, 25245),
(25246, 25247),
(25254, 25255),
(25256, 25257),
(25259, 25261),
(25265, 25266),
(25267, 25268),
(25269, 25272),
(25273, 25280),
(25282, 25283),
(25284, 25291),
(25292, 25310),
(25312, 25314),
(25322, 25323),
(25324, 25328),
(25329, 25336),
(25340, 25344),
(25345, 25349),
(25351, 25358),
(25360, 25362),
(25363, 25364),
(25366, 25367),
(25368, 25370),
(25375, 25376),
(25383, 25388),
(25389, 25390),
(25391, 25392),
(25397, 25399),
(25401, 25403),
(25404, 25408),
(25409, 25413),
(25414, 25415),
(25417, 25425),
(25426, 25430),
(25431, 25433),
(25435, 25437),
(25445, 25450),
(25451, 25455),
(25457, 25459),
(25460, 25465),
(25466, 25470),
(25471, 25473),
(25474, 25477),
(25479, 25483),
(25484, 25485),
(25486, 25489),
(25490, 25491),
(25492, 25495),
(25496, 25500),
(25502, 25520),
(25522, 25523),
(25524, 25526),
(25531, 25532),
(25533, 25535),
(25536, 25538),
(25539, 25543),
(25544, 25546),
(25550, 25559),
(25562, 25565),
(25568, 25570),
(25571, 25572),
(25573, 25574),
(25577, 25579),
(25580, 25581),
(25582, 25583),
(25586, 25591),
(25592, 25595),
(25606, 25607),
(25609, 25611),
(25613, 25614),
(25615, 25617),
(25618, 25621),
(25622, 25625),
(25628, 25629),
(25630, 25631),
(25632, 25633),
(25634, 25635),
(25636, 25639),
(25640, 25643),
(25644, 25646),
(25647, 25649),
(25652, 25655),
(25658, 25659),
(25661, 25664),
(25666, 25667),
(25675, 25676),
(25678, 25680),
(25681, 25685),
(25688, 25689),
(25690, 25694),
(25695, 25698),
(25699, 25700),
(25703, 25704),
(25705, 25706),
(25709, 25710),
(25711, 25712),
(25715, 25717),
(25718, 25719),
(25720, 25721),
(25722, 25724),
(25725, 25726),
(25731, 25732),
(25733, 25734),
(25735, 25737),
(25743, 25748),
(25749, 25750),
(25752, 25756),
(25757, 25760),
(25761, 25762),
(25763, 25767),
(25768, 25770),
(25771, 25775),
(25776, 25777),
(25778, 25780),
(25785, 25786),
(25787, 25792),
(25793, 25795),
(25796, 25798),
(25799, 25800),
(25801, 25807),
(25808, 25811),
(25812, 25814),
(25815, 25817),
(25818, 25819),
(25824, 25832),
(25833, 25835),
(25836, 25838),
(25839, 25843),
(25844, 25848),
(25850, 25852),
(25853, 25858),
(25860, 25862),
(25864, 25867),
(25871, 25872),
(25875, 25877),
(25878, 25879),
(25880, 25882),
(25883, 25888),
(25890, 25893),
(25894, 25895),
(25897, 25901),
(25902, 25904),
(25905, 25906),
(25908, 25920),
(25923, 25924),
(25925, 25926),
(25927, 25930),
(25933, 25939),
(25940, 25946),
(25949, 25953),
(25954, 25956),
(25958, 25960),
(25963, 25965),
(25968, 25969),
(25970, 25971),
(25972, 25974),
(25975, 25977),
(25978, 25979),
(25981, 25982),
(25985, 25988),
(25989, 25990),
(25991, 25995),
(25996, 25997),
(25998, 25999),
(26000, 26003),
(26005, 26006),
(26007, 26010),
(26011, 26014),
(26015, 26018),
(26019, 26024),
(26027, 26033),
(26034, 26037),
(26039, 26040),
(26041, 26042),
(26044, 26046),
(26047, 26048),
(26049, 26055),
(26056, 26058),
(26059, 26061),
(26062, 26065),
(26066, 26067),
(26068, 26069),
(26070, 26074),
(26075, 26076),
(26079, 26083),
(26085, 26090),
(26092, 26094),
(26096, 26099),
(26100, 26102),
(26105, 26108),
(26110, 26113),
(26114, 26117),
(26118, 26123),
(26124, 26128),
(26129, 26135),
(26140, 26162),
(26163, 26168),
(26169, 26170),
(26172, 26173),
(26175, 26183),
(26185, 26189),
(26190, 26192),
(26193, 26195),
(26199, 26202),
(26203, 26211),
(26212, 26221),
(26222, 26225),
(26227, 26237),
(26238, 26242),
(26243, 26245),
(26247, 26250),
(26251, 26255),
(26256, 26259),
(26262, 26270),
(26271, 26273),
(26274, 26275),
(26276, 26277),
(26278, 26279),
(26283, 26284),
(26285, 26287),
(26289, 26291),
(26292, 26294),
(26296, 26298),
(26299, 26301),
(26302, 26309),
(26311, 26314),
(26316, 26317),
(26318, 26320),
(26324, 26325),
(26326, 26327),
(26329, 26334),
(26335, 26337),
(26342, 26343),
(26344, 26346),
(26347, 26349),
(26350, 26351),
(26352, 26353),
(26354, 26358),
(26359, 26369),
(26371, 26372),
(26373, 26374),
(26375, 26378),
(26379, 26380),
(26381, 26384),
(26387, 26392),
(26393, 26394),
(26395, 26401),
(26402, 26403),
(26406, 26409),
(26410, 26415),
(26417, 26418),
(26419, 26421),
(26422, 26425),
(26426, 26427),
(26429, 26432),
(26433, 26434),
(26437, 26442),
(26444, 26445),
(26446, 26450),
(26451, 26455),
(26457, 26458),
(26460, 26471),
(26474, 26475),
(26476, 26488),
(26491, 26493),
(26494, 26496),
(26497, 26498),
(26500, 26502),
(26503, 26504),
(26505, 26506),
(26507, 26509),
(26510, 26514),
(26515, 26516),
(26517, 26526),
(26528, 26531),
(26534, 26535),
(26537, 26538),
(26543, 26554),
(26555, 26558),
(26560, 26567),
(26568, 26571),
(26574, 26581),
(26583, 26587),
(26588, 26591),
(26593, 26595),
(26596, 26597),
(26598, 26600),
(26601, 26602),
(26604, 26605),
(26606, 26616),
(26617, 26618),
(26619, 26620),
(26622, 26624),
(26625, 26629),
(26643, 26645),
(26646, 26648),
(26649, 26650),
(26653, 26656),
(26657, 26659),
(26663, 26670),
(26671, 26677),
(26680, 26682),
(26683, 26686),
(26687, 26695),
(26696, 26697),
(26698, 26699),
(26700, 26703),
(26704, 26710),
(26711, 26714),
(26715, 26718),
(26719, 26720),
(26723, 26724),
(26727, 26728),
(26731, 26732),
(26734, 26739),
(26740, 26744),
(26745, 26749),
(26750, 26752),
(26753, 26759),
(26760, 26761),
(26765, 26766),
(26767, 26768),
(26771, 26773),
(26774, 26777),
(26778, 26782),
(26783, 26788),
(26789, 26795),
(26797, 26804),
(26805, 26807),
(26809, 26813),
(26820, 26823),
(26824, 26830),
(26831, 26843),
(26844, 26846),
(26847, 26850),
(26851, 26852),
(26853, 26854),
(26855, 26857),
(26858, 26867),
(26869, 26871),
(26873, 26878),
(26880, 26882),
(26884, 26887),
(26888, 26900),
(26902, 26904),
(26905, 26909),
(26913, 26916),
(26917, 26919),
(26920, 26921),
(26922, 26923),
(26928, 26930),
(26931, 26935),
(26936, 26938),
(26939, 26940),
(26941, 26942),
(26943, 26944),
(26946, 26947),
(26949, 26950),
(26953, 26955),
(26958, 26959),
(26963, 26966),
(26967, 26968),
(26969, 26975),
(26976, 26983),
(26984, 26998),
(26999, 27011),
(27018, 27019),
(27021, 27023),
(27025, 27027),
(27028, 27031),
(27032, 27033),
(27035, 27037),
(27040, 27042),
(27045, 27049),
(27051, 27052),
(27053, 27056),
(27057, 27059),
(27060, 27061),
(27063, 27065),
(27066, 27069),
(27070, 27072),
(27073, 27074),
(27075, 27076),
(27077, 27078),
(27079, 27081),
(27082, 27087),
(27088, 27090),
(27091, 27092),
(27094, 27098),
(27101, 27103),
(27106, 27107),
(27109, 27110),
(27111, 27113),
(27115, 27116),
(27117, 27120),
(27121, 27124),
(27125, 27126),
(27129, 27130),
(27131, 27132),
(27133, 27140),
(27141, 27142),
(27146, 27149),
(27151, 27152),
(27153, 27158),
(27159, 27160),
(27161, 27164),
(27165, 27173),
(27176, 27180),
(27182, 27183),
(27184, 27185),
(27186, 27187),
(27188, 27196),
(27197, 27200),
(27204, 27212),
(27214, 27215),
(27216, 27219),
(27221, 27223),
(27224, 27226),
(27227, 27228),
(27231, 27232),
(27233, 27235),
(27236, 27237),
(27238, 27240),
(27242, 27244),
(27249, 27252),
(27256, 27257),
(27262, 27266),
(27267, 27269),
(27270, 27272),
(27273, 27274),
(27275, 27276),
(27277, 27279),
(27280, 27282),
(27287, 27288),
(27291, 27297),
(27298, 27300),
(27301, 27302),
(27306, 27309),
(27310, 27314),
(27315, 27317),
(27320, 27321),
(27323, 27324),
(27325, 27328),
(27329, 27332),
(27334, 27335),
(27336, 27338),
(27340, 27341),
(27344, 27346),
(27347, 27351),
(27354, 27360),
(27362, 27363),
(27364, 27365),
(27367, 27369),
(27370, 27371),
(27372, 27373),
(27376, 27379),
(27386, 27390),
(27394, 27400),
(27401, 27403),
(27407, 27411),
(27414, 27416),
(27419, 27420),
(27421, 27426),
(27427, 27429),
(27431, 27433),
(27435, 27437),
(27439, 27440),
(27442, 27443),
(27445, 27452),
(27453, 27456),
(27459, 27460),
(27462, 27464),
(27465, 27467),
(27468, 27471),
(27472, 27473),
(27474, 27477),
(27478, 27479),
(27480, 27482),
(27483, 27484),
(27485, 27486),
(27487, 27493),
(27494, 27496),
(27497, 27500),
(27502, 27505),
(27507, 27510),
(27512, 27514),
(27515, 27516),
(27517, 27521),
(27522, 27527),
(27529, 27532),
(27533, 27534),
(27541, 27545),
(27547, 27548),
(27550, 27553),
(27554, 27557),
(27560, 27574),
(27575, 27585),
(27587, 27591),
(27593, 27594),
(27595, 27599),
(27602, 27605),
(27606, 27607),
(27608, 27609),
(27610, 27612),
(27615, 27616),
(27617, 27618),
(27619, 27620),
(27622, 27624),
(27627, 27629),
(27630, 27632),
(27633, 27634),
(27635, 27636),
(27639, 27640),
(27641, 27642),
(27647, 27648),
(27650, 27651),
(27652, 27654),
(27656, 27658),
(27661, 27669),
(27671, 27672),
(27673, 27674),
(27675, 27676),
(27679, 27680),
(27683, 27685),
(27686, 27689),
(27692, 27693),
(27694, 27695),
(27699, 27705),
(27706, 27708),
(27710, 27715),
(27722, 27724),
(27725, 27729),
(27730, 27731),
(27732, 27734),
(27735, 27736),
(27737, 27745),
(27746, 27747),
(27751, 27753),
(27754, 27756),
(27757, 27758),
(27759, 27761),
(27762, 27765),
(27766, 27767),
(27768, 27772),
(27773, 27775),
(27777, 27780),
(27781, 27786),
(27788, 27790),
(27792, 27793),
(27794, 27805),
(27807, 27808),
(27809, 27811),
(27819, 27820),
(27822, 27823),
(27824, 27829),
(27832, 27840),
(27841, 27843),
(27844, 27847),
(27849, 27851),
(27852, 27854),
(27855, 27864),
(27865, 27870),
(27872, 27876),
(27877, 27878),
(27879, 27885),
(27886, 27893),
(27908, 27909),
(27911, 27912),
(27914, 27917),
(27918, 27920),
(27921, 27924),
(27927, 27928),
(27929, 27932),
(27934, 27936),
(27941, 27948),
(27950, 27952),
(27953, 27956),
(27957, 27959),
(27960, 27962),
(27963, 27968),
(27969, 27970),
(27972, 27974),
(27991, 27992),
(27993, 27995),
(27996, 27997),
(27998, 28000),
(28001, 28002),
(28003, 28008),
(28009, 28011),
(28012, 28013),
(28014, 28017),
(28020, 28021),
(28023, 28026),
(28028, 28029),
(28034, 28035),
(28037, 28038),
(28039, 28041),
(28044, 28045),
(28046, 28047),
(28049, 28058),
(28059, 28061),
(28074, 28075),
(28076, 28077),
(28079, 28080),
(28082, 28083),
(28084, 28086),
(28087, 28090),
(28092, 28094),
(28095, 28097),
(28100, 28105),
(28106, 28109),
(28110, 28112),
(28113, 28115),
(28117, 28119),
(28120, 28122),
(28123, 28124),
(28125, 28131),
(28132, 28135),
(28136, 28141),
(28142, 28157),
(28160, 28161),
(28164, 28166),
(28167, 28172),
(28179, 28180),
(28181, 28182),
(28185, 28188),
(28189, 28200),
(28201, 28202),
(28203, 28208),
(28210, 28211),
(28214, 28215),
(28216, 28221),
(28222, 28223),
(28227, 28230),
(28232, 28236),
(28237, 28240),
(28241, 28245),
(28246, 28249),
(28251, 28256),
(28258, 28260),
(28263, 28265),
(28267, 28268),
(28270, 28272),
(28274, 28276),
(28278, 28279),
(28283, 28284),
(28285, 28289),
(28290, 28291),
(28300, 28302),
(28303, 28305),
(28307, 28308),
(28310, 28311),
(28312, 28314),
(28316, 28318),
(28319, 28321),
(28322, 28323),
(28325, 28326),
(28327, 28328),
(28330, 28331),
(28333, 28336),
(28337, 28340),
(28342, 28344),
(28346, 28348),
(28349, 28350),
(28351, 28358),
(28359, 28368),
(28369, 28370),
(28371, 28374),
(28381, 28383),
(28395, 28400),
(28402, 28403),
(28404, 28405),
(28407, 28410),
(28411, 28412),
(28413, 28416),
(28417, 28419),
(28420, 28421),
(28422, 28423),
(28424, 28427),
(28428, 28430),
(28431, 28432),
(28433, 28434),
(28435, 28439),
(28440, 28441),
(28442, 28444),
(28448, 28449),
(28450, 28452),
(28454, 28455),
(28457, 28462),
(28463, 28468),
(28470, 28471),
(28472, 28473),
(28475, 28477),
(28478, 28480),
(28481, 28482),
(28485, 28486),
(28495, 28496),
(28497, 28501),
(28503, 28512),
(28513, 28515),
(28516, 28517),
(28518, 28519),
(28520, 28521),
(28524, 28529),
(28532, 28533),
(28536, 28537),
(28538, 28539),
(28540, 28543),
(28544, 28549),
(28550, 28553),
(28555, 28559),
(28560, 28565),
(28566, 28568),
(28570, 28571),
(28575, 28578),
(28579, 28585),
(28586, 28587),
(28590, 28594),
(28595, 28596),
(28597, 28599),
(28601, 28602),
(28604, 28605),
(28608, 28612),
(28613, 28617),
(28618, 28619),
(28628, 28630),
(28632, 28633),
(28634, 28636),
(28638, 28642),
(28644, 28645),
(28648, 28650),
(28651, 28653),
(28654, 28658),
(28659, 28660),
(28661, 28663),
(28665, 28667),
(28668, 28671),
(28672, 28674),
(28677, 28680),
(28681, 28682),
(28683, 28684),
(28685, 28686),
(28687, 28688),
(28689, 28690),
(28693, 28694),
(28695, 28697),
(28698, 28700),
(28701, 28705),
(28707, 28708),
(28710, 28713),
(28716, 28717),
(28719, 28721),
(28722, 28723),
(28724, 28725),
(28727, 28728),
(28729, 28730),
(28732, 28733),
(28734, 28735),
(28739, 28741),
(28744, 28749),
(28750, 28751),
(28753, 28754),
(28756, 28758),
(28760, 28761),
(28765, 28767),
(28771, 28774),
(28779, 28781),
(28782, 28785),
(28789, 28791),
(28792, 28793),
(28796, 28799),
(28801, 28802),
(28805, 28807),
(28809, 28811),
(28814, 28815),
(28818, 28819),
(28820, 28826),
(28827, 28828),
(28836, 28837),
(28843, 28850),
(28851, 28853),
(28855, 28860),
(28872, 28873),
(28874, 28876),
(28879, 28880),
(28881, 28882),
(28883, 28887),
(28888, 28890),
(28892, 28894),
(28895, 28896),
(28900, 28901),
(28913, 28914),
(28921, 28923),
(28925, 28926),
(28931, 28936),
(28937, 28938),
(28939, 28941),
(28943, 28944),
(28948, 28949),
(28953, 28955),
(28956, 28957),
(28958, 28959),
(28960, 28962),
(28966, 28967),
(28971, 28972),
(28973, 28974),
(28975, 28978),
(28982, 28983),
(28984, 28985),
(28988, 28989),
(28993, 28994),
(28997, 29000),
(29001, 29005),
(29006, 29007),
(29008, 29009),
(29010, 29011),
(29013, 29016),
(29017, 29019),
(29020, 29021),
(29022, 29023),
(29024, 29025),
(29026, 29027),
(29028, 29034),
(29036, 29037),
(29038, 29039),
(29049, 29050),
(29053, 29054),
(29056, 29057),
(29060, 29062),
(29063, 29065),
(29066, 29067),
(29068, 29069),
(29071, 29072),
(29074, 29075),
(29076, 29078),
(29081, 29084),
(29087, 29089),
(29090, 29091),
(29096, 29097),
(29100, 29101),
(29103, 29108),
(29113, 29115),
(29118, 29122),
(29123, 29125),
(29128, 29130),
(29131, 29133),
(29134, 29135),
(29136, 29137),
(29138, 29144),
(29145, 29147),
(29148, 29149),
(29151, 29153),
(29157, 29160),
(29164, 29167),
(29173, 29174),
(29176, 29178),
(29179, 29181),
(29182, 29185),
(29190, 29194),
(29197, 29198),
(29200, 29201),
(29203, 29204),
(29207, 29208),
(29210, 29212),
(29213, 29214),
(29215, 29216),
(29220, 29221),
(29224, 29225),
(29226, 29230),
(29231, 29233),
(29234, 29235),
(29236, 29239),
(29240, 29252),
(29253, 29257),
(29259, 29261),
(29262, 29265),
(29266, 29268),
(29269, 29271),
(29272, 29284),
(29287, 29290),
(29291, 29292),
(29294, 29296),
(29297, 29299),
(29300, 29301),
(29303, 29306),
(29307, 29315),
(29316, 29317),
(29319, 29320),
(29321, 29322),
(29325, 29327),
(29330, 29332),
(29334, 29335),
(29339, 29340),
(29344, 29345),
(29346, 29347),
(29351, 29353),
(29356, 29360),
(29361, 29363),
(29364, 29365),
(29366, 29367),
(29369, 29370),
(29374, 29375),
(29377, 29381),
(29382, 29384),
(29385, 29386),
(29388, 29389),
(29390, 29391),
(29392, 29393),
(29394, 29395),
(29397, 29402),
(29403, 29404),
(29407, 29411),
(29413, 29414),
(29417, 29418),
(29420, 29422),
(29427, 29429),
(29431, 29439),
(29442, 29443),
(29444, 29446),
(29447, 29448),
(29450, 29452),
(29453, 29454),
(29458, 29460),
(29462, 29466),
(29467, 29472),
(29474, 29475),
(29476, 29478),
(29479, 29485),
(29486, 29488),
(29489, 29491),
(29492, 29496),
(29498, 29500),
(29501, 29504),
(29507, 29510),
(29517, 29521),
(29522, 29523),
(29526, 29529),
(29533, 29537),
(29539, 29540),
(29542, 29549),
(29550, 29555),
(29557, 29558),
(29559, 29565),
(29568, 29570),
(29571, 29576),
(29577, 29578),
(29579, 29580),
(29582, 29583),
(29584, 29585),
(29587, 29588),
(29589, 29593),
(29596, 29597),
(29598, 29601),
(29602, 29603),
(29605, 29607),
(29609, 29612),
(29613, 29614),
(29618, 29620),
(29621, 29622),
(29623, 29624),
(29625, 29626),
(29627, 29630),
(29631, 29633),
(29634, 29635),
(29637, 29639),
(29640, 29648),
(29650, 29652),
(29654, 29655),
(29657, 29658),
(29661, 29663),
(29664, 29666),
(29667, 29668),
(29669, 29672),
(29673, 29675),
(29677, 29679),
(29681, 29682),
(29684, 29686),
(29687, 29692),
(29693, 29698),
(29699, 29704),
(29705, 29707),
(29713, 29714),
(29722, 29724),
(29730, 29731),
(29732, 29735),
(29736, 29751),
(29753, 29755),
(29759, 29762),
(29763, 29765),
(29766, 29768),
(29771, 29772),
(29773, 29774),
(29777, 29779),
(29781, 29782),
(29783, 29784),
(29785, 29793),
(29794, 29797),
(29798, 29804),
(29805, 29812),
(29814, 29815),
(29822, 29823),
(29824, 29826),
(29827, 29828),
(29829, 29832),
(29833, 29834),
(29835, 29836),
(29839, 29843),
(29848, 29851),
(29852, 29853),
(29854, 29860),
(29862, 29868),
(29870, 29875),
(29877, 29878),
(29881, 29882),
(29883, 29884),
(29885, 29886),
(29887, 29888),
(29896, 29899),
(29900, 29901),
(29903, 29905),
(29907, 29909),
(29912, 29913),
(29914, 29917),
(29918, 29921),
(29922, 29925),
(29926, 29932),
(29934, 29939),
(29940, 29941),
(29942, 29945),
(29946, 29949),
(29951, 29952),
(29953, 29954),
(29955, 29959),
(29964, 29967),
(29969, 29972),
(29973, 29977),
(29978, 29979),
(29980, 29981),
(29982, 29986),
(29987, 29997),
(29999, 30004),
(30006, 30017),
(30019, 30021),
(30022, 30035),
(30036, 30037),
(30039, 30040),
(30041, 30051),
(30052, 30056),
(30057, 30060),
(30061, 30062),
(30063, 30066),
(30067, 30069),
(30070, 30080),
(30081, 30083),
(30085, 30088),
(30089, 30092),
(30094, 30102),
(30105, 30107),
(30108, 30110),
(30114, 30118),
(30123, 30124),
(30129, 30134),
(30136, 30139),
(30140, 30152),
(30154, 30155),
(30156, 30160),
(30162, 30163),
(30164, 30166),
(30167, 30170),
(30171, 30173),
(30174, 30181),
(30183, 30184),
(30185, 30186),
(30188, 30189),
(30190, 30197),
(30201, 30203),
(30204, 30205),
(30206, 30213),
(30215, 30222),
(30223, 30224),
(30226, 30228),
(30229, 30231),
(30233, 30234),
(30235, 30248),
(30249, 30250),
(30253, 30254),
(30256, 30257),
(30258, 30262),
(30264, 30269),
(30272, 30285),
(30290, 30291),
(30293, 30295),
(30296, 30298),
(30300, 30301),
(30303, 30304),
(30305, 30307),
(30308, 30310),
(30311, 30315),
(30316, 30323),
(30324, 30325),
(30326, 30327),
(30328, 30329),
(30330, 30335),
(30336, 30345),
(30347, 30351),
(30352, 30353),
(30355, 30356),
(30357, 30359),
(30361, 30369),
(30370, 30377),
(30378, 30379),
(30381, 30383),
(30384, 30385),
(30388, 30389),
(30391, 30395),
(30397, 30398),
(30399, 30400),
(30401, 30404),
(30405, 30407),
(30408, 30415),
(30418, 30419),
(30420, 30421),
(30422, 30424),
(30425, 30426),
(30427, 30429),
(30430, 30434),
(30435, 30441),
(30442, 30443),
(30444, 30445),
(30446, 30447),
(30448, 30451),
(30452, 30453),
(30454, 30455),
(30456, 30458),
(30459, 30461),
(30462, 30463),
(30464, 30466),
(30468, 30469),
(30470, 30477),
(30478, 30479),
(30482, 30483),
(30484, 30486),
(30487, 30488),
(30489, 30493),
(30494, 30497),
(30498, 30499),
(30500, 30503),
(30504, 30506),
(30509, 30512),
(30516, 30523),
(30524, 30527),
(30528, 30529),
(30530, 30531),
(30533, 30536),
(30538, 30539),
(30541, 30544),
(30546, 30547),
(30550, 30552),
(30554, 30557),
(30558, 30569),
(30570, 30573),
(30576, 30577),
(30578, 30581),
(30585, 30587),
(30589, 30593),
(30596, 30597),
(30603, 30607),
(30609, 30610),
(30612, 30615),
(30618, 30619),
(30622, 30625),
(30626, 30627),
(30629, 30630),
(30631, 30632),
(30634, 30635),
(30636, 30642),
(30643, 30644),
(30645, 30647),
(30649, 30650),
(30651, 30656),
(30659, 30660),
(30663, 30664),
(30665, 30666),
(30669, 30670),
(30673, 30675),
(30677, 30678),
(30679, 30680),
(30681, 30685),
(30686, 30689),
(30690, 30696),
(30697, 30699),
(30700, 30706),
(30707, 30709),
(30712, 30713),
(30715, 30717),
(30722, 30723),
(30725, 30727),
(30729, 30730),
(30732, 30735),
(30737, 30739),
(30740, 30742),
(30749, 30750),
(30752, 30756),
(30757, 30760),
(30765, 30767),
(30768, 30769),
(30770, 30771),
(30772, 30774),
(30775, 30776),
(30778, 30779),
(30783, 30784),
(30787, 30790),
(30791, 30793),
(30796, 30797),
(30798, 30799),
(30802, 30803),
(30812, 30815),
(30816, 30818),
(30819, 30821),
(30824, 30825),
(30826, 30829),
(30830, 30832),
(30834, 30835),
(30836, 30837),
(30842, 30843),
(30844, 30845),
(30846, 30847),
(30849, 30850),
(30854, 30856),
(30858, 30859),
(30860, 30864),
(30865, 30866),
(30867, 30870),
(30871, 30873),
(30874, 30875),
(30877, 30880),
(30881, 30882),
(30883, 30885),
(30887, 30891),
(30892, 30894),
(30895, 30900),
(30901, 30902),
(30906, 30912),
(30913, 30914),
(30917, 30925),
(30926, 30927),
(30928, 30935),
(30938, 30940),
(30943, 30946),
(30948, 30949),
(30950, 30953),
(30954, 30955),
(30956, 30957),
(30959, 30960),
(30962, 30965),
(30966, 30968),
(30970, 30972),
(30973, 30974),
(30975, 30978),
(30982, 30984),
(30988, 30989),
(30990, 30991),
(30992, 30995),
(31001, 31003),
(31004, 31005),
(31006, 31009),
(31013, 31016),
(31017, 31022),
(31024, 31026),
(31028, 31030),
(31034, 31042),
(31044, 31052),
(31055, 31058),
(31059, 31065),
(31066, 31073),
(31074, 31075),
(31077, 31078),
(31079, 31082),
(31083, 31084),
(31085, 31086),
(31090, 31091),
(31095, 31096),
(31097, 31101),
(31102, 31106),
(31108, 31110),
(31114, 31120),
(31121, 31122),
(31123, 31127),
(31128, 31129),
(31131, 31134),
(31137, 31138),
(31142, 31148),
(31150, 31154),
(31155, 31157),
(31160, 31164),
(31165, 31171),
(31172, 31173),
(31175, 31180),
(31183, 31184),
(31185, 31187),
(31188, 31191),
(31192, 31193),
(31194, 31195),
(31197, 31208),
(31209, 31214),
(31216, 31218),
(31224, 31225),
(31227, 31229),
(31232, 31233),
(31234, 31236),
(31239, 31247),
(31249, 31250),
(31252, 31254),
(31255, 31261),
(31262, 31266),
(31271, 31272),
(31275, 31276),
(31277, 31283),
(31284, 31286),
(31287, 31297),
(31298, 31306),
(31308, 31313),
(31317, 31320),
(31321, 31322),
(31324, 31326),
(31327, 31332),
(31333, 31334),
(31335, 31336),
(31337, 31340),
(31341, 31342),
(31344, 31345),
(31348, 31351),
(31352, 31355),
(31357, 31367),
(31368, 31369),
(31370, 31372),
(31376, 31385),
(31390, 31393),
(31395, 31396),
(31401, 31403),
(31404, 31405),
(31406, 31409),
(31411, 31412),
(31413, 31415),
(31417, 31421),
(31423, 31424),
(31427, 31440),
(31441, 31444),
(31445, 31446),
(31449, 31454),
(31455, 31460),
(31461, 31470),
(31471, 31474),
(31476, 31477),
(31478, 31479),
(31480, 31484),
(31485, 31488),
(31490, 31491),
(31492, 31493),
(31494, 31497),
(31498, 31500),
(31503, 31504),
(31505, 31506),
(31508, 31509),
(31512, 31514),
(31515, 31516),
(31518, 31521),
(31523, 31524),
(31525, 31538),
(31539, 31543),
(31545, 31546),
(31549, 31550),
(31551, 31554),
(31557, 31562),
(31563, 31571),
(31572, 31575),
(31581, 31582),
(31584, 31585),
(31588, 31592),
(31593, 31595),
(31596, 31606),
(31607, 31608),
(31610, 31611),
(31620, 31621),
(31622, 31624),
(31625, 31626),
(31627, 31628),
(31629, 31635),
(31636, 31650),
(31653, 31654),
(31658, 31659),
(31660, 31662),
(31663, 31667),
(31668, 31671),
(31672, 31673),
(31674, 31678),
(31680, 31683),
(31684, 31693),
(31695, 31696),
(31700, 31701),
(31702, 31704),
(31705, 31708),
(31709, 31710),
(31712, 31713),
(31716, 31719),
(31720, 31723),
(31725, 31726),
(31730, 31739),
(31740, 31741),
(31742, 31743),
(31744, 31749),
(31750, 31752),
(31753, 31754),
(31755, 31760),
(31761, 31765),
(31767, 31768),
(31769, 31770),
(31771, 31772),
(31775, 31778),
(31779, 31780),
(31781, 31785),
(31786, 31789),
(31793, 31794),
(31795, 31797),
(31798, 31803),
(31805, 31809),
(31811, 31812),
(31814, 31815),
(31818, 31819),
(31820, 31822),
(31823, 31831),
(31832, 31842),
(31843, 31846),
(31847, 31848),
(31849, 31850),
(31852, 31855),
(31856, 31857),
(31858, 31860),
(31861, 31862),
(31865, 31866),
(31868, 31871),
(31873, 31876),
(31878, 31880),
(31881, 31882),
(31883, 31884),
(31885, 31886),
(31887, 31889),
(31890, 31891),
(31892, 31894),
(31895, 31897),
(31899, 31900),
(31902, 31907),
(31908, 31913),
(31915, 31916),
(31917, 31919),
(31920, 31924),
(31926, 31928),
(31929, 31937),
(31938, 31939),
(31940, 31942),
(31943, 31947),
(31949, 31952),
(31954, 31963),
(31964, 31969),
(31970, 31971),
(31974, 31976),
(31977, 31978),
(31979, 31980),
(31983, 31984),
(31986, 31987),
(31988, 31991),
(31992, 31993),
(31994, 31996),
(31998, 31999),
(32000, 32001),
(32002, 32012),
(32013, 32014),
(32015, 32031),
(32032, 32036),
(32038, 32039),
(32042, 32052),
(32053, 32054),
(32057, 32059),
(32060, 32073),
(32075, 32082),
(32083, 32084),
(32086, 32088),
(32089, 32095),
(32097, 32100),
(32101, 32105),
(32106, 32107),
(32110, 32111),
(32112, 32116),
(32117, 32119),
(32120, 32124),
(32125, 32126),
(32127, 32128),
(32129, 32132),
(32133, 32135),
(32136, 32138),
(32139, 32142),
(32143, 32144),
(32145, 32146),
(32147, 32148),
(32150, 32152),
(32153, 32161),
(32162, 32164),
(32166, 32168),
(32170, 32188),
(32189, 32192),
(32194, 32200),
(32202, 32208),
(32209, 32211),
(32213, 32219),
(32220, 32223),
(32224, 32227),
(32228, 32231),
(32232, 32238),
(32239, 32240),
(32241, 32243),
(32244, 32247),
(32249, 32252),
(32256, 32258),
(32260, 32262),
(32264, 32268),
(32272, 32275),
(32277, 32278),
(32279, 32280),
(32283, 32292),
(32294, 32297),
(32299, 32304),
(32305, 32308),
(32309, 32312),
(32313, 32316),
(32317, 32320),
(32321, 32322),
(32323, 32328),
(32330, 32332),
(32333, 32335),
(32336, 32337),
(32338, 32339),
(32340, 32343),
(32344, 32347),
(32349, 32352),
(32353, 32355),
(32357, 32360),
(32361, 32364),
(32365, 32369),
(32371, 32372),
(32376, 32378),
(32379, 32384),
(32385, 32388),
(32390, 32395),
(32396, 32407),
(32408, 32409),
(32410, 32415),
(32566, 32567),
(32568, 32569),
(32570, 32576),
(32579, 32582),
(32583, 32584),
(32588, 32598),
(32600, 32601),
(32603, 32606),
(32607, 32610),
(32611, 32620),
(32621, 32623),
(32624, 32627),
(32629, 32630),
(32631, 32634),
(32637, 32641),
(32642, 32644),
(32645, 32649),
(32650, 32658),
(32660, 32661),
(32662, 32664),
(32666, 32667),
(32668, 32671),
(32673, 32677),
(32678, 32679),
(32680, 32683),
(32685, 32688),
(32690, 32691),
(32692, 32693),
(32694, 32695),
(32696, 32698),
(32700, 32702),
(32703, 32706),
(32707, 32708),
(32709, 32711),
(32712, 32713),
(32714, 32715),
(32716, 32717),
(32718, 32720),
(32722, 32723),
(32724, 32726),
(32731, 32732),
(32735, 32738),
(32739, 32740),
(32741, 32743),
(32744, 32746),
(32747, 32749),
(32750, 32753),
(32754, 32756),
(32761, 32770),
(32771, 32777),
(32778, 32794),
(32796, 32802),
(32804, 32805),
(32806, 32807),
(32808, 32809),
(32812, 32813),
(32814, 32815),
(32816, 32817),
(32819, 32824),
(32825, 32833),
(32836, 32837),
(32838, 32839),
(32842, 32843),
(32850, 32851),
(32854, 32855),
(32856, 32857),
(32858, 32859),
(32862, 32867),
(32868, 32869),
(32870, 32871),
(32872, 32873),
(32877, 32878),
(32879, 32888),
(32889, 32890),
(32893, 32896),
(32897, 32898),
(32900, 32906),
(32907, 32909),
(32910, 32911),
(32915, 32916),
(32918, 32919),
(32920, 32921),
(32922, 32927),
(32929, 32931),
(32933, 32936),
(32937, 32942),
(32943, 32944),
(32945, 32947),
(32948, 32949),
(32952, 32955),
(32963, 32965),
(32966, 32967),
(32968, 32969),
(32972, 32976),
(32978, 32979),
(32980, 32988),
(32989, 32991),
(32992, 32994),
(32996, 32998),
(33005, 33013),
(33014, 33015),
(33016, 33019),
(33020, 33023),
(33026, 33028),
(33029, 33036),
(33046, 33049),
(33050, 33053),
(33054, 33055),
(33056, 33057),
(33059, 33061),
(33063, 33064),
(33065, 33066),
(33068, 33069),
(33071, 33074),
(33075, 33076),
(33077, 33078),
(33081, 33083),
(33084, 33085),
(33086, 33087),
(33093, 33096),
(33098, 33101),
(33102, 33103),
(33104, 33110),
(33111, 33112),
(33119, 33122),
(33125, 33130),
(33131, 33132),
(33133, 33138),
(33140, 33141),
(33143, 33147),
(33151, 33159),
(33160, 33161),
(33162, 33164),
(33166, 33169),
(33171, 33172),
(33173, 33175),
(33176, 33177),
(33178, 33183),
(33184, 33185),
(33186, 33189),
(33192, 33194),
(33198, 33199),
(33200, 33201),
(33202, 33206),
(33208, 33209),
(33210, 33212),
(33213, 33217),
(33218, 33220),
(33221, 33223),
(33224, 33228),
(33229, 33232),
(33233, 33234),
(33235, 33236),
(33237, 33238),
(33239, 33244),
(33245, 33250),
(33251, 33254),
(33255, 33257),
(33258, 33262),
(33264, 33271),
(33272, 33284),
(33285, 33286),
(33287, 33291),
(33292, 33297),
(33298, 33301),
(33302, 33312),
(33313, 33315),
(33320, 33325),
(33326, 33327),
(33330, 33339),
(33344, 33345),
(33347, 33352),
(33355, 33356),
(33358, 33360),
(33361, 33362),
(33366, 33367),
(33368, 33371),
(33372, 33374),
(33375, 33377),
(33378, 33381),
(33382, 33385),
(33386, 33388),
(33389, 33392),
(33393, 33395),
(33396, 33397),
(33398, 33401),
(33403, 33404),
(33405, 33410),
(33411, 33413),
(33415, 33416),
(33417, 33420),
(33421, 33423),
(33425, 33427),
(33428, 33429),
(33430, 33431),
(33432, 33436),
(33437, 33438),
(33439, 33442),
(33443, 33461),
(33463, 33472),
(33477, 33479),
(33488, 33494),
(33495, 33496),
(33497, 33501),
(33502, 33513),
(33514, 33516),
(33517, 33518),
(33519, 33520),
(33521, 33522),
(33523, 33525),
(33526, 33528),
(33529, 33532),
(33533, 33535),
(33536, 33548),
(33550, 33551),
(33558, 33561),
(33563, 33568),
(33569, 33572),
(33576, 33577),
(33579, 33595),
(33596, 33598),
(33600, 33601),
(33602, 33606),
(33607, 33608),
(33609, 33611),
(33613, 33625),
(33634, 33635),
(33648, 33649),
(33651, 33652),
(33653, 33654),
(33655, 33657),
(33659, 33662),
(33663, 33665),
(33666, 33667),
(33668, 33672),
(33673, 33675),
(33677, 33679),
(33682, 33687),
(33688, 33697),
(33698, 33699),
(33702, 33710),
(33713, 33714),
(33717, 33718),
(33725, 33730),
(33733, 33734),
(33735, 33736),
(33737, 33739),
(33740, 33741),
(33742, 33746),
(33747, 33749),
(33750, 33751),
(33752, 33753),
(33756, 33758),
(33759, 33761),
(33768, 33772),
(33775, 33779),
(33780, 33781),
(33782, 33786),
(33787, 33790),
(33793, 33794),
(33795, 33797),
(33798, 33800),
(33802, 33808),
(33809, 33810),
(33811, 33812),
(33813, 33814),
(33817, 33818),
(33824, 33825),
(33826, 33827),
(33833, 33835),
(33836, 33837),
(33839, 33840),
(33841, 33842),
(33845, 33846),
(33848, 33850),
(33852, 33854),
(33861, 33867),
(33869, 33872),
(33873, 33875),
(33878, 33885),
(33888, 33896),
(33897, 33906),
(33907, 33915),
(33916, 33918),
(33921, 33923),
(33924, 33926),
(33931, 33932),
(33936, 33937),
(33938, 33942),
(33945, 33946),
(33948, 33949),
(33950, 33952),
(33953, 33954),
(33958, 33959),
(33960, 33963),
(33965, 33966),
(33967, 33968),
(33969, 33971),
(33972, 33973),
(33976, 33987),
(33988, 33989),
(33990, 33998),
(33999, 34002),
(34003, 34004),
(34006, 34007),
(34009, 34011),
(34012, 34013),
(34023, 34024),
(34026, 34027),
(34028, 34029),
(34030, 34035),
(34036, 34037),
(34039, 34040),
(34042, 34046),
(34047, 34049),
(34050, 34052),
(34054, 34056),
(34060, 34061),
(34062, 34063),
(34064, 34066),
(34067, 34070),
(34071, 34073),
(34074, 34075),
(34076, 34077),
(34078, 34080),
(34081, 34088),
(34090, 34094),
(34095, 34096),
(34098, 34103),
(34109, 34110),
(34111, 34114),
(34115, 34116),
(34118, 34119),
(34120, 34124),
(34126, 34132),
(34133, 34139),
(34140, 34149),
(34152, 34156),
(34157, 34158),
(34159, 34160),
(34167, 34168),
(34169, 34172),
(34173, 34178),
(34180, 34189),
(34191, 34194),
(34195, 34197),
(34199, 34202),
(34203, 34206),
(34207, 34209),
(34210, 34211),
(34212, 34225),
(34228, 34229),
(34230, 34235),
(34236, 34240),
(34241, 34243),
(34247, 34248),
(34249, 34252),
(34253, 34257),
(34261, 34262),
(34264, 34265),
(34266, 34267),
(34268, 34270),
(34271, 34273),
(34276, 34279),
(34280, 34283),
(34285, 34286),
(34291, 34292),
(34294, 34296),
(34297, 34301),
(34302, 34305),
(34306, 34307),
(34308, 34312),
(34314, 34316),
(34317, 34319),
(34320, 34324),
(34326, 34332),
(34334, 34335),
(34337, 34339),
(34343, 34344),
(34345, 34346),
(34349, 34350),
(34351, 34353),
(34358, 34359),
(34360, 34361),
(34362, 34363),
(34364, 34366),
(34367, 34371),
(34374, 34375),
(34381, 34383),
(34384, 34385),
(34386, 34395),
(34396, 34405),
(34407, 34408),
(34409, 34410),
(34411, 34413),
(34415, 34416),
(34417, 34418),
(34421, 34424),
(34425, 34428),
(34440, 34441),
(34442, 34446),
(34449, 34450),
(34451, 34452),
(34453, 34455),
(34456, 34457),
(34458, 34459),
(34460, 34461),
(34465, 34466),
(34467, 34469),
(34470, 34476),
(34477, 34478),
(34479, 34482),
(34483, 34490),
(34495, 34498),
(34499, 34504),
(34505, 34506),
(34507, 34508),
(34509, 34511),
(34513, 34515),
(34516, 34518),
(34519, 34520),
(34521, 34525),
(34526, 34529),
(34531, 34534),
(34535, 34536),
(34537, 34538),
(34540, 34544),
(34552, 34559),
(34560, 34561),
(34562, 34572),
(34573, 34581),
(34584, 34587),
(34588, 34589),
(34590, 34592),
(34593, 34594),
(34595, 34596),
(34597, 34598),
(34600, 34602),
(34606, 34608),
(34609, 34611),
(34612, 34613),
(34615, 34616),
(34617, 34625),
(34627, 34628),
(34629, 34630),
(34633, 34634),
(34635, 34639),
(34643, 34644),
(34645, 34646),
(34647, 34650),
(34653, 34654),
(34655, 34658),
(34659, 34663),
(34664, 34665),
(34666, 34667),
(34670, 34672),
(34673, 34675),
(34676, 34677),
(34678, 34679),
(34680, 34681),
(34683, 34684),
(34687, 34688),
(34690, 34698),
(34699, 34702),
(34704, 34705),
(34707, 34708),
(34709, 34710),
(34711, 34714),
(34718, 34721),
(34722, 34724),
(34727, 34728),
(34731, 34736),
(34737, 34738),
(34739, 34740),
(34741, 34742),
(34746, 34748),
(34749, 34754),
(34756, 34757),
(34758, 34764),
(34766, 34767),
(34768, 34769),
(34770, 34771),
(34773, 34775),
(34777, 34779),
(34780, 34781),
(34783, 34785),
(34786, 34789),
(34794, 34796),
(34797, 34798),
(34799, 34800),
(34801, 34804),
(34806, 34812),
(34814, 34816),
(34817, 34818),
(34819, 34820),
(34821, 34824),
(34825, 34828),
(34829, 34839),
(34840, 34845),
(34846, 34848),
(34849, 34852),
(34855, 34857),
(34861, 34863),
(34864, 34867),
(34869, 34871),
(34873, 34877),
(34880, 34887),
(34888, 34895),
(34897, 34900),
(34901, 34917),
(34920, 34922),
(34923, 34924),
(34928, 34931),
(34933, 34934),
(34935, 34936),
(34937, 34938),
(34939, 34940),
(34941, 34947),
(34952, 34953),
(34955, 34956),
(34957, 34958),
(34962, 34963),
(34966, 34973),
(34974, 34977),
(34978, 34979),
(34980, 34981),
(34984, 34985),
(34986, 34988),
(34990, 34991),
(34992, 34994),
(34996, 34998),
(34999, 35000),
(35002, 35003),
(35005, 35014),
(35018, 35024),
(35025, 35030),
(35032, 35034),
(35035, 35040),
(35041, 35042),
(35047, 35049),
(35055, 35062),
(35063, 35066),
(35068, 35071),
(35073, 35075),
(35076, 35077),
(35078, 35080),
(35082, 35083),
(35084, 35089),
(35090, 35092),
(35093, 35095),
(35096, 35099),
(35100, 35103),
(35104, 35105),
(35109, 35113),
(35114, 35116),
(35120, 35123),
(35125, 35127),
(35128, 35132),
(35134, 35135),
(35136, 35143),
(35145, 35146),
(35148, 35150),
(35151, 35152),
(35154, 35155),
(35158, 35160),
(35162, 35165),
(35166, 35173),
(35174, 35175),
(35178, 35180),
(35181, 35185),
(35186, 35190),
(35191, 35192),
(35194, 35200),
(35201, 35202),
(35203, 35204),
(35206, 35212),
(35213, 35214),
(35215, 35217),
(35219, 35225),
(35226, 35229),
(35231, 35234),
(35237, 35240),
(35241, 35243),
(35244, 35245),
(35247, 35249),
(35250, 35256),
(35258, 35259),
(35260, 35262),
(35263, 35265),
(35282, 35283),
(35284, 35289),
(35290, 35291),
(35292, 35294),
(35299, 35300),
(35301, 35304),
(35305, 35306),
(35307, 35308),
(35309, 35310),
(35313, 35314),
(35315, 35317),
(35318, 35319),
(35320, 35322),
(35325, 35326),
(35327, 35329),
(35330, 35334),
(35335, 35337),
(35338, 35339),
(35340, 35341),
(35342, 35353),
(35355, 35356),
(35357, 35361),
(35362, 35367),
(35370, 35374),
(35375, 35376),
(35377, 35378),
(35379, 35384),
(35386, 35391),
(35392, 35394),
(35395, 35396),
(35397, 35402),
(35405, 35407),
(35408, 35417),
(35419, 35423),
(35424, 35428),
(35429, 35432),
(35433, 35434),
(35435, 35439),
(35440, 35444),
(35445, 35448),
(35449, 35453),
(35454, 35457),
(35458, 35464),
(35465, 35466),
(35467, 35470),
(35471, 35476),
(35477, 35483),
(35486, 35490),
(35491, 35498),
(35500, 35505),
(35506, 35508),
(35510, 35512),
(35513, 35514),
(35515, 35517),
(35518, 35520),
(35522, 35525),
(35526, 35534),
(35535, 35536),
(35537, 35544),
(35546, 35555),
(35556, 35557),
(35558, 35560),
(35563, 35567),
(35568, 35570),
(35571, 35577),
(35578, 35579),
(35580, 35581),
(35582, 35587),
(35588, 35592),
(35594, 35597),
(35598, 35599),
(35600, 35602),
(35604, 35605),
(35606, 35608),
(35609, 35618),
(35622, 35623),
(35624, 35625),
(35627, 35630),
(35632, 35633),
(35635, 35636),
(35639, 35640),
(35641, 35642),
(35644, 35645),
(35646, 35647),
(35649, 35655),
(35656, 35658),
(35660, 35664),
(35666, 35669),
(35670, 35671),
(35672, 35677),
(35678, 35680),
(35683, 35684),
(35686, 35687),
(35691, 35694),
(35695, 35699),
(35700, 35701),
(35702, 35706),
(35708, 35714),
(35715, 35718),
(35722, 35729),
(35730, 35735),
(35737, 35739),
(35740, 35741),
(35742, 35744),
(35895, 35899),
(35901, 35904),
(35905, 35906),
(35909, 35917),
(35918, 35922),
(35923, 35926),
(35927, 35932),
(35933, 35934),
(35937, 35941),
(35942, 35943),
(35944, 35950),
(35955, 35956),
(35957, 35959),
(35960, 35965),
(35966, 35967),
(35970, 35971),
(35973, 35976),
(35977, 35983),
(35984, 35985),
(35986, 35989),
(35992, 35994),
(35995, 35999),
(36000, 36003),
(36004, 36005),
(36007, 36017),
(36018, 36021),
(36022, 36030),
(36031, 36044),
(36045, 36048),
(36049, 36050),
(36051, 36052),
(36053, 36055),
(36057, 36063),
(36064, 36069),
(36070, 36071),
(36072, 36073),
(36074, 36075),
(36076, 36078),
(36079, 36081),
(36082, 36083),
(36084, 36086),
(36087, 36089),
(36090, 36096),
(36097, 36098),
(36099, 36102),
(36103, 36108),
(36109, 36110),
(36111, 36113),
(36114, 36117),
(36118, 36120),
(36123, 36124),
(36196, 36200),
(36201, 36202),
(36203, 36207),
(36208, 36210),
(36211, 36213),
(36214, 36216),
(36223, 36224),
(36225, 36227),
(36228, 36230),
(36232, 36233),
(36234, 36235),
(36237, 36238),
(36240, 36242),
(36245, 36246),
(36249, 36250),
(36254, 36257),
(36259, 36260),
(36262, 36263),
(36264, 36265),
(36267, 36269),
(36271, 36272),
(36274, 36276),
(36277, 36278),
(36279, 36280),
(36281, 36285),
(36286, 36287),
(36288, 36289),
(36290, 36291),
(36293, 36297),
(36298, 36301),
(36302, 36304),
(36305, 36306),
(36308, 36312),
(36313, 36316),
(36317, 36318),
(36319, 36320),
(36321, 36322),
(36323, 36326),
(36327, 36329),
(36330, 36333),
(36335, 36342),
(36348, 36350),
(36351, 36352),
(36353, 36354),
(36356, 36359),
(36360, 36364),
(36367, 36370),
(36372, 36373),
(36374, 36375),
(36381, 36388),
(36390, 36392),
(36394, 36395),
(36400, 36402),
(36403, 36410),
(36413, 36414),
(36416, 36419),
(36420, 36421),
(36423, 36433),
(36436, 36438),
(36441, 36442),
(36443, 36453),
(36457, 36458),
(36460, 36462),
(36463, 36467),
(36468, 36469),
(36470, 36471),
(36473, 36477),
(36481, 36486),
(36487, 36488),
(36489, 36492),
(36493, 36494),
(36496, 36502),
(36505, 36508),
(36509, 36511),
(36513, 36515),
(36519, 36520),
(36521, 36530),
(36531, 36532),
(36533, 36534),
(36538, 36540),
(36542, 36543),
(36544, 36546),
(36547, 36553),
(36554, 36558),
(36559, 36560),
(36561, 36563),
(36564, 36565),
(36571, 36573),
(36575, 36576),
(36578, 36580),
(36584, 36585),
(36587, 36588),
(36589, 36591),
(36592, 36594),
(36599, 36607),
(36608, 36609),
(36610, 36612),
(36613, 36614),
(36615, 36619),
(36620, 36621),
(36623, 36625),
(36626, 36634),
(36635, 36642),
(36643, 36644),
(36645, 36651),
(36652, 36656),
(36659, 36668),
(36670, 36680),
(36681, 36682),
(36684, 36688),
(36689, 36694),
(36695, 36697),
(36700, 36704),
(36705, 36710),
(36763, 36770),
(36771, 36777),
(36781, 36787),
(36789, 36793),
(36794, 36797),
(36798, 36803),
(36804, 36807),
(36810, 36812),
(36813, 36815),
(36816, 36822),
(36826, 36827),
(36832, 36833),
(36834, 36839),
(36840, 36844),
(36845, 36850),
(36852, 36860),
(36861, 36863),
(36864, 36871),
(36872, 36873),
(36875, 36882),
(36883, 36892),
(36893, 36900),
(36903, 36907),
(36908, 36912),
(36913, 36922),
(36924, 36925),
(36926, 36928),
(36929, 36934),
(36935, 36936),
(36937, 36951),
(36952, 36954),
(36955, 36959),
(36960, 36964),
(36965, 36970),
(36972, 36977),
(36978, 36979),
(36980, 36987),
(36988, 36990),
(36991, 36998),
(36999, 37005),
(37006, 37010),
(37013, 37014),
(37015, 37018),
(37019, 37020),
(37024, 37028),
(37029, 37031),
(37032, 37033),
(37034, 37035),
(37039, 37047),
(37048, 37049),
(37053, 37055),
(37057, 37058),
(37059, 37062),
(37063, 37065),
(37066, 37067),
(37068, 37069),
(37070, 37071),
(37074, 37075),
(37077, 37078),
(37079, 37082),
(37083, 37088),
(37089, 37091),
(37092, 37094),
(37096, 37097),
(37099, 37100),
(37101, 37102),
(37103, 37105),
(37108, 37112),
(37117, 37121),
(37122, 37123),
(37124, 37127),
(37128, 37129),
(37133, 37134),
(37136, 37137),
(37138, 37139),
(37140, 37147),
(37148, 37149),
(37150, 37151),
(37152, 37153),
(37154, 37156),
(37157, 37158),
(37159, 37160),
(37161, 37162),
(37165, 37171),
(37172, 37173),
(37174, 37176),
(37177, 37179),
(37180, 37182),
(37187, 37188),
(37191, 37200),
(37202, 37205),
(37206, 37212),
(37217, 37222),
(37223, 37224),
(37225, 37227),
(37228, 37230),
(37234, 37238),
(37239, 37244),
(37249, 37252),
(37253, 37256),
(37257, 37260),
(37261, 37263),
(37264, 37270),
(37271, 37273),
(37276, 37277),
(37278, 37279),
(37281, 37283),
(37284, 37285),
(37286, 37287),
(37288, 37289),
(37290, 37303),
(37304, 37305),
(37306, 37310),
(37311, 37316),
(37317, 37322),
(37323, 37330),
(37331, 37333),
(37334, 37344),
(37345, 37346),
(37347, 37352),
(37353, 37355),
(37356, 37362),
(37365, 37368),
(37369, 37370),
(37371, 37374),
(37375, 37378),
(37380, 37384),
(37385, 37387),
(37388, 37391),
(37392, 37399),
(37400, 37401),
(37404, 37407),
(37411, 37415),
(37416, 37418),
(37420, 37421),
(37422, 37425),
(37427, 37435),
(37436, 37437),
(37438, 37441),
(37442, 37452),
(37453, 37458),
(37463, 37471),
(37472, 37475),
(37476, 37482),
(37486, 37490),
(37493, 37498),
(37499, 37505),
(37507, 37508),
(37509, 37510),
(37512, 37515),
(37517, 37519),
(37521, 37524),
(37525, 37533),
(37535, 37537),
(37540, 37542),
(37543, 37545),
(37547, 37548),
(37549, 37550),
(37551, 37552),
(37554, 37555),
(37558, 37566),
(37567, 37572),
(37573, 37577),
(37579, 37585),
(37586, 37588),
(37589, 37590),
(37591, 37594),
(37596, 37598),
(37599, 37602),
(37603, 37606),
(37607, 37611),
(37612, 37615),
(37616, 37617),
(37618, 37620),
(37624, 37629),
(37631, 37633),
(37634, 37635),
(37638, 37639),
(37640, 37641),
(37645, 37646),
(37647, 37650),
(37652, 37654),
(37656, 37659),
(37660, 37677),
(37678, 37680),
(37682, 37688),
(37690, 37692),
(37700, 37701),
(37703, 37706),
(37707, 37708),
(37709, 37710),
(37712, 37715),
(37716, 37721),
(37722, 37725),
(37726, 37727),
(37728, 37729),
(37732, 37734),
(37735, 37736),
(37737, 37739),
(37740, 37746),
(37747, 37751),
(37754, 37755),
(37756, 37763),
(37768, 37769),
(37770, 37774),
(37775, 37776),
(37778, 37779),
(37780, 37785),
(37786, 37788),
(37790, 37791),
(37793, 37794),
(37795, 37797),
(37798, 37802),
(37803, 37807),
(37808, 37809),
(37812, 37815),
(37817, 37819),
(37825, 37826),
(37827, 37838),
(37840, 37842),
(37843, 37844),
(37846, 37850),
(37852, 37856),
(37857, 37859),
(37860, 37865),
(37879, 37884),
(37885, 37886),
(37889, 37893),
(37895, 37898),
(37901, 37905),
(37907, 37915),
(37919, 37920),
(37921, 37922),
(37931, 37932),
(37934, 37936),
(37937, 37943),
(37944, 37945),
(37946, 37948),
(37949, 37950),
(37951, 37952),
(37953, 37954),
(37955, 37958),
(37960, 37961),
(37962, 37963),
(37964, 37965),
(37969, 37972),
(37973, 37974),
(37977, 37981),
(37982, 37988),
(37992, 37993),
(37994, 37996),
(37997, 38003),
(38005, 38006),
(38007, 38008),
(38012, 38016),
(38017, 38018),
(38019, 38021),
(38263, 38266),
(38270, 38271),
(38272, 38273),
(38274, 38277),
(38279, 38288),
(38289, 38293),
(38294, 38295),
(38296, 38298),
(38301, 38314),
(38315, 38318),
(38322, 38323),
(38324, 38325),
(38326, 38327),
(38329, 38336),
(38339, 38340),
(38342, 38350),
(38352, 38359),
(38360, 38363),
(38364, 38371),
(38372, 38375),
(38428, 38431),
(38433, 38435),
(38436, 38439),
(38440, 38441),
(38442, 38443),
(38444, 38445),
(38446, 38448),
(38449, 38452),
(38455, 38462),
(38463, 38467),
(38468, 38469),
(38475, 38478),
(38479, 38481),
(38482, 38483),
(38484, 38485),
(38486, 38489),
(38491, 38496),
(38497, 38503),
(38506, 38507),
(38508, 38509),
(38510, 38511),
(38512, 38513),
(38514, 38521),
(38522, 38528),
(38529, 38535),
(38536, 38540),
(38541, 38544),
(38545, 38546),
(38548, 38558),
(38559, 38561),
(38563, 38571),
(38574, 38581),
(38582, 38589),
(38592, 38594),
(38596, 38600),
(38601, 38607),
(38609, 38611),
(38613, 38615),
(38616, 38624),
(38626, 38628),
(38632, 38636),
(38639, 38643),
(38646, 38648),
(38649, 38652),
(38656, 38657),
(38658, 38667),
(38669, 38672),
(38673, 38674),
(38675, 38676),
(38678, 38679),
(38681, 38687),
(38689, 38693),
(38695, 38697),
(38698, 38699),
(38704, 38708),
(38712, 38714),
(38715, 38716),
(38717, 38719),
(38721, 38725),
(38726, 38727),
(38728, 38731),
(38733, 38736),
(38737, 38739),
(38741, 38749),
(38750, 38751),
(38752, 38757),
(38758, 38764),
(38765, 38767),
(38769, 38770),
(38771, 38773),
(38774, 38782),
(38783, 38786),
(38788, 38791),
(38793, 38794),
(38795, 38796),
(38797, 38798),
(38799, 38801),
(38805, 38811),
(38812, 38813),
(38814, 38817),
(38818, 38820),
(38822, 38823),
(38824, 38825),
(38827, 38831),
(38833, 38839),
(38840, 38843),
(38844, 38845),
(38846, 38848),
(38849, 38850),
(38851, 38863),
(38864, 38866),
(38867, 38869),
(38871, 38874),
(38875, 38879),
(38880, 38882),
(38884, 38885),
(38893, 38896),
(38897, 38905),
(38906, 38908),
(38911, 38912),
(38913, 38916),
(38917, 38921),
(38922, 38923),
(38924, 38933),
(38934, 38939),
(38940, 38941),
(38942, 38943),
(38944, 38946),
(38947, 38951),
(38955, 38961),
(38962, 38966),
(38967, 38969),
(38971, 38975),
(38980, 38981),
(38982, 38984),
(38986, 38992),
(38993, 39004),
(39006, 39007),
(39010, 39012),
(39013, 39016),
(39018, 39021),
(39023, 39026),
(39027, 39029),
(39080, 39081),
(39082, 39084),
(39085, 39090),
(39092, 39093),
(39094, 39097),
(39098, 39100),
(39103, 39104),
(39106, 39111),
(39112, 39113),
(39116, 39117),
(39131, 39133),
(39135, 39136),
(39137, 39140),
(39141, 39144),
(39145, 39148),
(39149, 39152),
(39154, 39157),
(39158, 39159),
(39164, 39167),
(39170, 39172),
(39173, 39174),
(39175, 39179),
(39180, 39181),
(39184, 39193),
(39194, 39203),
(39204, 39205),
(39206, 39209),
(39211, 39213),
(39214, 39215),
(39217, 39222),
(39225, 39231),
(39232, 39235),
(39237, 39242),
(39243, 39247),
(39248, 39251),
(39252, 39254),
(39255, 39258),
(39259, 39261),
(39262, 39265),
(39318, 39322),
(39323, 39324),
(39325, 39328),
(39333, 39335),
(39336, 39337),
(39340, 39343),
(39344, 39350),
(39353, 39355),
(39356, 39358),
(39359, 39360),
(39361, 39362),
(39363, 39367),
(39368, 39370),
(39376, 39382),
(39384, 39392),
(39394, 39395),
(39399, 39400),
(39402, 39407),
(39408, 39411),
(39412, 39414),
(39416, 39418),
(39419, 39420),
(39421, 39424),
(39425, 39430),
(39435, 39437),
(39438, 39444),
(39446, 39447),
(39449, 39450),
(39454, 39455),
(39456, 39457),
(39458, 39461),
(39463, 39465),
(39467, 39468),
(39469, 39471),
(39472, 39473),
(39475, 39476),
(39477, 39481),
(39486, 39487),
(39488, 39494),
(39495, 39496),
(39498, 39503),
(39505, 39506),
(39508, 39512),
(39514, 39516),
(39517, 39518),
(39519, 39520),
(39522, 39523),
(39524, 39526),
(39529, 39532),
(39592, 39593),
(39594, 39595),
(39596, 39601),
(39602, 39603),
(39604, 39607),
(39608, 39610),
(39611, 39613),
(39614, 39618),
(39619, 39621),
(39622, 39623),
(39624, 39625),
(39630, 39642),
(39643, 39645),
(39646, 39649),
(39650, 39656),
(39657, 39664),
(39665, 39670),
(39671, 39672),
(39673, 39676),
(39677, 39678),
(39679, 39687),
(39688, 39690),
(39691, 39695),
(39696, 39697),
(39698, 39699),
(39702, 39703),
(39704, 39709),
(39711, 39713),
(39714, 39716),
(39717, 39724),
(39725, 39728),
(39729, 39734),
(39735, 39736),
(39737, 39742),
(39745, 39750),
(39752, 39753),
(39755, 39760),
(39761, 39762),
(39764, 39769),
(39770, 39772),
(39774, 39775),
(39777, 39778),
(39779, 39780),
(39781, 39783),
(39784, 39785),
(39786, 39792),
(39794, 39798),
(39799, 39802),
(39807, 39809),
(39811, 39816),
(39817, 39820),
(39821, 39829),
(39830, 39832),
(39834, 39835),
(39837, 39841),
(39846, 39855),
(39856, 39859),
(39860, 39861),
(39863, 39866),
(39867, 39869),
(39870, 39874),
(39878, 39883),
(39886, 39891),
(39892, 39893),
(39894, 39897),
(39899, 39900),
(39901, 39902),
(39903, 39904),
(39905, 39910),
(39911, 39913),
(39914, 39916),
(39919, 39924),
(39925, 39926),
(39927, 39931),
(39933, 39934),
(39935, 39937),
(39938, 39939),
(39940, 39941),
(39942, 39943),
(39944, 39950),
(39951, 39959),
(39960, 39965),
(39966, 39967),
(39969, 39979),
(39981, 39987),
(39989, 39992),
(39993, 39996),
(39997, 39999),
(40001, 40002),
(40003, 40011),
(40014, 40017),
(40018, 40021),
(40022, 40025),
(40026, 40033),
(40035, 40036),
(40039, 40044),
(40046, 40047),
(40048, 40049),
(40050, 40051),
(40053, 40057),
(40059, 40060),
(40165, 40168),
(40169, 40170),
(40171, 40173),
(40176, 40177),
(40178, 40181),
(40182, 40184),
(40185, 40186),
(40194, 40196),
(40198, 40202),
(40203, 40204),
(40206, 40207),
(40209, 40211),
(40213, 40214),
(40215, 40217),
(40219, 40224),
(40227, 40228),
(40230, 40231),
(40232, 40233),
(40234, 40237),
(40239, 40241),
(40242, 40245),
(40250, 40256),
(40257, 40265),
(40266, 40267),
(40272, 40274),
(40275, 40277),
(40281, 40282),
(40284, 40294),
(40297, 40301),
(40303, 40305),
(40306, 40307),
(40310, 40312),
(40314, 40317),
(40318, 40319),
(40323, 40325),
(40326, 40328),
(40329, 40331),
(40333, 40336),
(40338, 40340),
(40341, 40345),
(40346, 40347),
(40353, 40354),
(40356, 40357),
(40361, 40365),
(40366, 40368),
(40369, 40371),
(40372, 40374),
(40376, 40381),
(40383, 40384),
(40385, 40389),
(40390, 40392),
(40393, 40395),
(40399, 40400),
(40403, 40408),
(40409, 40411),
(40414, 40417),
(40421, 40424),
(40425, 40426),
(40427, 40428),
(40429, 40433),
(40434, 40437),
(40440, 40443),
(40445, 40447),
(40450, 40451),
(40455, 40456),
(40458, 40459),
(40462, 40463),
(40464, 40467),
(40469, 40471),
(40473, 40479),
(40565, 40566),
(40568, 40574),
(40575, 40582),
(40583, 40585),
(40587, 40589),
(40590, 40592),
(40593, 40596),
(40597, 40601),
(40603, 40604),
(40605, 40608),
(40612, 40615),
(40616, 40619),
(40620, 40625),
(40627, 40630),
(40632, 40637),
(40638, 40640),
(40644, 40645),
(40646, 40647),
(40648, 40649),
(40651, 40659),
(40660, 40662),
(40664, 40666),
(40667, 40673),
(40676, 40678),
(40679, 40681),
(40684, 40691),
(40692, 40698),
(40699, 40702),
(40703, 40704),
(40706, 40708),
(40711, 40714),
(40718, 40728),
(40729, 40732),
(40735, 40739),
(40742, 40743),
(40746, 40749),
(40751, 40752),
(40753, 40755),
(40756, 40757),
(40759, 40760),
(40761, 40768),
(40769, 40770),
(40771, 40776),
(40778, 40780),
(40782, 40784),
(40786, 40793),
(40794, 40795),
(40797, 40804),
(40806, 40811),
(40812, 40820),
(40821, 40824),
(40826, 40827),
(40829, 40830),
(40845, 40846),
(40847, 40851),
(40852, 40856),
(40860, 40863),
(40864, 40868),
(40869, 40870),
(63785, 63786),
(63964, 63965),
(64014, 64046),
(64259, 64261),
(65281, 65375),
(65377, 65440),
(65504, 65509),
]
| mit |
drammock/mne-python | mne/preprocessing/realign.py | 7 | 3982 | # -*- coding: utf-8 -*-
# Authors: Eric Larson <[email protected]>
# License: BSD (3-clause)
import numpy as np
from ..io import BaseRaw
from ..utils import _validate_type, warn, logger, verbose
@verbose
def realign_raw(raw, other, t_raw, t_other, verbose=None):
    """Realign two simultaneous recordings.

    Due to clock drift, recordings at a given same sample rate made by two
    separate devices simultaneously can become out of sync over time. This
    function uses event times captured by both acquisition devices to resample
    ``other`` to match ``raw``.

    Parameters
    ----------
    raw : instance of Raw
        The first raw instance.
    other : instance of Raw
        The second raw instance. It will be resampled to match ``raw``.
    t_raw : array-like, shape (n_events,)
        The times of shared events in ``raw`` relative to ``raw.times[0]`` (0).
        Typically these could be events on some TTL channel like
        ``find_events(raw)[:, 0] - raw.first_event``.
    t_other : array-like, shape (n_events,)
        The times of shared events in ``other`` relative to ``other.times[0]``.
    %(verbose)s

    Notes
    -----
    This function operates inplace. It will:

    1. Estimate the zero-order (start offset) and first-order (clock drift)
       correction.
    2. Crop the start of ``raw`` or ``other``, depending on which started
       recording first.
    3. Resample ``other`` to match ``raw`` based on the clock drift.
    4. Crop the end of ``raw`` or ``other``, depending on which stopped
       recording first (and the clock drift rate).

    This function is primarily designed to work on recordings made at the same
    sample rate, but it can also operate on recordings made at different
    sample rates to resample and deal with clock drift simultaneously.

    .. versionadded:: 0.22
    """
    from scipy import stats
    _validate_type(raw, BaseRaw, 'raw')
    _validate_type(other, BaseRaw, 'other')
    t_raw = np.array(t_raw, float)
    t_other = np.array(t_other, float)
    if t_raw.ndim != 1 or t_raw.shape != t_other.shape:
        raise ValueError('t_raw and t_other must be 1D with the same shape, '
                         f'got shapes {t_raw.shape} and {t_other.shape}')
    if len(t_raw) < 20:
        warn('Fewer than 20 times passed, results may be unreliable')

    # 1. Compute correction factors: a first-degree fit t_raw ≈
    # coef[0] * t_other + coef[1], so coef[0] captures the clock drift
    # (slope) and coef[1] the start-time offset (zero-order term).
    coef = np.polyfit(t_other, t_raw, deg=1)
    # Sanity-check that the two event-time series really correspond: they
    # must be strongly and *positively* linearly correlated.
    r, p = stats.pearsonr(t_other, t_raw)
    msg = f'Linear correlation computed as R={r:0.3f} and p={p:0.2e}'
    if p > 0.05 or r <= 0:
        raise ValueError(msg + ', cannot resample safely')
    if p > 1e-6:
        warn(msg + ', results may be unreliable')
    else:
        logger.info(msg)
    dr_ms_s = 1000 * abs(1 - coef[0])  # drift rate in ms per sec of recording
    logger.info(
        f'Drift rate: {1000 * dr_ms_s:0.1f} μs/sec '
        f'(total drift over {raw.times[-1]:0.1f} sec recording: '
        f'{raw.times[-1] * dr_ms_s:0.1f} ms)')

    # 2. Crop start of recordings to match using the zero-order term.
    # Use abs() so the logged duration is positive regardless of which
    # recording started first (consistent with the end-crop message below;
    # the original printed a negative value when ``other`` is cropped).
    msg = f'Cropping {abs(coef[1]):0.3f} sec from the start of '
    if coef[1] > 0:  # need to crop start of raw to match other
        logger.info(msg + 'raw')
        raw.crop(coef[1], None)
        t_raw -= coef[1]
    else:  # need to crop start of other to match raw
        logger.info(msg + 'other')
        other.crop(-coef[1], None)
        t_other += coef[1]  # coef[1] <= 0 here, so this shifts times down

    # 3. Resample data using the first-order term: resampling ``other`` at
    # sfreq * slope and then relabeling it as raw's sfreq stretches or
    # compresses its timeline, undoing the clock drift.
    logger.info('Resampling other')
    coef = coef[0]
    sfreq_new = raw.info['sfreq'] * coef
    other.load_data().resample(sfreq_new, verbose=True)
    other.info['sfreq'] = raw.info['sfreq']

    # 4. Crop the end of one of the recordings if necessary so both cover
    # the same time span.
    delta = raw.times[-1] - other.times[-1]
    msg = f'Cropping {abs(delta):0.3f} sec from the end of '
    if delta > 0:
        logger.info(msg + 'raw')
        raw.crop(0, other.times[-1])
    elif delta < 0:
        logger.info(msg + 'other')
        other.crop(0, raw.times[-1])
| bsd-3-clause |
hef/samba | lib/testtools/testtools/tests/matchers/helpers.py | 14 | 1643 | # Copyright (c) 2008-2012 testtools developers. See LICENSE for details.
from testtools.tests.helpers import FullStackRunTest
class TestMatchersInterface(object):
    """Mixin exercising the generic Matcher contract.

    Concrete subclasses supply ``matches_matcher``, ``matches_matches``,
    ``matches_mismatches``, ``str_examples`` and ``describe_examples``.
    """

    run_tests_with = FullStackRunTest

    def test_matches_match(self):
        matcher = self.matches_matcher
        # Matching candidates produce no mismatch object.
        for candidate in self.matches_matches:
            self.assertEqual(None, matcher.match(candidate))
        # Non-matching candidates produce a mismatch with a describe method.
        for candidate in self.matches_mismatches:
            result = matcher.match(candidate)
            self.assertNotEqual(None, result)
            self.assertNotEqual(None, getattr(result, 'describe', None))

    def test__str__(self):
        # str_examples pairs each expected rendering with a matcher object.
        from testtools.matchers._doctest import DocTestMatches
        for expected, matcher in self.str_examples:
            self.assertThat(matcher, DocTestMatches(expected))

    def test_describe_difference(self):
        # describe_examples is a list of (expected, matchee, matcher) triples.
        for expected_text, candidate, matcher in self.describe_examples:
            result = matcher.match(candidate)
            self.assertEqual(expected_text, result.describe())

    def test_mismatch_details(self):
        # get_details must return a mapping of names to Content objects.
        for _, candidate, matcher in self.describe_examples:
            details = matcher.match(candidate).get_details()
            self.assertEqual(dict(details), details)
| gpl-3.0 |
apple/swift-lldb | packages/Python/lldbsuite/test/tools/lldb-server/TestGdbRemote_vCont.py | 5 | 5722 | from __future__ import print_function
import gdbremote_testcase
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestGdbRemote_vCont(gdbremote_testcase.GdbRemoteTestCaseBase):
    """Exercise the gdb-remote stub's vCont packet support.

    Each check is run twice, once against debugserver and once against
    lldb-server (llgs); the *_debugserver/*_llgs test pairs below share the
    same underlying helper.
    """

    mydir = TestBase.compute_mydir(__file__)

    def vCont_supports_mode(self, mode, inferior_args=None):
        """Assert the stub advertises *mode* in its vCont? reply."""
        # Setup the stub and set the gdb remote command stream.
        procs = self.prep_debug_monitor_and_inferior(
            inferior_args=inferior_args)
        self.add_vCont_query_packets()
        # Run the gdb remote command stream.
        context = self.expect_gdbremote_sequence()
        self.assertIsNotNone(context)
        # Pull out supported modes.
        supported_vCont_modes = self.parse_vCont_query_response(context)
        self.assertIsNotNone(supported_vCont_modes)
        # Verify we support the given mode.
        self.assertTrue(mode in supported_vCont_modes)

    # One thin wrapper per vCont continue/step mode: c (continue),
    # C (continue with signal), s (step), S (step with signal).
    def vCont_supports_c(self):
        self.vCont_supports_mode("c")

    def vCont_supports_C(self):
        self.vCont_supports_mode("C")

    def vCont_supports_s(self):
        self.vCont_supports_mode("s")

    def vCont_supports_S(self):
        self.vCont_supports_mode("S")

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @debugserver_test
    def test_vCont_supports_c_debugserver(self):
        self.init_debugserver_test()
        self.build()
        self.vCont_supports_c()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @llgs_test
    def test_vCont_supports_c_llgs(self):
        self.init_llgs_test()
        self.build()
        self.vCont_supports_c()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @debugserver_test
    def test_vCont_supports_C_debugserver(self):
        self.init_debugserver_test()
        self.build()
        self.vCont_supports_C()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @llgs_test
    def test_vCont_supports_C_llgs(self):
        self.init_llgs_test()
        self.build()
        self.vCont_supports_C()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @debugserver_test
    def test_vCont_supports_s_debugserver(self):
        self.init_debugserver_test()
        self.build()
        self.vCont_supports_s()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @llgs_test
    def test_vCont_supports_s_llgs(self):
        self.init_llgs_test()
        self.build()
        self.vCont_supports_s()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @debugserver_test
    def test_vCont_supports_S_debugserver(self):
        self.init_debugserver_test()
        self.build()
        self.vCont_supports_S()

    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @llgs_test
    def test_vCont_supports_S_llgs(self):
        self.init_llgs_test()
        self.build()
        self.vCont_supports_S()

    # Single-step checks: a "vCont;s" step must advance exactly one
    # instruction.  First with thread selection via the Hc packet ...
    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @debugserver_test
    def test_single_step_only_steps_one_instruction_with_Hc_vCont_s_debugserver(
            self):
        self.init_debugserver_test()
        self.build()
        self.set_inferior_startup_launch()
        self.single_step_only_steps_one_instruction(
            use_Hc_packet=True, step_instruction="vCont;s")

    @skipIfWindows  # No pty support to test O* & I* notification packets.
    @llgs_test
    @expectedFailureAndroid(
        bugnumber="llvm.org/pr24739",
        archs=[
            "arm",
            "aarch64"])
    @expectedFailureAll(
        oslist=["linux"],
        archs=[
            "arm",
            "aarch64"],
        bugnumber="llvm.org/pr24739")
    @skipIf(triple='^mips')
    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    def test_single_step_only_steps_one_instruction_with_Hc_vCont_s_llgs(self):
        self.init_llgs_test()
        self.build()
        self.set_inferior_startup_launch()
        self.single_step_only_steps_one_instruction(
            use_Hc_packet=True, step_instruction="vCont;s")

    # ... then with the thread given inline in the vCont packet itself
    # ("vCont;s:{thread}"), bypassing Hc.
    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    @debugserver_test
    def test_single_step_only_steps_one_instruction_with_vCont_s_thread_debugserver(
            self):
        self.init_debugserver_test()
        self.build()
        self.set_inferior_startup_launch()
        self.single_step_only_steps_one_instruction(
            use_Hc_packet=False, step_instruction="vCont;s:{thread}")

    @skipIfWindows  # No pty support to test O* & I* notification packets.
    @llgs_test
    @expectedFailureAndroid(
        bugnumber="llvm.org/pr24739",
        archs=[
            "arm",
            "aarch64"])
    @expectedFailureAll(
        oslist=["linux"],
        archs=[
            "arm",
            "aarch64"],
        bugnumber="llvm.org/pr24739")
    @skipIf(triple='^mips')
    @expectedFailureAll(oslist=["ios", "tvos", "watchos", "bridgeos"], bugnumber="rdar://27005337")
    def test_single_step_only_steps_one_instruction_with_vCont_s_thread_llgs(
            self):
        self.init_llgs_test()
        self.build()
        self.set_inferior_startup_launch()
        self.single_step_only_steps_one_instruction(
            use_Hc_packet=False, step_instruction="vCont;s:{thread}")
| apache-2.0 |
brianwoo/django-tutorial | build/Django/tests/model_meta/results.py | 23 | 26393 | from .models import AbstractPerson, BasePerson, Person, Relating, Relation
TEST_RESULTS = {
'get_all_field_names': {
Person: [
'baseperson_ptr',
'baseperson_ptr_id',
'content_type_abstract',
'content_type_abstract_id',
'content_type_base',
'content_type_base_id',
'content_type_concrete',
'content_type_concrete_id',
'data_abstract',
'data_base',
'data_inherited',
'data_not_concrete_abstract',
'data_not_concrete_base',
'data_not_concrete_inherited',
'fk_abstract',
'fk_abstract_id',
'fk_base',
'fk_base_id',
'fk_inherited',
'fk_inherited_id',
'followers_abstract',
'followers_base',
'followers_concrete',
'following_abstract',
'following_base',
'following_inherited',
'friends_abstract',
'friends_base',
'friends_inherited',
'generic_relation_abstract',
'generic_relation_base',
'generic_relation_concrete',
'id',
'm2m_abstract',
'm2m_base',
'm2m_inherited',
'object_id_abstract',
'object_id_base',
'object_id_concrete',
'relating_basepeople',
'relating_baseperson',
'relating_people',
'relating_person',
],
BasePerson: [
'content_type_abstract',
'content_type_abstract_id',
'content_type_base',
'content_type_base_id',
'data_abstract',
'data_base',
'data_not_concrete_abstract',
'data_not_concrete_base',
'fk_abstract',
'fk_abstract_id',
'fk_base',
'fk_base_id',
'followers_abstract',
'followers_base',
'following_abstract',
'following_base',
'friends_abstract',
'friends_base',
'generic_relation_abstract',
'generic_relation_base',
'id',
'm2m_abstract',
'm2m_base',
'object_id_abstract',
'object_id_base',
'person',
'relating_basepeople',
'relating_baseperson'
],
AbstractPerson: [
'content_type_abstract',
'content_type_abstract_id',
'data_abstract',
'data_not_concrete_abstract',
'fk_abstract',
'fk_abstract_id',
'following_abstract',
'friends_abstract',
'generic_relation_abstract',
'm2m_abstract',
'object_id_abstract',
],
Relating: [
'basepeople',
'basepeople_hidden',
'baseperson',
'baseperson_hidden',
'baseperson_hidden_id',
'baseperson_id',
'id',
'people',
'people_hidden',
'person',
'person_hidden',
'person_hidden_id',
'person_id',
'proxyperson',
'proxyperson_hidden',
'proxyperson_hidden_id',
'proxyperson_id',
],
},
'fields': {
Person: [
'id',
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'data_not_concrete_base',
'content_type_base_id',
'object_id_base',
'baseperson_ptr_id',
'data_inherited',
'fk_inherited_id',
'data_not_concrete_inherited',
'content_type_concrete_id',
'object_id_concrete',
],
BasePerson: [
'id',
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'data_not_concrete_base',
'content_type_base_id',
'object_id_base',
],
AbstractPerson: [
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
],
Relating: [
'id',
'baseperson_id',
'baseperson_hidden_id',
'person_id',
'person_hidden_id',
'proxyperson_id',
'proxyperson_hidden_id',
],
},
'local_fields': {
Person: [
'baseperson_ptr_id',
'data_inherited',
'fk_inherited_id',
'data_not_concrete_inherited',
'content_type_concrete_id',
'object_id_concrete',
],
BasePerson: [
'id',
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'data_not_concrete_base',
'content_type_base_id',
'object_id_base',
],
AbstractPerson: [
'data_abstract',
'fk_abstract_id',
'data_not_concrete_abstract',
'content_type_abstract_id',
'object_id_abstract',
],
Relating: [
'id',
'baseperson_id',
'baseperson_hidden_id',
'person_id',
'person_hidden_id',
'proxyperson_id',
'proxyperson_hidden_id',
],
},
'local_concrete_fields': {
Person: [
'baseperson_ptr_id',
'data_inherited',
'fk_inherited_id',
'content_type_concrete_id',
'object_id_concrete',
],
BasePerson: [
'id',
'data_abstract',
'fk_abstract_id',
'content_type_abstract_id',
'object_id_abstract',
'data_base',
'fk_base_id',
'content_type_base_id',
'object_id_base',
],
AbstractPerson: [
'data_abstract',
'fk_abstract_id',
'content_type_abstract_id',
'object_id_abstract',
],
Relating: [
'id',
'baseperson_id',
'baseperson_hidden_id',
'person_id',
'person_hidden_id',
'proxyperson_id',
'proxyperson_hidden_id',
],
},
'many_to_many': {
Person: [
'm2m_abstract',
'friends_abstract',
'following_abstract',
'm2m_base',
'friends_base',
'following_base',
'm2m_inherited',
'friends_inherited',
'following_inherited',
],
BasePerson: [
'm2m_abstract',
'friends_abstract',
'following_abstract',
'm2m_base',
'friends_base',
'following_base',
],
AbstractPerson: [
'm2m_abstract',
'friends_abstract',
'following_abstract',
],
Relating: [
'basepeople',
'basepeople_hidden',
'people',
'people_hidden',
],
},
'many_to_many_with_model': {
Person: [
BasePerson,
BasePerson,
BasePerson,
BasePerson,
BasePerson,
BasePerson,
None,
None,
None,
],
BasePerson: [
None,
None,
None,
None,
None,
None,
],
AbstractPerson: [
None,
None,
None,
],
Relating: [
None,
None,
None,
None,
],
},
'get_all_related_objects_with_model_legacy': {
Person: (
('relating_baseperson', BasePerson),
('relating_person', None),
),
BasePerson: (
('person', None),
('relating_baseperson', None),
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden_local': {
Person: (
('+', None),
('+', None),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_people+', None),
('Relating_people_hidden+', None),
('followers_concrete', None),
('friends_inherited_rel_+', None),
('relating_people', None),
('relating_person', None),
),
BasePerson: (
('+', None),
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('followers_abstract', None),
('followers_base', None),
('friends_abstract_rel_+', None),
('friends_base_rel_+', None),
('person', None),
('relating_basepeople', None),
('relating_baseperson', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden': {
Person: (
('+', BasePerson),
('+', BasePerson),
('+', None),
('+', None),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_m2m_abstract+', BasePerson),
('BasePerson_m2m_base+', BasePerson),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_basepeople+', BasePerson),
('Relating_basepeople_hidden+', BasePerson),
('Relating_people+', None),
('Relating_people_hidden+', None),
('followers_abstract', BasePerson),
('followers_base', BasePerson),
('followers_concrete', None),
('friends_abstract_rel_+', BasePerson),
('friends_base_rel_+', BasePerson),
('friends_inherited_rel_+', None),
('relating_basepeople', BasePerson),
('relating_baseperson', BasePerson),
('relating_people', None),
('relating_person', None),
),
BasePerson: (
('+', None),
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('followers_abstract', None),
('followers_base', None),
('friends_abstract_rel_+', None),
('friends_base_rel_+', None),
('person', None),
('relating_basepeople', None),
('relating_baseperson', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model_local': {
Person: (
('followers_concrete', None),
('relating_person', None),
('relating_people', None),
),
BasePerson: (
('followers_abstract', None),
('followers_base', None),
('person', None),
('relating_baseperson', None),
('relating_basepeople', None),
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model': {
Person: (
('followers_abstract', BasePerson),
('followers_base', BasePerson),
('relating_baseperson', BasePerson),
('relating_basepeople', BasePerson),
('followers_concrete', None),
('relating_person', None),
('relating_people', None),
),
BasePerson: (
('followers_abstract', None),
('followers_base', None),
('person', None),
('relating_baseperson', None),
('relating_basepeople', None),
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_objects_with_model_local_legacy': {
Person: (
('relating_person', None),
),
BasePerson: (
('person', None),
('relating_baseperson', None)
),
Relation: (
('fk_abstract_rel', None),
('fo_abstract_rel', None),
('fk_base_rel', None),
('fo_base_rel', None),
('fk_concrete_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden_legacy': {
BasePerson: (
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('person', None),
('relating_baseperson', None),
),
Person: (
('+', BasePerson),
('+', None),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_m2m_abstract+', BasePerson),
('BasePerson_m2m_base+', BasePerson),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_basepeople+', BasePerson),
('Relating_basepeople_hidden+', BasePerson),
('Relating_people+', None),
('Relating_people_hidden+', None),
('relating_baseperson', BasePerson),
('relating_person', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_hidden_local_legacy': {
BasePerson: (
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('person', None),
('relating_baseperson', None),
),
Person: (
('+', None),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_people+', None),
('Relating_people_hidden+', None),
('relating_person', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_proxy_legacy': {
BasePerson: (
('person', None),
('relating_baseperson', None),
),
Person: (
('relating_baseperson', BasePerson),
('relating_person', None), ('relating_proxyperson', None),
),
Relation: (
('fk_abstract_rel', None), ('fo_abstract_rel', None),
('fk_base_rel', None), ('fo_base_rel', None),
('fk_concrete_rel', None), ('fo_concrete_rel', None),
),
},
'get_all_related_objects_with_model_proxy_hidden_legacy': {
BasePerson: (
('+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_abstract+', None),
('BasePerson_following_base+', None),
('BasePerson_following_base+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_abstract+', None),
('BasePerson_friends_base+', None),
('BasePerson_friends_base+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Relating_basepeople+', None),
('Relating_basepeople_hidden+', None),
('person', None),
('relating_baseperson', None),
),
Person: (
('+', BasePerson),
('+', None),
('+', None),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_abstract+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_following_base+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_abstract+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_friends_base+', BasePerson),
('BasePerson_m2m_abstract+', BasePerson),
('BasePerson_m2m_base+', BasePerson),
('Person_following_inherited+', None),
('Person_following_inherited+', None),
('Person_friends_inherited+', None),
('Person_friends_inherited+', None),
('Person_m2m_inherited+', None),
('Relating_basepeople+', BasePerson),
('Relating_basepeople_hidden+', BasePerson),
('Relating_people+', None),
('Relating_people_hidden+', None),
('relating_baseperson', BasePerson),
('relating_person', None),
('relating_proxyperson', None),
),
Relation: (
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('+', None),
('BasePerson_m2m_abstract+', None),
('BasePerson_m2m_base+', None),
('Person_m2m_inherited+', None),
('fk_abstract_rel', None),
('fk_base_rel', None),
('fk_concrete_rel', None),
('fo_abstract_rel', None),
('fo_base_rel', None),
('fo_concrete_rel', None),
),
},
'get_all_related_many_to_many_with_model_legacy': {
BasePerson: (
('friends_abstract_rel_+', None),
('followers_abstract', None),
('friends_base_rel_+', None),
('followers_base', None),
('relating_basepeople', None),
('+', None),
),
Person: (
('friends_abstract_rel_+', BasePerson),
('followers_abstract', BasePerson),
('friends_base_rel_+', BasePerson),
('followers_base', BasePerson),
('relating_basepeople', BasePerson),
('+', BasePerson),
('friends_inherited_rel_+', None),
('followers_concrete', None),
('relating_people', None),
('+', None),
),
Relation: (
('m2m_abstract_rel', None),
('m2m_base_rel', None),
('m2m_concrete_rel', None),
),
},
'get_all_related_many_to_many_local_legacy': {
BasePerson: [
'friends_abstract_rel_+',
'followers_abstract',
'friends_base_rel_+',
'followers_base',
'relating_basepeople',
'+',
],
Person: [
'friends_inherited_rel_+',
'followers_concrete',
'relating_people',
'+',
],
Relation: [
'm2m_abstract_rel',
'm2m_base_rel',
'm2m_concrete_rel',
],
},
'virtual_fields': {
AbstractPerson: [
'generic_relation_abstract',
'content_object_abstract',
],
BasePerson: [
'generic_relation_base',
'content_object_base',
'generic_relation_abstract',
'content_object_abstract',
],
Person: [
'content_object_concrete',
'generic_relation_concrete',
'generic_relation_base',
'content_object_base',
'generic_relation_abstract',
'content_object_abstract',
],
},
}
| gpl-3.0 |
indhub/mxnet | example/speech_recognition/stt_layer_batchnorm.py | 52 | 1994 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
def batchnorm(net,
              gamma=None,
              beta=None,
              eps=0.001,
              momentum=0.9,
              fix_gamma=False,
              use_global_stats=False,
              output_mean_var=False,
              name=None):
    """Append a BatchNorm operator to the given symbol.

    Parameters
    ----------
    net : mxnet Symbol
        Input symbol the batch-normalization layer is attached to.
    gamma, beta : mxnet Symbol, optional
        Pre-created scale/shift parameter symbols. They are forwarded to
        ``mx.sym.BatchNorm`` only when BOTH are supplied; otherwise the
        operator creates its own parameters (same behavior as before).
    eps : float
        Epsilon added to the variance for numerical stability.
    momentum : float
        Momentum for the moving average of mean/variance.
    fix_gamma : bool
        If True, gamma is fixed to 1 by the operator.
    use_global_stats : bool
        Use global moving statistics instead of per-batch statistics.
    output_mean_var : bool
        Additionally output the batch mean and variance.
    name : str, optional
        Name of the resulting symbol.

    Returns
    -------
    mxnet Symbol
        The batch-normalized symbol.
    """
    # Keyword arguments shared by both call variants. Building them once
    # removes the duplicated BatchNorm invocation of the original code.
    kwargs = dict(data=net,
                  eps=eps,
                  momentum=momentum,
                  fix_gamma=fix_gamma,
                  use_global_stats=use_global_stats,
                  output_mean_var=output_mean_var,
                  name=name)
    # Forward user-supplied parameter symbols only when both are present,
    # mirroring the original two-branch behavior exactly.
    if gamma is not None and beta is not None:
        kwargs['gamma'] = gamma
        kwargs['beta'] = beta
    return mx.sym.BatchNorm(**kwargs)
| apache-2.0 |
MatteoNardi/dyanote-server | api/views.py | 1 | 1637 | from django.contrib.auth.models import User
from rest_framework import mixins
from rest_framework import generics
from rest_framework import renderers
from rest_framework import permissions
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from api.models import Page
from api.serializers import PageSerializer
from api.permissions import IsOwnerOrAdmin
@api_view(('GET',))
def api_root(request, format=None):
    """Entry point of the API: expose the top-level collection URLs."""
    endpoints = {
        'users': reverse('user-list', request=request, format=format),
    }
    return Response(endpoints)
class PageList(generics.ListCreateAPIView):
    """
    API endpoint that allows pages to be listed and created.
    """
    serializer_class = PageSerializer
    permission_classes = (permissions.IsAuthenticated, IsOwnerOrAdmin)

    def get_queryset(self):
        # Superusers browse the pages of the user named in the URL;
        # everyone else only ever sees their own pages.
        owner = self.request.user
        if owner.is_superuser:
            owner = User.objects.get(username=self.kwargs['username'])
        return Page.objects.filter(author=owner)

    def pre_save(self, obj):
        # Stamp the authenticated user as the author of the new page.
        obj.author = self.request.user
class PageDetail(generics.RetrieveUpdateDestroyAPIView):
    """
    API endpoint that allows pages to be viewed, updated and deleted.
    """
    serializer_class = PageSerializer
    permission_classes = (permissions.IsAuthenticated, IsOwnerOrAdmin)

    def get_queryset(self):
        # Superusers may reach any page; regular users are restricted
        # to the pages they authored.
        requester = self.request.user
        if requester.is_superuser:
            return Page.objects.all()
        return Page.objects.filter(author=requester)

    def pre_save(self, obj):
        obj.author = self.request.user
| mit |
flwh/KK_mt6589_iq451 | prebuilts/python/darwin-x86/2.7.5/lib/python2.7/test/test_linecache.py | 96 | 4079 | """ Tests for the linecache module """
import linecache
import unittest
import os.path
from test import test_support as support
# Fixture constants shared by all test cases below.
FILENAME = linecache.__file__
INVALID_NAME = '!@$)(!@#_1'
EMPTY = ''
TESTS = ['inspect_fodder', 'inspect_fodder2', 'mapping_tests']
TEST_PATH = os.path.dirname(support.__file__)
MODULES = ['linecache', 'abc']
MODULE_PATH = os.path.dirname(FILENAME)

SOURCE_1 = '''
" Docstring "

def function():
    return result

'''

SOURCE_2 = '''
def f():
    return 1 + 1

a = f()

'''

SOURCE_3 = '''
def f():
    return 3''' # No ending newline
class LineCacheTests(unittest.TestCase):
    """Exercise linecache.getline/getlines and the module-level cache."""

    def test_getline(self):
        """getline returns matching lines for valid input, '' otherwise."""
        getline = linecache.getline

        # Bad values for line number should return an empty string
        self.assertEqual(getline(FILENAME, 2**15), EMPTY)
        self.assertEqual(getline(FILENAME, -1), EMPTY)

        # Float values currently raise TypeError, should it?
        self.assertRaises(TypeError, getline, FILENAME, 1.1)

        # Bad filenames should return an empty string
        self.assertEqual(getline(EMPTY, 1), EMPTY)
        self.assertEqual(getline(INVALID_NAME, 1), EMPTY)

        # Check whether lines correspond to those from file iteration
        for entry in TESTS:
            filename = os.path.join(TEST_PATH, entry) + '.py'
            for index, line in enumerate(open(filename)):
                self.assertEqual(line, getline(filename, index + 1))

        # Check module loading
        for entry in MODULES:
            filename = os.path.join(MODULE_PATH, entry) + '.py'
            for index, line in enumerate(open(filename)):
                self.assertEqual(line, getline(filename, index + 1))

        # Check that bogus data isn't returned (issue #1309567)
        empty = linecache.getlines('a/b/c/__init__.py')
        self.assertEqual(empty, [])

    def test_no_ending_newline(self):
        """getlines appends a trailing newline to a file lacking one."""
        self.addCleanup(support.unlink, support.TESTFN)
        with open(support.TESTFN, "w") as fp:
            fp.write(SOURCE_3)
        lines = linecache.getlines(support.TESTFN)
        self.assertEqual(lines, ["\n", "def f():\n", "    return 3\n"])

    def test_clearcache(self):
        """clearcache empties the cache populated by earlier getline calls."""
        cached = []
        for entry in TESTS:
            filename = os.path.join(TEST_PATH, entry) + '.py'
            cached.append(filename)
            linecache.getline(filename, 1)

        # Are all files cached?
        cached_empty = [fn for fn in cached if fn not in linecache.cache]
        self.assertEqual(cached_empty, [])

        # Can we clear the cache?
        linecache.clearcache()
        cached_empty = [fn for fn in cached if fn in linecache.cache]
        self.assertEqual(cached_empty, [])

    def test_checkcache(self):
        """checkcache refreshes stale entries only for the given filename."""
        getline = linecache.getline
        # Create a source file and cache its contents
        source_name = support.TESTFN + '.py'
        self.addCleanup(support.unlink, source_name)
        with open(source_name, 'w') as source:
            source.write(SOURCE_1)
        getline(source_name, 1)

        # Keep a copy of the old contents
        source_list = []
        with open(source_name) as source:
            for index, line in enumerate(source):
                self.assertEqual(line, getline(source_name, index + 1))
                source_list.append(line)

        with open(source_name, 'w') as source:
            source.write(SOURCE_2)

        # Try to update a bogus cache entry
        linecache.checkcache('dummy')

        # Check that the cache matches the old contents
        for index, line in enumerate(source_list):
            self.assertEqual(line, getline(source_name, index + 1))

        # Update the cache and check whether it matches the new source file
        linecache.checkcache(source_name)
        with open(source_name) as source:
            for index, line in enumerate(source):
                self.assertEqual(line, getline(source_name, index + 1))
                source_list.append(line)
def test_main():
    """Run this module's test case through regrtest's helper."""
    test_cases = (LineCacheTests,)
    support.run_unittest(*test_cases)
# Allow executing this test file directly from the command line.
if __name__ == "__main__":
    test_main()
| gpl-2.0 |
eonpatapon/nova | nova/api/openstack/compute/views/flavors.py | 49 | 3452 | # Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import common
class ViewBuilder(common.ViewBuilder):

    _collection_name = "flavors"

    def basic(self, request, flavor):
        """Return the minimal (id/name/links) representation of a flavor."""
        flavor_id = flavor["flavorid"]
        links = self._get_links(request, flavor_id, self._collection_name)
        return {
            "flavor": {
                "id": flavor_id,
                "name": flavor["name"],
                "links": links,
            },
        }

    def show(self, request, flavor):
        """Return the detailed representation of a single flavor."""
        flavor_id = flavor["flavorid"]
        detail = {
            "id": flavor_id,
            "name": flavor["name"],
            "ram": flavor["memory_mb"],
            "disk": flavor["root_gb"],
            "vcpus": flavor.get("vcpus") or "",
            "links": self._get_links(request, flavor_id,
                                     self._collection_name),
        }
        return {"flavor": detail}

    def index(self, request, flavors):
        """Return the 'index' view of flavors."""
        return self._list_view(self.basic, request, flavors,
                               self._collection_name)

    def detail(self, request, flavors):
        """Return the 'detail' view of flavors."""
        return self._list_view(self.show, request, flavors,
                               self._collection_name + '/detail')

    def _list_view(self, func, request, flavors, coll_name):
        """Build the reply dict for a list of flavors.

        :param func: formatter applied to every flavor (``basic`` or ``show``)
        :param request: API request
        :param flavors: list of flavors in dictionary format
        :param coll_name: name of collection, used to generate the next link
                          for a pagination query
        :returns: flavor reply data in dictionary format
        """
        flavor_list = [func(request, item)["flavor"] for item in flavors]
        reply = dict(flavors=flavor_list)
        next_links = self._get_collection_links(request, flavors, coll_name,
                                                "flavorid")
        if next_links:
            reply["flavors_links"] = next_links
        return reply
class V3ViewBuilder(ViewBuilder):

    def show(self, request, flavor):
        """Extend the base detail view with the extra v3-only fields."""
        flavor_dict = super(V3ViewBuilder, self).show(request, flavor)
        extra_fields = {
            "swap": flavor["swap"] or "",
            "OS-FLV-EXT-DATA:ephemeral": flavor["ephemeral_gb"],
            "OS-FLV-DISABLED:disabled": flavor["disabled"],
            "vcpus": flavor["vcpus"],
        }
        flavor_dict['flavor'].update(extra_fields)
        return flavor_dict
| apache-2.0 |
tobiasgehring/qudi | hardware/awg/tektronix_awg70k.py | 1 | 63429 | # -*- coding: utf-8 -*-
"""
This file contains the Qudi hardware module for AWG70000 Series.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import os
import time
import re
import visa
import numpy as np
from socket import socket, AF_INET, SOCK_STREAM
from ftplib import FTP
from collections import OrderedDict
from fnmatch import fnmatch
from core.base import Base
from interface.pulser_interface import PulserInterface, PulserConstraints
class AWG70K(Base, PulserInterface):
"""
"""
_modclass = 'awg70k'
_modtype = 'hardware'
def on_activate(self):
""" Initialisation performed during activation of the module.
"""
config = self.getConfiguration()
if 'awg_visa_address' in config.keys():
self.visa_address = config['awg_visa_address']
else:
self.log.error('This is AWG: Did not find >>awg_visa_address<< in configuration.')
if 'awg_ip_address' in config.keys():
self.ip_address = config['awg_ip_address']
else:
self.log.error('This is AWG: Did not find >>awg_visa_address<< in configuration.')
if 'pulsed_file_dir' in config.keys():
self.pulsed_file_dir = config['pulsed_file_dir']
if not os.path.exists(self.pulsed_file_dir):
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('The directory defined in parameter "pulsed_file_dir" in the '
'config for SequenceGeneratorLogic class does not exist!\n'
'The default home directory\n{0}\n will be taken instead.'
''.format(self.pulsed_file_dir))
else:
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('No parameter "pulsed_file_dir" was specified in the config for '
'SequenceGeneratorLogic as directory for the pulsed files!\nThe '
'default home directory\n{0}\nwill be taken instead.'
''.format(self.pulsed_file_dir))
if 'ftp_root_dir' in config.keys():
self.ftp_root_directory = config['ftp_root_dir']
else:
self.ftp_root_directory = 'C:\\inetpub\\ftproot'
self.log.warning('No parameter "ftp_root_dir" was specified in the config for '
'tektronix_awg70k as directory for the FTP server root on the AWG!\n'
'The default root directory\n{0}\nwill be taken instead.'
''.format(self.ftp_root_directory))
self.host_waveform_directory = self._get_dir_for_name('sampled_hardware_files')
self.asset_directory = 'waves'
self.user = 'anonymous'
self.passwd = 'anonymous@'
if 'ftp_login' in config.keys() and 'ftp_passwd' in config.keys():
self.user = config['ftp_login']
self.passwd = config['ftp_passwd']
# connect ethernet socket and FTP
self._rm = visa.ResourceManager()
if self.visa_address not in self._rm.list_resources():
self.log.error('VISA address "{0}" not found by the pyVISA resource manager.\nCheck '
'the connection by using for example "Agilent Connection Expert".'
''.format(self.visa_address))
else:
self.awg = self._rm.open_resource(self.visa_address)
# Set data transfer format (datatype, is_big_endian, container)
self.awg.values_format.use_binary('f', False, np.array)
# set timeout by default to 15 sec
self.awg.timeout = 15000
self.ftp = FTP(self.ip_address)
self.ftp.login(user=self.user, passwd=self.passwd)
self.ftp.cwd(self.asset_directory)
self.connected = True
self.awg_model = self._get_model_ID()[1]
self.log.debug('Found the following model: {0}'.format(self.awg_model))
self.sample_rate = self.get_sample_rate()
self.amplitude_list, self.offset_list = self.get_analog_level()
self.markers_low, self.markers_high = self.get_digital_level()
self.is_output_enabled = self._is_output_on()
self.use_sequencer = self.has_sequence_mode()
self.active_channel = self.get_active_channels()
self.interleave = self.get_interleave()
self.current_loaded_asset = ''
self._init_loaded_asset()
self.current_status = 0
def on_deactivate(self):
""" Required tasks to be performed during deactivation of the module.
"""
# Closes the connection to the AWG
try:
self.awg.close()
except:
self.log.debug('Closing AWG connection using pyvisa failed.')
self.log.info('Closed connection to AWG')
self.connected = False
return
    def get_constraints(self):
        """
        Retrieve the hardware constrains from the Pulsing device.

        @return constraints object: object with pulser constraints as attributes.

        Provides all the constraints (e.g. sample_rate, amplitude, total_length_bins,
        channel_config, ...) related to the pulse generator hardware to the caller.

            SEE PulserConstraints CLASS IN pulser_interface.py FOR AVAILABLE CONSTRAINTS!!!

        If you are not sure about the meaning, look in other hardware files to get an impression.
        If still additional constraints are needed, then they have to be added to the
        PulserConstraints class.

        Each scalar parameter is an ScalarConstraints object defined in cor.util.interfaces.
        Essentially it contains min/max values as well as min step size, default value and unit of
        the parameter.

        PulserConstraints.activation_config differs, since it contain the channel
        configuration/activation information of the form:
            {<descriptor_str>: <channel_list>,
             <descriptor_str>: <channel_list>,
             ...}

        If the constraints cannot be set in the pulsing hardware (e.g. because it might have no
        sequence mode) just leave it out so that the default is used (only zeros).
        """
        constraints = PulserConstraints()

        # The compatible file formats are hardware specific.
        constraints.waveform_format = ['wfmx', 'wfm']
        constraints.sequence_format = ['seqx', 'seq']

        # Sample rate limits differ between the two-channel (AWG70002A) and
        # the single-channel (AWG70001A) model; values in Hz.
        if self.awg_model == 'AWG70002A':
            constraints.sample_rate.min = 1.5e3
            constraints.sample_rate.max = 25.0e9
            constraints.sample_rate.step = 5.0e2
            constraints.sample_rate.default = 25.0e9
        elif self.awg_model == 'AWG70001A':
            constraints.sample_rate.min = 3.0e3
            constraints.sample_rate.max = 50.0e9
            constraints.sample_rate.step = 1.0e3
            constraints.sample_rate.default = 50.0e9

        # Analog amplitude in Volt peak-to-peak.
        constraints.a_ch_amplitude.min = 0.25
        constraints.a_ch_amplitude.max = 0.5
        constraints.a_ch_amplitude.step = 0.001
        constraints.a_ch_amplitude.default = 0.5
        # FIXME: Enter the proper digital channel low constraints:
        constraints.d_ch_low.min = 0.0
        constraints.d_ch_low.max = 0.0
        constraints.d_ch_low.step = 0.0
        constraints.d_ch_low.default = 0.0
        # FIXME: Enter the proper digital channel high constraints:
        constraints.d_ch_high.min = 0.0
        constraints.d_ch_high.max = 1.4
        constraints.d_ch_high.step = 0.1
        constraints.d_ch_high.default = 1.4

        constraints.sampled_file_length.min = 1
        constraints.sampled_file_length.max = 8000000000
        constraints.sampled_file_length.step = 1
        constraints.sampled_file_length.default = 1
        # FIXME: Check the proper number for your device
        constraints.waveform_num.min = 1
        constraints.waveform_num.max = 32000
        constraints.waveform_num.step = 1
        constraints.waveform_num.default = 1
        # FIXME: Check the proper number for your device
        constraints.sequence_num.min = 1
        constraints.sequence_num.max = 4000
        constraints.sequence_num.step = 1
        constraints.sequence_num.default = 1
        # FIXME: Check the proper number for your device
        constraints.subsequence_num.min = 1
        constraints.subsequence_num.max = 8000
        constraints.subsequence_num.step = 1
        constraints.subsequence_num.default = 1

        # If sequencer mode is available then these should be specified
        constraints.repetitions.min = 0
        constraints.repetitions.max = 65536
        constraints.repetitions.step = 1
        constraints.repetitions.default = 0
        # ToDo: Check how many external triggers are available
        constraints.trigger_in.min = 0
        constraints.trigger_in.max = 2
        constraints.trigger_in.step = 1
        constraints.trigger_in.default = 0

        constraints.event_jump_to.min = 0
        constraints.event_jump_to.max = 8000
        constraints.event_jump_to.step = 1
        constraints.event_jump_to.default = 0

        constraints.go_to.min = 0
        constraints.go_to.max = 8000
        constraints.go_to.step = 1
        constraints.go_to.default = 0

        # the name a_ch<num> and d_ch<num> are generic names, which describe UNAMBIGUOUSLY the
        # channels. Here all possible channel configurations are stated, where only the generic
        # names should be used. The names for the different configurations can be customary chosen.
        activation_config = OrderedDict()
        if self.awg_model == 'AWG70002A':
            activation_config['all'] = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3', 'd_ch4']
            # Usage of both channels but reduced markers (higher analog resolution)
            activation_config['ch1_2mrk_ch2_1mrk'] = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3']
            activation_config['ch1_2mrk_ch2_0mrk'] = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2']
            activation_config['ch1_1mrk_ch2_2mrk'] = ['a_ch1', 'd_ch1', 'a_ch2', 'd_ch3', 'd_ch4']
            activation_config['ch1_0mrk_ch2_2mrk'] = ['a_ch1', 'a_ch2', 'd_ch3', 'd_ch4']
            activation_config['ch1_1mrk_ch2_1mrk'] = ['a_ch1', 'd_ch1', 'a_ch2', 'd_ch3']
            activation_config['ch1_0mrk_ch2_1mrk'] = ['a_ch1', 'a_ch2', 'd_ch3']
            activation_config['ch1_1mrk_ch2_0mrk'] = ['a_ch1', 'd_ch1', 'a_ch2']
            # Usage of channel 1 only:
            activation_config['ch1_2mrk'] = ['a_ch1', 'd_ch1', 'd_ch2']
            # Usage of channel 2 only:
            activation_config['ch2_2mrk'] = ['a_ch2', 'd_ch3', 'd_ch4']
            # Usage of only channel 1 with one marker:
            activation_config['ch1_1mrk'] = ['a_ch1', 'd_ch1']
            # Usage of only channel 2 with one marker:
            activation_config['ch2_1mrk'] = ['a_ch2', 'd_ch3']
            # Usage of only channel 1 with no marker:
            activation_config['ch1_0mrk'] = ['a_ch1']
            # Usage of only channel 2 with no marker:
            activation_config['ch2_0mrk'] = ['a_ch2']
        elif self.awg_model == 'AWG70001A':
            activation_config['all'] = ['a_ch1', 'd_ch1', 'd_ch2']
            # Usage of only channel 1 with one marker:
            activation_config['ch1_1mrk'] = ['a_ch1', 'd_ch1']
            # Usage of only channel 1 with no marker:
            activation_config['ch1_0mrk'] = ['a_ch1']
        constraints.activation_config = activation_config

        # FIXME: additional constraint really necessary?
        constraints.dac_resolution = {'min': 8, 'max': 10, 'step': 1, 'unit': 'bit'}
        return constraints
def pulser_on(self):
""" Switches the pulsing device on.
@return int: error code (0:OK, -1:error, higher number corresponds to
current status of the device. Check then the
class variable status_dic.)
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
self.awg.write('AWGC:RUN')
# wait until the AWG is actually running
while not self._is_output_on():
time.sleep(0.25)
self.current_status = 1
self.is_output_enabled = True
return self.current_status
def pulser_off(self):
""" Switches the pulsing device off.
@return int: error code (0:OK, -1:error, higher number corresponds to
current status of the device. Check then the
class variable status_dic.)
"""
self.awg.write('AWGC:STOP')
# wait until the AWG has actually stopped
while self._is_output_on():
time.sleep(0.25)
self.current_status = 0
self.is_output_enabled = False
return 0
    def upload_asset(self, asset_name=None):
        """ Upload an already hardware conform file to the device.
            Does NOT load it into channels.

        @param str asset_name: name of the ensemble/sequence to be uploaded

        @return int: error code (0:OK, -1:error)

        If nothing is passed, method will be skipped.
        """
        # check input
        if asset_name is None:
            self.log.warning('No asset name provided for upload!\nCorrect that!\n'
                             'Command will be ignored.')
            return -1

        # self._activate_awg_mode()

        # at first delete all the name, which might lead to confusions in the upload procedure:
        self.delete_asset(asset_name)

        # determine which files to transfer. Sequence/single-waveform/matlab
        # formats are mutually exclusive (hence the "break"); only the
        # per-channel "_ch?.wfm*" pattern collects multiple files.
        filelist = self._get_filenames_on_host()
        upload_names = []
        for filename in filelist:
            if filename == asset_name + '.seq':
                upload_names.append(filename)
                break
            elif filename == asset_name + '.seqx':
                upload_names.append(filename)
                break
            elif fnmatch(filename, asset_name + '_ch?.wfm*'):
                upload_names.append(filename)
            elif fnmatch(filename, asset_name + '.wfm*'):
                upload_names.append(filename)
                break
            elif filename == asset_name + '.mat':
                upload_names.append(filename)
                break

        # Transfer files via FTP and open them into the AWG workspace.
        for filename in upload_names:
            self._send_file(filename)
            file_path = os.path.join(self.ftp_root_directory, self.asset_directory, filename)
            if filename.endswith('.mat'):
                # Matlab files need the dedicated "open sampled waveform" command.
                self.awg.write('MMEM:OPEN:SASS:WAV "{0}"'.format(file_path))
            else:
                self.awg.write('MMEM:OPEN "{0}"'.format(file_path))
            # NOTE(review): the first *OPC? query looks redundant given the
            # polling loop below — presumably an initial blocking sync;
            # verify on hardware before removing.
            self.awg.query('*OPC?')
            # Wait for the loading to complete
            while int(self.awg.query('*OPC?')) != 1:
                time.sleep(0.2)
        return 0
    def load_asset(self, asset_name, load_dict=None):
        """ Loads a sequence or waveform to the specified channel of the pulsing
        device.

        @param str asset_name: The name of the asset to be loaded

        @param dict load_dict: a dictionary with keys being one of the
                               available channel numbers and items being the
                               name of the already sampled
                               waveform/sequence files.
                               Examples: {1: rabi_ch1, 2: rabi_ch2}
                                         {1: rabi_ch2, 2: rabi_ch1}
                               This parameter is optional. If none is given
                               then the channel association is invoked from
                               the sequence generation,
                               i.e. the filename appendix (_ch1, _ch2 etc.)

        @return int: error code (0:OK, -1:error)

        Unused for digital pulse generators without sequence storage capability
        (PulseBlaster, FPGA).
        """
        # self._activate_awg_mode()

        # Get all sequence and waveform names currently loaded into AWG workspace
        seq_list = self._get_sequence_names_memory()
        wfm_list = self._get_waveform_names_memory()

        # Check if load_dict is None or an empty dict
        if not load_dict:
            # check if the desired asset is in workspace. Load to channels if that is the case.
            if asset_name in seq_list:
                # Sequences: query the number of tracks and assign track i
                # to analog channel i.
                trac_num = int(self.awg.query('SLIS:SEQ:TRAC? "{0}"'.format(asset_name)))
                for chnl in range(1, trac_num + 1):
                    self.awg.write('SOUR{0}:CASS:SEQ "{1}", {2}'.format(chnl, asset_name, chnl))
            # check if the desired asset is in workspace. Load to channels if that is the case.
            elif asset_name + '_ch1' in wfm_list:
                # Waveforms: load the per-channel files by their "_ch<n>" suffix.
                self.awg.write('SOUR1:CASS:WAV "{0}"'.format(asset_name + '_ch1'))
                if self._get_max_a_channel_number() > 1 and asset_name + '_ch2' in wfm_list:
                    self.awg.write('SOUR2:CASS:WAV "{0}"'.format(asset_name + '_ch2'))
            # NOTE(review): this is set even when the asset was found in
            # neither list — verify whether that is intended.
            self.current_loaded_asset = asset_name
        else:
            self.log.error('Loading assets into user defined channels is not yet implemented.\n'
                           'In other words: The "load_dict" parameter of the "load_asset" method '
                           'is not handled yet.')

        # Wait for the loading to complete
        while int(self.awg.query('*OPC?')) != 1:
            time.sleep(0.2)
        return 0
    def get_loaded_asset(self):
        """ Retrieve the currently loaded asset name of the device.

        @return str: Name of the current asset, that can be either a filename
                     a waveform, a sequence ect.
        """
        # The name is tracked in software by load_asset()/clear_all(); no
        # hardware query is performed here.
        return self.current_loaded_asset
def _send_file(self, filename):
""" Sends an already hardware specific waveform file to the pulse
generators waveform directory.
@param string filename: The file name of the source file
@return int: error code (0:OK, -1:error)
Unused for digital pulse generators without sequence storage capability
(PulseBlaster, FPGA).
"""
filepath = os.path.join(self.host_waveform_directory, filename)
with FTP(self.ip_address) as ftp:
ftp.login(user=self.user,passwd=self.passwd) # login as default user anonymous, passwd anonymous@
ftp.cwd(self.asset_directory)
with open(filepath, 'rb') as uploaded_file:
ftp.storbinary('STOR '+filename, uploaded_file)
return 0
def clear_all(self):
""" Clears the loaded waveform from the pulse generators RAM.
@return int: error code (0:OK, -1:error)
Delete all waveforms and sequences from Hardware memory and clear the
visual display. Unused for digital pulse generators without sequence
storage capability (PulseBlaster, FPGA).
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
self.awg.write('WLIS:WAV:DEL ALL')
self.awg.write('SLIS:SEQ:DEL ALL')
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.25)
self.current_loaded_asset = ''
return 0
def get_status(self):
""" Retrieves the status of the pulsing hardware
@return (int, dict): inter value of the current status with the
corresponding dictionary containing status
description for all the possible status variables
of the pulse generator hardware.
0 indicates that the instrument has stopped.
1 indicates that the instrument is waiting for trigger.
2 indicates that the instrument is running.
-1 indicates that the request of the status for AWG has failed.
"""
status_dic = {}
status_dic[-1] = 'Failed Request or Communication'
status_dic[0] = 'Device has stopped, but can receive commands.'
status_dic[1] = 'Device is active and running.'
# All the other status messages should have higher integer values
# then 1.
return self.current_status, status_dic
def set_sample_rate(self, sample_rate):
""" Set the sample rate of the pulse generator hardware
@param float sample_rate: The sample rate to be set (in Hz)
@return foat: the sample rate returned from the device (-1:error)
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
self.awg.write('CLOCK:SRATE %.4G' % sample_rate)
while int(self.awg.query('*OPC?')) != 1:
time.sleep(0.25)
time.sleep(1)
self.get_sample_rate()
return self.sample_rate
def get_sample_rate(self):
""" Set the sample rate of the pulse generator hardware
@return float: The current sample rate of the device (in Hz)
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
return_rate = float(self.awg.query('CLOCK:SRATE?'))
self.sample_rate = return_rate
return self.sample_rate
def get_analog_level(self, amplitude=None, offset=None):
""" Retrieve the analog amplitude and offset of the provided channels.
@param list amplitude: optional, if a specific amplitude value (in Volt
peak to peak, i.e. the full amplitude) of a
channel is desired.
@param list offset: optional, if a specific high value (in Volt) of a
channel is desired.
@return: ({}, {}): tuple of two dicts, with keys being the channel
number and items being the values for those channels.
Amplitude is always denoted in Volt-peak-to-peak and
Offset in (absolute) Voltage.
If no entries provided then the levels of all channels where simply
returned. If no analog channels provided, return just an empty dict.
Example of a possible input:
amplitude = [1,4], offset =[1,3]
to obtain the amplitude of channel 1 and 4 and the offset
{1: -0.5, 4: 2.0} {}
since no high request was performed.
Note, the major difference to digital signals is that analog signals are
always oscillating or changing signals, otherwise you can use just
digital output. In contrast to digital output levels, analog output
levels are defined by an amplitude (here total signal span, denoted in
Voltage peak to peak) and an offset (denoted by an (absolute) voltage).
"""
amp = {}
off = {}
# Check if AWG is in function generator mode
# self._activate_awg_mode()
chnl_list = ['a_ch' + str(ch_num) for ch_num in
range(1, self._get_max_a_channel_number() + 1)]
pattern = re.compile('[0-9]+')
# get pp amplitudes
if amplitude is None:
for ch_num, chnl in enumerate(chnl_list):
amp[chnl] = float(self.awg.query('SOUR' + str(ch_num + 1) + ':VOLT:AMPL?'))
else:
for chnl in amplitude:
if chnl in chnl_list:
ch_num = int(re.search(pattern, chnl).group(0))
amp[chnl] = float(self.awg.query('SOUR' + str(ch_num) + ':VOLT:AMPL?'))
else:
self.log.warning('Get analog amplitude from AWG70k channel "{0}" failed. '
'Channel non-existent.'.format(str(chnl)))
# get voltage offsets
if offset is None:
for ch_num, chnl in enumerate(chnl_list):
off[chnl] = 0.0
else:
for chnl in offset:
if chnl in chnl_list:
ch_num = int(re.search(pattern, chnl).group(0))
off[chnl] = 0.0
else:
self.log.warning('Get analog offset from AWG70k channel "{0}" failed. '
'Channel non-existent.'.format(str(chnl)))
self.amplitude_list = amp
self.offset_list = off
return amp, off
def set_analog_level(self, amplitude=None, offset=None):
    """ Set amplitude and/or offset value of the provided analog channel.

    @param dict amplitude: dictionary, with key being the channel ('a_ch1',
                           'a_ch2', ...) and items being the amplitude values
                           (in Volt peak to peak, i.e. the full amplitude)
                           for the desired channel.
    @param dict offset: dictionary, with key being the channel and items
                        being the offset values (in absolute volt) for the
                        desired channel.

    @return (dict, dict): the cached amplitude and offset dictionaries after
                          the requested values have been applied.

    If nothing is passed then the command is being ignored.

    Note, the major difference to digital signals is that analog signals are
    always oscillating or changing signals, otherwise you can use just
    digital output. In contrast to digital output levels, analog output
    levels are defined by an amplitude (here total signal span, denoted in
    Voltage peak to peak) and an offset (denoted by an (absolute) voltage).

    In general there is not a bijective correspondence between
    (amplitude, offset) for analog and (value high, value low) for digital!
    """
    # Check the inputs by using the constraints...
    constraints = self.get_constraints()
    # ...and the channel numbers
    num_of_channels = self._get_max_a_channel_number()

    # Check if AWG is in function generator mode
    # self._activate_awg_mode()

    pattern = re.compile('[0-9]+')

    # amplitude sanity check.
    # Iterate over a snapshot of the keys: entries for non-existent channels
    # are deleted while looping, which would otherwise raise RuntimeError
    # (dict changed size during iteration), and the subsequent lookup of the
    # just-deleted key would raise KeyError.
    if amplitude is not None:
        for chnl in list(amplitude):
            ch_num = int(re.search(pattern, chnl).group(0))
            if ch_num > num_of_channels or ch_num < 1:
                self.log.warning('Channel to set (a_ch{0}) not available in AWG.\nSetting '
                                 'analogue voltage for this channel ignored.'.format(chnl))
                del amplitude[chnl]
                continue
            # Clamp the requested Vpp to the hardware constraints.
            if amplitude[chnl] < constraints.a_ch_amplitude.min:
                self.log.warning('Minimum Vpp for channel "{0}" is {1}. Requested Vpp of {2}V '
                                 'was ignored and instead set to min value.'
                                 ''.format(chnl, constraints.a_ch_amplitude.min,
                                           amplitude[chnl]))
                amplitude[chnl] = constraints.a_ch_amplitude.min
            elif amplitude[chnl] > constraints.a_ch_amplitude.max:
                self.log.warning('Maximum Vpp for channel "{0}" is {1}. Requested Vpp of {2}V '
                                 'was ignored and instead set to max value.'
                                 ''.format(chnl, constraints.a_ch_amplitude.max,
                                           amplitude[chnl]))
                amplitude[chnl] = constraints.a_ch_amplitude.max

    # offset sanity check (same snapshot iteration as above)
    if offset is not None:
        for chnl in list(offset):
            ch_num = int(re.search(pattern, chnl).group(0))
            if ch_num > num_of_channels or ch_num < 1:
                self.log.warning('Channel to set (a_ch{0}) not available in AWG.\nSetting '
                                 'offset voltage for this channel ignored.'.format(chnl))
                del offset[chnl]
                continue
            if offset[chnl] < constraints.a_ch_offset.min:
                self.log.warning('Minimum offset for channel "{0}" is {1}. Requested offset of '
                                 '{2}V was ignored and instead set to min value.'
                                 ''.format(chnl, constraints.a_ch_offset.min, offset[chnl]))
                offset[chnl] = constraints.a_ch_offset.min
            elif offset[chnl] > constraints.a_ch_offset.max:
                self.log.warning('Maximum offset for channel "{0}" is {1}. Requested offset of '
                                 '{2}V was ignored and instead set to max value.'
                                 ''.format(chnl, constraints.a_ch_offset.max,
                                           offset[chnl]))
                offset[chnl] = constraints.a_ch_offset.max

    # Write the validated values to the hardware and update the cached dicts.
    # The SCPI mnemonic needs the numeric channel index ('SOUR1:...'); using
    # the full key ('a_ch1') would produce the invalid command
    # 'SOURa_ch1:VOLT:AMPL ...', matching how get_analog_level queries.
    if amplitude is not None:
        for a_ch in amplitude:
            ch_num = int(re.search(pattern, a_ch).group(0))
            self.awg.write('SOUR{0}:VOLT:AMPL {1}'.format(ch_num, amplitude[a_ch]))
            self.amplitude_list[a_ch] = amplitude[a_ch]
        while int(self.awg.query('*OPC?')) != 1:
            time.sleep(0.25)

    if offset is not None:
        for a_ch in offset:
            ch_num = int(re.search(pattern, a_ch).group(0))
            self.awg.write('SOUR{0}:VOLT:OFFSET {1}'.format(ch_num, offset[a_ch]))
            self.offset_list[a_ch] = offset[a_ch]
        while int(self.awg.query('*OPC?')) != 1:
            time.sleep(0.25)

    return self.amplitude_list, self.offset_list
def get_digital_level(self, low=None, high=None):
    """ Retrieve the digital low and high level of the provided channels.

    @param list low: optional, digital channel numbers whose low value
                     (in Volt) is requested.
    @param list high: optional, digital channel numbers whose high value
                      (in Volt) is requested.

    @return (dict, dict): low and high values keyed by digital channel
                          number, both denoted in (absolute) Voltage. If no
                          entries are provided the levels of all channels
                          are returned.

    Note, the major difference to analog signals is that digital signals are
    either ON or OFF, whereas analog channels have a varying amplitude
    range. In contrast to analog output levels, digital output levels are
    defined by a voltage, which corresponds to the ON status and a voltage
    which corresponds to the OFF status (both denoted in (absolute) voltage)

    In general there is not a bijective correspondence between
    (amplitude, offset) for analog and (value high, value low) for digital!
    """
    # FIXME: Test with multiple channel AWG
    # Check if AWG is in function generator mode
    # self._activate_awg_mode()

    # Digital channel d serves as marker ((d-1) % 2) + 1 of analog channel
    # (d // 2 + d % 2); the lookup lists mirror that mapping.
    digital_channels = list(range(1, 2 * self._get_max_a_channel_number() + 1))
    analog_channels = [(d_ch + 1) // 2 for d_ch in digital_channels]
    marker_indices = [2 - (d_ch % 2) for d_ch in digital_channels]

    def _marker_level(d_ch, which):
        # Query one marker voltage ('LOW' or 'HIGH') from the hardware.
        scpi = 'SOUR{0}:MARK{1}:VOLT:{2}?'.format(
            analog_channels[d_ch - 1], marker_indices[d_ch - 1], which)
        return float(self.awg.query(scpi))

    low_val = {d_ch: _marker_level(d_ch, 'LOW')
               for d_ch in (digital_channels if low is None else low)}
    high_val = {d_ch: _marker_level(d_ch, 'HIGH')
                for d_ch in (digital_channels if high is None else high)}

    self.markers_high = high_val
    self.markers_low = low_val
    return low_val, high_val
def set_digital_level(self, low=None, high=None):
    """ Set low and/or high value of the provided digital channel.

    @param dict low: dictionary, with key being the channel and items being
                     the low values (in volt) for the desired channel.
    @param dict high: dictionary, with key being the channel and items being
                      the high values (in volt) for the desired channel.

    If nothing is passed then the command is being ignored.

    Note, the major difference to analog signals is that digital signals are
    either ON or OFF, whereas analog channels have a varying amplitude
    range. In contrast to analog output levels, digital output levels are
    defined by a voltage, which corresponds to the ON status and a voltage
    which corresponds to the OFF status (both denoted in (absolute) voltage)

    In general there is not a bijective correspondence between
    (amplitude, offset) for analog and (value high, value low) for digital!
    """
    low = {} if low is None else low
    high = {} if high is None else high

    # Check if AWG is in function generator mode
    # self._activate_awg_mode()

    # If you want to check the input use the constraints:
    constraints = self.get_constraints()

    # The hardware commands are not implemented yet; the loops only document
    # the intended per-channel calls.
    for d_ch in low:
        # FIXME: Tell the device the proper digital voltage low value:
        # self.tell('SOURCE1:MARKER{0}:VOLTAGE:LOW {1}'.format(d_ch, low[d_ch]))
        pass

    for d_ch in high:
        # FIXME: Tell the device the proper digital voltage high value:
        # self.tell('SOURCE1:MARKER{0}:VOLTAGE:HIGH {1}'.format(d_ch, high[d_ch]))
        pass
def get_active_channels(self, ch=None):
    """ Get the active channels of the pulse generator hardware.

    @param list ch: optional, if specific analog or digital channels are
                    needed to be asked without obtaining all the channels.

    @return dict: keys are channel descriptors ('a_ch1', 'd_ch2', ...) and
                  items booleans stating whether the channel is active.

    Example for an possible input (order is not important):
        ch = ['a_ch2', 'd_ch2', 'a_ch1', 'd_ch5', 'd_ch1']
    then the output might look like
        {'a_ch2': True, 'd_ch2': False, 'a_ch1': False, 'd_ch5': True, 'd_ch1': False}

    If no parameters are passed to this method all channels will be asked
    for their setting.
    """
    # If you want to check the input use the constraints:
    constraints = self.get_constraints()
    max_analog = self._get_max_a_channel_number()

    # Check if AWG is in function generator mode
    # self._activate_awg_mode()

    # Start from an all-inactive mapping of every analog channel plus its
    # two possible marker (digital) channels.
    active_ch = {}
    for idx in range(1, max_analog + 1):
        active_ch['a_ch{0}'.format(idx)] = False
        active_ch['d_ch{0}'.format(2 * idx - 1)] = False
        active_ch['d_ch{0}'.format(2 * idx)] = False

    # An analog channel is active when its output state is on.
    for idx in range(1, max_analog + 1):
        active_ch['a_ch{0}'.format(idx)] = bool(
            int(self.awg.query('OUTPUT{0}:STATE?'.format(idx))))

    # The number of active markers per channel follows from the DAC
    # resolution: each enabled marker costs one bit of resolution.
    max_res = constraints.dac_resolution['max']
    for idx in range(1, max_analog + 1):
        if not active_ch['a_ch{0}'.format(idx)]:
            continue
        marker_count = max_res - int(self.awg.query('SOUR{0}:DAC:RES?'.format(idx)))
        if marker_count > 0:
            active_ch['d_ch{0}'.format(2 * idx - 1)] = True
        if marker_count == 2:
            active_ch['d_ch{0}'.format(2 * idx)] = True

    self.active_channel = active_ch

    # Return either all channel information or just the channels asked for.
    if ch is None:
        return active_ch
    return {channel: active_ch[channel] for channel in ch}
def set_active_channels(self, ch=None):
    """ Set the active channels for the pulse generator hardware.

    @param dict ch: dictionary with keys being the analog or digital
                    string generic names for the channels with items being
                    a boolean value.

    @return dict: with the actual set values for active channels for analog
                  and digital values.

    If nothing is passed then the command will return an empty dict.

    Note: After setting the active channels of the device, retrieve them
          again for obtaining the actual set value(s) and use that
          information for further processing.

    Example for possible input:
        ch={'a_ch2': True, 'd_ch1': False, 'd_ch3': True, 'd_ch4': True}
    to activate analog channel 2 digital channel 3 and 4 and to deactivate
    digital channel 1.

    AWG5000 Series instruments support only 14-bit resolution. Therefore
    this command will have no effect on the DAC for these instruments. On
    other devices the deactivation of digital channels increase the DAC
    resolution of the analog channels.
    """
    if ch is None:
        return {}

    constraints = self.get_constraints()

    # Check if AWG is in function generator mode
    # self._activate_awg_mode()

    # Merge the requested changes into a copy of the current channel state;
    # abort on any channel descriptor the hardware does not have.
    target_state = self.active_channel.copy()
    for channel in ch:
        if channel not in self.active_channel:
            self.log.error('Trying to (de)activate channel "{0}". This channel is not present '
                           'in AWG. Setting channels aborted.'.format(channel))
            return {}
        target_state[channel] = ch[channel]

    # The resulting set of active channels must match one of the allowed
    # activation configurations from the constraints.
    requested_active = sorted(name for name in target_state if target_state[name])
    config_allowed = any(sorted(cfg) == requested_active
                         for cfg in constraints.activation_config.values())
    if not config_allowed:
        self.log.error('activation_config to set ({0}) is not allowed according to constraints.'
                       ''.format(requested_active))
        return {}

    # For every analog channel: derive the DAC resolution from the number of
    # markers it has to drive (each marker costs one bit), write it to the
    # hardware and switch the output state accordingly.
    number_pattern = re.compile('[0-9]+')
    max_res = constraints.dac_resolution['max']
    for channel in [name for name in target_state if 'a_ch' in name]:
        ch_index = int(re.search(number_pattern, channel).group(0))
        if not target_state['d_ch{0}'.format(2 * ch_index - 1)]:
            marker_count = 0
        elif target_state['d_ch{0}'.format(2 * ch_index)]:
            marker_count = 2
        else:
            marker_count = 1
        self.awg.write('SOUR' + str(ch_index) + ':DAC:RES ' + str(max_res - marker_count))
        if target_state[channel]:
            self.awg.write('OUTPUT' + str(ch_index) + ':STATE ON')
        else:
            self.awg.write('OUTPUT' + str(ch_index) + ':STATE OFF')

    self.active_channel = target_state
    return self.active_channel
def get_uploaded_asset_names(self):
    """ Retrieve the names of all uploaded assets on the device.

    @return list: All uploaded asset name strings in the current device
                  directory. This is no list of the file names.

    Unused for digital pulse generators without sequence storage capability
    (PulseBlaster, FPGA).
    """
    def _asset_name(fname):
        # Waveform files carry a per-channel suffix ('..._ch1.wfmx'); other
        # asset types are identified by their extension alone.
        if fnmatch(fname, '*_ch?.wfmx') or fnmatch(fname, '*_ch?.wfm'):
            return fname.rsplit('_', 1)[0]
        if fname.endswith('.seqx'):
            return fname[:-5]
        if fname.endswith(('.seq', '.mat')):
            return fname[:-4]
        return None

    # Collect unique asset names, preserving first-seen order.
    name_list = []
    for fname in self._get_filenames_on_device():
        asset = _asset_name(fname)
        if asset is not None and asset not in name_list:
            name_list.append(asset)
    return name_list
def get_saved_asset_names(self):
    """ Retrieve the names of all sampled and saved assets on the host PC.

    This is no list of the file names.

    @return list: All saved asset name strings in the current directory of
                  the host PC.
    """
    def _to_asset_name(fname):
        # Strip the channel suffix from waveform files, the extension from
        # sequence/matlab files; anything else is not an asset.
        if fnmatch(fname, '*_ch?.wfmx') or fnmatch(fname, '*_ch?.wfm'):
            return fname.rsplit('_', 1)[0]
        if fname.endswith('.seqx'):
            return fname[:-5]
        if fname.endswith(('.seq', '.mat')):
            return fname[:-4]
        return None

    # Exclude the channel specifier for multiple analog channels and build
    # the duplicate-free return list in first-seen order.
    saved_names = []
    for fname in self._get_filenames_on_host():
        asset = _to_asset_name(fname)
        if asset is not None and asset not in saved_names:
            saved_names.append(asset)
    return saved_names
def delete_asset(self, asset_name):
    """ Delete all files associated with an asset with the passed asset_name from the device memory.

    @param str asset_name: The name of the asset to be deleted.
                           Optionally a list of asset names can be passed.

    @return list: a list with strings of the files which were deleted.

    Unused for digital pulse generators without sequence storage capability
    (PulseBlaster, FPGA).
    """
    if not isinstance(asset_name, list):
        asset_name = [asset_name]

    # self._activate_awg_mode()

    # get all uploaded files and asset names in workspace
    uploaded_files = self._get_filenames_on_device()
    wfm_list = self._get_waveform_names_memory()
    seq_list = self._get_sequence_names_memory()

    # Create list of uploaded files to be deleted.
    # Every pattern is anchored on the asset name: matching bare extensions
    # ('.mat'/'.seq'/'.seqx') regardless of name would delete assets that
    # were never requested. Duplicates are skipped so each file is deleted
    # via FTP only once.
    files_to_delete = []
    for name in asset_name:
        for filename in uploaded_files:
            if (fnmatch(filename, name + '_ch?.wfm*')
                    or fnmatch(filename, name + '.wfm*')
                    or fnmatch(filename, name + '.mat')
                    or fnmatch(filename, name + '.seq*')):
                if filename not in files_to_delete:
                    files_to_delete.append(filename)

    # delete files
    with FTP(self.ip_address) as ftp:
        # login as default user anonymous, passwd anonymous@
        ftp.login(user=self.user, passwd=self.passwd)
        ftp.cwd(self.asset_directory)
        for filename in files_to_delete:
            ftp.delete(filename)

    # clear waveforms from AWG workspace
    for wfm in wfm_list:
        for name in asset_name:
            if fnmatch(wfm, name + '_ch?') or wfm == name:
                self.awg.write('WLIS:WAV:DEL "{0}"'.format(wfm))

    # clear sequences from AWG workspace
    for name in asset_name:
        if name in seq_list:
            self.awg.write('SLIS:SEQ:DEL "{0}"'.format(name))

    return files_to_delete
def set_asset_dir_on_device(self, dir_path):
    """ Change the directory where the assets are stored on the device.

    @param str dir_path: The target directory

    @return int: error code (0:OK, -1:error)

    Unused for digital pulse generators without changeable file structure
    (PulseBlaster, FPGA).
    """
    # error_perm is the FTP "permanent error" reply (550 for a missing
    # directory); catching it instead of a bare 'except:' avoids swallowing
    # KeyboardInterrupt and genuine connection failures.
    from ftplib import error_perm

    # check whether the desired directory exists:
    with FTP(self.ip_address) as ftp:
        # login as default user anonymous, passwd anonymous@
        ftp.login(user=self.user, passwd=self.passwd)
        try:
            ftp.cwd(dir_path)
        except error_perm:
            self.log.info('Desired directory {0} not found on AWG '
                          'device.\n'
                          'Create new.'.format(dir_path))
            ftp.mkd(dir_path)

    self.asset_directory = dir_path
    return 0
def get_asset_dir_on_device(self):
    """ Ask for the directory where the assets are stored on the device.

    @return str: The current sequence directory. This returns the cached
                 value set via set_asset_dir_on_device; no hardware query
                 is performed.

    Unused for digital pulse generators without changeable file structure
    (PulseBlaster, FPGA).
    """
    return self.asset_directory
def has_sequence_mode(self):
    """ Asks the pulse generator whether sequence mode exists.

    @return bool: True if the sequencing option ('03') is installed.
    """
    # *OPT? replies with a quoted, comma-separated option list; strip the
    # surrounding quote characters and trailing newline before splitting.
    installed_options = self.awg.query('*OPT?')[1:-2].split(',')
    return '03' in installed_options
def set_interleave(self, state=False):
    """ Turns the interleave of an AWG on or off.

    @param bool state: The state the interleave should be set to
                       (True: ON, False: OFF)

    @return bool: always False — the AWG 70000 Series has no interleave
                  mode, so the requested state is never applied. (The
                  generic interface documents an int error code here;
                  callers treating falsy as success are unaffected since
                  False == 0.)

    Unused for pulse generator hardware other than an AWG. The AWG 5000
    Series does not have an interleave mode and this method exists only for
    compability reasons.
    """
    if state:
        # Only warn when the caller actually tried to enable interleave.
        self.log.warning('Interleave mode not available for the AWG 70000 Series!\n'
                         'Method call will be ignored.')
    return False
def get_interleave(self):
    """ Check whether Interleave is on in AWG.

    Unused for pulse generator hardware other than an AWG. The AWG 70000
    Series does not have an interleave mode and this method exists only for
    compability reasons.

    @return bool: will be always False since no interleave functionality
    """
    # The hardware has no interleave mode, so the state is constantly off.
    return False
def reset(self):
    """Reset the device.

    @return int: error code (0:OK, -1:error)
    """
    # *RST restores the factory default state, *WAI blocks until the reset
    # has completed before any further command is processed.
    for scpi_command in ('*RST', '*WAI'):
        self.awg.write(scpi_command)
    return 0
def ask(self, question):
    """ Asks the device a 'question' and receive and return an answer from it.

    @param str question: string containing the command

    @return str: the answer of the device to the 'question' with all
                 newline characters removed
    """
    return self.awg.query(question).replace('\n', '')
def tell(self, command):
    """ Sends a command string to the device.

    @param str command: string containing the command

    @return int: status code reported by the underlying visa write
                 (0:OK, -1:error)
    """
    # visa write returns (bytes_written, status); only the status matters.
    _, status_code = self.awg.write(command)
    return int(status_code)
def direct_write_ensemble(self, ensemble_name, analog_samples, digital_samples):
    """
    Write a waveform ensemble directly into the AWG workspace.

    @param str ensemble_name: Name for the waveform to be created.
    @param analog_samples: numpy.ndarray of type float32 containing the voltage samples.
                           First dimension is the analog channel index, second the sample
                           number.
    @param digital_samples: numpy.ndarray of type bool containing the marker states for each
                            sample.
                            First dimension is marker index; second dimension is sample number
    @return int: error code (0: OK, -1: error)
    """
    # check input
    if not ensemble_name:
        self.log.error('Please specify an ensemble name for direct waveform creation.')
        return -1

    if type(analog_samples).__name__ != 'ndarray':
        self.log.warning('Analog samples for direct waveform creation have wrong data type.\n'
                         'Converting to numpy.ndarray of type float32.')
        analog_samples = np.array(analog_samples, dtype='float32')

    if type(digital_samples).__name__ != 'ndarray':
        self.log.warning('Digital samples for direct waveform creation have wrong data type.\n'
                         'Converting to numpy.ndarray of type bool.')
        digital_samples = np.array(digital_samples, dtype=bool)

    min_samples = int(self.awg.query('WLIS:WAV:LMIN?'))
    if analog_samples.shape[1] < min_samples or digital_samples.shape[1] < min_samples:
        self.log.error('Minimum waveform length for AWG70000A series is {0} samples.\n'
                       'Direct waveform creation failed.'.format(min_samples))
        return -1

    if analog_samples.shape[1] != digital_samples.shape[1]:
        self.log.error('Number of analog and digital samples must be the same.\n'
                       'Direct waveform creation failed.')
        return -1

    # determine active channels
    activation_dict = self.get_active_channels()
    active_chnl = [chnl for chnl in activation_dict if activation_dict[chnl]]
    active_analog = [chnl for chnl in active_chnl if 'a_ch' in chnl]
    active_analog.sort()
    active_digital = [chnl for chnl in active_chnl if 'd_ch' in chnl]
    active_digital.sort()

    # Sanity check of channel numbers
    if len(active_analog) != analog_samples.shape[0] or len(active_digital) != digital_samples.shape[0]:
        self.log.error('Mismatch of channel activation and sample array dimensions for direct '
                       'write.\nChannel activation is: {0} analog, {1} digital.\n'
                       'Sample arrays have: {2} analog, {3} digital.'
                       ''.format(len(active_analog), len(active_digital),
                                 analog_samples.shape[0], digital_samples.shape[0]))
        return -1

    for a_ch in active_analog:
        a_ch_num = int(a_ch.split('ch')[-1])
        # The markers of analog channel N are d_ch(2N-1) and d_ch(2N); this
        # matches the numbering used by get/set_active_channels. (The previous
        # 2N-2/2N-1 naming never matched any existing channel, so markers were
        # silently dropped.)
        mrk_ch_1 = 'd_ch{0}'.format(a_ch_num * 2 - 1)
        mrk_ch_2 = 'd_ch{0}'.format(a_ch_num * 2)
        wfm_name = ensemble_name + '_ch' + str(a_ch_num)

        # Encode marker information in an array of bytes (uint8):
        # marker 1 occupies bit 6, marker 2 bit 7 of every sample byte.
        if mrk_ch_1 in active_digital and mrk_ch_2 in active_digital:
            mrk1_index = active_digital.index(mrk_ch_1)
            mrk2_index = active_digital.index(mrk_ch_2)
            mrk_bytes = np.add(np.left_shift(digital_samples[mrk2_index].astype('uint8'), 7),
                               np.left_shift(digital_samples[mrk1_index].astype('uint8'), 6))
        elif mrk_ch_1 in active_digital:
            # Must be an elif: a second plain 'if' with an attached 'else'
            # would reset mrk_bytes back to None in the both-markers case.
            mrk1_index = active_digital.index(mrk_ch_1)
            mrk_bytes = np.left_shift(digital_samples[mrk1_index].astype('uint8'), 6)
        else:
            mrk_bytes = None

        # Check if waveform already exists and delete if necessary.
        if wfm_name in self._get_waveform_names_memory():
            self.awg.write('WLIS:WAV:DEL "{0}"'.format(wfm_name))

        # Create waveform in AWG workspace and fill in sample data
        self.awg.write('WLIS:WAV:NEW "{0}", {1}'.format(wfm_name, digital_samples.shape[1]))
        self.awg.write_values('WLIS:WAV:DATA "{0}",'.format(wfm_name),
                              analog_samples[a_ch_num - 1])
        if mrk_bytes is not None:
            self.awg.write_values('WLIS:WAV:MARK:DATA "{0}",'.format(wfm_name), mrk_bytes)

    # Wait for everything to complete
    while int(self.awg.query('*OPC?')) != 1:
        time.sleep(0.2)

    return 0
def direct_write_sequence(self, sequence_name, sequence_params):
    """
    Write a sequence directly into the AWG sequence list.

    @param str sequence_name: name of the sequence to create in the AWG workspace
    @param list sequence_params: one dict per sequence step; each dict must
                                 provide the keys 'trigger_wait',
                                 'event_jump_to', 'repetitions', 'go_to' and
                                 'name' (list of waveform file names such as
                                 'asset_ch1.wfmx').
    @return int: error code (0: OK)
    """
    # Map the logic-level trigger setting to the SCPI event-input argument.
    trig_dict = {-1: 'OFF', 0: 'OFF', 1: 'ATR', 2: 'BTR'}

    # NOTE(review): this counts every analog channel key returned by
    # get_active_channels(), active or not — unlike direct_write_ensemble,
    # which filters on the activation value. Confirm this is intended.
    active_analog = [chnl for chnl in self.get_active_channels() if 'a_ch' in chnl]
    num_tracks = len(active_analog)
    num_steps = len(sequence_params)

    # Check if sequence already exists and delete if necessary.
    if sequence_name in self._get_sequence_names_memory():
        self.awg.write('SLIS:SEQ:DEL "{0}"'.format(sequence_name))

    # Create new sequence and set jump timing to immediate
    self.awg.write('SLIS:SEQ:NEW "{0}", {1}, {2}'.format(sequence_name, num_steps, num_tracks))
    self.awg.write('SLIS:SEQ:EVEN:JTIM "{0}", IMM'.format(sequence_name))

    # Fill in sequence information
    for step in range(num_steps):
        # Event-jump input (trigger wait) for this step.
        self.awg.write('SLIS:SEQ:STEP{0}:EJIN "{1}", {2}'.format(step + 1, sequence_name,
                       trig_dict[sequence_params[step]['trigger_wait']]))
        # Non-positive jump targets mean "continue with the next step".
        if sequence_params[step]['event_jump_to'] <= 0:
            jumpto = 'NEXT'
        else:
            jumpto = str(sequence_params[step]['event_jump_to'])
        self.awg.write('SLIS:SEQ:STEP{0}:EJUM "{1}", {2}'.format(step + 1,
                                                                 sequence_name, jumpto))
        # Non-positive repetition counts mean "repeat indefinitely".
        if sequence_params[step]['repetitions'] <= 0:
            repeat = 'INF'
        else:
            repeat = str(sequence_params[step]['repetitions'])
        self.awg.write('SLIS:SEQ:STEP{0}:RCO "{1}", {2}'.format(step + 1,
                                                                sequence_name, repeat))
        # Non-positive goto targets also fall through to the next step.
        if sequence_params[step]['go_to'] <= 0:
            goto = 'NEXT'
        else:
            goto = str(sequence_params[step]['go_to'])
        self.awg.write('SLIS:SEQ:STEP{0}:GOTO "{1}", {2}'.format(step + 1, sequence_name, goto))

        # Derive the base waveform name by stripping the channel suffix, then
        # assign the per-channel waveform to each track of this step.
        waveform_name = sequence_params[step]['name'][0].rsplit('_ch', 1)[0]
        if num_tracks == 1:
            self.awg.write('SLIS:SEQ:STEP{0}:TASS1:WAV "{1}", "{2}"'.format(step + 1,
                                                                            sequence_name,
                                                                            waveform_name + '_ch1'))
        elif num_tracks == 2:
            self.awg.write('SLIS:SEQ:STEP{0}:TASS1:WAV "{1}", "{2}"'.format(step + 1,
                                                                            sequence_name,
                                                                            waveform_name + '_ch1'))
            self.awg.write('SLIS:SEQ:STEP{0}:TASS2:WAV "{1}", "{2}"'.format(step + 1,
                                                                            sequence_name,
                                                                            waveform_name + '_ch2'))

    # Wait for everything to complete
    while int(self.awg.query('*OPC?')) != 1:
        time.sleep(0.2)

    return 0
def _init_loaded_asset(self):
"""
Gets the name of the currently loaded asset from the AWG and sets the attribute accordingly.
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
# first get all the channel assets
a_ch_asset = [self.awg.query('SOUR{0}:CASS?'.format(count))[1:-2]
for count in range(1, self._get_max_a_channel_number() + 1)]
tmp_list = [a_ch.split('_ch') for a_ch in a_ch_asset]
a_ch_asset = [ele[0] for ele in filter(lambda x: len(x) == 2, tmp_list)]
# the case
if len(a_ch_asset) != 0:
all_same = True
for asset in a_ch_asset:
if asset != a_ch_asset[0]:
all_same = False
break
if all_same:
self.current_loaded_asset = a_ch_asset[0]
else:
self.log.error("In _init_loaded_asset: The case of differing asset names is not "
"yet handled")
self.current_loaded_asset = ''
else:
self.current_loaded_asset = ''
return self.current_loaded_asset
def _get_sequence_names_memory(self):
"""
Gets all sequence names currently loaded into the AWG workspace
@return: list of names
"""
number_of_seq = int(self.awg.query('SLIS:SIZE?'))
sequence_list = [None] * number_of_seq
for i in range(number_of_seq):
seq_name = self.awg.query('SLIS:NAME? {0}'.format(i + 1))[1:-2]
sequence_list[i] = seq_name
return sequence_list
def _get_dir_for_name(self, name):
""" Get the path to the pulsed sub-directory 'name'.
@param name: string, name of the folder
@return: string, absolute path to the directory with folder 'name'.
"""
path = os.path.join(self.pulsed_file_dir, name)
if not os.path.exists(path):
os.makedirs(os.path.abspath(path))
return os.path.abspath(path)
def _get_filenames_on_device(self):
    """ Get the full filenames of all assets saved on the device.

    @return list: The full filenames of all assets saved on the device,
                  restricted to known asset extensions
                  (.wfm/.wfmx/.mat/.seq/.seqx), without duplicates.
    """
    filename_list = []
    with FTP(self.ip_address) as ftp:
        # login as default user anonymous, passwd anonymous@
        ftp.login(user=self.user, passwd=self.passwd)
        ftp.cwd(self.asset_directory)
        # get only the files from the dir and skip possible directories
        log = []
        file_list = []
        ftp.retrlines('LIST', callback=log.append)
        for line in log:
            # NOTE(review): the parsing below assumes the Windows-style DIR
            # listing produced by the AWG's FTP server (fixed-width date
            # column, '<DIR>' marker) — confirm before reusing elsewhere.
            if '<DIR>' not in line:
                # that is how a potential line is looking like:
                # '05-10-16  05:22PM                  292 SSR aom adjusted.seq'
                # One can see that the first part consists of the date
                # information. Remove those information and separate then
                # the first number, which indicates the size of the file,
                # from the following. That is necessary if the filename has
                # whitespaces in the name:
                size_filename = line[18:].lstrip()
                # split after the first appearing whitespace and take the
                # rest as filename, remove for safety all trailing
                # whitespaces:
                actual_filename = size_filename.split(' ', 1)[1].lstrip()
                file_list.append(actual_filename)
        # keep only known asset file types and drop duplicates
        for filename in file_list:
            if filename.endswith(('.wfm', '.wfmx', '.mat', '.seq', '.seqx')):
                if filename not in filename_list:
                    filename_list.append(filename)
    return filename_list
def _get_filenames_on_host(self):
""" Get the full filenames of all assets saved on the host PC.
@return: list, The full filenames of all assets saved on the host PC.
"""
filename_list = [f for f in os.listdir(self.host_waveform_directory) if
f.endswith('.wfmx') or f.endswith('.wfm') or f.endswith(
'.seq') or f.endswith('.mat')]
return filename_list
def _get_model_ID(self):
"""
@return: a string which represents the model id of the AWG.
"""
model_id = self.awg.query('*IDN?').replace('\n', '').split(',')
return model_id
def _get_max_a_channel_number(self):
"""
@return: Returns an integer which represents the number of analog
channels.
"""
constraints = self.get_constraints()
config = constraints.activation_config
largest_list = config[max(config, key=config.get)]
lst = [kk for kk in largest_list if 'a_ch' in kk]
analog_channel_lst = [w.replace('a_ch', '') for w in lst]
max_number_of_channels = max(map(int, analog_channel_lst))
return max_number_of_channels
def _get_waveform_names_memory(self):
"""
Gets all waveform names currently loaded into the AWG workspace
@return: list of names
"""
# Check if AWG is in function generator mode
# self._activate_awg_mode()
number_of_wfm = int(self.awg.query('WLIS:SIZE?'))
waveform_list = [None] * number_of_wfm
for i in range(number_of_wfm):
wfm_name = self.awg.query('WLIS:NAME? {0}'.format(i + 1))[1:-2]
waveform_list[i] = wfm_name
return waveform_list
def _is_output_on(self):
"""
Aks the AWG if the output is enabled, i.e. if the AWG is running
@return: bool, (True: output on, False: output off)
"""
run_state = bool(int(self.awg.query('AWGC:RST?')))
return run_state
# def _activate_awg_mode(self):
# """
# Helper method to activate AWG mode if the device is currently in function generator mode.
# """
# # Check if AWG is still in MW mode (function generator mode)
# if self.awg.query('INST:MODE?').replace('\n', '') != 'AWG':
# self.awg.write('INST:MODE AWG')
# self.awg.write('*WAI')
# return
| gpl-3.0 |
sdague/home-assistant | homeassistant/components/smhi/__init__.py | 26 | 1025 | """Support for the Swedish weather institute weather service."""
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Config, HomeAssistant
# Have to import for config_flow to work even if they are not used here
from .config_flow import smhi_locations # noqa: F401
from .const import DOMAIN # noqa: F401
DEFAULT_NAME = "smhi"
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
    """Set up configured SMHI.

    YAML-based setup is intentionally a no-op: the integration is only
    configured through config entries (see async_setup_entry), so this
    always reports success without doing anything.
    """
    # We allow setup only through config flow type of config
    return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up SMHI forecast as config entry."""
    # Forward the entry to the weather platform without blocking this setup.
    forward_setup = hass.config_entries.async_forward_entry_setup(config_entry, "weather")
    hass.async_create_task(forward_setup)
    return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    # Tear down the weather platform created in async_setup_entry.
    # NOTE(review): the result of the platform unload is discarded and True
    # is returned unconditionally — consider propagating the unload result.
    await hass.config_entries.async_forward_entry_unload(config_entry, "weather")
    return True
| apache-2.0 |
OCA/l10n-brazil | l10n_br_purchase_stock/wizards/stock_invocing_onshipping.py | 1 | 2256 | # @ 2021 Akretion - www.akretion.com.br -
# Magno Costa <[email protected]>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import fields, models
class StockInvoiceOnshipping(models.TransientModel):
    """Extend the invoice-on-shipping wizard with purchase-order data."""

    _inherit = "stock.invoice.onshipping"

    def _build_invoice_values_from_pickings(self, pickings):
        """
        Build dict to create a new invoice from given pickings
        :param pickings: stock.picking recordset
        :return: dict
        """
        invoice, values = super()._build_invoice_values_from_pickings(pickings)
        pick = fields.first(pickings)
        if pick.purchase_id:
            # Link the invoice to the purchase order of the first picking and
            # prefer the payment term agreed on that purchase order.
            values["purchase_id"] = pick.purchase_id.id
            if pick.purchase_id.payment_term_id.id != values["payment_term_id"]:
                values.update({"payment_term_id": pick.purchase_id.payment_term_id.id})
        return invoice, values

    def _get_move_key(self, move):
        """
        Get the key based on the given move
        :param move: stock.move recordset
        :return: key
        """
        key = super()._get_move_key(move)
        if move.purchase_line_id:
            # TODO: should grouping of the lines be allowed?
            #  Should grouping of Purchase Orders be allowed?
            if type(key) is tuple:
                key = key + (move.purchase_line_id,)
            else:
                # TODO - it would be better to check the TYPE to know whether
                #  KEY really is an object in this case
                key = (key, move.purchase_line_id)
        return key

    def _get_invoice_line_values(self, moves, invoice_values, invoice):
        """
        Create invoice line values from given moves
        :param moves: stock.move
        :param invoice: account.invoice
        :return: dict
        """
        values = super()._get_invoice_line_values(moves, invoice_values, invoice)
        # Because the move key includes purchase_line_id, only one record
        # arrives here.
        if len(moves) == 1:
            # If a single line comes in without purchase_line_id it must be
            # ignored.
            if moves.purchase_line_id:
                values["purchase_line_id"] = moves.purchase_line_id.id
        return values
| agpl-3.0 |
cloudera/recordservice | thirdparty/thrift-0.9.0/test/py/TestServer.py | 30 | 7232 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import division
import sys, glob, time
sys.path.insert(0, glob.glob('../../lib/py/build/lib.*')[0])
from optparse import OptionParser
# Command-line options shared by all server variants under test.
parser = OptionParser()
parser.add_option('--genpydir', type='string', dest='genpydir',
                  default='gen-py',
                  help='include this local directory in sys.path for locating generated code')
parser.add_option("--port", type="int", dest="port",
                  help="port number for server to listen on")
parser.add_option("--zlib", action="store_true", dest="zlib",
                  help="use zlib wrapper for compressed transport")
parser.add_option("--ssl", action="store_true", dest="ssl",
                  help="use SSL for encrypted transport")
parser.add_option('-v', '--verbose', action="store_const",
                  dest="verbose", const=2,
                  help="verbose output")
parser.add_option('-q', '--quiet', action="store_const",
                  dest="verbose", const=0,
                  help="minimal output")
parser.add_option('--proto', dest="proto", type="string",
                  help="protocol to use, one of: accel, binary, compact")
parser.set_defaults(port=9090, verbose=1, proto='binary')
options, args = parser.parse_args()

# The generated-code directory must be importable before the ThriftTest
# modules below can be loaded.
sys.path.insert(0, options.genpydir)

from ThriftTest import ThriftTest
from ThriftTest.ttypes import *
from thrift.transport import TTransport
from thrift.transport import TSocket
from thrift.transport import TZlibTransport
from thrift.protocol import TBinaryProtocol
from thrift.protocol import TCompactProtocol
from thrift.server import TServer, TNonblockingServer, THttpServer

# Maps the --proto option value to the corresponding protocol factory class.
PROT_FACTORIES = {'binary': TBinaryProtocol.TBinaryProtocolFactory,
                  'accel': TBinaryProtocol.TBinaryProtocolAcceleratedFactory,
                  'compact': TCompactProtocol.TCompactProtocolFactory}
class TestHandler:
    """Handler implementing the ThriftTest service.

    Most methods simply echo their argument back so a client in another
    language can verify round-tripping of each Thrift type.  Logging is
    controlled by the module-level --verbose option.
    """

    def testVoid(self):
        if options.verbose > 1:
            print 'testVoid()'

    # Note: the parameter name shadows the builtin 'str'; kept as-is to
    # match the generated service interface.
    def testString(self, str):
        if options.verbose > 1:
            print 'testString(%s)' % str
        return str

    def testByte(self, byte):
        if options.verbose > 1:
            print 'testByte(%d)' % byte
        return byte

    def testI16(self, i16):
        if options.verbose > 1:
            print 'testI16(%d)' % i16
        return i16

    def testI32(self, i32):
        if options.verbose > 1:
            print 'testI32(%d)' % i32
        return i32

    def testI64(self, i64):
        if options.verbose > 1:
            print 'testI64(%d)' % i64
        return i64

    def testDouble(self, dub):
        if options.verbose > 1:
            print 'testDouble(%f)' % dub
        return dub

    def testStruct(self, thing):
        if options.verbose > 1:
            print 'testStruct({%s, %d, %d, %d})' % (thing.string_thing, thing.byte_thing, thing.i32_thing, thing.i64_thing)
        return thing

    def testException(self, str):
        # Raises a declared Xception for 'Xception', an undeclared
        # ValueError for 'throw_undeclared'; returns None otherwise.
        if options.verbose > 1:
            print 'testException(%s)' % str
        if str == 'Xception':
            x = Xception()
            x.errorCode = 1001
            x.message = str
            raise x
        elif str == "throw_undeclared":
            raise ValueError("Exception test PASSES.")

    def testOneway(self, seconds):
        if options.verbose > 1:
            print 'testOneway(%d) => sleeping...' % seconds
        time.sleep(seconds / 3) # be quick
        if options.verbose > 1:
            print 'done sleeping'

    def testNest(self, thing):
        if options.verbose > 1:
            print 'testNest(%s)' % thing
        return thing

    def testMap(self, thing):
        if options.verbose > 1:
            print 'testMap(%s)' % thing
        return thing

    def testSet(self, thing):
        if options.verbose > 1:
            print 'testSet(%s)' % thing
        return thing

    def testList(self, thing):
        if options.verbose > 1:
            print 'testList(%s)' % thing
        return thing

    def testEnum(self, thing):
        if options.verbose > 1:
            print 'testEnum(%s)' % thing
        return thing

    def testTypedef(self, thing):
        if options.verbose > 1:
            print 'testTypedef(%s)' % thing
        return thing

    def testMapMap(self, thing):
        if options.verbose > 1:
            print 'testMapMap(%s)' % thing
        return thing

    def testMulti(self, arg0, arg1, arg2, arg3, arg4, arg5):
        # Only the first three arguments are folded into the returned struct.
        if options.verbose > 1:
            print 'testMulti(%s)' % [arg0, arg1, arg2, arg3, arg4, arg5]
        x = Xtruct(byte_thing=arg0, i32_thing=arg1, i64_thing=arg2)
        return x
# set up the protocol factory from the --proto option
pfactory_cls = PROT_FACTORIES.get(options.proto, None)
if pfactory_cls is None:
    raise AssertionError('Unknown --proto option: %s' % options.proto)
pfactory = pfactory_cls()

# get the server type (TSimpleServer, TNonblockingServer, etc...)
if len(args) > 1:
    raise AssertionError('Only one server type may be specified, not multiple types.')
# NOTE(review): an empty args list makes args[0] raise IndexError — confirm
# the test harness always passes exactly one server type.
server_type = args[0]

# Set up the handler and processor objects
handler = TestHandler()
processor = ThriftTest.Processor(handler)

# Handle THttpServer as a special case: it bundles its own transport.
if server_type == 'THttpServer':
    server =THttpServer.THttpServer(processor, ('', options.port), pfactory)
    server.serve()
    sys.exit(0)

# set up server transport and transport factory
host = None
if options.ssl:
    from thrift.transport import TSSLSocket
    transport = TSSLSocket.TSSLServerSocket(host, options.port, certfile='test_cert.pem')
else:
    transport = TSocket.TServerSocket(host, options.port)
tfactory = TTransport.TBufferedTransportFactory()

# if --zlib, then wrap server transport, and use a different transport factory
if options.zlib:
    transport = TZlibTransport.TZlibTransport(transport) # wrap with zlib
    tfactory = TZlibTransport.TZlibTransportFactory()

# do server-specific setup here:
if server_type == "TNonblockingServer":
    server = TNonblockingServer.TNonblockingServer(processor, transport, inputProtocolFactory=pfactory)
elif server_type == "TProcessPoolServer":
    import signal
    from thrift.server import TProcessPoolServer
    server = TProcessPoolServer.TProcessPoolServer(processor, transport, tfactory, pfactory)
    server.setNumWorkers(5)
    # Install a SIGALRM handler that tears the worker pool down after 2s so
    # the cross-language test run terminates by itself.
    def set_alarm():
        def clean_shutdown(signum, frame):
            for worker in server.workers:
                if options.verbose > 0:
                    print 'Terminating worker: %s' % worker
                worker.terminate()
            if options.verbose > 0:
                print 'Requesting server to stop()'
            try:
                server.stop()
            except:
                pass
        signal.signal(signal.SIGALRM, clean_shutdown)
        signal.alarm(2)
    set_alarm()
else:
    # look up server class dynamically to instantiate server
    ServerClass = getattr(TServer, server_type)
    server = ServerClass(processor, transport, tfactory, pfactory)

# enter server main loop
server.serve()
| apache-2.0 |
fmacias64/deap | examples/pso/speciation.py | 12 | 6672 | # This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
"""Implementation of the Speciation Particle Swarm Optimization algorithm as
presented in *Li, Blackwell, and Branke, 2006, Particle Swarm with Speciation
and Adaptation in a Dynamic Environment.*
"""
import itertools
import math
import operator
import random
import numpy
try:
from itertools import imap
except:
# Python 3 nothing to do
pass
else:
map = imap
from deap import base
from deap.benchmarks import movingpeaks
from deap import creator
from deap import tools
# Moving Peaks benchmark configuration: scenario 2 in a 5-dimensional
# search space bounded by the scenario's coordinate limits.
scenario = movingpeaks.SCENARIO_2
NDIM = 5
BOUNDS = [scenario["min_coord"], scenario["max_coord"]]
mpb = movingpeaks.MovingPeaks(dim=NDIM, **scenario)
# Maximizing fitness; particles remember their speed, personal best
# position and the fitness at that best position.
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Particle", list, fitness=creator.FitnessMax, speed=list,
               best=None, bestfit=creator.FitnessMax)
def generate(pclass, dim, pmin, pmax, smin, smax):
    """Create a particle of *pclass* with *dim* random coordinates.

    Coordinates are drawn uniformly from [pmin, pmax] and the initial
    velocity components uniformly from [smin, smax].
    """
    coords = [random.uniform(pmin, pmax) for _ in range(dim)]
    particle = pclass(coords)
    particle.speed = [random.uniform(smin, smax) for _ in range(dim)]
    return particle
def convert_quantum(swarm, rcloud, centre):
    """Re-seed every particle of *swarm* as a quantum particle.

    Each particle is moved to a random point sampled (NUVD scheme)
    inside a cloud of radius *rcloud* centred on *centre*, and its
    memory (fitness, personal best) is erased so it restarts fresh.
    """
    dim = len(swarm[0])
    for particle in swarm:
        direction = [random.gauss(0, 1) for _ in range(dim)]
        norm = math.sqrt(sum(d**2 for d in direction))

        # Alternative sampling schemes, kept for reference:
        #   Gaussian: u = abs(random.gauss(0, 1.0/3.0)); scale by u**(1.0/dim)
        #   UVD:      u = random.random();                scale by u**(1.0/dim)

        # NUVD distribution
        u = abs(random.gauss(0, 1.0/3.0))
        particle[:] = [(rcloud * d * u / norm) + c
                       for d, c in zip(direction, centre)]

        # Forget everything the particle has learned so far.
        del particle.fitness.values
        del particle.bestfit.values
        particle.best = None

    return swarm
def updateParticle(part, best, chi, c):
    """Advance *part* one PSO step toward *best* and its own best.

    Constricted velocity update: the new velocity is the old one plus
    chi * (cognitive + social attraction) minus a (1 - chi)-scaled
    friction term; the position is then moved by the new velocity.
    *c* bounds the random attraction coefficients.
    """
    new_speed = []
    for i, x in enumerate(part):
        # Random coefficients are drawn in the same order per dimension
        # (attraction to the species seed first, then to the own best).
        u1 = c * random.uniform(0, 1)
        u2 = c * random.uniform(0, 1)
        cognitive = u1 * (best[i] - x)
        social = u2 * (part.best[i] - x)
        accel = chi * (cognitive + social) - (1 - chi) * part.speed[i]
        new_speed.append(part.speed[i] + accel)
    part.speed = new_speed
    part[:] = [x + v for x, v in zip(part, part.speed)]
toolbox = base.Toolbox()
# Particles start anywhere in the domain with speeds bounded by half the
# domain range in each direction.
toolbox.register("particle", generate, creator.Particle, dim=NDIM,
                 pmin=BOUNDS[0], pmax=BOUNDS[1], smin=-(BOUNDS[1] - BOUNDS[0])/2.0,
                 smax=(BOUNDS[1] - BOUNDS[0])/2.0)
toolbox.register("swarm", tools.initRepeat, list, toolbox.particle)
# chi and c are the standard constriction-coefficient PSO parameters.
toolbox.register("update", updateParticle, chi=0.729843788, c=2.05)
toolbox.register("convert", convert_quantum)
toolbox.register("evaluate", mpb)
def main(verbose=True):
    """Run Speciation PSO on the Moving Peaks benchmark until 5e5 evaluations.

    Particles are grouped into species around dominant individuals within
    a radius RS; when a landscape change is detected each species is
    re-seeded as a quantum cloud around its seed, and oversized species
    are trimmed back to PMAX members.
    """
    NPARTICLES = 100
    RS = (BOUNDS[1] - BOUNDS[0]) / (50**(1.0/NDIM))  # between 1/20 and 1/10 of the domain's range
    PMAX = 10
    RCLOUD = 1.0    # 0.5 times the move severity

    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("avg", numpy.mean)
    stats.register("std", numpy.std)
    stats.register("min", numpy.min)
    stats.register("max", numpy.max)

    logbook = tools.Logbook()
    logbook.header = "gen", "nswarm", "evals", "error", "offline_error", "avg", "max"

    swarm = toolbox.swarm(n=NPARTICLES)

    generation = 0
    while mpb.nevals < 5e5:
        # Evaluate each particle in the swarm
        for part in swarm:
            part.fitness.values = toolbox.evaluate(part)

            if not part.best or part.bestfit < part.fitness:
                part.best = toolbox.clone(part[:])          # Get the position
                part.bestfit.values = part.fitness.values   # Get the fitness

        # Sort swarm into species, best individual comes first
        sorted_swarm = sorted(swarm, key=lambda ind: ind.bestfit, reverse=True)
        species = []
        while sorted_swarm:
            found = False
            for s in species:
                # Euclidean distance between the candidate's best and the seed's best.
                dist = math.sqrt(sum((x1 - x2)**2 for x1, x2 in zip(sorted_swarm[0].best, s[0].best)))
                if dist <= RS:
                    found = True
                    s.append(sorted_swarm[0])
                    break
            if not found:
                # The candidate becomes the seed of a new species.
                species.append([sorted_swarm[0]])
            sorted_swarm.pop(0)

        record = stats.compile(swarm)
        logbook.record(gen=generation, evals=mpb.nevals, nswarm=len(species),
                       error=mpb.currentError(), offline_error=mpb.offlineError(), **record)

        if verbose:
            print(logbook.stream)

        # Detect change: re-evaluating any species seed reveals a moved landscape
        if any(s[0].bestfit.values != toolbox.evaluate(s[0].best) for s in species):
            # Convert particles to quantum particles
            for s in species:
                s[:] = toolbox.convert(s, rcloud=RCLOUD, centre=s[0].best)
        else:
            # Replace exceeding particles in a species with new particles
            for s in species:
                if len(s) > PMAX:
                    n = len(s) - PMAX
                    del s[PMAX:]
                    s.extend(toolbox.swarm(n=n))

            # Update particles that have not been reinitialized
            for s in species[:-1]:
                for part in s[:PMAX]:
                    toolbox.update(part, s[0].best)
                    del part.fitness.values

        # Return all but the worst species' updated particles to the swarm
        # The worst species is replaced by new particles
        swarm = list(itertools.chain(toolbox.swarm(n=len(species[-1])), *species[:-1]))

        generation += 1
if __name__ == '__main__':
main()
| lgpl-3.0 |
RedHatQE/cfme_tests | cfme/scripting/conf.py | 1 | 3763 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""Script to encrypt config files.
Usage:
scripts/encrypt_conf.py confname1 confname2 ... confnameN
scripts/encrypt_conf.py credentials
"""
import io
import click
import yaycl_crypt
from . import link_config
from cfme.utils import conf
@click.group(help='Functions affecting configuration files')
def main():
    # Root command group; subcommands are attached below and via link_config.
    pass


main.add_command(link_config.main, name='link')
@main.command(help='Tests a yaml file')
@click.argument('conf_name', default='credentials')
def test(conf_name):
    """Test yaml file to see how many keys exist"""
    # conf.__getattr__ triggers yaycl's lazy loading (and decryption) of the yaml.
    creds = conf.__getattr__(conf_name)
    print("{} keys found, if this value seems low, there may be a YAML error".format(len(creds)))
@main.command('show-credential', help='Shows the value of a credential key')
@click.argument('cred-or-provider-key')
@click.option('--only-credentials', is_flag=True, help='Only search credentials, (not providers)')
def show_credential(cred_or_provider_key, only_credentials):
    """Print credentials for a provider key or a plain credential key.

    Provider keys are resolved through cfme_data's management_systems
    endpoints (one section per endpoint); otherwise the key is looked up
    directly in the credentials yaml.
    """
    data = conf.cfme_data
    if cred_or_provider_key in data.get('management_systems', {}) and not only_credentials:
        endpoints_data = data['management_systems'][cred_or_provider_key].get('endpoints', {})
        for endpoint in endpoints_data:
            print(endpoint)
            cred_key = endpoints_data[endpoint]['credentials']
            cred_dict = conf.credentials[cred_key]
            for key, value in cred_dict.items():
                print(" {}: {}".format(key, value))
    elif cred_or_provider_key in conf.credentials:
        cred_dict = conf.credentials[cred_or_provider_key]
        for key, value in cred_dict.items():
            print("{}: {}".format(key, value))
    else:
        print("Key couldn't be found in providers or credentials YAMLS")
@main.command('show-provider', help='Shows the configuration of a provider')
@click.argument('provider-key')
def show_provider(provider_key):
    """Function to show provider data"""
    # NOTE(review): output is a BytesIO, so .dump() is assumed to write bytes;
    # on Python 3 print() will then show the b'...' repr — confirm whether
    # io.StringIO was intended here.
    output = io.BytesIO()
    data = conf.cfme_data
    if provider_key in data.get('management_systems', {}):
        data['management_systems'][provider_key].dump(output)
        print(output.getvalue())
    else:
        print("Key couldn't be found in provider data")
@main.command(help='Encrypts a yaml file')
@click.argument('conf_name', default='credentials')
@click.option('--delete', default=False, is_flag=True,
              help='If supplied delete the unencrypted config of the same name.')
def encrypt(conf_name, delete):
    """Encrypt the named conf file in place via yaycl_crypt."""
    name = conf_name.strip()
    yaycl_crypt.encrypt_yaml(conf, name, delete=delete)
    print('{} conf encrypted'.format(name))
    if delete:
        return
    # The plain-text file is still present and takes precedence when loading.
    print('WARNING: unencrypted file left which will override encrypted')
@main.command(help='Decrypts a yaml file')
@click.argument('conf_name', default='credentials')
@click.option('--delete', default=False, is_flag=True,
              help='If supplied delete the encrypted config of the same name.')
@click.option('--skip/--no-skip', default=True,
              help='If supplied raise exception if decrypted file already exists')
def decrypt(conf_name, delete, skip):
    """Decrypt the named conf file via yaycl_crypt.

    With --skip (the default), an already existing decrypted file is not
    treated as an error; the command reports it and returns.
    """
    conf_name = conf_name.strip()
    try:
        yaycl_crypt.decrypt_yaml(conf, conf_name, delete=delete)
    except yaycl_crypt.YayclCryptError as ex:
        # Python 3 exceptions have no .message attribute; inspect str(ex).
        if skip and 'overwrite' in str(ex):
            print('{} conf decrypt skipped, decrypted file already exists'.format(conf_name))
            return
        else:
            raise
    print('{} conf decrypted'.format(conf_name))
if __name__ == "__main__":
main()
| gpl-2.0 |
40223232/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/formatter.py | 751 | 14930 | """Generic output formatting.
Formatter objects transform an abstract flow of formatting events into
specific output events on writer objects. Formatters manage several stack
structures to allow various properties of a writer object to be changed and
restored; writers need not be able to handle relative changes nor any sort
of ``change back'' operation. Specific writer properties which may be
controlled via formatter objects are horizontal alignment, font, and left
margin indentations. A mechanism is provided which supports providing
arbitrary, non-exclusive style settings to a writer as well. Additional
interfaces facilitate formatting events which are not reversible, such as
paragraph separation.
Writer objects encapsulate device interfaces. Abstract devices, such as
file formats, are supported as well as physical devices. The provided
implementations all work with abstract devices. The interface makes
available mechanisms for setting the properties which formatter objects
manage and inserting data into the output.
"""
import sys
AS_IS = None
class NullFormatter:
    """A formatter that performs no actions at all.

    If the writer parameter is omitted, a NullWriter instance is created;
    the writer's methods are never invoked by NullFormatter instances.
    Implementations should inherit from this class when implementing a
    writer interface without needing any inherited implementation.
    """

    def __init__(self, writer=None):
        self.writer = NullWriter() if writer is None else writer

    def end_paragraph(self, blankline):
        pass

    def add_line_break(self):
        pass

    def add_hor_rule(self, *args, **kw):
        pass

    def add_label_data(self, format, counter, blankline=None):
        pass

    def add_flowing_data(self, data):
        pass

    def add_literal_data(self, data):
        pass

    def flush_softspace(self):
        pass

    def push_alignment(self, align):
        pass

    def pop_alignment(self):
        pass

    def push_font(self, x):
        pass

    def pop_font(self):
        pass

    def push_margin(self, margin):
        pass

    def pop_margin(self):
        pass

    def set_spacing(self, spacing):
        pass

    def push_style(self, *styles):
        pass

    def pop_style(self, n=1):
        pass

    def assert_line_data(self, flag=1):
        pass
class AbstractFormatter:
    """The standard formatter.

    This implementation has demonstrated wide applicability to many writers,
    and may be used directly in most circumstances.  It has been used to
    implement a full-featured World Wide Web browser.
    """

    # Space handling policy: blank spaces at the boundary between elements
    # are handled by the outermost context.  "Literal" data is not checked
    # to determine context, so spaces in literal data are handled directly
    # in all circumstances.

    def __init__(self, writer):
        self.writer = writer            # Output device
        self.align = None               # Current alignment
        self.align_stack = []           # Alignment stack
        self.font_stack = []            # Font state
        self.margin_stack = []          # Margin state
        self.spacing = None             # Vertical spacing state
        self.style_stack = []           # Other state, e.g. color
        self.nospace = 1                # Should leading space be suppressed
        self.softspace = 0              # Should a space be inserted
        self.para_end = 1               # Just ended a paragraph
        self.parskip = 0                # Skipped space between paragraphs?
        self.hard_break = 1             # Have a hard break
        self.have_label = 0             # A list label is pending on this line

    def end_paragraph(self, blankline):
        # Close the current line, then emit at most `blankline` blank lines,
        # counting any blank lines already skipped (parskip).
        if not self.hard_break:
            self.writer.send_line_break()
            self.have_label = 0
        if self.parskip < blankline and not self.have_label:
            self.writer.send_paragraph(blankline - self.parskip)
            self.parskip = blankline
            self.have_label = 0
        self.hard_break = self.nospace = self.para_end = 1
        self.softspace = 0

    def add_line_break(self):
        # A line break is a no-op right after a hard break or paragraph end.
        if not (self.hard_break or self.para_end):
            self.writer.send_line_break()
            self.have_label = self.parskip = 0
        self.hard_break = self.nospace = 1
        self.softspace = 0

    def add_hor_rule(self, *args, **kw):
        if not self.hard_break:
            self.writer.send_line_break()
        self.writer.send_hor_rule(*args, **kw)
        self.hard_break = self.nospace = 1
        self.have_label = self.para_end = self.softspace = self.parskip = 0

    def add_label_data(self, format, counter, blankline = None):
        # String formats are expanded against `counter`; any other object is
        # passed to the writer unchanged.
        if self.have_label or not self.hard_break:
            self.writer.send_line_break()
        if not self.para_end:
            self.writer.send_paragraph((blankline and 1) or 0)
        if isinstance(format, str):
            self.writer.send_label_data(self.format_counter(format, counter))
        else:
            self.writer.send_label_data(format)
        self.nospace = self.have_label = self.hard_break = self.para_end = 1
        self.softspace = self.parskip = 0

    def format_counter(self, format, counter):
        # '1' -> decimal, 'a'/'A' -> letters, 'i'/'I' -> roman numerals;
        # every other character is copied literally.
        label = ''
        for c in format:
            if c == '1':
                label = label + ('%d' % counter)
            elif c in 'aA':
                if counter > 0:
                    label = label + self.format_letter(c, counter)
            elif c in 'iI':
                if counter > 0:
                    label = label + self.format_roman(c, counter)
            else:
                label = label + c
        return label

    def format_letter(self, case, counter):
        # Bijective base-26: 1 -> a, 26 -> z, 27 -> aa, ...
        label = ''
        while counter > 0:
            counter, x = divmod(counter-1, 26)
            # This makes a strong assumption that lowercase letters
            # and uppercase letters form two contiguous blocks, with
            # letters in order!
            s = chr(ord(case) + x)
            label = s + label
        return label

    def format_roman(self, case, counter):
        ones = ['i', 'x', 'c', 'm']
        fives = ['v', 'l', 'd']
        label, index = '', 0
        # This will die of IndexError when counter is too big
        while counter > 0:
            counter, x = divmod(counter, 10)
            if x == 9:
                label = ones[index] + ones[index+1] + label
            elif x == 4:
                label = ones[index] + fives[index] + label
            else:
                if x >= 5:
                    s = fives[index]
                    x = x-5
                else:
                    s = ''
                s = s + ones[index]*x
                label = s + label
            index = index + 1
        if case == 'I':
            return label.upper()
        return label

    def add_flowing_data(self, data):
        # Collapse internal whitespace; boundary whitespace becomes a single
        # deferred "soft space" emitted with the next chunk.
        if not data: return
        prespace = data[:1].isspace()
        postspace = data[-1:].isspace()
        data = " ".join(data.split())
        if self.nospace and not data:
            return
        elif prespace or self.softspace:
            if not data:
                if not self.nospace:
                    self.softspace = 1
                    self.parskip = 0
                return
            if not self.nospace:
                data = ' ' + data
        self.hard_break = self.nospace = self.para_end = \
                          self.parskip = self.have_label = 0
        self.softspace = postspace
        self.writer.send_flowing_data(data)

    def add_literal_data(self, data):
        # Literal data is passed through untouched; only a pending soft
        # space is flushed first.
        if not data: return
        if self.softspace:
            self.writer.send_flowing_data(" ")
        self.hard_break = data[-1:] == '\n'
        self.nospace = self.para_end = self.softspace = \
                       self.parskip = self.have_label = 0
        self.writer.send_literal_data(data)

    def flush_softspace(self):
        if self.softspace:
            self.hard_break = self.para_end = self.parskip = \
                              self.have_label = self.softspace = 0
            self.nospace = 1
            self.writer.send_flowing_data(' ')

    def push_alignment(self, align):
        # A falsy align re-pushes the current alignment without notifying
        # the writer.
        if align and align != self.align:
            self.writer.new_alignment(align)
            self.align = align
            self.align_stack.append(align)
        else:
            self.align_stack.append(self.align)

    def pop_alignment(self):
        if self.align_stack:
            del self.align_stack[-1]
        if self.align_stack:
            self.align = align = self.align_stack[-1]
            self.writer.new_alignment(align)
        else:
            self.align = None
            self.writer.new_alignment(None)

    def push_font(self, font):
        # font is a (size, italic, bold, teletype) tuple; AS_IS components
        # inherit the corresponding value from the current font.
        size, i, b, tt = font
        if self.softspace:
            self.hard_break = self.para_end = self.softspace = 0
            self.nospace = 1
            self.writer.send_flowing_data(' ')
        if self.font_stack:
            csize, ci, cb, ctt = self.font_stack[-1]
            if size is AS_IS: size = csize
            if i is AS_IS: i = ci
            if b is AS_IS: b = cb
            if tt is AS_IS: tt = ctt
        font = (size, i, b, tt)
        self.font_stack.append(font)
        self.writer.new_font(font)

    def pop_font(self):
        if self.font_stack:
            del self.font_stack[-1]
        if self.font_stack:
            font = self.font_stack[-1]
        else:
            font = None
        self.writer.new_font(font)

    def push_margin(self, margin):
        # The writer is told the innermost non-false margin and the nesting
        # level of non-false margins.
        self.margin_stack.append(margin)
        fstack = [m for m in self.margin_stack if m]
        if not margin and fstack:
            margin = fstack[-1]
        self.writer.new_margin(margin, len(fstack))

    def pop_margin(self):
        if self.margin_stack:
            del self.margin_stack[-1]
        fstack = [m for m in self.margin_stack if m]
        if fstack:
            margin = fstack[-1]
        else:
            margin = None
        self.writer.new_margin(margin, len(fstack))

    def set_spacing(self, spacing):
        self.spacing = spacing
        self.writer.new_spacing(spacing)

    def push_style(self, *styles):
        if self.softspace:
            self.hard_break = self.para_end = self.softspace = 0
            self.nospace = 1
            self.writer.send_flowing_data(' ')
        for style in styles:
            self.style_stack.append(style)
        self.writer.new_styles(tuple(self.style_stack))

    def pop_style(self, n=1):
        del self.style_stack[-n:]
        self.writer.new_styles(tuple(self.style_stack))

    def assert_line_data(self, flag=1):
        # Caller asserts data was written to the line out-of-band; `flag`
        # tells whether the line can still absorb flowing data.
        self.nospace = self.hard_break = not flag
        self.para_end = self.parskip = self.have_label = 0
class NullWriter:
    """Minimal writer interface to use in testing & inheritance.

    Provides the writer interface definition only; every method is a
    no-op.  Serves as the base class for writers that do not need to
    inherit any implementation methods.
    """

    def __init__(self):
        pass

    def flush(self):
        pass

    def new_alignment(self, align):
        pass

    def new_font(self, font):
        pass

    def new_margin(self, margin, level):
        pass

    def new_spacing(self, spacing):
        pass

    def new_styles(self, styles):
        pass

    def send_paragraph(self, blankline):
        pass

    def send_line_break(self):
        pass

    def send_hor_rule(self, *args, **kw):
        pass

    def send_label_data(self, data):
        pass

    def send_flowing_data(self, data):
        pass

    def send_literal_data(self, data):
        pass
class AbstractWriter(NullWriter):
    """Debugging writer: announces every call on standard output.

    Useful when debugging formatters, but not much else; each method
    prints its own name and arguments.
    """

    def new_alignment(self, align):
        print("new_alignment({!r})".format(align))

    def new_font(self, font):
        print("new_font({!r})".format(font))

    def new_margin(self, margin, level):
        print("new_margin({!r}, {:d})".format(margin, level))

    def new_spacing(self, spacing):
        print("new_spacing({!r})".format(spacing))

    def new_styles(self, styles):
        print("new_styles({!r})".format(styles))

    def send_paragraph(self, blankline):
        print("send_paragraph({!r})".format(blankline))

    def send_line_break(self):
        print("send_line_break()")

    def send_hor_rule(self, *args, **kw):
        print("send_hor_rule()")

    def send_label_data(self, data):
        print("send_label_data({!r})".format(data))

    def send_flowing_data(self, data):
        print("send_flowing_data({!r})".format(data))

    def send_literal_data(self, data):
        print("send_literal_data({!r})".format(data))
class DumbWriter(NullWriter):
    """Write word-wrapped plain text to a file object.

    Output goes to *file* (standard output when omitted) and is simply
    word-wrapped to *maxcol* columns.  Suitable for reflowing a
    sequence of paragraphs.
    """

    def __init__(self, file=None, maxcol=72):
        self.file = file or sys.stdout
        self.maxcol = maxcol
        NullWriter.__init__(self)
        self.reset()

    def reset(self):
        # Current output column and whether a word break is pending.
        self.col = 0
        self.atbreak = 0

    def send_paragraph(self, blankline):
        self.file.write('\n' * blankline)
        self.reset()

    def send_line_break(self):
        self.file.write('\n')
        self.reset()

    def send_hor_rule(self, *args, **kw):
        self.file.write('\n')
        self.file.write('-' * self.maxcol)
        self.file.write('\n')
        self.reset()

    def send_literal_data(self, data):
        # Literal data passes through untouched; only the column tracking
        # is updated (tabs are measured with expandtabs).
        self.file.write(data)
        newline = data.rfind('\n')
        if newline >= 0:
            self.col = 0
            data = data[newline + 1:]
        data = data.expandtabs()
        self.col += len(data)
        self.atbreak = 0

    def send_flowing_data(self, data):
        # Re-flow words onto lines no wider than maxcol, emitting a single
        # space (or newline at the wrap point) between words.
        if not data:
            return
        pending_break = self.atbreak or data[0].isspace()
        column = self.col
        for word in data.split():
            if pending_break:
                if column + len(word) >= self.maxcol:
                    self.file.write('\n')
                    column = 0
                else:
                    self.file.write(' ')
                    column += 1
            self.file.write(word)
            column += len(word)
            pending_break = 1
        self.col = column
        self.atbreak = data[-1].isspace()
def test(file = None):
    """Reflow a text file (or stdin) paragraph-by-paragraph to stdout.

    Input comes from *file* when given, else from the first command-line
    argument, else from standard input.  Blank lines delimit paragraphs.
    """
    w = DumbWriter()
    f = AbstractFormatter(w)
    if file is not None:
        fp = open(file)
    elif sys.argv[1:]:
        fp = open(sys.argv[1])
    else:
        fp = sys.stdin
    for line in fp:
        if line == '\n':
            f.end_paragraph(1)
        else:
            f.add_flowing_data(line)
    f.end_paragraph(0)
if __name__ == '__main__':
test()
| gpl-3.0 |
hippyk/pix2code | model/convert_imgs_to_arrays.py | 2 | 1160 | #!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
__author__ = 'Tony Beltramelli - www.tonybeltramelli.com'
import os
import sys
import shutil
from classes.Utils import *
from classes.model.Config import *
argv = sys.argv[1:]

if len(argv) < 2:
    print("Error: not enough argument supplied:")
    print("convert_imgs_to_arrays.py <input path> <output path>")
    # NOTE(review): exits with status 0 even though this is an error path — confirm.
    exit(0)
else:
    input_path = argv[0]
    output_path = argv[1]

if not os.path.exists(output_path):
    os.makedirs(output_path)

print("Converting images to numpy arrays...")

for f in os.listdir(input_path):
    if f.find(".png") != -1:
        # Preprocess to the model's input size and store as a compressed array.
        img = Utils.get_preprocessed_img("{}/{}".format(input_path, f), IMAGE_SIZE)
        file_name = f[:f.find(".png")]

        np.savez_compressed("{}/{}".format(output_path, file_name), features=img)
        # Round-trip check: the stored array must load back identically.
        retrieve = np.load("{}/{}.npz".format(output_path, file_name))["features"]

        assert np.array_equal(img, retrieve)

        # The .gui markup file accompanies its image into the output folder.
        shutil.copyfile("{}/{}.gui".format(input_path, file_name), "{}/{}.gui".format(output_path, file_name))

print("Numpy arrays saved in {}".format(output_path))
| apache-2.0 |
jvkops/titanium_mobile | support/android/tilogger.py | 37 | 1757 | from __future__ import with_statement
import os, sys
class TiLogger:
    """Simple leveled logger writing to a stream and, optionally, a log file."""

    # Log levels, in increasing verbosity; messages at a level above
    # self.level are suppressed.
    ERROR = 0
    WARN = 1
    INFO = 2
    DEBUG = 3
    TRACE = 4

    def __init__(self, logfile, level=TRACE, output_stream=sys.stdout):
        self.level = level
        self.output_stream = output_stream
        # NOTE(review): the log file path is kept in a module-level global,
        # so all TiLogger instances share the last logfile passed in.
        global _logfile
        _logfile = logfile
        if _logfile is not None:
            logfolder = os.path.dirname(_logfile)
            try:
                if not os.path.exists(logfolder):
                    os.mkdir(logfolder)
            except:
                print "[ERROR] error creating log folder %s: %s" % (logfolder, sys.exc_info()[0])
            try:
                # Truncate/initialize the log file up front so later appends work.
                with open(_logfile, 'w') as f:
                    f.write('Logfile initialized\n')
            except:
                print "[ERROR] error initializing (writing to) log file %s: %s" % (_logfile, sys.exc_info()[0])
        self.info("logfile = " + logfile)

    def _level_prefix(self, level):
        # Map a numeric level to its message tag.
        return {
            TiLogger.ERROR: "ERROR",
            TiLogger.WARN: "WARN",
            TiLogger.INFO: "INFO",
            TiLogger.DEBUG: "DEBUG",
            TiLogger.TRACE: "TRACE"
        }[level];

    def _log(self, msg, level):
        # Write "[LEVEL] msg" to the output stream, then append the same
        # line to the shared log file (best effort).
        global _logfile
        if self.level >= level:
            prefix = self._level_prefix(level)
            line = "[%s] %s" % (prefix, msg)
            print >> self.output_stream, line
            self.output_stream.flush()
            sys.stdout.flush()
            if _logfile is not None:
                try:
                    with open(_logfile, 'a') as f:
                        f.write("%s\n" % line)
                except:
                    print "[ERROR] error writing to log %s: %s" % (_logfile, sys.exc_info()[0])

    def info(self, msg):
        self._log(msg, TiLogger.INFO)

    def debug(self, msg):
        self._log(msg, TiLogger.DEBUG)

    def warn(self, msg):
        self._log(msg, TiLogger.WARN)

    def trace(self, msg):
        self._log(msg, TiLogger.TRACE)

    def error(self, msg):
        self._log(msg, TiLogger.ERROR)
# if __name__ == "__main__":
# _logfile = ''
# print "[DEBUG] TiLogger initialized"
| apache-2.0 |
subhacom/moose-core | tests/python/test_function.py | 2 | 2728 | # test_function.py ---
#
# Filename: test_function.py
# Description:
# Author: subha
# Maintainer:
# Created: Sat Mar 28 19:34:20 2015 (-0400)
# Version:
# Last-Updated:
# By:
# Update #: 0
# URL:
# Keywords:
# Compatibility:
#
#
# Commentary:
#
#
#
#
# Change log:
#
#
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA.
#
#
# Code:
"""Check variable ordering - bug #161"""
from __future__ import print_function
import numpy as np
import moose
def test_var_order():
    """Regression check for variable ordering (bug #161) in moose.Function.

    The y values are one step behind the x values because of scheduling
    sequences.
    """
    nsteps = 5
    simtime = nsteps
    dt = 1.0
    # fn0 = moose.Function('/fn0')
    # fn0.x.num = 2
    # fn0.expr = 'x0 + x1'
    # fn0.mode = 1
    fn1 = moose.Function('/fn1')
    fn1.x.num = 2
    fn1.expr = 'y1 + y0 + x1 + x0'
    fn1.mode = 1
    # NOTE(review): `inputs /= 10` below scales the ndarray in place after it
    # has already been assigned to a table's vector; this assumes
    # StimulusTable.vector copies the data on assignment — confirm.
    inputs = np.arange(0, nsteps+1, 1.0)
    x0 = moose.StimulusTable('/x0')
    x0.vector = inputs
    x0.startTime = 0.0
    x0.stopTime = simtime
    x0.stepPosition = 0.0
    inputs /= 10
    x1 = moose.StimulusTable('/x1')
    x1.vector = inputs
    x1.startTime = 0.0
    x1.stopTime = simtime
    x1.stepPosition = 0.0
    inputs /= 10
    y0 = moose.StimulusTable('/y0')
    y0.vector = inputs
    y0.startTime = 0.0
    y0.stopTime = simtime
    y0.stepPosition = 0.0
    inputs /= 10
    y1 = moose.StimulusTable('/y1')
    y1.vector = inputs
    y1.startTime = 0.0  # duplicate assignment removed
    y1.stopTime = simtime
    y1.stepPosition = 0.0
    # x inputs feed the function; the function's output is fed back into
    # the y tables, which re-enter the expression one step delayed.
    moose.connect(x0, 'output', fn1.x[0], 'input')
    moose.connect(x1, 'output', fn1.x[1], 'input')
    moose.connect(fn1, 'requestOut', y0, 'getOutputValue')
    moose.connect(fn1, 'requestOut', y1, 'getOutputValue')
    z1 = moose.Table('/z1')
    moose.connect(z1, 'requestOut', fn1, 'getValue')
    for ii in range(32):
        moose.setClock(ii, dt)
    moose.reinit()
    moose.start(simtime)
    for ii, value in enumerate(z1.vector):
        print(ii, value)
if __name__ == '__main__':
test_var_order()
#
# test_function.py ends here
| gpl-3.0 |
remybaranx/qtaste | demo/Testbeds/ControlScripts/controlscript_addon.py | 1 | 2642 | ##
# Control script Addon jython module.
#
# This module contains extention of the ControlScript class:
# - VirtualBox: this extention class is to be used to control Sun VirtualBox images.
##
from controlscript import *
import time
class ControlScriptAddon(ControlScript):
    """Control script with add-on actions.

    Thin specialisation of ControlScript. The base class stores the
    control actions in self.controlActions, keeps the additional
    command-line arguments (all but the first) in self.arguments,
    records the TESTBED environment variable in self.testbed, and
    dispatches to start() or stop() according to the value of the
    first command-line argument (which must be 'start' or 'stop').
    """

    def __init__(self, controlActions):
        """Forward the control actions to the base-class initializer.

        @param controlActions sequence of ControlAction (list or tuple)
        """
        # All behaviour lives in ControlScript; nothing extra to set up here.
        ControlScript.__init__(self, controlActions)
class VirtualBox(ControlAction):
    """ Control script action for starting/stopping a Virtual Box image """
    # NOTE(review): Python 2 syntax (print statements); keep edits py2-safe.
    # Relies on the `VBoxManage` CLI being available on the PATH.

    def __init__(self, description, nameOfVBoxImage, active=True):
        """
        Initialize VirtualBox object
        @param description control script action description, also used as window title
        @param nameOfVBoxImage the sun virtual box image id to be started
        @param active whether this control action is enabled (forwarded to ControlAction)
        """
        ControlAction.__init__(self, description, active)
        # Record which script file declared this action (file name from the
        # outermost stack frame, extracted from between the quote characters).
        self.callerScript = traceback.format_stack()[0].split("\"")[1]
        self.nameOfVBoxImage = nameOfVBoxImage

    def dumpDataType(self, prefix, writer):
        """ Method called on start. It dumps the data type. to be overridden by subclasses """
        super(VirtualBox, self).dumpDataType(prefix, writer)
        # Declare our single extra field as a string.
        writer.write(prefix + ".nameOfVBoxImage=string\n")

    def dump(self, writer):
        """ Method called on start. It dump the control action parameter in the writer, to be overridden by subclasses """
        super(VirtualBox, self).dump(writer)
        writer.write(str(self.caID) + ".nameOfVBoxImage=\"" + str(self.nameOfVBoxImage) + "\"\n")

    def start(self):
        # the VBoxManage command has to be in the PATH ...
        commandArguments = ['VBoxManage','startvm',self.nameOfVBoxImage]
        print "Starting " + self.description + "..."
        print commandArguments;
        self.executeCommand(commandArguments);
        # Fixed grace period to let the VM boot before the test goes on.
        time.sleep(30)
        print

    def stop(self):
        # Hard power-off of the VM (no guest shutdown).
        commandArguments = ['VBoxManage', 'controlvm', self.nameOfVBoxImage, 'poweroff']
        print "Stopping " + self.description + "..."
        print commandArguments;
        self.executeCommand(commandArguments);
        # Roll the image back to its current snapshot so the next run starts clean.
        commandArguments = ['VBoxManage', 'snapshot', self.nameOfVBoxImage, 'restorecurrent']
        self.executeCommand(commandArguments);
        print
| gpl-3.0 |
colbyga/pychess | lib/pychess/Utils/Piece.py | 22 | 1197 | from pychess.Utils.const import KING, QUEEN, ROOK, BISHOP, KNIGHT, PAWN
from pychess.Utils.repr import reprSign, reprColor, reprPiece
class Piece:
    """A chess piece: a colour/type pair plus presentation state
    (opacity and board coordinates) used while drawing/animating."""

    def __init__(self, color, piece, captured=False):
        self.color = color
        self.piece = piece
        self.captured = captured
        # In crazyhouse we need to know this for later captures.
        self.promoted = False
        # Presentation state: opacity and (optional) animated coordinates.
        self.opacity = 1.0
        self.x = None
        self.y = None

    # "sign" is a deprecated synonym for "piece".
    def _set_sign(self, sign):
        self.piece = sign

    def _get_sign(self):
        return self.piece

    sign = property(_get_sign, _set_sign)

    def __repr__(self):
        represen = "<%s %s" % (reprColor[self.color], reprPiece[self.piece])
        if self.opacity != 1.0:
            represen += " Op:%0.1f" % self.opacity
        # Use identity checks against None (PEP 8); 0.0 is a valid coordinate.
        if self.x is not None or self.y is not None:
            if self.x is not None:
                represen += " X:%0.1f" % self.x
            else:
                represen += " X:None"
            if self.y is not None:
                represen += " Y:%0.1f" % self.y
            else:
                represen += " Y:None"
        represen += ">"
        return represen
| gpl-3.0 |
mmt/deeprl22 | hw2/frozen_lake.py | 7 | 4392 | import numpy as np
import sys
from six import StringIO, b
from gym import utils
import discrete_env
# Action encoding shared with the FrozenLakeEnv transition table.
LEFT = 0
DOWN = 1
RIGHT = 2
UP = 3

# Built-in map layouts, keyed by name.
# Cell legend: S = start, F = frozen (safe), H = hole (terminal), G = goal.
MAPS = {
    "4x4": [
        "SFFF",
        "FHFH",
        "FFFH",
        "HFFG"
    ],
    "8x8": [
        "SFFFFFFF",
        "FFFFFFFF",
        "FFFHFFFF",
        "FFFFFHFF",
        "FFFHFFFF",
        "FHHFFFHF",
        "FHFFHFHF",
        "FFFHFFFG"
    ],
}
class FrozenLakeEnv(discrete_env.DiscreteEnv):
    """
    Winter is here. You and your friends were tossing around a frisbee at the park
    when you made a wild throw that left the frisbee out in the middle of the lake.
    The water is mostly frozen, but there are a few holes where the ice has melted.
    If you step into one of those holes, you'll fall into the freezing water.
    At this time, there's an international frisbee shortage, so it's absolutely imperative that
    you navigate across the lake and retrieve the disc.
    However, the ice is slippery, so you won't always move in the direction you intend.
    The surface is described using a grid like the following

        SFFF
        FHFH
        FFFH
        HFFG

    S : starting point, safe
    F : frozen surface, safe
    H : hole, fall to your doom
    G : goal, where the frisbee is located

    The episode ends when you reach the goal or fall in a hole.
    You receive a reward of 1 if you reach the goal, and zero otherwise.
    """

    metadata = {'render.modes': ['human', 'ansi']}

    def __init__(self, desc=None, map_name="4x4", is_slippery=True):
        # desc: explicit map as a list of strings; map_name: key into MAPS.
        # Exactly one of the two is needed; desc wins when both are given.
        if desc is None and map_name is None:
            raise ValueError('Must provide either desc or map_name')
        elif desc is None:
            desc = MAPS[map_name]
        self.desc = desc = np.asarray(desc, dtype='c')
        self.nrow, self.ncol = nrow, ncol = desc.shape

        nA = 4            # four actions: LEFT/DOWN/RIGHT/UP
        nS = nrow * ncol  # one state per grid cell

        # Initial state distribution: uniform over all 'S' cells.
        isd = np.array(desc == b'S').astype('float64').ravel()
        isd /= isd.sum()

        # Transition table expected by DiscreteEnv:
        # P[state][action] -> list of (probability, nextstate, reward, done).
        P = {s : {a : [] for a in range(nA)} for s in range(nS)}

        def to_s(row, col):
            # Flatten a (row, col) pair into a single state index.
            return row*ncol + col

        def inc(row, col, a):
            # Apply action a to (row, col), clamping at the grid edges.
            if a==0: # left
                col = max(col-1,0)
            elif a==1: # down
                row = min(row+1,nrow-1)
            elif a==2: # right
                col = min(col+1,ncol-1)
            elif a==3: # up
                row = max(row-1,0)
            return (row, col)

        for row in range(nrow):
            for col in range(ncol):
                s = to_s(row, col)
                for a in range(4):
                    li = P[s][a]
                    letter = desc[row, col]
                    if letter in b'GH':
                        # Goal and hole cells are absorbing: stay put, no reward.
                        li.append((1.0, s, 0, True))
                    else:
                        if is_slippery:
                            # Intended direction with prob 0.8; each of the two
                            # perpendicular "slip" directions with prob 0.1.
                            for b in [(a-1)%4, a, (a+1)%4]:
                                newrow, newcol = inc(row, col, b)
                                newstate = to_s(newrow, newcol)
                                newletter = desc[newrow, newcol]
                                done = bytes(newletter) in b'GH'
                                rew = float(newletter == b'G')
                                li.append((0.8 if b==a else 0.1, newstate, rew, done))
                        else:
                            # Deterministic dynamics.
                            newrow, newcol = inc(row, col, a)
                            newstate = to_s(newrow, newcol)
                            newletter = desc[newrow, newcol]
                            done = bytes(newletter) in b'GH'
                            rew = float(newletter == b'G')
                            li.append((1.0, newstate, rew, done))

        super(FrozenLakeEnv, self).__init__(nS, nA, P, isd)

    def _render(self, mode='human', close=False):
        # Draw the grid, highlighting the agent's current cell in red; the
        # last action taken (if any) is printed above the grid.
        if close:
            return
        outfile = StringIO() if mode == 'ansi' else sys.stdout

        row, col = self.s // self.ncol, self.s % self.ncol
        desc = self.desc.tolist()
        desc = [[c.decode('utf-8') for c in line] for line in desc]
        desc[row][col] = utils.colorize(desc[row][col], "red", highlight=True)
        if self.lastaction is not None:
            outfile.write(" ({})\n".format(["Left","Down","Right","Up"][self.lastaction]))
        else:
            outfile.write("\n")
        outfile.write("\n".join(''.join(line) for line in desc)+"\n")
        return outfile
| mit |
felipenaselva/felipe.repository | plugin.video.mrpiracy/resources/lib/AADecoder.py | 14 | 8517 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector for openload.io
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
# by DrZ3r0
# ------------------------------------------------------------
# Modified by Shani
import re
class AADecoder(object):
    """Decoder for "aaencode" obfuscated JavaScript.

    aaencode represents each character of the original script as an
    arithmetic expression built from a fixed alphabet of Japanese
    emoticon tokens (see self.b); this class detects the encoding and
    evaluates those expressions back into plain text.

    NOTE(review): Python 2 syntax (print statements, bare except); the
    emoticon token strings below are load-bearing — do not reformat them.
    """

    def __init__(self, aa_encoded_data):
        # Strip the decorative separator before any parsing.
        self.encoded_str = aa_encoded_data.replace('/*´∇`*/', '')

        # Token alphabet: self.b[i] is the emoticon standing for digit i.
        self.b = ["(c^_^o)", "(゚Θ゚)", "((o^_^o) - (゚Θ゚))", "(o^_^o)",
                  "(゚ー゚)", "((゚ー゚) + (゚Θ゚))", "((o^_^o) +(o^_^o))", "((゚ー゚) + (o^_^o))",
                  "((゚ー゚) + (゚ー゚))", "((゚ー゚) + (゚ー゚) + (゚Θ゚))", "(゚Д゚) .゚ω゚ノ", "(゚Д゚) .゚Θ゚ノ",
                  "(゚Д゚) ['c']", "(゚Д゚) .゚ー゚ノ", "(゚Д゚) .゚Д゚ノ", "(゚Д゚) [゚Θ゚]"]

    def is_aaencoded(self):
        # The aaencode preamble must be present, followed (later) by the
        # characteristic execution trailer.
        idx = self.encoded_str.find("゚ω゚ノ= /`m´)ノ ~┻━┻   //*´∇`*/ ['_']; o=(゚ー゚)  =_=3; c=(゚Θ゚) =(゚ー゚)-(゚ー゚); ")
        if idx == -1:
            return False
        is_encoded = self.encoded_str.find("(゚Д゚)[゚o゚]) (゚Θ゚)) ('_');", idx) != -1
        return is_encoded

    def base_repr(self, number, base=2, padding=0):
        """Render `number` in the given base (digits 0-9A-Z), with optional
        zero padding and a leading '-' for negatives."""
        digits = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
        if base > len(digits):
            base = len(digits)
        num = abs(number)
        res = []
        while num:
            res.append(digits[num % base])
            num //= base
        if padding:
            res.append('0' * padding)
        if number < 0:
            res.append('-')
        return ''.join(reversed(res or '0'))

    def decode_char(self, enc_char, radix):
        """Decode one encoded character: substitute emoticon tokens with
        their digit values, split into parenthesised digit groups, and
        concatenate the decoded digits into a base-`radix` string."""
        end_char = "+ "
        str_char = ""
        while enc_char != '':
            found = False
            # (dead commented-out token-matching variant removed; the token
            # substitution below replaces it)
            if not found:
                # Replace every emoticon token with its digit index.
                for i in range(len(self.b)):
                    enc_char = enc_char.replace(self.b[i], str(i))
                # Collect the top-level "(...)" groups by tracking bracket balance.
                startpos = 0
                findClose = True
                balance = 1
                result = []
                if enc_char.startswith('('):
                    l = 0
                    for t in enc_char[1:]:
                        l += 1
                        if findClose and t == ')':
                            balance -= 1
                            if balance == 0:
                                result += [enc_char[startpos:l + 1]]
                                findClose = False
                                continue
                        elif not findClose and t == '(':
                            startpos = l
                            findClose = True
                            balance = 1
                            continue
                        elif t == '(':
                            balance += 1
                if result is None or len(result) == 0:
                    return ""
                else:
                    # Each group decodes to one digit of the character code.
                    for r in result:
                        value = self.decode_digit(r, radix)
                        str_char += value
                        if value == "":
                            return ""
                    return str_char
            # NOTE(review): only reachable if `found` were ever set above.
            enc_char = enc_char[len(end_char):]
        return str_char

    def parseJSString(self, s):
        """Evaluate a JavaScript-style boolean/array arithmetic snippet
        (e.g. '!+[]' -> 1, '[]' -> 0) as a Python int; None on failure."""
        try:
            tmp = (s.replace('!+[]', '1').replace('!![]', '1').replace('[]', '0'))
            val = int(eval(tmp))
            return val
        except:
            pass

    def decode_digit(self, enc_int, radix):
        """Decode one digit group: try a direct eval first, then fall back
        to splitting on '))+' and evaluating each piece (rebalancing any
        truncated closing brackets)."""
        try:
            return str(eval(enc_int))
        except: pass
        rr = '(\(.+?\)\))\+'
        rerr = enc_int.split('))+')  # re.findall(rr, enc_int)
        v = ""
        for c in rerr:
            if len(c) > 0:
                if c.strip().endswith('+'):
                    c = c.strip()[:-1]
                # Re-append the ')' characters lost by splitting on '))+'.
                startbrackets = len(c) - len(c.replace('(', ''))
                endbrackets = len(c) - len(c.replace(')', ''))
                if startbrackets > endbrackets:
                    c += ')' * (startbrackets - endbrackets)
                if '[' in c:
                    v += str(self.parseJSString(c))
                else:
                    v += str(eval(c))
        return v
        # (dead commented-out token-walking variant removed — it was
        # unreachable after the early return above)

    def decode(self):
        """Decode the whole payload; returns the decoded text, or False
        when the input does not match the expected aaencode layout."""
        self.encoded_str = re.sub('^\s+|\s+$', '', self.encoded_str)
        # Extract the character stream between the payload delimiters.
        pattern = (r"\(゚Д゚\)\[゚o゚\]\+ (.+?)\(゚Д゚\)\[゚o゚\]\)")
        result = re.search(pattern, self.encoded_str, re.DOTALL)
        if result is None:
            print "AADecoder: data not found"
            return False
        data = result.group(1)
        # Per-character markers: begin_char precedes every character,
        # alt_char flags a base-16 (utf8) character instead of base-8.
        begin_char = "(゚Д゚)[゚ε゚]+"
        alt_char = "(o゚ー゚o)+ "
        out = ''
        while data != '':
            # Check new char
            if data.find(begin_char) != 0:
                print "AADecoder: data not found"
                return False
            data = data[len(begin_char):]
            # Find encoded char
            enc_char = ""
            if data.find(begin_char) == -1:
                enc_char = data
                data = ""
            else:
                enc_char = data[:data.find(begin_char)]
                data = data[len(enc_char):]
            radix = 8
            # Detect radix 16 for utf8 char
            if enc_char.find(alt_char) == 0:
                enc_char = enc_char[len(alt_char):]
                radix = 16
            str_char = self.decode_char(enc_char, radix)
            if str_char == "":
                print "no match : "
                print data + "\nout = " + out + "\n"
                return False
            out += chr(int(str_char, radix))
        if out == "":
            print "no match : " + data
            return False
        return out
| gpl-2.0 |
pedrobaeza/project-service | project_baseuser/__openerp__.py | 2 | 3592 | # -*- coding: utf-8 -*-
##############################################################################
#
# Daniel Reis, 2013
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP/Odoo module manifest. The description string is shown to end
# users in the module list; spelling errors in it have been corrected.
{
    'name': 'Projects extensions for user roles',
    'version': '1.0',
    'category': 'Project Management',
    'summary': 'Extend Project user roles to support more complex use cases',
    'description': """\
Employees are now basic Project users, able to create new documents (Issues
or Tasks). These are kept editable while in New and Cancelled states, to
allow for corrections or for the user himself to cancel an incorrectly
created request.
Previously, Employee users did not have any write nor create access to project
documents.

Project Users, on the other hand, are supposed to act on these documents,
such as reported issues, and update them accordingly, so they have write
access for all states. Employee users don't have write access on later states,
but can still write comments and communicate through the message board (open
chatter).

In general, users will only be able to see documents where:

 * They are assigned/responsible for, or
 * They are following, or
 * They are a team member for the corresponding Project (but not if only in
   the project's follower list).

Project Managers have access rules similar to Project Users, but additionally
can create new projects and can see all documents for the projects they are
the Manager.
As a consequence, Project Managers no longer have unconditional access to all
Tasks and Issues, and will only be able to edit the definitions of Projects
they manage.
This makes it possible for a Project Manager to have private projects that
other users, Project Managers included, will not be able to see. They will
need to be added as followers or team members to be able to see it.
Public Projects and their documents are still visible to everyone.
Portal users access rules are kept unchanged.

---------------------
Access Rules summary:
---------------------

Employee Users
    Can see only documents followed or responsible for (in "user_id").
    Can create new documents and edit them while in "New"/"Cancelled" states.
Project Users
    Can edit Project Issues and Tasks in any stage/state.
    Can see all documents for projects they are followers or team members.
    Can see only documents followed or assigned to for other projects.
Project Managers
    Can create new projects and edit their attributes.
    Can see all documents (Tasks or Issues) but only for their managed
    projects.
    For the other Projects, will see only followed documents, just like the
    other users.
""",
    'author': 'Daniel Reis',
    'depends': [
        'project',
    ],
    'data': [
        'project_view.xml',
        'security/ir.model.access.csv',
        'security/project_security.xml',
    ],
    'installable': True,
}
| agpl-3.0 |
40123237/w17test | static/Brython3.1.0-20150301-090019/Lib/unittest/test/test_setups.py | 791 | 16440 | import io
import sys
import unittest
def resultFactory(*_):
    """Ignore all positional arguments and hand back a fresh TestResult.

    Used as the ``resultclass`` for TextTestRunner so the tests below get
    a plain, silent result object.
    """
    fresh_result = unittest.TestResult()
    return fresh_result
class TestSetups(unittest.TestCase):
    """Tests for the class- and module-level fixture hooks
    (setUpClass/tearDownClass, setUpModule/tearDownModule): call counts,
    error reporting, skipping, and execution ordering."""

    def getRunner(self):
        # Runner that swallows output and produces plain TestResult objects.
        return unittest.TextTestRunner(resultclass=resultFactory,
                                       stream=io.StringIO())

    def runTests(self, *cases):
        # Load all tests from the given case classes and run them through a
        # deliberately awkward suite structure.
        suite = unittest.TestSuite()
        for case in cases:
            tests = unittest.defaultTestLoader.loadTestsFromTestCase(case)
            suite.addTests(tests)

        runner = self.getRunner()

        # creating a nested suite exposes some potential bugs
        realSuite = unittest.TestSuite()
        realSuite.addTest(suite)
        # adding empty suites to the end exposes potential bugs
        suite.addTest(unittest.TestSuite())
        realSuite.addTest(unittest.TestSuite())
        return runner.run(realSuite)

    def test_setup_class(self):
        # setUpClass must run exactly once per class, not once per test.
        class Test(unittest.TestCase):
            setUpCalled = 0
            @classmethod
            def setUpClass(cls):
                Test.setUpCalled += 1
                unittest.TestCase.setUpClass()
            def test_one(self):
                pass
            def test_two(self):
                pass

        result = self.runTests(Test)

        self.assertEqual(Test.setUpCalled, 1)
        self.assertEqual(result.testsRun, 2)
        self.assertEqual(len(result.errors), 0)

    def test_teardown_class(self):
        # tearDownClass must likewise run exactly once per class.
        class Test(unittest.TestCase):
            tearDownCalled = 0
            @classmethod
            def tearDownClass(cls):
                Test.tearDownCalled += 1
                unittest.TestCase.tearDownClass()
            def test_one(self):
                pass
            def test_two(self):
                pass

        result = self.runTests(Test)

        self.assertEqual(Test.tearDownCalled, 1)
        self.assertEqual(result.testsRun, 2)
        self.assertEqual(len(result.errors), 0)

    def test_teardown_class_two_classes(self):
        # Each class gets its own tearDownClass call.
        class Test(unittest.TestCase):
            tearDownCalled = 0
            @classmethod
            def tearDownClass(cls):
                Test.tearDownCalled += 1
                unittest.TestCase.tearDownClass()
            def test_one(self):
                pass
            def test_two(self):
                pass

        class Test2(unittest.TestCase):
            tearDownCalled = 0
            @classmethod
            def tearDownClass(cls):
                Test2.tearDownCalled += 1
                unittest.TestCase.tearDownClass()
            def test_one(self):
                pass
            def test_two(self):
                pass

        result = self.runTests(Test, Test2)

        self.assertEqual(Test.tearDownCalled, 1)
        self.assertEqual(Test2.tearDownCalled, 1)
        self.assertEqual(result.testsRun, 4)
        self.assertEqual(len(result.errors), 0)

    def test_error_in_setupclass(self):
        # A failing setUpClass skips the class's tests and is reported as
        # a single error named after the hook.
        class BrokenTest(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                raise TypeError('foo')
            def test_one(self):
                pass
            def test_two(self):
                pass

        result = self.runTests(BrokenTest)

        self.assertEqual(result.testsRun, 0)
        self.assertEqual(len(result.errors), 1)
        error, _ = result.errors[0]
        self.assertEqual(str(error),
                    'setUpClass (%s.BrokenTest)' % __name__)

    def test_error_in_teardown_class(self):
        # A failing tearDownClass still lets the tests run, and produces
        # one error per class.
        class Test(unittest.TestCase):
            tornDown = 0
            @classmethod
            def tearDownClass(cls):
                Test.tornDown += 1
                raise TypeError('foo')
            def test_one(self):
                pass
            def test_two(self):
                pass

        class Test2(unittest.TestCase):
            tornDown = 0
            @classmethod
            def tearDownClass(cls):
                Test2.tornDown += 1
                raise TypeError('foo')
            def test_one(self):
                pass
            def test_two(self):
                pass

        result = self.runTests(Test, Test2)
        self.assertEqual(result.testsRun, 4)
        self.assertEqual(len(result.errors), 2)
        self.assertEqual(Test.tornDown, 1)
        self.assertEqual(Test2.tornDown, 1)

        error, _ = result.errors[0]
        self.assertEqual(str(error),
                    'tearDownClass (%s.Test)' % __name__)

    def test_class_not_torndown_when_setup_fails(self):
        # tearDownClass must not run if setUpClass raised.
        class Test(unittest.TestCase):
            tornDown = False
            @classmethod
            def setUpClass(cls):
                raise TypeError
            @classmethod
            def tearDownClass(cls):
                Test.tornDown = True
                raise TypeError('foo')
            def test_one(self):
                pass

        self.runTests(Test)
        self.assertFalse(Test.tornDown)

    def test_class_not_setup_or_torndown_when_skipped(self):
        # Neither class hook runs for a class-level skip.
        class Test(unittest.TestCase):
            classSetUp = False
            tornDown = False
            @classmethod
            def setUpClass(cls):
                Test.classSetUp = True
            @classmethod
            def tearDownClass(cls):
                Test.tornDown = True
            def test_one(self):
                pass

        Test = unittest.skip("hop")(Test)
        self.runTests(Test)
        self.assertFalse(Test.classSetUp)
        self.assertFalse(Test.tornDown)

    def test_setup_teardown_order_with_pathological_suite(self):
        # Even with tests from different classes/modules interleaved in
        # separate sub-suites, fixtures must run in module/class order.
        results = []

        class Module1(object):
            @staticmethod
            def setUpModule():
                results.append('Module1.setUpModule')
            @staticmethod
            def tearDownModule():
                results.append('Module1.tearDownModule')

        class Module2(object):
            @staticmethod
            def setUpModule():
                results.append('Module2.setUpModule')
            @staticmethod
            def tearDownModule():
                results.append('Module2.tearDownModule')

        class Test1(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                results.append('setup 1')
            @classmethod
            def tearDownClass(cls):
                results.append('teardown 1')
            def testOne(self):
                results.append('Test1.testOne')
            def testTwo(self):
                results.append('Test1.testTwo')

        class Test2(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                results.append('setup 2')
            @classmethod
            def tearDownClass(cls):
                results.append('teardown 2')
            def testOne(self):
                results.append('Test2.testOne')
            def testTwo(self):
                results.append('Test2.testTwo')

        class Test3(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                results.append('setup 3')
            @classmethod
            def tearDownClass(cls):
                results.append('teardown 3')
            def testOne(self):
                results.append('Test3.testOne')
            def testTwo(self):
                results.append('Test3.testTwo')

        Test1.__module__ = Test2.__module__ = 'Module'
        Test3.__module__ = 'Module2'
        sys.modules['Module'] = Module1
        sys.modules['Module2'] = Module2

        first = unittest.TestSuite((Test1('testOne'),))
        second = unittest.TestSuite((Test1('testTwo'),))
        third = unittest.TestSuite((Test2('testOne'),))
        fourth = unittest.TestSuite((Test2('testTwo'),))
        fifth = unittest.TestSuite((Test3('testOne'),))
        sixth = unittest.TestSuite((Test3('testTwo'),))
        suite = unittest.TestSuite((first, second, third, fourth, fifth, sixth))

        runner = self.getRunner()
        result = runner.run(suite)
        self.assertEqual(result.testsRun, 6)
        self.assertEqual(len(result.errors), 0)

        self.assertEqual(results,
                         ['Module1.setUpModule', 'setup 1',
                          'Test1.testOne', 'Test1.testTwo', 'teardown 1',
                          'setup 2', 'Test2.testOne', 'Test2.testTwo',
                          'teardown 2', 'Module1.tearDownModule',
                          'Module2.setUpModule', 'setup 3',
                          'Test3.testOne', 'Test3.testTwo',
                          'teardown 3', 'Module2.tearDownModule'])

    def test_setup_module(self):
        # setUpModule runs once for all tests in the module.
        class Module(object):
            moduleSetup = 0
            @staticmethod
            def setUpModule():
                Module.moduleSetup += 1

        class Test(unittest.TestCase):
            def test_one(self):
                pass
            def test_two(self):
                pass
        Test.__module__ = 'Module'
        sys.modules['Module'] = Module

        result = self.runTests(Test)
        self.assertEqual(Module.moduleSetup, 1)
        self.assertEqual(result.testsRun, 2)
        self.assertEqual(len(result.errors), 0)

    def test_error_in_setup_module(self):
        # A failing setUpModule suppresses the tests, the class hooks, and
        # tearDownModule, and is reported as a single error.
        class Module(object):
            moduleSetup = 0
            moduleTornDown = 0
            @staticmethod
            def setUpModule():
                Module.moduleSetup += 1
                raise TypeError('foo')
            @staticmethod
            def tearDownModule():
                Module.moduleTornDown += 1

        class Test(unittest.TestCase):
            classSetUp = False
            classTornDown = False
            @classmethod
            def setUpClass(cls):
                Test.classSetUp = True
            @classmethod
            def tearDownClass(cls):
                Test.classTornDown = True
            def test_one(self):
                pass
            def test_two(self):
                pass

        class Test2(unittest.TestCase):
            def test_one(self):
                pass
            def test_two(self):
                pass
        Test.__module__ = 'Module'
        Test2.__module__ = 'Module'
        sys.modules['Module'] = Module

        result = self.runTests(Test, Test2)
        self.assertEqual(Module.moduleSetup, 1)
        self.assertEqual(Module.moduleTornDown, 0)
        self.assertEqual(result.testsRun, 0)
        self.assertFalse(Test.classSetUp)
        self.assertFalse(Test.classTornDown)
        self.assertEqual(len(result.errors), 1)
        error, _ = result.errors[0]
        self.assertEqual(str(error), 'setUpModule (Module)')

    def test_testcase_with_missing_module(self):
        # Tests whose module is absent from sys.modules still run.
        class Test(unittest.TestCase):
            def test_one(self):
                pass
            def test_two(self):
                pass
        Test.__module__ = 'Module'
        sys.modules.pop('Module', None)

        result = self.runTests(Test)
        self.assertEqual(result.testsRun, 2)

    def test_teardown_module(self):
        # tearDownModule runs once after all tests in the module.
        class Module(object):
            moduleTornDown = 0
            @staticmethod
            def tearDownModule():
                Module.moduleTornDown += 1

        class Test(unittest.TestCase):
            def test_one(self):
                pass
            def test_two(self):
                pass
        Test.__module__ = 'Module'
        sys.modules['Module'] = Module

        result = self.runTests(Test)
        self.assertEqual(Module.moduleTornDown, 1)
        self.assertEqual(result.testsRun, 2)
        self.assertEqual(len(result.errors), 0)

    def test_error_in_teardown_module(self):
        # A failing tearDownModule does not prevent tests or class hooks
        # from running; it is reported as one error.
        class Module(object):
            moduleTornDown = 0
            @staticmethod
            def tearDownModule():
                Module.moduleTornDown += 1
                raise TypeError('foo')

        class Test(unittest.TestCase):
            classSetUp = False
            classTornDown = False
            @classmethod
            def setUpClass(cls):
                Test.classSetUp = True
            @classmethod
            def tearDownClass(cls):
                Test.classTornDown = True
            def test_one(self):
                pass
            def test_two(self):
                pass

        class Test2(unittest.TestCase):
            def test_one(self):
                pass
            def test_two(self):
                pass
        Test.__module__ = 'Module'
        Test2.__module__ = 'Module'
        sys.modules['Module'] = Module

        result = self.runTests(Test, Test2)
        self.assertEqual(Module.moduleTornDown, 1)
        self.assertEqual(result.testsRun, 4)
        self.assertTrue(Test.classSetUp)
        self.assertTrue(Test.classTornDown)
        self.assertEqual(len(result.errors), 1)
        error, _ = result.errors[0]
        self.assertEqual(str(error), 'tearDownModule (Module)')

    def test_skiptest_in_setupclass(self):
        # SkipTest raised in setUpClass records a skip, not an error.
        class Test(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                raise unittest.SkipTest('foo')
            def test_one(self):
                pass
            def test_two(self):
                pass

        result = self.runTests(Test)
        self.assertEqual(result.testsRun, 0)
        self.assertEqual(len(result.errors), 0)
        self.assertEqual(len(result.skipped), 1)
        skipped = result.skipped[0][0]
        self.assertEqual(str(skipped), 'setUpClass (%s.Test)' % __name__)

    def test_skiptest_in_setupmodule(self):
        # SkipTest raised in setUpModule records a skip, not an error.
        class Test(unittest.TestCase):
            def test_one(self):
                pass
            def test_two(self):
                pass

        class Module(object):
            @staticmethod
            def setUpModule():
                raise unittest.SkipTest('foo')

        Test.__module__ = 'Module'
        sys.modules['Module'] = Module

        result = self.runTests(Test)
        self.assertEqual(result.testsRun, 0)
        self.assertEqual(len(result.errors), 0)
        self.assertEqual(len(result.skipped), 1)
        skipped = result.skipped[0][0]
        self.assertEqual(str(skipped), 'setUpModule (Module)')

    def test_suite_debug_executes_setups_and_teardowns(self):
        # TestSuite.debug() must honour all four fixture hooks, in order.
        ordering = []

        class Module(object):
            @staticmethod
            def setUpModule():
                ordering.append('setUpModule')
            @staticmethod
            def tearDownModule():
                ordering.append('tearDownModule')

        class Test(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                ordering.append('setUpClass')
            @classmethod
            def tearDownClass(cls):
                ordering.append('tearDownClass')
            def test_something(self):
                ordering.append('test_something')

        Test.__module__ = 'Module'
        sys.modules['Module'] = Module

        suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test)
        suite.debug()
        expectedOrder = ['setUpModule', 'setUpClass', 'test_something', 'tearDownClass', 'tearDownModule']
        self.assertEqual(ordering, expectedOrder)

    def test_suite_debug_propagates_exceptions(self):
        # In debug() mode, fixture/test exceptions propagate to the caller
        # instead of being recorded in a result; `phase` selects which hook
        # raises on each run.
        class Module(object):
            @staticmethod
            def setUpModule():
                if phase == 0:
                    raise Exception('setUpModule')
            @staticmethod
            def tearDownModule():
                if phase == 1:
                    raise Exception('tearDownModule')

        class Test(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                if phase == 2:
                    raise Exception('setUpClass')
            @classmethod
            def tearDownClass(cls):
                if phase == 3:
                    raise Exception('tearDownClass')
            def test_something(self):
                if phase == 4:
                    raise Exception('test_something')

        Test.__module__ = 'Module'
        sys.modules['Module'] = Module

        _suite = unittest.defaultTestLoader.loadTestsFromTestCase(Test)
        suite = unittest.TestSuite()
        suite.addTest(_suite)
        messages = ('setUpModule', 'tearDownModule', 'setUpClass', 'tearDownClass', 'test_something')
        for phase, msg in enumerate(messages):
            with self.assertRaisesRegex(Exception, msg):
                suite.debug()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
tpaszkowski/quantum | quantum/openstack/common/uuidutils.py | 159 | 1106 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
UUID related utilities and helper functions.
"""
import uuid
def generate_uuid():
    """Create a new random (version 4) UUID in canonical string form."""
    fresh = uuid.uuid4()
    return str(fresh)
def is_uuid_like(val):
    """Returns validation of a value as a UUID.

    For our purposes, a UUID is a canonical form string:
    aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
    """
    try:
        parsed = uuid.UUID(val)
    except (TypeError, ValueError, AttributeError):
        # Not parseable as a UUID at all (wrong type or malformed string).
        return False
    # Round-trip must reproduce the input exactly: only the canonical
    # lowercase hyphenated form qualifies.
    return str(parsed) == val
| apache-2.0 |
Urvik08/ns3-gpcr | src/bridge/bindings/modulegen__gcc_ILP32.py | 28 | 168219 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Report wrapper-generation failures as warnings instead of aborting."""

    def handle_error(self, wrapper, exception, traceback_):
        # Returning True tells pybindgen the error has been handled, so
        # code generation continues with the remaining wrappers.
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        return True
import sys
def module_init():
    """Create and return the root pybindgen Module for the ns-3 bridge bindings."""
    bridge_root = Module('ns.bridge', cpp_namespace='::ns3')
    return bridge_root
def register_types(module):
    """Register every wrapped C++ type for the ns-3 'bridge' Python bindings.

    pybindgen auto-generated scaffolding: declares classes/enums (mostly
    re-imported from the 'core' and 'network' binding modules) before any
    methods are registered. Edit the binding generator, not this file.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## bridge-helper.h (module 'bridge'): ns3::BridgeHelper [class]
    module.add_class('BridgeHelper')
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    module.add_class('Mac48Address', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
    module.add_class('NetDeviceContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory', import_from_module='ns.core')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## channel.h (module 'network'): ns3::Channel [class]
    module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
    module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
    module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## nstime.h (module 'core'): ns3::TimeChecker [class]
    module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## bridge-channel.h (module 'bridge'): ns3::BridgeChannel [class]
    module.add_class('BridgeChannel', parent=root_module['ns3::Channel'])
    ## bridge-net-device.h (module 'bridge'): ns3::BridgeNetDevice [class]
    module.add_class('BridgeNetDevice', parent=root_module['ns3::NetDevice'])
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types living in the ns3::FatalImpl namespace (currently none)."""
    root_module = module.get_root()
def register_methods(root_module):
    """Register the method wrappers for every type declared in register_types.

    pybindgen auto-generated dispatch: one register_* call per wrapped class,
    in dependency order.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3BridgeHelper_methods(root_module, root_module['ns3::BridgeHelper'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3BridgeChannel_methods(root_module, root_module['ns3::BridgeChannel'])
    register_Ns3BridgeNetDevice_methods(root_module, root_module['ns3::BridgeNetDevice'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register operators, constructors and methods of ns3::Address."""
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register constructors and methods of ns3::AttributeConstructionList."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End',
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                   [],
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register constructors and public attributes of ns3::AttributeConstructionList::Item."""
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return
def register_Ns3BridgeHelper_methods(root_module, cls):
    """Register constructors and methods of ns3::BridgeHelper."""
    ## bridge-helper.h (module 'bridge'): ns3::BridgeHelper::BridgeHelper(ns3::BridgeHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::BridgeHelper const &', 'arg0')])
    ## bridge-helper.h (module 'bridge'): ns3::BridgeHelper::BridgeHelper() [constructor]
    cls.add_constructor([])
    ## bridge-helper.h (module 'bridge'): ns3::NetDeviceContainer ns3::BridgeHelper::Install(ns3::Ptr<ns3::Node> node, ns3::NetDeviceContainer c) [member function]
    cls.add_method('Install',
                   'ns3::NetDeviceContainer',
                   [param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::NetDeviceContainer', 'c')])
    ## bridge-helper.h (module 'bridge'): ns3::NetDeviceContainer ns3::BridgeHelper::Install(std::string nodeName, ns3::NetDeviceContainer c) [member function]
    cls.add_method('Install',
                   'ns3::NetDeviceContainer',
                   [param('std::string', 'nodeName'), param('ns3::NetDeviceContainer', 'c')])
    ## bridge-helper.h (module 'bridge'): void ns3::BridgeHelper::SetDeviceAttribute(std::string n1, ns3::AttributeValue const & v1) [member function]
    cls.add_method('SetDeviceAttribute',
                   'void',
                   [param('std::string', 'n1'), param('ns3::AttributeValue const &', 'v1')])
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register constructors and methods of ns3::CallbackBase."""
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl',
                   'ns3::Ptr< ns3::CallbackImplBase >',
                   [],
                   is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    ## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register operators, constructors and methods of ns3::Ipv4Address."""
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
    cls.add_constructor([param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('CombineMask',
                   'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize',
                   'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
    cls.add_method('Get',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
    cls.add_method('GetAny',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('GetSubnetDirectedBroadcast',
                   'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv4Address const &', 'other')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
    cls.add_method('IsLocalMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('IsSubnetDirectedBroadcast',
                   'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('char const *', 'address')])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Comparison (!=, ==) and ostream-output operators.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from raw uint32_t, from C-string mask.
    ctor([param('ns3::Ipv4Mask const &', 'arg0')])
    ctor([])
    ctor([param('uint32_t', 'mask')])
    ctor([param('char const *', 'mask')])
    # Accessors and well-known mask values (statics).
    add('Get', 'uint32_t', [], is_const=True)
    add('GetInverse', 'uint32_t', [], is_const=True)
    add('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    add('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    add('GetPrefixLength', 'uint16_t', [], is_const=True)
    add('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    # Predicates, printing, and mutation.
    add('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True)
    add('IsMatch', 'bool', [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')], is_const=True)
    add('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    add('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Ordering (<), comparison (!=, ==), and ostream-output operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, from C-string, from raw bytes, copy, from pointer.
    ctor([])
    ctor([param('char const *', 'address')])
    ctor([param('uint8_t *', 'address')])
    ctor([param('ns3::Ipv6Address const &', 'addr')])
    ctor([param('ns3::Ipv6Address const *', 'addr')])
    # Prefix combination and Address conversion helpers.
    add('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')])
    add('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], is_static=True)
    add('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], is_static=True)
    # Well-known addresses (statics).
    add('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True)
    add('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True)
    add('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True)
    add('GetAny', 'ns3::Ipv6Address', [], is_static=True)
    # Byte access and IPv4-mapped extraction.
    add('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    add('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    add('GetLoopback', 'ns3::Ipv6Address', [], is_static=True)
    add('GetOnes', 'ns3::Ipv6Address', [], is_static=True)
    add('GetZero', 'ns3::Ipv6Address', [], is_static=True)
    # Classification predicates.
    add('IsAllHostsMulticast', 'bool', [], is_const=True)
    add('IsAllNodesMulticast', 'bool', [], is_const=True)
    add('IsAllRoutersMulticast', 'bool', [], is_const=True)
    add('IsAny', 'bool', [], is_const=True)
    add('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    # NOTE: IsIpv4MappedAddress is non-const in the scanned C++ header.
    add('IsIpv4MappedAddress', 'bool', [])
    add('IsLinkLocal', 'bool', [], is_const=True)
    add('IsLinkLocalMulticast', 'bool', [], is_const=True)
    add('IsLocalhost', 'bool', [], is_const=True)
    add('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    add('IsMulticast', 'bool', [], is_const=True)
    add('IsSolicitedMulticast', 'bool', [], is_const=True)
    # Static address-construction helpers.
    add('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True)
    add('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'mac')], is_static=True)
    add('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], is_static=True)
    add('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], is_static=True)
    # Printing, serialization, and mutation.
    add('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    add('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    add('Set', 'void', [param('char const *', 'address')])
    add('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Comparison (!=, ==) and ostream-output operators.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, raw bytes, C-string, prefix length, copy, pointer.
    ctor([])
    ctor([param('uint8_t *', 'prefix')])
    ctor([param('char const *', 'prefix')])
    ctor([param('uint8_t', 'prefix')])
    ctor([param('ns3::Ipv6Prefix const &', 'prefix')])
    ctor([param('ns3::Ipv6Prefix const *', 'prefix')])
    # Byte access and well-known prefixes (statics).
    add('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    add('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True)
    add('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True)
    add('GetPrefixLength', 'uint8_t', [], is_const=True)
    add('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    # Predicates and printing.
    add('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    add('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], is_const=True)
    add('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Register Python bindings for ns3::Mac48Address (mac48-address.h, module 'network')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Ordering (<), comparison (!=, ==), and ostream-output operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from "xx:xx:xx:xx:xx:xx" string.
    ctor([param('ns3::Mac48Address const &', 'arg0')])
    ctor([])
    ctor([param('char const *', 'str')])
    # Allocation and Address conversion (statics).
    add('Allocate', 'ns3::Mac48Address', [], is_static=True)
    add('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], is_static=True)
    # Raw-buffer transfer.
    add('CopyFrom', 'void', [param('uint8_t const *', 'buffer')])
    add('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True)
    # Well-known and derived multicast addresses (statics).
    add('GetBroadcast', 'ns3::Mac48Address', [], is_static=True)
    add('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], is_static=True)
    add('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], is_static=True)
    add('GetMulticast6Prefix', 'ns3::Mac48Address', [], is_static=True)
    add('GetMulticastPrefix', 'ns3::Mac48Address', [], is_static=True)
    # Classification predicates.
    add('IsBroadcast', 'bool', [], is_const=True)
    add('IsGroup', 'bool', [], is_const=True)
    add('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
    """Register Python bindings for ns3::NetDeviceContainer (net-device-container.h, module 'network')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Constructors: copy, default, single device, device by name, concatenation.
    ctor([param('ns3::NetDeviceContainer const &', 'arg0')])
    ctor([])
    ctor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
    ctor([param('std::string', 'devName')])
    ctor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
    # Add overloads: whole container, single device, device by name.
    add('Add', 'void', [param('ns3::NetDeviceContainer', 'other')])
    add('Add', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    add('Add', 'void', [param('std::string', 'deviceName')])
    # Const iteration and element access over the underlying vector.
    add('Begin', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >', [], is_const=True)
    add('End', '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >', [], is_const=True)
    add('Get', 'ns3::Ptr< ns3::NetDevice >', [param('uint32_t', 'i')], is_const=True)
    add('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectBase (object-base.h, module 'core')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Constructors: default and copy.
    ctor([])
    ctor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute access (by-reference output value for the getter forms).
    add('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    add('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')], is_const=True)
    # Runtime type identification; GetInstanceTypeId is pure virtual in C++.
    add('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    add('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Attribute mutation (throwing and fail-safe variants).
    add('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    add('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Trace-source connection management.
    add('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    add('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    add('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    add('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks exposed to subclasses.
    add('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    add('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectDeleter (object.h, module 'core')."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # Static deletion hook used by SimpleRefCount to destroy Objects.
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectFactory (object-factory.h, module 'core')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Ostream-output operator.
    cls.add_output_stream_operator()
    # Constructors: copy, default, from TypeId name string.
    ctor([param('ns3::ObjectFactory const &', 'arg0')])
    ctor([])
    ctor([param('std::string', 'typeId')])
    # Instantiation and configured-TypeId access.
    add('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    add('GetTypeId', 'ns3::TypeId', [], is_const=True)
    # Attribute configuration for objects created by this factory.
    add('Set', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # SetTypeId overloads: TypeId object, C-string name, std::string name.
    add('SetTypeId', 'void', [param('ns3::TypeId', 'tid')])
    add('SetTypeId', 'void', [param('char const *', 'tid')])
    add('SetTypeId', 'void', [param('std::string', 'tid')])
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register Python bindings for the SimpleRefCount<Object, ObjectBase, ObjectDeleter> instantiation (simple-ref-count.h, module 'core')."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    # Static cleanup hook for the reference-counting machinery.
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register Python bindings for ns3::TagBuffer (tag-buffer.h, module 'network')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Constructors: copy, and from a [start, end) byte range.
    ctor([param('ns3::TagBuffer const &', 'arg0')])
    ctor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    # Buffer-to-buffer copy and bulk read.
    add('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    add('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    # Typed readers.
    add('ReadDouble', 'double', [])
    add('ReadU16', 'uint16_t', [])
    add('ReadU32', 'uint32_t', [])
    add('ReadU64', 'uint64_t', [])
    add('ReadU8', 'uint8_t', [])
    # Shrink the writable region from the end.
    add('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # Bulk write and typed writers.
    add('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    add('WriteDouble', 'void', [param('double', 'v')])
    add('WriteU16', 'void', [param('uint16_t', 'data')])
    add('WriteU32', 'void', [param('uint32_t', 'data')])
    add('WriteU64', 'void', [param('uint64_t', 'v')])
    add('WriteU8', 'void', [param('uint8_t', 'v')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId (type-id.h, module 'core')."""
    ctor = cls.add_constructor
    add = cls.add_method
    # Ordering (<), comparison (!=, ==), and ostream-output operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: from name, default, copy.
    ctor([param('char const *', 'name')])
    ctor([])
    ctor([param('ns3::TypeId const &', 'o')])
    # Attribute/trace-source registration (fluent: each returns TypeId).
    add('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    add('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    add('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
    # Attribute introspection.
    add('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True)
    add('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True)
    add('GetAttributeN', 'uint32_t', [], is_const=True)
    # Constructor callback and basic metadata.
    add('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    add('GetGroupName', 'std::string', [], is_const=True)
    add('GetName', 'std::string', [], is_const=True)
    add('GetParent', 'ns3::TypeId', [], is_const=True)
    # Global TypeId registry access (statics).
    add('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True)
    add('GetRegisteredN', 'uint32_t', [], is_static=True)
    # Trace-source introspection.
    add('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True)
    add('GetTraceSourceN', 'uint32_t', [], is_const=True)
    add('GetUid', 'uint16_t', [], is_const=True)
    # Predicates and documentation control.
    add('HasConstructor', 'bool', [], is_const=True)
    add('HasParent', 'bool', [], is_const=True)
    add('HideFromDocumentation', 'ns3::TypeId', [])
    add('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    # Lookups by name; the output AttributeInformation pointer is caller-owned.
    add('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    add('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    add('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    add('MustHideFromDocumentation', 'bool', [], is_const=True)
    # Mutators.
    add('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    add('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    add('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    add('SetUid', 'void', [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register constructors and public data members of ns3::TypeId::AttributeInformation."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Each public field of the C++ struct is exposed as a mutable instance attribute.
    for field_name, cpp_type in (
        ('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >'),
        ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
        ('flags', 'uint32_t'),
        ('help', 'std::string'),
        ('initialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
        ('name', 'std::string'),
        ('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
    ):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register constructors and public data members of ns3::TypeId::TraceSourceInformation."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Each public field of the C++ struct is exposed as a mutable instance attribute.
    for field_name, cpp_type in (
        ('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >'),
        ('help', 'std::string'),
        ('name', 'std::string'),
    ):
        cls.add_instance_attribute(field_name, cpp_type, is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register the default and copy constructors of ns3::empty."""
    for ctor_args in ([], [param('ns3::empty const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register operators, constructors and member functions of ns3::int64x64_t."""
    i64x64 = root_module['ns3::int64x64_t']
    # C++ scalar types accepted on the right-hand side of the binary
    # arithmetic operators, in the order the bindings scanner emitted them.
    scalar_types = ('long long unsigned int const', 'long unsigned int const',
                    'unsigned int const', 'short unsigned int const',
                    'unsigned char const', 'long long int const',
                    'long int const', 'int const', 'short int const',
                    'signed char const', 'double const')
    # '*' and '+' accept every scalar plus another int64x64_t.
    for arith_op in ('*', '+'):
        for rhs_type in scalar_types + ('ns3::int64x64_t const &',):
            cls.add_binary_numeric_operator(arith_op, i64x64, i64x64, param(rhs_type, 'right'))
    # Binary '-' over the scalars, then unary negation, then '-' over
    # int64x64_t; this interleaving matches the generated registration order.
    for rhs_type in scalar_types:
        cls.add_binary_numeric_operator('-', i64x64, i64x64, param(rhs_type, 'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('-', i64x64, i64x64, param('ns3::int64x64_t const &', 'right'))
    # '/' accepts every scalar plus another int64x64_t.
    for rhs_type in scalar_types + ('ns3::int64x64_t const &',):
        cls.add_binary_numeric_operator('/', i64x64, i64x64, param(rhs_type, 'right'))
    # Comparison, in-place and stream-output operators.
    for cmp_op in ('<', '>', '!='):
        cls.add_binary_comparison_operator(cmp_op)
    for inplace_op in ('*=', '+=', '-=', '/='):
        cls.add_inplace_numeric_operator(inplace_op, param('ns3::int64x64_t const &', 'right'))
    cls.add_output_stream_operator()
    for cmp_op in ('<=', '==', '>='):
        cls.add_binary_comparison_operator(cmp_op)
    # Constructors: default, one per numeric type, (hi, lo) pair, and copy.
    cls.add_constructor([])
    for numeric_type in ('double', 'int', 'long int', 'long long int',
                         'unsigned int', 'long unsigned int',
                         'long long unsigned int'):
        cls.add_constructor([param(numeric_type, 'v')])
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    # Const accessors.
    for getter, return_type in (('GetDouble', 'double'),
                                ('GetHigh', 'int64_t'),
                                ('GetLow', 'uint64_t')):
        cls.add_method(getter, return_type, [], is_const=True)
    # Static and mutating helpers.
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True)
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    return
def register_Ns3Object_methods(root_module, cls):
    """Register constructors and member functions of ns3::Object."""
    # Public default constructor.
    cls.add_constructor([])
    # Public member functions.
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Start', 'void', [])
    # The copy constructor is protected in the C++ class.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    # Protected virtual hooks, all with the same void() signature.
    for hook_name in ('DoDispose', 'DoStart', 'NotifyNewAggregate'):
        cls.add_method(hook_name, 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register constructors and member functions of ns3::Object::AggregateIterator."""
    # Copy constructor, then default constructor (generated order).
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # Iteration protocol: HasNext() is const, Next() advances the iterator.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeAccessor, ...> template instantiation."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeChecker, ...> template instantiation."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::AttributeValue, ...> template instantiation."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::CallbackImplBase, ...> template instantiation."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register the SimpleRefCount<ns3::TraceSourceAccessor, ...> template instantiation."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register operators, constructors and member functions of ns3::Time."""
    time_cls = root_module['ns3::Time']
    # Binary arithmetic between two Time values.
    for arith_op in ('+', '-'):
        cls.add_binary_numeric_operator(arith_op, time_cls, time_cls, param('ns3::Time const &', 'right'))
    # Comparison, in-place and stream-output operators, in generated order.
    for cmp_op in ('<', '>', '!='):
        cls.add_binary_comparison_operator(cmp_op)
    for inplace_op in ('+=', '-='):
        cls.add_inplace_numeric_operator(inplace_op, param('ns3::Time const &', 'right'))
    cls.add_output_stream_operator()
    for cmp_op in ('<=', '==', '>='):
        cls.add_binary_comparison_operator(cmp_op)
    # Constructors: default, copy, one per numeric type, string, int64x64_t.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    for numeric_type in ('double', 'int', 'long int', 'long long int',
                         'unsigned int', 'long unsigned int',
                         'long long unsigned int'):
        cls.add_constructor([param(numeric_type, 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
    # Three-way comparison helper.
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    # Static factory functions (two 'From' overloads).
    cls.add_method('From', 'ns3::Time',
                   [param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    cls.add_method('FromDouble', 'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    cls.add_method('FromInteger', 'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    # Const getters; the int64_t ones share a signature.
    cls.add_method('GetDouble', 'double', [], is_const=True)
    for int_getter in ('GetFemtoSeconds', 'GetInteger', 'GetMicroSeconds',
                       'GetMilliSeconds', 'GetNanoSeconds', 'GetPicoSeconds'):
        cls.add_method(int_getter, 'int64_t', [], is_const=True)
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    # Sign/zero predicates, all const bool().
    for predicate in ('IsNegative', 'IsPositive', 'IsStrictlyNegative',
                      'IsStrictlyPositive', 'IsZero'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    # Resolution setter and unit-conversion helpers.
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register constructors and member functions of ns3::TraceSourceAccessor."""
    # Copy constructor, then default constructor (generated order).
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    cls.add_constructor([])
    # The four (dis)connect variants share a shape: bool(obj[, context], cb),
    # all pure virtual const; the "-WithoutContext" ones omit the context string.
    for method_name, takes_context in (('Connect', True),
                                       ('ConnectWithoutContext', False),
                                       ('Disconnect', True),
                                       ('DisconnectWithoutContext', False)):
        arg_list = [param('ns3::ObjectBase *', 'obj', transfer_ownership=False)]
        if takes_context:
            arg_list.append(param('std::string', 'context'))
        arg_list.append(param('ns3::CallbackBase const &', 'cb'))
        cls.add_method(method_name, 'bool', arg_list,
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register constructors and member functions of ns3::AttributeAccessor."""
    # Copy constructor, then default constructor (generated order).
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    # All member functions below are pure virtual const in the C++ class.
    cls.add_method('Get', 'bool',
                   [param('ns3::ObjectBase const *', 'object'),
                    param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    for capability_query in ('HasGetter', 'HasSetter'):
        cls.add_method(capability_query, 'bool', [],
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False),
                    param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register constructors and member functions of ns3::AttributeChecker."""
    # Copy constructor, then default constructor (generated order).
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    # Flags shared by every pure-virtual const member below.
    pure_const = dict(is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')], **pure_const)
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'source'),
                    param('ns3::AttributeValue &', 'destination')],
                   **pure_const)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [], **pure_const)
    # CreateValidValue is a concrete (non-virtual) const member.
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    cls.add_method('GetUnderlyingTypeInformation', 'std::string', [], **pure_const)
    cls.add_method('GetValueTypeName', 'std::string', [], **pure_const)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [], **pure_const)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Bind ns3::AttributeValue (attribute.h, module 'core'): the abstract
    base for attribute values -- copy/default constructors plus the
    pure-virtual Copy/serialization interface."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Copy constructor, then the default constructor (registration order
    # is preserved for overload resolution).
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [pure virtual]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [pure virtual]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [pure virtual]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Bind ns3::CallbackChecker (callback.h, module 'core'): default and
    copy constructors only."""
    cls.add_constructor([])  # CallbackChecker()
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Bind ns3::CallbackImplBase (callback.h, module 'core'): constructors
    plus the pure-virtual IsEqual comparison."""
    cls.add_constructor([])  # default
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])  # copy ctor
    # bool IsEqual(Ptr<const CallbackImplBase>) const [pure virtual]
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Bind ns3::CallbackValue (callback.h, module 'core'): constructors,
    the AttributeValue virtual interface, and Set."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: copy, default, then from an ns3::CallbackBase
    # (registration order preserved).
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::CallbackBase) -- note: parameter is taken by value
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3Channel_methods(root_module, cls):
    """Bind ns3::Channel (channel.h, module 'network'): constructors, the
    device-enumeration interface, and the TypeId accessor."""
    # Copy constructor first, then the default constructor
    # (registration order preserved).
    cls.add_constructor([param('ns3::Channel const &', 'arg0')])
    cls.add_constructor([])
    # ns3::Ptr<ns3::NetDevice> GetDevice(uint32_t) const [pure virtual]
    cls.add_method('GetDevice', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # uint32_t GetId() const
    cls.add_method('GetId', 'uint32_t', [], is_const=True)
    # uint32_t GetNDevices() const [pure virtual]
    cls.add_method('GetNDevices', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Bind ns3::EmptyAttributeValue (attribute.h, module 'core'). Its
    AttributeValue overrides are registered with private visibility."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])  # copy ctor
    cls.add_constructor([])  # default
    # ns3::Ptr<ns3::AttributeValue> Copy() const [private override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, visibility='private', is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [private override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   visibility='private', is_virtual=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [private override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Bind ns3::Ipv4AddressChecker (ipv4-address.h, module 'network'):
    default and copy constructors only."""
    cls.add_constructor([])  # Ipv4AddressChecker()
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Bind ns3::Ipv4AddressValue (ipv4-address.h, module 'network'):
    constructors, the AttributeValue virtual interface, and the typed
    Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Ipv4Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Ipv4Address Get() const
    cls.add_method('Get', 'ns3::Ipv4Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv4Address const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Bind ns3::Ipv4MaskChecker (ipv4-address.h, module 'network'):
    default and copy constructors only."""
    cls.add_constructor([])  # Ipv4MaskChecker()
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Bind ns3::Ipv4MaskValue (ipv4-address.h, module 'network'):
    constructors, the AttributeValue virtual interface, and the typed
    Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Ipv4Mask.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Ipv4Mask Get() const
    cls.add_method('Get', 'ns3::Ipv4Mask', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv4Mask const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Bind ns3::Ipv6AddressChecker (ipv6-address.h, module 'network'):
    default and copy constructors only."""
    cls.add_constructor([])  # Ipv6AddressChecker()
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Bind ns3::Ipv6AddressValue (ipv6-address.h, module 'network'):
    constructors, the AttributeValue virtual interface, and the typed
    Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Ipv6Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Ipv6Address Get() const
    cls.add_method('Get', 'ns3::Ipv6Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv6Address const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Bind ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network'):
    default and copy constructors only."""
    cls.add_constructor([])  # Ipv6PrefixChecker()
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Bind ns3::Ipv6PrefixValue (ipv6-address.h, module 'network'):
    constructors, the AttributeValue virtual interface, and the typed
    Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Ipv6Prefix.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Ipv6Prefix Get() const
    cls.add_method('Get', 'ns3::Ipv6Prefix', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Ipv6Prefix const &)
    cls.add_method('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Bind ns3::Mac48AddressChecker (mac48-address.h, module 'network'):
    default and copy constructors only."""
    cls.add_constructor([])  # Mac48AddressChecker()
    cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Bind ns3::Mac48AddressValue (mac48-address.h, module 'network'):
    constructors, the AttributeValue virtual interface, and the typed
    Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Mac48Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Mac48Address Get() const
    cls.add_method('Get', 'ns3::Mac48Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Mac48Address const &)
    cls.add_method('Set', 'void', [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register bindings for ns3::NetDevice (net-device.h, module 'network').

    Registers the constructors and the abstract network-device interface:
    address/MTU/node accessors, capability queries (IsBridge, IsBroadcast,
    IsLinkUp, IsMulticast, IsPointToPoint, NeedsArp, SupportsSendFrom),
    the Send/SendFrom transmit paths, and the receive-callback setters.
    All methods except the static GetTypeId are registered as pure-virtual
    overrides.

    root_module -- the top-level module wrapper being populated.
    cls -- the class wrapper for ns3::NetDevice.
    """
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Bind ns3::ObjectFactoryChecker (object-factory.h, module 'core'):
    default and copy constructors only."""
    cls.add_constructor([])  # ObjectFactoryChecker()
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Bind ns3::ObjectFactoryValue (object-factory.h, module 'core'):
    constructors, the AttributeValue virtual interface, and the typed
    Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::ObjectFactory.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::ObjectFactory Get() const
    cls.add_method('Get', 'ns3::ObjectFactory', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::ObjectFactory const &)
    cls.add_method('Set', 'void', [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3TimeChecker_methods(root_module, cls):
    """Bind ns3::TimeChecker (nstime.h, module 'core'): default and copy
    constructors only."""
    cls.add_constructor([])  # TimeChecker()
    cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Bind ns3::TimeValue (nstime.h, module 'core'): constructors, the
    AttributeValue virtual interface, and the typed Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Time.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Time const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Time Get() const
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Time const &)
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Bind ns3::TypeIdChecker (type-id.h, module 'core'): default and copy
    constructors only."""
    cls.add_constructor([])  # TypeIdChecker()
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Bind ns3::TypeIdValue (type-id.h, module 'core'): constructors, the
    AttributeValue virtual interface, and the typed Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::TypeId.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::TypeId Get() const
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::TypeId const &)
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Bind ns3::AddressChecker (address.h, module 'network'): default and
    copy constructors only."""
    cls.add_constructor([])  # AddressChecker()
    cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])  # copy ctor
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Bind ns3::AddressValue (address.h, module 'network'): constructors,
    the AttributeValue virtual interface, and the typed Get/Set accessors."""
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Constructors: default, copy, and from an ns3::Address.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Address const &', 'value')])
    # ns3::Ptr<ns3::AttributeValue> Copy() const [override]
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string, Ptr<const AttributeChecker>) [override]
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param(checker_type, 'checker')],
                   is_virtual=True)
    # ns3::Address Get() const
    cls.add_method('Get', 'ns3::Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker>) const [override]
    cls.add_method('SerializeToString', 'std::string',
                   [param(checker_type, 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Address const &)
    cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')])
    return
def register_Ns3BridgeChannel_methods(root_module, cls):
    """Register ns3::BridgeChannel constructors and methods on the pybindgen wrapper *cls*.

    NOTE: pybindgen-generated output; regenerate from the ns-3 headers rather
    than editing by hand.
    """
    ## bridge-channel.h (module 'bridge'): static ns3::TypeId ns3::BridgeChannel::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## bridge-channel.h (module 'bridge'): ns3::BridgeChannel::BridgeChannel() [constructor]
    cls.add_constructor([])
    ## bridge-channel.h (module 'bridge'): void ns3::BridgeChannel::AddChannel(ns3::Ptr<ns3::Channel> bridgedChannel) [member function]
    cls.add_method('AddChannel',
                   'void',
                   [param('ns3::Ptr< ns3::Channel >', 'bridgedChannel')])
    ## bridge-channel.h (module 'bridge'): uint32_t ns3::BridgeChannel::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-channel.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeChannel::GetDevice(uint32_t i) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3BridgeNetDevice_methods(root_module, cls):
    """Register ns3::BridgeNetDevice constructors and methods on the pybindgen wrapper *cls*.

    Public NetDevice interface methods are registered first, then the
    protected forwarding/learning helpers (visibility='protected').

    NOTE: pybindgen-generated output; regenerate from the ns-3 headers rather
    than editing by hand.
    """
    ## bridge-net-device.h (module 'bridge'): ns3::BridgeNetDevice::BridgeNetDevice() [constructor]
    cls.add_constructor([])
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::AddBridgePort(ns3::Ptr<ns3::NetDevice> bridgePort) [member function]
    cls.add_method('AddBridgePort',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'bridgePort')])
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::AddLinkChangeCallback(ns3::Callback<void,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeNetDevice::GetBridgePort(uint32_t n) const [member function]
    cls.add_method('GetBridgePort',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'n')],
                   is_const=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::Channel> ns3::BridgeNetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): uint32_t ns3::BridgeNetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): uint16_t ns3::BridgeNetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Address ns3::BridgeNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): uint32_t ns3::BridgeNetDevice::GetNBridgePorts() const [member function]
    cls.add_method('GetNBridgePorts',
                   'uint32_t',
                   [],
                   is_const=True)
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::Node> ns3::BridgeNetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): static ns3::TypeId ns3::BridgeNetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): bool ns3::BridgeNetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ForwardBroadcast(ns3::Ptr<ns3::NetDevice> incomingPort, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address src, ns3::Mac48Address dst) [member function]
    cls.add_method('ForwardBroadcast',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'incomingPort'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dst')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ForwardUnicast(ns3::Ptr<ns3::NetDevice> incomingPort, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Mac48Address src, ns3::Mac48Address dst) [member function]
    cls.add_method('ForwardUnicast',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'incomingPort'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Mac48Address', 'src'), param('ns3::Mac48Address', 'dst')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeNetDevice::GetLearnedState(ns3::Mac48Address source) [member function]
    cls.add_method('GetLearnedState',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('ns3::Mac48Address', 'source')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::Learn(ns3::Mac48Address source, ns3::Ptr<ns3::NetDevice> port) [member function]
    cls.add_method('Learn',
                   'void',
                   [param('ns3::Mac48Address', 'source'), param('ns3::Ptr< ns3::NetDevice >', 'port')],
                   visibility='protected')
    ## bridge-net-device.h (module 'bridge'): void ns3::BridgeNetDevice::ReceiveFromDevice(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<const ns3::Packet> packet, uint16_t protocol, ns3::Address const & source, ns3::Address const & destination, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('ReceiveFromDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'destination'), param('ns3::NetDevice::PacketType', 'packetType')],
                   visibility='protected')
    return
def register_functions(root_module):
    """Register free functions for every registered submodule."""
    register_functions_ns3_FatalImpl(
        root_module.get_submodule('FatalImpl'), root_module)
def register_functions_ns3_FatalImpl(module, root_module):
    """The ns3::FatalImpl namespace exposes no free functions; nothing to do."""
    pass
def main():
    """Build the full module description and emit the binding code to stdout."""
    sink = FileCodeSink(sys.stdout)
    root_module = module_init()
    # Registration order matters: types first, then methods, then functions.
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(sink)
# Script entry point: emit the generated binding code when run directly.
if __name__ == '__main__':
    main()
| gpl-2.0 |
SUSE/kiwi | test/unit/package_manager/init_test.py | 1 | 1854 | from mock import (
patch, Mock
)
from pytest import raises
from kiwi.package_manager import PackageManager
from kiwi.exceptions import KiwiPackageManagerSetupError
class TestPackageManager:
    """Unit tests for the PackageManager factory."""

    def _assert_new_creates(self, mock_manager, package_manager_name):
        """Assert PackageManager.new(repo, name) instantiates the patched
        concrete manager class exactly once with (repository, None).

        Factored out because every per-manager test repeated the same
        three-line Mock/new/assert boilerplate.
        """
        repository = Mock()
        PackageManager.new(repository, package_manager_name)
        mock_manager.assert_called_once_with(repository, None)

    def test_package_manager_not_implemented(self):
        # An unknown manager name must raise rather than return None.
        with raises(KiwiPackageManagerSetupError):
            PackageManager.new('repository', 'ms-manager')

    @patch('kiwi.package_manager.zypper.PackageManagerZypper')
    def test_manager_zypper(self, mock_manager):
        self._assert_new_creates(mock_manager, 'zypper')

    @patch('kiwi.package_manager.dnf.PackageManagerDnf')
    def test_manager_dnf(self, mock_manager):
        self._assert_new_creates(mock_manager, 'dnf')

    @patch('kiwi.package_manager.dnf.PackageManagerDnf')
    def test_manager_yum(self, mock_manager):
        # 'yum' is an alias resolved to the dnf implementation.
        self._assert_new_creates(mock_manager, 'yum')

    @patch('kiwi.package_manager.microdnf.PackageManagerMicroDnf')
    def test_manager_microdnf(self, mock_manager):
        self._assert_new_creates(mock_manager, 'microdnf')

    @patch('kiwi.package_manager.apt.PackageManagerApt')
    def test_manager_apt(self, mock_manager):
        self._assert_new_creates(mock_manager, 'apt-get')

    @patch('kiwi.package_manager.pacman.PackageManagerPacman')
    def test_manager_pacman(self, mock_manager):
        self._assert_new_creates(mock_manager, 'pacman')
| gpl-3.0 |
mxrrow/zaicoin | src/deps/boost/libs/python/pyste/dist/create_build.py | 54 | 1668 | # Copyright Bruno da Silva de Oliveira 2006. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import os
import sys
import shutil
import fnmatch
from zipfile import ZipFile, ZIP_DEFLATED
def findfiles(directory, mask):
    """Recursively collect paths under *directory* whose basename matches the
    fnmatch pattern *mask*.

    Fix: the original used ``os.path.walk``, which exists only on Python 2;
    ``os.walk`` works on both Python 2 and 3.  Note that ``os.path.walk``
    handed directory *and* file names to the visitor, so a directory whose
    name matched the mask was previously collected too; matching is now
    restricted to files, which is what the packaging callers expect.
    """
    matches = []
    for dirpath, _dirnames, filenames in os.walk(directory):
        for name in filenames:
            if fnmatch.fnmatch(name, mask):
                matches.append(os.path.join(dirpath, name))
    return matches
def main():
    """Create the Pyste binary distribution zip.

    Runs py2exe, then packs the generated dist/pyste tree plus the
    documentation into pyste-<version>.zip and removes the build artifacts.

    Raises:
        RuntimeError: if the py2exe build exits with a non-zero status.
    """
    # test if PyXML is installed
    try:
        import _xmlplus.parsers.expat
        pyxml = '--includes _xmlplus.parsers.expat'
    except ImportError:
        pyxml = ''
    # create exe
    # NOTE(review): '>&' is csh redirection syntax; under a POSIX /bin/sh this
    # should be '> build.log 2>&1' -- confirm which shell runs the build.
    status = os.system('python setup.py py2exe %s >& build.log' % pyxml)
    if status != 0:
        # Fixed the Python-2-only statement form "raise RuntimeError, '...'".
        raise RuntimeError('Error creating EXE')
    # create distribution
    import pyste
    version = pyste.__VERSION__
    zip = ZipFile('pyste-%s.zip' % version, 'w', ZIP_DEFLATED)
    # include the base files
    dist_dir = 'dist/pyste'
    for basefile in os.listdir(dist_dir):
        zip.write(os.path.join(dist_dir, basefile), os.path.join('pyste', basefile))
    # include documentation (doc_file[3:] strips the leading '../')
    for doc_file in findfiles('../doc', '*.*'):
        dest_name = os.path.join('pyste/doc', doc_file[3:])
        zip.write(doc_file, dest_name)
    zip.write('../index.html', 'pyste/doc/index.html')
    zip.close()
    # cleanup
    os.remove('build.log')
    shutil.rmtree('build')
    shutil.rmtree('dist')
# Script entry point: make the pyste package importable from ../src, then build.
if __name__ == '__main__':
    sys.path.append('../src')
    main()
| mit |
alibbaba/plugin.video.live.streamspro | plugin.video.live.streamspro/resources/lib/resolvers/cloudtime.py | 2 | 1502 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from resources.lib.libraries import client
def resolve(url):
    """Resolve a cloudtime.to page/embed *url* to a direct media URL.

    Returns the playable URL string, or None on any scraping failure
    (best-effort by design: the caller just tries the next resolver).

    Fixes: the bare ``except:`` clauses also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``.  Regex patterns are
    now raw strings so '\\w' / '\\s' no longer trigger invalid-escape warnings.
    """
    try:
        # The video id appears either as a path component or as a ?v= parameter.
        id = re.compile(r'//.+?/.+?/([\w]+)').findall(url)
        id += re.compile(r'//.+?/.+?v=([\w]+)').findall(url)
        id = id[0]

        url = 'http://embed.cloudtime.to/embed.php?v=%s' % id
        result = client.request(url)

        # The flashvars key may be a literal value or a variable name that is
        # assigned elsewhere on the page; resolve the indirection if present.
        key = re.compile(r'flashvars.filekey=(.+?);').findall(result)[-1]
        try:
            key = re.compile(r'\s+%s="(.+?)"' % key).findall(result)[-1]
        except Exception:
            pass

        url = 'http://www.cloudtime.to/api/player.api.php?key=%s&file=%s' % (key, id)
        result = client.request(url)

        url = re.compile(r'url=(.+?)&').findall(result)[0]

        return url
    except Exception:
        # Deliberate best-effort: any failure yields None.
        return
| gpl-2.0 |
solintegra/addons | email_template/__init__.py | 381 | 1144 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2009 Sharoon Thomas
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import email_template
import wizard
import res_partner
import ir_actions
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
AIFDR/inasafe | safe/metadata35/property/boolean_property.py | 6 | 1694 | # -*- coding: utf-8 -*-
"""
InaSAFE Disaster risk assessment tool developed by AusAid -
**metadata module.**
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = '[email protected]'
__revision__ = '$Format:%H$'
__date__ = '08/12/15'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
import json
NoneType = type(None)
from safe.common.exceptions import MetadataCastError
from safe.metadata35.property import BaseProperty
class BooleanProperty(BaseProperty):
    """Metadata property holding a boolean (or unset/None) value."""

    # Keep this list in sync with is_valid and xml_value below.
    _allowed_python_types = [bool, NoneType]

    def __init__(self, name, value, xml_path):
        super(BooleanProperty, self).__init__(
            name, value, xml_path, self._allowed_python_types)

    @classmethod
    def is_valid(cls, value):
        # Every value is accepted here; type enforcement happens through
        # _allowed_python_types in the base class.
        return True

    def cast_from_str(self, value):
        """Parse a serialised boolean ('0'/'1') back into a bool."""
        try:
            as_int = int(value)
        except ValueError as error:
            raise MetadataCastError(error)
        return bool(as_int)

    @property
    def xml_value(self):
        """Serialise for XML: '1'/'0' for bool, '' when the value is unset."""
        kind = self.python_type
        if kind is NoneType:
            return ''
        if kind is bool:
            return str(int(self.value))
        raise RuntimeError('self._allowed_python_types and self.xml_value'
                           'are out of sync. This should never happen')
| gpl-3.0 |
ales-erjavec/scipy | benchmarks/benchmarks/fftpack_basic.py | 46 | 2646 | """ Test functions for fftpack.basic module
"""
from __future__ import division, absolute_import, print_function
from numpy import arange, asarray, zeros, dot, exp, pi, double, cdouble
import numpy.fft
from numpy.random import rand
try:
from scipy.fftpack import ifft, fft, fftn, irfft, rfft
except ImportError:
pass
from .common import Benchmark
def random(size):
    """Uniform random array whose shape is given by the sequence *size*."""
    return rand(*size)
def direct_dft(x):
    """O(n^2) reference DFT used to sanity-check the fast implementations."""
    x = asarray(x)
    n = len(x)
    w = -arange(n) * (2j * pi / n)
    # One dot product per output frequency, collected into a complex array.
    return asarray([dot(exp(k * w), x) for k in range(n)], dtype=cdouble)
def direct_idft(x):
    """O(n^2) reference inverse DFT (conjugate kernel, scaled by 1/n)."""
    x = asarray(x)
    n = len(x)
    w = arange(n) * (2j * pi / n)
    return asarray([dot(exp(k * w), x) / n for k in range(n)], dtype=cdouble)
class Fft(Benchmark):
    """Benchmark 1-D forward/inverse FFT: scipy.fftpack vs numpy.fft."""

    params = [
        [100, 256, 512, 1000, 1024, 2048, 2048 * 2, 2048 * 4],
        ['real', 'cmplx'],
        ['scipy', 'numpy'],
    ]
    param_names = ['size', 'type', 'module']

    def setup(self, size, cmplx, module):
        if cmplx == 'cmplx':
            re_part = random([size]).astype(cdouble)
            im_part = random([size]).astype(cdouble)
            self.x = re_part + im_part * 1j
        else:
            self.x = random([size]).astype(double)

    def time_fft(self, size, cmplx, module):
        transform = numpy.fft.fft if module == 'numpy' else fft
        transform(self.x)

    def time_ifft(self, size, cmplx, module):
        transform = numpy.fft.ifft if module == 'numpy' else ifft
        transform(self.x)
class RFft(Benchmark):
    """Benchmark 1-D real-input FFT: scipy.fftpack vs numpy.fft."""

    params = [
        [100, 256, 512, 1000, 1024, 2048, 2048 * 2, 2048 * 4],
        ['scipy', 'numpy'],
    ]
    param_names = ['size', 'module']

    def setup(self, size, module):
        self.x = random([size]).astype(double)

    def time_rfft(self, size, module):
        (numpy.fft.rfft if module == 'numpy' else rfft)(self.x)

    def time_irfft(self, size, module):
        (numpy.fft.irfft if module == 'numpy' else irfft)(self.x)
class Fftn(Benchmark):
    """Benchmark n-dimensional FFT: scipy.fftpack vs numpy.fft."""

    params = [
        ["100x100", "1000x100", "256x256", "512x512"],
        ['real', 'cmplx'],
        ['scipy', 'numpy']
    ]
    param_names = ['size', 'type', 'module']

    def setup(self, size, cmplx, module):
        # BUG FIX: under Python 3, map() returns a one-shot iterator.  The
        # complex branch below consumes `size` twice, so the second
        # random(size) call saw an exhausted iterator and produced a scalar
        # instead of a 2-D array.  Materialize a list so reuse is safe.
        size = [int(dim) for dim in size.split("x")]
        if cmplx != 'cmplx':
            self.x = random(size).astype(double)
        else:
            self.x = random(size).astype(cdouble) + random(size).astype(cdouble) * 1j

    def time_fftn(self, size, cmplx, module):
        if module == 'numpy':
            numpy.fft.fftn(self.x)
        else:
            fftn(self.x)
| bsd-3-clause |
morenopc/edx-platform | cms/djangoapps/contentstore/views/assets.py | 3 | 11648 | import logging
from functools import partial
import math
import json
from django.http import HttpResponseBadRequest
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django_future.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_POST
from django.conf import settings
from edxmako.shortcuts import render_to_response
from cache_toolbox.core import del_cached_content
from contentstore.utils import reverse_course_url
from xmodule.contentstore.django import contentstore
from xmodule.modulestore.django import modulestore
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import NotFoundError
from django.core.exceptions import PermissionDenied
from xmodule.modulestore.keys import CourseKey, AssetKey
from util.date_utils import get_default_time_display
from util.json_request import JsonResponse
from django.http import HttpResponseNotFound
from django.utils.translation import ugettext as _
from pymongo import ASCENDING, DESCENDING
from .access import has_course_access
from xmodule.modulestore.exceptions import ItemNotFoundError
__all__ = ['assets_handler']
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
def assets_handler(request, course_key_string=None, asset_key_string=None):
    """
    The restful handler for assets.
    It allows retrieval of all the assets (as an HTML page), as well as uploading new assets,
    deleting assets, and changing the "locked" state of an asset.

    GET
        html: return an html page which will show all course assets. Note that only the asset container
            is returned and that the actual assets are filled in with a client-side request.
        json: returns a page of assets. The following parameters are supported:
            page: the desired page of results (defaults to 0)
            page_size: the number of items per page (defaults to 50)
            sort: the asset field to sort by (defaults to "date_added")
            direction: the sort direction (defaults to "descending")
    POST
        json: create (or update?) an asset. The only updating that can be done is changing the lock state.
    PUT
        json: update the locked state of an asset
    DELETE
        json: delete an asset
    """
    course_key = CourseKey.from_string(course_key_string)
    # Access control first: 403 for authenticated users without course rights.
    if not has_course_access(request.user, course_key):
        raise PermissionDenied()

    response_format = request.REQUEST.get('format', 'html')
    # NOTE: the 'application/json' fallback means a missing Accept header is
    # treated as a JSON client.
    if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
        if request.method == 'GET':
            return _assets_json(request, course_key)
        else:
            # Mutating verbs may address one asset (DELETE/PUT) or none (upload POST).
            asset_key = AssetKey.from_string(asset_key_string) if asset_key_string else None
            return _update_asset(request, course_key, asset_key)
    elif request.method == 'GET':  # assume html
        return _asset_index(request, course_key)
    else:
        return HttpResponseNotFound()
def _asset_index(request, course_key):
    """Render the asset-library container page for the course.

    The asset list itself is populated client-side via the JSON endpoint
    exposed as 'asset_callback_url'.
    """
    context = {
        'context_course': modulestore().get_course(course_key),
        'asset_callback_url': reverse_course_url('assets_handler', course_key),
    }
    return render_to_response('asset_index.html', context)
def _assets_json(request, course_key):
    """
    Return one page of course assets as JSON.

    Supported query parameters:
        page: 0-based page index (defaults to 0)
        page_size: items per page (defaults to 50)
        sort: 'date_added' or 'display_name' (defaults to 'date_added')
        direction: 'asc' for ascending; anything else sorts descending
    """
    requested_page = int(request.REQUEST.get('page', 0))
    requested_page_size = int(request.REQUEST.get('page_size', 50))
    requested_sort = request.REQUEST.get('sort', 'date_added')
    sort_direction = DESCENDING
    if request.REQUEST.get('direction', '').lower() == 'asc':
        sort_direction = ASCENDING

    # Convert the field name to the Mongo name
    if requested_sort == 'date_added':
        requested_sort = 'uploadDate'
    elif requested_sort == 'display_name':
        requested_sort = 'displayname'
    sort = [(requested_sort, sort_direction)]

    current_page = max(requested_page, 0)
    start = current_page * requested_page_size
    assets, total_count = _get_assets_for_page(request, course_key, current_page, requested_page_size, sort)
    end = start + len(assets)

    # If the query is beyond the final page, then re-query the final page so that at least one asset is returned
    # NOTE(review): when total_count == 0 this yields a negative page/start --
    # presumably the contentstore tolerates it, but confirm.
    if requested_page > 0 and start >= total_count:
        current_page = int(math.floor((total_count - 1) / requested_page_size))
        start = current_page * requested_page_size
        assets, total_count = _get_assets_for_page(request, course_key, current_page, requested_page_size, sort)
        end = start + len(assets)

    asset_json = []
    for asset in assets:
        asset_id = asset['_id']
        asset_location = StaticContent.compute_location(course_key, asset_id['name'])
        # note, due to the schema change we may not have a 'thumbnail_location' in the result set
        thumbnail_location = asset.get('thumbnail_location', None)
        if thumbnail_location:
            # thumbnail_location[4] -- presumably the 'name' slot of the old
            # location tuple representation; verify against the schema.
            thumbnail_location = course_key.make_asset_key('thumbnail', thumbnail_location[4])
        asset_locked = asset.get('locked', False)
        asset_json.append(_get_asset_json(asset['displayname'], asset['uploadDate'], asset_location, thumbnail_location, asset_locked))

    return JsonResponse({
        'start': start,
        'end': end,
        'page': current_page,
        'pageSize': requested_page_size,
        'totalCount': total_count,
        'assets': asset_json,
        'sort': requested_sort,
    })
def _get_assets_for_page(request, course_key, current_page, page_size, sort):
    """Fetch one page of course assets plus the course-wide total count."""
    first_index = current_page * page_size
    return contentstore().get_all_content_for_course(
        course_key,
        start=first_index,
        maxresults=page_size,
        sort=sort,
    )
@require_POST
@ensure_csrf_cookie
@login_required
def _upload_asset(request, course_key):
    '''
    This method allows for POST uploading of files into the course asset
    library, which will be supported by GridFS in MongoDB.

    Returns a JsonResponse describing the stored asset, or 400 when the
    course does not exist.
    '''
    # Does the course actually exist?!? Get anything from it to prove its
    # existence
    try:
        modulestore().get_course(course_key)
    except ItemNotFoundError:
        # no return it as a Bad Request response
        logging.error("Could not find course: %s", course_key)
        return HttpResponseBadRequest()

    # compute a 'filename' which is similar to the location formatting, we're
    # using the 'filename' nomenclature since we're using a FileSystem paradigm
    # here. We're just imposing the Location string formatting expectations to
    # keep things a bit more consistent
    upload_file = request.FILES['file']
    filename = upload_file.name
    mime_type = upload_file.content_type

    content_loc = StaticContent.compute_location(course_key, filename)

    chunked = upload_file.multiple_chunks()
    sc_partial = partial(StaticContent, content_loc, filename, mime_type)
    if chunked:
        # NOTE(review): temporary_file_path() is only available when Django
        # buffered the upload to disk, which is what multiple_chunks()
        # indicates here -- confirm upload handler configuration.
        content = sc_partial(upload_file.chunks())
        tempfile_path = upload_file.temporary_file_path()
    else:
        content = sc_partial(upload_file.read())
        tempfile_path = None

    # first let's see if a thumbnail can be created
    (thumbnail_content, thumbnail_location) = contentstore().generate_thumbnail(
        content,
        tempfile_path=tempfile_path
    )

    # delete cached thumbnail even if one couldn't be created this time (else
    # the old thumbnail will continue to show)
    del_cached_content(thumbnail_location)
    # now store thumbnail location only if we could create it
    if thumbnail_content is not None:
        content.thumbnail_location = thumbnail_location

    # then commit the content
    contentstore().save(content)
    del_cached_content(content.location)

    # readback the saved content - we need the database timestamp
    readback = contentstore().find(content.location)
    locked = getattr(content, 'locked', False)
    response_payload = {
        'asset': _get_asset_json(content.name, readback.last_modified_at, content.location, content.thumbnail_location, locked),
        'msg': _('Upload completed')
    }
    return JsonResponse(response_payload)
@require_http_methods(("DELETE", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def _update_asset(request, course_key, asset_key):
    """
    restful CRUD operations for a course asset.
    Currently only DELETE, POST, and PUT methods are implemented.

    asset_path_encoding: the odd /c4x/org/course/category/name repr of the asset (used by Backbone as the id)

    DELETE moves the asset (and its thumbnail) to the 'trashcan' store before
    hard-deleting the originals; POST/PUT either upload a new file or toggle
    the 'locked' attribute of an existing asset.
    """
    if request.method == 'DELETE':
        # Make sure the item to delete actually exists.
        try:
            content = contentstore().find(asset_key)
        except NotFoundError:
            return JsonResponse(status=404)

        # ok, save the content into the trashcan
        contentstore('trashcan').save(content)

        # see if there is a thumbnail as well, if so move that as well
        if content.thumbnail_location is not None:
            # We are ignoring the value of the thumbnail_location-- we only care whether
            # or not a thumbnail has been stored, and we can now easily create the correct path.
            thumbnail_location = course_key.make_asset_key('thumbnail', asset_key.name)
            try:
                thumbnail_content = contentstore().find(thumbnail_location)
                contentstore('trashcan').save(thumbnail_content)
                # hard delete thumbnail from origin
                contentstore().delete(thumbnail_content.get_id())
                # remove from any caching
                del_cached_content(thumbnail_location)
            except Exception:
                # FIX: narrowed from a bare 'except:' so SystemExit and
                # KeyboardInterrupt are no longer swallowed; thumbnail
                # cleanup stays best-effort.
                logging.warning('Could not delete thumbnail: %s', thumbnail_location)

        # delete the original
        contentstore().delete(content.get_id())
        # remove from cache
        del_cached_content(content.location)
        return JsonResponse()

    elif request.method in ('PUT', 'POST'):
        if 'file' in request.FILES:
            return _upload_asset(request, course_key)
        else:
            # Update existing asset
            try:
                modified_asset = json.loads(request.body)
            except ValueError:
                return HttpResponseBadRequest()
            contentstore().set_attr(asset_key, 'locked', modified_asset['locked'])
            # Delete the asset from the cache so we check the lock status the next time it is requested.
            del_cached_content(asset_key)
            return JsonResponse(modified_asset, status=201)
def _get_asset_json(display_name, date, location, thumbnail_location, locked):
    """Build the client-facing JSON dict describing one course asset."""
    asset_url = location.to_deprecated_string()
    thumbnail_url = None
    if thumbnail_location is not None:
        thumbnail_url = thumbnail_location.to_deprecated_string()
    return {
        'display_name': display_name,
        'date_added': get_default_time_display(date),
        'url': asset_url,
        'external_url': settings.LMS_BASE + asset_url,
        'portable_url': StaticContent.get_static_path_from_location(location),
        'thumbnail': thumbnail_url,
        'locked': locked,
        # 'id' drives Backbone's delete/update routing on the client.
        'id': unicode(location)
    }
| agpl-3.0 |
seanandrews/diskpop | phot/priors.py | 1 | 1143 | #
#
#
import numpy as np
import pandas as pd
from scipy.interpolate import interp1d
import matplotlib.pyplot as plt
import sys
# effective temperature prior: propagate a Gaussian prior on spectral type
# through the discretized SpT -> Teff calibration to get p(T), then evaluate
# the prior at the input temperature.

# inputs
Sbar = 60.      # mean of the Gaussian spectral-type prior (index units)
eSbar = 1.      # 1-sigma width of the spectral-type prior
Tinput = 8700.  # temperature at which to evaluate the prior

# load spectral type |-> temperature conversion file
# (np.str was a deprecated alias of the builtin str and is removed in
# numpy >= 1.24; use str directly)
dt = {'ST': str, 'STix': np.float64, 'Teff': np.float64, 'eTeff': np.float64}
a = pd.read_csv('data/adopted_spt-teff.txt', dtype=dt,
                names=['ST', 'STix', 'Teff', 'eTeff'])

# discretized relationship
S_g = np.array(a['STix'])
T_g = np.array(a['Teff'])
eT_g = np.array(a['eTeff'])

# need to interpolate for appropriate integration
tint = interp1d(S_g, T_g)
eint = interp1d(S_g, eT_g)
# num must be an integer: passing the float 10.*len(S_g) raises TypeError on
# modern numpy
S = np.linspace(np.min(S_g), np.max(S_g), num=10 * len(S_g))
T = tint(S)
eT = eint(S)

# calculate p(S)
p_S = np.exp(-0.5 * ((S - Sbar) / eSbar)**2) / (np.sqrt(2. * np.pi) * eSbar)

# now calculate p(T) by marginalizing p(T|S) p(S) over S.
# T and eT above are exactly tint(S) and eint(S) evaluated on the same grid,
# so reuse them instead of re-evaluating both interpolants on every iteration.
p_T = np.zeros_like(T)
for i in range(len(T)):
    p_TS = np.exp(-0.5 * ((T[i] - T) / eT)**2) / (np.sqrt(2. * np.pi) * eT)
    p_T[i] = np.trapz(p_TS * p_S, S)

# create an interpolator for p_T and evaluate the prior at the input temperature
p_tint = interp1d(T, p_T)
prior_T = p_tint(Tinput)
print(prior_T)
| mit |
liama482/Picture | ggame/sysdeps.py | 227 | 1916 | def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
# Pick the graphics/sound backend that matches the runtime environment:
# Brython in a browser (PIXI + buzz via JS bridging), desktop pygame, or a
# headless stub for testing.
if module_exists('browser') and module_exists('javascript'):
    # Running under Brython: wrap the JavaScript PIXI (graphics) and
    # buzz (sound) libraries.
    from browser import window, document
    from javascript import JSObject, JSConstructor
    GFX = JSObject(window.PIXI)
    GFX_Rectangle = JSConstructor(GFX.Rectangle)
    GFX_Texture = JSConstructor(GFX.Texture)
    GFX_Texture_fromImage = JSConstructor(GFX.Texture.fromImage)
    GFX_Sprite = JSConstructor(GFX.Sprite)
    GFX_Graphics = JSConstructor(GFX.Graphics)()
    GFX_Text = JSConstructor(GFX.Text)
    GFX_DetectRenderer = GFX.autoDetectRenderer
    SND = JSObject(window.buzz)
    SND_Sound = JSConstructor(SND.sound)

    class GFX_Window(object):
        """Browser window hosting a PIXI stage and renderer."""

        def __init__(self, width, height, onclose):
            # A zero width/height falls back to 90% of the browser window.
            self._w = window.open("", "")
            self._stage = JSConstructor(GFX.Container)()
            self.width = width if width != 0 else int(window.innerWidth * 0.9)
            self.height = height if height != 0 else int(window.innerHeight * 0.9)
            self._renderer = GFX.autoDetectRenderer(self.width, self.height, {'transparent':True})
            self._w.document.body.appendChild(self._renderer.view)
            self._w.onunload = onclose

        def bind(self, evtspec, callback):
            # Attach a DOM event handler on the window body.
            self._w.document.body.bind(evtspec, callback)

        def add(self, obj):
            self._stage.addChild(obj)

        def remove(self, obj):
            self._stage.removeChild(obj)

        def animate(self, stepcallback):
            # Render the current stage, then schedule the next frame.
            self._renderer.render(self._stage)
            self._w.requestAnimationFrame(stepcallback)

        def destroy(self):
            # Stop all sounds before tearing down the stage.
            SND.all().stop()
            self._stage.destroy()

elif module_exists('pygame'):
    # Desktop: prefer the packaged module, fall back to a flat layout.
    try:
        from ggame.pygamedeps import *
    except:
        from pygamedeps import *
else:
    # No graphics stack available: use the headless test stubs.
    try:
        from ggame.headlessdeps import *
    except:
        from headlessdeps import *
| mit |
youprofit/NewsBlur | utils/munin/newsblur_feed_counts.py | 10 | 2628 | #!/usr/bin/env python
from utils.munin.base import MuninGraph
import redis
class NBMuninGraph(MuninGraph):
    """Munin plugin graphing NewsBlur feed counts."""

    @property
    def graph_config(self):
        # Static munin graph definition: one data series per counter
        # returned by calculate_metrics().
        return {
            'graph_category' : 'NewsBlur',
            'graph_title' : 'NewsBlur Feed Counts',
            'graph_vlabel' : 'Feeds Feed Counts',
            'graph_args' : '-l 0',
            'scheduled_feeds.label': 'scheduled_feeds',
            'exception_feeds.label': 'exception_feeds',
            'exception_pages.label': 'exception_pages',
            'duplicate_feeds.label': 'duplicate_feeds',
            'active_feeds.label': 'active_feeds',
            'push_feeds.label': 'push_feeds',
        }

    def calculate_metrics(self):
        """Collect the current counter values.

        Expensive database counts are cached in MStatistics for 12 hours;
        only the scheduled-feeds queue length is read live from Redis.
        """
        # Imports are deferred to call time (Django models).
        from apps.rss_feeds.models import Feed, DuplicateFeed
        from apps.push.models import PushSubscription
        from django.conf import settings
        from apps.statistics.models import MStatistics

        # NOTE(review): a cached value of 0 is falsy, so a zero count is
        # re-computed on every run -- presumably acceptable; confirm.
        exception_feeds = MStatistics.get('munin:exception_feeds')
        if not exception_feeds:
            exception_feeds = Feed.objects.filter(has_feed_exception=True).count()
            MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12)

        exception_pages = MStatistics.get('munin:exception_pages')
        if not exception_pages:
            exception_pages = Feed.objects.filter(has_page_exception=True).count()
            MStatistics.set('munin:exception_pages', exception_pages, 60*60*12)

        duplicate_feeds = MStatistics.get('munin:duplicate_feeds')
        if not duplicate_feeds:
            duplicate_feeds = DuplicateFeed.objects.count()
            MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12)

        active_feeds = MStatistics.get('munin:active_feeds')
        if not active_feeds:
            active_feeds = Feed.objects.filter(active_subscribers__gt=0).count()
            MStatistics.set('munin:active_feeds', active_feeds, 60*60*12)

        push_feeds = MStatistics.get('munin:push_feeds')
        if not push_feeds:
            push_feeds = PushSubscription.objects.filter(verified=True).count()
            MStatistics.set('munin:push_feeds', push_feeds, 60*60*12)

        # Live queue length of feeds waiting to be fetched.
        r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)

        return {
            'scheduled_feeds': r.zcard('scheduled_updates'),
            'exception_feeds': exception_feeds,
            'exception_pages': exception_pages,
            'duplicate_feeds': duplicate_feeds,
            'active_feeds': active_feeds,
            'push_feeds': push_feeds,
        }
if __name__ == '__main__':
    # Entry point when munin executes this plugin directly.
    NBMuninGraph().run()
| mit |
gabrielfalcao/lettuce | tests/integration/lib/Django-1.3/django/contrib/flatpages/admin.py | 250 | 1089 | from django import forms
from django.contrib import admin
from django.contrib.flatpages.models import FlatPage
from django.utils.translation import ugettext_lazy as _
class FlatpageForm(forms.ModelForm):
    """ModelForm for FlatPage that validates the URL field's format."""

    # Restrict URLs to word characters, dashes, dots, slashes and tildes.
    url = forms.RegexField(label=_("URL"), max_length=100, regex=r'^[-\w/\.~]+$',
        help_text = _("Example: '/about/contact/'. Make sure to have leading"
                      " and trailing slashes."),
        error_message = _("This value must contain only letters, numbers,"
                          " dots, underscores, dashes, slashes or tildes."))

    class Meta:
        # Edit all FlatPage fields.
        model = FlatPage
class FlatPageAdmin(admin.ModelAdmin):
    """Admin options for FlatPage, using the URL-validating FlatpageForm."""
    form = FlatpageForm
    # Rarely used options are grouped in a collapsible "Advanced" section.
    fieldsets = (
        (None, {'fields': ('url', 'title', 'content', 'sites')}),
        (_('Advanced options'), {'classes': ('collapse',), 'fields': ('enable_comments', 'registration_required', 'template_name')}),
    )
    list_display = ('url', 'title')
    list_filter = ('sites', 'enable_comments', 'registration_required')
    search_fields = ('url', 'title')
# Make FlatPage editable in the admin with the customized options above.
admin.site.register(FlatPage, FlatPageAdmin)
| gpl-3.0 |
lcgong/alchemy | redbean/test/security/serv/secure.py | 2 | 2092 | import logging
logger = logging.getLogger(__name__)
from redbean.secure.identity import SessionIdentity
from redbean.secure.keeper import UserIdentityKeeper
from redbean.asyncid import AsyncID64
from test.security.app import rest, etcd_endpoint
# Distributed user-id sequence backed by etcd.
user_id_generator = AsyncID64('/asyncid/user_sn', etcd_endpoint)
# Credential/identity store sharing the same etcd endpoint.
keeper = UserIdentityKeeper(etcd_endpoint, user_id_generator=user_id_generator)

# Routes below are registered relative to the current path.
rest.set_path('.')
@rest.post('login')
@rest.prepare_session
async def login(json_body: dict) -> SessionIdentity:
    """Authenticate a user from the posted credentials and open a session."""
    client_id = json_body.get('client_id')
    login_name = json_body.get('identity')
    passwd = json_body.get('passwd')
    # The keeper validates the password and returns the identity record,
    # which we tag with the requesting client before the session is built.
    verified = await keeper.check_passwd(login_name, passwd)
    verified.client_id = client_id
    return verified
@rest.post('logout')
@rest.close_session
async def logout(identity: SessionIdentity) -> None:
    """Log the user out; the close_session decorator tears down the session."""
    logger.debug(f'signout {identity}')
@rest.post('identity/new')
@rest.prepare_session
async def create_identity(json_body: dict) -> SessionIdentity:
    """Register a new login identity and open a session for it."""
    login_id = json_body.get('identity')
    passwd = json_body.get('passwd')
    # The keeper persists the identity and returns the session record.
    return await keeper.create_identity(login_id, passwd)
@rest.permission_verifier
async def verify_permissions(identity: SessionIdentity, permissions):
    """Check the session's user against the required permissions."""
    # Delegate the actual check to the identity keeper.
    return await keeper.verify_permissions(identity.user_id, *permissions)
@rest.on_cleanup
async def cleanup():
    """Stop the async user-id generator and wait for it to shut down."""
    user_id_generator.stop()
    await user_id_generator.stopped()
# @rest.get('verify_email/{token}')
# @rest.prepare_session
# async def verify_email(token: str) -> SessionIdentity:
# """ 使用邮件确认链接确认其使用本人邮件地址作为登录标识 """
# assert token
# identity = await keeper.verify_email(token)
# return identity
# @rest.post('signup')
# async def signup(json_arg: dict) -> SessionIdentity:
# client_id = json_arg.get('client_id')
# identity = json_arg.get('login_id')
#     passwd = json_arg.get('passwd')
# assert client_id
# assert identity
# assert passwd
# await keeper.create_email_identity(client_id, identity, passwd)
| gpl-3.0 |
Panos512/invenio | modules/bibauthorid/lib/bibauthorid_regression_tests.py | 5 | 8601 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibAuthorId regressions tests."""
__revision__ = "$Id$"
from invenio.testutils import InvenioTestCase, \
run_test_suite, make_test_suite, test_web_page_content
from invenio.config import CFG_SITE_URL, \
CFG_INSPIRE_SITE, CFG_BIBAUTHORID_ENABLED
from invenio.dbquery import run_sql
import random
import string
class BibAuthorIdDisplayedPages(InvenioTestCase):
    """This regression test checks whether suitable pages are displayed
    based on the variables CFG_INSPIRE_SITE and CFG_BIBAUTHORID_ENABLED"""

    def setUp(self):
        """ Initialization before tests"""
        # This random, arbitrarily large string is obviously invalid.
        self.any_name = ''.join(random.choice(string.lowercase) for x in range(26))
        # A known-good canonical name from the DB, or None on an empty site.
        self.canonical_name = self._get_canonical_name()

    def test_content_of_manage_profile(self):
        """This test checks whether the 'manage profile' page
        is neutral of implementation (e.g. Inspire features) and
        there are no authorisation issues."""
        if CFG_INSPIRE_SITE or CFG_BIBAUTHORID_ENABLED:
            # Ensures the authorization issue for manage_profile
            # will not return.
            url = '%s/author/manage_profile/%s' % (CFG_SITE_URL,
                                                   self.canonical_name)
            text_not_there = 'This page is not accessible directly.'
            # test_web_page_content returns a list of error strings;
            # an empty list means every expectation held.
            response = test_web_page_content(url, 'guest',
                                             unexpected_text=text_not_there)
            self.assertEqual(list(), response)

            # Ensures that the js (INSPIRE specific) login prompt box appears
            # Only for Inspire
            if self.canonical_name:
                url = '%s/author/claim/%s' % (CFG_SITE_URL,
                                              self.canonical_name)
                guest_prompt_value = 'false'

                if CFG_INSPIRE_SITE:
                    guest_prompt_value = 'true'

                text_to_check = 'guestPrompt: %s' % guest_prompt_value
                response = test_web_page_content(url, 'guest',
                                                 expected_text=text_to_check)
                self.assertEqual(list(), response)

    def test_content_of_profile_pages(self):
        """This test checks whether the profiles are displayed
        containing appropriate error messages and content
        and redirect to other appropriate."""
        # If we're on Inspire, BibAuthorId is always enabled.
        if CFG_INSPIRE_SITE or CFG_BIBAUTHORID_ENABLED:
            # A valid canonical name should lead to the author's profile page.
            if self.canonical_name:
                url = '%s/author/profile/%s' % (CFG_SITE_URL,
                                                self.canonical_name)
                text_to_check = 'Personal Information'
                response = test_web_page_content(url, 'guest',
                                                 expected_text=text_to_check)
                self.assertEqual(list(), response)

            # An invalid query for some profile, should lead to 'Person search'.
            url = '%s/author/profile/%s' % (CFG_SITE_URL, self.any_name)
            text_to_check = ['Person search',
                             'We do not have a publication list for \'%s\'.'
                             % self.any_name]
            response = test_web_page_content(url, 'guest',
                                             expected_text=text_to_check)
            self.assertEqual(list(), response)

            # author/%s searches are kept for backward compatibility.
            # Should theses pages become obsolete,
            # the regression test will not fail.
            if self._test_web_page_existence_no_robots('%s/author/%s'
                                                       % (CFG_SITE_URL,
                                                          self.canonical_name)):
                if self.canonical_name:
                    url = '%s/author/%s' % (CFG_SITE_URL,
                                            self.canonical_name)
                    text_to_check = 'Personal Information'
                    response = test_web_page_content(url, 'guest',
                                                     expected_text=text_to_check)
                    self.assertEqual(list(), response)

                url = '%s/author/%s' % (CFG_SITE_URL, self.any_name)
                text_to_check = ['Person search',
                                 'We do not have a publication list for \'%s\''
                                 % self.any_name]
                response = test_web_page_content(url, 'guest',
                                                 expected_text=text_to_check)
                self.assertEqual(list(), response)

        # Bibauthorid is disabled.
        else:
            # The navigation bar shouldn't be there.
            text_not_there = ['View Profile', 'Manage Profile']
            url = '%s/author/profile/Ellis,%%20J' % CFG_SITE_URL
            text_to_check = ['Ellis, J', 'Personal Information']
            response = test_web_page_content(url, 'guest',
                                             expected_text=text_to_check,
                                             unexpected_text=text_not_there)
            self.assertEqual(list(), response)

            # An invalid query for a profile, should lead to 'Person search'.
            url = '%s/author/profile/%s' % (CFG_SITE_URL, self.any_name)
            text_to_check = 'This doesn\'t look like a person ID!'
            response = test_web_page_content(url, 'guest',
                                             expected_text=text_to_check,
                                             unexpected_text=text_not_there)
            self.assertEqual(list(), response)

            if self._test_web_page_existence_no_robots('%s/author/Ellis, J'
                                                       % CFG_SITE_URL):
                url = '%s/author/Ellis,%%20J' % CFG_SITE_URL
                text_to_check = ['Ellis, J', 'Personal Information']
                response = test_web_page_content(url, 'guest',
                                                 expected_text=text_to_check,
                                                 unexpected_text=text_not_there)
                self.assertEqual(list(), response)

                url = '%s/author/%s' % (CFG_SITE_URL, self.any_name)
                text_to_check = 'This doesn\'t look like a person ID!'
                response = test_web_page_content(url, 'guest',
                                                 expected_text=text_to_check,
                                                 unexpected_text=text_not_there)
                self.assertEqual(list(), response)

    def _test_web_page_existence_no_robots(self, url):
        """Almost identical to testutils.test_web_page_existence(url) except
        that we need to ignore robots.txt in some cases
        (e.g. Invenio production) for this regression test."""
        import mechanize
        browser = mechanize.Browser()
        # NOTE(review): the try/except that just re-raises is a no-op kept
        # for parity with testutils; any open() failure propagates.
        try:
            browser.set_handle_robots(False) # ignore robots.txt.
            browser.open(url)
        except:
            raise
        return True

    def _get_canonical_name(self):
        """ Fetches a valid canonical name from the database.
        Returns None if it is empty."""
        result = run_sql("select data from aidPERSONIDDATA where tag ="
                         + "'canonical_name' LIMIT 1")
        if result:
            return result[0][0]
# Register the suite with Invenio's test runner.
TEST_SUITE = make_test_suite(BibAuthorIdDisplayedPages)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE, warn_user=False)
| gpl-2.0 |
PyroShark/namebench | libnamebench/nameserver_test.py | 175 | 7015 | #!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mocks for tests."""
__author__ = '[email protected] (Thomas Stromberg)'
import mocks
import nameserver
import unittest
class TestNameserver(unittest.TestCase):
    """Unit tests for the NameServer class, driven entirely by mock servers."""

    def testInit(self):
        """The constructor stores the IP and the optional name."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        self.assertEquals(ns.ip, mocks.GOOD_IP)
        self.assertEquals(ns.name, None)

        ns = mocks.MockNameServer(mocks.NO_RESPONSE_IP, name='Broked')
        self.assertEquals(ns.ip, mocks.NO_RESPONSE_IP)
        self.assertEquals(ns.name, 'Broked')

    def testTimedRequest(self):
        """TimedRequest returns (response, duration, exception)."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (response, duration, exception) = ns.TimedRequest('A', 'www.paypal.com')
        self.assertEquals(response.id, 999)
        expected = ('www.paypal.com. 159 IN A 66.211.169.65\n'
                    'www.paypal.com. 159 IN A 66.211.169.2')
        self.assertEquals(str(response.answer[0]), expected)
        self.assertTrue(duration > 0)
        self.assertEquals(exception, None)

    def testTestAnswers(self):
        """TestAnswers flags broken servers and returns a sane duration."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (is_broken, warning, duration) = ns.TestAnswers('A', 'www.paypal.com',
                                                        '10.0.0.1')
        self.assertEquals(is_broken, False)
        self.assertEquals(warning, None)
        self.assertTrue(duration > 0 and duration < 3600)

    def testResponseToAscii(self):
        """ResponseToAscii renders answers, or 'no answer' when empty."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (response, duration, exception) = ns.TimedRequest('A', 'www.paypal.com')
        self.assertEquals(nameserver.ResponseToAscii(response),
                          '66.211.169.65 + 66.211.169.2')
        response.answer = None
        self.assertEquals(nameserver.ResponseToAscii(response), 'no answer')

    def testGoogleComResponse(self):
        """A hijacked google.com answer produces a warning, not a failure."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (is_broken, warning, duration) = ns.TestGoogleComResponse()
        self.assertEquals(is_broken, False)
        self.assertEquals(warning,
                          'google.com. is hijacked (66.211.169.65 + 66.211.169.2)')
        self.assertTrue(duration > 0 and duration < 3600)

    def testWwwGoogleComResponse(self):
        """A missing answer for www.google.com marks the server broken."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (is_broken, warning, duration) = ns.TestWwwGoogleComResponse()
        self.assertEquals(is_broken, True)
        self.assertEquals(warning, 'No answer')
        self.assertTrue(duration > 0 and duration < 3600)

    def testWwwPaypalComResponse(self):
        """A correct www.paypal.com answer yields no warning."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (is_broken, warning, duration) = ns.TestWwwPaypalComResponse()
        self.assertEquals(is_broken, False)
        self.assertEquals(warning, None)

    def testNegativeResponse(self):
        """A proper NXDOMAIN response is neither broken nor warned about."""
        ns = mocks.MockNameServer(mocks.NO_RESPONSE_IP)
        (is_broken, warning, duration) = ns.TestNegativeResponse()
        self.assertEquals(is_broken, False)
        self.assertEquals(warning, None)

    def testNegativeResponseHijacked(self):
        """NXDOMAIN hijacking is reported as a warning only."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (is_broken, warning, duration) = ns.TestNegativeResponse()
        self.assertEquals(is_broken, False)
        self.assertEquals(warning,
                          'NXDOMAIN Hijacking (66.211.169.65 + 66.211.169.2)')

    def testNegativeResponseBroken(self):
        """A malformed negative response marks the server broken."""
        ns = mocks.MockNameServer(mocks.BROKEN_IP)
        (is_broken, warning, duration) = ns.TestNegativeResponse()
        self.assertEquals(is_broken, True)
        self.assertEquals(warning, 'BadResponse')

    def testWildcardCache(self):
        """QueryWildcardCache issues a namebench-prefixed probe query."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        (response, is_broken, warning, duration) = ns.QueryWildcardCache()
        self.assertEquals(is_broken, False)
        question = str(response.question[0])
        self.assertTrue(question.startswith('namebench'))
        self.assertEquals(warning, None)

    def testCheckHealthGood(self):
        """CheckHealth fails fast on the first failing check."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        ns.CheckHealth()
        self.assertEquals(ns.CheckHealth(), False)
        self.assertEquals(ns.warnings, ['No answer'])
        self.assertEquals(len(ns.checks), 1)
        self.assertEquals(ns.failure[0], 'TestWwwGoogleComResponse')
        self.assertEquals(ns.checks[0][0:3],
                          ('TestWwwGoogleComResponse', True, 'No answer'))

    def testCheckHealthPerfect(self):
        """A healthy server passes all five checks, warnings notwithstanding."""
        ns = mocks.MockNameServer(mocks.PERFECT_IP)
        ns.CheckHealth()
        self.assertEquals(ns.CheckHealth(), True)
        expected = ['www.google.com. is hijacked (66.211.169.65 + 66.211.169.2)',
                    'google.com. is hijacked (66.211.169.65 + 66.211.169.2)',
                    'NXDOMAIN Hijacking (66.211.169.65 + 66.211.169.2)']
        self.assertEquals(ns.warnings, expected)
        self.assertEquals(len(ns.checks), 5)
        self.assertEquals(ns.failure, None)
        self.assertTrue(ns.check_duration > 10)

    def testQUeryWildcardCacheSaving(self):
        """Saved wildcard probes are unique per server but share the TTL."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        other_ns = mocks.MockNameServer(mocks.PERFECT_IP)
        ns.QueryWildcardCache(save=True)
        other_ns.QueryWildcardCache(save=True)

        # Test our cache-sharing mechanisms
        (hostname, ttl) = ns.cache_check
        self.assertTrue(hostname.startswith('namebench'))
        self.assertEquals(ttl, 159)
        (other_hostname, other_ttl) = other_ns.cache_check
        self.assertTrue(other_hostname.startswith('namebench'))
        self.assertNotEqual(hostname, other_hostname)
        self.assertEquals(other_ttl, 159)

    def testSharedCacheNoMatch(self):
        """Identical TTLs mean we cannot prove the caches are shared."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        other_ns = mocks.MockNameServer(mocks.PERFECT_IP)
        ns.QueryWildcardCache(save=True)
        other_ns.QueryWildcardCache(save=True)
        (shared, slower, faster) = ns.TestSharedCache(other_ns)
        self.assertEquals(shared, False)
        self.assertEquals(slower, None)
        self.assertEquals(faster, None)

    def testSharedCacheMatch(self):
        """A slightly higher TTL implies a shared cache; a huge delta does not."""
        ns = mocks.MockNameServer(mocks.GOOD_IP)
        other_ns = mocks.MockNameServer(mocks.PERFECT_IP)
        ns.QueryWildcardCache(save=True)
        other_ns.QueryWildcardCache(save=True)

        # Increase the TTL of 'other'
        other_ns.cache_check = (other_ns.cache_check[0], other_ns.cache_check[1] + 5)
        (shared, slower, faster) = ns.TestSharedCache(other_ns)
        self.assertEquals(shared, True)
        self.assertEquals(slower.ip, mocks.GOOD_IP)
        self.assertEquals(faster.ip, mocks.PERFECT_IP)

        # Increase the TTL of 'other' by a whole lot
        other_ns.cache_check = (other_ns.cache_check[0], other_ns.cache_check[1] + 3600)
        (shared, slower, faster) = ns.TestSharedCache(other_ns)
        self.assertEquals(shared, False)
        self.assertEquals(slower, None)
        self.assertEquals(faster, None)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| apache-2.0 |
messi2050/android_kernel_huawei_msm8610 | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
# pid -> command-name map, filled in as sched_switch events are parsed;
# pid 0 is the idle task.
threads = { 0 : "idle"}

def thread_name(pid):
    # Human-readable "comm:pid" label for a task.
    return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
    """Default event for a time slice whose cause was not identified."""
    @staticmethod
    def color():
        # No dedicated marker color; the GUI uses its default shading.
        return None

    def __repr__(self):
        return "unknown"

class RunqueueEventSleep:
    """A task left the runqueue by going to sleep (blue marker)."""
    @staticmethod
    def color():
        return (0, 0, 0xff)

    def __init__(self, sleeper):
        self.sleeper = sleeper

    def __repr__(self):
        return "%s gone to sleep" % thread_name(self.sleeper)

class RunqueueEventWakeup:
    """A task was woken up on this runqueue (yellow marker)."""
    @staticmethod
    def color():
        return (0xff, 0xff, 0)

    def __init__(self, wakee):
        self.wakee = wakee

    def __repr__(self):
        return "%s woke up" % thread_name(self.wakee)

class RunqueueEventFork:
    """A newly forked task appeared on this runqueue (green marker)."""
    @staticmethod
    def color():
        return (0, 0xff, 0)

    def __init__(self, child):
        self.child = child

    def __repr__(self):
        return "new forked task %s" % thread_name(self.child)

class RunqueueMigrateIn:
    """A task migrated onto this CPU's runqueue (cyan marker)."""
    @staticmethod
    def color():
        return (0, 0xf0, 0xff)

    def __init__(self, new):
        self.new = new

    def __repr__(self):
        return "task migrated in %s" % thread_name(self.new)

class RunqueueMigrateOut:
    """A task migrated off this CPU's runqueue (magenta marker)."""
    @staticmethod
    def color():
        return (0xff, 0, 0xff)

    def __init__(self, old):
        self.old = old

    def __repr__(self):
        return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
    """Immutable view of one CPU runqueue (task pids) plus the event that
    produced it.  Mutating operations return a new snapshot."""

    def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
        # The mutable default is safe here because it is copied into a tuple.
        self.tasks = tuple(tasks)
        self.event = event

    def sched_switch(self, prev, prev_state, next):
        # Derive the next snapshot from a context switch; returns self
        # unchanged when the switch adds no information.
        event = RunqueueEventUnknown()

        if taskState(prev_state) == "R" and next in self.tasks \
            and prev in self.tasks:
            return self

        if taskState(prev_state) != "R":
            event = RunqueueEventSleep(prev)

        next_tasks = list(self.tasks[:])
        if prev in self.tasks:
            if taskState(prev_state) != "R":
                next_tasks.remove(prev)
        elif taskState(prev_state) == "R":
            next_tasks.append(prev)

        if next not in next_tasks:
            next_tasks.append(next)

        return RunqueueSnapshot(next_tasks, event)

    def migrate_out(self, old):
        # Remove a task that migrated to another CPU.
        if old not in self.tasks:
            return self
        next_tasks = [task for task in self.tasks if task != old]

        return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))

    def __migrate_in(self, new, event):
        # Add a task (migration, wakeup or fork).  If it is already queued,
        # only the event annotation is updated, in place.
        if new in self.tasks:
            self.event = event
            return self
        next_tasks = self.tasks[:] + tuple([new])

        return RunqueueSnapshot(next_tasks, event)

    def migrate_in(self, new):
        return self.__migrate_in(new, RunqueueMigrateIn(new))

    def wake_up(self, new):
        return self.__migrate_in(new, RunqueueEventWakeup(new))

    def wake_up_new(self, new):
        return self.__migrate_in(new, RunqueueEventFork(new))

    def load(self):
        """ Provide the number of tasks on the runqueue.
        Don't count idle"""
        return len(self.tasks) - 1

    def __repr__(self):
        # NOTE(review): origin_tostring() is not defined anywhere in this
        # file; calling repr() on a snapshot would raise AttributeError.
        ret = self.tasks.__repr__()
        ret += self.origin_tostring()

        return ret
class TimeSlice:
    """State of all runqueues between two consecutive sched events.

    Each slice inherits the runqueues of its predecessor and records which
    CPUs changed (event_cpus) plus the aggregate load across CPUs.
    """

    def __init__(self, start, prev):
        self.start = start
        self.prev = prev
        self.end = start
        # cpus that triggered the event
        self.event_cpus = []
        if prev is not None:
            self.total_load = prev.total_load
            self.rqs = prev.rqs.copy()
        else:
            self.rqs = defaultdict(RunqueueSnapshot)
            self.total_load = 0

    def __update_total_load(self, old_rq, new_rq):
        # Keep the cross-CPU load sum in sync with a runqueue replacement.
        diff = new_rq.load() - old_rq.load()
        self.total_load += diff

    def sched_switch(self, ts_list, prev, prev_state, next, cpu):
        # Apply a context switch on one CPU; append self to ts_list only
        # when the runqueue actually changed.
        old_rq = self.prev.rqs[cpu]
        new_rq = old_rq.sched_switch(prev, prev_state, next)

        if old_rq is new_rq:
            return

        self.rqs[cpu] = new_rq
        self.__update_total_load(old_rq, new_rq)
        ts_list.append(self)
        self.event_cpus = [cpu]

    def migrate(self, ts_list, new, old_cpu, new_cpu):
        # Move a task between two CPUs' runqueues.
        if old_cpu == new_cpu:
            return
        old_rq = self.prev.rqs[old_cpu]
        out_rq = old_rq.migrate_out(new)
        self.rqs[old_cpu] = out_rq
        self.__update_total_load(old_rq, out_rq)

        new_rq = self.prev.rqs[new_cpu]
        in_rq = new_rq.migrate_in(new)
        self.rqs[new_cpu] = in_rq
        self.__update_total_load(new_rq, in_rq)

        ts_list.append(self)

        if old_rq is not out_rq:
            self.event_cpus.append(old_cpu)
        self.event_cpus.append(new_cpu)

    def wake_up(self, ts_list, pid, cpu, fork):
        # Record a wakeup (or the first wakeup of a freshly forked task).
        old_rq = self.prev.rqs[cpu]
        if fork:
            new_rq = old_rq.wake_up_new(pid)
        else:
            new_rq = old_rq.wake_up(pid)

        if new_rq is old_rq:
            return

        self.rqs[cpu] = new_rq
        self.__update_total_load(old_rq, new_rq)
        ts_list.append(self)
        self.event_cpus = [cpu]

    def next(self, t):
        # Close this slice at time t and start the successor slice.
        self.end = t
        return TimeSlice(t, self)
class TimeSliceList(UserList):
    """Ordered list of TimeSlice objects plus GUI-facing helpers.

    NOTE(review): this is Python 2 code (xrange, integer '/' division in
    find_time_slice); it is not directly runnable under Python 3.
    """

    def __init__(self, arg = []):
        self.data = arg

    def get_time_slice(self, ts):
        # Return the slice covering timestamp ts, creating the first slice
        # (with a sentinel predecessor) when the list is still empty.
        if len(self.data) == 0:
            slice = TimeSlice(ts, TimeSlice(-1, None))
        else:
            slice = self.data[-1].next(ts)
        return slice

    def find_time_slice(self, ts):
        # Binary search for the slice whose [start, end] interval contains
        # ts; returns its index, or -1 when no slice covers ts.
        start = 0
        end = len(self.data)
        found = -1
        searching = True
        while searching:
            if start == end or start == end - 1:
                searching = False

            i = (end + start) / 2
            if self.data[i].start <= ts and self.data[i].end >= ts:
                found = i
                end = i
                continue

            if self.data[i].end < ts:
                start = i

            elif self.data[i].start > ts:
                end = i

        return found

    def set_root_win(self, win):
        # The GUI window used by mouse_down/update_rectangle_cpu callbacks.
        self.root_win = win

    def mouse_down(self, cpu, t):
        # Build a text summary for the slice under the mouse and push it
        # to the GUI side panel.
        idx = self.find_time_slice(t)
        if idx == -1:
            return

        ts = self[idx]
        rq = ts.rqs[cpu]
        raw = "CPU: %d\n" % cpu
        raw += "Last event : %s\n" % rq.event.__repr__()
        raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
        raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
        raw += "Load = %d\n" % rq.load()
        for t in rq.tasks:
            raw += "%s \n" % thread_name(t)

        self.root_win.update_summary(raw)

    def update_rectangle_cpu(self, slice, cpu):
        # Paint one CPU's rectangle for this slice: redder means a larger
        # share of the total load; an event-specific top color marks the
        # CPU(s) that triggered the slice.
        rq = slice.rqs[cpu]

        if slice.total_load != 0:
            load_rate = rq.load() / float(slice.total_load)
        else:
            load_rate = 0

        red_power = int(0xff - (0xff * load_rate))
        color = (0xff, red_power, red_power)

        top_color = None

        if cpu in slice.event_cpus:
            top_color = rq.event.color()

        self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)

    def fill_zone(self, start, end):
        # Repaint every slice intersecting the [start, end] window.
        i = self.find_time_slice(start)
        if i == -1:
            return

        for i in xrange(i, len(self.data)):
            timeslice = self.data[i]
            if timeslice.start > end:
                return

            for cpu in timeslice.rqs:
                self.update_rectangle_cpu(timeslice, cpu)

    def interval(self):
        # Overall [first start, last end] time span of the trace.
        if len(self.data) == 0:
            return (0, 0)

        return (self.data[0].start, self.data[-1].end)

    def nr_rectangles(self):
        # Highest CPU number seen, i.e. the number of rows to draw.
        last_ts = self.data[-1]
        max_cpu = 0
        for cpu in last_ts.rqs:
            if cpu > max_cpu:
                max_cpu = cpu
        return max_cpu
class SchedEventProxy:
    """Receives the perf sched events and feeds the TimeSliceList model.

    NOTE(review): Python 2 (print statement); not runnable under Python 3.
    """

    def __init__(self):
        # Last task known to be on each CPU; -1 means "not yet seen".
        self.current_tsk = defaultdict(lambda : -1)
        self.timeslices = TimeSliceList()

    def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
            next_comm, next_pid, next_prio):
        """ Ensure the task we sched out this cpu is really the one
        we logged. Otherwise we may have missed traces """
        on_cpu_task = self.current_tsk[headers.cpu]
        if on_cpu_task != -1 and on_cpu_task != prev_pid:
            print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
                (headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)

        # Keep the pid -> comm map fresh for display purposes.
        threads[prev_pid] = prev_comm
        threads[next_pid] = next_comm
        self.current_tsk[headers.cpu] = next_pid

        ts = self.timeslices.get_time_slice(headers.ts())
        ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)

    def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
        # Forward a task migration to the current time slice.
        ts = self.timeslices.get_time_slice(headers.ts())
        ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)

    def wake_up(self, headers, comm, pid, success, target_cpu, fork):
        # Ignore failed wakeups; otherwise record the (possibly forked)
        # task on its target CPU.
        if success == 0:
            return
        ts = self.timeslices.get_time_slice(headers.ts())
        ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
    # perf calls this once before the first event: set up the parser.
    global parser
    parser = SchedEventProxy()

def trace_end():
    # perf calls this after the last event: hand the parsed time slices
    # to the GUI (wx and RootFrame come from the SchedGui star import).
    app = wx.App(False)
    timeslices = parser.timeslices
    frame = RootFrame(timeslices, "Migration")
    app.MainLoop()

# The sched__* functions below are the per-tracepoint callbacks generated
# by "perf script -g python".  Only the migrate, switch and wakeup events
# feed the model; the remaining hooks are intentionally left as no-ops.

def sched__sched_stat_runtime(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, runtime, vruntime):
    pass

def sched__sched_stat_iowait(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, delay):
    pass

def sched__sched_stat_sleep(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, delay):
    pass

def sched__sched_stat_wait(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, delay):
    pass

def sched__sched_process_fork(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        parent_comm, parent_pid, child_comm, child_pid):
    pass

def sched__sched_process_wait(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_process_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_process_free(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_migrate_task(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio, orig_cpu,
        dest_cpu):
    # Task moved between CPUs: update the runqueue model.
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
                           common_pid, common_comm)
    parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)

def sched__sched_switch(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        prev_comm, prev_pid, prev_prio, prev_state,
        next_comm, next_pid, next_prio):
    # Context switch on one CPU: the core event driving the model.
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
                           common_pid, common_comm)
    parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
                        next_comm, next_pid, next_prio)

def sched__sched_wakeup_new(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio, success,
        target_cpu):
    # First wakeup of a freshly forked task (fork flag = 1).
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
                           common_pid, common_comm)
    parser.wake_up(headers, comm, pid, success, target_cpu, 1)

def sched__sched_wakeup(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio, success,
        target_cpu):
    # Regular wakeup of an existing task (fork flag = 0).
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
                           common_pid, common_comm)
    parser.wake_up(headers, comm, pid, success, target_cpu, 0)

def sched__sched_wait_task(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid, prio):
    pass

def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        ret):
    pass

def sched__sched_kthread_stop(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        comm, pid):
    pass

def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
        common_pid, common_comm):
    # Catch-all for events without a dedicated handler.
    pass
| gpl-2.0 |
soldag/home-assistant | tests/components/rfxtrx/test_light.py | 14 | 6854 | """The tests for the Rfxtrx light platform."""
from unittest.mock import call
import pytest
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.components.rfxtrx import DOMAIN
from homeassistant.core import State
from tests.common import MockConfigEntry, mock_restore_cache
from tests.components.rfxtrx.conftest import create_rfx_test_cfg
async def test_one_light(hass, rfxtrx):
    """Test with 1 light."""
    entry_data = create_rfx_test_cfg(
        devices={"0b1100cd0213c7f210020f51": {"signal_repetitions": 1}}
    )
    config_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    entity_id = "light.ac_213c7f2_16"
    state = hass.states.get(entity_id)
    assert state
    assert state.state == "off"
    assert state.attributes.get("friendly_name") == "AC 213c7f2:16"

    # (service, extra service data, expected state, expected brightness)
    steps = [
        ("turn_on", {}, "on", 255),
        ("turn_off", {}, "off", None),
        ("turn_on", {"brightness": 100}, "on", 100),
        ("turn_on", {"brightness": 10}, "on", 10),
        ("turn_on", {"brightness": 255}, "on", 255),
        ("turn_off", {}, "off", None),
    ]
    for service, extra, want_state, want_brightness in steps:
        await hass.services.async_call(
            "light", service, {"entity_id": entity_id, **extra}, blocking=True
        )
        state = hass.states.get(entity_id)
        assert state.state == want_state
        if want_brightness is None:
            assert state.attributes.get("brightness") is None
        else:
            assert state.attributes.get("brightness") == want_brightness

    # Every service call above must have produced exactly one packet, in order.
    assert rfxtrx.transport.send.mock_calls == [
        call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x01\x00\x00")),
        call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
        call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x06\x00")),
        call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x00\x00")),
        call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x02\x0f\x00")),
        call(bytearray(b"\x0b\x11\x00\x00\x02\x13\xc7\xf2\x10\x00\x00\x00")),
    ]
@pytest.mark.parametrize("state,brightness", [["on", 100], ["on", 50], ["off", None]])
async def test_state_restore(hass, rfxtrx, state, brightness):
    """State restoration."""
    entity_id = "light.ac_213c7f2_16"

    # Seed the restore cache before the integration is set up.
    restored = State(entity_id, state, attributes={ATTR_BRIGHTNESS: brightness})
    mock_restore_cache(hass, [restored])

    entry_data = create_rfx_test_cfg(
        devices={"0b1100cd0213c7f210020f51": {"signal_repetitions": 1}}
    )
    config_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    # The entity must come back with the cached state and brightness.
    current = hass.states.get(entity_id)
    assert current.state == state
    assert current.attributes.get(ATTR_BRIGHTNESS) == brightness
async def test_several_lights(hass, rfxtrx):
    """Test with 3 lights."""
    entry_data = create_rfx_test_cfg(
        devices={
            "0b1100cd0213c7f230020f71": {"signal_repetitions": 1},
            "0b1100100118cdea02020f70": {"signal_repetitions": 1},
            "0b1100101118cdea02050f70": {"signal_repetitions": 1},
        }
    )
    config_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    await hass.async_start()

    # All three configured devices appear as entities, initially off.
    for entity_id, friendly_name in [
        ("light.ac_213c7f2_48", "AC 213c7f2:48"),
        ("light.ac_118cdea_2", "AC 118cdea:2"),
        ("light.ac_1118cdea_2", "AC 1118cdea:2"),
    ]:
        state = hass.states.get(entity_id)
        assert state
        assert state.state == "off"
        assert state.attributes.get("friendly_name") == friendly_name

    # Drive the first light through on/off via raw incoming packets.
    await rfxtrx.signal("0b1100cd0213c7f230010f71")
    state = hass.states.get("light.ac_213c7f2_48")
    assert state
    assert state.state == "on"

    await rfxtrx.signal("0b1100cd0213c7f230000f71")
    state = hass.states.get("light.ac_213c7f2_48")
    assert state
    assert state.state == "off"

    # A dim-level packet turns it on at full brightness.
    await rfxtrx.signal("0b1100cd0213c7f230020f71")
    state = hass.states.get("light.ac_213c7f2_48")
    assert state
    assert state.state == "on"
    assert state.attributes.get("brightness") == 255
@pytest.mark.parametrize("repetitions", [1, 3])
async def test_repetitions(hass, rfxtrx, repetitions):
    """Test signal repetitions."""
    entry_data = create_rfx_test_cfg(
        devices={"0b1100cd0213c7f230020f71": {"signal_repetitions": repetitions}}
    )
    config_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data)
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    # One turn_on must emit exactly one packet per configured repetition.
    await hass.services.async_call(
        "light", "turn_on", {"entity_id": "light.ac_213c7f2_48"}, blocking=True
    )
    await hass.async_block_till_done()

    assert rfxtrx.transport.send.call_count == repetitions
async def test_discover_light(hass, rfxtrx_automatic):
    """Test with discovery of lights."""
    rfxtrx = rfxtrx_automatic

    # Each packet from an unknown device should create an entity that is
    # already on, named after the decoded device id.
    for packet, entity_id, friendly_name in [
        ("0b11009e00e6116202020070", "light.ac_0e61162_2", "AC 0e61162:2"),
        ("0b1100120118cdea02020070", "light.ac_118cdea_2", "AC 118cdea:2"),
    ]:
        await rfxtrx.signal(packet)
        state = hass.states.get(entity_id)
        assert state
        assert state.state == "on"
        assert state.attributes.get("friendly_name") == friendly_name
| apache-2.0 |
KenKundert/quantiphy | tests/test_unit_conversion.py | 1 | 13490 | # encoding: utf8
from quantiphy import (
Quantity, UnitConversion,
QuantiPhyError, IncompatibleUnits, UnknownPreference, UnknownConversion,
UnknownUnitSystem, InvalidRecognizer, UnknownFormatKey, UnknownScaleFactor,
InvalidNumber, ExpectedQuantity, MissingName,
)
Quantity.reset_prefs()
import math
import pytest
def test_simple_scaling():
    """Exercise the 'scale' argument as a real, a quantity, a tuple, and a function."""
    Quantity.reset_prefs()
    with Quantity.prefs(
        spacer=None, show_label=None, label_fmt=None, label_fmt_full=None
    ):
        # scale applied at construction multiplies the value: 1 kg -> 2 kg.
        q=Quantity('1kg', scale=2)
        qs=Quantity('2ms')
        assert q.render() == '2 kg'
        assert qs.render() == '2 ms'
        # A bare real scales at render time; a quantity scales by its value.
        assert q.render(scale=0.001) == '2 g'
        assert str(q.scale(0.001)) == '2 g'
        assert q.render(scale=qs) == '4 g'
        assert str(q.scale(qs)) == '4 g'
        # An unknown unit conversion raises UnknownConversion, which is both a
        # QuantiPhyError and a KeyError; the args carry the two unit names.
        with pytest.raises(KeyError) as exception:
            q.render(scale='fuzz')
        assert str(exception.value) == "unable to convert between 'fuzz' and 'g'."
        assert isinstance(exception.value, UnknownConversion)
        assert isinstance(exception.value, QuantiPhyError)
        assert isinstance(exception.value, KeyError)
        assert exception.value.args == ('fuzz', 'g')
        with pytest.raises(KeyError) as exception:
            q.scale('fuzz')
        assert str(exception.value) == "unable to convert between 'fuzz' and 'g'."
        assert isinstance(exception.value, UnknownConversion)
        assert isinstance(exception.value, QuantiPhyError)
        assert isinstance(exception.value, KeyError)
        assert exception.value.args == ('fuzz', 'g')
        # scale may also be a (multiplier, units) tuple.
        q=Quantity('1', units='g', scale=1000)
        assert q.render() == '1 kg'
        assert q.render(scale=(0.0022046, 'lbs')) == '2.2046 lbs'
        assert str(q.scale((0.0022046, 'lbs'))) == '2.2046 lbs'
        q=Quantity('1', units='g', scale=qs)
        assert q.render() == '2 mg'
        q=Quantity('1', scale=(1000, 'g'))
        assert q.render() == '1 kg'
        # A scale function receives (value, units) and returns the same pair.
        assert q.render(scale=lambda v, u: (0.0022046*v, 'lbs')) == '2.2046 lbs'
        def dB(v, u):
            return 20*math.log(v, 10), 'dB'+u
        def adB(v, u):
            return pow(10, v/20), u[2:] if u.startswith('dB') else u
        # Round-trip through a user-defined dB/anti-dB conversion pair.
        q=Quantity('-40 dBV', scale=adB)
        assert q.render() == '10 mV'
        assert q.render(scale=dB) == '-40 dBV'
        assert str(q.scale(dB)) == '-40 dBV'
def test_temperature():
    """Check conversions among °C/C, K, °F/F and °R/R in both directions."""
    Quantity.reset_prefs()
    with Quantity.prefs(
        spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
        ignore_sf=True
    ):
        # Render a Celsius quantity in every supported temperature unit.
        q=Quantity('100 °C')
        assert q.render() == '100 °C'
        assert q.render(scale='C') == '100 C'
        assert q.render(scale='°C') == '100 °C'
        assert q.render(scale='K') == '373.15 K'
        assert q.render(scale='°F') == '212 °F'
        assert q.render(scale='F') == '212 F'
        assert q.render(scale='°R') == '671.67 °R'
        assert q.render(scale='R') == '671.67 R'
        # Same checks, starting from the degree-sign-free spelling.
        q=Quantity('100 C')
        assert q.render() == '100 C'
        assert q.render(scale='C') == '100 C'
        assert q.render(scale='K') == '373.15 K'
        assert q.render(scale='F') == '212 F'
        assert q.render(scale='R') == '671.67 R'
        assert q.render(scale='°C') == '100 °C'
        assert q.render(scale='°F') == '212 °F'
        assert q.render(scale='°R') == '671.67 °R'
        # Starting from Kelvin.
        q=Quantity('373.15 K')
        assert q.render() == '373.15 K'
        assert q.render(scale='C') == '100 C'
        assert q.render(scale='K') == '373.15 K'
        assert q.render(scale='F') == '212 F'
        assert q.render(scale='R') == '671.67 R'
        assert q.render(scale='°C') == '100 °C'
        assert q.render(scale='°F') == '212 °F'
        assert q.render(scale='°R') == '671.67 °R'
        # Starting from Fahrenheit.
        q=Quantity('212 °F')
        assert q.render() == '212 °F'
        assert q.render(scale='°C') == '100 °C'
        assert q.render(scale='C') == '100 C'
        assert q.render(scale='K') == '373.15 K'
        assert q.render(scale='°F') == '212 °F'
        assert q.render(scale='F') == '212 F'
        # NOTE(review): °F -> °R conversions are left disabled in the original
        # test; presumably unsupported — confirm before enabling.
        #assert q.render(scale='°R') == '671.67 °R'
        #assert q.render(scale='R') == '671.67 R'
        q=Quantity('212 F')
        assert q.render() == '212 F'
        assert q.render(scale='C') == '100 C'
        assert q.render(scale='K') == '373.15 K'
        assert q.render(scale='°C') == '100 °C'
        assert q.render(scale='°F') == '212 °F'
        assert q.render(scale='F') == '212 F'
        #assert q.render(scale='°R') == '671.67 °R'
        #assert q.render(scale='R') == '671.67 R'
        # Conversions applied at construction time via the scale argument.
        q=Quantity('100 °C', scale='K')
        assert q.render() == '373.15 K'
        q=Quantity('212 °F', scale='K')
        assert q.render() == '373.15 K'
        q=Quantity('212 °F', scale='C')
        assert q.render() == '100 C'
        q=Quantity('212 F', scale='°C')
        assert q.render() == '100 °C'
        # Rankine: 491.67 R is the freezing point of water.
        q=Quantity('491.67 R', scale='°C')
        assert q.is_close(Quantity('0 °C'))
        q=Quantity('491.67 R', scale='K')
        assert q.render() == '273.15 K'
def test_distance():
    """Check metric and imperial length conversions in both directions."""
    Quantity.reset_prefs()
    with Quantity.prefs(
        spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
        ignore_sf=False
    ):
        # Render one meter in every supported length unit; form='eng' forces
        # exponential notation where the scaled value would need an SI prefix.
        q=Quantity('1_m')
        assert q.render() == '1 m'
        assert q.render(scale='cm', form='eng') == '100 cm'
        assert q.render(scale='mm', form='eng') == '1e3 mm'
        assert q.render(scale='um', form='eng') == '1e6 um'
        assert q.render(scale='μm', form='eng') == '1e6 μm'
        assert q.render(scale='nm', form='eng') == '1e9 nm'
        assert q.render(scale='Å', form='eng') == '10e9 Å'
        assert q.render(scale='angstrom', form='eng') == '10e9 angstrom'
        # Imperial units; SI prefixes still apply to the scaled result.
        assert q.render(scale='mi') == '621.37 umi'
        assert q.render(scale='mile') == '621.37 umile'
        assert q.render(scale='miles') == '621.37 umiles'
        assert q.render(scale='in') == '39.37 in'
        assert q.render(scale='inch') == '39.37 inch'
        assert q.render(scale='inches') == '39.37 inches'
        q=Quantity('1_m')
        assert q.render() == '1 m'
        # Conversions to meters at construction time.
        q=Quantity('100cm', scale='m')
        assert q.render() == '1 m'
        q=Quantity('1cm', scale='m')
        assert q.render() == '10 mm'
        q=Quantity('1000mm', scale='m')
        assert q.render() == '1 m'
        q=Quantity('1mm', scale='m')
        assert q.render() == '1 mm'
        q=Quantity('1000000um', scale='m')
        assert q.render() == '1 m'
        q=Quantity('1um', scale='m')
        assert q.render() == '1 um'
        q=Quantity('1000000μm', scale='m')
        assert q.render() == '1 m'
        q=Quantity('1μm', scale='m')
        assert q.render() == '1 um'
        q=Quantity('1000000000nm', scale='m')
        assert q.render() == '1 m'
        q=Quantity('1nm', scale='m')
        assert q.render() == '1 nm'
        q=Quantity('10000000000Å', scale='m')
        assert q.render() == '1 m'
        q=Quantity('1Å', scale='m')
        assert q.render() == '100 pm'
        q=Quantity('1_mi', scale='m')
        assert q.render() == '1.6093 km'
        q=Quantity('1_mile', scale='m')
        assert q.render() == '1.6093 km'
        q=Quantity('1_miles', scale='m')
        assert q.render() == '1.6093 km'
        # The description after '--' is ignored by the conversion.
        q=Quantity('d = 93 Mmiles -- average distance from Sun to Earth', scale='m')
        assert q.render() == '149.67 Gm'
def test_mass():
    """Check gram <-> ounce/pound conversions in both directions."""
    Quantity.reset_prefs()
    with Quantity.prefs(
        spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
        ignore_sf=False
    ):
        one_gram = Quantity('1 g')
        assert one_gram.render() == '1 g'
        # Rendering in imperial units converts on the fly; SI prefixes
        # still apply to the converted value.
        assert one_gram.render(scale='oz') == '35.274 moz'
        assert one_gram.render(scale='lb') == '2.2046 mlb'
        assert one_gram.render(scale='lbs') == '2.2046 mlbs'
        # Converting to grams at construction time.
        for unit, in_grams in [
            ('oz', '28.35 g'),
            ('lb', '453.59 g'),
            ('lbs', '453.59 g'),
        ]:
            assert Quantity('1 ' + unit, scale='g').render() == in_grams
def test_time():
    """Check second <-> minute/hour/day conversions in both directions."""
    Quantity.reset_prefs()
    with Quantity.prefs(
        spacer=None, show_label=None, label_fmt=None, label_fmt_full=None,
        ignore_sf=True
    ):
        one_day = Quantity('86400 s')
        assert one_day.render() == '86.4 ks'
        # Render one day in each supported time unit.
        for units, rendered in [
            ('sec', '86.4 ksec'),
            ('min', '1.44 kmin'),
            ('hr', '24 hr'),
            ('hour', '24 hour'),
            ('day', '1 day'),
        ]:
            assert one_day.render(scale=units) == rendered
        # Converting to seconds at construction time.
        for value, in_seconds in [
            ('1 day', '86.4 ks'),
            ('24 hour', '86.4 ks'),
            ('24 hr', '86.4 ks'),
            ('60 min', '3.6 ks'),
            ('60 sec', '60 s'),
        ]:
            assert Quantity(value, scale='s').render() == in_seconds
def test_scale():
    """scale() returns a new converted quantity; the original is untouched."""
    Quantity.reset_prefs()
    seconds = Quantity('86400 s')
    in_days = seconds.scale('day')
    assert in_days.render() == '1 day'
    assert seconds.render() == '86.4 ks'
def test_add():
    """Check Quantity.add with and without unit checking."""
    Quantity.reset_prefs()
    total = Quantity(0, '$')
    # Plain reals are accepted without unit checking.
    for amount in [1.23, 4.56, 7.89]:
        total = total.add(amount)
    assert total.render() == '$13.68'
    # check_units accepts unitless reals ...
    for amount in [1.23, 4.56, 8.89]:
        total = total.add(amount, check_units=True)
    assert total.render() == '$28.36'
    # ... and quantities with matching units ...
    for amount in [1.23, 4.56, 9.89]:
        total = total.add(Quantity(amount, '$'), check_units=True)
    assert total.render() == '$44.04'
    # ... but rejects quantities whose units differ.
    with pytest.raises(TypeError):
        total.add(Quantity(amount, 'lbs'), check_units=True)
def test_coversion():
    """Exercise UnitConversion.convert() for linear and affine conversions."""
    # NOTE(review): 'coversion' is a typo for 'conversion'; kept as-is so the
    # test's identity (pytest node id, the __main__ driver below) is unchanged.
    Quantity.reset_prefs()
    # Purely multiplicative conversion: 1 USD = 100000 BTC is intentionally
    # fictional; only the arithmetic matters here.
    conversion = UnitConversion('USD', 'BTC', 100000)
    assert str(conversion) == 'USD = 100000*BTC'
    result = conversion.convert(1, 'BTC', 'USD')
    assert str(result) == '100 kUSD'
    result = conversion.convert(1, 'USD', 'BTC')
    assert str(result) == '10 uBTC'
    # With no value given, convert() converts 1 of the from-units.
    result = conversion.convert(from_units='BTC', to_units='USD')
    assert str(result) == '100 kUSD'
    result = conversion.convert(from_units='USD', to_units='BTC')
    assert str(result) == '10 uBTC'
    # A single unit argument is taken as the from-units.
    result = conversion.convert('BTC')
    assert str(result) == '100 kUSD'
    result = conversion.convert('USD')
    assert str(result) == '10 uBTC'
    # A bare number defaults to the converter's from-units (BTC here).
    result = conversion.convert(10)
    assert str(result) == '1 MUSD'
    # Quantities carry their units, so convert() picks the direction itself.
    dollar = Quantity('200000 USD')
    bitcoin = conversion.convert(dollar)
    assert str(bitcoin) == '2 BTC'
    dollar = conversion.convert(bitcoin)
    assert str(dollar) == '200 kUSD'
    # Affine conversion (slope and offset): Fahrenheit from Celsius.
    conversion = UnitConversion('F', 'C', 1.8, 32)
    assert str(conversion) == 'F = 1.8*C + 32'
    result = conversion.convert(0, 'C', 'F')
    assert str(result) == '32 F'
    result = conversion.convert(32, to_units='C')
    assert str(result) == '0 C'
    result = conversion.convert(32, from_units='F')
    assert str(result) == '0 C'
    # Requesting two unknown units at once raises UnknownConversion.
    with pytest.raises(KeyError) as exception:
        result = conversion.convert(0, from_units='X', to_units='X')
    assert str(exception.value) == "unable to convert to 'X'."
    assert isinstance(exception.value, UnknownConversion)
    assert isinstance(exception.value, QuantiPhyError)
    assert isinstance(exception.value, KeyError)
    assert exception.value.args == ('X',)
    # An unknown to_units alone is tolerated (the C->F default is used, as
    # the assertion shows), but an unknown from_units alone is an error.
    result = conversion.convert(0, to_units='X')
    assert str(result) == '32 F'
    with pytest.raises(KeyError) as exception:
        result = conversion.convert(0, from_units='X')
    assert str(exception.value) == "unable to convert from 'X'."
    assert isinstance(exception.value, UnknownConversion)
    assert isinstance(exception.value, QuantiPhyError)
    assert isinstance(exception.value, KeyError)
    assert exception.value.args == ('X',)
def test_func():
    """Exercise user-defined forward/reverse functions as unit converters."""
    Quantity.reset_prefs()
    def from_dB(value):
        return 10**(value/20)
    def to_dB(value):
        return 20*math.log10(value)
    # NOTE: the helper function names appear verbatim in str(UnitConversion),
    # so the string assertions below depend on these exact names.
    vconverter = UnitConversion('V', 'dBV', from_dB, to_dB)
    assert str(vconverter) == 'V = from_dB(dBV), dBV = to_dB(V)'
    assert str(vconverter.convert(Quantity('100mV'))) == '-20 dBV'
    assert str(vconverter.convert(Quantity('-20dBV'))) == '100 mV'
    aconverter = UnitConversion('A', 'dBA', from_dB, to_dB)
    assert str(aconverter) == 'A = from_dB(dBA), dBA = to_dB(A)'
    assert str(aconverter.convert(Quantity('100mA'))) == '-20 dBA'
    assert str(aconverter.convert(Quantity('-20dBA'))) == '100 mA'
    # Registered conversions are also reachable through the 'p' format
    # specifier with target units, e.g. '{:pdBV}'.
    assert '{:pdBV}'.format(Quantity('100mV')) == '-20 dBV'
    assert '{:pdBV}'.format(Quantity('10V')) == '20 dBV'
    assert '{:pV}'.format(Quantity('-20 dBV')) == '0.1 V'
    assert '{:pV}'.format(Quantity('20 dBV')) == '10 V'
    assert '{:pdBA}'.format(Quantity('100mA')) == '-20 dBA'
    assert '{:pdBA}'.format(Quantity('10A')) == '20 dBA'
    assert '{:pA}'.format(Quantity('-20 dBA')) == '0.1 A'
    assert '{:pA}'.format(Quantity('20 dBA')) == '10 A'
if __name__ == '__main__':
    # As a debugging aid allow the tests to be run on their own, outside pytest.
    # This makes it easier to see and interpret any textual output.
    # Snapshot globals() before iterating so that names created during the
    # loop (including 'defined' itself) cannot mutate the dict being iterated.
    defined = dict(globals())
    for k, v in defined.items():
        if callable(v) and k.startswith('test_'):
            print()
            print('Calling:', k)
            print((len(k)+9)*'=')
            v()
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.