#!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Convert Android xml resources to API 14 compatible.
There are two reasons that we cannot just use API 17 attributes,
so we are generating another set of resources by this script.
1. paddingStart attribute can cause a crash on Galaxy Tab 2.
2. There is a bug that paddingStart does not override paddingLeft on
JB-MR1. This is fixed on JB-MR2. b/8654490
Therefore, this resource generation script can be removed when
we drop the support for JB-MR1.
Please refer to http://crbug.com/235118 for the details.
"""
import optparse
import os
import re
import shutil
import sys
import xml.dom.minidom as minidom
from util import build_utils
# Note that we are assuming 'android:' is an alias of
# the namespace 'http://schemas.android.com/apk/res/android'.
GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
# Almost all attributes that have "Start" or "End" in
# their names should be mapped.
ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
'drawableStart' : 'drawableLeft',
'layout_alignStart' : 'layout_alignLeft',
'layout_marginStart' : 'layout_marginLeft',
'layout_alignParentStart' : 'layout_alignParentLeft',
'layout_toStartOf' : 'layout_toLeftOf',
'paddingEnd' : 'paddingRight',
'drawableEnd' : 'drawableRight',
'layout_alignEnd' : 'layout_alignRight',
'layout_marginEnd' : 'layout_marginRight',
'layout_alignParentEnd' : 'layout_alignParentRight',
'layout_toEndOf' : 'layout_toRightOf'}
ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
in ATTRIBUTES_TO_MAP.iteritems())
ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
in ATTRIBUTES_TO_MAP.iteritems())
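# After prefixing, the map reads e.g. 'android:paddingStart' -> 'android:paddingLeft';
# ATTRIBUTES_TO_MAP_REVERSED inverts it for the deprecation check below.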
def IterateXmlElements(node):
"""minidom helper function that iterates all the element nodes.
Iteration order is pre-order depth-first."""
if node.nodeType == node.ELEMENT_NODE:
yield node
for child_node in node.childNodes:
for child_node_element in IterateXmlElements(child_node):
yield child_node_element
def ParseAndReportErrors(filename):
try:
return minidom.parse(filename)
except Exception:
import traceback
traceback.print_exc()
sys.stderr.write('Failed to parse XML file: %s\n' % filename)
sys.exit(1)
def AssertNotDeprecatedAttribute(name, value, filename):
"""Raises an exception if the given attribute is deprecated."""
msg = None
if name in ATTRIBUTES_TO_MAP_REVERSED:
msg = '{0} should use {1} instead of {2}'.format(filename,
ATTRIBUTES_TO_MAP_REVERSED[name], name)
elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
msg = '{0} should use start/end instead of left/right for {1}'.format(
filename, name)
if msg:
msg += ('\nFor background, see: http://android-developers.blogspot.com/'
'2013/03/native-rtl-support-in-android-42.html\n'
'If you have a legitimate need for this attribute, discuss with '
'[email protected] or [email protected]')
raise Exception(msg)
def WriteDomToFile(dom, filename):
"""Write the given dom to filename."""
build_utils.MakeDirectory(os.path.dirname(filename))
with open(filename, 'w') as f:
dom.writexml(f, '', ' ', '\n', encoding='utf-8')
def HasStyleResource(dom):
"""Return True if the dom is a style resource, False otherwise."""
root_node = IterateXmlElements(dom).next()
return bool(root_node.nodeName == 'resources' and
list(root_node.getElementsByTagName('style')))
def ErrorIfStyleResourceExistsInDir(input_dir):
"""If a style resource is in input_dir, raises an exception."""
for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
dom = ParseAndReportErrors(input_filename)
if HasStyleResource(dom):
raise Exception('error: style file ' + input_filename +
' should be under ' + input_dir +
'-v17 directory. Please refer to '
'http://crbug.com/243952 for the details.')
def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
"""Convert layout resource to API 14 compatible layout resource.
Args:
dom: Parsed minidom object to be modified.
filename: Filename that the DOM was parsed from.
assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
cause an exception to be thrown.
Returns:
True if dom is modified, False otherwise.
"""
is_modified = False
# Iterate all the elements' attributes to find attributes to convert.
for element in IterateXmlElements(dom):
for name, value in list(element.attributes.items()):
# Convert any API 17 Start/End attributes to Left/Right attributes.
# For example, from paddingStart="10dp" to paddingLeft="10dp"
# Note: gravity attributes are not necessary to convert because
# start/end values are backward-compatible. Explained at
# https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
if name in ATTRIBUTES_TO_MAP:
element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
del element.attributes[name]
is_modified = True
elif assert_not_deprecated:
AssertNotDeprecatedAttribute(name, value, filename)
return is_modified
def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
"""Convert style resource to API 14 compatible style resource.
Args:
dom: Parsed minidom object to be modified.
filename: Filename that the DOM was parsed from.
assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
cause an exception to be thrown.
Returns:
True if dom is modified, False otherwise.
"""
is_modified = False
for style_element in dom.getElementsByTagName('style'):
for item_element in style_element.getElementsByTagName('item'):
name = item_element.attributes['name'].value
value = item_element.childNodes[0].nodeValue
if name in ATTRIBUTES_TO_MAP:
item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
is_modified = True
elif assert_not_deprecated:
AssertNotDeprecatedAttribute(name, value, filename)
return is_modified
def GenerateV14LayoutResource(input_filename, output_v14_filename,
output_v17_filename):
"""Convert API 17 layout resource to API 14 compatible layout resource.
It is mostly a simple replacement on the attribute names:
s/Start/Left/ and s/End/Right/.
If the generated resource is identical to the original resource,
don't do anything. If not, write the generated resource to
output_v14_filename, and copy the original resource to output_v17_filename.
"""
dom = ParseAndReportErrors(input_filename)
is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
if is_modified:
# Write the generated resource.
WriteDomToFile(dom, output_v14_filename)
# Copy the original resource.
build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
shutil.copy2(input_filename, output_v17_filename)
def GenerateV14StyleResource(input_filename, output_v14_filename):
"""Convert API 17 style resources to API 14 compatible style resource.
Write the generated style resource to output_v14_filename.
It is mostly a simple replacement on the attribute names:
s/Start/Left/ and s/End/Right/.
"""
dom = ParseAndReportErrors(input_filename)
GenerateV14StyleResourceDom(dom, input_filename)
# Write the generated resource.
WriteDomToFile(dom, output_v14_filename)
def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
"""Convert layout resources to API 14 compatible resources in input_dir."""
for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
rel_filename = os.path.relpath(input_filename, input_dir)
output_v14_filename = os.path.join(output_v14_dir, rel_filename)
output_v17_filename = os.path.join(output_v17_dir, rel_filename)
GenerateV14LayoutResource(input_filename, output_v14_filename,
output_v17_filename)
def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
"""Convert style resources to API 14 compatible resources in input_dir."""
for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
rel_filename = os.path.relpath(input_filename, input_dir)
output_v14_filename = os.path.join(output_v14_dir, rel_filename)
GenerateV14StyleResource(input_filename, output_v14_filename)
def ParseArgs():
"""Parses command line options.
Returns:
An options object as from optparse.OptionsParser.parse_args()
"""
parser = optparse.OptionParser()
parser.add_option('--res-dir',
help='directory containing resources '
'used to generate v14 compatible resources')
parser.add_option('--res-v14-compatibility-dir',
help='output directory into which '
'v14 compatible resources will be generated')
parser.add_option('--stamp', help='File to touch on success')
options, args = parser.parse_args()
if args:
parser.error('No positional arguments should be given.')
# Check that required options have been provided.
required_options = ('res_dir', 'res_v14_compatibility_dir')
build_utils.CheckOptions(options, parser, required=required_options)
return options
def GenerateV14Resources(res_dir, res_v14_dir):
for name in os.listdir(res_dir):
if not os.path.isdir(os.path.join(res_dir, name)):
continue
dir_pieces = name.split('-')
resource_type = dir_pieces[0]
qualifiers = dir_pieces[1:]
api_level_qualifier_index = -1
api_level_qualifier = ''
for index, qualifier in enumerate(qualifiers):
if re.match('v[0-9]+$', qualifier):
api_level_qualifier_index = index
api_level_qualifier = qualifier
break
# Android pre-v17 API doesn't support RTL. Skip.
if 'ldrtl' in qualifiers:
continue
input_dir = os.path.abspath(os.path.join(res_dir, name))
# We also need to copy the original v17 resource to *-v17 directory
# because the generated v14 resource will hide the original resource.
output_v14_dir = os.path.join(res_v14_dir, name)
output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
# We only convert layout resources under layout*/, xml*/,
# and style resources under values*/.
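    # For example (illustrative paths): a converted res/layout/foo.xml is written
    # to <res_v14_dir>/layout/foo.xml with the original copied to
    # <res_v14_dir>/layout-v17/foo.xml, while res/values-v17/styles.xml is
    # converted into <res_v14_dir>/values/styles.xml.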
if resource_type in ('layout', 'xml'):
if not api_level_qualifier:
GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
output_v17_dir)
elif resource_type == 'values':
if api_level_qualifier == 'v17':
output_qualifiers = qualifiers[:]
del output_qualifiers[api_level_qualifier_index]
output_v14_dir = os.path.join(res_v14_dir,
'-'.join([resource_type] +
output_qualifiers))
GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
elif not api_level_qualifier:
ErrorIfStyleResourceExistsInDir(input_dir)
def main():
options = ParseArgs()
res_v14_dir = options.res_v14_compatibility_dir
build_utils.DeleteDirectory(res_v14_dir)
build_utils.MakeDirectory(res_v14_dir)
GenerateV14Resources(options.res_dir, res_v14_dir)
if options.stamp:
build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main())
import pprint
import difflib
from django.contrib.auth.models import User
from django.test import TestCase, Client
from django.utils import simplejson
from ..models import Build, Project
class PonyTests(TestCase):
urls = 'devmason_server.urls'
fixtures = ['authtestdata', 'devmason_server_test_data']
def setUp(self):
# Make this class's test client have a default Accept: header to
# trigger the dispatching to API methods.
self.client = Client(HTTP_ACCEPT='application/json')
# Tag us a build (this is hard from fixtures)
b = Build.objects.get(pk=1)
b.tags = 'python, django'
b.save()
def assertJsonEqual(self, response, expected):
try:
json = simplejson.loads(response.content)
except ValueError:
self.fail('Response was invalid JSON:\n%s' % response)
# inspired by
# http://code.google.com/p/unittest-ext/source/browse/trunk/unittestnew/case.py#698
        self.assert_(isinstance(json, dict), 'JSON response is not a dictionary: \n%r' % response.content)
self.assert_(isinstance(expected, dict), 'Expected argument is not a dictionary.')
if json != expected:
diff = difflib.ndiff(pprint.pformat(expected).splitlines(),
pprint.pformat(json).splitlines())
msg = ('\n' + '\n'.join(diff))
self.fail(msg)
class PackageListTests(PonyTests):
def test_get_package_list(self):
r = self.client.get('/')
self.assertJsonEqual(r, {
"latest_builds": [
{
"success": True,
"links": [
{
"href": "/pony/builds/1",
"allowed_methods": [
"GET"
],
"rel": "self"
},
{
"href": "/pony",
"allowed_methods": [
"GET",
"PUT",
"DELETE"
],
"rel": "project"
},
{
"href": "/pony/tags/django",
"allowed_methods": [
"GET"
],
"rel": "tag"
},
{
"href": "/pony/tags/python",
"allowed_methods": [
"GET"
],
"rel": "tag"
}
],
"tags": [
"django",
"python"
],
"started": "Mon, 19 Oct 2009 16:22:00 -0500",
"results": [
{
"name": "test",
"success": True,
"started": "Mon, 19 Oct 2009 16:21:30 -0500",
"errout": "",
"finished": "Mon, 19 Oct 2009 16:25:00 -0500",
"output": "OK"
},
{
"name": "checkout",
"success": True,
"started": "Mon, 19 Oct 2009 16:22:00 -0500",
"errout": "",
"finished": "Mon, 19 Oct 2009 16:21:00 -0500",
"output": "OK"
}
],
"finished": "Mon, 19 Oct 2009 16:25:00 -0500",
"client": {
"host": "example.com",
"arch": "linux-i386",
"user": ""
}
}
],
"projects": [
{
"owner": "",
"name": "pony",
"links": [
{
"href": "/pony",
"allowed_methods": [
"GET",
"PUT",
"DELETE"
],
"rel": "self"
},
{
"href": "/pony/builds",
"allowed_methods": [
"GET",
"POST"
],
"rel": "build-list"
},
{
"href": "/pony/builds/latest",
"allowed_methods": [
"GET"
],
"rel": "latest-build"
},
{
"href": "/pony/tags",
"allowed_methods": [
"GET"
],
"rel": "tag-list"
}
]
}
],
"builds": {
"pony": {
"success": True,
"links": [
{
"href": "/pony/builds/1",
"allowed_methods": [
"GET"
],
"rel": "self"
},
{
"href": "/pony",
"allowed_methods": [
"GET",
"PUT",
"DELETE"
],
"rel": "project"
},
{
"href": "/pony/tags/django",
"allowed_methods": [
"GET"
],
"rel": "tag"
},
{
"href": "/pony/tags/python",
"allowed_methods": [
"GET"
],
"rel": "tag"
}
],
"tags": [
"django",
"python"
],
"started": "Mon, 19 Oct 2009 16:22:00 -0500",
"results": [
{
"name": "test",
"success": True,
"started": "Mon, 19 Oct 2009 16:21:30 -0500",
"errout": "",
"finished": "Mon, 19 Oct 2009 16:25:00 -0500",
"output": "OK"
},
{
"name": "checkout",
"success": True,
"started": "Mon, 19 Oct 2009 16:22:00 -0500",
"errout": "",
"finished": "Mon, 19 Oct 2009 16:21:00 -0500",
"output": "OK"
}
],
"finished": "Mon, 19 Oct 2009 16:25:00 -0500",
"client": {
"host": "example.com",
"arch": "linux-i386",
"user": ""
}
}
},
"links": [
{
"href": "/",
"allowed_methods": [
"GET"
],
"rel": "self"
}
]
}
)
class PackageDetailTests(PonyTests):
def test_get_package_detail(self):
r = self.client.get('/pony')
self.assertJsonEqual(r, {
u'name': u'pony',
u'owner': u'',
u'links': [
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'POST'],
u'href': u'/pony/builds',
u'rel': u'build-list'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds/latest',
u'rel': u'latest-build'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags',
u'rel': u'tag-list'},
]
})
def test_create_new_package_fails_when_not_authenticated(self):
r = self.client.put('/proj', data='{"name": "myproject"}',
content_type="application/json")
self.assertEqual(r.status_code, 401) # 401 unauthorized
self.assertEqual(r['WWW-Authenticate'], 'Basic realm="pony"')
def test_create_new_package_works_with_existing_user(self):
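        # HTTP Basic credentials are 'user:password' base64-encoded; this relies on
        # Python 2's str.encode('base64').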
auth = "Basic %s" % "testclient:password".encode("base64").strip()
r = self.client.put('/proj', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 201, r.content) # 201 created
self.assertEqual(r['Location'], 'http://testserver/proj')
r = self.client.get('/proj')
self.assertJsonEqual(r, {
u'name': u'My Project',
u'owner': u'testclient',
u'links': [
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/proj',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'POST'],
u'href': u'/proj/builds',
u'rel': u'build-list'},
{u'allowed_methods': [u'GET'],
u'href': u'/proj/builds/latest',
u'rel': u'latest-build'},
{u'allowed_methods': [u'GET'],
u'href': u'/proj/tags',
u'rel': u'tag-list'},
]
})
def test_create_new_package_creates_users_automatically(self):
auth = "Basic %s" % "newuser:password".encode("base64").strip()
r = self.client.put('/proj', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 201, r.content) # 201 created
self.assertEqual(r['Location'], 'http://testserver/proj')
# Check that the user got created
        u = User.objects.get(username='newuser')
self.assertEqual(u.check_password('password'), True)
def test_update_package_succeeds_when_same_user(self):
auth = "Basic %s" % "newuser:password".encode("base64").strip()
r = self.client.put('/proj', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 201) # 201 created
r = self.client.put('/proj', data='{"name": "Renamed"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertJsonEqual(r, {
u'name': u'Renamed',
u'owner': u'newuser',
u'links': [
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/proj',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'POST'],
u'href': u'/proj/builds',
u'rel': u'build-list'},
{u'allowed_methods': [u'GET'],
u'href': u'/proj/builds/latest',
u'rel': u'latest-build'},
{u'allowed_methods': [u'GET'],
u'href': u'/proj/tags',
u'rel': u'tag-list'},
]
})
def test_update_package_fails_when_different_user(self):
# the pony project was created with no auth, so this'll always fail
auth = "Basic %s" % "newuser:password".encode("base64").strip()
r = self.client.put('/pony', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 403) # 403 forbidden
def test_delete_package_succeeds_when_user_matches(self):
# Create a package...
auth = "Basic %s" % "newuser:password".encode("base64").strip()
r = self.client.put('/proj', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 201) # 201 created
# ... and delete it
r = self.client.delete('/proj', HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 204) # 204 no content
def test_delete_package_fails_when_different_user(self):
# Create a package...
auth = "Basic %s" % "newuser:password".encode("base64").strip()
r = self.client.put('/proj', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 201) # 201 created
# ... and delete it with different auth
auth = "Basic %s" % "testclient:password".encode("base64").strip()
r = self.client.delete('/proj', HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 403) # 403 forbidden
def test_delete_package_should_not_create_users(self):
# Create a package...
auth = "Basic %s" % "newuser:password".encode("base64").strip()
r = self.client.put('/proj', data='{"name": "My Project"}',
content_type="application/json",
HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 201) # 201 created
# ... and delete it with a new user
auth = "Basic %s" % "newuwer2:password".encode("base64").strip()
r = self.client.delete('/proj', HTTP_AUTHORIZATION=auth)
self.assertEqual(r.status_code, 403) # 403 forbidden
# newuser2 shouldn't have been created
self.assertRaises(User.DoesNotExist, User.objects.get, username='newuser2')
class BuildListTests(PonyTests):
def test_get_package_build_list(self):
r = self.client.get('/pony/builds')
self.assertJsonEqual(r, {
u'count': 1,
u'num_pages': 1,
u'page': 1,
u'paginated': False,
u'per_page': 25,
u'project': {
u'name': u'pony',
u'owner': u'',
u'links': [
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'POST'],
u'href': u'/pony/builds',
u'rel': u'build-list'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds/latest',
u'rel': u'latest-build'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags',
u'rel': u'tag-list'}
],
},
u'builds': [{
u'success': True,
u'started': u'Mon, 19 Oct 2009 16:22:00 -0500',
u'finished': u'Mon, 19 Oct 2009 16:25:00 -0500',
u'tags': [u'django', u'python'],
u'client': {u'host': u'example.com', u'user': u'', u'arch': u'linux-i386'},
u'results': [
{u'errout': u'',
u'finished': u'Mon, 19 Oct 2009 16:25:00 -0500',
u'name': u'test',
u'output': u'OK',
u'started': u'Mon, 19 Oct 2009 16:21:30 -0500',
u'success': True},
{u'errout': u'',
u'finished': u'Mon, 19 Oct 2009 16:21:00 -0500',
u'name': u'checkout',
u'output': u'OK',
u'started': u'Mon, 19 Oct 2009 16:22:00 -0500',
u'success': True}
],
u'links': [
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds/1',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'project'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/django',
u'rel': u'tag'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/python',
u'rel': u'tag'}
]
}],
u'links': [
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'project'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds/latest',
u'rel': u'latest-build'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds?per_page=25&page=1',
u'rel': u'self'},
]
})
def test_post_new_build(self):
build = {
u'success': False,
u'started': u'Mon, 26 Oct 2009 16:22:00 -0500',
u'finished': u'Mon, 26 Oct 2009 16:25:00 -0500',
u'tags': [u'pony', u'build', u'rocks'],
u'client': {u'host': u'example.com', u'user': u'', u'arch': u'linux-i386', u'extra': u'hi!'},
u'results': [
{u'errout': u'',
u'finished': u'Mon, 26 Oct 2009 16:25:00 -0500',
u'name': u'test',
u'output': u'Step 1: OK',
u'started': u'Mon, 26 Oct 2009 16:21:30 -0500',
u'success': True},
{u'errout': u'',
u'finished': u'Mon, 26 Oct 2009 16:21:00 -0500',
u'name': u'checkout',
u'output': u'Step 2: OK',
u'started': u'Mon, 26 Oct 2009 16:22:00 -0500',
u'success': False}
],
}
r = self.client.post('/pony/builds', data=simplejson.dumps(build),
content_type='application/json')
self.assertEqual(r.status_code, 201, r) # 201 created
self.assertEqual(r['Location'], 'http://testserver/pony/builds/2')
# make sure the build info came through okay
r = self.client.get('/pony/builds/2')
        # TODO: Re-enable this assertion once 'finished' can be mocked to a fixed datetime.
"""
self.assertJsonEqual(r, {
u'success': False,
u'started': u'Mon, 26 Oct 2009 16:22:00 -0500',
u'finished': u'Mon, 26 Oct 2009 16:25:00 -0500',
u'tags': [u'build', u'pony', u'rocks'],
u'client': {u'host': u'example.com', u'user': u'', u'arch': u'linux-i386', u'extra': u'hi!'},
u'results': [
{u'errout': u'',
u'finished': u'Mon, 26 Oct 2009 16:25:00 -0500',
u'name': u'test',
u'output': u'Step 1: OK',
u'started': u'Mon, 26 Oct 2009 16:21:30 -0500',
u'success': True},
{u'errout': u'',
u'finished': u'Mon, 26 Oct 2009 16:21:00 -0500',
u'name': u'checkout',
u'output': u'Step 2: OK',
u'started': u'Mon, 26 Oct 2009 16:22:00 -0500',
u'success': False}
],
u'links': [
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds/2',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'project'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/build',
u'rel': u'tag'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/pony',
u'rel': u'tag'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/rocks',
u'rel': u'tag'}
],
})
"""
class LatestBuildTests(PonyTests):
def test_get_latest_build(self):
r = self.client.get('/pony/builds/latest')
self.assertEqual(r.status_code, 302)
self.assertEqual(r['Location'], 'http://testserver/pony/builds/1')
class TagListTests(PonyTests):
def test_get_tag_list(self):
r = self.client.get('/pony/tags')
self.assertJsonEqual(r, {
u'tags': [u'django', u'python'],
u'links': [
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'project'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/django',
u'rel': u'tag'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/python',
u'rel': u'tag'}
]
})
class TagDetailTests(PonyTests):
def test_get_tag_detail(self):
r = self.client.get('/pony/tags/django')
self.assertJsonEqual(r, {
u'count': 1,
u'num_pages': 1,
u'page': 1,
u'paginated': False,
u'per_page': 25,
u'tags': [u'django'],
u'builds': [{
u'success': True,
u'started': u'Mon, 19 Oct 2009 16:22:00 -0500',
u'finished': u'Mon, 19 Oct 2009 16:25:00 -0500',
u'tags': [u'django', u'python'],
u'client': {u'host': u'example.com', u'user': u'', u'arch': u'linux-i386'},
u'results': [
{u'errout': u'',
u'finished': u'Mon, 19 Oct 2009 16:25:00 -0500',
u'name': u'test',
u'output': u'OK',
u'started': u'Mon, 19 Oct 2009 16:21:30 -0500',
u'success': True},
{u'errout': u'',
u'finished': u'Mon, 19 Oct 2009 16:21:00 -0500',
u'name': u'checkout',
u'output': u'OK',
u'started': u'Mon, 19 Oct 2009 16:22:00 -0500',
u'success': True}
],
u'links': [
{u'allowed_methods': [u'GET'],
u'href': u'/pony/builds/1',
u'rel': u'self'},
{u'allowed_methods': [u'GET', u'PUT', u'DELETE'],
u'href': u'/pony',
u'rel': u'project'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/django',
u'rel': u'tag'},
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/python',
u'rel': u'tag'}
]
}],
u'links': [
{u'allowed_methods': [u'GET'],
u'href': u'/pony/tags/django?per_page=25&page=1',
u'rel': u'self'},
]
})
class LatestTagsTests(PonyTests):
def test_get_latest_tagged_build(self):
r = self.client.get('/pony/tags/django/latest')
self.assertEqual(r.status_code, 302)
self.assertEqual(r['Location'], 'http://testserver/pony/builds/1')
def test_get_latest_tagged_build_404s_with_invalid_tags(self):
r = self.client.get('/pony/tags/nope/latest')
self.assertEqual(r.status_code, 404)
# region Description
"""
test_network.py: Unit tests for Raw-packet network classes
Author: Vladimir Ivanov
License: MIT
Copyright 2020, Raw-packet Project
"""
# endregion
# region Import
from sys import path
from os.path import dirname, abspath
import unittest
# endregion
# region Authorship information
__author__ = 'Vladimir Ivanov'
__copyright__ = 'Copyright 2020, Raw-packet Project'
__credits__ = ['']
__license__ = 'MIT'
__version__ = '0.2.1'
__maintainer__ = 'Vladimir Ivanov'
__email__ = '[email protected]'
__status__ = 'Development'
# endregion
# region Main class - NetworkTest
class NetworkTest(unittest.TestCase):
# region Properties
path.append(dirname(dirname(dirname(dirname(dirname(abspath(__file__)))))))
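    # The path.append() above adds a directory five levels up (presumably the
    # repository root) to sys.path so the raw_packet imports below resolve when
    # the tests are run from the source tree.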
from raw_packet.Utils.base import Base
from raw_packet.Utils.network import RawEthernet, RawARP, RawIPv4, RawUDP, RawDNS, RawICMPv4, RawDHCPv4
from raw_packet.Utils.network import RawIPv6, RawICMPv6, RawDHCPv6
base: Base = Base()
ethernet: RawEthernet = RawEthernet()
arp: RawARP = RawARP()
ipv4: RawIPv4 = RawIPv4()
ipv6: RawIPv6 = RawIPv6()
udp: RawUDP = RawUDP()
dns: RawDNS = RawDNS()
icmpv4: RawICMPv4 = RawICMPv4()
dhcpv4: RawDHCPv4 = RawDHCPv4()
icmpv6: RawICMPv6 = RawICMPv6()
dhcpv6: RawDHCPv6 = RawDHCPv6()
# endregion
# region Test RawEthernet methods
def test_ethernet_init(self):
self.assertIn('00:18:de', self.ethernet.macs)
def test_ethernet_make_random_mac(self):
self.assertTrue(self.base.mac_address_validation(self.ethernet.make_random_mac()))
def test_ethernet_convert_mac(self):
# Convert string MAC address to bytes
self.assertEqual(self.ethernet.convert_mac('30:31:32:33:34:35', True, 41), b'012345')
# Convert bytes MAC address to string
self.assertEqual(self.ethernet.convert_mac(b'012345', True, 41), '30:31:32:33:34:35')
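        # (Bytes 0x30-0x35 are ASCII '0'-'5', which is why '30:31:32:33:34:35'
        # packs to b'012345' and vice versa.)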
# Bad MAC address string
self.assertIsNone(self.ethernet.convert_mac('30:31:32:33:34:356', False, 41))
# Bad MAC address string
self.assertIsNone(self.ethernet.convert_mac('30:31:32:33:34567', False, 41))
# Bad MAC address bytes
self.assertIsNone(self.ethernet.convert_mac(b'01234', False, 41))
def test_ethernet_get_mac_prefix(self):
# Prefix from MAC address string
self.assertEqual(self.ethernet.get_mac_prefix('ab:cd:ef:01:23:45', 3, True, 42), 'ABCDEF')
# Prefix from MAC address bytes
self.assertEqual(self.ethernet.get_mac_prefix(b'012345', 3, True, 42), '303132')
# Bad MAC address string
self.assertIsNone(self.ethernet.get_mac_prefix('30:31:32:33:34:356', 3, False, 42))
# Bad MAC address string
self.assertIsNone(self.ethernet.get_mac_prefix('30:31:32:33:34567', 3, False, 42))
# Bad MAC address bytes
self.assertIsNone(self.ethernet.get_mac_prefix(b'01234', 3, False, 42))
def test_ethernet_parse_header(self):
# Normal packet
self.assertEqual(self.ethernet.parse_header(b'6789@A012345\x08\x00', True, 43),
{'destination': '36:37:38:39:40:41', 'source': '30:31:32:33:34:35', 'type': 2048})
# Bad packet
self.assertIsNone(self.ethernet.parse_header(b'6789@A012345\x08\x00\x01', False, 43))
def test_ethernet_make_header(self):
# MAC addresses string
self.assertEqual(self.ethernet.make_header('30:31:32:33:34:35', '36:37:38:39:40:41', 2048, True, 44),
b'6789@A012345\x08\x00')
# Bad first MAC address bytes
self.assertIsNone(self.ethernet.make_header('30:31:32:33:34567', '36:37:38:39:40:41', 2048, False, 44))
# Bad second MAC address bytes
self.assertIsNone(self.ethernet.make_header('30:31:32:33:34:56', '36:37:38:39:40123', 2048, False, 44))
# Bad network type
self.assertIsNone(self.ethernet.make_header('30:31:32:33:34:56', '36:37:38:39:40:41', 123123, False, 44))
# endregion
# region Test RawARP methods
def test_arp_parse_packet(self):
# Normal packet
self.assertEqual(self.arp.parse_packet(b'\x00\x01\x08\x00\x06\x04\x00\x01\x01#Eg\x89\n\xc0\xa8\x01\x01\x01' +
b'#Eg\x89\x0b\xc0\xa8\x01\x02', True, 45),
{'hardware-type': 1, 'protocol-type': 2048, 'hardware-size': 6, 'protocol-size': 4,
'opcode': 1, 'sender-mac': '01:23:45:67:89:0a', 'sender-ip': '192.168.1.1',
'target-mac': '01:23:45:67:89:0b', 'target-ip': '192.168.1.2'})
# Bad packet
self.assertIsNone(self.arp.parse_packet(b'\x00\x01\x08\x00\x06\x04\x00\x01\x01#Eg\x89\n\xc0\xa8\x01\x01\x01' +
b'#Eg\x89\x0b\xc0\xa8\x01\x02\x03', False, 45))
def test_arp_make_packet(self):
# Normal
self.assertEqual(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 6, 4,
True, 46),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x06\x00\x01\x08\x00\x06\x04\x00\x01\x01#Eg\x89\n\xc0\xa8' +
b'\x01\x01\x01#Eg\x89\x0b\xc0\xa8\x01\x02')
# Bad ethernet src MAC address
self.assertIsNone(self.arp.make_packet('01:23:45:67:890ab', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 6, 4,
False, 46))
# Bad ethernet dst MAC address
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:890ab', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 6, 4,
False, 46))
# Bad sender MAC address
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0a', '01:23:45:67:890ab',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 6, 4,
False, 46))
# Bad target MAC address
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0a', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:890ab', '192.168.1.2', 1, 1, 2048, 6, 4,
False, 46))
# Bad sender IP address
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.300', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 6, 4,
False, 46))
# Bad target IP address
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.400', 1, 1, 2048, 6, 4,
False, 46))
# Bad ARP opcode
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 123123, 1, 2048, 6, 4,
False, 46))
# Bad hardware type
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 123123, 2048, 6, 4,
False, 46))
# Bad protocol type
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 123123, 6, 4,
False, 46))
# Bad hardware size
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 123123, 4,
False, 46))
# Bad protocol size
self.assertIsNone(self.arp.make_packet('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', 1, 1, 2048, 6, 123123,
False, 46))
def test_arp_make_request(self):
# Normal
self.assertEqual(self.arp.make_request('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', True, 47),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x06\x00\x01\x08\x00\x06\x04\x00\x01\x01#Eg\x89\n' +
b'\xc0\xa8\x01\x01\x01#Eg\x89\x0b\xc0\xa8\x01\x02')
def test_arp_make_response(self):
# Normal
self.assertEqual(self.arp.make_response('01:23:45:67:89:0a', '01:23:45:67:89:0b', '01:23:45:67:89:0a',
'192.168.1.1', '01:23:45:67:89:0b', '192.168.1.2', True, 48),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x06\x00\x01\x08\x00\x06\x04\x00\x02\x01#Eg\x89\n' +
b'\xc0\xa8\x01\x01\x01#Eg\x89\x0b\xc0\xa8\x01\x02')
# endregion
# region Test RawIPv4 methods
def test_ipv4_make_random_ip(self):
self.assertTrue(self.base.ip_address_validation(self.ipv4.make_random_ip()))
def test_ipv4_parse_header(self):
# Normal
self.assertEqual(self.ipv4.parse_header(b'E\x00\x00\x1c\x01\x00\x00\x00@\x11\xf6}' +
b'\xc0\xa8\x01\x01\xc0\xa8\x01\x02', True, 49),
{'version': 4, 'length': 5, 'dscp_ecn': 0, 'total-length': 28, 'identification': 256,
'flags': 0, 'fragment-offset': 0, 'time-to-live': 64, 'protocol': 17, 'checksum': 63101,
'source-ip': '192.168.1.1', 'destination-ip': '192.168.1.2'})
# Bad packet
self.assertIsNone(self.ipv4.parse_header(b'\x61\x00\x00\x1c\x8d/\x00\x00@\x11jN' +
b'\xc0\xa8\x01\x01\xc0\xa8\x01\x02', False, 49))
# Bad packet
self.assertIsNone(self.ipv4.parse_header(b'\x61\x00\x00\x1c\x8d/\x00\x00@\x11jN' +
b'\xc0\xa8\x01\x01\xc0\xa8\x01', False, 49))
def test_ipv4_make_header(self):
# Normal
self.assertEqual(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.2', data_len=0,
transport_protocol_len=8, transport_protocol_type=17, ttl=64,
identification=1, exit_on_failure=True, exit_code=50),
b'E\x00\x00\x1c\x01\x00\x00\x00@\x11\xf6}\xc0\xa8\x01\x01\xc0\xa8\x01\x02')
# Bad source IP
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.300', destination_ip='192.168.1.2', data_len=0,
transport_protocol_len=8, transport_protocol_type=17, ttl=64,
identification=1, exit_on_failure=False, exit_code=50))
# Bad destination IP
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.400', data_len=0,
transport_protocol_len=8, transport_protocol_type=17, ttl=64,
identification=1, exit_on_failure=False, exit_code=50))
# Bad identification
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.2', data_len=0,
transport_protocol_len=8, transport_protocol_type=17, ttl=64,
identification=123123, exit_on_failure=False, exit_code=50))
# Bad data length
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.2', data_len=123123,
transport_protocol_len=8, transport_protocol_type=17, ttl=64,
identification=1, exit_on_failure=False, exit_code=50))
# Bad transport protocol header length
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.2', data_len=0,
transport_protocol_len=123123, transport_protocol_type=17, ttl=64,
identification=1, exit_on_failure=False, exit_code=50))
# Bad transport protocol type
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.2', data_len=0,
transport_protocol_len=8, transport_protocol_type=123123, ttl=64,
identification=1, exit_on_failure=False, exit_code=50))
# Bad ttl
self.assertIsNone(self.ipv4.make_header(source_ip='192.168.1.1', destination_ip='192.168.1.2', data_len=0,
transport_protocol_len=8, transport_protocol_type=17, ttl=123123,
identification=1, exit_on_failure=False, exit_code=50))
# endregion
# region Test RawIPv6 methods
def test_ipv6_make_random_ip(self):
# Normal
self.assertTrue(self.base.ipv6_address_validation(self.ipv6.make_random_ip(octets=3,
prefix='fd00::',
exit_on_failure=True,
exit_code=51)))
# Bad prefix
self.assertIsNone(self.ipv6.make_random_ip(octets=1, prefix='fd00:::', exit_on_failure=False, exit_code=51))
# Bad octets count
self.assertIsNone(self.ipv6.make_random_ip(octets=123, prefix='fd00::', exit_on_failure=False, exit_code=51))
def test_ipv6_pack_addr(self):
# Normal
self.assertEqual(self.ipv6.pack_addr(ipv6_address='3132:3334::1', exit_on_failure=True, exit_code=52),
b'1234\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01')
# Bad IPv6 address
self.assertIsNone(self.ipv6.pack_addr(ipv6_address='fd00:::1', exit_on_failure=False, exit_code=52))
def test_ipv6_parse_header(self):
# Normal
self.assertEqual(self.ipv6.parse_header(b'`\x00\x00\x00\x00\x08\x11@\xfd\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x02', True, 53),
{'version': 6, 'traffic-class': 0, 'flow-label': 0, 'payload-length': 8, 'next-header': 17,
'hop-limit': 64, 'source-ip': 'fd00::1', 'destination-ip': 'fd00::2'})
# Bad packet
self.assertIsNone(self.ipv6.parse_header(b'`\x00\x00\x00\x00\x08\x11@\xfd\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00', False, 53))
# Bad packet
self.assertIsNone(self.ipv6.parse_header(b'E\x00\x00\x00\x00\x08\x11@\xfd\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x02', False, 53))
def test_ipv6_make_header(self):
# Normal
self.assertEqual(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00::2', traffic_class=0,
flow_label=0, payload_len=8, next_header=17, hop_limit=64,
exit_on_failure=True, exit_code=54),
b'`\x00\x00\x00\x00\x08\x11@\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02')
# Bad source IP
self.assertIsNone(self.ipv6.make_header(source_ip='fd00:::1', destination_ip='fd00::2', traffic_class=0,
flow_label=0, payload_len=8, next_header=17, hop_limit=64,
exit_on_failure=False, exit_code=54))
# Bad destination IP
self.assertIsNone(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00:::2', traffic_class=0,
flow_label=0, payload_len=8, next_header=17, hop_limit=64,
exit_on_failure=False, exit_code=54))
# Bad traffic class
self.assertIsNone(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00::2', traffic_class=123123123,
flow_label=0, payload_len=8, next_header=17, hop_limit=64,
exit_on_failure=False, exit_code=54))
# Bad flow label
self.assertIsNone(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00::2', traffic_class=0,
flow_label=123123123123, payload_len=8, next_header=17, hop_limit=64,
exit_on_failure=False, exit_code=54))
# Bad payload len
self.assertIsNone(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00::2', traffic_class=0,
flow_label=0, payload_len=123123123123, next_header=17, hop_limit=64,
exit_on_failure=False, exit_code=54))
# Bad next header
self.assertIsNone(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00::2', traffic_class=0,
flow_label=0, payload_len=8, next_header=123123123123123, hop_limit=64,
exit_on_failure=False, exit_code=54))
# Bad hop limit
self.assertIsNone(self.ipv6.make_header(source_ip='fd00::1', destination_ip='fd00::2', traffic_class=0,
flow_label=0, payload_len=8, next_header=17, hop_limit=123123123123123,
exit_on_failure=False, exit_code=54))
# endregion
# region Test RawUDP methods
def test_udp_parse_header(self):
# Normal
self.assertEqual(self.udp.parse_header(packet=b'\x14\xe9\x14\xe9\x00\x08\xdc\x07',
exit_on_failure=True, exit_code=55),
{'source-port': 5353, 'destination-port': 5353, 'length': 8, 'checksum': 56327})
# Bad packet length
self.assertIsNone(self.udp.parse_header(packet=b'\x14\xe9\x14\xe9\x00\x08\xdc',
exit_on_failure=False, exit_code=55))
def test_udp_make_header(self):
# Normal
self.assertEqual(self.udp.make_header(source_port=5353, destination_port=5353, data_length=0,
exit_on_failure=True, exit_code=56), b'\x14\xe9\x14\xe9\x00\x08\x00\x00')
# Bad source port
self.assertIsNone(self.udp.make_header(source_port=123123, destination_port=5353, data_length=0,
exit_on_failure=False, exit_code=56))
# Bad destination port
self.assertIsNone(self.udp.make_header(source_port=5353, destination_port=123123, data_length=0,
exit_on_failure=False, exit_code=56))
# Bad data length
self.assertIsNone(self.udp.make_header(source_port=5353, destination_port=5353, data_length=123123,
exit_on_failure=False, exit_code=56))
def test_udp_make_header_with_ipv6_checksum(self):
# Normal
self.assertEqual(self.udp.make_header_with_ipv6_checksum(ipv6_src='fd00::1', ipv6_dst='fd00::2', port_src=5353,
port_dst=5353, payload_len=0, payload_data=b'',
exit_on_failure=True, exit_code=57),
b'\x14\xe9\x14\xe9\x00\x08\xdc\x07')
# Bad source IPv6 address
self.assertIsNone(self.udp.make_header_with_ipv6_checksum(ipv6_src='fd00:::1', ipv6_dst='fd00::2',
port_src=5353, port_dst=5353, payload_len=0,
payload_data=b'', exit_on_failure=False,
exit_code=57))
# Bad destination IPv6 address
self.assertIsNone(self.udp.make_header_with_ipv6_checksum(ipv6_src='fd00::1', ipv6_dst='fd00:::2',
port_src=5353, port_dst=5353, payload_len=0,
payload_data=b'', exit_on_failure=False,
exit_code=57))
# Bad source port
self.assertIsNone(self.udp.make_header_with_ipv6_checksum(ipv6_src='fd00::1', ipv6_dst='fd00::2',
port_src=123123, port_dst=5353, payload_len=0,
payload_data=b'', exit_on_failure=False,
exit_code=57))
# Bad destination port
self.assertIsNone(self.udp.make_header_with_ipv6_checksum(ipv6_src='fd00::1', ipv6_dst='fd00::2',
port_src=5353, port_dst=123123, payload_len=0,
payload_data=b'', exit_on_failure=False,
exit_code=57))
# Bad payload length
self.assertIsNone(self.udp.make_header_with_ipv6_checksum(ipv6_src='fd00::1', ipv6_dst='fd00::2',
port_src=5353, port_dst=5353, payload_len=123123,
payload_data=b'', exit_on_failure=False,
exit_code=57))
# endregion
# region Test RawDNS methods
def test_dns_get_top_level_domain(self):
# Normal
self.assertEqual(self.dns.get_top_level_domain(name='www.test.com'), 'test.com')
# Bad name
self.assertEqual(self.dns.get_top_level_domain(name='test'), 'test')
def test_dns_pack_dns_name(self):
# Normal
self.assertEqual(self.dns.pack_dns_name(name='test.com', exit_on_failure=True, exit_code=65),
b'\x04test\x03com\x00')
# Bad name
self.assertIsNone(self.dns.pack_dns_name(name='AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' +
'.com', exit_on_failure=False, exit_code=65))
def test_dns_parse_packet(self):
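        # flags 33152 == 0x8180: a standard DNS response with recursion desired,
        # recursion available and RCODE 0 (no error).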
self.assertEqual(self.dns.parse_packet(packet=b'\x00\x01\x81\x80\x00\x01\x00\x01\x00\x00\x00\x00' +
b'\x04test\x03com\x00\x00\x01\x00\x01\x04test\x03com\x00' +
b'\x00\x01\x00\x01\x00\x00\xff\xff\x00\x04\xc0\xa8\x01\x01',
exit_on_failure=True, exit_code=67),
{'additional-rrs': 0, 'answer-rrs': 1, 'authority-rrs': 0, 'questions': 1, 'flags': 33152,
'transaction-id': 1,
'answers': [
{'address': '192.168.1.1',
'class': 1,
'name': 'test.com.',
'ttl': 65535,
'type': 1}],
'queries': [{'class': 1,
'name': 'test.com.',
'type': 1}]})
def test_dns_unpack_dns_name(self):
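        # Packed names containing b'\xc0...' bytes use DNS name compression
        # pointers (RFC 1035, section 4.1.4); the optional 'name' argument
        # appears to supply the referenced suffix in these tests.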
self.assertEqual(self.dns.unpack_dns_name(packed_name=b'\x03www\x04test\x03com\x00'), 'www.test.com.')
self.assertEqual(self.dns.unpack_dns_name(packed_name=b'\x04mail\xc0\x11', name='pop3.test.com'),
'mail.test.com')
self.assertEqual(self.dns.unpack_dns_name(packed_name=b'\xc0\x10', name='test.com'), 'test.com')
self.assertEqual(self.dns.unpack_dns_name(packed_name=b'\x03www\xc0\x0c', name='test.com'), 'www.test.com')
def test_dns_make_ipv4_request_packet(self):
# Normal
self.assertEqual(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x006\x01\x00\x00\x00@\x11\xf6c\xc0\xa8\x01\x01' +
b'\xc0\xa8\x01\x02\x14\xe9\x005\x00"\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00' +
b'\x04test\x03com\x00\x00\x01\x00\x01')
# Bad source MAC address
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:890ab',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad destination MAC address
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:890ab',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad source IPv4 address
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.300', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad destination IPv4 address
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.400',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad source UDP port
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=123123, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad destination UDP port
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=123123, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad transaction id
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=123123123,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad query type
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 123123, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad query class
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 123123, 'name': 'test.com'}],
flags=0))
# Bad flags
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=123123))
# Bad queries
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'name': 'test.com'}],
flags=0))
# Bad queries
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'class': 1, 'name': 'test.com'}],
flags=0))
# Bad queries
self.assertIsNone(self.dns.make_ipv4_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2',
ip_ttl=64, ip_ident=1,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'class': 1, 'type': 1}],
flags=0))
def test_dns_make_ipv6_request_packet(self):
# Normal
self.assertEqual(self.dns.make_ipv6_request_packet(ethernet_src_mac='01:23:45:67:89:0a', ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00::2', ip_ttl=64,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00"\x11@\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x14\xe9\x005\x00"B)\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00' +
b'\x04test\x03com\x00\x00\x01\x00\x01')
# Bad source IPv6 address
self.assertIsNone(self.dns.make_ipv6_request_packet(ethernet_src_mac='01:23:45:67:89:0a', ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00:::1', ip_dst='fd00::2', ip_ttl=64,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
# Bad destination IPv6 address
self.assertIsNone(self.dns.make_ipv6_request_packet(ethernet_src_mac='01:23:45:67:89:0a', ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00:::2', ip_ttl=64,
udp_src_port=5353, udp_dst_port=53, transaction_id=1,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
flags=0))
def test_dns_make_response_packet(self):
# Normal IPv4 response
self.assertEqual(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_dst='192.168.1.2', ip_ttl=64,
ip_ident=1, udp_src_port=53, udp_dst_port=5353, transaction_id=1,
flags=0x8180,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 1, 'class': 1,
'ttl': 65535, 'address': '192.168.1.1'}],
name_servers={}, exit_on_failure=True),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x00F\x01\x00\x00\x00@\x11\xf6S\xc0\xa8\x01\x01' +
b'\xc0\xa8\x01\x02\x005\x14\xe9\x002\xb5{\x00\x01\x81\x80\x00\x01\x00\x01\x00\x00\x00\x00' +
b'\x04test\x03com\x00\x00\x01\x00\x01\xc0\x0c\x00\x01\x00\x01\x00\x00\xff\xff\x00\x04\xc0' +
b'\xa8\x01\x01')
# Normal IPv6 response
self.assertEqual(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00::2', ip_ttl=64, ip_ident=1,
udp_src_port=53, udp_dst_port=5353, transaction_id=1,
flags=0x8180,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 28, 'class': 1,
'ttl': 65535, 'address': 'fd00::1'}],
name_servers={}, exit_on_failure=True),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00>\x11@\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x005\x14\xe9\x00>\x034\x00\x01\x81\x80\x00\x01\x00\x01\x00\x00\x00\x00\x04' +
b'test\x03com\x00\x00\x01\x00\x01\xc0\x0c\x00\x1c\x00\x01\x00\x00\xff\xff\x00\x10\xfd\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01')
# Bad MAC address
self.assertIsNone(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:890ab',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00::2', ip_ttl=64, ip_ident=1,
udp_src_port=53, udp_dst_port=5353, transaction_id=1, flags=0,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 28, 'class': 1,
'ttl': 65535, 'address': 'fd00::1'}],
name_servers={}, exit_on_failure=False))
# Bad IP address
self.assertIsNone(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00:::1', ip_dst='fd00::2', ip_ttl=64, ip_ident=1,
udp_src_port=53, udp_dst_port=5353, transaction_id=1, flags=0,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 28, 'class': 1,
'ttl': 65535, 'address': 'fd00::1'}],
name_servers={}, exit_on_failure=False))
# Bad UDP port
self.assertIsNone(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00::2', ip_ttl=64, ip_ident=1,
udp_src_port=123123, udp_dst_port=5353, transaction_id=1, flags=0,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 28, 'class': 1,
'ttl': 65535, 'address': 'fd00::1'}],
name_servers={}, exit_on_failure=False))
# Bad IPv4 address in answer
self.assertIsNone(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00::2', ip_ttl=64, ip_ident=1,
udp_src_port=53, udp_dst_port=5353, transaction_id=1, flags=0,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 1, 'class': 1,
'ttl': 65535, 'address': '192.168.1.300'}],
name_servers={}, exit_on_failure=False))
# Bad IPv6 address in answer
self.assertIsNone(self.dns.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='fd00::1', ip_dst='fd00::2', ip_ttl=64, ip_ident=1,
udp_src_port=53, udp_dst_port=5353, transaction_id=1, flags=0,
queries=[{'type': 1, 'class': 1, 'name': 'test.com'}],
answers_address=[{'name': 'test.com', 'type': 28, 'class': 1,
'ttl': 65535, 'address': 'fd00:::1'}],
name_servers={}, exit_on_failure=False))
    # endregion
# region Test RawICMPv4 methods
def test_icmpv4_make_host_unreachable_packet(self):
# Normal
self.assertEqual(self.icmpv4.make_host_unreachable_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
ip_ident=1),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x000\x01\x00\x00\x00@\x01\xf8y\xc0\xa8\x00' +
b'\x01\xc0\xa8\x00\x02\x03\x01\xfc\xfe\x00\x00\x00\x00E\x00\x00\x1c\x01\x00\x00\x00@\x01' +
b'\xf8\x8d\xc0\xa8\x00\x02\xc0\xa8\x00\x01')
# Bad MAC address
self.assertIsNone(self.icmpv4.make_host_unreachable_packet(ethernet_src_mac='01:23:45:67:89:0ab',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
ip_ident=1))
# Bad IP address
self.assertIsNone(self.icmpv4.make_host_unreachable_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1111', ip_dst='192.168.0.2',
ip_ident=1))
def test_icmpv4_make_udp_port_unreachable_packet(self):
# Normal
self.assertEqual(self.icmpv4.make_udp_port_unreachable_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
udp_src_port=5353, udp_dst_port=5353,
ip_ident=1),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x008\x01\x00\x00\x00@\x01\xf8q\xc0\xa8\x00\x01' +
b'\xc0\xa8\x00\x02\x03\x03\xd3"\x00\x00\x00\x00E\x00\x00$\x01\x00\x00\x00@\x11\xf8u\xc0\xa8' +
b'\x00\x02\xc0\xa8\x00\x01\x14\xe9\x14\xe9\x00\x08\x00\x00')
def test_icmpv4_make_ping_request_packet(self):
# Normal
self.assertEqual(self.icmpv4.make_ping_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
ip_ident=1, data=b'0123456789'),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x00&\x01\x00\x00\x00@\x01\xf8\x83\xc0\xa8\x00' +
b'\x01\xc0\xa8\x00\x02\x08\x00\xf2\xf5\x00\x00\x00\x000123456789')
def test_icmpv4_make_redirect_packet(self):
# Normal
self.assertEqual(self.icmpv4.make_redirect_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
ip_ttl=64, ip_ident=1,
gateway_address='192.168.0.1',
payload_ip_src='192.168.0.1',
payload_ip_dst='192.168.0.2'),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x008\x01\x00\x00\x00@\x01\xf8q\xc0\xa8\x00\x01' +
b'\xc0\xa8\x00\x02\x05\x019\xe3\xc0\xa8\x00\x01E\x00\x00\x1c\x01\x00\x00\x00@\x11\xf8}\xc0' +
b'\xa8\x00\x01\xc0\xa8\x00\x02\x005\x005\x00\x08\x00\x00')
# Bad gateway address
self.assertIsNone(self.icmpv4.make_redirect_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
ip_ttl=64, ip_ident=1,
gateway_address='192.168.0.1111',
payload_ip_src='192.168.0.1',
payload_ip_dst='192.168.0.2'))
# Bad payload IP address
self.assertIsNone(self.icmpv4.make_redirect_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.0.1', ip_dst='192.168.0.2',
ip_ttl=64, ip_ident=1,
gateway_address='192.168.0.1',
payload_ip_src='192.168.0.1111',
payload_ip_dst='192.168.0.2'))
# endregion
# region Test RawDHCPv4 methods
def test_dhcpv4_discover_packet(self):
# Normal
self.assertEqual(self.dhcpv4.make_discover_packet(ethernet_src_mac='01:23:45:67:89:0a',
client_mac='01:23:45:67:89:0a',
ip_ident=1, transaction_id=1,
host_name='dhcp.discover.test',
exit_on_failure=True,
exit_code=76),
b'\xff\xff\xff\xff\xff\xff\x01#Eg\x89\n\x08\x00E\x00\x02<\x01\x00\x00\x00@\x11w\xb2\x00' +
b'\x00\x00\x00\xff\xff\xff\xff\x00D\x00C\x02(\x00\x00\x01\x01\x06\x00\x00\x00\x00\x01\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01#Eg\x89' +
b'\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00c\x82Sc5\x01\x01\x0c\x12dhcp.discover.test7\xfe\x01\x02\x03\x04\x05' +
b'\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c' +
b'\x1d\x1e\x1f !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefg' +
b'hijklmnopqrstuvwxyz{|}~\x7f\x80\x81\x82\x83\x84\x85\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e' +
b'\x8f\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e\x9f\xa0\xa1\xa2\xa3\xa4' +
b'\xa5\xa6\xa7\xa8\xa9\xaa\xab\xac\xad\xae\xaf\xb0\xb1\xb2\xb3\xb4\xb5\xb6\xb7\xb8\xb9\xba' +
b'\xbb\xbc\xbd\xbe\xbf\xc0\xc1\xc2\xc3\xc4\xc5\xc6\xc7\xc8\xc9\xca\xcb\xcc\xcd\xce\xcf\xd0' +
b'\xd1\xd2\xd3\xd4\xd5\xd6\xd7\xd8\xd9\xda\xdb\xdc\xdd\xde\xdf\xe0\xe1\xe2\xe3\xe4\xe5\xe6' +
b'\xe7\xe8\xe9\xea\xeb\xec\xed\xee\xef\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc' +
b'\xfd\xfe\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00')
def test_dhcpv4_make_request_packet(self):
# Normal
self.assertEqual(self.dhcpv4.make_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
client_mac='01:23:45:67:89:0a',
ip_ident=1, transaction_id=1,
requested_ip='192.168.1.1',
host_name='dhcp.request.test',
exit_on_failure=True,
exit_code=77),
b'\xff\xff\xff\xff\xff\xff\x01#Eg\x89\n\x08\x00E\x00\x01J\x01\x00\x00\x00@\x11x\xa4\x00' +
b'\x00\x00\x00\xff\xff\xff\xff\x00D\x00C\x016\x00\x00\x01\x01\x06\x00\x00\x00\x00\x01\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01#Eg\x89' +
b'\n\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00c\x82Sc5\x01\x032\x04\xc0\xa8\x01\x01\x0c\x11dhcp.request.test7\x07' +
b'\x01\x02\x03\x06\x1c\x0f\x1a\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
def test_dhcpv4_make_response_packet(self):
# DHCPv4 Offer
self.assertEqual(self.dhcpv4.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_ident=1, transaction_id=1,
dhcp_message_type=2, your_client_ip='192.168.1.2',
exit_on_failure=True,
exit_code=78),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x01F\x01\x00\x00\x00@\x11\xb6\xfe\xc0\xa8\x01' +
b'\x01\xff\xff\xff\xff\x00C\x00D\x012\x00\x00\x02\x01\x06\x00\x00\x00\x00\x01\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\xc0\xa8\x01\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01#Eg\x89\x0b\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00c\x82Sc5\x01\x026\x04\xc0\xa8\x01\x013\x04\x00\x00\xff\xff\x01\x04\xff\xff\xff' +
b'\x00\x03\x04\xc0\xa8\x01\x01\x06\x04\xc0\xa8\x01\x01\xff\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
# DHCPv4 ACK
self.assertEqual(self.dhcpv4.make_response_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ip_src='192.168.1.1', ip_ident=1, transaction_id=1,
dhcp_message_type=5, your_client_ip='192.168.1.2',
exit_on_failure=True,
exit_code=78),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x08\x00E\x00\x01F\x01\x00\x00\x00@\x11\xb6\xfe\xc0\xa8\x01' +
b'\x01\xff\xff\xff\xff\x00C\x00D\x012\x00\x00\x02\x01\x06\x00\x00\x00\x00\x01\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\xc0\xa8\x01\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01#Eg\x89\x0b\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00c\x82Sc5\x01\x056\x04\xc0\xa8\x01\x013\x04\x00\x00\xff\xff\x01\x04\xff\xff\xff' +
b'\x00\x03\x04\xc0\xa8\x01\x01\x06\x04\xc0\xa8\x01\x01\xff\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')
# endregion
# region Test RawICMPv6 methods
def test_icmpv6_make_option(self):
# Normal
self.assertEqual(self.icmpv6.make_option(option_type=1, option_value=b'test_option_value'),
b'\x01\x03\x00\x00\x00\x00\x00test_option_value')
def test_icmpv6_make_router_solicit_packet(self):
# Normal
self.assertEqual(self.icmpv6.make_router_solicit_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='33:33:00:00:00:02',
ipv6_src='fd00::1', ipv6_dst='fd00::2',
ipv6_flow=0x835d1,
need_source_link_layer_address=True,
source_link_layer_address=None),
b'33\x00\x00\x00\x02\x01#Eg\x89\n\x86\xdd`\x085\xd1\x00\x10:\xff\xfd\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x02\x85\x00\xb0\x1a\x00\x00\x00\x00\x01\x01\x01#Eg\x89\n')
def test_icmpv6_make_router_advertisement_packet(self):
# Normal
self.assertEqual(self.icmpv6.make_router_advertisement_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ipv6_src='fd00::1', ipv6_dst='fd00::2',
dns_address='fd00::1', domain_search='test.local',
prefix='fd00::/64'),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\x0bGU\x00\x80:\xff\xfd\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x02\x86\x00\xb3>@\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x04@\xc0\xff\xff' +
b'\xff\xff\xff\xff\xff\xff\x00\x00\x00\x00\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x01\x01\x01#Eg\x89\n\x05\x01\x00\x00\x00\x00\x05\xdc\x19\x03\x00\x00\x00' +
b'\x00\x17p\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x1f\x04\x00\x00' +
b'\x00\x00\x17p\x04test\x05local\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x01' +
b'\x00\x00\x00\x00\xea`')
def test_icmpv6_make_neighbor_solicitation_packet(self):
# Normal
self.assertEqual(self.icmpv6.make_neighbor_solicitation_packet(ethernet_src_mac='01:23:45:67:89:0a',
ipv6_src='fd00::1'),
b'33\x00\x00\x00\x01\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00 :\xff\xfd\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x01\x87\x00\xac\x05\x00\x00\x00\x00\xff\x02\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x01\x02\x01\x01#Eg\x89\n')
def test_icmpv6_make_neighbor_advertisement_packet(self):
# Normal
self.assertEqual(self.icmpv6.make_neighbor_advertisement_packet(ethernet_src_mac='01:23:45:67:89:0a',
ipv6_src='fd00::1',
target_ipv6_address='fd00::2'),
b'33\x00\x00\x00\x01\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00 :\xff\xfd\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x01\x88\x00\x8d\x06 \x00\x00\x00\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x02\x02\x01\x01#Eg\x89\n')
def test_icmpv6_make_echo_request_packet(self):
# Normal
self.assertEqual(self.icmpv6.make_echo_request_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ipv6_src='fd00::1', ipv6_dst='fd00::2',
id=1, sequence=1),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00@:\xff\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x80\x00\x8ek\x00\x01\x00\x01\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c' +
b'\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f ' +
b'!"#$%&\'()*+,-./01234567')
def test_icmpv6_make_echo_reply_packet(self):
# Normal
self.assertEqual(self.icmpv6.make_echo_reply_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ipv6_src='fd00::1', ipv6_dst='fd00::2',
id=1, sequence=1),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00@:\xff\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x81\x00\x8dk\x00\x01\x00\x01\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c' +
b'\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f ' +
b'!"#$%&\'()*+,-./01234567')
# endregion
# region Test RawDHCPv6 methods
    def test_dhcpv6_make_duid(self):
# Normal
self.assertEqual(self.dhcpv6._make_duid(mac_address='01:23:45:67:89:0a'),
b'\x00\x03\x00\x01\x01#Eg\x89\n')
def test_dhcpv6_make_solicit_packet(self):
# Normal
self.assertEqual(self.dhcpv6.make_solicit_packet(ethernet_src_mac='01:23:45:67:89:0a',
ipv6_src='fd00::1',
transaction_id=1,
client_mac_address='01:23:45:67:89:0a',
option_request_list=[23, 24]),
b'33\x00\x01\x00\x02\x01#Eg\x89\n\x86\xdd`\x00\x00\x00\x00H\x11@\xfd\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\xff\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x01\x00\x02\x02"\x02#\x00H.\x01\x01\x00\x00\x01\x00\x03\x00\x18\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00' +
b'\x00\x00\x08\x00\x02\x00\x00\x00\x01\x00\n\x00\x03\x00\x01\x01#Eg\x89\n\x00\x06\x00\x04' +
b'\x00\x17\x00\x18')
def test_dhcpv6_make_relay_forw_packet(self):
# Normal
self.assertEqual(self.dhcpv6.make_relay_forw_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ipv6_src='fd00::1', ipv6_dst='fd00::2', ipv6_flow=1,
hop_count=10, link_addr='fd00::2',
peer_addr='fd00::3'),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\x00\x00\x01\x00*\x11@\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x02"\x02#\x00*\xfb?\x0c\n\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x02\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03')
def test_dhcpv6_make_advertise_packet(self):
# Normal
self.assertEqual(self.dhcpv6.make_advertise_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ipv6_src='fd00::1', ipv6_dst='fd00::2',
transaction_id=1,
dns_address='fd00::1',
domain_search='test.local',
ipv6_address='fd00::2'),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\n\x1b\x82\x00\x84\x11@\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x02#\x02"\x00\x84n\xf4\x02\x00\x00\x01\x00\x01\x00\n\x00\x03\x00\x01\x01#Eg' +
b'\x89\x0b\x00\x02\x00\n\x00\x03\x00\x01\x01#Eg\x89\n\x00\x14\x00\x00\x00\x17\x00\x10\xfd' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x18\x00\x0c\x04test\x05' +
b'local\x00\x00R\x00\x04\x00\x00\x00<\x00\x03\x00(\x00\x00\x00\x01\x00\x00T`\x00\x00\x87' +
b'\x00\x00\x05\x00\x18\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xff' +
b'\xff\xff\xff\xff\xff\xff\xff')
def test_dhcpv6_make_reply_packet(self):
# Normal
self.assertEqual(self.dhcpv6.make_reply_packet(ethernet_src_mac='01:23:45:67:89:0a',
ethernet_dst_mac='01:23:45:67:89:0b',
ipv6_src='fd00::1', ipv6_dst='fd00::2',
transaction_id=1,
dns_address='fd00::1',
domain_search='test.local',
ipv6_address='fd00::2'),
b'\x01#Eg\x89\x0b\x01#Eg\x89\n\x86\xdd`\n\x1b\x82\x00\x84\x11@\xfd\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x01\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' +
b'\x00\x00\x02\x02#\x02"\x00\x84i\xf4\x07\x00\x00\x01\x00\x01\x00\n\x00\x03\x00\x01\x01#Eg' +
b'\x89\x0b\x00\x02\x00\n\x00\x03\x00\x01\x01#Eg\x89\n\x00\x14\x00\x00\x00\x17\x00\x10\xfd' +
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x18\x00\x0c\x04test\x05' +
b'local\x00\x00R\x00\x04\x00\x00\x00<\x00\x03\x00(\x00\x00\x00\x01\x00\x00T`\x00\x00\x87' +
b'\x00\x00\x05\x00\x18\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\xff' +
b'\xff\xff\xff\xff\xff\xff\xff')
# endregion
# endregion
|
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers that are not directly related to course content."""
__author__ = 'Saifu Angto ([email protected])'
import base64
import hmac
import os
import time
import urlparse
import appengine_config
from models import transforms
from models.config import ConfigProperty
from models.config import ConfigPropertyEntity
from models.courses import Course
from models.models import Student
from models.roles import Roles
import webapp2
from google.appengine.api import namespace_manager
from google.appengine.api import users
# The name of the template dict key that stores a course's base location.
COURSE_BASE_KEY = 'gcb_course_base'
# The name of the template dict key that stores data from course.yaml.
COURSE_INFO_KEY = 'course_info'
XSRF_SECRET_LENGTH = 20
XSRF_SECRET = ConfigProperty(
'gcb_xsrf_secret', str, (
'Text used to encrypt tokens, which help prevent Cross-site request '
'forgery (CSRF, XSRF). You can set the value to any alphanumeric text, '
'preferably using 16-64 characters. Once you change this value, the '
'server rejects all subsequent requests issued using an old value for '
'this variable.'),
'course builder XSRF secret')
class ReflectiveRequestHandler(object):
"""Uses reflection to handle custom get() and post() requests.
Use this class as a mix-in with any webapp2.RequestHandler to allow request
dispatching to multiple get() and post() methods based on the 'action'
parameter.
Open your existing webapp2.RequestHandler, add this class as a mix-in.
Define the following class variables:
default_action = 'list'
get_actions = ['default_action', 'edit']
post_actions = ['save']
Add instance methods named get_list(self), get_edit(self), post_save(self).
These methods will now be called automatically based on the 'action'
GET/POST parameter.
"""
def create_xsrf_token(self, action):
return XsrfTokenManager.create_xsrf_token(action)
def get(self):
"""Handles GET."""
action = self.request.get('action')
if not action:
action = self.default_action
        if action not in self.get_actions:
self.error(404)
return
        handler = getattr(self, 'get_%s' % action, None)
if not handler:
self.error(404)
return
return handler()
def post(self):
"""Handles POST."""
action = self.request.get('action')
        if not action or action not in self.post_actions:
self.error(404)
return
        handler = getattr(self, 'post_%s' % action, None)
if not handler:
self.error(404)
return
# Each POST request must have valid XSRF token.
xsrf_token = self.request.get('xsrf_token')
if not XsrfTokenManager.is_xsrf_token_valid(xsrf_token, action):
self.error(403)
return
return handler()
class ApplicationHandler(webapp2.RequestHandler):
"""A handler that is aware of the application context."""
@classmethod
def is_absolute(cls, url):
return bool(urlparse.urlparse(url).scheme)
@classmethod
def get_base_href(cls, handler):
"""Computes current course <base> href."""
base = handler.app_context.get_slug()
if not base.endswith('/'):
base = '%s/' % base
# For IE to work with the <base> tag, its href must be an absolute URL.
if not cls.is_absolute(base):
parts = urlparse.urlparse(handler.request.url)
base = urlparse.urlunparse(
(parts.scheme, parts.netloc, base, None, None, None))
return base
def __init__(self, *args, **kwargs):
super(ApplicationHandler, self).__init__(*args, **kwargs)
self.template_value = {}
def get_template(self, template_file, additional_dirs=None):
"""Computes location of template files for the current namespace."""
self.template_value[COURSE_INFO_KEY] = self.app_context.get_environ()
self.template_value['is_course_admin'] = Roles.is_course_admin(
self.app_context)
self.template_value[
'is_read_write_course'] = self.app_context.fs.is_read_write()
self.template_value['is_super_admin'] = Roles.is_super_admin()
self.template_value[COURSE_BASE_KEY] = self.get_base_href(self)
return self.app_context.get_template_environ(
self.template_value[COURSE_INFO_KEY]['course']['locale'],
additional_dirs
).get_template(template_file)
def canonicalize_url(self, location):
"""Adds the current namespace URL prefix to the relative 'location'."""
is_relative = (
not self.is_absolute(location) and
not location.startswith(self.app_context.get_slug()))
has_slug = (
self.app_context.get_slug() and self.app_context.get_slug() != '/')
if is_relative and has_slug:
location = '%s%s' % (self.app_context.get_slug(), location)
return location
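    # Illustrative example (the slug value is assumed): with a course slug of
    # '/mycourse', canonicalize_url('/unit?unit=1') returns
    # '/mycourse/unit?unit=1'; absolute URLs and locations already starting
    # with the slug are returned unchanged.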
def redirect(self, location):
super(ApplicationHandler, self).redirect(
self.canonicalize_url(location))
class BaseHandler(ApplicationHandler):
"""Base handler."""
def __init__(self, *args, **kwargs):
super(BaseHandler, self).__init__(*args, **kwargs)
self.course = None
def get_course(self):
if not self.course:
self.course = Course(self)
return self.course
def find_unit_by_id(self, unit_id):
"""Gets a unit with a specific id or fails with an exception."""
return self.get_course().find_unit_by_id(unit_id)
def get_units(self):
"""Gets all units in the course."""
return self.get_course().get_units()
def get_lessons(self, unit_id):
"""Gets all lessons (in order) in the specific course unit."""
return self.get_course().get_lessons(unit_id)
def get_progress_tracker(self):
"""Gets the progress tracker for the course."""
return self.get_course().get_progress_tracker()
def get_user(self):
"""Validate user exists."""
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
else:
return user
def personalize_page_and_get_user(self):
"""If the user exists, add personalized fields to the navbar."""
user = self.get_user()
if user:
self.template_value['email'] = user.email()
self.template_value['logoutUrl'] = (
users.create_logout_url(self.request.uri))
return user
def personalize_page_and_get_enrolled(self):
"""If the user is enrolled, add personalized fields to the navbar."""
user = self.personalize_page_and_get_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return None
student = Student.get_enrolled_student_by_email(user.email())
if not student:
self.redirect('/preview')
return None
return student
def assert_xsrf_token_or_fail(self, request, action):
"""Asserts the current request has proper XSRF token or fails."""
token = request.get('xsrf_token')
if not token or not XsrfTokenManager.is_xsrf_token_valid(token, action):
self.error(403)
return False
return True
def render(self, template_file):
"""Renders a template."""
template = self.get_template(template_file)
self.response.out.write(template.render(self.template_value))
class BaseRESTHandler(BaseHandler):
"""Base REST handler."""
def assert_xsrf_token_or_fail(self, token_dict, action, args_dict):
"""Asserts that current request has proper XSRF token or fails."""
token = token_dict.get('xsrf_token')
if not token or not XsrfTokenManager.is_xsrf_token_valid(token, action):
transforms.send_json_response(
self, 403,
'Bad XSRF token. Please reload the page and try again',
args_dict)
return False
return True
class PreviewHandler(BaseHandler):
"""Handler for viewing course preview."""
def get(self):
"""Handles GET requests."""
user = users.get_current_user()
if not user:
self.template_value['loginUrl'] = (
users.create_login_url(self.request.uri))
else:
self.template_value['email'] = user.email()
self.template_value['logoutUrl'] = (
users.create_logout_url(self.request.uri))
self.template_value['navbar'] = {'course': True}
self.template_value['units'] = self.get_units()
if user and Student.get_enrolled_student_by_email(user.email()):
self.redirect('/course')
else:
self.render('preview.html')
class RegisterHandler(BaseHandler):
"""Handler for course registration."""
def get(self):
logging.error("cock has occured")
"""Handles GET request."""
user = self.personalize_page_and_get_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
student = Student.get_by_email(user.email())
if student and student.is_enrolled:
self.redirect('/course')
return
self.template_value['navbar'] = {'registration': True}
self.template_value['register_xsrf_token'] = (
XsrfTokenManager.create_xsrf_token('register-post'))
if student and not student.is_enrolled:
self.render('application_pending.html')
else:
self.render('register.html')
def post(self):
"""Handles POST requests."""
user = self.personalize_page_and_get_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
return
if not self.assert_xsrf_token_or_fail(self.request, 'register-post'):
return
        environ = self.app_context.get_environ()
        can_register = environ['reg_form']['can_register']
if not can_register:
self.template_value['course_status'] = 'full'
else:
name = self.request.get('form01')
surname = self.request.get('form02')
age = self.request.get('form03')
# create new or re-enroll old student
student = Student.get_by_email(user.email())
if not student:
student = Student(key_name=user.email())
student.user_id = user.user_id()
student.is_enrolled = False
student.name = name
student.surname = surname
student.age = age
student.put()
# Render registration confirmation page
self.template_value['navbar'] = {'registration': True}
self.render('confirmation.html')
class ForumHandler(BaseHandler):
"""Handler for forum page."""
def get(self):
"""Handles GET requests."""
if not self.personalize_page_and_get_enrolled():
return
self.template_value['navbar'] = {'forum': True}
self.render('forum.html')
class StudentProfileHandler(BaseHandler):
"""Handles the click to 'My Profile' link in the nav bar."""
def get(self):
"""Handles GET requests."""
student = self.personalize_page_and_get_enrolled()
if not student:
return
course = self.get_course()
self.template_value['navbar'] = {}
self.template_value['student'] = student
self.template_value['score_list'] = course.get_all_scores(student)
self.template_value['overall_score'] = course.get_overall_score(student)
self.template_value['student_edit_xsrf_token'] = (
XsrfTokenManager.create_xsrf_token('student-edit'))
self.render('student_profile.html')
class StudentEditStudentHandler(BaseHandler):
"""Handles edits to student records by students."""
def post(self):
"""Handles POST requests."""
student = self.personalize_page_and_get_enrolled()
if not student:
return
if not self.assert_xsrf_token_or_fail(self.request, 'student-edit'):
return
Student.rename_current(self.request.get('name'))
self.redirect('/student/home')
class StudentUnenrollHandler(BaseHandler):
"""Handler for students to unenroll themselves."""
def get(self):
"""Handles GET requests."""
student = self.personalize_page_and_get_enrolled()
if not student:
return
self.template_value['student'] = student
self.template_value['navbar'] = {'registration': True}
self.template_value['student_unenroll_xsrf_token'] = (
XsrfTokenManager.create_xsrf_token('student-unenroll'))
self.render('unenroll_confirmation_check.html')
def post(self):
"""Handles POST requests."""
student = self.personalize_page_and_get_enrolled()
if not student:
return
if not self.assert_xsrf_token_or_fail(self.request, 'student-unenroll'):
return
Student.set_enrollment_status_for_current(False)
self.template_value['navbar'] = {'registration': True}
self.render('unenroll_confirmation.html')
class XsrfTokenManager(object):
"""Provides XSRF protection by managing action/user tokens in memcache."""
# Max age of the token (4 hours).
XSRF_TOKEN_AGE_SECS = 60 * 60 * 4
# Token delimiters.
DELIMITER_PRIVATE = ':'
DELIMITER_PUBLIC = '/'
    # Default user id to use when no user is signed in.
USER_ID_DEFAULT = 'default'
@classmethod
def init_xsrf_secret_if_none(cls):
"""Verifies that non-default XSRF secret exists; creates one if not."""
# Any non-default value is fine.
if XSRF_SECRET.value and XSRF_SECRET.value != XSRF_SECRET.default_value:
return
# All property manipulations must run in the default namespace.
old_namespace = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace(
appengine_config.DEFAULT_NAMESPACE_NAME)
# Look in the datastore directly.
entity = ConfigPropertyEntity.get_by_key_name(XSRF_SECRET.name)
if not entity:
entity = ConfigPropertyEntity(key_name=XSRF_SECRET.name)
# Any non-default non-None value is fine.
if (entity.value and not entity.is_draft and
(str(entity.value) != str(XSRF_SECRET.default_value))):
return
# Initialize to random value.
entity.value = base64.urlsafe_b64encode(
os.urandom(XSRF_SECRET_LENGTH))
entity.is_draft = False
entity.put()
finally:
namespace_manager.set_namespace(old_namespace)
@classmethod
def _create_token(cls, action_id, issued_on):
"""Creates a string representation (digest) of a token."""
cls.init_xsrf_secret_if_none()
        # Tokens are transient and are never stored server-side, to reduce
        # datastore costs. A token has two public parts: the plain-text issue
        # time and an HMAC digest computed over the actor user id, the action
        # and the issue time, keyed with the XSRF secret.
# Lookup user id.
user = users.get_current_user()
if user:
user_id = user.user_id()
else:
user_id = cls.USER_ID_DEFAULT
        # Truncate time to whole seconds.
issued_on = long(issued_on)
digester = hmac.new(str(XSRF_SECRET.value))
digester.update(str(user_id))
digester.update(cls.DELIMITER_PRIVATE)
digester.update(str(action_id))
digester.update(cls.DELIMITER_PRIVATE)
digester.update(str(issued_on))
digest = digester.digest()
token = '%s%s%s' % (
issued_on, cls.DELIMITER_PUBLIC, base64.urlsafe_b64encode(digest))
return token
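    # Illustrative token shape (values are made up): for issued_on=1357924680
    # the returned string looks like '1357924680/AbCdEf...==', i.e. the plain
    # issue time, the public delimiter and the urlsafe-base64 HMAC digest.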
@classmethod
def create_xsrf_token(cls, action):
return cls._create_token(action, time.time())
@classmethod
def is_xsrf_token_valid(cls, token, action):
"""Validate a given XSRF token by retrieving it from memcache."""
try:
parts = token.split(cls.DELIMITER_PUBLIC)
if not len(parts) == 2:
return False
issued_on = long(parts[0])
age = time.time() - issued_on
if age > cls.XSRF_TOKEN_AGE_SECS:
return False
authentic_token = cls._create_token(action, issued_on)
if authentic_token == token:
return True
return False
except Exception: # pylint: disable-msg=broad-except
return False
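# Illustrative usage (assumption, not part of the original module): a token is
# bound to an action, so it only validates against that same action.
#
#   token = XsrfTokenManager.create_xsrf_token('register-post')
#   XsrfTokenManager.is_xsrf_token_valid(token, 'register-post')  # -> True
#   XsrfTokenManager.is_xsrf_token_valid(token, 'student-edit')   # -> False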
|
|
"""Shelly entity helper."""
from __future__ import annotations
import asyncio
from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import Any, Final, cast
from aioshelly.block_device import Block
import async_timeout
from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import (
device_registry,
entity,
entity_registry,
update_coordinator,
)
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import StateType
from . import BlockDeviceWrapper, RpcDeviceWrapper, ShellyDeviceRestWrapper
from .const import (
AIOSHELLY_DEVICE_TIMEOUT_SEC,
BLOCK,
DATA_CONFIG_ENTRY,
DOMAIN,
REST,
RPC,
)
from .utils import (
async_remove_shelly_entity,
get_block_entity_name,
get_rpc_entity_name,
get_rpc_key_instances,
)
_LOGGER: Final = logging.getLogger(__name__)
async def async_setup_entry_attribute_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: dict[tuple[str, str], BlockAttributeDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for attributes."""
wrapper: BlockDeviceWrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][
config_entry.entry_id
][BLOCK]
if wrapper.device.initialized:
await async_setup_block_attribute_entities(
hass, async_add_entities, wrapper, sensors, sensor_class
)
else:
await async_restore_block_attribute_entities(
hass, config_entry, async_add_entities, wrapper, sensors, sensor_class
)
async def async_setup_block_attribute_entities(
hass: HomeAssistant,
async_add_entities: AddEntitiesCallback,
wrapper: BlockDeviceWrapper,
sensors: dict[tuple[str, str], BlockAttributeDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for block attributes."""
blocks = []
assert wrapper.device.blocks
for block in wrapper.device.blocks:
for sensor_id in block.sensor_ids:
description = sensors.get((block.type, sensor_id))
if description is None:
continue
# Filter out non-existing sensors and sensors without a value
if getattr(block, sensor_id, None) in (-1, None):
continue
            # Remove entities that the device settings say should not be created
if description.removal_condition and description.removal_condition(
wrapper.device.settings, block
):
domain = sensor_class.__module__.split(".")[-1]
unique_id = f"{wrapper.mac}-{block.description}-{sensor_id}"
await async_remove_shelly_entity(hass, domain, unique_id)
else:
blocks.append((block, sensor_id, description))
if not blocks:
return
async_add_entities(
[
sensor_class(wrapper, block, sensor_id, description)
for block, sensor_id, description in blocks
]
)
async def async_restore_block_attribute_entities(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
wrapper: BlockDeviceWrapper,
sensors: dict[tuple[str, str], BlockAttributeDescription],
sensor_class: Callable,
) -> None:
"""Restore block attributes entities."""
entities = []
ent_reg = await entity_registry.async_get_registry(hass)
entries = entity_registry.async_entries_for_config_entry(
ent_reg, config_entry.entry_id
)
domain = sensor_class.__module__.split(".")[-1]
for entry in entries:
if entry.domain != domain:
continue
attribute = entry.unique_id.split("-")[-1]
description = BlockAttributeDescription(
name="",
icon=entry.original_icon,
unit=entry.unit_of_measurement,
device_class=entry.device_class,
)
entities.append(
sensor_class(wrapper, None, attribute, description, entry, sensors)
)
if not entities:
return
async_add_entities(entities)
async def async_setup_entry_rpc(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: dict[str, RpcAttributeDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for REST sensors."""
wrapper: RpcDeviceWrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][
config_entry.entry_id
][RPC]
entities = []
for sensor_id in sensors:
description = sensors[sensor_id]
key_instances = get_rpc_key_instances(wrapper.device.status, description.key)
for key in key_instances:
# Filter non-existing sensors
if description.sub_key not in wrapper.device.status[key]:
continue
            # Remove entities that the device config says should not be created
if description.removal_condition and description.removal_condition(
wrapper.device.config, key
):
domain = sensor_class.__module__.split(".")[-1]
unique_id = f"{wrapper.mac}-{key}-{sensor_id}"
await async_remove_shelly_entity(hass, domain, unique_id)
else:
entities.append((key, sensor_id, description))
if not entities:
return
async_add_entities(
[
sensor_class(wrapper, key, sensor_id, description)
for key, sensor_id, description in entities
]
)
async def async_setup_entry_rest(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
sensors: dict[str, RestAttributeDescription],
sensor_class: Callable,
) -> None:
"""Set up entities for REST sensors."""
wrapper: ShellyDeviceRestWrapper = hass.data[DOMAIN][DATA_CONFIG_ENTRY][
config_entry.entry_id
][REST]
entities = []
for sensor_id in sensors:
description = sensors.get(sensor_id)
if not wrapper.device.settings.get("sleep_mode"):
entities.append((sensor_id, description))
if not entities:
return
async_add_entities(
[
sensor_class(wrapper, sensor_id, description)
for sensor_id, description in entities
]
)
@dataclass
class BlockAttributeDescription:
"""Class to describe a sensor."""
name: str
    icon: str | None = None
    # unit: a fixed string or a callable taking the block attribute info dict
    unit: None | str | Callable[[dict], str] = None
value: Callable[[Any], Any] = lambda val: val
device_class: str | None = None
state_class: str | None = None
default_enabled: bool = True
available: Callable[[Block], bool] | None = None
# Callable (settings, block), return true if entity should be removed
removal_condition: Callable[[dict, Block], bool] | None = None
extra_state_attributes: Callable[[Block], dict | None] | None = None
entity_category: str | None = None
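# Illustrative sketch (assumption; the key and values below are made up): a
# sensors mapping passed to async_setup_entry_attribute_entities could look like
#
#   SENSORS = {
#       ("device", "deviceTemp"): BlockAttributeDescription(
#           name="Device Temperature",
#           unit="°C",
#           value=lambda value: round(value, 1),
#           device_class="temperature",
#           default_enabled=False,
#       ),
#   }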
@dataclass
class RpcAttributeDescription:
"""Class to describe a RPC sensor."""
key: str
sub_key: str
name: str
icon: str | None = None
unit: str | None = None
value: Callable[[Any, Any], Any] | None = None
device_class: str | None = None
state_class: str | None = None
default_enabled: bool = True
available: Callable[[dict], bool] | None = None
removal_condition: Callable[[dict, str], bool] | None = None
extra_state_attributes: Callable[[dict, dict], dict | None] | None = None
entity_category: str | None = None
@dataclass
class RestAttributeDescription:
"""Class to describe a REST sensor."""
name: str
icon: str | None = None
unit: str | None = None
value: Callable[[dict, Any], Any] | None = None
device_class: str | None = None
state_class: str | None = None
default_enabled: bool = True
extra_state_attributes: Callable[[dict], dict | None] | None = None
entity_category: str | None = None
class ShellyBlockEntity(entity.Entity):
"""Helper class to represent a block entity."""
def __init__(self, wrapper: BlockDeviceWrapper, block: Block) -> None:
"""Initialize Shelly entity."""
self.wrapper = wrapper
self.block = block
self._name = get_block_entity_name(wrapper.device, block)
@property
def name(self) -> str:
"""Name of entity."""
return self._name
@property
def should_poll(self) -> bool:
"""If device should be polled."""
return False
@property
def device_info(self) -> DeviceInfo:
"""Device info."""
return {
"connections": {(device_registry.CONNECTION_NETWORK_MAC, self.wrapper.mac)}
}
@property
def available(self) -> bool:
"""Available."""
return self.wrapper.last_update_success
@property
def unique_id(self) -> str:
"""Return unique ID of entity."""
return f"{self.wrapper.mac}-{self.block.description}"
async def async_added_to_hass(self) -> None:
"""When entity is added to HASS."""
self.async_on_remove(self.wrapper.async_add_listener(self._update_callback))
async def async_update(self) -> None:
"""Update entity with latest info."""
await self.wrapper.async_request_refresh()
@callback
def _update_callback(self) -> None:
"""Handle device update."""
self.async_write_ha_state()
async def set_state(self, **kwargs: Any) -> Any:
"""Set block state (HTTP request)."""
_LOGGER.debug("Setting state for entity %s, state: %s", self.name, kwargs)
try:
async with async_timeout.timeout(AIOSHELLY_DEVICE_TIMEOUT_SEC):
return await self.block.set_state(**kwargs)
except (asyncio.TimeoutError, OSError) as err:
_LOGGER.error(
"Setting state for entity %s failed, state: %s, error: %s",
self.name,
kwargs,
repr(err),
)
self.wrapper.last_update_success = False
return None
class ShellyRpcEntity(entity.Entity):
"""Helper class to represent a rpc entity."""
def __init__(self, wrapper: RpcDeviceWrapper, key: str) -> None:
"""Initialize Shelly entity."""
self.wrapper = wrapper
self.key = key
self._attr_should_poll = False
self._attr_device_info = {
"connections": {(device_registry.CONNECTION_NETWORK_MAC, wrapper.mac)}
}
self._attr_unique_id = f"{wrapper.mac}-{key}"
self._attr_name = get_rpc_entity_name(wrapper.device, key)
@property
def available(self) -> bool:
"""Available."""
return self.wrapper.device.connected
async def async_added_to_hass(self) -> None:
"""When entity is added to HASS."""
self.async_on_remove(self.wrapper.async_add_listener(self._update_callback))
async def async_update(self) -> None:
"""Update entity with latest info."""
await self.wrapper.async_request_refresh()
@callback
def _update_callback(self) -> None:
"""Handle device update."""
self.async_write_ha_state()
async def call_rpc(self, method: str, params: Any) -> Any:
"""Call RPC method."""
_LOGGER.debug(
"Call RPC for entity %s, method: %s, params: %s",
self.name,
method,
params,
)
try:
async with async_timeout.timeout(AIOSHELLY_DEVICE_TIMEOUT_SEC):
return await self.wrapper.device.call_rpc(method, params)
except asyncio.TimeoutError as err:
_LOGGER.error(
"Call RPC for entity %s failed, method: %s, params: %s, error: %s",
self.name,
method,
params,
repr(err),
)
self.wrapper.last_update_success = False
return None
class ShellyBlockAttributeEntity(ShellyBlockEntity, entity.Entity):
"""Helper class to represent a block attribute."""
def __init__(
self,
wrapper: BlockDeviceWrapper,
block: Block,
attribute: str,
description: BlockAttributeDescription,
) -> None:
"""Initialize sensor."""
super().__init__(wrapper, block)
self.attribute = attribute
self.description = description
unit = self.description.unit
if callable(unit):
unit = unit(block.info(attribute))
self._unit: None | str | Callable[[dict], str] = unit
self._unique_id: str = f"{super().unique_id}-{self.attribute}"
self._name = get_block_entity_name(wrapper.device, block, self.description.name)
@property
def unique_id(self) -> str:
"""Return unique ID of entity."""
return self._unique_id
@property
def name(self) -> str:
"""Name of sensor."""
return self._name
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if it should be enabled by default."""
return self.description.default_enabled
@property
def attribute_value(self) -> StateType:
"""Value of sensor."""
value = getattr(self.block, self.attribute)
if value is None:
return None
return cast(StateType, self.description.value(value))
@property
def device_class(self) -> str | None:
"""Device class of sensor."""
return self.description.device_class
@property
def icon(self) -> str | None:
"""Icon of sensor."""
return self.description.icon
@property
def available(self) -> bool:
"""Available."""
available = super().available
if not available or not self.description.available:
return available
return self.description.available(self.block)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
if self.description.extra_state_attributes is None:
return None
return self.description.extra_state_attributes(self.block)
@property
def entity_category(self) -> str | None:
"""Return category of entity."""
return self.description.entity_category
class ShellyRestAttributeEntity(update_coordinator.CoordinatorEntity):
"""Class to load info from REST."""
def __init__(
self,
wrapper: BlockDeviceWrapper,
attribute: str,
description: RestAttributeDescription,
) -> None:
"""Initialize sensor."""
super().__init__(wrapper)
self.wrapper = wrapper
self.attribute = attribute
self.description = description
self._name = get_block_entity_name(wrapper.device, None, self.description.name)
self._last_value = None
@property
def name(self) -> str:
"""Name of sensor."""
return self._name
@property
def device_info(self) -> DeviceInfo:
"""Device info."""
return {
"connections": {(device_registry.CONNECTION_NETWORK_MAC, self.wrapper.mac)}
}
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if it should be enabled by default."""
return self.description.default_enabled
@property
def available(self) -> bool:
"""Available."""
return self.wrapper.last_update_success
@property
def attribute_value(self) -> StateType:
"""Value of sensor."""
if callable(self.description.value):
self._last_value = self.description.value(
self.wrapper.device.status, self._last_value
)
return self._last_value
@property
def device_class(self) -> str | None:
"""Device class of sensor."""
return self.description.device_class
@property
def icon(self) -> str | None:
"""Icon of sensor."""
return self.description.icon
@property
def unique_id(self) -> str:
"""Return unique ID of entity."""
return f"{self.wrapper.mac}-{self.attribute}"
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
if self.description.extra_state_attributes is None:
return None
return self.description.extra_state_attributes(self.wrapper.device.status)
@property
def entity_category(self) -> str | None:
"""Return category of entity."""
return self.description.entity_category
class ShellyRpcAttributeEntity(ShellyRpcEntity, entity.Entity):
"""Helper class to represent a rpc attribute."""
def __init__(
self,
wrapper: RpcDeviceWrapper,
key: str,
attribute: str,
description: RpcAttributeDescription,
) -> None:
"""Initialize sensor."""
super().__init__(wrapper, key)
self.sub_key = description.sub_key
self.attribute = attribute
self.description = description
self._attr_unique_id = f"{super().unique_id}-{attribute}"
self._attr_name = get_rpc_entity_name(wrapper.device, key, description.name)
self._attr_entity_registry_enabled_default = description.default_enabled
self._attr_device_class = description.device_class
self._attr_icon = description.icon
self._last_value = None
@property
def attribute_value(self) -> StateType:
"""Value of sensor."""
if callable(self.description.value):
self._last_value = self.description.value(
self.wrapper.device.status[self.key][self.sub_key], self._last_value
)
else:
self._last_value = self.wrapper.device.status[self.key][self.sub_key]
return self._last_value
@property
def available(self) -> bool:
"""Available."""
available = super().available
if not available or not self.description.available:
return available
return self.description.available(
self.wrapper.device.status[self.key][self.sub_key]
)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the state attributes."""
if self.description.extra_state_attributes is None:
return None
assert self.wrapper.device.shelly
return self.description.extra_state_attributes(
self.wrapper.device.status[self.key][self.sub_key],
self.wrapper.device.shelly,
)
@property
def entity_category(self) -> str | None:
"""Return category of entity."""
return self.description.entity_category
class ShellySleepingBlockAttributeEntity(ShellyBlockAttributeEntity, RestoreEntity):
"""Represent a shelly sleeping block attribute entity."""
# pylint: disable=super-init-not-called
def __init__(
self,
wrapper: BlockDeviceWrapper,
block: Block | None,
attribute: str,
description: BlockAttributeDescription,
entry: entity_registry.RegistryEntry | None = None,
sensors: dict[tuple[str, str], BlockAttributeDescription] | None = None,
) -> None:
"""Initialize the sleeping sensor."""
self.sensors = sensors
self.last_state: StateType = None
self.wrapper = wrapper
self.attribute = attribute
self.block: Block | None = block # type: ignore[assignment]
self.description = description
self._unit = self.description.unit
if block is not None:
if callable(self._unit):
self._unit = self._unit(block.info(attribute))
self._unique_id = f"{self.wrapper.mac}-{block.description}-{attribute}"
self._name = get_block_entity_name(
self.wrapper.device, block, self.description.name
)
elif entry is not None:
self._unique_id = entry.unique_id
self._name = cast(str, entry.original_name)
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()
last_state = await self.async_get_last_state()
if last_state is not None:
self.last_state = last_state.state
self.description.state_class = last_state.attributes.get(ATTR_STATE_CLASS)
@callback
def _update_callback(self) -> None:
"""Handle device update."""
if (
self.block is not None
or not self.wrapper.device.initialized
or self.sensors is None
):
super()._update_callback()
return
_, entity_block, entity_sensor = self.unique_id.split("-")
assert self.wrapper.device.blocks
for block in self.wrapper.device.blocks:
if block.description != entity_block:
continue
for sensor_id in block.sensor_ids:
if sensor_id != entity_sensor:
continue
description = self.sensors.get((block.type, sensor_id))
if description is None:
continue
self.block = block
self.description = description
_LOGGER.debug("Entity %s attached to block", self.name)
super()._update_callback()
return
|
|
import numpy as np
import operator as op
from nose.tools import assert_true, assert_false, assert_raises
from numpy.testing import assert_equal, assert_array_equal, \
assert_array_less
import cyclopts.analysis as sis
def test_value_split():
a = np.array(zip(range(10), range(10, 20)), dtype=[('x', int), ('y', int)])
x, y = sis.value_split(a, 'x', 7)
yield assert_equal, x, a[:8]
yield assert_equal, y, a[8:]
x, y = sis.value_split(a, 'y', 11)
yield assert_equal, x, a[:2]
yield assert_equal, y, a[2:]
def test_cleanse():
a = np.array(zip(range(10), range(10, 20)), dtype=[('x', int), ('y', int)])
b = np.array(zip(range(10), range(10, 20)), dtype=[('x', int), ('y', int)])
x, y = sis.cleanse(a, b, 'x')
yield assert_equal, x, a
yield assert_equal, y, b
x, y = sis.cleanse(a, b, 'y')
yield assert_equal, x, a
yield assert_equal, y, b
c = np.array(zip(range(11), range(10, 21)), dtype=[('x', int), ('y', int)])
x, y = sis.cleanse(a, c, 'x')
yield assert_equal, x, a
yield assert_equal, y, c[:-1]
x, y = sis.cleanse(a, c, 'y')
yield assert_equal, x, a
yield assert_equal, y, c[:-1]
b[0]['y'] = -1
x, y = sis.cleanse(a, b, 'x')
yield assert_equal, x, a
yield assert_equal, y, b
x, y = sis.cleanse(a, b, 'y')
yield assert_equal, x, a[1:]
yield assert_equal, y, b[1:]
def test_split_group_by():
a = np.array(zip([0, 2, 1, 3], [1, 5, 3, 6]), dtype=[('x', int), ('y', int)])
b = np.array(zip([3, 1, 2, 0], [4, 2, 3, 1]), dtype=[('x', int), ('y', int)])
xs, ys = sis.split_group_by(a, b, 'y', 2, 'x')
yield assert_equal, xs[0], a[0:1]
yield assert_equal, ys[0], b[0:1]
yield assert_equal, xs[1], a[1:2]
yield assert_equal, ys[1], b[1:2]
yield assert_equal, xs[2], a[2:]
yield assert_equal, ys[2], b[2:]
def test_reduce():
a = np.array(zip(range(10), range(10, 20)), dtype=[('x', int), ('y', int)])
obs = sis.reduce(a, {'x': ('==', 1)})
yield assert_equal, obs, a[1:2]
obs = sis.reduce_eq(a, {'x': 1})
yield assert_equal, obs, a[1:2]
obs = sis.reduce_gt(a, {'x': 1})
yield assert_equal, obs, a[2:]
obs = sis.reduce_lt(a, {'x': 1})
yield assert_equal, obs, a[:1]
obs = sis.reduce_in(a, {'x': range(1, 9)})
yield assert_equal, obs, a[1:-1]
obs = sis.reduce_not_in(a, {'x': range(1, 9)})
yield assert_equal, obs, np.concatenate((a[:1], a[-1:]))
obs = sis.reduce(a, {'x': ('>', 1), 'y': ('<', 15)})
yield assert_equal, obs, a[2:-5]
def test_id_tree():
data = [{'paramid': 'a', 'instid': 'b', 'solnid': 'c', 'solver': 'x'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'd', 'solver': 'y'}]
tree = sis.id_tree(data)
assert_equal(len(tree), 1)
for pid, pst in sis.subtrees(tree):
assert_equal(pid, 'a')
assert_equal(len(pst), 1)
for iid, ist in sis.subtrees(pst):
assert_equal(iid, 'b')
assert_equal(len(ist), 2)
sids = set([x for x, _ in sis.subtrees(ist)])
assert_equal(sids, set(['c', 'd']))
solvers = set([x for _, x in sis.subtrees(ist)])
assert_equal(solvers, set(['x', 'y']))
def test_id_tree_prune():
data = [{'paramid': 'a', 'instid': 'b', 'solnid': 'c', 'solver': 'x'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'd', 'solver': 'y'},
{'paramid': 'a', 'instid': 'c', 'solnid': 'd', 'solver': 'y'},
{'paramid': 'b', 'instid': 'z', 'solnid': 'x', 'solver': 'y'},
{'paramid': 'b', 'instid': 'c', 'solnid': 'd', 'solver': 'y'},
{'paramid': 'b', 'instid': 'c', 'solnid': 'x', 'solver': 'y'},]
obs = sis.id_tree(data, nsoln_prune=0)
exp = sis.Tree()
assert_equal(obs, exp)
obs = sis.id_tree(data, nsoln_prune=1)
exp = sis.Tree()
exp['a']['c']['d'] = 'y'
exp['b']['z']['x'] = 'y'
assert_equal(obs, exp)
obs = sis.id_tree(data, nsoln_prune=2)
exp = sis.Tree()
exp['a']['b']['c'] = 'x'
exp['a']['b']['d'] = 'y'
exp['b']['c']['d'] = 'y'
exp['b']['c']['x'] = 'y'
assert_equal(obs, exp)
def test_id_tree_prune2():
data = [{'paramid': 'a', 'instid': 'b', 'solnid': 'd', 'solver': 'x'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'e', 'solver': 'y'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'f', 'solver': 'z'},
{'paramid': 'b', 'instid': 'a', 'solnid': 'g', 'solver': 'x'},
{'paramid': 'b', 'instid': 'a', 'solnid': 'h', 'solver': 'y'},
{'paramid': 'b', 'instid': 'a', 'solnid': 'i', 'solver': 'z'},]
exp = sis.Tree()
assert_equal(exp, sis.id_tree(data, nsoln_prune=0))
assert_equal(exp, sis.id_tree(data, nsoln_prune=1))
assert_equal(exp, sis.id_tree(data, nsoln_prune=2))
assert_equal(exp, sis.id_tree(data, nsoln_prune=4))
exp['a']['b']['d'] = 'x'
exp['a']['b']['e'] = 'y'
exp['a']['b']['f'] = 'z'
exp['b']['a']['g'] = 'x'
exp['b']['a']['h'] = 'y'
exp['b']['a']['i'] = 'z'
assert_equal(exp, sis.id_tree(data, nsoln_prune=3))
def test_id_tree_from_file():
from cyclopts import exchange_family
from cyclopts.structured_species import request
fname = './files/test_comb_2_solvers.h5'
fam = exchange_family.ResourceExchange()
sp = request.StructuredRequest()
data = sis.cyclopts_data(fname, fam, sp)
tree = sis.id_tree(data, nsoln_prune=2)
assert_equal(len(tree), 4)
for pid, pst in sis.subtrees(tree):
assert_equal(len(pst), 1)
for iid, ist in sis.subtrees(pst):
assert_equal(len(ist), 2)
solvers = set([solver for _, solver in sis.subtrees(ist)])
assert_equal(solvers, set(['cbc', 'greedy']))
def test_nnodes():
data = [{'paramid': 'a', 'instid': 'b', 'solnid': 'd', 'solver': 'x'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'e', 'solver': 'y'},
{'paramid': 'a', 'instid': 'c', 'solnid': 'f', 'solver': 'z'},
{'paramid': 'a', 'instid': 'c', 'solnid': 'g', 'solver': 'a'},
{'paramid': 'b', 'instid': 'a', 'solnid': 'h', 'solver': 'x'},
{'paramid': 'b', 'instid': 'a', 'solnid': 'i', 'solver': 'y'},
{'paramid': 'b', 'instid': 'a', 'solnid': 'j', 'solver': 'z'},
{'paramid': 'b', 'instid': 'd', 'solnid': 'k', 'solver': 'a'},]
tree = sis.id_tree(data)
assert_equal(sis.nnodes(tree, lev=0), 2)
assert_equal(sis.nnodes(tree, lev=1), 4)
assert_equal(sis.nnodes(tree, lev=2), 8)
def test_ninsts():
data = [{'paramid': 'a', 'instid': 'b', 'solnid': 'c', 'solver': 'x'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'd', 'solver': 'y'}]
tree = sis.id_tree(data)
assert_equal(sis.ninsts(tree), 1)
data.append({'paramid': 'a', 'instid': 'bx', 'solnid': 'c', 'solver': 'x'})
data.append({'paramid': 'a', 'instid': 'by', 'solnid': 'd', 'solver': 'y'})
tree = sis.id_tree(data)
assert_equal(sis.ninsts(tree), 3)
def test_leaf_vals():
data = [{'paramid': 'a', 'instid': 'b', 'solnid': 'c', 'solver': 'x'},
{'paramid': 'a', 'instid': 'b', 'solnid': 'd', 'solver': 'y'}]
tree = sis.id_tree(data)
assert_equal(sis.leaf_vals(tree), set(['x', 'y']))
def test_flow_rms():
from cyclopts import exchange_family
from cyclopts.structured_species import request
fname = './files/test_comb_2_solvers.h5'
fam = exchange_family.ResourceExchange()
sp = request.StructuredRequest()
data = sis.cyclopts_data(fname, fam, sp)
tree = sis.id_tree(data)
ret = sis.flow_rms(fname, tree, 'StructuredRequest')
assert_array_equal(ret['cflows']['cbc'], ret['cflows']['greedy'])
assert_array_equal(ret['flows']['cbc'], ret['flows']['greedy'])
def test_flow_rms_diff():
from cyclopts import exchange_family
from cyclopts.structured_species import request
from cyclopts.functionals import rms
fname = './files/test_comb_2_solvers.h5'
fam = exchange_family.ResourceExchange()
sp = request.StructuredRequest()
data = sis.cyclopts_data(fname, fam, sp)
tree = sis.id_tree(data)
ret = sis.flow_rms_diff(fname, tree, 'StructuredRequest', base_solver='cbc')
# This may fail if the db is regenerated with a new 'cyclopts exec', but I
# think everything is small enough that this will always be true. Ordering
# may also change, but I don't expect that to be the case.
exp = [
rms(np.array([17500, 17500, 0, 0])),
rms(np.array([17500, 17500, 0])),
0,
0,
]
obs = ret['flows']['greedy']
assert_array_equal(obs, exp)
exp = [
rms(0.5 * np.array([17500, 17500, 0, 0])),
rms(0.5 * np.array([17500, 17500, 0])),
0,
0,
]
obs = ret['cflows']['greedy']
assert_array_equal(obs, exp)
|
|
"""
Demo platform for the cover component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/demo/
"""
from homeassistant.components.cover import (
CoverDevice, SUPPORT_OPEN, SUPPORT_CLOSE)
from homeassistant.helpers.event import track_utc_time_change
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Demo covers."""
add_devices([
DemoCover(hass, 'Kitchen Window'),
DemoCover(hass, 'Hall Window', 10),
DemoCover(hass, 'Living Room Window', 70, 50),
DemoCover(hass, 'Garage Door', device_class='garage',
supported_features=(SUPPORT_OPEN | SUPPORT_CLOSE)),
])
class DemoCover(CoverDevice):
"""Representation of a demo cover."""
# pylint: disable=no-self-use
def __init__(self, hass, name, position=None, tilt_position=None,
device_class=None, supported_features=None):
"""Initialize the cover."""
self.hass = hass
self._name = name
self._position = position
self._device_class = device_class
self._supported_features = supported_features
self._set_position = None
self._set_tilt_position = None
self._tilt_position = tilt_position
self._requested_closing = True
self._requested_closing_tilt = True
self._unsub_listener_cover = None
self._unsub_listener_cover_tilt = None
self._is_opening = False
self._is_closing = False
if position is None:
self._closed = True
else:
self._closed = self.current_cover_position <= 0
@property
def name(self):
"""Return the name of the cover."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo cover."""
return False
@property
def current_cover_position(self):
"""Return the current position of the cover."""
return self._position
@property
def current_cover_tilt_position(self):
"""Return the current tilt position of the cover."""
return self._tilt_position
@property
def is_closed(self):
"""Return if the cover is closed."""
return self._closed
@property
def is_closing(self):
"""Return if the cover is closing."""
return self._is_closing
@property
def is_opening(self):
"""Return if the cover is opening."""
return self._is_opening
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return self._device_class
@property
def supported_features(self):
"""Flag supported features."""
if self._supported_features is not None:
return self._supported_features
return super().supported_features
def close_cover(self, **kwargs):
"""Close the cover."""
if self._position == 0:
return
elif self._position is None:
self._closed = True
self.schedule_update_ha_state()
return
self._is_closing = True
self._listen_cover()
self._requested_closing = True
self.schedule_update_ha_state()
def close_cover_tilt(self, **kwargs):
"""Close the cover tilt."""
if self._tilt_position in (0, None):
return
self._listen_cover_tilt()
self._requested_closing_tilt = True
def open_cover(self, **kwargs):
"""Open the cover."""
if self._position == 100:
return
elif self._position is None:
self._closed = False
self.schedule_update_ha_state()
return
self._is_opening = True
self._listen_cover()
self._requested_closing = False
self.schedule_update_ha_state()
def open_cover_tilt(self, **kwargs):
"""Open the cover tilt."""
if self._tilt_position in (100, None):
return
self._listen_cover_tilt()
self._requested_closing_tilt = False
def set_cover_position(self, position, **kwargs):
"""Move the cover to a specific position."""
self._set_position = round(position, -1)
if self._position == position:
return
self._listen_cover()
self._requested_closing = position < self._position
def set_cover_tilt_position(self, tilt_position, **kwargs):
"""Move the cover til to a specific position."""
self._set_tilt_position = round(tilt_position, -1)
if self._tilt_position == tilt_position:
return
self._listen_cover_tilt()
self._requested_closing_tilt = tilt_position < self._tilt_position
def stop_cover(self, **kwargs):
"""Stop the cover."""
self._is_closing = False
self._is_opening = False
if self._position is None:
return
if self._unsub_listener_cover is not None:
self._unsub_listener_cover()
self._unsub_listener_cover = None
self._set_position = None
def stop_cover_tilt(self, **kwargs):
"""Stop the cover tilt."""
if self._tilt_position is None:
return
if self._unsub_listener_cover_tilt is not None:
self._unsub_listener_cover_tilt()
self._unsub_listener_cover_tilt = None
self._set_tilt_position = None
def _listen_cover(self):
"""Listen for changes in cover."""
if self._unsub_listener_cover is None:
self._unsub_listener_cover = track_utc_time_change(
self.hass, self._time_changed_cover)
def _time_changed_cover(self, now):
"""Track time changes."""
if self._requested_closing:
self._position -= 10
else:
self._position += 10
if self._position in (100, 0, self._set_position):
self.stop_cover()
self._closed = self.current_cover_position <= 0
self.schedule_update_ha_state()
def _listen_cover_tilt(self):
"""Listen for changes in cover tilt."""
if self._unsub_listener_cover_tilt is None:
self._unsub_listener_cover_tilt = track_utc_time_change(
self.hass, self._time_changed_cover_tilt)
def _time_changed_cover_tilt(self, now):
"""Track time changes."""
if self._requested_closing_tilt:
self._tilt_position -= 10
else:
self._tilt_position += 10
if self._tilt_position in (100, 0, self._set_tilt_position):
self.stop_cover_tilt()
self.schedule_update_ha_state()
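# ---------------------------------------------------------------------------
# Minimal exercise sketch (illustrative only; in practice Home Assistant calls
# setup_platform itself). It simply lists the demo entities created above.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    created = []
    # add_devices may be any callable that accepts a list of entities.
    setup_platform(None, {}, created.extend)
    for demo_cover in created:
        print(demo_cover.name, demo_cover.current_cover_position,
              demo_cover.current_cover_tilt_position)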
|
|
if __name__ == "__main__":
import sys
sys.path += [ "." ]
from base64 import b64encode, b64decode
from binascii import hexlify
from hashlib import sha256
import os.path
from os import urandom
from timeit import default_timer as timer
from collections import defaultdict
from twisted.test.proto_helpers import StringTransport
import rscoin
from rscoin.rscservice import RSCFactory, load_setup, get_authorities
from rscoin.rscservice import package_query, unpackage_query_response, \
package_commit, package_issue, unpackage_commit_response
import pytest
master_ip = "52.17.228.102"
@pytest.fixture
def sometx():
secret = "A" * 32
public = rscoin.Key(secret, public=False).id()
directory = [(public, "127.0.0.1", 8080)]
factory = RSCFactory(secret, directory, None)
# Build one transaction
k1 = rscoin.Key(urandom(32), public=False)
k2 = rscoin.Key(urandom(32), public=False)
tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)])
tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)])
tx3 = rscoin.Tx([rscoin.InputTx(tx1.id(), 0), rscoin.InputTx(tx2.id(), 0)], [rscoin.OutputTx(k1.id(), 250)])
for kv, vv in tx1.get_utxo_out_entries() + tx2.get_utxo_out_entries():
factory.db[kv] = vv
# Run the protocol
instance = factory.buildProtocol(None)
tr = StringTransport()
instance.makeConnection(tr)
return (factory, instance, tr), (k1, k2, tx1, tx2, tx3)
def test_factory():
secret = "A" * 32
public = rscoin.Key(secret, public=False).id()
directory = [(public, "127.0.0.1", 8080)]
factory = RSCFactory(secret, directory, None)
assert os.path.exists(factory.dbname + ".db") or os.path.exists(factory.dbname)
def test_authorities():
chars = ["A", "B", "C", "D", "E", "F"]
directory = [(c* 32, "127.0.0.1", 8080) for c in chars]
secret = "X"*32
factory = RSCFactory(secret, directory, None, N=3)
print
print factory.get_authorities("AXXX")[-1]
print factory.get_authorities("FXXX")[-1]
assert factory.get_authorities("AXXX")[-1][0] == "A"
assert factory.get_authorities("FXXX")[-1][0] == "F"
def test_TxQuery(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
tx4 = rscoin.Tx([rscoin.InputTx(tx1.id(), 0)], [rscoin.OutputTx(k1.id(), 100)])
for kv, vv in tx1.get_utxo_out_entries() + tx2.get_utxo_out_entries():
factory.db[kv] = vv
# Check the list is up to date
for ik in tx3.get_utxo_in_keys():
assert ik in factory.db
data = (tx3, [tx1.serialize(), tx2.serialize()],
[k1.export()[0], k2.export()[0]],
[k1.sign(tx3.id()), k2.sign(tx3.id())])
# Put the transaction through
print "Number of Authorities: %s" % len(factory.get_authorities(tx1.id()))
assert factory.key.id() in factory.get_authorities(tx1.id())
assert factory.process_TxQuery(data)
for ik in tx3.get_utxo_in_keys():
assert ik not in factory.db
## A transaction should be idempotent
assert factory.process_TxQuery(data)
data2 = (tx4, [tx1.serialize()],
[k1.export()[0]],
[k1.sign(tx3.id())])
assert not factory.process_TxQuery(data2)
def test_TxQuery_serialize(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
# Check the list is up to date
for ik in tx3.get_utxo_in_keys():
assert ik in factory.db
H, data, _ = package_query(tx3, [tx1, tx2], [k1, k2])
instance.lineReceived(data)
response = tr.value()
_, k, s = unpackage_query_response(response)
assert factory.key.verify(H, s)
def test_TxCommit(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
# Check the list is up to date
for ik in tx3.get_utxo_in_keys():
assert ik in factory.db
#data1 = map(b64encode, [tx3.serialize(), tx1.serialize(), tx2.serialize(),
# k1.export()[0], k2.export()[0], k1.sign(tx3.id()), k2.sign(tx3.id())])
#H = sha256(" ".join(data1)).digest()
#data = " ".join(["Query", str(len(data1))] + data1)
H, data, dataCore = package_query(tx3, [tx1, tx2], [k1, k2])
instance.lineReceived(data)
response = tr.value()
k, s = map(b64decode, response.split(" ")[1:])
k2 = rscoin.Key(k)
assert factory.key.verify(H, s)
assert k2.verify(H, s)
## Now we test the Commit
tr.clear()
# data = " ".join(["Commit", str(len(dataCore))] + dataCore + map(b64encode, [k, s]))
data = package_commit(dataCore, [(k, s)])
instance.lineReceived(data)
flag, pub, sig = tr.value().split(" ")
assert factory.key.verify(tx3.id(), b64decode(sig))
k3 = rscoin.Key(b64decode(pub))
assert k3.verify(tx3.id(), b64decode(sig))
def test_TxCommit_Issued(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
kIssue = rscoin.Key(urandom(32), public=False)
pubIssue = kIssue.export()[0]
factory.special_key = kIssue.id() # Assign this as the special key
tx3 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 250)])
sig1 = kIssue.sign(tx3.id())
assert tx3.check_transaction_utxo([], [pubIssue], [sig1], kIssue.id())
assert tx3.check_transaction([], [pubIssue], [sig1], kIssue.id())
## Now we test the Commit
# Ensure the entries are not in the db before sending the message
for k, v in tx3.get_utxo_out_entries():
assert k not in factory.db
# Send message
tr.clear()
data = package_issue(tx3, [kIssue, sig1])
instance.lineReceived(data)
# Ensure the returned signatures check
ret, pub, sig = tr.value().split(" ")
assert ret == "OK"
kx = rscoin.Key(b64decode(pub))
assert kx.verify(tx3.id(), b64decode(sig))
# Ensure the entries are now in the db
for k, v in tx3.get_utxo_out_entries():
assert factory.db[k] == v
def test_Ping(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
instance.lineReceived("Ping")
assert unpackage_query_response(tr.value()) == ["Pong", factory.key.id()]
assert tr.value().strip() == "Pong %s" % b64encode(factory.key.id())
def test_setup():
data = """{
"special": "AmodBjXyo2bVqyi1h0e5Kf8hSbGCmalnbF8YwJ0=",
"directory": [ ["A/Sw7CRkoXzB2O0A3WfPMSDIbv/pOxd5Co3u9kM=", "127.0.0.1", 8080] ]
}"""
stuff = load_setup(data)
secret = "hello1"
special = "special"
directory = stuff["directory"]
public = rscoin.Key(secret, public=False).pub.export()
print("\nPublic: %s" % b64encode(public)) # , b64decode
public_special = rscoin.Key(special, public=False).pub.export()
print("Public (special): %s" % b64encode(public_special)) # , b64decode
assert public == directory[0][0]
assert public_special == stuff["special"]
def test_load_balance():
try:
dir_data = load_setup(file("directory.conf").read())
directory = dir_data["directory"]
except:
chars = ["A", "B", "C", "D", "E", "F"]
directory = [(c* 32, "127.0.0.1", 8080) for c in chars]
hist = defaultdict(int)
for _ in range(10000):
x = get_authorities(directory, urandom(32), N = 3)
for xi in x:
hist[xi] += 1
for ki, ni in sorted(hist.iteritems()):
print hexlify(ki)[:8], ni
def test_multiple():
import os
try:
os.mkdir("testscratch")
except:
pass
# Make special keys for making coins
secret_special = "KEYSPECIAL"
public_special = rscoin.Key(secret_special, public=False).id()
# Define a number of keys
all_keys = []
for x in range(100):
secret = "KEY%s" % x
public = rscoin.Key(secret, public=False).id()
all_keys += [(public, secret)]
# Make up the directory
directory = []
for x, (pub, _) in enumerate(all_keys):
directory += [(pub, "127.0.0.1", 8080 + x)]
# Build the factories
factories = {}
for pub, sec in all_keys:
factory = RSCFactory(sec, directory, public_special, conf_dir="testscratch", N=5)
factories[pub] = factory
# Make a mass of transactions
k1 = rscoin.Key(urandom(32), public=False)
k2 = rscoin.Key(urandom(32), public=False)
all_tx_in = []
all_tx_out = []
for _ in range(10):
tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)])
tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)])
tx3 = rscoin.Tx( [rscoin.InputTx(tx1.id(), 0),
rscoin.InputTx(tx2.id(), 0)],
[rscoin.OutputTx(k1.id(), 250)] )
all_tx_in += [ tx1, tx2 ]
all_tx_out += [ tx3 ]
print "Lens: all_tx_in: %s all_tx_out: %s" % (len(all_tx_in), len(all_tx_out))
for tx in all_tx_in:
for kv, vv in tx.get_utxo_out_entries():
for f in factories.values():
f.db[kv] = vv
data = (tx3, [tx1.serialize(), tx2.serialize()],
[k1.export()[0], k2.export()[0]],
[k1.sign(tx3.id()), k2.sign(tx3.id())])
# Put the transaction through
total = 0
[ kid1, kid2 ] = tx3.get_utxo_in_keys()
au1 = get_authorities(directory, kid1, N = 5)
au2 = get_authorities(directory, kid2, N = 5)
auxes = set(au1 + au2)
assert len(auxes) == 10
for aid in auxes:
assert isinstance(aid, str) and len(aid) == 32
assert aid in factories
H, msg, dataCore = package_query(tx3, [tx1, tx2], [k1, k2])
xset = []
rss = []
for kid, f in factories.iteritems():
# resp = f.process_TxQuery(data)
instance = f.buildProtocol(None)
tr = StringTransport()
instance.makeConnection(tr)
instance.lineReceived(msg)
resp_msg = unpackage_query_response(tr.value().strip())
assert kid == f.key.id()
if resp_msg[0] == "OK":
[r, s] = resp_msg[1:]
total += 1
xset += [ f.key.id() ]
rss += [(r,s)]
else:
pass
assert 5 <= total <= 10
assert set(auxes) == set(xset)
## Now test the commit phase
assert 5 <= len(rss) <= 10
msg_commit = package_commit(dataCore, rss)
#from twisted.python import log
#import sys
#log.startLogging(sys.stdout)
total = 0
for kid, f in factories.iteritems():
instance = f.buildProtocol(None)
tr = StringTransport()
instance.makeConnection(tr)
instance.lineReceived(msg_commit)
resp_commit = tr.value().strip()
resp_l = unpackage_commit_response(resp_commit)
if resp_l[0] == "OK":
total += 1
assert total == 5
@pytest.fixture
def msg_mass():
secret = "A" * 32
public = rscoin.Key(secret, public=False).id()
directory = [(public, "127.0.0.1", 8080)]
factory = RSCFactory(secret, directory, None)
# Run the protocol
instance = factory.buildProtocol(None)
tr = StringTransport()
instance.makeConnection(tr)
sometx = (factory, instance, tr)
# Make special keys for making coins
secret_special = "KEYSPECIAL"
public_special = rscoin.Key(secret_special, public=False).pub.export()
# Define a number of keys
all_keys = []
secret = "KEYX"
public = rscoin.Key(secret, public=False).id()
directory = [(public, "127.0.0.1", 8080 )]
# Make a mass of transactions
k1 = rscoin.Key(urandom(32), public=False)
k2 = rscoin.Key(urandom(32), public=False)
all_tx = []
for _ in range(1000):
tx1 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 100)])
tx2 = rscoin.Tx([], [rscoin.OutputTx(k2.id(), 150)])
tx3 = rscoin.Tx( [rscoin.InputTx(tx1.id(), 0),
rscoin.InputTx(tx2.id(), 0)],
[rscoin.OutputTx(k1.id(), 250)] )
all_tx += [ ([tx1, tx2], tx3) ]
for intx, _ in all_tx:
for tx in intx:
for kv, vv in tx.get_utxo_out_entries():
factory.db[kv] = vv
mesages_q = []
for ([tx1, tx2], tx3) in all_tx:
H, data, core = package_query(tx3, [tx1, tx2], [k1, k2])
mesages_q += [ (tx3, data, core) ]
return (sometx, mesages_q)
def test_full_client(msg_mass):
## Run a single client on single CPU and test-stress it.
(sometx, mesages_q) = msg_mass
(factory, instance, tr) = sometx
responses = []
t0 = timer()
for (tx, data, core) in mesages_q:
tr.clear()
instance.lineReceived(data)
response = tr.value()
responses += [(tx, data, core, response)]
t1 = timer()
print "\nQuery message rate: %2.2f / sec" % (1.0 / ((t1-t0)/(len(mesages_q))))
## Now we test the Commit
t0 = timer()
for (tx, data, core, response) in responses:
resp = response.split(" ")
k, s = map(b64decode, resp[1:])
assert resp[0] == "OK"
tr.clear()
data = package_commit(core, [(k, s)])
instance.lineReceived(data)
flag, pub, sig = tr.value().split(" ")
assert flag == "OK"
t1 = timer()
print "\nCommit message rate: %2.2f / sec" % (1.0 / ((t1-t0)/(len(responses))))
# @pytest.mark.online
def test_commit_error(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
instance.lineReceived("xCommit X Y Z")
assert tr.value().strip() == "Error ParsingError"
@pytest.mark.online
def test_online_ping(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
import socket
## Read live directory
HOST = master_ip # The remote host
PORT = 8080 # The same port as used by the server
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
s.sendall('Ping\r\n')
data = s.recv(5000)
s.close()
assert unpackage_query_response(data)[0] == "Pong"
@pytest.mark.online
def test_online_issue(sometx):
(factory, instance, tr), (k1, k2, tx1, tx2, tx3) = sometx
kIssue = rscoin.Key(file("secret.key").read(), False) # rscoin.Key(urandom(32), public=False)
pubIssue = kIssue.pub.export()
factory.special_key = kIssue.id()
print "Public ID: %s" % b64encode(factory.special_key)
# Build a new coin
k1 = rscoin.Key(urandom(32), public=False)
k1pub = k1.pub.export()
tx3 = rscoin.Tx([], [rscoin.OutputTx(k1.id(), 250)])
sig1 = kIssue.sign(tx3.id())
## Now we test the Commit
#data1 = map(b64encode, [tx3.serialize(), pubIssue, sig1])
#data = " ".join(["Commit", str(len(data1))] + data1)
data = package_issue(tx3, [kIssue, sig1])
# test on fake
tr.clear()
instance.lineReceived(data)
# Ensure the returned signatures check
ret, pub, sig = tr.value().split(" ")
assert ret == "OK"
kx = rscoin.Key(b64decode(pub))
assert kx.verify(tx3.id(), b64decode(sig))
# Second time:
tr.clear()
instance.lineReceived(data)
ret2, pub2, sig2 = tr.value().split(" ")
assert ret2 == ret
assert pub2 == pub
assert sig != sig2
assert kx.verify(tx3.id(), b64decode(sig2))
import socket
HOST = master_ip # The remote host
PORT = 8080 # The same port as used by the server
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
s.sendall(data + '\r\n')
data_rec = ''
data_rec += s.recv(4096)
print "Data: '%s'" % data_rec
s.close()
# Ensure the returned signatures check
ret, pub, sig = data_rec.split(" ")
assert ret == "OK"
kx = rscoin.Key(b64decode(pub))
assert kx.verify(tx3.id(), b64decode(sig))
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='Test and time the RSCoin service.')
parser.add_argument('--time', action='store_true', help='Run timing tests')
parser.add_argument('--lprof', action='store_true', help='Run the line profiler')
parser.add_argument('--cprof', action='store_true', help='Run the c profiler')
parser.add_argument('--plot', action='store_true', help='Upload time plot to plotly')
args = parser.parse_args()
if args.time:
xxx = msg_mass()
test_full_client(xxx)
if args.cprof:
import cProfile
xxx = msg_mass()
cProfile.run("test_full_client(xxx)", sort="tottime")
if args.lprof:
from line_profiler import LineProfiler
#import rscoin.rscservice
profile = LineProfiler(rscoin.rscservice.RSCProtocol.handle_Query,
rscoin.rscservice.RSCFactory.process_TxQuery,
rscoin.Tx.check_transaction,
rscoin.Tx.check_transaction_utxo,
rscoin.Tx.parse)
xxx = msg_mass()
profile.run("test_full_client(xxx)")
profile.print_stats()
|
|
import datetime
import itertools
import json
import logging
import shlex
from json.decoder import JSONDecodeError
from typing import Dict, List, Optional, Tuple, Union
import requests
import requests.adapters
from requests.exceptions import HTTPError, RequestException
from datahub import __package_name__
from datahub.configuration.common import OperationalError
from datahub.emitter.mcp import MetadataChangeProposalWrapper
from datahub.emitter.serialization_helper import pre_json_transform
from datahub.metadata.com.linkedin.pegasus2avro.mxe import (
MetadataChangeEvent,
MetadataChangeProposal,
)
from datahub.metadata.com.linkedin.pegasus2avro.usage import UsageAggregation
logger = logging.getLogger(__name__)
def _make_curl_command(
session: requests.Session, method: str, url: str, payload: str
) -> str:
fragments: List[str] = [
"curl",
*itertools.chain(
*[
("-X", method),
*[("-H", f"{k}: {v}") for (k, v) in session.headers.items()],
("--data", payload),
]
),
url,
]
return " ".join(shlex.quote(fragment) for fragment in fragments)
class DatahubRestEmitter:
DEFAULT_CONNECT_TIMEOUT_SEC = 30 # 30 seconds should be plenty to connect
DEFAULT_READ_TIMEOUT_SEC = (
30 # Any ingest call taking longer than 30 seconds should be abandoned
)
_gms_server: str
_token: Optional[str]
_session: requests.Session
_connect_timeout_sec: float = DEFAULT_CONNECT_TIMEOUT_SEC
_read_timeout_sec: float = DEFAULT_READ_TIMEOUT_SEC
def __init__(
self,
gms_server: str,
token: Optional[str] = None,
connect_timeout_sec: Optional[float] = None,
read_timeout_sec: Optional[float] = None,
extra_headers: Optional[Dict[str, str]] = None,
ca_certificate_path: Optional[str] = None,
):
if ":9002" in gms_server:
logger.warning(
"the rest emitter should connect to GMS (usually port 8080) instead of frontend"
)
self._gms_server = gms_server
self._token = token
self._session = requests.Session()
adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=100)
self._session.mount("http://", adapter)
self._session.mount("https://", adapter)
self._session.headers.update(
{
"X-RestLi-Protocol-Version": "2.0.0",
"Content-Type": "application/json",
}
)
if token:
self._session.headers.update({"Authorization": f"Bearer {token}"})
if extra_headers:
self._session.headers.update(extra_headers)
if ca_certificate_path:
self._session.verify = ca_certificate_path
if connect_timeout_sec:
self._connect_timeout_sec = connect_timeout_sec
if read_timeout_sec:
self._read_timeout_sec = read_timeout_sec
if self._connect_timeout_sec < 1 or self._read_timeout_sec < 1:
logger.warning(
f"Setting timeout values lower than 1 second is not recommended. Your configuration is connect_timeout:{self._connect_timeout_sec}s, read_timeout:{self._read_timeout_sec}s"
)
def test_connection(self) -> None:
response = self._session.get(f"{self._gms_server}/config")
response.raise_for_status()
config: dict = response.json()
if config.get("noCode") != "true":
raise ValueError(
f"This version of {__package_name__} requires GMS v0.8.0 or higher"
)
def emit(
self,
item: Union[
MetadataChangeEvent,
MetadataChangeProposal,
MetadataChangeProposalWrapper,
UsageAggregation,
],
) -> Tuple[datetime.datetime, datetime.datetime]:
start_time = datetime.datetime.now()
if isinstance(item, UsageAggregation):
self.emit_usage(item)
elif isinstance(item, (MetadataChangeProposal, MetadataChangeProposalWrapper)):
self.emit_mcp(item)
else:
self.emit_mce(item)
return start_time, datetime.datetime.now()
def emit_mce(self, mce: MetadataChangeEvent) -> None:
url = f"{self._gms_server}/entities?action=ingest"
raw_mce_obj = mce.proposedSnapshot.to_obj()
mce_obj = pre_json_transform(raw_mce_obj)
snapshot_fqn = (
f"com.linkedin.metadata.snapshot.{mce.proposedSnapshot.RECORD_SCHEMA.name}"
)
system_metadata_obj = {}
if mce.systemMetadata is not None:
system_metadata_obj = {
"lastObserved": mce.systemMetadata.lastObserved,
"runId": mce.systemMetadata.runId,
}
snapshot = {
"entity": {"value": {snapshot_fqn: mce_obj}},
"systemMetadata": system_metadata_obj,
}
payload = json.dumps(snapshot)
self._emit_generic(url, payload)
def emit_mcp(
self, mcp: Union[MetadataChangeProposal, MetadataChangeProposalWrapper]
) -> None:
url = f"{self._gms_server}/aspects?action=ingestProposal"
mcp_obj = pre_json_transform(mcp.to_obj())
payload = json.dumps({"proposal": mcp_obj})
self._emit_generic(url, payload)
def emit_usage(self, usageStats: UsageAggregation) -> None:
url = f"{self._gms_server}/usageStats?action=batchIngest"
raw_usage_obj = usageStats.to_obj()
usage_obj = pre_json_transform(raw_usage_obj)
snapshot = {
"buckets": [
usage_obj,
]
}
payload = json.dumps(snapshot)
self._emit_generic(url, payload)
def _emit_generic(self, url: str, payload: str) -> None:
curl_command = _make_curl_command(self._session, "POST", url, payload)
logger.debug(
"Attempting to emit to DataHub GMS; using curl equivalent to:\n%s",
curl_command,
)
try:
response = self._session.post(
url,
data=payload,
timeout=(self._connect_timeout_sec, self._read_timeout_sec),
)
response.raise_for_status()
except HTTPError as e:
try:
info = response.json()
raise OperationalError(
"Unable to emit metadata to DataHub GMS", info
) from e
except JSONDecodeError:
# If we can't parse the JSON, just raise the original error.
raise OperationalError(
"Unable to emit metadata to DataHub GMS", {"message": str(e)}
) from e
except RequestException as e:
raise OperationalError(
"Unable to emit metadata to DataHub GMS", {"message": str(e)}
) from e
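# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the emitter class above). The GMS URL is a
# placeholder assumption; point it at your own GMS instance in practice.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    emitter = DatahubRestEmitter(
        gms_server="http://localhost:8080",  # assumed local GMS endpoint
        connect_timeout_sec=5,
        read_timeout_sec=15,
    )
    # Fails fast (HTTPError or ValueError) if the server is unreachable or
    # predates the no-code metadata model, before anything is emitted.
    emitter.test_connection()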
|
|
import os
# Path helper
location = lambda x: os.path.join(
os.path.dirname(os.path.realpath(__file__)), x)
USE_TZ = True
DEBUG = True
TEMPLATE_DEBUG = True
SQL_DEBUG = True
EMAIL_SUBJECT_PREFIX = '[Oscar US sandbox] '
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Use a Sqlite database by default
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': location('db.sqlite'),
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
# Includes all languages that have >50% coverage in Transifex
# Taken from Django's default setting for LANGUAGES
gettext_noop = lambda s: s
LANGUAGES = (
('en-us', gettext_noop('American English')),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = location("public/media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/admin/'
STATIC_URL = '/static/'
STATIC_ROOT = location('public/static')
STATICFILES_DIRS = (
location('static/'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '$)a6n&o80u!6y5t-+jrd3)3!%vh&shg$wqpjpxc!ar&p#!)n1a'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# needed by django-treebeard for admin (and potentially other libs)
'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
# Oscar specific
'oscar.apps.search.context_processors.search_form',
'oscar.apps.promotions.context_processors.promotions',
'oscar.apps.checkout.context_processors.checkout',
'oscar.core.context_processors.metadata',
'oscar.apps.customer.notifications.context_processors.notifications',
)
MIDDLEWARE_CLASSES = (
'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
# Allow languages to be selected
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
# Ensure a valid basket is added to the request instance for every request
'oscar.apps.basket.middleware.BasketMiddleware',
# Enable the ProfileMiddleware, then add ?cprofile to any
# URL path to print out profile details
# 'oscar.profiling.middleware.ProfileMiddleware',
)
ROOT_URLCONF = 'urls'
# Add another path to Oscar's templates. This allows templates to be
# customised easily.
from oscar import OSCAR_MAIN_TEMPLATE_DIR
TEMPLATE_DIRS = (
location('templates'),
OSCAR_MAIN_TEMPLATE_DIR,
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(message)s',
},
'simple': {
'format': '[%(asctime)s] %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'checkout_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'checkout.log',
'formatter': 'verbose'
},
'gateway_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'gateway.log',
'formatter': 'simple'
},
'error_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'errors.log',
'formatter': 'verbose'
},
'sorl_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'sorl.log',
'formatter': 'verbose'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'filters': ['require_debug_false'],
},
},
'loggers': {
# Django loggers
'django': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['mail_admins', 'error_file'],
'level': 'ERROR',
'propagate': False,
},
'django.db.backends': {
'handlers': ['null'],
'propagate': False,
'level': 'DEBUG',
},
# Oscar core loggers
'oscar.checkout': {
'handlers': ['console', 'checkout_file'],
'propagate': False,
'level': 'INFO',
},
'oscar.catalogue.import': {
'handlers': ['console'],
'propagate': False,
'level': 'INFO',
},
'oscar.alerts': {
'handlers': ['null'],
'propagate': False,
'level': 'INFO',
},
# Sandbox logging
'gateway': {
'handlers': ['gateway_file'],
'propagate': True,
'level': 'INFO',
},
# Third party
'south': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'sorl.thumbnail': {
'handlers': ['sorl_file'],
'propagate': True,
'level': 'INFO',
},
# Suppress output of this debug toolbar panel
'template_timings_panel': {
'handlers': ['null'],
'level': 'DEBUG',
'propagate': False,
}
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.flatpages',
'django.contrib.staticfiles',
'django.contrib.sitemaps',
'django_extensions',
# Debug toolbar + extensions
'debug_toolbar',
'template_timings_panel',
'compressor', # Oscar's templates use compressor
]
from oscar import get_core_apps
INSTALLED_APPS = INSTALLED_APPS + get_core_apps(
['apps.partner', 'apps.checkout', 'apps.shipping'])
import django
if django.VERSION < (1, 7):
INSTALLED_APPS.append('south')
# Add Oscar's custom auth backend so users can sign in using their email
# address.
AUTHENTICATION_BACKENDS = (
'oscar.apps.customer.auth_backends.EmailBackend',
'django.contrib.auth.backends.ModelBackend',
)
LOGIN_REDIRECT_URL = '/'
APPEND_SLASH = True
# Haystack settings
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
'PATH': location('whoosh_index'),
},
}
# =============
# Debug Toolbar
# =============
# Implicit setup can often lead to problems with circular imports, so we
# explicitly wire up the toolbar
DEBUG_TOOLBAR_PATCH_SETTINGS = False
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'template_timings_panel.panels.TemplateTimings.TemplateTimings',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
INTERNAL_IPS = ['127.0.0.1', '::1']
# ==============
# Oscar settings
# ==============
from oscar.defaults import * # noqa
# Meta
# ====
OSCAR_SHOP_TAGLINE = 'US Sandbox'
OSCAR_DEFAULT_CURRENCY = 'USD'
OSCAR_ALLOW_ANON_CHECKOUT = True
# LESS/CSS/statics
# ================
# We default to using CSS files, rather than the LESS files that generate them.
# If you want to develop Oscar's CSS, then set USE_LESS=True and
# COMPRESS_ENABLED=False in your settings_local module and ensure you have
# 'lessc' installed.
USE_LESS = False
COMPRESS_ENABLED = True
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc {infile} {outfile}'),
)
COMPRESS_OFFLINE_CONTEXT = {
'STATIC_URL': 'STATIC_URL',
'use_less': USE_LESS,
}
# We do this to work around an issue in compressor where the LESS files are
# compiled but compression isn't enabled. When this happens, the relative URL
# is wrong between the generated CSS file and other assets:
# https://github.com/jezdez/django_compressor/issues/226
COMPRESS_OUTPUT_DIR = 'oscar'
# Logging
# =======
LOG_ROOT = location('logs')
# Ensure log root exists
if not os.path.exists(LOG_ROOT):
os.mkdir(LOG_ROOT)
# Sorl
# ====
THUMBNAIL_DEBUG = True
THUMBNAIL_KEY_PREFIX = 'oscar-us-sandbox'
# Django 1.6 has switched to JSON session serialization for security reasons,
# but the JSON serializer does not handle model instances. We should resolve
# this by extending django/core/serializers/json.Serializer with a `dumps`
# function. Also in tests/config.py
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Try and import local settings which can be used to override any of the above.
try:
from settings_local import * # noqa
except ImportError:
pass
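# A settings_local.py sketch (illustrative): typical overrides referenced
# above, e.g. while working on Oscar's LESS files or debugging locally.
#
# USE_LESS = True
# COMPRESS_ENABLED = False
# DEBUG = True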
|
|
# coding=utf-8
"""Module that provides a cron-like task scheduler.
This task scheduler is designed to be used from inside your own program.
You can schedule Python functions to be called at specific intervals or
days. It uses the standard 'sched' module for the actual task scheduling,
but provides much more:
* repeated tasks (at intervals, or on specific days)
* error handling (exceptions in tasks don't kill the scheduler)
* optional to run scheduler in its own thread or separate process
* optional to run a task in its own thread or separate process
If the threading module is available, you can use the various Threaded
variants of the scheduler and associated tasks. If threading is not
available, you could still use the forked variants. If fork is also
not available, all processing is done in a single process, sequentially.
There are three Scheduler classes:
Scheduler ThreadedScheduler ForkedScheduler
You usually add new tasks to a scheduler using the add_interval_task or
add_daytime_task methods, with the appropriate processmethod argument
to select sequential, threaded or forked processing. NOTE: it is impossible
to add new tasks to a ForkedScheduler after the scheduler has been started!
For more control you can use one of the following Task classes
and use schedule_task or schedule_task_abs:
IntervalTask ThreadedIntervalTask ForkedIntervalTask
SingleTask ThreadedSingleTask ForkedSingleTask
WeekdayTask ThreadedWeekdayTask ForkedWeekdayTask
MonthdayTask ThreadedMonthdayTask ForkedMonthdayTask
Kronos is the Greek God of Time.
Kronos scheduler (c) Irmen de Jong.
This version has been extracted from the Turbogears source repository
and slightly changed to be completely stand-alone again. Also some fixes
have been made to make it work on Python 2.6 (sched module changes).
The version in Turbogears is based on the original stand-alone Kronos.
This is open-source software, released under the MIT Software License:
http://www.opensource.org/licenses/mit-license.php
"""
__version__ = "2.0"
__all__ = [
"DayTaskRescheduler",
"ForkedIntervalTask",
"ForkedMonthdayTask",
"ForkedScheduler",
"ForkedSingleTask",
"ForkedTaskMixin",
"ForkedWeekdayTask",
"IntervalTask",
"MonthdayTask",
"Scheduler",
"SingleTask",
"Task",
"ThreadedIntervalTask",
"ThreadedMonthdayTask",
"ThreadedScheduler",
"ThreadedSingleTask",
"ThreadedTaskMixin",
"ThreadedWeekdayTask",
"WeekdayTask",
]
import os
import sys
import sched
import time
import logging
import traceback
import weakref
class method:
sequential = "sequential"
forked = "forked"
threaded = "threaded"
class SchedulerNotRunning(Exception):
"""Interrupt a running scheduler.
This exception is used to break out of sched.sched.run when we
are not running.
"""
pass
class Scheduler:
"""The Scheduler itself."""
def __init__(self):
self.running = True
self.log = logging.getLogger('diamond')
self.sched = sched.scheduler(time.time, self.__delayfunc)
def __delayfunc(self, delay):
# This delay function is basically a time.sleep() that is
# divided up, so that we can check the self.running flag while
# delaying. There is an additional check in here to ensure that the
# top item of the queue hasn't changed
if delay < 10:
time.sleep(delay)
else:
toptime = self._getqueuetoptime()
endtime = time.time() + delay
period = 5
stoptime = endtime - period
while (self.running
and stoptime > time.time()
and self._getqueuetoptime() == toptime):
time.sleep(period)
if self.running and self._getqueuetoptime() == toptime:
now = time.time()
if endtime > now:
time.sleep(endtime - now)
if not self.running:
raise SchedulerNotRunning()
def _acquire_lock(self):
pass
def _release_lock(self):
pass
def add_interval_task(self, action, taskname, initialdelay, interval,
processmethod, args, kw, abs=False):
"""Add a new Interval Task to the schedule.
A very short initialdelay, or one of zero, cannot be honored; you will
see a slight delay before the task is first executed because the
scheduler needs to pick it up in its loop.
"""
if initialdelay < 0 or interval < 1:
raise ValueError("Delay or interval must be >0")
# Select the correct IntervalTask class.
# Not all types may be available!
if processmethod == method.sequential:
TaskClass = IntervalTask
elif processmethod == method.threaded:
TaskClass = ThreadedIntervalTask
elif processmethod == method.forked:
TaskClass = ForkedIntervalTask
else:
raise ValueError("Invalid processmethod")
if not args:
args = []
if not kw:
kw = {}
task = TaskClass(taskname, interval, action, args, kw, abs)
self.schedule_task(task, initialdelay)
return task
def add_single_task(self, action, taskname, initialdelay, processmethod,
args, kw):
"""Add a new task to the scheduler that will only be executed once."""
if initialdelay < 0:
raise ValueError("Delay must be >0")
# Select the correct SingleTask class. Not all types may be available!
if processmethod == method.sequential:
TaskClass = SingleTask
elif processmethod == method.threaded:
TaskClass = ThreadedSingleTask
elif processmethod == method.forked:
TaskClass = ForkedSingleTask
else:
raise ValueError("Invalid processmethod")
if not args:
args = []
if not kw:
kw = {}
task = TaskClass(taskname, action, args, kw)
self.schedule_task(task, initialdelay)
return task
def add_daytime_task(self, action, taskname, weekdays, monthdays,
timeonday, processmethod, args, kw):
"""Add a new Day Task (Weekday or Monthday) to the schedule."""
if weekdays and monthdays:
raise ValueError("You can only specify weekdays or monthdays, "
"not both")
if not args:
args = []
if not kw:
kw = {}
if weekdays:
# Select the correct WeekdayTask class.
# Not all types may be available!
if processmethod == method.sequential:
TaskClass = WeekdayTask
elif processmethod == method.threaded:
TaskClass = ThreadedWeekdayTask
elif processmethod == method.forked:
TaskClass = ForkedWeekdayTask
else:
raise ValueError("Invalid processmethod")
task = TaskClass(taskname, weekdays, timeonday, action, args, kw)
if monthdays:
# Select the correct MonthdayTask class.
# Not all types may be available!
if processmethod == method.sequential:
TaskClass = MonthdayTask
elif processmethod == method.threaded:
TaskClass = ThreadedMonthdayTask
elif processmethod == method.forked:
TaskClass = ForkedMonthdayTask
else:
raise ValueError("Invalid processmethod")
task = TaskClass(taskname, monthdays, timeonday, action, args, kw)
firsttime = task.get_schedule_time(True)
self.schedule_task_abs(task, firsttime)
return task
def schedule_task(self, task, delay):
"""Add a new task to the scheduler with the given delay (seconds).
Low-level method for internal use.
"""
if self.running:
# lock the sched queue, if needed
self._acquire_lock()
try:
task.event = self.sched.enter(delay, 0, task,
(weakref.ref(self),))
finally:
self._release_lock()
else:
task.event = self.sched.enter(delay, 0, task,
(weakref.ref(self),))
def schedule_task_abs(self, task, abstime):
"""Add a new task to the scheduler for the given absolute time value.
Low-level method for internal use.
"""
if self.running:
# lock the sched queue, if needed
self._acquire_lock()
try:
task.event = self.sched.enterabs(abstime, 0, task,
(weakref.ref(self),))
finally:
self._release_lock()
else:
task.event = self.sched.enterabs(abstime, 0, task,
(weakref.ref(self),))
def start(self):
"""Start the scheduler."""
self._run()
def stop(self):
"""Remove all pending tasks and stop the Scheduler."""
self.running = False
# Pending tasks are removed in _run.
def cancel(self, task):
"""Cancel given scheduled task."""
self.sched.cancel(task.event)
if sys.version_info >= (2, 6):
# code for sched module of python 2.6+
def _getqueuetoptime(self):
return self.sched._queue[0].time
def _clearschedqueue(self):
self.sched._queue[:] = []
else:
# code for sched module of python 2.5 and older
def _getqueuetoptime(self):
return self.sched.queue[0][0]
def _clearschedqueue(self):
self.sched.queue[:] = []
def _run(self):
# Low-level run method to do the actual scheduling loop.
while self.running:
try:
self.sched.run()
except SchedulerNotRunning:
self._clearschedqueue()
except Exception, x:
self.log.error("ERROR DURING SCHEDULER EXECUTION %s \n %s", x,
"".join(traceback.format_exception(*sys.exc_info())))
# queue is empty; sleep a short while before checking again
if self.running:
time.sleep(5)
class Task(object):
"""Abstract base class of all scheduler tasks"""
def __init__(self, name, action, args, kw):
"""This is an abstract class!"""
self.name = name
self.action = action
self.args = args
self.kw = kw
self.log = logging.getLogger('diamond')
def __call__(self, schedulerref):
"""Execute the task action in the scheduler's thread."""
try:
self.execute()
except Exception, x:
self.handle_exception(x)
self.reschedule(schedulerref())
def reschedule(self, scheduler):
"""This method should be defined in one of the sub classes!"""
raise NotImplementedError("You're using the abstract class 'Task',"
" use a concrete class instead")
def execute(self):
"""Execute the actual task."""
self.action(*self.args, **self.kw)
def handle_exception(self, exc):
"""Handle any exception that occured during task execution."""
self.log.error("ERROR DURING TASK EXECUTION %s \n %s", exc,
"".join(traceback.format_exception(*sys.exc_info())))
class SingleTask(Task):
"""A task that only runs once."""
def reschedule(self, scheduler):
pass
class IntervalTask(Task):
"""A repeated task that occurs at certain intervals (in seconds)."""
def __init__(self, name, interval, action, args=None, kw=None, abs=False):
Task.__init__(self, name, action, args, kw)
self.absolute = abs
self.interval = interval
self.duration = 0
def execute(self):
""" Execute the actual task."""
start_time = time.time()
self.action(*self.args, **self.kw)
end_time = time.time()
self.duration = int(end_time - start_time)
def reschedule(self, scheduler):
"""Reschedule this task according to its interval (in seconds)."""
if self.absolute and self.duration:
if self.duration < self.interval:
scheduler.schedule_task(self, self.interval - self.duration)
else:
scheduler.schedule_task(self, 0)
else:
scheduler.schedule_task(self, self.interval)
class DayTaskRescheduler:
"""A mixin class that contains the reschedule logic for the DayTasks."""
def __init__(self, timeonday):
self.timeonday = timeonday
def get_schedule_time(self, today):
"""Calculate the time value at which this task is to be scheduled."""
now = list(time.localtime())
if today:
# schedule for today. let's see if that is still possible
if (now[3], now[4]) >= self.timeonday:
# too bad, it will be tomorrow
now[2] += 1
else:
# tomorrow
now[2] += 1
# set new time on day (hour,minute)
now[3], now[4] = self.timeonday
# seconds
now[5] = 0
return time.mktime(now)
def reschedule(self, scheduler):
"""Reschedule this task according to the daytime for the task.
The task is scheduled for tomorrow, for the given daytime.
"""
# (The execute method in the concrete Task classes will check
# if the current day is a day on which the task must run).
abstime = self.get_schedule_time(False)
scheduler.schedule_task_abs(self, abstime)
class WeekdayTask(DayTaskRescheduler, Task):
"""A task that is called at specific days in a week (1-7), at a fixed time
on the day.
"""
def __init__(self, name, weekdays, timeonday, action, args=None, kw=None):
if type(timeonday) not in (list, tuple) or len(timeonday) != 2:
raise TypeError("timeonday must be a 2-tuple (hour,minute)")
if type(weekdays) not in (list, tuple):
raise TypeError("weekdays must be a sequence of weekday numbers "
"1-7 (1 is Monday)")
DayTaskRescheduler.__init__(self, timeonday)
Task.__init__(self, name, action, args, kw)
self.days = weekdays
def execute(self):
# This is called every day, at the correct time. We only need to
# check if we should run this task today (this day of the week).
weekday = time.localtime().tm_wday + 1
if weekday in self.days:
self.action(*self.args, **self.kw)
class MonthdayTask(DayTaskRescheduler, Task):
"""A task that is called at specific days in a month (1-31), at a fixed
time on the day.
"""
def __init__(self, name, monthdays, timeonday, action, args=None, kw=None):
if type(timeonday) not in (list, tuple) or len(timeonday) != 2:
raise TypeError("timeonday must be a 2-tuple (hour,minute)")
if type(monthdays) not in (list, tuple):
raise TypeError("monthdays must be a sequence of numbers 1-31")
DayTaskRescheduler.__init__(self, timeonday)
Task.__init__(self, name, action, args, kw)
self.days = monthdays
def execute(self):
# This is called every day, at the correct time. We only need to
# check if we should run this task today (this day of the month).
if time.localtime().tm_mday in self.days:
self.action(*self.args, **self.kw)
try:
import threading
class ThreadedScheduler(Scheduler):
"""A Scheduler that runs in its own thread."""
def __init__(self):
Scheduler.__init__(self)
# we require a lock around the task queue
self._lock = threading.Lock()
def start(self):
"""Splice off a thread in which the scheduler will run."""
self.thread = threading.Thread(target=self._run)
self.thread.setDaemon(True)
self.thread.start()
def stop(self):
"""Stop the scheduler and wait for the thread to finish."""
Scheduler.stop(self)
try:
self.thread.join()
except AttributeError:
pass
def _acquire_lock(self):
"""Lock the thread's task queue."""
self._lock.acquire()
def _release_lock(self):
"""Release the lock on the thread's task queue."""
self._lock.release()
class ThreadedTaskMixin:
"""A mixin class to make a Task execute in a separate thread."""
def __call__(self, schedulerref):
"""Execute the task action in its own thread."""
threading.Thread(target=self.threadedcall).start()
self.reschedule(schedulerref())
def threadedcall(self):
# This method is run within its own thread, so we have to
# do the execute() call and exception handling here.
try:
self.execute()
except Exception, x:
self.handle_exception(x)
class ThreadedIntervalTask(ThreadedTaskMixin, IntervalTask):
"""Interval Task that executes in its own thread."""
def __init__(self, name, interval, action, args=None, kw=None,
abs=False):
# Force abs to be False, as in threaded mode we reschedule
# immediately.
super(ThreadedIntervalTask, self).__init__(name, interval, action,
args=args, kw=kw,
abs=False)
class ThreadedSingleTask(ThreadedTaskMixin, SingleTask):
"""Single Task that executes in its own thread."""
pass
class ThreadedWeekdayTask(ThreadedTaskMixin, WeekdayTask):
"""Weekday Task that executes in its own thread."""
pass
class ThreadedMonthdayTask(ThreadedTaskMixin, MonthdayTask):
"""Monthday Task that executes in its own thread."""
pass
except ImportError:
# threading is not available
pass
if hasattr(os, "fork"):
import signal
class ForkedScheduler(Scheduler):
"""A Scheduler that runs in its own forked process."""
def __del__(self):
if hasattr(self, "childpid"):
os.kill(self.childpid, signal.SIGKILL)
def start(self):
"""Fork off a new process in which the scheduler will run."""
pid = os.fork()
if pid == 0:
# we are the child
signal.signal(signal.SIGUSR1, self.signalhandler)
self._run()
os._exit(0)
else:
# we are the parent
self.childpid = pid
# can no longer insert in the scheduler queue
del self.sched
def stop(self):
"""Stop the scheduler and wait for the process to finish."""
os.kill(self.childpid, signal.SIGUSR1)
os.waitpid(self.childpid, 0)
def signalhandler(self, sig, stack):
Scheduler.stop(self)
class ForkedTaskMixin:
"""A mixin class to make a Task execute in a separate process."""
def __call__(self, schedulerref):
"""Execute the task action in its own process."""
pid = os.fork()
if pid == 0:
# we are the child
try:
self.execute()
except Exception, x:
self.handle_exception(x)
os._exit(0)
else:
# we are the parent
self.reschedule(schedulerref())
class ForkedIntervalTask(ForkedTaskMixin, IntervalTask):
"""Interval Task that executes in its own process."""
def __init__(self, name, interval, action, args=None, kw=None,
abs=False):
# Force abs to be False, as in forked mode we reschedule
# immediately.
super(ForkedIntervalTask, self).__init__(name, interval, action,
args=args, kw=kw,
abs=False)
class ForkedSingleTask(ForkedTaskMixin, SingleTask):
"""Single Task that executes in its own process."""
pass
class ForkedWeekdayTask(ForkedTaskMixin, WeekdayTask):
"""Weekday Task that executes in its own process."""
pass
class ForkedMonthdayTask(ForkedTaskMixin, MonthdayTask):
"""Monthday Task that executes in its own process."""
pass
if __name__ == "__main__":
def testaction(arg):
print ">>>TASK", arg, "sleeping 3 seconds"
time.sleep(3)
print "<<<END_TASK", arg
s = ThreadedScheduler()
s.add_interval_task(testaction,
"test action 1",
0,
4,
method.threaded,
["task 1"],
None)
s.start()
print "Scheduler started, waiting 15 sec...."
time.sleep(15)
print "STOP SCHEDULER"
s.stop()
print "EXITING"
|
|
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv6-reachability/prefixes/prefix/mt/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters of MT IPv6 Reachability
"""
__slots__ = ("_path_helper", "_extmethods", "__mt_id")
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__mt_id = YANGDynClass(
base=RestrictedClassType(
base_type=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
restriction_dict={"range": ["0 .. 4095"]},
),
is_leaf=True,
yang_name="mt-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint16",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-ipv6-reachability",
"prefixes",
"prefix",
"mt",
"state",
]
def _get_mt_id(self):
"""
Getter method for mt_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/mt/state/mt_id (uint16)
YANG Description: Multi-topology ID.
"""
return self.__mt_id
def _set_mt_id(self, v, load=False):
"""
Setter method for mt_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/mt/state/mt_id (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_mt_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mt_id() directly.
YANG Description: Multi-topology ID.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=RestrictedClassType(
base_type=int,
restriction_dict={"range": ["0..65535"]},
int_size=16,
),
restriction_dict={"range": ["0 .. 4095"]},
),
is_leaf=True,
yang_name="mt-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint16",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """mt_id must be of a type compatible with uint16""",
"defined-type": "uint16",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['0 .. 4095']}), is_leaf=True, yang_name="mt-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)""",
}
)
self.__mt_id = t
if hasattr(self, "_set"):
self._set()
def _unset_mt_id(self):
self.__mt_id = YANGDynClass(
base=RestrictedClassType(
base_type=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
restriction_dict={"range": ["0 .. 4095"]},
),
is_leaf=True,
yang_name="mt-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint16",
is_config=False,
)
mt_id = __builtin__.property(_get_mt_id)
_pyangbind_elements = OrderedDict([("mt_id", mt_id)])
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-ipv6-reachability/prefixes/prefix/mt/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: State parameters of MT IPv6 Reachability
"""
__slots__ = ("_path_helper", "_extmethods", "__mt_id")
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__mt_id = YANGDynClass(
base=RestrictedClassType(
base_type=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
restriction_dict={"range": ["0 .. 4095"]},
),
is_leaf=True,
yang_name="mt-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint16",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"mt-ipv6-reachability",
"prefixes",
"prefix",
"mt",
"state",
]
def _get_mt_id(self):
"""
Getter method for mt_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/mt/state/mt_id (uint16)
YANG Description: Multi-topology ID.
"""
return self.__mt_id
def _set_mt_id(self, v, load=False):
"""
Setter method for mt_id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_ipv6_reachability/prefixes/prefix/mt/state/mt_id (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_mt_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mt_id() directly.
YANG Description: Multi-topology ID.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=RestrictedClassType(
base_type=int,
restriction_dict={"range": ["0..65535"]},
int_size=16,
),
restriction_dict={"range": ["0 .. 4095"]},
),
is_leaf=True,
yang_name="mt-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint16",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """mt_id must be of a type compatible with uint16""",
"defined-type": "uint16",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': ['0 .. 4095']}), is_leaf=True, yang_name="mt-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint16', is_config=False)""",
}
)
self.__mt_id = t
if hasattr(self, "_set"):
self._set()
def _unset_mt_id(self):
self.__mt_id = YANGDynClass(
base=RestrictedClassType(
base_type=RestrictedClassType(
base_type=int, restriction_dict={"range": ["0..65535"]}, int_size=16
),
restriction_dict={"range": ["0 .. 4095"]},
),
is_leaf=True,
yang_name="mt-id",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint16",
is_config=False,
)
mt_id = __builtin__.property(_get_mt_id)
_pyangbind_elements = OrderedDict([("mt_id", mt_id)])
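# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the generated bindings):
# assuming the `state` class above is imported, backends populate the
# read-only mt-id leaf through the private setter, as its docstring says.
#
#   s = state()
#   s._set_mt_id(2)        # accepted: 2 falls inside the 0..4095 restriction
#   s.mt_id                # -> 2 (exposed as a read-only property)
#   s._set_mt_id(70000)    # raises ValueError: not representable as uint16
# ---------------------------------------------------------------------------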
|
|
#!/usr/bin/python
# Copyright (c) 2009, Purdue University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# Neither the name of the Purdue University nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Regression test for dnszoneimporter
Make sure you are running this against a database that can be destroyed.
DO NOT EVER RUN THIS TEST AGAINST A PRODUCTION DATABASE.
"""
__copyright__ = 'Copyright (C) 2009, Purdue University'
__license__ = 'BSD'
__version__ = '#TRUNK#'
import os
import sys
import socket
import threading
import time
import getpass
import unittest
import roster_core
import roster_server
from roster_user_tools import roster_client_lib
USER_CONFIG = 'test_data/roster.conf'
CONFIG_FILE = 'test_data/roster.conf' # Example in test_data
SCHEMA_FILE = '../roster-core/data/database_schema.sql'
DATA_FILE = 'test_data/test_data.sql'
HOST = u'localhost'
USERNAME = u'sharrell'
PASSWORD = u'test'
KEYFILE=('test_data/dnsmgmt.key.pem')
CERTFILE=('test_data/dnsmgmt.cert.pem')
CREDFILE='%s/.dnscred' % os.getcwd()
EXEC='../roster-config-manager/scripts/dnszoneimporter'
class DaemonThread(threading.Thread):
def __init__(self, config_instance, port):
threading.Thread.__init__(self)
self.config_instance = config_instance
self.port = port
self.daemon_instance = None
def run(self):
self.daemon_instance = roster_server.Server(self.config_instance, KEYFILE,
CERTFILE)
self.daemon_instance.Serve(port=self.port)
class TestDnsZoneImport(unittest.TestCase):
def setUp(self):
def PickUnusedPort():
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((HOST, 0))
addr, port = s.getsockname()
s.close()
return port
self.config_instance = roster_core.Config(file_name=CONFIG_FILE)
db_instance = self.config_instance.GetDb()
db_instance.CreateRosterDatabase()
data = open(DATA_FILE, 'r').read()
db_instance.StartTransaction()
db_instance.cursor.execute(data)
db_instance.EndTransaction()
db_instance.close()
self.port = PickUnusedPort()
self.server_name = 'https://%s:%s' % (HOST, self.port)
self.daemon_thread = DaemonThread(self.config_instance, self.port)
self.daemon_thread.daemon = True
self.daemon_thread.start()
self.core_instance = roster_core.Core(USERNAME, self.config_instance)
self.password = 'test'
time.sleep(1)
roster_client_lib.GetCredentials(USERNAME, u'test', credfile=CREDFILE,
server_name=self.server_name)
self.core_instance.MakeView(u'test_view1')
self.core_instance.MakeView(u'test_view2')
self.core_instance.MakeZone(u'sub.university.lcl', u'master',
u'sub.university.lcl.', view_name=u'test_view1')
self.core_instance.MakeZone(u'0.168.192.in-addr.arpa', u'master',
u'0.168.192.in-addr.arpa.',
view_name=u'test_view1')
self.core_instance.MakeZone(u'8.0.e.f.f.3.ip6.arpa', u'master',
u'8.0.e.f.f.3.ip6.arpa.',
view_name=u'test_view1')
self.core_instance.MakeReverseRangeZoneAssignment(
u'0.168.192.in-addr.arpa', u'192.168.0/24')
self.core_instance.MakeReverseRangeZoneAssignment(
u'8.0.e.f.f.3.ip6.arpa', u'3ffe:0800:0000:0000:0000:0000:0000:0000/24')
def tearDown(self):
if( os.path.exists(CREDFILE) ):
os.remove(CREDFILE)
def testErrors(self):
output = os.popen('python %s -f test_data/test_zone.db -v no_view '
'-u %s --config-file %s -z sub.university.lcl' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(output.read(),
'Loading in test_data/test_zone.db\n'
'View no_view does not exist.\n')
output.close()
output = os.popen('python %s -f test_data/test_zone.db -v any '
'-u %s --config-file %s -z no_zone' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(output.read(),
'Loading in test_data/test_zone.db\n'
'Zone no_zone does not exist.\n')
output.close()
def testImportForwardZoneToAny(self):
self.assertEqual(self.core_instance.ListRecords(), [])
output = os.popen('python %s -f test_data/test_zone.db -v any '
'-u %s --config-file %s -z sub.university.lcl' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(output.read(),
'Loading in test_data/test_zone.db\n'
'17 records loaded from zone test_data/test_zone.db\n'
'17 total records added\n')
output.close()
self.assertEqual(self.core_instance.ListRecords(),
[{u'serial_number': 796, u'refresh_seconds': 10800, 'target': u'@',
u'name_server': u'ns.university.lcl.', u'retry_seconds': 3600,
'ttl': 3600, u'minimum_seconds': 86400, 'record_type': u'soa',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'admin_email': u'hostmaster.ns.university.lcl.',
u'expiry_seconds': 3600000}, {'target': u'@',
u'name_server': u'ns.sub.university.lcl.', 'ttl': 3600,
'record_type': u'ns', 'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl'}, {'target': u'@',
u'name_server': u'ns2.sub.university.lcl.', 'ttl': 3600,
'record_type': u'ns', 'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl'}, {'target': u'@', 'ttl': 3600,
u'priority': 10, 'record_type': u'mx', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'mail_server': u'mail1.sub.university.lcl.'}, {'target': u'@',
'ttl': 3600, u'priority': 20, 'record_type': u'mx',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'mail_server': u'mail2.sub.university.lcl.'},
{'target': u'@', 'ttl': 3600, 'record_type': u'txt',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'quoted_text': u'"Contact 1: Stephen Harrell ([email protected])"'},
{'target': u'@', 'ttl': 3600, 'record_type': u'a',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'assignment_ip': u'192.168.0.1'},
{'target': u'ns', 'ttl': 3600, 'record_type': u'a',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.103'},
{'target': u'desktop-1', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'assignment_ip': u'3ffe:0800:0000:0000:02a8:79ff:fe32:1982'},
{'target': u'desktop-1', 'ttl': 3600, 'record_type': u'a',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.100'},
{'target': u'ns2', 'ttl': 3600, 'record_type': u'a',
'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.104'},
{'target': u'ns2', 'ttl': 3600, u'hardware': u'PC',
'record_type': u'hinfo', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'os': u'NT'}, {'target': u'www', 'ttl': 3600,
'record_type': u'cname', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_host': u'sub.university.lcl.'}, {'target': u'localhost',
'ttl': 3600, 'record_type': u'a', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_ip': u'127.0.0.1'}, {'target': u'www.data',
'ttl': 3600, 'record_type': u'cname', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_host': u'ns.university.lcl.'}, {'target': u'mail1',
'ttl': 3600, 'record_type': u'a', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_ip': u'192.168.1.101'}, {'target': u'mail2',
'ttl': 3600, 'record_type': u'a', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_ip': u'192.168.1.102'}])
def testImportForwardZoneToView(self):
self.assertEqual(self.core_instance.ListRecords(), [])
output = os.popen('python %s -f test_data/test_zone.db -v test_view1 '
'-u %s --config-file %s -z sub.university.lcl' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(output.read(),
'Loading in test_data/test_zone.db\n'
'17 records loaded from zone test_data/test_zone.db\n'
'17 total records added\n')
output.close()
#fix indenting
self.assertEqual(self.core_instance.ListRecords(),
[{u'serial_number': 795, u'refresh_seconds': 10800, 'target': u'@',
u'name_server': u'ns.university.lcl.', u'retry_seconds': 3600,
'ttl': 3600, u'minimum_seconds': 86400, 'record_type': u'soa',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'admin_email': u'hostmaster.ns.university.lcl.',
u'expiry_seconds': 3600000}, {'target': u'@',
u'name_server': u'ns.sub.university.lcl.', 'ttl': 3600,
'record_type': u'ns', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl'},
{'target': u'@', u'name_server': u'ns2.sub.university.lcl.',
'ttl': 3600, 'record_type': u'ns', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl'},
{'target': u'@', 'ttl': 3600, u'priority': 10, 'record_type': u'mx',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'mail_server': u'mail1.sub.university.lcl.'},
{'target': u'@', 'ttl': 3600, u'priority': 20,
'record_type': u'mx', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'mail_server': u'mail2.sub.university.lcl.'}, {'target': u'@',
'ttl': 3600, 'record_type': u'txt', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'quoted_text': u'"Contact 1: Stephen Harrell ([email protected])"'},
{'target': u'@', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.0.1'},
{'target': u'ns', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.103'},
{'target': u'desktop-1', 'ttl': 3600, 'record_type': u'aaaa',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl',
u'assignment_ip': u'3ffe:0800:0000:0000:02a8:79ff:fe32:1982'},
{'target': u'desktop-1', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.100'},
{'target': u'ns2', 'ttl': 3600, 'record_type': u'a',
'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.104'},
{'target': u'ns2', 'ttl': 3600, u'hardware': u'PC',
'record_type': u'hinfo', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'os': u'NT'}, {'target': u'www', 'ttl': 3600,
'record_type': u'cname', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_host': u'sub.university.lcl.'}, {'target': u'localhost',
'ttl': 3600, 'record_type': u'a', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_ip': u'127.0.0.1'}, {'target': u'www.data',
'ttl': 3600, 'record_type': u'cname', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_host': u'ns.university.lcl.'}, {'target': u'mail1',
'ttl': 3600, 'record_type': u'a', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'sub.university.lcl',
u'assignment_ip': u'192.168.1.101'}, {'target': u'mail2', 'ttl': 3600,
'record_type': u'a', 'view_name': u'test_view1', 'last_user': u'sharrell',
'zone_name': u'sub.university.lcl', u'assignment_ip': u'192.168.1.102'}])
def testImportReverseIPV6ZoneToAny(self):
self.assertEqual(self.core_instance.ListRecords(), [])
output = os.popen('python %s -f test_data/test_reverse_ipv6_zone.db '
'-v any -u %s --config-file %s '
'-z 8.0.e.f.f.3.ip6.arpa' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(
output.read(),
'Loading in test_data/test_reverse_ipv6_zone.db\n'
'5 records loaded from zone test_data/test_reverse_ipv6_zone.db\n'
'5 total records added\n')
self.assertEqual(self.core_instance.ListRecords(),
[{u'serial_number': 6, u'refresh_seconds': 10800, 'target': u'@',
u'name_server': u'ns.university.lcl.', u'retry_seconds': 3600, 'ttl': 86400,
u'minimum_seconds': 86400, 'record_type': u'soa', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa', u'admin_email':
u'hostmaster.university.lcl.', u'expiry_seconds': 3600000}, {'target':
u'@', u'name_server': u'ns.university.lcl.', 'ttl': 86400, 'record_type':
u'ns', 'view_name': u'any', 'last_user': u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa'},
{'target': u'@', u'name_server': u'ns2.university.lcl.', 'ttl': 86400,
'record_type': u'ns', 'view_name': u'any', 'last_user': u'sharrell', 'zone_name':
u'8.0.e.f.f.3.ip6.arpa'}, {'target': u'2.8.9.1.2.3.e.f.f.f.9.7.8.a.2.0.0.0.0.0.0.0.0.0.0.0',
'ttl': 86400, 'record_type': u'ptr', 'view_name': u'any', 'last_user':
u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa', u'assignment_host':
u'router.university.lcl.'}, {'target': u'0.8.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0',
'ttl': 86400, 'record_type': u'ptr', 'view_name': u'any', 'last_user':
u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa', u'assignment_host':
u'desktop-1.university.lcl.'}])
def testImportReverseIPV6ZoneToView(self):
self.assertEqual(self.core_instance.ListRecords(), [])
output = os.popen('python %s -f test_data/test_reverse_ipv6_zone.db '
'-v test_view1 -u %s --config-file %s '
'-z 8.0.e.f.f.3.ip6.arpa' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(
output.read(),
'Loading in test_data/test_reverse_ipv6_zone.db\n'
'5 records loaded from zone test_data/test_reverse_ipv6_zone.db\n'
'5 total records added\n')
self.assertEqual(self.core_instance.ListRecords(),
[{u'serial_number': 5, u'refresh_seconds': 10800, 'target': u'@', u'name_server':
u'ns.university.lcl.', u'retry_seconds': 3600, 'ttl': 86400, u'minimum_seconds':
86400, 'record_type': u'soa', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa', u'admin_email': u'hostmaster.university.lcl.',
u'expiry_seconds': 3600000}, {'target': u'@', u'name_server': u'ns.university.lcl.',
'ttl': 86400, 'record_type': u'ns', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa'}, {'target': u'@', u'name_server':
u'ns2.university.lcl.', 'ttl': 86400, 'record_type': u'ns', 'view_name':
u'test_view1', 'last_user': u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa'},
{'target': u'2.8.9.1.2.3.e.f.f.f.9.7.8.a.2.0.0.0.0.0.0.0.0.0.0.0', 'ttl':
86400, 'record_type': u'ptr', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa', u'assignment_host':
u'router.university.lcl.'}, {'target': u'0.8.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0',
'ttl': 86400, 'record_type': u'ptr', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'8.0.e.f.f.3.ip6.arpa', u'assignment_host':
u'desktop-1.university.lcl.'}])
def testImportReverseZoneToAny(self):
self.assertEqual(self.core_instance.ListRecords(), [])
output = os.popen('python %s -f test_data/test_reverse_zone.db '
'-v any -u %s --config-file %s '
'-z 0.168.192.in-addr.arpa' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(
output.read(),
'Loading in test_data/test_reverse_zone.db\n'
'6 records loaded from zone test_data/test_reverse_zone.db\n'
'6 total records added\n')
self.assertEqual(self.core_instance.ListRecords(),
[{u'serial_number': 6, u'refresh_seconds': 10800, 'target': u'@', u'name_server':
u'ns.university.lcl.', u'retry_seconds': 3600, 'ttl': 86400, u'minimum_seconds':
86400, 'record_type': u'soa', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa', u'admin_email': u'hostmaster.university.lcl.',
u'expiry_seconds': 3600000}, {'target': u'@', u'name_server': u'ns.university.lcl.',
'ttl': 86400, 'record_type': u'ns', 'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'0.168.192.in-addr.arpa'}, {'target': u'@', u'name_server':
u'ns2.university.lcl.', 'ttl': 86400, 'record_type': u'ns', 'view_name':
u'any', 'last_user': u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa'},
{'target': u'1', 'ttl': 86400, 'record_type': u'ptr', 'view_name': u'any',
'last_user': u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa', u'assignment_host':
u'router.university.lcl.'}, {'target': u'11', 'ttl': 86400, 'record_type':
u'ptr', 'view_name': u'any', 'last_user': u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa',
u'assignment_host': u'desktop-1.university.lcl.'}, {'target': u'12', 'ttl':
86400, 'record_type': u'ptr', 'view_name': u'any', 'last_user': u'sharrell',
'zone_name': u'0.168.192.in-addr.arpa', u'assignment_host': u'desktop-2.university.lcl.'}])
def testImportReverseZoneToView(self):
self.assertEqual(self.core_instance.ListRecords(), [])
output = os.popen('python %s -f test_data/test_reverse_zone.db '
'-v test_view1 -u %s --config-file %s '
'-z 0.168.192.in-addr.arpa' % (
EXEC, USERNAME, USER_CONFIG))
self.assertEqual(
output.read(),
'Loading in test_data/test_reverse_zone.db\n'
'6 records loaded from zone test_data/test_reverse_zone.db\n'
'6 total records added\n')
self.assertEqual(self.core_instance.ListRecords(),
[{u'serial_number': 5, u'refresh_seconds': 10800, 'target': u'@', u'name_server':
u'ns.university.lcl.', u'retry_seconds': 3600, 'ttl': 86400, u'minimum_seconds':
86400, 'record_type': u'soa', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa', u'admin_email': u'hostmaster.university.lcl.',
u'expiry_seconds': 3600000}, {'target': u'@', u'name_server': u'ns.university.lcl.',
'ttl': 86400, 'record_type': u'ns', 'view_name': u'test_view1', 'last_user':
u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa'}, {'target': u'@',
u'name_server': u'ns2.university.lcl.', 'ttl': 86400, 'record_type': u'ns',
'view_name': u'test_view1', 'last_user': u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa'},
{'target': u'1', 'ttl': 86400, 'record_type': u'ptr', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa', u'assignment_host':
u'router.university.lcl.'}, {'target': u'11', 'ttl': 86400, 'record_type':
u'ptr', 'view_name': u'test_view1', 'last_user': u'sharrell', 'zone_name':
u'0.168.192.in-addr.arpa', u'assignment_host': u'desktop-1.university.lcl.'},
{'target': u'12', 'ttl': 86400, 'record_type': u'ptr', 'view_name': u'test_view1',
'last_user': u'sharrell', 'zone_name': u'0.168.192.in-addr.arpa', u'assignment_host':
u'desktop-2.university.lcl.'}])
if( __name__ == '__main__' ):
unittest.main()
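# Example invocation of the importer exercised by these tests (same fixture
# paths as above; adjust for a real deployment):
#
#   python ../roster-config-manager/scripts/dnszoneimporter \
#       -f test_data/test_zone.db -v any -u sharrell \
#       --config-file test_data/roster.conf -z sub.university.lcl
#
# Expected output, per testImportForwardZoneToAny:
#   Loading in test_data/test_zone.db
#   17 records loaded from zone test_data/test_zone.db
#   17 total records added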
|
|
#!/usr/bin/env python
import feedparser # pip install feedparser
from pymongo import MongoClient
import json
import re
import datetime
import logging
import RAKE
from calendar import timegm
from datetime import datetime
import hashlib
from fuzzywuzzy import fuzz
from bs4 import BeautifulSoup
import time
start = time.time()
logging.basicConfig(filename='logger.log', level=logging.DEBUG)
logger = logging.getLogger(__name__)
connection = MongoClient('mongodb://localhost:27017/Culminate')
db = connection.Culminate
# Main class
class insertingClass:
"""
insertingClass does various sorting and inserting data to appropriate collections
@params data : contains the data
@params category : the category of the data
@params source :the source of the data
"""
def __init__(self, data, category, source):
self.data = data
self.category = category
self.source = source
def individualInsertObj(self):
"""
Classifying according to the category
"""
if self.data['category'] == "General":
collectionInsert(db.General, "General", self.data, self.source)
if self.data['category'] == "Technology":
collectionInsert(db.Technology, "Technology",
self.data, self.source)
if self.data['category'] == "Science":
collectionInsert(db.Science, "Science", self.data, self.source)
if self.data['category'] == "Entertainment":
collectionInsert(db.Entertainment, "Entertainment",
self.data, self.source)
if self.data['category'] == "World":
collectionInsert(db.World, "World", self.data, self.source)
if self.data['category'] == "Politics":
collectionInsert(db.Politics, "Politics", self.data, self.source)
if self.data['category'] == "Business":
collectionInsert(db.Business, "Business", self.data, self.source)
if self.data['category'] == "Health":
collectionInsert(db.Health, "Health", self.data, self.source)
if self.data['category'] == "Education":
collectionInsert(db.Education, "Education", self.data, self.source)
if self.data['category'] == "Sports":
collectionInsert(db.Sports, "Sports", self.data, self.source)
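# Illustrative usage (a sketch; `item` stands for a dict built by parsing()
# further below, which is the only caller in this script):
#
#   item = {'title': 'Some headline', 'category': 'Technology',
#           'uTag': '0f3c...', 'source': 'CNN'}
#   insertingClass(item, 'Technology', 'CNN').individualInsertObj()
#
# individualInsertObj() dispatches on item['category'] and delegates the
# actual write to collectionInsert() defined next.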
# Individual insertion function
def collectionInsert(collectionName, tag, data, source):
"""
Inserting function with respect to the collection name parsed
"""
if collectionName.count() == 0:
collectionName.insert_one(data)
else:
for document in collectionName.find():
collision = fuzz.token_sort_ratio(
str(data['title']), document['title'])
tags = str(data['uTag'])
if collectionName.find_one(
{'uTag': tags}, {'_id': 1}):
pass
else:
insertDoc = collectionName.insert_one(data)
if db.Main.find_one(
{'uTag': tags}, {'_id': 1}):
pass
else:
insertDoc = db.Main.insert_one(data)
if insertDoc:
logging.info('Inserted new for ' + tag + " for " + source
)
logging.info('\n')
else:
logging.info('Error in insertion for ' +
tag + " for " + source)
logging.info('\n')
print("Done for " + tag + " for " + source)
# Parsing function
def Type1parser(url, source, category, tag):
"""
This class handles all the feed parsing jobs initialted by the main function
"""
etags = db.eTags.find({'source': source + category})
if etags.count() > 0:
etag = etags[0]['etag']
feed = feedparser.parse(url, etag=etag)
print(feed.status)
db.eTags.update_one({'source': source + category}, {
'$set': {'etag': feed.etag}}, upsert=True)
else:
feed = feedparser.parse(url)
print(feed.etag)
db.eTags.update_one({'source': source + category}, {
'$set': {'etag': feed.etag}}, upsert=True)
    parsing(feed, source, category, tag)
# main function
def parsing(feed, source, category, tag):
array = []
for item in feed['entries']:
summarys = ""
if 'summary' in item:
cleantext = BeautifulSoup(item.summary).text
summarys = cleantext
publishedTag = ""
if 'published' in item:
publishedTag = item.published
# if 'media_content' in item:
# takes stopwords as list of strings
Rake = RAKE.Rake('stopwords_en.txt')
words = Rake.run(item.title)
tagWordArray = []
for word in words:
tagWordArray.append(word[0].title())
itemArray = dict()
itemArray['title'] = item.title
itemArray['link'] = item.link
if 'media_content' in item:
itemArray['image'] = item.media_content[0]['url']
if 'media_thumbnail' in item:
itemArray['image'] = item.media_thumbnail[0]['url']
itemArray['published'] = publishedTag
itemArray['source'] = source
itemArray['type'] = tag
itemArray['category'] = category
itemArray['summary'] = summarys
itemArray['tags'] = tagWordArray
itemArray['created_at'] = str(datetime.now())
itemArray['uTag'] = hashlib.sha256(
str(item.title).encode('utf-8')).hexdigest()[:16]
print("Inside iterating loop")
individualInsert = insertingClass(itemArray, category, source)
individualInsert.individualInsertObj()
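# Each parsed entry roughly yields a document of this shape (sketch; actual
# values depend on the feed):
#
#   {
#       'title': '...', 'link': '...', 'image': '...',
#       'published': '...', 'source': 'CNN', 'type': 'Top',
#       'category': 'General', 'summary': '...', 'tags': ['...'],
#       'created_at': '2018-01-01 12:00:00.000000',
#       'uTag': '<16-char sha256 prefix of the title>'
#   }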
def main():
    """ Calling the main class parser for appropriate rss feeds """
    start = time.time()
    Type1parser("http://rss.cnn.com/rss/edition.rss", "CNN", "General", "Top")
# Type1parser("http://rss.cnn.com/rss/edition_world.rss",
# "CNN", "World", "Top")
# Type1parser("http://rss.cnn.com/rss/edition_technology.rss",
# "CNN", "Technology", "Top")
# Type1parser("http://rss.cnn.com/rss/edition_space.rss",
# "CNN", "Science", "Top")
# Type1parser("http://rss.cnn.com/rss/edition_entertainment.rss",
# "CNN", "Entertainment", "Top")
# Type1parser("http://rss.cnn.com/rss/cnn_latest.rss",
# "CNN", "General", "Latest")
# Type1parser("http://feeds.bbci.co.uk/news/rss.xml",
# "BBC", "General", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/world/rss.xml",
# "BBC", "World", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/politics/rss.xml",
# "BBC", "Politics", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/business/rss.xml",
# "BBC", "Business", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/health/rss.xml",
# "BBC", "Health", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/education/rss.xml",
# "BBC", "Education", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/science_and_environment/rss.xml",
# "BBC", "Science", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/technology/rss.xml",
# "BBC", "Technology", "Top")
# Type1parser("http://feeds.bbci.co.uk/news/entertainment_and_arts/rss.xml",
# "BBC", "Entertainment", "Top")
# # Times of India
# Type1parser("http://timesofindia.indiatimes.com/rssfeedstopstories.cms",
# "Times of India", "General", "Top")
# # New York Times
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/HomePage.xml",
# "New York Times", "General", "Top")
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/World.xml",
# "New York Times", "World", "Top")
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/Business.xml",
# "New York Times", "Business", "Top")
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/Technology.xml",
# "New York Times", "Technology", "Top")
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/Sports.xml",
# "New York Times", "Sports", "Top")
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/Science.xml",
# "New York Times", "Science", "Top")
# Type1parser("http://rss.nytimes.com/services/xml/rss/nyt/Health.xml",
# "New York Times", "Health", "Top")
# # Reuters
# Type1parser("http://feeds.reuters.com/reuters/INtopNews",
# "Reuters India", "General", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INbusinessNews",
# "Reuters India", "Business", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INsouthAsiaNews",
# "Reuters India", "World", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INworldNews",
# "Reuters India", "World", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INentertainmentNews",
# "Reuters India", "Entertainment", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INsportsNews",
# "Reuters India", "Sports", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INcricketNews",
# "Reuters India", "Sports", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INtechnologyNews",
# "Reuters India", "Technology", "Top")
# Type1parser("http://feeds.reuters.com/reuters/INhealth",
# "Reuters India", "Health", "Top")
# # The Guardian
# Type1parser("https://www.theguardian.com/world/rss",
# "The Guardian", "World", "Top")
# Type1parser("https://www.theguardian.com/international/rss",
# "The Guardian", "General", "Top")
# Type1parser("https://www.theguardian.com/uk/environment/rss",
# "The Guardian", "World", "Top")
# Type1parser("https://www.theguardian.com/uk/money/rss",
# "The Guardian", "Business", "Top")
# Type1parser("https://www.theguardian.com/uk/business/rss",
# "The Guardian", "Business", "Top")
# Type1parser("https://www.theguardian.com/uk/technology/rss",
# "The Guardian", "Technology", "Top")
# Type1parser("https://www.theguardian.com/uk/sport/rss",
# "The Guardian", "Sports", "Top")
# Type1parser("https://www.theguardian.com/world/rss",
# "The Guardian", "General", "Top")
print ('It took', time.time() - start, 'seconds.')
if __name__ == '__main__':
main()
|
|
# Copyright 2013 Google Inc. All Rights Reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
from itertools import izip
import logging
import random
import re
from net import bsonrpc
from net import gorpc
from vtdb import dbapi
from vtdb import dbexceptions
from vtdb import field_types
from vtdb import keyrange
from vtdb import vtdb_logger
from vtdb import vtgate_cursor
_errno_pattern = re.compile('\(errno (\d+)\)')
def log_exception(method):
"""Decorator for logging the exception from vtgatev2.
The convert_exception method interprets and recasts the exceptions
raised by lower-layer. The inner function calls the appropriate vtdb_logger
method based on the exception raised.
Args:
exc: exception raised by calling code
args: additional args for the exception.
Returns:
Decorated method.
"""
def _log_exception(exc, *args):
logger_object = vtdb_logger.get_logger()
new_exception = method(exc, *args)
if isinstance(new_exception, dbexceptions.IntegrityError):
logger_object.integrity_error(new_exception)
else:
logger_object.vtgatev2_exception(new_exception)
return new_exception
return _log_exception
def handle_app_error(exc_args):
msg = str(exc_args[0]).lower()
if msg.startswith('request_backlog'):
return dbexceptions.RequestBacklog(exc_args)
match = _errno_pattern.search(msg)
if match:
mysql_errno = int(match.group(1))
# Prune the error message to truncate the query string
# returned by mysql as it contains bind variables.
if mysql_errno == 1062:
parts = _errno_pattern.split(msg)
pruned_msg = msg[:msg.find(parts[2])]
new_args = (pruned_msg,) + tuple(exc_args[1:])
return dbexceptions.IntegrityError(new_args)
return dbexceptions.DatabaseError(exc_args)
@log_exception
def convert_exception(exc, *args):
new_args = exc.args + args
if isinstance(exc, gorpc.TimeoutError):
return dbexceptions.TimeoutError(new_args)
elif isinstance(exc, gorpc.AppError):
return handle_app_error(new_args)
elif isinstance(exc, gorpc.ProgrammingError):
return dbexceptions.ProgrammingError(new_args)
elif isinstance(exc, gorpc.GoRpcError):
return dbexceptions.FatalError(new_args)
return exc
def _create_req_with_keyspace_ids(sql, new_binds, keyspace, tablet_type, keyspace_ids):
# keyspace_ids are Keyspace Ids packed to byte[]
sql, new_binds = dbapi.prepare_query_bind_vars(sql, new_binds)
new_binds = field_types.convert_bind_vars(new_binds)
req = {
'Sql': sql,
'BindVariables': new_binds,
'Keyspace': keyspace,
'TabletType': tablet_type,
'KeyspaceIds': keyspace_ids,
}
return req
def _create_req_with_keyranges(sql, new_binds, keyspace, tablet_type, keyranges):
# keyranges are keyspace.KeyRange objects with start/end packed to byte[]
sql, new_binds = dbapi.prepare_query_bind_vars(sql, new_binds)
new_binds = field_types.convert_bind_vars(new_binds)
req = {
'Sql': sql,
'BindVariables': new_binds,
'Keyspace': keyspace,
'TabletType': tablet_type,
'KeyRanges': keyranges,
}
return req
# A simple, direct connection to the vtgate server.
# This is shard-unaware and only handles the most basic communication.
# If something goes wrong, this object should be thrown away and a new one instantiated.
class VTGateConnection(object):
session = None
_stream_fields = None
_stream_conversions = None
_stream_result = None
_stream_result_index = None
def __init__(self, addr, timeout, user=None, password=None, encrypted=False, keyfile=None, certfile=None):
self.addr = addr
self.timeout = timeout
self.client = bsonrpc.BsonRpcClient(addr, timeout, user, password, encrypted=encrypted, keyfile=keyfile, certfile=certfile)
self.logger_object = vtdb_logger.get_logger()
def __str__(self):
return '<VTGateConnection %s >' % self.addr
def dial(self):
try:
if not self.is_closed():
self.close()
self.client.dial()
except gorpc.GoRpcError as e:
raise convert_exception(e, str(self))
def close(self):
if self.session:
self.rollback()
self.client.close()
def is_closed(self):
return self.client.is_closed()
def cursor(self, cursorclass=None, *pargs, **kwargs):
if cursorclass is None:
cursorclass = vtgate_cursor.VTGateCursor
return cursorclass(*pargs, **kwargs)
def begin(self):
try:
response = self.client.call('VTGate.Begin', None)
self.session = response.reply
except gorpc.GoRpcError as e:
raise convert_exception(e, str(self))
def commit(self):
try:
session = self.session
self.session = None
self.client.call('VTGate.Commit', session)
except gorpc.GoRpcError as e:
raise convert_exception(e, str(self))
def rollback(self):
try:
session = self.session
self.session = None
self.client.call('VTGate.Rollback', session)
except gorpc.GoRpcError as e:
raise convert_exception(e, str(self))
def _add_session(self, req):
if self.session:
req['Session'] = self.session
def _update_session(self, response):
if 'Session' in response.reply and response.reply['Session']:
self.session = response.reply['Session']
def _execute(self, sql, bind_variables, keyspace, tablet_type, keyspace_ids=None, keyranges=None):
exec_method = None
req = None
if keyspace_ids is not None:
req = _create_req_with_keyspace_ids(sql, bind_variables, keyspace, tablet_type, keyspace_ids)
exec_method = 'VTGate.ExecuteKeyspaceIds'
elif keyranges is not None:
req = _create_req_with_keyranges(sql, bind_variables, keyspace, tablet_type, keyranges)
exec_method = 'VTGate.ExecuteKeyRanges'
else:
raise dbexceptions.ProgrammingError('_execute called without specifying keyspace_ids or keyranges')
self._add_session(req)
fields = []
conversions = []
results = []
rowcount = 0
lastrowid = 0
try:
response = self.client.call(exec_method, req)
self._update_session(response)
reply = response.reply
if 'Error' in response.reply and response.reply['Error']:
raise gorpc.AppError(response.reply['Error'], exec_method)
if 'Result' in reply:
res = reply['Result']
for field in res['Fields']:
fields.append((field['Name'], field['Type']))
conversions.append(field_types.conversions.get(field['Type']))
for row in res['Rows']:
results.append(tuple(_make_row(row, conversions)))
rowcount = res['RowsAffected']
lastrowid = res['InsertId']
except gorpc.GoRpcError as e:
self.logger_object.log_private_data(bind_variables)
raise convert_exception(e, str(self), sql)
except:
logging.exception('gorpc low-level error')
raise
return results, rowcount, lastrowid, fields
def _execute_entity_ids(self, sql, bind_variables, keyspace, tablet_type, entity_keyspace_id_map, entity_column_name):
sql, new_binds = dbapi.prepare_query_bind_vars(sql, bind_variables)
new_binds = field_types.convert_bind_vars(new_binds)
req = {
'Sql': sql,
'BindVariables': new_binds,
'Keyspace': keyspace,
'TabletType': tablet_type,
'EntityKeyspaceIDs': [
{'ExternalID': xid, 'KeyspaceID': kid}
for xid, kid in entity_keyspace_id_map.iteritems()],
'EntityColumnName': entity_column_name,
}
self._add_session(req)
fields = []
conversions = []
results = []
try:
response = self.client.call('VTGate.ExecuteEntityIds', req)
self._update_session(response)
reply = response.reply
if 'Error' in response.reply and response.reply['Error']:
raise gorpc.AppError(response.reply['Error'], 'VTGate.ExecuteEntityIds')
if 'Result' in reply:
res = reply['Result']
for field in res['Fields']:
fields.append((field['Name'], field['Type']))
conversions.append(field_types.conversions.get(field['Type']))
for row in res['Rows']:
results.append(tuple(_make_row(row, conversions)))
rowcount = res['RowsAffected']
lastrowid = res['InsertId']
except gorpc.GoRpcError as e:
self.logger_object.log_private_data(bind_variables)
raise convert_exception(e, str(self), sql)
except:
logging.exception('gorpc low-level error')
raise
return results, rowcount, lastrowid, fields
def _execute_batch(self, sql_list, bind_variables_list, keyspace, tablet_type, keyspace_ids):
query_list = []
for sql, bind_vars in zip(sql_list, bind_variables_list):
sql, bind_vars = dbapi.prepare_query_bind_vars(sql, bind_vars)
query = {}
query['Sql'] = sql
query['BindVariables'] = field_types.convert_bind_vars(bind_vars)
query_list.append(query)
rowsets = []
try:
req = {
'Queries': query_list,
'Keyspace': keyspace,
'TabletType': tablet_type,
'KeyspaceIds': keyspace_ids,
}
self._add_session(req)
response = self.client.call('VTGate.ExecuteBatchKeyspaceIds', req)
self._update_session(response)
if 'Error' in response.reply and response.reply['Error']:
raise gorpc.AppError(response.reply['Error'], 'VTGate.ExecuteBatchKeyspaceIds')
for reply in response.reply['List']:
fields = []
conversions = []
results = []
rowcount = 0
for field in reply['Fields']:
fields.append((field['Name'], field['Type']))
conversions.append(field_types.conversions.get(field['Type']))
for row in reply['Rows']:
results.append(tuple(_make_row(row, conversions)))
rowcount = reply['RowsAffected']
lastrowid = reply['InsertId']
rowsets.append((results, rowcount, lastrowid, fields))
except gorpc.GoRpcError as e:
self.logger_object.log_private_data(bind_variables_list)
raise convert_exception(e, str(self), sql_list)
except:
logging.exception('gorpc low-level error')
raise
return rowsets
# we return the fields for the response, and the column conversions
# the conversions will need to be passed back to _stream_next
# (that way we avoid using a member variable here for such a corner case)
def _stream_execute(self, sql, bind_variables, keyspace, tablet_type, keyspace_ids=None, keyranges=None):
exec_method = None
req = None
if keyspace_ids is not None:
req = _create_req_with_keyspace_ids(sql, bind_variables, keyspace, tablet_type, keyspace_ids)
exec_method = 'VTGate.StreamExecuteKeyspaceIds'
elif keyranges is not None:
req = _create_req_with_keyranges(sql, bind_variables, keyspace, tablet_type, keyranges)
exec_method = 'VTGate.StreamExecuteKeyRanges'
else:
raise dbexceptions.ProgrammingError('_stream_execute called without specifying keyspace_ids or keyranges')
self._add_session(req)
self._stream_fields = []
self._stream_conversions = []
self._stream_result = None
self._stream_result_index = 0
try:
self.client.stream_call(exec_method, req)
first_response = self.client.stream_next()
reply = first_response.reply['Result']
for field in reply['Fields']:
self._stream_fields.append((field['Name'], field['Type']))
self._stream_conversions.append(field_types.conversions.get(field['Type']))
except gorpc.GoRpcError as e:
self.logger_object.log_private_data(bind_variables)
raise convert_exception(e, str(self), sql)
except:
logging.exception('gorpc low-level error')
raise
return None, 0, 0, self._stream_fields
def _stream_next(self):
# Terminating condition
if self._stream_result_index is None:
return None
# See if we need to read more or whether we just pop the next row.
while self._stream_result is None:
try:
self._stream_result = self.client.stream_next()
if self._stream_result is None:
self._stream_result_index = None
return None
# A session message, if any comes separately with no rows
if 'Session' in self._stream_result.reply and self._stream_result.reply['Session']:
self.session = self._stream_result.reply['Session']
self._stream_result = None
continue
# An extra fields message if it is scatter over streaming, ignore it
if not self._stream_result.reply['Result']['Rows']:
self._stream_result = None
continue
except gorpc.GoRpcError as e:
raise convert_exception(e, str(self))
except:
logging.exception('gorpc low-level error')
raise
row = tuple(_make_row(self._stream_result.reply['Result']['Rows'][self._stream_result_index], self._stream_conversions))
# If we are reading the last row, set us up to read more data.
self._stream_result_index += 1
if self._stream_result_index == len(self._stream_result.reply['Result']['Rows']):
self._stream_result = None
self._stream_result_index = 0
return row
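  # Streaming usage sketch (illustrative only): a caller first invokes
  # _stream_execute() to issue the query and obtain the field metadata, then
  # drains rows with _stream_next() until it returns None:
  #
  #   _, _, _, fields = conn._stream_execute(
  #       sql, bind_vars, keyspace, tablet_type, keyranges=keyranges)
  #   row = conn._stream_next()
  #   while row is not None:
  #     handle(row)            # handle() is a stand-in for caller logic
  #     row = conn._stream_next()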
def _make_row(row, conversions):
converted_row = []
for conversion_func, field_data in izip(conversions, row):
if field_data is None:
v = None
elif conversion_func:
v = conversion_func(field_data)
else:
v = field_data
converted_row.append(v)
return converted_row
def get_params_for_vtgate_conn(vtgate_addrs, timeout, encrypted=False, user=None, password=None):
db_params_list = []
addrs = []
if isinstance(vtgate_addrs, dict):
service = '_vt'
if encrypted:
service = '_vts'
if service not in vtgate_addrs:
raise Exception("required vtgate service addrs %s not exist" % service)
addrs = vtgate_addrs[service]
random.shuffle(addrs)
elif isinstance(vtgate_addrs, list):
random.shuffle(vtgate_addrs)
addrs = vtgate_addrs
else:
raise dbexceptions.Error("Wrong type for vtgate addrs %s" % vtgate_addrs)
for addr in addrs:
vt_params = dict()
vt_params['addr'] = addr
vt_params['timeout'] = timeout
vt_params['encrypted'] = encrypted
vt_params['user'] = user
vt_params['password'] = password
db_params_list.append(vt_params)
return db_params_list
def connect(vtgate_addrs, timeout, encrypted=False, user=None, password=None):
db_params_list = get_params_for_vtgate_conn(vtgate_addrs, timeout,
encrypted=encrypted, user=user,
password=password)
if not db_params_list:
raise dbexceptions.OperationalError("empty db params list - no db instance available for vtgate_addrs %s" % vtgate_addrs)
db_exception = None
host_addr = None
for params in db_params_list:
try:
db_params = params.copy()
host_addr = db_params['addr']
conn = VTGateConnection(**db_params)
conn.dial()
return conn
except Exception as e:
db_exception = e
logging.warning('db connection failed: %s, %s', host_addr, e)
raise dbexceptions.OperationalError(
'unable to create vt connection', host_addr, db_exception)
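# Connection usage sketch (illustrative only; the address, keyspace, query,
# tablet type and packed keyspace ids below are placeholders, not values
# from this module):
#
#   conn = connect(['vtgate-host:15991'], 30.0)
#   results, rowcount, lastrowid, fields = conn._execute(
#       'select * from product where sku = %(sku)s', {'sku': 'sku-1'},
#       'commerce', 'master', keyspace_ids=packed_keyspace_ids)
#   conn.close()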
|
|
#
# Copyright (c) 2013-2018 Quarkslab.
# This file is part of IRMA project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License in the top-level directory
# of this distribution and at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# No part of the project, including this file, may be copied,
# modified, propagated, or distributed except according to the
# terms contained in the LICENSE file.
import os
import logging
import ssl
from kombu import Queue
from logging import BASIC_FORMAT, Formatter
from logging.handlers import SysLogHandler
from celery.log import redirect_stdouts_to_logger
from celery.signals import after_setup_task_logger, after_setup_logger
from irma.common.base.exceptions import IrmaConfigurationError
from irma.common.configuration.ini import TemplatedConfiguration
from irma.common.configuration.sql import SQLConf
from irma.common.ftp.sftp import IrmaSFTP
from irma.common.ftp.sftpv2 import IrmaSFTPv2
from irma.common.ftp.ftps import IrmaFTPS
from irma.common.base.utils import common_celery_options
from datetime import timedelta
# ==========
# TEMPLATE
# ==========
template_brain_config = {
'log': [
('syslog', TemplatedConfiguration.integer, 0),
('prefix', TemplatedConfiguration.string, "irma-brain :"),
('debug', TemplatedConfiguration.boolean, False),
('sql_debug', TemplatedConfiguration.boolean, False),
],
'celery_options': [
('concurrency', TemplatedConfiguration.integer, 0),
('soft_time_limit', TemplatedConfiguration.integer, 300),
('time_limit', TemplatedConfiguration.integer, 1500),
('beat_schedule', TemplatedConfiguration.string,
"/var/irma/brain_beat_schedule"),
('probelist_cache_lifetime', TemplatedConfiguration.integer, 30),
],
'broker_brain': [
('host', TemplatedConfiguration.string, None),
('port', TemplatedConfiguration.integer, 5672),
('vhost', TemplatedConfiguration.string, None),
('username', TemplatedConfiguration.string, None),
('password', TemplatedConfiguration.string, None),
('queue', TemplatedConfiguration.string, None)
],
'broker_probe': [
('host', TemplatedConfiguration.string, None),
('port', TemplatedConfiguration.integer, 5672),
('vhost', TemplatedConfiguration.string, None),
('username', TemplatedConfiguration.string, None),
('password', TemplatedConfiguration.string, None),
('queue', TemplatedConfiguration.string, None)
],
'broker_frontend': [
('host', TemplatedConfiguration.string, None),
('port', TemplatedConfiguration.integer, 5672),
('vhost', TemplatedConfiguration.string, None),
('username', TemplatedConfiguration.string, None),
('password', TemplatedConfiguration.string, None),
('queue', TemplatedConfiguration.string, None)
],
'sqldb': [
('dbms', TemplatedConfiguration.string, None),
('dialect', TemplatedConfiguration.string, None),
('username', TemplatedConfiguration.string, None),
('password', TemplatedConfiguration.string, None),
('host', TemplatedConfiguration.string, None),
('port', TemplatedConfiguration.string, None),
('dbname', TemplatedConfiguration.string, None),
('tables_prefix', TemplatedConfiguration.string, None),
],
'ftp': [
('protocol', TemplatedConfiguration.string, "sftpv2"),
],
'ftp_brain': [
('host', TemplatedConfiguration.string, None),
('port', TemplatedConfiguration.integer, 22),
('auth', TemplatedConfiguration.string, "password"),
('key_path', TemplatedConfiguration.string, ""),
('username', TemplatedConfiguration.string, None),
('password', TemplatedConfiguration.string, None),
],
'interprocess_lock': [
('path', TemplatedConfiguration.string,
"/var/run/lock/irma-brain.lock"),
],
'ssl_config': [
('activate_ssl', TemplatedConfiguration.boolean, False),
('ca_certs', TemplatedConfiguration.string, None),
('keyfile', TemplatedConfiguration.string, None),
('certfile', TemplatedConfiguration.string, None),
],
}
config_path = os.environ.get('IRMA_BRAIN_CFG_PATH')
if config_path is None:
# Fallback to default path that is
# current working directory
config_path = os.path.abspath(os.path.dirname(__file__))
cfg_file = os.path.join(config_path, "brain.ini")
brain_config = TemplatedConfiguration(cfg_file, template_brain_config)
# ================
# Celery helpers
# ================
def get_celery_options(app, app_name):
concurrency = brain_config.celery_options.concurrency
soft_time_limit = brain_config.celery_options.soft_time_limit
time_limit = brain_config.celery_options.time_limit
options = common_celery_options(app,
app_name,
concurrency,
soft_time_limit,
time_limit)
# disable prefetching behavior
options.append("-Ofair")
options.append("--beat")
beat_schedule = brain_config.celery_options.beat_schedule
options.append("--schedule={}".format(beat_schedule))
return options
def _conf_celery(app, broker, backend=None, queue=None):
app.conf.update(BROKER_URL=broker,
CELERY_ACCEPT_CONTENT=['json'],
CELERY_TASK_SERIALIZER='json',
CELERY_RESULT_SERIALIZER='json'
)
if backend is not None:
app.conf.update(CELERY_RESULT_BACKEND=backend)
app.conf.update(CELERY_TASK_RESULT_EXPIRES=300) # 5 minutes
if queue is not None:
app.conf.update(CELERY_DEFAULT_QUEUE=queue,
# delivery_mode=1 enable transient mode
# (don't survive to a server restart)
CELERY_QUEUES=(Queue(queue, routing_key=queue),)
)
if brain_config.ssl_config.activate_ssl:
ca_certs_path = brain_config.ssl_config.ca_certs
keyfile_path = brain_config.ssl_config.keyfile
certfile_path = brain_config.ssl_config.certfile
app.conf.update(
BROKER_USE_SSL={
'ca_certs': ca_certs_path,
'keyfile': keyfile_path,
'certfile': certfile_path,
'cert_reqs': ssl.CERT_REQUIRED
}
)
return
def conf_brain_celery(app):
broker = get_brain_broker_uri()
# default backend is amqp
# same as broker
backend = get_brain_backend_uri()
queue = brain_config.broker_brain.queue
_conf_celery(app, broker, backend=backend, queue=queue)
cache_lifetime = brain_config.celery_options.probelist_cache_lifetime
app.conf.update(
CELERYBEAT_SCHEDULE={
'probe_list_refresh': {
'task': 'scan_tasks.probe_list_refresh',
'schedule': timedelta(seconds=cache_lifetime),
'args': (),
},
},
CELERY_TIMEZONE='UTC'
)
def conf_probe_celery(app):
broker = get_probe_broker_uri()
backend = get_probe_backend_uri()
_conf_celery(app, broker, backend=backend)
def conf_frontend_celery(app):
broker = get_frontend_broker_uri()
queue = brain_config.broker_frontend.queue
_conf_celery(app, broker, backend=False, queue=queue)
def conf_results_celery(app):
broker = get_probe_broker_uri()
queue = brain_config.broker_probe.queue
_conf_celery(app, broker, backend=False, queue=queue)
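# Wiring sketch (illustrative): a brain worker module would typically build
# its Celery app with these helpers, along the lines of:
#
#   from celery import Celery
#   scan_app = Celery('scan_tasks')     # the app name here is an example
#   conf_brain_celery(scan_app)
#   configure_syslog(scan_app)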
# =================
# Broker helpers
# =================
def _get_amqp_uri(broker_config):
user = broker_config.username
pwd = broker_config.password
host = broker_config.host
port = broker_config.port
vhost = broker_config.vhost
return "amqp://{user}:{pwd}@{host}:{port}/{vhost}" \
"".format(user=user, pwd=pwd, host=host, port=port, vhost=vhost)
def get_brain_broker_uri():
return _get_amqp_uri(brain_config.broker_brain)
def get_brain_backend_uri():
return _get_amqp_uri(brain_config.broker_brain)
def get_probe_broker_uri():
return _get_amqp_uri(brain_config.broker_probe)
def get_probe_backend_uri():
return _get_amqp_uri(brain_config.broker_probe)
def get_frontend_broker_uri():
return _get_amqp_uri(brain_config.broker_frontend)
def get_frontend_rmqvhost():
return brain_config.broker_frontend.vhost
# ================
# Syslog helpers
# ================
def configure_syslog(app):
if brain_config.log.syslog:
app.conf.update(CELERYD_LOG_COLOR=False)
after_setup_logger.connect(setup_log)
after_setup_task_logger.connect(setup_log)
def setup_log(**args):
# redirect stdout and stderr to logger
redirect_stdouts_to_logger(args['logger'])
# logs to local syslog
hl = SysLogHandler('/dev/log',
facility=SysLogHandler.facility_names['syslog'])
# setting log level
hl.setLevel(args['loglevel'])
# setting log format
formatter = Formatter(brain_config.log.prefix + BASIC_FORMAT)
hl.setFormatter(formatter)
# add new handler to logger
args['logger'].addHandler(hl)
def debug_enabled():
return brain_config.log.debug
def sql_debug_enabled():
return brain_config.log.sql_debug
def setup_debug_logger(logger):
log = logging.getLogger()
log.setLevel(logging.DEBUG)
FORMAT = "%(asctime)-15s %(name)s %(process)d %(filename)s:"
FORMAT += "%(lineno)d (%(funcName)s) %(message)s"
logging.basicConfig(format=FORMAT)
logger.setLevel(logging.DEBUG)
return
# ==================
# Database helpers
# ==================
sqldb = SQLConf(**brain_config.sqldb)
# ===========================
# Interprocess lock helpers
# ===========================
def get_lock_path():
return brain_config.interprocess_lock.path
# =============
# FTP helpers
# =============
def get_ftp_class():
protocol = brain_config.ftp.protocol
if protocol == "sftp":
key_path = brain_config.ftp_brain.key_path
auth = brain_config.ftp_brain.auth
if auth == "key" and not os.path.isfile(key_path):
msg = "You are using SFTP authentication by key but the path of " \
"the private key does not exist:['" + key_path + "']"
raise IrmaConfigurationError(msg)
return IrmaSFTP
elif protocol == "sftpv2":
auth = brain_config.ftp_brain.auth
if auth == "key":
raise IrmaConfigurationError("SFTPv2 pubkey auth not implemented")
return IrmaSFTPv2
elif protocol == "ftps":
return IrmaFTPS
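# Usage sketch (illustrative): callers select the FTP backend from the
# configuration and then instantiate it themselves, e.g.:
#
#   ftp_class = get_ftp_class()
#   # ftp_class is IrmaSFTP, IrmaSFTPv2 or IrmaFTPS depending on
#   # brain_config.ftp.protocol; constructor arguments come from the
#   # [ftp_brain] section (host, port, auth, username, password, key_path).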
|
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry import page as page_module
from telemetry.page import page_set
from telemetry.results import base_test_results_unittest
from telemetry.results import page_test_results
from telemetry.timeline import trace_data
from telemetry.value import failure
from telemetry.value import histogram
from telemetry.value import scalar
from telemetry.value import skip
from telemetry.value import trace
class PageTestResultsTest(base_test_results_unittest.BaseTestResultsUnittest):
def setUp(self):
ps = page_set.PageSet(file_path=os.path.dirname(__file__))
ps.AddUserStory(page_module.Page("http://www.bar.com/", ps, ps.base_dir))
ps.AddUserStory(page_module.Page("http://www.baz.com/", ps, ps.base_dir))
ps.AddUserStory(page_module.Page("http://www.foo.com/", ps, ps.base_dir))
self.page_set = ps
@property
def pages(self):
return self.page_set.pages
def testFailures(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(
failure.FailureValue(self.pages[0], self.CreateException()))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.DidRunPage(self.pages[1])
self.assertEqual(set([self.pages[0]]), results.pages_that_failed)
self.assertEqual(set([self.pages[1]]), results.pages_that_succeeded)
self.assertEqual(2, len(results.all_page_runs))
self.assertTrue(results.all_page_runs[0].failed)
self.assertTrue(results.all_page_runs[1].ok)
def testSkips(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(skip.SkipValue(self.pages[0], 'testing reason'))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.DidRunPage(self.pages[1])
self.assertTrue(results.all_page_runs[0].skipped)
self.assertEqual(self.pages[0], results.all_page_runs[0].user_story)
self.assertEqual(set([self.pages[0], self.pages[1]]),
results.pages_that_succeeded)
self.assertEqual(2, len(results.all_page_runs))
self.assertTrue(results.all_page_runs[0].skipped)
self.assertTrue(results.all_page_runs[1].ok)
def testBasic(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(scalar.ScalarValue(self.pages[1], 'a', 'seconds', 3))
results.DidRunPage(self.pages[1])
results.PrintSummary()
values = results.FindPageSpecificValuesForPage(self.pages[0], 'a')
self.assertEquals(1, len(values))
v = values[0]
self.assertEquals(v.name, 'a')
self.assertEquals(v.page, self.pages[0])
values = results.FindAllPageSpecificValuesNamed('a')
assert len(values) == 2
def testUrlIsInvalidValue(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
self.assertRaises(
AssertionError,
lambda: results.AddValue(scalar.ScalarValue(
self.pages[0], 'url', 'string', 'foo')))
def testAddSummaryValueWithPageSpecified(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
self.assertRaises(
AssertionError,
lambda: results.AddSummaryValue(scalar.ScalarValue(self.pages[0],
'a', 'units', 3)))
def testUnitChange(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
self.assertRaises(
AssertionError,
lambda: results.AddValue(scalar.ScalarValue(
self.pages[1], 'a', 'foobgrobbers', 3)))
def testTypeChange(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
self.assertRaises(
AssertionError,
lambda: results.AddValue(histogram.HistogramValue(
self.pages[1], 'a', 'seconds',
raw_value_json='{"buckets": [{"low": 1, "high": 2, "count": 1}]}')))
def testGetPagesThatSucceededAllPagesFail(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
results.AddValue(failure.FailureValue.FromMessage(self.pages[0], 'message'))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(scalar.ScalarValue(self.pages[1], 'a', 'seconds', 7))
results.AddValue(failure.FailureValue.FromMessage(self.pages[1], 'message'))
results.DidRunPage(self.pages[1])
results.PrintSummary()
self.assertEquals(0, len(results.pages_that_succeeded))
def testGetSuccessfulPageValuesMergedNoFailures(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
self.assertEquals(1, len(results.all_page_specific_values))
results.DidRunPage(self.pages[0])
def testGetAllValuesForSuccessfulPages(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
value1 = scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3)
results.AddValue(value1)
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
value2 = scalar.ScalarValue(self.pages[1], 'a', 'seconds', 3)
results.AddValue(value2)
results.DidRunPage(self.pages[1])
results.WillRunPage(self.pages[2])
value3 = scalar.ScalarValue(self.pages[2], 'a', 'seconds', 3)
results.AddValue(value3)
results.DidRunPage(self.pages[2])
self.assertEquals(
[value1, value2, value3], results.all_page_specific_values)
def testGetAllValuesForSuccessfulPagesOnePageFails(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
value1 = scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3)
results.AddValue(value1)
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
value2 = failure.FailureValue.FromMessage(self.pages[1], 'Failure')
results.AddValue(value2)
results.DidRunPage(self.pages[1])
results.WillRunPage(self.pages[2])
value3 = scalar.ScalarValue(self.pages[2], 'a', 'seconds', 3)
results.AddValue(value3)
results.DidRunPage(self.pages[2])
self.assertEquals(
[value1, value2, value3], results.all_page_specific_values)
def testTraceValue(self):
results = page_test_results.PageTestResults()
results.WillRunPage(self.pages[0])
results.AddValue(trace.TraceValue(None, trace_data.TraceData({'test' : 1})))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(trace.TraceValue(None, trace_data.TraceData({'test' : 2})))
results.DidRunPage(self.pages[1])
results.PrintSummary()
values = results.FindAllTraceValues()
self.assertEquals(2, len(values))
def testCleanUpCleansUpTraceValues(self):
results = page_test_results.PageTestResults()
v0 = trace.TraceValue(None, trace_data.TraceData({'test': 1}))
v1 = trace.TraceValue(None, trace_data.TraceData({'test': 2}))
results.WillRunPage(self.pages[0])
results.AddValue(v0)
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(v1)
results.DidRunPage(self.pages[1])
results.CleanUp()
self.assertTrue(v0.cleaned_up)
self.assertTrue(v1.cleaned_up)
def testNoTracesLeftAfterCleanUp(self):
results = page_test_results.PageTestResults()
v0 = trace.TraceValue(None, trace_data.TraceData({'test': 1}))
v1 = trace.TraceValue(None, trace_data.TraceData({'test': 2}))
results.WillRunPage(self.pages[0])
results.AddValue(v0)
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(v1)
results.DidRunPage(self.pages[1])
results.CleanUp()
self.assertFalse(results.FindAllTraceValues())
class PageTestResultsFilterTest(unittest.TestCase):
def setUp(self):
ps = page_set.PageSet(file_path=os.path.dirname(__file__))
ps.AddUserStory(page_module.Page('http://www.foo.com/', ps, ps.base_dir))
ps.AddUserStory(page_module.Page('http://www.bar.com/', ps, ps.base_dir))
self.page_set = ps
@property
def pages(self):
return self.page_set.pages
def testFilterValue(self):
def AcceptValueNamed_a(value, _):
return value.name == 'a'
results = page_test_results.PageTestResults(
value_can_be_added_predicate=AcceptValueNamed_a)
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
results.AddValue(scalar.ScalarValue(self.pages[0], 'b', 'seconds', 3))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(scalar.ScalarValue(self.pages[1], 'a', 'seconds', 3))
results.AddValue(scalar.ScalarValue(self.pages[1], 'd', 'seconds', 3))
results.DidRunPage(self.pages[1])
results.PrintSummary()
self.assertEquals(
[('a', 'http://www.foo.com/'), ('a', 'http://www.bar.com/')],
[(v.name, v.page.url) for v in results.all_page_specific_values])
def testFilterIsFirstResult(self):
def AcceptSecondValues(_, is_first_result):
return not is_first_result
results = page_test_results.PageTestResults(
value_can_be_added_predicate=AcceptSecondValues)
# First results (filtered out)
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 7))
results.AddValue(scalar.ScalarValue(self.pages[0], 'b', 'seconds', 8))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(scalar.ScalarValue(self.pages[1], 'a', 'seconds', 5))
results.AddValue(scalar.ScalarValue(self.pages[1], 'd', 'seconds', 6))
results.DidRunPage(self.pages[1])
# Second results
results.WillRunPage(self.pages[0])
results.AddValue(scalar.ScalarValue(self.pages[0], 'a', 'seconds', 3))
results.AddValue(scalar.ScalarValue(self.pages[0], 'b', 'seconds', 4))
results.DidRunPage(self.pages[0])
results.WillRunPage(self.pages[1])
results.AddValue(scalar.ScalarValue(self.pages[1], 'a', 'seconds', 1))
results.AddValue(scalar.ScalarValue(self.pages[1], 'd', 'seconds', 2))
results.DidRunPage(self.pages[1])
results.PrintSummary()
expected_values = [
('a', 'http://www.foo.com/', 3),
('b', 'http://www.foo.com/', 4),
('a', 'http://www.bar.com/', 1),
('d', 'http://www.bar.com/', 2)]
actual_values = [(v.name, v.page.url, v.value)
for v in results.all_page_specific_values]
self.assertEquals(expected_values, actual_values)
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Arithmetic Operations that don't fit into math_ops due to dependencies.
To avoid circular dependencies, some math_ops should go here.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import deprecation
from tensorflow.python.util.tf_export import tf_export
# TODO(b/27419586) Change docstring for required dtype of x once int allowed
@tf_export('math.lbeta', 'lbeta')
@deprecation.deprecated_endpoints('lbeta')
def lbeta(x, name=None):
r"""Computes \\(ln(|Beta(x)|)\\), reducing along the last dimension.
Given one-dimensional `z = [z_0,...,z_{K-1}]`, we define
$$Beta(z) = \prod_j Gamma(z_j) / Gamma(\sum_j z_j)$$
And for `n + 1` dimensional `x` with shape `[N1, ..., Nn, K]`, we define
$$lbeta(x)[i1, ..., in] = Log(|Beta(x[i1, ..., in, :])|)$$.
In other words, the last dimension is treated as the `z` vector.
Note that if `z = [u, v]`, then
  \\(Beta(z) = \int_0^1 t^{u-1} (1 - t)^{v-1} dt\\), which defines the
traditional bivariate beta function.
If the last dimension is empty, we follow the convention that the sum over
the empty set is zero, and the product is one.
Args:
x: A rank `n + 1` `Tensor`, `n >= 0` with type `float`, or `double`.
name: A name for the operation (optional).
Returns:
The logarithm of \\(|Beta(x)|\\) reducing along the last dimension.
"""
# In the event that the last dimension has zero entries, we return -inf.
  # This is consistent with a convention that the sum over the empty set is 0, and
# the product is 1.
# This is standard. See https://en.wikipedia.org/wiki/Empty_set.
with ops.name_scope(name, 'lbeta', [x]):
x = ops.convert_to_tensor(x, name='x')
# Note reduce_sum([]) = 0.
log_prod_gamma_x = math_ops.reduce_sum(
math_ops.lgamma(x), reduction_indices=[-1])
# Note lgamma(0) = infinity, so if x = []
# log_gamma_sum_x = lgamma(0) = infinity, and
# log_prod_gamma_x = lgamma(1) = 0,
# so result = -infinity
sum_x = math_ops.reduce_sum(x, axis=[-1])
log_gamma_sum_x = math_ops.lgamma(sum_x)
result = log_prod_gamma_x - log_gamma_sum_x
return result
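# Illustrative cross-check (an addition for exposition, not part of the
# TensorFlow API): the same ln(|Beta(x)|) reduction for a single 1-D list of
# positive floats, written in pure Python with math.lgamma.
def _lbeta_reference_sketch(z):
  """Return ln(|Beta(z)|) for a 1-D list of positive floats."""
  import math
  if not z:
    # Empty last dimension: the convention above yields -inf.
    return float('-inf')
  log_prod_gamma = sum(math.lgamma(v) for v in z)
  log_gamma_sum = math.lgamma(sum(z))
  return log_prod_gamma - log_gamma_sum
# For example, _lbeta_reference_sketch([1., 1.]) == 0.0, since Beta(1, 1) = 1.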
@tf_export('math.bessel_i0')
def bessel_i0(x, name=None):
"""Computes the Bessel i0 function of `x` element-wise.
Modified Bessel function of order 0.
It is preferable to use the numerically stabler function `i0e(x)` instead.
Args:
x: A `Tensor` or `SparseTensor`. Must be one of the following types: `half`,
`float32`, `float64`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor`, respectively. Has the same type as `x`.
@compatibility(scipy)
Equivalent to scipy.special.i0
@end_compatibility
"""
with ops.name_scope(name, 'bessel_i0', [x]):
return math_ops.exp(math_ops.abs(x)) * math_ops.bessel_i0e(x)
@tf_export('math.bessel_i1')
def bessel_i1(x, name=None):
"""Computes the Bessel i1 function of `x` element-wise.
Modified Bessel function of order 1.
It is preferable to use the numerically stabler function `i1e(x)` instead.
Args:
x: A `Tensor` or `SparseTensor`. Must be one of the following types: `half`,
`float32`, `float64`.
name: A name for the operation (optional).
Returns:
A `Tensor` or `SparseTensor`, respectively. Has the same type as `x`.
@compatibility(scipy)
Equivalent to scipy.special.i1
@end_compatibility
"""
with ops.name_scope(name, 'bessel_i1', [x]):
return math_ops.exp(math_ops.abs(x)) * math_ops.bessel_i1e(x)
@tf_export('einsum', 'linalg.einsum')
def einsum(equation, *inputs, **kwargs):
"""A generalized contraction between tensors of arbitrary dimension.
This function returns a tensor whose elements are defined by `equation`,
which is written in a shorthand form inspired by the Einstein summation
convention. As an example, consider multiplying two matrices
A and B to form a matrix C. The elements of C are given by:
```
C[i,k] = sum_j A[i,j] * B[j,k]
```
The corresponding `equation` is:
```
ij,jk->ik
```
In general, the `equation` is obtained from the more familiar element-wise
equation by
1. removing variable names, brackets, and commas,
2. replacing "*" with ",",
3. dropping summation signs, and
4. moving the output to the right, and replacing "=" with "->".
Many common operations can be expressed in this way. For example:
```python
# Matrix multiplication
>>> einsum('ij,jk->ik', m0, m1) # output[i,k] = sum_j m0[i,j] * m1[j, k]
# Dot product
>>> einsum('i,i->', u, v) # output = sum_i u[i]*v[i]
# Outer product
>>> einsum('i,j->ij', u, v) # output[i,j] = u[i]*v[j]
# Transpose
>>> einsum('ij->ji', m) # output[j,i] = m[i,j]
# Batch matrix multiplication
>>> einsum('aij,ajk->aik', s, t) # out[a,i,k] = sum_j s[a,i,j] * t[a, j, k]
```
This function behaves like `numpy.einsum`, but does not support:
* Ellipses (subscripts like `ij...,jk...->ik...`)
* Subscripts where an axis appears more than once for a single input
(e.g. `ijj,k->ik`).
* Subscripts that are summed across multiple inputs (e.g., `ij,ij,jk->ik`).
Args:
equation: a `str` describing the contraction, in the same format as
`numpy.einsum`.
*inputs: the inputs to contract (each one a `Tensor`), whose shapes should
be consistent with `equation`.
name: A name for the operation (optional).
Returns:
The contracted `Tensor`, with shape determined by `equation`.
Raises:
ValueError: If
- the format of `equation` is incorrect,
- the number of inputs implied by `equation` does not match `len(inputs)`,
- an axis appears in the output subscripts but not in any of the inputs,
- the number of dimensions of an input differs from the number of
indices in its subscript, or
- the input shapes are inconsistent along a particular axis.
"""
equation = equation.replace(' ', '')
name = kwargs.pop('name', None)
if kwargs:
raise TypeError('invalid keyword arguments for this function: ' + ', '.join(
[format(key) for key in sorted(list(kwargs.keys()))]))
with ops.name_scope(name, 'einsum', [equation, inputs]) as name:
if '...' in equation:
raise ValueError('Subscripts with ellipses are not yet supported.')
match = re.match('^([a-zA-Z,]+)(->[a-zA-Z]*)?$', equation)
if not match:
raise ValueError('Indices have incorrect format: %s' % equation)
inputs = list(inputs)
input_axis_labels = match.group(1).split(',')
if len(inputs) != len(input_axis_labels):
raise ValueError('Got %d arguments for equation "%s", expecting %d' %
(len(inputs), equation, len(input_axis_labels)))
axis_labels = set(''.join(input_axis_labels))
if match.group(2):
output_axis_labels = match.group(2)[2:]
else:
# infer the output subscripts if not given, assume alphabetical order
indices = ''.join(sorted(axis_labels))
counts = {ax: 0 for ax in indices}
for axes_ in input_axis_labels:
for ax in axes_:
counts[ax] += 1
output_axis_labels = ''.join(
sorted(ax for ax in indices if counts[ax] == 1))
for a in axis_labels:
input_count = sum(1 for s in input_axis_labels if a in s)
if input_count > 2 and a not in output_axis_labels:
logging.warn(
'Falling back to exponential-space implementation of einsum()'
' because index "%s" is summed over more than two inputs.', a)
return _exponential_space_einsum(equation, *inputs)
temp = inputs[0]
temp_axis_labels = input_axis_labels[0]
for i in xrange(len(inputs) - 1):
axes_to_sum = (
set(temp_axis_labels) &
set(input_axis_labels[i + 1]) - set(output_axis_labels))
temp, temp_axis_labels = _einsum_reduction(
temp, temp_axis_labels, inputs[i + 1], input_axis_labels[i + 1],
axes_to_sum)
missing_indices = set(temp_axis_labels) - set(output_axis_labels)
if missing_indices:
reduction_indices = [
i for i, a in enumerate(temp_axis_labels)
if a not in output_axis_labels
]
temp = math_ops.reduce_sum(temp, reduction_indices=reduction_indices)
temp_axis_labels = ''.join(
a for a in temp_axis_labels if a in output_axis_labels)
if sorted(temp_axis_labels) != sorted(output_axis_labels):
raise ValueError('Invalid equation: %s' % equation)
perm = [temp_axis_labels.index(a) for a in output_axis_labels]
return _transpose_if_necessary(temp, perm)
def _einsum_reduction(t0, t0_axis_labels, t1, t1_axis_labels, axes_to_sum):
"""Helper for einsum() that computes the result of a two-argument einsum().
Args:
t0: a `Tensor`
t0_axis_labels: a string of axis labels. This string's length must equal
the rank of t0.
t1: a `Tensor`
    t1_axis_labels: a string of axis labels. This string's length must equal
the rank of t1.
axes_to_sum: set of labels of axes to be summed over
Returns:
A `Tensor` whose elements are obtained by summing, over all axes in
`axes_to_sum`, the corresponding elements of `t0` and `t1`.
For example, if t0_axis_labels == 'abijk', t1_axis_labels == 'acjkl', and
axes_to_sum == {j,k}, this will return a tensor x where
out[a,b,c,i,l] = sum_j sum_k t0[a,b,i,j,k] * t1[a,c,j,k,l]
Raises:
ValueError: if the rank of `t0` does not match the length of
`t0_axis_labels`, or that of `t1` does not match the length of
`t1_axis_labels`.
"""
if len(t0_axis_labels) != len(t0.get_shape()):
raise ValueError(
'Tensor t0 of rank %d does not match einsum reduction of length %d' %
(len(t0.get_shape()), len(t0_axis_labels)))
if len(t1_axis_labels) != len(t1.get_shape()):
raise ValueError(
'Tensor t1 of rank %d does not match einsum reduction of length %d' %
(len(t1.get_shape()), len(t1_axis_labels)))
# This function computes the result of a two-argument einsum() using batch
# matrix multiplication. This involves
# 1. transposing t0 and t1 so that axes are in the correct order for
# batch matrix multiplication, and
# 2. reshaping t0 and t1 so that they are both of rank 3.
# First, we divide axes into three groups:
# * "preserved" axes are present in both inputs and the output
# * "summed" axes are present in both inputs but not the output
# * "broadcast" axes are present in exactly one input and the output
#
# As an example, if the einsum is abijk,acjkl->abcil, then "a" is a
# preserved axis, "b" and "c" are broadcast axes, and "j" and "k" are
# summed axes.
assert all(a in t0_axis_labels and a in t1_axis_labels for a in axes_to_sum)
preserved_axes = (set(t0_axis_labels) & set(t1_axis_labels)) - axes_to_sum
broadcast_axes = {}
for i, sym_list in enumerate([t0_axis_labels, t1_axis_labels]):
broadcast_axes[i] = set(sym_list) - preserved_axes - axes_to_sum
# Reorder the axes so that:
# 1. preserved axes come first in both inputs
# 2. in input 0, broadcast axes come next, followed by summed axes
# 3. in input 1, summed axes come next, followed by broadcast axes
def sort_key(input_index, a):
if a in preserved_axes:
return (-1, a)
elif ((input_index == 0 and a in broadcast_axes[0]) or
(input_index == 1 and a in axes_to_sum)):
return (0, a)
else:
return (1, a)
axis_labels = [t0_axis_labels, t1_axis_labels]
sorted_axes = [
sorted(sym_list, key=lambda a: sort_key(i, a))
for i, sym_list in enumerate(axis_labels)
]
inputs = [t0, t1]
for i, axes_str in enumerate(axis_labels):
perm = [axes_str.find(a) for a in sorted_axes[i]]
inputs[i] = _transpose_if_necessary(inputs[i], perm)
t0, t1 = inputs
if not axes_to_sum:
# In the special case where there are no axes to sum over, reduce to mul()
# rather than to batch matrix multiplication.
for _ in broadcast_axes[1]:
t0 = array_ops.expand_dims(t0, -1)
for _ in broadcast_axes[0]:
t1 = array_ops.expand_dims(t1, len(preserved_axes))
product = math_ops.multiply(t0, t1)
product_axes = sorted_axes[0] + sorted_axes[1][len(preserved_axes):]
return product, ''.join(product_axes)
else:
# Reduce to matmul().
# Reshape both inputs so as to combine multiple broadcast axes
# into a single axis, and combine multiple summed axes into a
# single axis.
t0_shape = _get_shape(t0)
num_broadcast_elements_t0 = _total_size(
t0_shape[len(preserved_axes):-len(axes_to_sum)])
num_summed_elements = _total_size(t0_shape[-len(axes_to_sum):])
new_shape = (
t0_shape[:len(preserved_axes)] +
[num_broadcast_elements_t0, num_summed_elements])
t0 = _reshape_if_necessary(t0, new_shape)
t1_shape = _get_shape(t1)
num_broadcast_elements_t1 = _total_size(
t1_shape[len(preserved_axes) + len(axes_to_sum):])
new_shape = (
t1_shape[:len(preserved_axes)] +
[num_summed_elements, num_broadcast_elements_t1])
t1 = _reshape_if_necessary(t1, new_shape)
product = math_ops.matmul(t0, t1)
# Undo compaction of broadcast axes
uncompacted_shape = (
t0_shape[:len(preserved_axes) + len(broadcast_axes[0])] +
t1_shape[len(t1_shape) - len(broadcast_axes[1]):])
product = _reshape_if_necessary(product, uncompacted_shape)
product_axes = (
sorted_axes[0][:len(preserved_axes) + len(broadcast_axes[0])] +
sorted_axes[1][len(sorted_axes[1]) - len(broadcast_axes[1]):])
return product, ''.join(product_axes)
def _transpose_if_necessary(tensor, perm):
"""Like transpose(), but avoids creating a new tensor if possible."""
  if list(perm) != list(range(len(perm))):
return array_ops.transpose(tensor, perm=perm)
else:
return tensor
def _reshape_if_necessary(tensor, new_shape):
"""Like reshape(), but avoids creating a new tensor if possible."""
# Accept None as an alias for -1 in new_shape.
new_shape = tuple(-1 if x is None else x for x in new_shape)
cur_shape = tuple(x.value for x in tensor.get_shape())
if (len(new_shape) == len(cur_shape) and
all(d0 == d1 or d1 == -1 for d0, d1 in zip(cur_shape, new_shape))):
return tensor
else:
return array_ops.reshape(tensor, new_shape)
def _get_shape(tensor):
"""Like get_shape().as_list(), but explicitly queries the shape of a tensor
if necessary to ensure that the returned value contains no unknown value."""
shape = tensor.get_shape().as_list()
none_indices = [i for i, d in enumerate(shape) if d is None]
if none_indices:
# Query the shape if shape contains None values
shape_tensor = array_ops.shape(tensor)
for i in none_indices:
shape[i] = shape_tensor[i]
return shape
def _total_size(shape_values):
"""Given list of tensor shape values, returns total size.
If shape_values contains tensor values (which are results of
array_ops.shape), then it returns a scalar tensor.
If not, it returns an integer."""
result = 1
for val in shape_values:
result *= val
return result
def _exponential_space_einsum(equation, *inputs):
"""Fallback implementation that supports summing an index over > 2 inputs."""
if '...' in equation:
raise ValueError('Subscripts with ellipses are not yet supported.')
match = re.match('^([a-zA-Z,]+)(->[a-zA-Z]*)?$', equation)
if not match:
raise ValueError('Indices have incorrect format: %s' % equation)
inputs = list(inputs)
idx_in = match.group(1).split(',')
idx_all = set(''.join(idx_in))
indices = ''.join(sorted(idx_all))
if match.group(2):
idx_out = match.group(2)[2:]
else:
# infer the output subscripts if not given, assume alphabetical order
counts = {ax: 0 for ax in indices}
for axes_ in idx_in:
for ax in axes_:
counts[ax] += 1
idx_out = ''.join(sorted(ax for ax in indices if counts[ax] == 1))
if len(idx_in) != len(inputs):
raise ValueError('Expected %d inputs but got %d' % (len(idx_in),
len(inputs)))
missing_idx = set(idx_out).difference(idx_all)
if missing_idx:
raise ValueError('Unknown output axes: %s' % missing_idx)
axis_order = {}
for ax in indices:
if ax not in idx_out:
axis_order[ax] = len(axis_order)
for ax in idx_out:
axis_order[ax] = len(axis_order)
# transpose inputs so axes are in order
for i, (input_, axes_) in enumerate(zip(inputs, idx_in)):
if input_.get_shape().ndims != len(axes_):
raise ValueError(
'Input %d with axes %s has incorrect' \
' number of dimensions (expected %d, got %d)' % (
i, axes_, len(axes_), input_.get_shape().ndims
)
)
sorted_idx = sorted(axes_, key=axis_order.get)
if len(set(axes_)) != len(axes_):
raise ValueError(
'Subscript not supported: an axis appears more than once: %s' % axes_)
if list(axes_) != sorted_idx:
permuted = [axes_.find(ax) for ax in sorted_idx]
inputs[i] = array_ops.transpose(input_, permuted)
idx_in[i] = sorted_idx
reduction_idx = []
shapes = [[dim if dim else -1
for dim in tensor.get_shape().as_list()]
for tensor in inputs]
# validate shapes for broadcasting
for j, ax in enumerate(sorted(idx_all, key=axis_order.get)):
dims = []
for i, idx in enumerate(idx_in):
if ax not in idx:
shapes[i].insert(j, 1)
else:
dim = shapes[i][j]
if isinstance(dim, int) and dim > 1:
dims.append(dim)
if len(set(dims)) > 1:
raise ValueError('Dimension mismatch on axis: %s' % ax)
if ax not in idx_out:
reduction_idx.append(j)
# reshape, multiply
expanded_inputs = [
array_ops.reshape(input_, shape) for input_, shape in zip(inputs, shapes)
]
expanded_output = 1
for input_ in expanded_inputs:
expanded_output *= input_
# contract
return math_ops.reduce_sum(expanded_output, reduction_idx)
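# A small NumPy sketch (NumPy and the helper below are illustrative additions,
# not part of this module) of the exponential-space strategy used above: line
# every input up on a common axis order with length-1 axes, multiply with
# broadcasting, then sum over the contracted axes. Shown for 'ij,ij,jk->ik',
# where index "j" is summed over three inputs.
def _exponential_space_einsum_demo(a, b, c):
  """Compute einsum('ij,ij,jk->ik', a, b, c) via broadcasting and a reduce."""
  import numpy as np
  a_ = np.asarray(a)[:, :, np.newaxis]   # (i, j, 1)
  b_ = np.asarray(b)[:, :, np.newaxis]   # (i, j, 1)
  c_ = np.asarray(c)[np.newaxis, :, :]   # (1, j, k)
  return (a_ * b_ * c_).sum(axis=1)      # contract over j -> shape (i, k)
# Cross-check on random data:
#   a, b = np.random.rand(2, 3), np.random.rand(2, 3)
#   c = np.random.rand(3, 4)
#   np.allclose(_exponential_space_einsum_demo(a, b, c),
#               np.einsum('ij,ij,jk->ik', a, b, c))  # -> True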
|
|
import queue
import concurrent.futures
import time
import hashlib
import sys
import re
import string
import random
import urllib.request
import urllib.parse
from html.parser import HTMLParser
from collections import namedtuple
from classes.cache import Cache
from classes.results import Results
class HTMLStripper(HTMLParser):
def __init__(self):
self.reset()
self.strict = False
self.convert_charrefs = True
self.tagtext = []
def handle_data(self, d):
self.tagtext.append(d)
def get_tagtext(self):
return ''.join(self.tagtext)
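# Example (illustrative): keep only the text content of an HTML snippet.
#
#   stripper = HTMLStripper()
#   stripper.feed('<p>Hello <b>world</b></p>')
#   stripper.get_tagtext()  # -> 'Hello world'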
def _clean_page(page):
    # this is the same method nmap's http.lua uses for error page detection
    # nselib/http.lua: clean_404
    # remove information from the page that might not be static
    # time
    page = re.sub(b'(\d?\d:?){2,3}', b'', page)
    page = re.sub(b'AM', b'', page, flags=re.IGNORECASE)
    page = re.sub(b'PM', b'', page, flags=re.IGNORECASE)
    # date with 4 digit year
    page = re.sub(b'(\d){8}', b'', page)
    page = re.sub(b'\d{4}-\d{2}-\d{2}', b'', page)
    page = re.sub(b'\d{4}/\d{2}/\d{2}', b'', page)
    page = re.sub(b'\d{2}-\d{2}-\d{4}', b'', page)
    page = re.sub(b'\d{2}/\d{2}/\d{4}', b'', page)
    # date with 2 digit year
    page = re.sub(b'(\d){6}', b'', page)
    page = re.sub(b'\d{2}-\d{2}-\d{2}', b'', page)
    page = re.sub(b'\d{2}/\d{2}/\d{2}', b'', page)
    # links and paths
    page = re.sub(b'/[^ ]+', b'', page)
    page = re.sub(b'[a-zA-Z]:\\\\[^ ]+', b'', page)
    # return the fingerprint of the stripped page
    return hashlib.md5(page).hexdigest().lower()
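# Example (illustrative): two error pages that differ only in volatile parts
# (timestamps, AM/PM markers, paths) reduce to the same fingerprint, which is
# what makes custom 404 detection possible:
#
#   p1 = b'<html>Not found. Generated 12:01:33 PM /var/www/a</html>'
#   p2 = b'<html>Not found. Generated 09:47:10 AM /var/www/b</html>'
#   _clean_page(p1) == _clean_page(p2)  # -> True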
def _create_response(response):
R = Response()
url = response.geturl()
response_info = urllib.request.urlparse(url)
body = response.read()
# get the page text only
parser = HTMLStripper()
parser.feed(body.decode('utf-8', 'ignore'))
page_text = parser.get_tagtext()
R.set_body(body)
R.protocol = response_info.scheme
R.host = response_info.netloc
R.url = url
R.status = {'code': response.code, 'text': response.reason}
R.headers = {pair[0].lower():pair[1] for pair in response.getheaders()}
R.md5 = hashlib.md5(body).hexdigest().lower()
R.md5_404 = _clean_page(body)
R.md5_404_text = _clean_page(page_text.encode('utf-8', 'ignore'))
return(R)
#######################################################################
#
# Override urllib.request classes
#
#######################################################################
class OutOfScopeException(Exception):
def __init__(self, org_url, new_url):
self.original_netloc = org_url.netloc
self.new_netloc = new_url.netloc
def __str__(self):
return repr( "%s is not in scope %s" % (self.new_netloc, self.original_netloc) )
class UnknownHostName(Exception):
def __init__(self, url):
self.url = url
def __str__(self):
return "Unknown host: %s" % (self.url,)
class ErrorHandler(urllib.request.HTTPDefaultErrorHandler):
def http_error_default(self, req, fp, code, msg, hdrs):
return(fp)
class RedirectHandler(urllib.request.HTTPRedirectHandler):
"""
This currently only checks if the redirection netloc is
    the same as the netloc of the original request.
NOTE: this is very strict, as it will not allow redirections
from 'example.com' to 'www.example.com'
"""
def http_error_302(self, req, fp, code, msg, headers):
if 'location' in headers:
org_url = urllib.request.urlparse(req.get_full_url())
new_url = urllib.request.urlparse(headers['location'])
# if the location starts with '/' the path is relative
if headers['location'].startswith('/'):
new_url = new_url._replace(scheme=org_url.scheme, netloc=org_url.netloc)
if not new_url.netloc == org_url.netloc:
raise OutOfScopeException(org_url, new_url)
# call python's built-in redirection handler
return urllib.request.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
http_error_301 = http_error_303 = http_error_307 = http_error_302
#######################################################################
#
# Custom request and response classes
#
#######################################################################
class Response:
"""
    This object is used to store response information.
    The normal http.client.HTTPResponse cannot be pickled,
    and pickling is required by the caching process.
"""
def __init__(self):
self.url = ''
self.protocol = ''
self.host = ''
self.status = {}
self.headers = {}
self.body = ''
self.md5 = ''
self.md5_404 = ''
self.should_be_error_page = False
chars = string.ascii_uppercase + string.digits
self.id = ''.join(random.choice(chars) for _ in range(16))
def get_url(self):
url_data = urllib.request.urlparse(self.url)
        if url_data.scheme == '': url_data = url_data._replace(scheme=self.protocol)
        if url_data.netloc == '': url_data = url_data._replace(netloc=self.host)
return url_data.geturl()
def set_body(self, body):
# check if the encoding is specified in the http header
content_type = 'Content-Type'.lower()
if content_type not in self.headers:
self.body = str(body, errors='replace')
else:
# find content-type definitions
content_types = {'text': False, 'charset': None}
for item in self.headers[content_type].split(';'):
if 'text' in item:
content_types['text'] = True
if 'charset' in item:
content_types['charset'] = item.split('=')[1]
# set the encoding to use
if content_types['charset'] is not None:
self.body = str(body, content_types['charset'], errors='replace')
elif content_types['text']:
self.body = str(body, 'ISO-8859-1', errors='replace')
else:
self.body = str(body, errors='replace')
def __repr__(self):
def get_string(r):
string = r.url + '\n'
string += '%s %s\n' %(r.status['code'], r.status['text'])
string += '\n'.join([header +': '+ r.headers[header] for header in r.headers])
string += '\n\n'
string += 'MD5: ' + self.md5 + '\n'
string += 'MD5 Error page: ' + self.md5_404 + '\n'
return string
return get_string(self)
class Requester:
def __init__(self, options, data):
self.threads = options['threads']
self.proxy = options['proxy']
self.user_agent = options['user_agent']
self.data = data
self.cache = data['cache']
self.requested = data['requested']
self.printer = data['printer']
self.is_redirected = False
self.find_404s = False
self.fingerprintQueue = None
self.url_data = urllib.request.urlparse(options['url'])
        if options['prefix']:
            self.url_data = self.url_data._replace(
                path=options['prefix'] + self.url_data.path)
self.url = urllib.request.urlunparse(self.url_data)
def _create_fetcher(self, redirect_handler=True):
args = [ErrorHandler]
if self.proxy == None:
args.append(urllib.request.ProxyHandler({}))
elif not self.proxy == False:
protocol = self.url_data.scheme
args.append(urllib.request.ProxyHandler({protocol: self.proxy}))
if redirect_handler:
args.append(RedirectHandler)
opener = urllib.request.build_opener(*args)
opener.addheaders = [('User-agent', self.user_agent)]
return opener
def detect_redirect(self):
parse = urllib.request.urlparse
# the original url
org_url = self.url_data
# get an opener doing redirections
try:
opener = self._create_fetcher(redirect_handler=False)
response = opener.open(self.url)
except:
raise UnknownHostName(self.url)
# the new url
new_url = parse(response.geturl())
# detect a redirection
new_loc = new_url.scheme + '://' + new_url.netloc
org_loc = org_url.scheme + '://' + org_url.netloc
self.is_redirected = not(new_loc == org_loc)
if self.is_redirected:
self.printer.print('%s redirects to %s\n' % (org_loc, new_loc), 2, '')
else:
self.printer.print('%s does not redirect\n' % (org_loc, ), 2, '')
# create an response object and add it to the cache
R = _create_response(response)
self.cache[new_loc] = R
self.cache[self.url] = R
return (self.is_redirected, new_loc)
def request(self, fp_list):
url = fp_list[0]['url']
complete_url = urllib.parse.urljoin(self.url, url)
R = None
# check if the url is out of scope
url_data = urllib.parse.urlparse(complete_url)
host_data = urllib.parse.urlparse(self.url)
if not url_data.netloc == host_data.netloc:
pass
elif not complete_url in self.cache:
try:
opener = self._create_fetcher()
request = urllib.request.Request(complete_url)
response = opener.open(request)
R = _create_response(response)
self.cache[complete_url] = R
self.cache[response.geturl()] = R
except Exception as e:
pass
else:
R = self.cache[complete_url]
return (fp_list, R)
def run(self, run_type, fp_lists):
with concurrent.futures.ThreadPoolExecutor(max_workers=self.threads) as executor:
future_list = []
for fp_list in fp_lists:
future_list.append(executor.submit(self.request, fp_list))
for future in concurrent.futures.as_completed(future_list):
self.requested.put(future.result())
return self.requested
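# Hypothetical wiring sketch (the dictionaries below only mirror the keys read
# by __init__ above; the Cache() call and the printer object are assumptions
# made for illustration):
#
#   options = {'url': 'http://example.com/', 'prefix': '', 'threads': 10,
#              'proxy': None, 'user_agent': 'Mozilla/5.0'}
#   data = {'cache': Cache(), 'requested': queue.Queue(), 'printer': printer}
#   requester = Requester(options, data)
#   is_redirected, new_loc = requester.detect_redirect()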
|
|
# -*- coding: utf-8 -*-
# Copyright 2012 Leonidas Poulopoulos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.shortcuts import render_to_response
from django.core.context_processors import request
from django.template.context import RequestContext
from django.http import HttpResponse
from mupy.muparse.models import *
from mupy.muparse.forms import *
from django.views.decorators.cache import cache_page
from django.views.decorators.cache import never_cache
from django.contrib.auth.decorators import login_required
from django.http import Http404
from django.core.cache import cache
import json, bz2
@login_required
def home(request, search_id = None):
result = None
if search_id:
savedsearches = SavedSearch.objects.get(pk=search_id)
graphs = []
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
graphs.extend(["%s"%(i.pk) for i in savedsearches.graphs.filter(node__group__in=nodegroups)])
else:
graphs.extend(["%s"%(i.pk) for i in savedsearches.graphs.all()])
graphs = ','.join(graphs)
result = {'result':graphs, 'display_type': savedsearches.display_type, 'description': savedsearches.description}
saved_searches = SavedSearch.objects.all().order_by('description')
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
saved_searches = saved_searches.filter(graphs__in=NodeGraphs.objects.filter(node__group__in=nodegroups)).distinct()
searches = []
if saved_searches:
searches.extend([s.description for s in saved_searches])
return render_to_response('main.html', {'saved':searches, "new_window": result}, context_instance =RequestContext(request))
@login_required
def get_node_tree(request, user_id):
if int(request.user.pk) != int(user_id):
raise Http404
glist = cache.get('user_%s_tree'%(request.user.pk))
if glist:
glist = bz2.decompress(glist)
return HttpResponse(glist, mimetype="application/json")
grlist = []
nlist = []
parsed_node = []
parsed_group = []
graphs = NodeGraphs.objects.all().select_related('node', 'graph', 'node__group', 'graph__category').order_by('node__group', 'node', 'graph__category__name')
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
graphs = graphs.filter(node__group__in=nodegroups)
len_graphs = len(graphs)
for index, graph in enumerate(graphs):
if graph.node not in parsed_node:
parsed_graph_category = []
if parsed_node:
ndict['children'].append(gcdict)
nlist.append(ndict)
ndict = {}
ndict['title'] = graph.node.name
ndict['key'] = "node_%s" %(graph.node.pk)
ndict['href'] = graph.node.url
ndict['children'] = []
ndict['type'] = "node"
parsed_node.append(graph.node)
if graph.node.group not in parsed_group:
if parsed_group:
nlist.sort(key=lambda item:item['title'], reverse=False)
grdict['children']= nlist
grlist.append(grdict)
nlist = []
grdict={}
grdict['title'] = graph.node.group.name
grdict['key'] = "group_%s" %(graph.node.group.pk)
grdict['href'] = graph.node.group.url
grdict['children'] = []
grdict['type'] = "group"
parsed_group.append(graph.node.group)
if graph.graph.category.name not in parsed_graph_category:
if parsed_graph_category:
ndict['children'].append(gcdict)
gcdict = {}
gcdict['title'] = graph.graph.category.name
gcdict['key'] = "graphCategory_%s_%s" %(graph.node.pk, graph.graph.category.pk)
gcdict['children'] = []
gcdict['type'] = "graph_category"
parsed_graph_category.append(graph.graph.category.name)
gdict = {}
gdict['type'] = "graph"
gdict['title'] = graph.graph.name
gdict['nodename'] = graph.node.name
gdict['graphname'] = graph.graph.name
gdict['slug'] = graph.graph.slug
gdict['url'] = graph.baseurl
gdict['key'] = "graph_%s" %(graph.pk)
gdict['pageurl'] = graph.pageurl
gdict['nodeurl'] = graph.node.url
gcdict['children'].append(gdict)
if (index == (len_graphs-1)):
nlist.append(ndict)
nlist.sort(key=lambda item:item['title'], reverse=False)
grdict['children']= nlist
grlist.append(grdict)
ndict['children'].append(gcdict)
glist = json.dumps(grlist)
cache.set('user_%s_tree'%(request.user.pk), bz2.compress(glist), 60 * 60 * 24 *5)
return HttpResponse(glist, mimetype="application/json")
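# Shape of the serialised tree returned above (illustrative; values
# abbreviated): a list of group entries, each holding its nodes, each node
# grouping its graphs by graph category.
#
# [
#   {"title": "<group>", "type": "group", "children": [
#     {"title": "<node>", "type": "node", "children": [
#       {"title": "<category>", "type": "graph_category", "children": [
#         {"title": "<graph>", "type": "graph", "url": "...", "pageurl": "..."}
#       ]}
#     ]}
#   ]}
# ]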
@login_required
def get_node_tree_category(request, user_id):
if int(request.user.pk) != int(user_id):
raise Http404
glist = cache.get('user_%s_tree_cat'%(request.user.pk))
if glist:
glist = bz2.decompress(glist)
return HttpResponse(glist, mimetype="application/json")
grlist = []
nlist = []
graphs_list = []
parsed_node = []
parsed_group = []
parsed_graph_category = []
parsed_graph_name =[]
graphs = NodeGraphs.objects.all().select_related('node', 'graph', 'node__group', 'graph__category').order_by('graph__category__name', 'graph__name', 'node__group', 'node__name')
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
graphs = graphs.filter(node__group__in=nodegroups)
len_graphs = len(graphs)
for index, graph in enumerate(graphs):
current_category = graph.graph.category.name
if graph.graph.category.name not in parsed_graph_category:
if parsed_graph_category:
grlist.append(gcdict)
gcdict = {}
gcdict['title'] = graph.graph.category.name
gcdict['key'] = "graphCategory_%s_%s" %(graph.node.pk, graph.graph.category.pk)
gcdict['children'] = []
gcdict['type'] = "graph_category"
parsed_graph_category.append(graph.graph.category.name)
if graph.graph.name not in parsed_graph_name:
parsed_group = []
gdict = {}
gdict['type'] = "graph"
gdict['title'] = graph.graph.name
gdict['slug'] = graph.graph.slug
gdict['children'] = []
gcdict['children'].append(gdict)
parsed_graph_name.append(graph.graph.name)
if graph.node.group.name not in parsed_group:
grdict = {}
grdict['title'] = graph.node.group.name
grdict['key'] = "group_%s" %(graph.node.group.pk)
grdict['href'] = graph.node.group.url
grdict['children'] = []
grdict['type'] = "group"
gdict['children'].append(grdict)
parsed_group.append(graph.node.group.name)
ndict = {}
ndict['nodename'] = graph.node.name
ndict['title'] = graph.node.name
ndict['graphname'] = graph.graph.name
ndict['key'] = "graph_%s" %(graph.pk)
ndict['url'] = graph.baseurl
ndict['pageurl'] = graph.pageurl
ndict['nodeurl'] = graph.node.url
ndict['type'] = "graph"
grdict['children'].append(ndict)
if (index == (len_graphs-1)):
grlist.append(gcdict)
glist = json.dumps(grlist)
cache.set('user_%s_tree_cat'%(request.user.pk), bz2.compress(glist), 60 * 60 * 24 *5)
return HttpResponse(glist, mimetype="application/json")
@login_required
@never_cache
def save_search(request):
request_data = request.POST.copy()
graph_pks = request_data.get('graphs').split(',')
is_edit = request_data.get('is_edit')
request_data.setlist('graphs', graph_pks)
form = SavedSearchForm(request_data)
if is_edit == 'edit':
description = request_data.get('description')
try:
existinggraphsearch = SavedSearch.objects.get(description=description)
form = SavedSearchForm(request_data, instance=existinggraphsearch)
except SavedSearch.DoesNotExist:
pass
if form.is_valid():
search = form.save(commit=False)
search.save()
form.save_m2m()
response = json.dumps({"result": "Successfully saved %s graphs as %s"%(len(graph_pks), search.description), 'errors': 'None'})
return HttpResponse(response, mimetype="application/json")
else:
response = json.dumps({"result": "Errors: %s" %(form.errors), 'errors': "True"})
return HttpResponse(response, mimetype="application/json")
@login_required
@never_cache
def load_search(request, search_id=None):
savedsearches = SavedSearch.objects.get(pk=search_id)
graphs = []
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
graphs.extend(["%s"%(i.pk) for i in savedsearches.graphs.filter(node__group__in=nodegroups)])
else:
graphs.extend(["%s"%(i.pk) for i in savedsearches.graphs.all()])
graphs = ','.join(graphs)
result = json.dumps({'result':graphs, 'display_type': savedsearches.display_type, 'description': savedsearches.description})
return HttpResponse(result, mimetype="application/json")
@login_required
@never_cache
def load_search_blank(request, search_id=None):
savedsearches = SavedSearch.objects.get(pk=search_id)
graphs = []
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
graphs.extend([int("%s"%(i.pk)) for i in savedsearches.graphs.filter(node__group__in=nodegroups)])
else:
graphs.extend([int("%s"%(i.pk)) for i in savedsearches.graphs.all()])
nodegraphs = NodeGraphs.objects.filter(pk__in=graphs)
return render_to_response('searches_window.html', {'graphs':nodegraphs}, context_instance =RequestContext(request))
@login_required
@never_cache
def delete_search(request, search_id=None):
try:
savedsearch = SavedSearch.objects.get(pk=search_id)
savedsearch.delete()
response = json.dumps({"result": "Successfully deleted %s"%(savedsearch.description), 'errors': 'None'})
except Exception as e:
response = json.dumps({"result": "Errors: %s" %(e), 'errors': "True"})
return HttpResponse(response, mimetype="application/json")
@login_required
@never_cache
def saved_searches(request):
searches = []
saved_searches = SavedSearch.objects.all().order_by('description')
searches = saved_searches
if not request.user.is_superuser:
try:
nodegroups = request.user.get_profile().nodegroups.all()
except UserProfile.DoesNotExist:
raise Http404
searches = []
saved_searches = saved_searches.filter(graphs__in=NodeGraphs.objects.filter(node__group__in=nodegroups)).distinct()
for s in saved_searches:
s_graphs = s.graphs.filter(node__group__in=nodegroups)
if s_graphs:
search_dict = {
'pk': s.pk,
'description': s.description,
'graphs': s_graphs
}
searches.append(search_dict)
return render_to_response('saved_searches.html', {"saved":searches}, context_instance =RequestContext(request))
|
|
RiskNotify = {
"secuid": "char",
"alarm_score": "char",
"alarm_status": "char",
"alarm_rule": "char",
"alarm_time": "char",
"alarm_msg": "char",
}
OrderReq = {
"cl_order_id": "char",
"symbol": "char",
"order_type": "int16_t",
"side": "int16_t",
"volume": "int64_t",
"price": "int64_t",
"order_flag": "int16_t",
}
OrderRsp = {
"account_id": "char",
"account_type": "int16_t",
"order_date": "int32_t",
"order_id": "char",
"cl_order_id": "char",
}
CancelReq = {
"order_id": "char",
"cl_order_id": "char",
"cl_cancel_id": "char",
}
CancelRsp = {
"account_id": "char",
"account_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
"cl_cancel_id": "char",
}
OrderDetail = {
"account_id": "char",
"account_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
"orig_order_id": "char",
"symbol": "char",
"order_status": "int16_t",
"order_type": "int16_t",
"side": "int16_t",
"volume": "int64_t",
"price": "int64_t",
"filled_volume": "int64_t",
"filled_turnover": "int64_t",
"filled_price": "int64_t",
"cancel_volume": "int64_t",
"cancel_flag": "int16_t",
"order_date": "int32_t",
"order_time": "int32_t",
"err_code": "int32_t",
"err_msg": "char",
"secuid": "char",
"name": "char",
"freeze_amount": "int64_t",
}
TradeDetail = {
"account_id": "char",
"account_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
"symbol": "char",
"order_type": "int16_t",
"side": "int16_t",
"report_type": "int16_t",
"report_no": "char",
"volume": "int64_t",
"price": "int64_t",
"turnover": "int64_t",
"trade_date": "int32_t",
"trade_time": "int32_t",
"err_code": "int32_t",
"err_msg": "char",
"secuid": "char",
"name": "char",
}
PositionDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"side": "int16_t",
"volume": "int64_t",
"avail_volume": "int64_t",
"init_volume": "int64_t",
"market_value": "int64_t",
"today_buy_volume": "int64_t",
"today_sell_volume": "int64_t",
"secuid": "char",
"cost_price": "int32_t",
"buy_cost": "int64_t",
"last_price": "int32_t",
"income": "int64_t",
"total_income": "int64_t",
"profit_cost": "int64_t",
"today_avail_pr_volume": "int64_t",
"market": "int32_t",
"security_type": "int32_t",
"freeze_volume": "int64_t",
"name": "char",
}
CashDetail = {
"account_id": "char",
"account_type": "int16_t",
"currency_type": "int16_t",
"total_amount": "int64_t",
"avail_amount": "int64_t",
"total_asset": "int64_t",
"market_value": "int64_t",
}
AccountInfo = {
"account_id": "char",
"account_type": "int16_t",
"account_pwd": "char",
"cust_orgid": "char",
"cust_branchid": "char",
"cl_system_id": "char",
}
SecuidInfo = {
"market": "char",
"secuid": "char",
}
LoginRsp = {
"account_id": "char",
"account_type": "int16_t",
"cust_orgid": "char",
"cust_branchid": "char",
"cust_id": "char",
"cust_name": "char",
"secuid_array": "SecuidInfo",
"cif_account": "char",
}
ETFDetail = {
"name": "char",
"first_symbol": "char",
"second_symbol": "char",
"trade_unit": "int32_t",
"cash_component": "int64_t",
"max_cash_ratio": "int64_t",
"trade_state": "int32_t",
"record_num": "int32_t",
"trade_day": "int32_t",
"pre_trade_day": "int32_t",
"nav_percu": "int64_t",
"nav": "int64_t",
"creation_limituser": "int64_t",
"redemption_limituser": "int64_t",
"net_creation_limituser": "int64_t",
"net_redemption_limituser": "int64_t",
"creation_limit": "int64_t",
"redemption_limit": "int64_t",
"net_creation_limit": "int64_t",
"net_redemption_limit": "int64_t",
}
ETFStockDetail = {
"first_symbol": "char",
"name": "char",
"symbol": "char",
"volume": "int32_t",
"replace_flag": "int16_t",
"over_price_rate": "int64_t",
"replace_amt": "int64_t",
}
MaxOrderQtyDetail = {
"account_id": "char",
"account_type": "int16_t",
"max_qty": "int64_t",
"actual_max_qty": "int64_t",
}
IPOMaxPurchaseDetail = {
"account_id": "char",
"account_type": "int16_t",
"market": "char",
"max_qty": "int64_t",
"stib_max_qty": "int64_t",
}
IPOStockDetail = {
"symbol": "char",
"name": "char",
"currency_type": "int16_t",
"ipo_price": "int64_t",
"max_num": "int64_t",
"min_num": "int64_t",
}
IPODistributionDetail = {
"account_id": "char",
"account_type": "int16_t",
"biz_date": "int32_t",
"symbol": "char",
"name": "char",
"mate_no": "char",
"match_qty": "int64_t",
}
IPOLotteryDetail = {
"account_id": "char",
"account_type": "int16_t",
"biz_date": "int32_t",
"symbol": "char",
"name": "char",
"match_price": "int64_t",
"match_qty": "int64_t",
}
SecurityBaseInfo = {
"symbol": "char",
"name": "char",
"security_type": "int32_t",
"security_status": "int32_t",
"price_tick": "int64_t",
"limit_up": "int64_t",
"limit_down": "int64_t",
"max_qty": "int64_t",
"min_qty": "int64_t",
"buy_tick": "int64_t",
"sale_tick": "int64_t",
"has_price_limit": "int32_t",
"max_rise_rate": "int32_t",
"is_registration": "int32_t",
}
HKRateDetail = {
"market": "char",
"buy_rate": "int64_t",
"sale_rate": "int64_t",
"mid_rate": "int64_t",
"set_rate": "int64_t",
"sys_date": "int32_t",
}
HKStockDetail = {
"symbol": "char",
"name": "char",
"stktype": "int16_t",
"currency_type": "int16_t",
}
HKFundDetail = {
"account_id": "char",
"account_type": "int16_t",
"currency_type": "int16_t",
"fund_uncomeavl": "int64_t",
"fund_buy": "int64_t",
"fund_sale": "int64_t",
"fund_uncomebuy": "int64_t",
"fund_uncomesale": "int64_t",
}
HKMinPriceUnitDetail = {
"stktype": "int16_t",
"begin_price": "int64_t",
"end_price": "int64_t",
"price_unit": "int32_t",
}
HKTradeCalendarDetail = {
"date": "int32_t",
"business_flag": "int16_t",
"commit_flag": "int16_t",
}
CreditMortgageInOutReq = {
"cl_order_id": "char",
"symbol": "char",
"side": "int16_t",
"volume": "int32_t",
}
CreditMortgageInOutRsp = {
"account_id": "char",
"account_type": "int16_t",
"cl_order_id": "char",
"order_id": "char",
}
CreditStockBackReq = {
"cl_order_id": "char",
"symbol": "char",
"volume": "int32_t",
}
CreditStockBackRsp = {
"account_id": "char",
"account_type": "int16_t",
"cl_order_id": "char",
"order_id": "char",
}
CreditPayBackRsp = {
"account_id": "char",
"account_type": "int16_t",
"real_back_amt": "int64_t",
}
CreditStockDetail = {
"symbol": "char",
"name": "char",
"currency_type": "int16_t",
"credit_fund_ctrl": "char",
"credit_stk_ctrl": "char",
"margin_rate_fund": "int64_t",
"margin_rate_stk": "int64_t",
"sep_rem_qty": "int32_t",
}
CreditAssetsDetail = {
"account_id": "char",
"account_type": "int16_t",
"currency_type": "int16_t",
"avail_balance": "int64_t",
"fetch_balance": "int64_t",
"frozen_balance": "int64_t",
"stock_balance": "int64_t",
"fund_balance": "int64_t",
"asset_balance": "int64_t",
"avail_margin": "int64_t",
"credit_quota": "int64_t",
"finance_quota": "int64_t",
"shortsell_quota": "int64_t",
"assure_ratio": "int64_t",
"total_debt": "int64_t",
"fund_debt": "int64_t",
"stock_debt": "int64_t",
"fund_interest_fee": "int64_t",
"stock_interest_fee": "int64_t",
"shortsell_total_balance": "int64_t",
"shortsell_avail_balance": "int64_t",
"shortsell_frozen_balance": "int64_t",
"enbuyback_avail_balance": "int64_t",
"fund_margin_profit": "int64_t",
"stock_margin_profit": "int64_t",
"fund_interest": "int64_t",
"stock_interest": "int64_t",
"fund_margin_balance": "int64_t",
"stock_margin_balance": "int64_t",
"fund_floating_deficit": "int64_t",
"stock_floating_deficit": "int64_t",
"fund_margin_profit_conversion": "int64_t",
"stock_margin_profit_conversion": "int64_t",
}
CreditFinanceDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"name": "char",
"order_id": "char",
"currency_type": "int16_t",
"debt_status": "int16_t",
"occur_date": "int32_t",
"total_balance": "int64_t",
"cur_balance": "int64_t",
"total_interest_fee": "int64_t",
"cur_interest_fee": "int64_t",
"interest_rate": "int64_t",
"repayable_balance": "int64_t",
"f_deal_bal": "int64_t",
"f_exp_cet_intr": "int64_t",
"credit_repay_unfrz": "int64_t",
"all_fee_unfrz": "int64_t",
"market": "int32_t",
}
CreditShortsellDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"name": "char",
"order_id": "char",
"currency_type": "int16_t",
"debt_status": "int16_t",
"occur_date": "int32_t",
"total_qty": "int64_t",
"cur_qty": "int64_t",
"total_interest_fee": "int64_t",
"cur_interest_fee": "int64_t",
"interest_rate": "int64_t",
"d_stk_bal": "int64_t",
"market": "int32_t",
"all_fee_unfrz": "int64_t",
"stk_repay_unfrz": "int64_t",
"end_date": "int32_t",
}
CreditRepayAmountDetail = {
"account_id": "char",
"account_type": "int16_t",
"max_back_amt": "int64_t",
}
CreditRepayStockDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"name": "char",
"total_qty": "int64_t",
"max_back_qty": "int64_t",
"returned_qty": "int64_t",
}
CreditMortgageHoldDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"name": "char",
"currency_type": "int16_t",
"pledge_rate": "int64_t",
}
CreditSecuritySellQtyRsp = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"total_qty": "int64_t",
"avail_qty": "int64_t",
}
OptionCashDetail = {
"account_id": "char",
"account_type": "int16_t",
"currency_type": "int16_t",
"init_amount": "int64_t",
"total_amount": "int64_t",
"avail_amount": "int64_t",
"fetch_amount": "int64_t",
"frozen_amount": "int64_t",
"used_margin": "int64_t",
"exercise_frozen": "int64_t",
"deposit_amount": "int64_t",
"withdraw_amount": "int64_t",
"royalty_in": "int64_t",
"royalty_out": "int64_t",
"commission": "int64_t",
"frozen_commission": "int64_t",
}
OptionPositionDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"side": "int16_t",
"init_volume": "int64_t",
"init_avail_volume": "int64_t",
"init_comb_volume": "int64_t",
"volume": "int64_t",
"avail_volume": "int64_t",
"comb_volume": "int64_t",
"open_volume": "int64_t",
"close_volume": "int64_t",
"exercise_frozen": "int64_t",
"buy_cost": "int64_t",
"secuid": "char",
}
ExerciseReq = {
"cl_order_id": "char",
"symbol": "char",
"volume": "int64_t",
"exercise_type": "int16_t",
}
ExerciseRsp = {
"account_id": "char",
"account_type": "int16_t",
"cl_order_id": "char",
"order_id": "char",
"exercise_type": "int16_t",
}
CancelExerciseReq = {
"order_id": "char",
"cl_order_id": "char",
"cl_cancel_id": "char",
}
CancelExerciseRsp = {
"account_id": "char",
"account_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
"cl_cancel_id": "char",
}
ExerciseDetail = {
"account_id": "char",
"account_type": "int16_t",
"exercise_type": "int16_t",
"cl_order_id": "char",
"order_id": "char",
"counter_order_date": "int32_t",
"counter_order_time": "int32_t",
"counter_cancel_time": "int32_t",
"exercise_status": "int32_t",
"oper_status": "int32_t",
"symbol": "char",
"volume": "int32_t",
"cancel_volume": "int32_t",
"err_code": "int32_t",
"err_msg": "char",
}
ContractInfo = {
"contract_id": "char",
"exch_contract_id": "char",
"contract_name": "char",
"underlying_contract_id": "char",
"underlying_contract_name": "char",
"underlying_multiplier": "int32_t",
"strike_mode": "int32_t",
"options_type": "int32_t",
"order_unit": "int32_t",
"buy_trading_unit": "int32_t",
"sell_trading_unit": "int32_t",
"max_market_order_buy_volume": "int32_t",
"min_market_order_buy_volume": "int32_t",
"max_limit_order_buy_volume": "int32_t",
"min_limit_order_buy_volume": "int32_t",
"max_market_order_sell_volume": "int32_t",
"min_market_order_sell_volume": "int32_t",
"max_limit_order_sell_volume": "int32_t",
"min_limit_order_sell_volume": "int32_t",
"volume_multiplier": "int32_t",
"price_tick": "int64_t",
"contract_status": "int32_t",
"strike_price": "int64_t",
"first_date": "int32_t",
"last_date": "int32_t",
"strike_date": "int32_t",
"expire_date": "int32_t",
"deliv_date": "int32_t",
"is_up_down_limit": "int32_t",
"margin_unit": "int64_t",
"pre_settlement_price": "int64_t",
"pre_close_price": "int64_t",
"underlying_pre_close_price": "int64_t",
}
TransferStockReq = {
"symbol": "char",
"transfer_type": "int16_t",
"transfer_side": "int16_t",
"transfer_qty": "int64_t",
}
TransferStockRsp = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"transfer_type": "int16_t",
"transfer_side": "int16_t",
"transfer_qty": "int64_t",
}
TransferStockDetail = {
"account_id": "char",
"account_type": "int16_t",
"serial_number": "char",
"symbol": "char",
"transact_date": "int32_t",
"transact_time": "int32_t",
"transfer_type": "int16_t",
"transfer_side": "int16_t",
"transfer_status": "int16_t",
"yesterday_volume": "int64_t",
"today_bs_volume": "int64_t",
"today_pr_volume": "int64_t",
"stock_cust_id": "char",
"stock_secuid": "char",
"status_msg": "char",
}
LockOrderReq = {
"cl_order_id": "char",
"symbol": "char",
"lock_type": "int16_t",
"volume": "int64_t",
}
LockOrderRsp = {
"account_id": "char",
"account_type": "int16_t",
"lock_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
}
LockOrderDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"cl_order_id": "char",
"order_id": "char",
"order_date": "int32_t",
"order_time": "int32_t",
"cancel_time": "int32_t",
"lock_type": "int16_t",
"lock_status": "int16_t",
"volume": "int64_t",
"cancel_volume": "int64_t",
"err_code": "int32_t",
"err_msg": "char",
}
CombOrderReq = {
"cl_order_id": "char",
"symbol": "char",
"comb_type": "int16_t",
"comb_strategy": "int16_t",
"volume": "int64_t",
"exch_comb_id": "char",
}
CombOrderRsp = {
"account_id": "char",
"account_type": "int16_t",
"comb_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
}
CombExerciseReq = {
"cl_order_id": "char",
"call_symbol": "char",
"put_symbol": "char",
"volume": "int64_t",
}
CombExerciseRsp = {
"account_id": "char",
"account_type": "int16_t",
"cl_order_id": "char",
"order_id": "char",
}
CancelCombExerciseReq = {
"order_id": "char",
"cl_order_id": "char",
"cl_cancel_id": "char",
}
CancelCombExerciseRsp = {
"account_id": "char",
"account_type": "int16_t",
"order_id": "char",
"cl_order_id": "char",
"cl_cancel_id": "char",
}
CombOrderDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"exch_comb_id": "char",
"comb_type": "int16_t",
"comb_strategy": "int16_t",
"cl_order_id": "char",
"order_id": "char",
"order_date": "int32_t",
"order_time": "int32_t",
"cancel_time": "int32_t",
"order_status": "int16_t",
"oper_status": "int16_t",
"volume": "int64_t",
"cancel_volume": "int64_t",
"err_code": "int32_t",
"err_msg": "char",
}
CombExerciseDetail = {
"account_id": "char",
"account_type": "int16_t",
"call_symbol": "char",
"put_symbol": "char",
"cl_order_id": "char",
"order_id": "char",
"order_date": "int32_t",
"order_time": "int32_t",
"cancel_time": "int32_t",
"exercise_status": "int16_t",
"oper_status": "int16_t",
"volume": "int64_t",
"cancel_volume": "int64_t",
"err_code": "int32_t",
"err_msg": "char",
}
CombPositionDetail = {
"account_id": "char",
"account_type": "int16_t",
"symbol": "char",
"comb_strategy": "int16_t",
"volume": "int64_t",
"avail_volume": "int64_t",
"yesterday_volume": "int64_t",
"yesterday_avail_volume": "int64_t",
"today_volume": "int64_t",
"today_avail_volume": "int64_t",
"margin": "int64_t",
"frozen_margin": "int64_t",
"commission": "int64_t",
"frozen_commission": "int64_t",
}
CombContractInfo = {
"contract_id": "char",
"comb_strategy": "int16_t",
"leg1_contract_id": "char",
"leg1_name": "char",
"leg1_side": "int16_t",
"leg2_contract_id": "char",
"leg2_name": "char",
"leg2_side": "int16_t",
}
LockSecurityDetail = {
"account_id": "char",
"account_type": "int16_t",
"sys_date": "int32_t",
"sno": "char",
"symbol": "char",
"name": "char",
"lock_type": "int16_t",
"used_fee_rate": "int64_t",
"unused_fee_rate": "int64_t",
"lock_qty": "int32_t",
"used_qty": "int32_t",
"back_qty": "int32_t",
"begin_date": "int32_t",
"back_date": "int32_t",
"end_date": "int32_t",
}
ExtendLockSecurityReq = {
"sys_date": "int32_t",
"sno": "char",
"apply_delay_days": "int32_t",
"apply_used_fee_rate": "int64_t",
"apply_unused_fee_rate": "int64_t",
"apply_delay_qty": "int32_t",
"symbol": "char",
}
LockSecurityExtensionDetail = {
"account_id": "char",
"account_type": "int16_t",
"apply_date": "int32_t",
"apply_sno": "int32_t",
"sys_date": "int32_t",
"sno": "char",
"symbol": "char",
"name": "char",
"apply_end_date": "int32_t",
"apply_delay_qty": "int32_t",
"approval_used_fee_rate": "int64_t",
"approval_unused_fee_rate": "int64_t",
"approval_status": "int16_t",
"approval_remark": "char",
}
TransferFundDetail = {
"account_id": "char",
"account_type": "int16_t",
"serial_number": "char",
"transact_date": "int32_t",
"transact_time": "int32_t",
"transfer_value": "int64_t",
"transfer_side": "int16_t",
"transfer_status": "int16_t",
"remark": "char",
}
|
|
import BTrees
import UserDict
import ZODB.POSException
import ZODB.blob
import cStringIO
import logging
import persistent
import persistent.mapping
import tempfile
import time
import transaction
import zc.set
import zeit.connector.interfaces
import zope.interface
import zope.security.proxy
import zope.testing.cleanup
log = logging.getLogger(__name__)
def get_storage_key(key):
if isinstance(key, unicode):
key = key.encode('utf8')
assert isinstance(key, str)
return key
class StringRef(persistent.Persistent):
# Legacy
def __init__(self, s):
self._str = s
def open(self, mode):
return cStringIO.StringIO(self._str)
def update(self, new_data):
self._str = new_data
class SlottedStringRef(StringRef):
"""A variant of StringRef using slots for less memory consumption."""
# Legacy
__slots__ = ('_str',)
INVALID_ETAG = object()
class Body(persistent.Persistent):
BUFFER_SIZE = 10 * 1024
__slots__ = ('data', 'etag')
def __init__(self):
self.data = self.etag = None
def open(self, mode='r'):
assert mode == 'r'
if isinstance(self.data, ZODB.blob.Blob):
try:
                committed_name = self.data.committed()
except ZODB.blob.BlobError:
# In the rather rare case that the blob was created in this
# transaction, we have to copy the data to a temporary file.
data = self.data.open('r')
tmp = tempfile.NamedTemporaryFile(
prefix='zeit.connector.cache.')
s = data.read(self.BUFFER_SIZE)
while s:
tmp.write(s)
s = data.read(self.BUFFER_SIZE)
tmp.seek(0, 0)
data_file = open(tmp.name, 'rb')
else:
                data_file = open(committed_name, 'rb')
elif isinstance(self.data, str):
data_file = cStringIO.StringIO(self.data)
else:
raise RuntimeError('self.data is of unsupported type %s' %
type(self.data))
return data_file
def update(self, data, etag):
if etag == self.etag:
return
self.etag = etag
if hasattr(data, 'seek'):
data.seek(0)
s = data.read(self.BUFFER_SIZE)
if len(s) < self.BUFFER_SIZE:
# Small object
small = True
target = cStringIO.StringIO()
else:
small = False
self.data = ZODB.blob.Blob()
target = self.data.open('w')
while s:
target.write(s)
s = data.read(self.BUFFER_SIZE)
if small:
self.data = target.getvalue()
else:
target.close()
    def _p_resolveConflict(self, old, committed, newstate):
        if committed[1]['etag'] == newstate[1]['etag']:
            return committed
        # Different ETags. Invalidate the cache.
        committed[1]['etag'] = INVALID_ETAG
        return committed
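# Illustrative sketch (not part of the original module): Body.update() keeps
# payloads smaller than one BUFFER_SIZE read as a plain str and spills larger
# ones into a ZODB blob; a matching etag makes the update a no-op. The helper
# name and the payload/etag values below are made up.
def _body_example():
    body = Body()
    body.update(cStringIO.StringIO('small payload'), etag='"etag-1"')
    assert isinstance(body.data, str)  # below BUFFER_SIZE: stored inline
    body.update(cStringIO.StringIO('ignored'), etag='"etag-1"')  # same etag
    return body.open().read()  # 'small payload'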
class AccessTimes(object):
UPDATE_INTERVAL = NotImplemented
def __init__(self):
self._last_access_time = BTrees.family64.OI.BTree()
self._access_time_to_ids = BTrees.family32.IO.BTree()
def _update_cache_access(self, key):
last_access_time = self._last_access_time.get(key, 0)
new_access_time = self._get_time_key(time.time())
old_set = None
if last_access_time / 10e6 < 10e6:
# Ignore old access times. This is to allow an update w/o downtime.
old_set = self._access_time_to_ids.get(last_access_time)
try:
new_set = self._access_time_to_ids[new_access_time]
except KeyError:
new_set = self._access_time_to_ids[new_access_time] = (
BTrees.family32.OI.TreeSet())
if old_set != new_set:
if old_set is not None:
try:
old_set.remove(key)
except KeyError:
pass
new_set.insert(key)
self._last_access_time[key] = new_access_time
def sweep(self, cache_timeout=(7 * 24 * 3600)):
start = 0
timeout = self._get_time_key(time.time() - cache_timeout)
while True:
try:
access_time = iter(self._access_time_to_ids.keys(
min=start, max=timeout)).next()
id_set = []
# For reasons unknown we sometimes get "the bucket being
# iterated changed size" here, which according to
# http://mail.zope.org/pipermail/zodb-dev/2004-June/007452.html
# "can never happen". So we try to sweep all the IDs we can and
# leave the rest to rot, rather than get stuck on one broken
# access_time bucket and not be able to sweep anything after it
try:
for id in self._access_time_to_ids[access_time]:
id_set.append(id)
except RuntimeError:
pass
try:
i = 0
retries = 0
while i < len(id_set):
id = id_set[i]
i += 1
log.info('Evicting %s', id)
self._last_access_time.pop(id, None)
try:
self.remove(id)
except KeyError:
pass # already gone
if i % 100 == 0:
try:
log.info('Sub-commit')
transaction.commit()
except ZODB.POSException.ConflictError:
if retries == 3:
raise RuntimeError('Too many retries')
log.info('ConflictError, retrying')
transaction.abort()
retries += 1
i -= 100
self._access_time_to_ids.pop(access_time, None)
start = access_time
except:
start = access_time + 1
log.info('Abort %s', access_time, exc_info=True)
transaction.abort()
else:
log.info('Commit %s', access_time)
transaction.commit()
except StopIteration:
break
def _get_time_key(self, time):
return int(time / self.UPDATE_INTERVAL)
class ResourceCache(AccessTimes, persistent.Persistent):
"""Cache for ressource data."""
zope.interface.implements(zeit.connector.interfaces.IResourceCache)
UPDATE_INTERVAL = 24 * 3600
def __init__(self):
super(ResourceCache, self).__init__()
self._data = BTrees.family64.OO.BTree()
def getData(self, unique_id, properties):
key = get_storage_key(unique_id)
current_etag = properties[('getetag', 'DAV:')]
value = self._data.get(key)
if value is not None and not isinstance(value, Body):
if isinstance(value, str):
log.warning("Loaded str for %s" % unique_id)
raise KeyError(unique_id)
            # Legacy, make a temporary body
old_etags = getattr(self, '_etags', None)
etag = None
if old_etags is not None:
etag = old_etags.get(key)
if etag is None:
raise KeyError('No ETAG for legacy value.')
body = Body()
body.update(value.open('r'), etag)
value = body
if value is None or value.etag != current_etag:
raise KeyError(u"Object %r is not cached." % unique_id)
self._update_cache_access(key)
return value.open()
def setData(self, unique_id, properties, data):
key = get_storage_key(unique_id)
current_etag = properties.get(('getetag', 'DAV:'))
if current_etag is None:
# When we have no etag, we must not store the data as we have no
# means of invalidation then.
f = cStringIO.StringIO(data.read())
return f
log.debug('Storing body of %s with etag %s' % (
unique_id, current_etag))
# Reuse previously stored container
store = self._data.get(key)
if store is None or not isinstance(store, Body):
self._data[key] = store = Body()
store.update(data, current_etag)
self._update_cache_access(key)
return store.open()
def remove(self, unique_id):
self._data.pop(unique_id, None)
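# Illustrative sketch (not part of the original module): setData() only caches
# a body when the WebDAV properties carry an etag, and getData() only returns
# it while that etag still matches. The id, etag and body below are made up.
def _resource_cache_example():
    cache = ResourceCache()
    props = {('getetag', 'DAV:'): '"etag-1"'}
    f = cache.setData(u'http://xml.zeit.de/example', props,
                      cStringIO.StringIO('<body/>'))
    assert f.read() == '<body/>'
    return cache.getData(u'http://xml.zeit.de/example', props).read()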
class PersistentCache(AccessTimes, persistent.Persistent):
zope.interface.implements(zeit.connector.interfaces.IPersistentCache)
CACHE_VALUE_CLASS = None # Set in subclass
def __init__(self):
super(PersistentCache, self).__init__()
self._storage = BTrees.family32.OO.BTree()
def __getitem__(self, key):
skey = get_storage_key(key)
try:
value = self._storage[skey]
except KeyError:
raise KeyError(key)
if self._is_deleted(value):
raise KeyError(key)
self._update_cache_access(skey)
return value
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
key = get_storage_key(key)
value = self._storage.get(key, self)
self._update_cache_access(key)
return value is not self and not self._is_deleted(value)
def keys(self, include_deleted=False):
keys = self._storage.keys()
if include_deleted:
return keys
return (key for key in keys if key in self)
def __delitem__(self, key):
value = self._storage[get_storage_key(key)]
if isinstance(value, self.CACHE_VALUE_CLASS):
self._mark_deleted(value)
else:
self.remove(key)
def remove(self, key):
del self._storage[get_storage_key(key)]
def __setitem__(self, key, value):
skey = get_storage_key(key)
old_value = self._storage.get(skey)
if isinstance(old_value, self.CACHE_VALUE_CLASS):
self._set_value(old_value, value)
else:
value = self.CACHE_VALUE_CLASS(value)
self._storage[skey] = value
self._update_cache_access(skey)
def _is_deleted(self, value):
return zeit.connector.interfaces.DeleteProperty in value
def _set_value(self, old_value, new_value):
if self._cache_values_equal(old_value, new_value):
return
old_value.clear()
old_value.update(new_value)
class WebDAVPropertyKey(object):
__slots__ = ('name',)
_instances = {} # class variable
def __new__(cls, name):
instance = cls._instances.get(name)
if instance is None:
instance = cls._instances[name] = object.__new__(cls)
return instance
def __init__(self, name):
assert not zope.security.proxy.isinstance(name, WebDAVPropertyKey)
self.name = name
def __getitem__(self, idx):
return self.name.__getitem__(idx)
def __cmp__(self, other):
if zope.security.proxy.isinstance(other, WebDAVPropertyKey):
return cmp(self.name, other.name)
return cmp(self.name, other)
def __hash__(self):
assert not zope.security.proxy.isinstance(self.name, WebDAVPropertyKey)
return hash(self.name)
def __repr__(self):
return '<WebDAVPropertyKey %s>' % (self.name,)
def __reduce__(self):
return (WebDAVPropertyKey, (self.name,))
zope.testing.cleanup.addCleanUp(WebDAVPropertyKey._instances.clear)
class Properties(persistent.mapping.PersistentMapping):
cached_time = None
    def _p_resolveConflict(self, old, committed, newstate):
        if not (old.keys() ==
                committed.keys() ==
                newstate.keys() ==
                ['data']):
            # We can only resolve data.
            raise ZODB.POSException.ConflictError
        committed_data = committed['data']
        newstate_data = newstate['data'].copy()
        committed_data.pop(('cached-time', 'INTERNAL'), None)
        newstate_data.pop(('cached-time', 'INTERNAL'), None)
        if newstate_data == committed_data:
return newstate
# Completely invalidate cache entry when we cannot resolve.
old['data'] = {zeit.connector.interfaces.DeleteProperty: None}
return old
def __setitem__(self, key, value):
key = zope.security.proxy.removeSecurityProxy(key)
if (key is not zeit.connector.interfaces.DeleteProperty and
not isinstance(key, WebDAVPropertyKey)):
key = WebDAVPropertyKey(key)
super(Properties, self).__setitem__(key, value)
def update(self, dict=None, **kwargs):
if dict is None:
dict = {}
for key, value in dict.items() + kwargs.items():
self[key] = value
self._p_changed = True
def __repr__(self):
return object.__repr__(self)
class PropertyCache(PersistentCache):
"""Property cache."""
zope.interface.implements(zeit.connector.interfaces.IPropertyCache)
CACHE_VALUE_CLASS = Properties
UPDATE_INTERVAL = 24 * 3600
def _mark_deleted(self, value):
value.clear()
value[zeit.connector.interfaces.DeleteProperty] = None
@staticmethod
def _cache_values_equal(a, b):
return dict(a.items()) == dict(b.items())
class ChildNames(zc.set.Set):
def _p_resolveConflict(self, old, commited, newstate):
if commited == newstate:
return commited
old['_data'] = set([zeit.connector.interfaces.DeleteProperty])
return old
def __iter__(self):
return iter(sorted(super(ChildNames, self).__iter__()))
def __repr__(self):
return object.__repr__(self)
def insert(self, key):
# BTree sets have insert instead of add. Let's be greedy.
self.add(key)
class ChildNameCache(PersistentCache):
"""Cache for child names."""
zope.interface.implements(zeit.connector.interfaces.IChildNameCache)
CACHE_VALUE_CLASS = ChildNames
UPDATE_INTERVAL = 24 * 3600
def _mark_deleted(self, value):
value.clear()
value.add(zeit.connector.interfaces.DeleteProperty)
@staticmethod
def _cache_values_equal(a, b):
return set(a) == set(b)
class AlwaysEmptyDict(UserDict.DictMixin):
"""Used by mock connector to disable filesystem transaction bound cache."""
def __getitem__(self, key):
raise KeyError(key)
def __setitem__(self, key, value):
pass
def __delitem__(self, key):
pass
def keys(self):
return ()
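# Illustrative sketch (not part of the original module): PropertyCache keeps a
# Properties mapping per unique id; deleting an entry only marks it with
# DeleteProperty, so the tombstone can still take part in conflict resolution.
# The id and property value below are made up.
def _property_cache_example():
    unique_id = u'http://xml.zeit.de/example'
    cache = PropertyCache()
    cache[unique_id] = {('getetag', 'DAV:'): '"etag-1"'}
    assert cache[unique_id][('getetag', 'DAV:')] == '"etag-1"'
    del cache[unique_id]
    assert unique_id not in cache
    return cache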
|
|
# Author: Lars Buitinck <[email protected]>
# License: BSD 3 clause
from array import array
from collections import Mapping
from operator import itemgetter
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..externals import six
from ..externals.six.moves import xrange
from ..utils import atleast2d_or_csr, tosequence
def _tosequence(X):
"""Turn X into a sequence or ndarray, avoiding a copy if possible."""
if isinstance(X, Mapping): # single sample
return [X]
else:
return tosequence(X)
class DictVectorizer(BaseEstimator, TransformerMixin):
"""Transforms lists of feature-value mappings to vectors.
This transformer turns lists of mappings (dict-like objects) of feature
names to feature values into Numpy arrays or scipy.sparse matrices for use
with scikit-learn estimators.
When feature values are strings, this transformer will do a binary one-hot
(aka one-of-K) coding: one boolean-valued feature is constructed for each
of the possible string values that the feature can take on. For instance,
a feature "f" that can take on the values "ham" and "spam" will become two
features in the output, one signifying "f=ham", the other "f=spam".
Features that do not occur in a sample (mapping) will have a zero value
in the resulting array/matrix.
Parameters
----------
dtype : callable, optional
The type of feature values. Passed to Numpy array/scipy.sparse matrix
constructors as the dtype argument.
    separator : string, optional
        Separator string used when constructing new features for one-hot
        coding.
    sparse : boolean, optional
        Whether transform should produce scipy.sparse matrices.
        True by default.
Attributes
----------
`vocabulary_` : dict
A dictionary mapping feature names to feature indices.
`feature_names_` : list
A list of length n_features containing the feature names (e.g., "f=ham"
and "f=spam").
Examples
--------
>>> from sklearn.feature_extraction import DictVectorizer
>>> v = DictVectorizer(sparse=False)
>>> D = [{'foo': 1, 'bar': 2}, {'foo': 3, 'baz': 1}]
>>> X = v.fit_transform(D)
>>> X
array([[ 2., 0., 1.],
[ 0., 1., 3.]])
>>> v.inverse_transform(X) == \
[{'bar': 2.0, 'foo': 1.0}, {'baz': 1.0, 'foo': 3.0}]
True
>>> v.transform({'foo': 4, 'unseen_feature': 3})
array([[ 0., 0., 4.]])
See also
--------
FeatureHasher : performs vectorization using only a hash function.
sklearn.preprocessing.OneHotEncoder : handles nominal/categorical features
encoded as columns of integers.
"""
def __init__(self, dtype=np.float64, separator="=", sparse=True):
self.dtype = dtype
self.separator = separator
self.sparse = sparse
def fit(self, X, y=None):
"""Learn a list of feature name -> indices mappings.
Parameters
----------
X : Mapping or iterable over Mappings
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
self
"""
# collect all the possible feature names
feature_names = set()
for x in X:
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
feature_names.add(f)
# sort the feature names to define the mapping
feature_names = sorted(feature_names)
self.vocabulary_ = dict((f, i) for i, f in enumerate(feature_names))
self.feature_names_ = feature_names
return self
def fit_transform(self, X, y=None):
"""Learn a list of feature name -> indices mappings and transform X.
Like fit(X) followed by transform(X).
Parameters
----------
X : Mapping or iterable over Mappings
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
Xa : {array, sparse matrix}
Feature vectors; always 2-d.
Notes
-----
Because this method requires two passes over X, it materializes X in
memory.
"""
X = _tosequence(X)
self.fit(X)
return self.transform(X)
def inverse_transform(self, X, dict_type=dict):
"""Transform array or sparse matrix X back to feature mappings.
X must have been produced by this DictVectorizer's transform or
fit_transform method; it may only have passed through transformers
that preserve the number of features and their order.
In the case of one-hot/one-of-K coding, the constructed feature
names and values are returned rather than the original ones.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Sample matrix.
dict_type : callable, optional
Constructor for feature mappings. Must conform to the
collections.Mapping API.
Returns
-------
D : list of dict_type objects, length = n_samples
Feature mappings for the samples in X.
"""
X = atleast2d_or_csr(X) # COO matrix is not subscriptable
n_samples = X.shape[0]
names = self.feature_names_
dicts = [dict_type() for _ in xrange(n_samples)]
if sp.issparse(X):
for i, j in zip(*X.nonzero()):
dicts[i][names[j]] = X[i, j]
else:
for i, d in enumerate(dicts):
for j, v in enumerate(X[i, :]):
if v != 0:
d[names[j]] = X[i, j]
return dicts
def transform(self, X, y=None):
"""Transform feature->value dicts to array or sparse matrix.
Named features not encountered during fit or fit_transform will be
silently ignored.
Parameters
----------
X : Mapping or iterable over Mappings, length = n_samples
Dict(s) or Mapping(s) from feature names (arbitrary Python
objects) to feature values (strings or convertible to dtype).
y : (ignored)
Returns
-------
Xa : {array, sparse matrix}
Feature vectors; always 2-d.
"""
# Sanity check: Python's array has no way of explicitly requesting the
# signed 32-bit integers that scipy.sparse needs, so we use the next
# best thing: typecode "i" (int). However, if that gives larger or
# smaller integers than 32-bit ones, np.frombuffer screws up.
assert array("i").itemsize == 4, (
"sizeof(int) != 4 on your platform; please report this at"
" https://github.com/scikit-learn/scikit-learn/issues and"
" include the output from platform.platform() in your bug report")
dtype = self.dtype
vocab = self.vocabulary_
if self.sparse:
X = [X] if isinstance(X, Mapping) else X
indices = array("i")
indptr = array("i", [0])
# XXX we could change values to an array.array as well, but it
# would require (heuristic) conversion of dtype to typecode...
values = []
for x in X:
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
v = 1
try:
indices.append(vocab[f])
values.append(dtype(v))
except KeyError:
pass
indptr.append(len(indices))
if len(indptr) == 1:
raise ValueError("Sample sequence X is empty.")
if len(indices) > 0:
# workaround for bug in older NumPy:
# http://projects.scipy.org/numpy/ticket/1943
indices = np.frombuffer(indices, dtype=np.intc)
indptr = np.frombuffer(indptr, dtype=np.intc)
shape = (len(indptr) - 1, len(vocab))
return sp.csr_matrix((values, indices, indptr),
shape=shape, dtype=dtype)
else:
X = _tosequence(X)
Xa = np.zeros((len(X), len(vocab)), dtype=dtype)
for i, x in enumerate(X):
for f, v in six.iteritems(x):
if isinstance(v, six.string_types):
f = "%s%s%s" % (f, self.separator, v)
v = 1
try:
Xa[i, vocab[f]] = dtype(v)
except KeyError:
pass
return Xa
def get_feature_names(self):
"""Returns a list of feature names, ordered by their indices.
If one-of-K coding is applied to categorical features, this will
include the constructed feature names but not the original ones.
"""
return self.feature_names_
def restrict(self, support, indices=False):
"""Restrict the features to those in support.
Parameters
----------
support : array-like
Boolean mask or list of indices (as returned by the get_support
member of feature selectors).
indices : boolean, optional
Whether support is a list of indices.
"""
if not indices:
support = np.where(support)[0]
names = self.feature_names_
new_vocab = {}
for i in support:
new_vocab[names[i]] = len(new_vocab)
self.vocabulary_ = new_vocab
self.feature_names_ = [f for f, i in sorted(six.iteritems(new_vocab),
key=itemgetter(1))]
return self
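# Illustrative usage sketch (not part of the original module): restricting a
# fitted vectorizer with a boolean support mask such as the one returned by a
# feature selector's get_support(). The toy data below is made up.
def _restrict_example():
    v = DictVectorizer(sparse=False)
    D = [{'foo': 1, 'bar': 2}, {'foo': 3, 'baz': 1}]
    v.fit_transform(D)              # feature order: ['bar', 'baz', 'foo']
    support = np.array([True, False, True])
    v.restrict(support)             # drop 'baz'
    return v.get_feature_names()    # ['bar', 'foo']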
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import base64
from datetime import date, datetime, timedelta
import gzip
import hashlib
import hmac
import logging
import StringIO
from urllib import urlencode
from urlparse import urlparse, parse_qs
import pytz
log = logging.getLogger(__name__)
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"
class UnknownSkinCode(StandardError):
pass
class Backend(object):
merchant_account = None
skin_code = None
skin_secret = None
is_live = False
payment_flow = 'onepage'
def __init__(self, merchant_account, skin_code, skin_secret):
self.merchant_account = merchant_account
self.skin_code = skin_code
self.skin_secret = skin_secret
def get_skin_secret(self, skin_code):
if skin_code != self.skin_code:
raise UnknownSkinCode
return self.skin_secret
class BadSignatureError(Exception):
pass
class HostedPayment(object):
"""
A payment session that is to be submitted to Adyen.
All payment session fields mentioned in section 2.2 of the Integration
Manual are accessible in underscore syntax (skinCode -> skin_code).
Additionally the resURL field mentioned in section 2.4 of the Integration
Manual is accessible as res_url.
    If a field has no value, it is None; only optional fields may be unset.
    An optional field that is part of the signature calculation, however, is
    never None: it defaults to the empty string so that it still contributes
    to the signature.
    """
def __init__(self, backend, merchant_reference, payment_amount,
currency_code, is_live=False, **kwargs):
self.backend = backend
self.merchant_reference = merchant_reference
self.payment_amount = payment_amount
self.currency_code = currency_code
self.ship_before_date = timedelta(days=3)
self.shopper_locale = None
self.order_data = None
self.session_validity = timedelta(days=1)
self.merchant_return_data = ''
self.country_code = None
self.shopper_email = None
self.shopper_reference = None
self.recurring_contract = None
self.allowed_methods = ''
self.blocked_methods = ''
self.offset = None
self.brand_code = None
self.issuer_id = None
self.shopper_statement = ''
self.offer_email = None
self.res_url = None
for name, value in kwargs.items():
setattr(self, name, value)
@property
def skin_code(self):
return self.backend.skin_code
@property
def is_live(self):
return self.backend.is_live
@property
def merchant_account(self):
return self.backend.merchant_account
def get_redirect_url(self, force_multi=False):
params = {
'merchantReference': self.merchant_reference,
'paymentAmount': self.payment_amount,
'currencyCode': self.currency_code,
'shipBeforeDate': self.ship_before_date,
'skinCode': self.skin_code,
'merchantAccount': self.merchant_account,
'sessionValidity': self.session_validity,
'resURL': self.res_url
}
optional_params = {
'shopperLocale': self.shopper_locale,
'orderData': self._encode_order_data(self.order_data),
'merchantReturnData': self.merchant_return_data,
'countryCode': self.country_code,
'shopperEmail': self.shopper_email,
'shopperReference': self.shopper_reference,
'recurringContract': self.recurring_contract,
'allowedMethods': self.allowed_methods,
'blockedMethods': self.blocked_methods,
'offset': self.offset,
'brandCode': self.brand_code,
'issuerId': self.issuer_id,
'shopperStatement': self.shopper_statement,
'offerEmail': self.offer_email
}
for key, value in optional_params.items():
if value:
params[key] = value
if any(map(lambda x: x is None, params.values())):
raise ValueError("The parameter(s) {} may not be None."
.format(", ".join([k for k, v in params.items()
if v is None])))
params = {key: str(value) for (key, value) in params.items()}
params['merchantSig'] = self.get_setup_signature(
params, self.backend.get_skin_secret(self.backend.skin_code))
live_or_test = 'test'
if self.is_live:
live_or_test = 'live'
single_or_multi = 'select'
if self.backend.payment_flow == 'onepage' and not force_multi:
single_or_multi = 'pay'
return ('https://{live_or_test}.adyen.com/'
'hpp/{single_or_multi}.shtml?{params}'
.format(live_or_test=live_or_test,
single_or_multi=single_or_multi,
params=urlencode(params)))
@property
def ship_before_date(self):
return self._format_date(self._ship_before_date)
@ship_before_date.setter
def ship_before_date(self, value):
self._ship_before_date = value
@property
def session_validity(self):
return self._format_datetime(self._session_validity)
@session_validity.setter
def session_validity(self, value):
self._session_validity = value
@staticmethod
def _format_date(value):
if isinstance(value, timedelta):
value = date.today() + value
if not isinstance(value, date):
raise ValueError("value must be timedelta or date, got {}"
.format(value))
return value.isoformat()
@staticmethod
def _format_datetime(value):
if isinstance(value, timedelta):
value = datetime.utcnow() + value
if not isinstance(value, datetime):
raise ValueError("value must be timedelta or datetime, got {}"
.format(value))
if is_naive(value):
value = value.replace(tzinfo=pytz.utc)
value = value.replace(microsecond=0)
return value.isoformat()
@staticmethod
def _encode_order_data(value):
if not value:
return None
out = StringIO.StringIO()
with gzip.GzipFile(fileobj=out, mode="w") as f:
f.write(value)
return out.getvalue().encode('base64')
@staticmethod
def get_setup_signature(params, secret):
"""
Calculate the payment setup signature in base64
"""
keys = ['paymentAmount', 'currencyCode', 'shipBeforeDate',
'merchantReference', 'skinCode', 'merchantAccount',
'sessionValidity', 'shopperEmail', 'shopperReference',
'recurringContract', 'allowedMethods', 'blockedMethods',
'shopperStatement', 'merchantReturnData', 'billingAddressType',
'deliveryAddressType', 'shopperType', 'offset']
return _get_signature(keys, params, secret)
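# Illustrative sketch (not part of the original module): the setup signature is
# an HMAC-SHA1 over the concatenation of the signed field values (missing
# fields contribute the empty string), base64 encoded. All values below are
# made-up placeholders.
def _setup_signature_example():
    params = {
        'paymentAmount': '1000',
        'currencyCode': 'EUR',
        'shipBeforeDate': '2015-01-01',
        'merchantReference': 'order-1',
        'skinCode': 'abcd1234',
        'merchantAccount': 'TestMerchant',
        'sessionValidity': '2015-01-01T00:00:00+00:00',
    }
    return HostedPayment.get_setup_signature(params, b'dummy-secret')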
class HostedPaymentResult(object):
def __init__(self, params, backend):
log.debug('Received result:')
log.debug(params)
skin_secret = backend.get_skin_secret(params['skinCode'])
if (_get_result_signature(params, skin_secret)
!= params['merchantSig']):
raise BadSignatureError
self.auth_result = params['authResult']
self.psp_reference = params.get('pspReference')
self.merchant_reference = params['merchantReference']
self.skin_code = params['skinCode']
self.payment_method = params.get('paymentMethod')
self.shopper_locale = params['shopperLocale']
self.merchant_return_data = params.get('merchantReturnData')
@classmethod
def mock(cls, backend, url):
"""
Return the payment result url for a successful payment. Use this for
unit tests.
"""
url = urlparse(url)
query = parse_qs(url.query, keep_blank_values=True)
params = {
'authResult': 'AUTHORISED',
'pspReference': 'mockreference',
'merchantReference': query['merchantReference'][0],
'skinCode': query['skinCode'][0],
'paymentMethod': 'visa',
            'shopperLocale': query.get('shopperLocale', [''])[0],
            'merchantReturnData': query.get('merchantReturnData', [''])[0]
}
skin_secret = backend.get_skin_secret(query['skinCode'][0])
params['merchantSig'] = _get_result_signature(params, skin_secret)
return "{}?{}".format(query['resURL'][0], urlencode(params))
class HostedPaymentNotification(object):
def __init__(self, params):
log.debug('Received notification:')
log.debug(params)
# get a mutable copy
params = params.dict()
        live = params.pop('live')
        try:
            self.live = self._parse_boolean(live)
        except ValueError:
            raise ValueError("Invalid value for 'live': {}".format(live))
self.event_code = params.pop('eventCode')
self.psp_reference = params.pop('pspReference')
self.original_reference = params.pop('originalReference')
self.merchant_reference = params.pop('merchantReference')
self.merchant_account_code = params.pop('merchantAccountCode')
self.event_date = (datetime.strptime(params.pop('eventDate'),
DATETIME_FORMAT)
.replace(tzinfo=pytz.utc))
success = params.pop('success')
try:
self.success = self._parse_boolean(success)
except ValueError:
raise ValueError("Invalid value for 'success': {}".format(success))
self.payment_method = params.pop('paymentMethod')
self.operations = params.pop('operations')
self.reason = params.pop('reason')
self.value = int(params.pop('value'))
self.currency = params.pop('currency')
self.additional_params = None
if params:
self.additional_params = params
@staticmethod
def _parse_boolean(value):
"""
Turn a string value of "true" or "false" into the corresponding boolean
value. Raise ValueError if the string is something else.
"""
if value == 'true':
return True
elif value == 'false':
return False
else:
raise ValueError
def _get_result_signature(params, secret):
"""
Calculate the payment result signature in base64
"""
keys = ['authResult', 'pspReference', 'merchantReference', 'skinCode',
'merchantReturnData']
return _get_signature(keys, params, secret)
def _get_signature(keys, params, secret):
if not secret:
        # The implementation needs a non-None key, but for mock testing an
        # empty key is sufficient, so treat None as ''.
secret = ''.encode('utf-8')
plaintext = "".join(map(lambda v: '' if v is None else v,
(params.get(key, '') for key in keys)))
hm = hmac.new(secret, plaintext, hashlib.sha1)
return base64.encodestring(hm.digest()).strip()
def is_naive(value):
"""
Determines if a given datetime.datetime is naive.
The logic is described in Python's docs:
http://docs.python.org/library/datetime.html#datetime.tzinfo
"""
# copied from Django
return value.tzinfo is None or value.tzinfo.utcoffset(value) is None
def is_old_browser(user_agent):
if not user_agent:
return False
return "MSIE 8" in user_agent or "MSIE 9" in user_agent
|
|
"""
Automated Theorem Prover within Forseti
"""
# pylint: disable=fixme
from copy import deepcopy
import time
from forseti.formula import Formula, Not, And, Or, Skolem, Herbrand, Predicate
from forseti import converter, parser
import forseti.util as util
from forseti import exceptions
class Prover(object):
"""
Prover class
"""
def __init__(self, timeout=30):
self._cnf_list = []
self.parents = []
self.formulas = []
self.goals = []
self._goals = []
self.proof_found = False
self.proof_time = 0
self.proof_timeout = timeout
def add_formula(self, statement):
"""
:param statement:
:return:
"""
if isinstance(statement, str):
statement = parser.parse(statement)
self._add_statement(deepcopy(statement), self.formulas)
def add_goal(self, statement):
"""
:param statement:
:return:
"""
statement = parser.parse(statement)
self._add_statement(deepcopy(statement), self.goals)
self._add_statement(Not(deepcopy(statement)), self._goals)
@staticmethod
def _add_statement(statement, add_to_list):
"""
:param statement:
:param add_to_list:
:param additional: additional operator to apply onto statement
:return:
"""
statement = parser.parse(statement)
statement = converter.convert_formula(statement)
add_to_list.append(statement)
def run_prover(self):
"""
:return:
"""
self.parents = []
self.proof_found = False
if len(self.goals) == 0:
# TODO: give this a better exception class
raise Exception("You need at least one goal!")
for formula in self.formulas:
converts = self._convert_formula(formula)
for convert in converts:
self._cnf_list += [convert]
for goal in self._goals:
converts = self._convert_formula(goal)
for convert in converts:
self._cnf_list += [convert]
self._tautology_elimination()
for i in range(len(self._cnf_list)):
self._cnf_list[i] = sorted(self._cnf_list[i])
for i in range(len(self._cnf_list)):
self.parents.append([])
self.proof_found, self.proof_time = self._resolve()
return self.proof_found
@staticmethod
def _convert_formula(formula):
"""
Converts a CNF formula into lists for resolution.
Ex:
and(a,b) -> [[a], [b]]
or(a,b) -> [[a,b]]
and(or(a,b),c) -> [[a,b],[c]]
:type formula: LogicalOperator or Symbol
"""
cnf_list = [[formula]]
all_checked = False
break_from = False
while not all_checked:
all_checked = True
for i in range(len(cnf_list)):
for j in range(len(cnf_list[i])):
statement = cnf_list[i][j]
if isinstance(statement, And) or isinstance(statement, Or):
if isinstance(statement, And):
cnf_list.insert(i + 1, [statement.args[0]])
cnf_list.insert(i + 2, [statement.args[1]])
elif isinstance(statement, Or):
cnf_list[i].insert(j + 1, statement.args[0])
cnf_list[i].insert(j + 2, statement.args[1])
cnf_list[i].pop(j)
if len(cnf_list[i]) == 0:
cnf_list.pop(i)
break_from = True
all_checked = False
break
if break_from is True:
break_from = False
break
for i in range(len(cnf_list)):
j = 0
while j < len(cnf_list[i]):
if cnf_list[i][j] in cnf_list[i][j + 1:]:
cnf_list[i].pop(j)
j -= 1
j += 1
return cnf_list
def _resolve(self):
"""
:return:
"""
start_time = time.time()
i = 0
checked = list()
while i < len(self._cnf_list):
if (time.time() - start_time) > self.proof_timeout:
raise exceptions.TimeoutException('Proof timeout reached')
j = i + 1
while j < len(self._cnf_list):
if [i, j] in checked:
j += 1
continue
checked.append([i, j])
have_resolve = False
for k in range(len(self._cnf_list[i])):
atomic = self._cnf_list[i][k]
negation = util.negate_formula(atomic)
try:
resolve = Prover._get_index(self._cnf_list[j], negation)
cnf_list1 = deepcopy(self._cnf_list[i])
cnf_list2 = deepcopy(self._cnf_list[j])
if resolve[1] >= 1:
Prover._remove_herbrand(resolve, cnf_list1, cnf_list2, k)
ind = resolve[0]
new_cnf = cnf_list1[:k]
new_cnf += cnf_list1[(k + 1):]
for cnf in cnf_list2[:ind]:
if cnf not in new_cnf:
new_cnf.append(cnf)
for cnf in cnf_list2[(ind + 1):]:
if cnf not in new_cnf:
new_cnf.append(cnf)
new_cnf.sort()
if len(new_cnf) == 0:
self._cnf_list.append([])
self.parents.append([i, j])
return True, time.time() - start_time
if not util.is_tautology(new_cnf) and \
new_cnf not in self._cnf_list:
have_resolve = True
self._cnf_list.append(new_cnf)
self.parents.append([i, j])
checked.append([i, len(self._cnf_list) - 1])
checked.append([j, len(self._cnf_list) - 1])
except ValueError:
pass
if have_resolve is True:
i = -1
break
j += 1
i += 1
return False, time.time() - start_time
def _tautology_elimination(self):
"""
Remove any CNF tautologies
:return:
"""
i = 0
while i < len(self._cnf_list):
cnf = self._cnf_list[i]
if util.is_tautology(cnf):
self._cnf_list.pop(i)
i -= 1
i += 1
@staticmethod
def _get_index(cnf_list, negation):
for i in range(len(cnf_list)):
element = cnf_list[i]
run = Prover._check_element(element, negation, [False, 0, None, None])
if run[0] is True:
return [i, run[1], run[2], run[3]]
raise ValueError
@staticmethod
def _check_element(element, negation, unify):
if isinstance(element, type(negation)) and not isinstance(element, Herbrand):
if isinstance(element, str) and isinstance(negation, str):
                return [element == negation, 0, None, None]
elif isinstance(element, Formula) and isinstance(negation, Formula):
if isinstance(element, Predicate) and isinstance(negation, Predicate):
if len(element.args) != len(negation.args) or element.name != negation.name:
return [False, 0, None, None]
elif isinstance(element, Skolem) and isinstance(negation, Skolem):
if element.skole_count != negation.skole_count:
return [False, 0, None, None]
ret = [True, 0, None, None]
for i in range(len(element.args)):
r = Prover._check_element(element.args[i], negation.args[i], ret)
if r[0] is True and r[1] > 0:
ret = [True, r[1], r[2], r[3]]
elif not r[0]:
return [False, 0, None, None]
return ret
# elif isinstance(element, Skolem) or isinstance(negation, Skolem):
# return True
elif isinstance(element, Herbrand):
if unify[1] == 2:
return [False, 0, None, None]
elif unify[1] == 1 and (unify[2] != element or unify[3] != negation):
return [False, 0, None, None]
return [True, 1, deepcopy(element), deepcopy(negation)]
elif isinstance(negation, Herbrand):
if unify[1] == 1:
return [False, 0, None, None]
elif unify[1] == 2 and (unify[2] != element or unify[3] != negation):
return [False, 0, None, None]
return [True, 2, deepcopy(element), deepcopy(negation)]
else:
return [False, 0, None, None]
@staticmethod
def _remove_herbrand(resolve, cnf_list1, cnf_list2, k):
if resolve[1] == 2:
assert isinstance(resolve[3], Herbrand)
for elem in range(len(cnf_list1)):
if elem == k:
continue
cnf_list1[elem] = Prover._replace_herbrand(
cnf_list1[elem], resolve[3], resolve[2])
elif resolve[1] == 1:
assert isinstance(resolve[2], Herbrand)
for elem in range(len(cnf_list2)):
cnf_list2[elem] = Prover._replace_herbrand(
cnf_list2[elem], resolve[2], resolve[3])
@staticmethod
def _replace_herbrand(element, replace, replacement):
if isinstance(element, str):
return element
assert isinstance(element, Formula)
assert isinstance(replace, Herbrand)
assert isinstance(replacement, Formula)
if isinstance(element, Herbrand):
if replace.herbrand_count == element.herbrand_count:
element = deepcopy(replacement)
for i in range(len(element.args)):
if isinstance(element.args[i], Herbrand):
if replace.herbrand_count == element.args[i].herbrand_count:
element.args[i] = deepcopy(replacement)
else:
element.args[i] = Prover._replace_herbrand(element.args[i],
replace,
replacement)
return element
def get_proof(self):
if self.proof_found is False:
return ["No Proof"]
proof = []
proof_parents = [self.parents[-1]]
proof_formulas = {len(self.parents) - 1}
while True:
if len(proof_parents) > 0:
x = proof_parents.pop(0)
for i in x:
proof_parents.append(self.parents[i])
proof_formulas.add(i)
else:
break
proof_formulas = sorted(proof_formulas)
for i in proof_formulas:
extra = "Assumption"
if len(self.parents[i]) > 0:
extra = "resolve(" + ",".join([str(y+1) for y in self.parents[i]]) + ")"
cnf = str(util.cnf_list_as_disjunction(self._cnf_list[i]))
proof.append(((str(i+1) + ")").rjust(4) + " " + cnf.ljust(70) + " " + extra))
return proof
if __name__ == "__main__":
prover = Prover()
prover.add_formula("exists(x,forall(y,A(x,y)))")
prover.add_goal("exists(x,A(x,a))")
print(prover.run_prover())
print("\n".join(prover.get_proof()))
|
|
#!/usr/bin/env python
# Copyright 2012 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This is a list of all the functions that are not auto-generated.
# It contains all the metadata that describes each function.
templated_type_symbol_map = {
'bool' : 'b',
'int8_t' : 'a',
'int16_t' : 's',
'int32_t' : 'i',
'int64_t' : 'l',
'float' : 'f',
'double' : 'd',
'string' : 'NS_11StringValueE',
'timestamp' : 'NS_14TimestampValueE'
}
# Generates the BE symbol for the Compute Function class_name::fn_name<templated_type>.
# Does not handle varargs.
# TODO: this is a stopgap. ComputeFunctions are being removed and we can use the
# symbol lookup code in the BE.
def symbol(class_name, fn_name, templated_type = None):
sym = '_ZN6impala'
sym += str(len(class_name)) + class_name
sym += str(len(fn_name)) + fn_name
  if templated_type is None:
sym += 'EPNS_4ExprEPNS_8TupleRowE'
else:
sym += 'I'
sym += templated_type_symbol_map[templated_type]
sym += 'EEPvPNS_4ExprEPNS_8TupleRowE'
return sym
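# Illustrative sketch (not part of the original file): the mangled symbols the
# helper above produces for a plain and a templated compute function.
def _symbol_examples():
  assert symbol('MathFunctions', 'Pi') == \
      '_ZN6impala13MathFunctions2PiEPNS_4ExprEPNS_8TupleRowE'
  assert symbol('MathFunctions', 'Abs', 'double') == \
      '_ZN6impala13MathFunctions3AbsIdEEPvPNS_4ExprEPNS_8TupleRowE'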
# The format is:
# [sql aliases], <return_type>, [<args>], <backend symbol>,
# With an optional
# <prepare symbol>, <close symbol>
#
# 'sql aliases' are the function names that can be used from sql. There must be at least
# one per function.
#
# The symbol can be empty for functions that are not yet implemented or are special-cased
# in Expr::CreateExpr() (i.e., functions that are implemented via a custom Expr class
# rather than a single function).
functions = [
[['udf_pi'], 'DOUBLE', [], 'impala::UdfBuiltins::Pi'],
[['udf_abs'], 'DOUBLE', ['DOUBLE'], 'impala::UdfBuiltins::Abs'],
[['udf_lower'], 'STRING', ['STRING'], 'impala::UdfBuiltins::Lower'],
[['max_int'], 'INT', [],
'_ZN6impala11UdfBuiltins6MaxIntEPN10impala_udf15FunctionContextE'],
[['max_tinyint'], 'TINYINT', [],
'_ZN6impala11UdfBuiltins10MaxTinyIntEPN10impala_udf15FunctionContextE'],
[['max_smallint'], 'SMALLINT', [],
'_ZN6impala11UdfBuiltins11MaxSmallIntEPN10impala_udf15FunctionContextE'],
[['max_bigint'], 'BIGINT', [],
'_ZN6impala11UdfBuiltins9MaxBigIntEPN10impala_udf15FunctionContextE'],
[['min_int'], 'INT', [],
'_ZN6impala11UdfBuiltins6MinIntEPN10impala_udf15FunctionContextE'],
[['min_tinyint'], 'TINYINT', [],
'_ZN6impala11UdfBuiltins10MinTinyIntEPN10impala_udf15FunctionContextE'],
[['min_smallint'], 'SMALLINT', [],
'_ZN6impala11UdfBuiltins11MinSmallIntEPN10impala_udf15FunctionContextE'],
[['min_bigint'], 'BIGINT', [],
'_ZN6impala11UdfBuiltins9MinBigIntEPN10impala_udf15FunctionContextE'],
[['is_nan'], 'BOOLEAN', ['DOUBLE'],
'_ZN6impala11UdfBuiltins5IsNanEPN10impala_udf15FunctionContextERKNS1_9DoubleValE'],
[['is_inf'], 'BOOLEAN', ['DOUBLE'],
'_ZN6impala11UdfBuiltins5IsInfEPN10impala_udf15FunctionContextERKNS1_9DoubleValE'],
[['trunc'], 'TIMESTAMP', ['TIMESTAMP', 'STRING'],
'_ZN6impala11UdfBuiltins5TruncEPN10impala_udf15FunctionContextERKNS1_12TimestampValERKNS1_9StringValE',
'_ZN6impala11UdfBuiltins12TruncPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala11UdfBuiltins10TruncCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
# Don't add an entry for EXTRACT(STRING, TIMESTAMP). STRINGs may be used to represent
# TIMESTAMPs meaning EXTRACT(STRING, STRING) is valid. If EXTRACT(STRING, TIMESTAMP)
# is added, it takes precedence over the existing EXTRACT(TIMESTAMP, STRING)
# which could break users.
[['extract'], 'INT', ['TIMESTAMP', 'STRING'],
'_ZN6impala11UdfBuiltins7ExtractEPN10impala_udf15FunctionContextERKNS1_12TimestampValERKNS1_9StringValE',
'_ZN6impala11UdfBuiltins21SwappedExtractPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala11UdfBuiltins12ExtractCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['date_part'], 'INT', ['STRING', 'TIMESTAMP'],
'_ZN6impala11UdfBuiltins7ExtractEPN10impala_udf15FunctionContextERKNS1_9StringValERKNS1_12TimestampValE',
'_ZN6impala11UdfBuiltins14ExtractPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala11UdfBuiltins12ExtractCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['madlib_encode_vector'], 'STRING', ['STRING'],
'_ZN6impala11UdfBuiltins12EncodeVectorEPN10impala_udf15FunctionContextERKNS1_9StringValE'],
[['madlib_decode_vector'], 'STRING', ['STRING'],
'_ZN6impala11UdfBuiltins12DecodeVectorEPN10impala_udf15FunctionContextERKNS1_9StringValE'],
[['madlib_print_vector'], 'STRING', ['STRING'],
'_ZN6impala11UdfBuiltins11PrintVectorEPN10impala_udf15FunctionContextERKNS1_9StringValE'],
[['madlib_vector'], 'STRING', ['DOUBLE', '...'],
'_ZN6impala11UdfBuiltins8ToVectorEPN10impala_udf15FunctionContextEiPKNS1_9DoubleValE'],
[['madlib_vector_get'], 'DOUBLE', ['BIGINT', 'STRING'],
'_ZN6impala11UdfBuiltins9VectorGetEPN10impala_udf15FunctionContextERKNS1_9BigIntValERKNS1_9StringValE'],
# Timestamp functions
[['unix_timestamp'], 'INT', ['STRING'], '_ZN6impala18TimestampFunctions14UnixFromStringEPN10impala_udf15FunctionContextERKNS1_9StringValE'],
[['year'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions4YearEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['month'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions5MonthEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['dayofweek'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions9DayOfWeekEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['day', 'dayofmonth'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions10DayOfMonthEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['dayofyear'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions9DayOfYearEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['weekofyear'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions10WeekOfYearEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['hour'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions4HourEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['minute'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions6MinuteEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['second'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions6SecondEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['to_date'], 'STRING', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions6ToDateEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['dayname'], 'STRING', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions7DayNameEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['years_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf6IntValEN5boost9date_time14years_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['years_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf9BigIntValEN5boost9date_time14years_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['years_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf6IntValEN5boost9date_time14years_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['years_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf9BigIntValEN5boost9date_time14years_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['months_add', 'add_months'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf6IntValEN5boost9date_time15months_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['months_add', 'add_months'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf9BigIntValEN5boost9date_time15months_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['months_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf6IntValEN5boost9date_time15months_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['months_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf9BigIntValEN5boost9date_time15months_durationINS4_9gregorian21greg_durations_configEEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['weeks_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf6IntValEN5boost9gregorian14weeks_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['weeks_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf9BigIntValEN5boost9gregorian14weeks_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['weeks_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf6IntValEN5boost9gregorian14weeks_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['weeks_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf9BigIntValEN5boost9gregorian14weeks_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['days_add', 'date_add', 'adddate'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf6IntValEN5boost9gregorian13date_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['days_add', 'date_add', 'adddate'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb1EN10impala_udf9BigIntValEN5boost9gregorian13date_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['days_sub', 'date_sub', 'subdate'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf6IntValEN5boost9gregorian13date_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['days_sub', 'date_sub', 'subdate'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10DateAddSubILb0EN10impala_udf9BigIntValEN5boost9gregorian13date_durationEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['hours_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf6IntValEN5boost10posix_time5hoursEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['hours_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf9BigIntValEN5boost10posix_time5hoursEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['hours_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf6IntValEN5boost10posix_time5hoursEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['hours_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf9BigIntValEN5boost10posix_time5hoursEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['minutes_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf6IntValEN5boost10posix_time7minutesEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['minutes_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf9BigIntValEN5boost10posix_time7minutesEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['minutes_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf6IntValEN5boost10posix_time7minutesEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['minutes_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf9BigIntValEN5boost10posix_time7minutesEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['seconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf6IntValEN5boost10posix_time7secondsEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['seconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf9BigIntValEN5boost10posix_time7secondsEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['seconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf6IntValEN5boost10posix_time7secondsEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['seconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf9BigIntValEN5boost10posix_time7secondsEEENS2_12TimestampValEPNS2_15FunctionContextERKS7_RKT0_'],
[['milliseconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf6IntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['milliseconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf9BigIntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['milliseconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf6IntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['milliseconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf9BigIntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['microseconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf6IntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['microseconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf9BigIntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['microseconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf6IntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['microseconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf9BigIntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['nanoseconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf6IntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['nanoseconds_add'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb1EN10impala_udf9BigIntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['nanoseconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'INT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf6IntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['nanoseconds_sub'], 'TIMESTAMP', ['TIMESTAMP', 'BIGINT'],
'_ZN6impala18TimestampFunctions10TimeAddSubILb0EN10impala_udf9BigIntValEN5boost9date_time18subsecond_durationINS4_10posix_time13time_durationELl1000000000EEEEENS2_12TimestampValEPNS2_15FunctionContextERKSA_RKT0_'],
[['datediff'], 'INT', ['TIMESTAMP', 'TIMESTAMP'], '_ZN6impala18TimestampFunctions8DateDiffEPN10impala_udf15FunctionContextERKNS1_12TimestampValES6_'],
[['unix_timestamp'], 'INT', [], '_ZN6impala18TimestampFunctions4UnixEPN10impala_udf15FunctionContextE'],
[['unix_timestamp'], 'INT', ['TIMESTAMP'], '_ZN6impala18TimestampFunctions4UnixEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['unix_timestamp'], 'INT', ['STRING', 'STRING'], '_ZN6impala18TimestampFunctions4UnixEPN10impala_udf15FunctionContextERKNS1_9StringValES6_',
'_ZN6impala18TimestampFunctions22UnixAndFromUnixPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala18TimestampFunctions20UnixAndFromUnixCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['from_unixtime'], 'STRING', ['INT'],
'_ZN6impala18TimestampFunctions8FromUnixIN10impala_udf6IntValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['from_unixtime'], 'STRING', ['INT', 'STRING'],
'_ZN6impala18TimestampFunctions8FromUnixIN10impala_udf6IntValEEENS2_9StringValEPNS2_15FunctionContextERKT_RKS4_',
'_ZN6impala18TimestampFunctions22UnixAndFromUnixPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala18TimestampFunctions20UnixAndFromUnixCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['from_unixtime'], 'STRING', ['BIGINT'],
'_ZN6impala18TimestampFunctions8FromUnixIN10impala_udf9BigIntValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['from_unixtime'], 'STRING', ['BIGINT', 'STRING'],
'_ZN6impala18TimestampFunctions8FromUnixIN10impala_udf9BigIntValEEENS2_9StringValEPNS2_15FunctionContextERKT_RKS4_',
'_ZN6impala18TimestampFunctions22UnixAndFromUnixPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala18TimestampFunctions20UnixAndFromUnixCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['now', 'current_timestamp'], 'TIMESTAMP', [], '_ZN6impala18TimestampFunctions3NowEPN10impala_udf15FunctionContextE'],
[['from_utc_timestamp'], 'TIMESTAMP', ['TIMESTAMP', 'STRING'],
"impala::TimestampFunctions::FromUtc"],
[['to_utc_timestamp'], 'TIMESTAMP', ['TIMESTAMP', 'STRING'],
"impala::TimestampFunctions::ToUtc"],
# Math builtin functions
[['pi'], 'DOUBLE', [], 'impala::MathFunctions::Pi'],
[['e'], 'DOUBLE', [], 'impala::MathFunctions::E'],
[['abs'], 'BIGINT', ['BIGINT'], 'impala::MathFunctions::Abs'],
[['abs'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Abs'],
[['abs'], 'FLOAT', ['FLOAT'], 'impala::MathFunctions::Abs'],
[['abs'], 'INT', ['INT'], 'impala::MathFunctions::Abs'],
[['abs'], 'SMALLINT', ['SMALLINT'], 'impala::MathFunctions::Abs'],
[['abs'], 'TINYINT', ['TINYINT'], 'impala::MathFunctions::Abs'],
[['sign'], 'FLOAT', ['DOUBLE'], 'impala::MathFunctions::Sign'],
[['sin'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Sin'],
[['asin'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Asin'],
[['cos'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Cos'],
[['acos'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Acos'],
[['tan'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Tan'],
[['atan'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Atan'],
[['radians'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Radians'],
[['degrees'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Degrees'],
[['ceil', 'ceiling'], 'BIGINT', ['DOUBLE'], 'impala::MathFunctions::Ceil'],
[['floor'], 'BIGINT', ['DOUBLE'], 'impala::MathFunctions::Floor'],
[['round'], 'BIGINT', ['DOUBLE'], 'impala::MathFunctions::Round'],
[['round'], 'DOUBLE', ['DOUBLE', 'INT'], 'impala::MathFunctions::RoundUpTo'],
[['exp'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Exp'],
[['ln'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Ln'],
[['log10'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Log10'],
[['log2'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Log2'],
[['log'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], 'impala::MathFunctions::Log'],
[['pow', 'power'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], 'impala::MathFunctions::Pow'],
[['sqrt'], 'DOUBLE', ['DOUBLE'], 'impala::MathFunctions::Sqrt'],
[['rand'], 'DOUBLE', [], 'impala::MathFunctions::Rand',
'_ZN6impala13MathFunctions11RandPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['rand'], 'DOUBLE', ['BIGINT'], 'impala::MathFunctions::RandSeed',
'_ZN6impala13MathFunctions11RandPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['bin'], 'STRING', ['BIGINT'], 'impala::MathFunctions::Bin'],
[['hex'], 'STRING', ['BIGINT'], 'impala::MathFunctions::HexInt'],
[['hex'], 'STRING', ['STRING'], 'impala::MathFunctions::HexString'],
[['unhex'], 'STRING', ['STRING'], 'impala::MathFunctions::Unhex'],
[['conv'], 'STRING', ['BIGINT', 'TINYINT', 'TINYINT'],
'impala::MathFunctions::ConvInt'],
[['conv'], 'STRING', ['STRING', 'TINYINT', 'TINYINT'],
'impala::MathFunctions::ConvString'],
[['pmod'], 'BIGINT', ['BIGINT', 'BIGINT'], 'impala::MathFunctions::PmodBigInt'],
[['pmod'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], 'impala::MathFunctions::PmodDouble'],
[['fmod'], 'FLOAT', ['FLOAT', 'FLOAT'], 'impala::MathFunctions::FmodFloat'],
[['fmod'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], 'impala::MathFunctions::FmodDouble'],
[['positive'], 'TINYINT', ['TINYINT'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf10TinyIntValEEET_PNS2_15FunctionContextERKS4_'],
[['positive'], 'SMALLINT', ['SMALLINT'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf11SmallIntValEEET_PNS2_15FunctionContextERKS4_'],
[['positive'], 'INT', ['INT'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf6IntValEEET_PNS2_15FunctionContextERKS4_'],
[['positive'], 'BIGINT', ['BIGINT'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf9BigIntValEEET_PNS2_15FunctionContextERKS4_'],
[['positive'], 'FLOAT', ['FLOAT'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf8FloatValEEET_PNS2_15FunctionContextERKS4_'],
[['positive'], 'DOUBLE', ['DOUBLE'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf9DoubleValEEET_PNS2_15FunctionContextERKS4_'],
[['positive'], 'DECIMAL', ['DECIMAL'],
'_ZN6impala13MathFunctions8PositiveIN10impala_udf10DecimalValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'TINYINT', ['TINYINT'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf10TinyIntValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'SMALLINT', ['SMALLINT'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf11SmallIntValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'INT', ['INT'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf6IntValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'BIGINT', ['BIGINT'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf9BigIntValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'FLOAT', ['FLOAT'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf8FloatValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'DOUBLE', ['DOUBLE'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf9DoubleValEEET_PNS2_15FunctionContextERKS4_'],
[['negative'], 'DECIMAL', ['DECIMAL'],
'_ZN6impala13MathFunctions8NegativeIN10impala_udf10DecimalValEEET_PNS2_15FunctionContextERKS4_'],
[['quotient'], 'BIGINT', ['BIGINT', 'BIGINT'],
'impala::MathFunctions::QuotientBigInt'],
[['quotient'], 'BIGINT', ['DOUBLE', 'DOUBLE'],
'impala::MathFunctions::QuotientDouble'],
[['least'], 'TINYINT', ['TINYINT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf10TinyIntValELb1EEET_PNS2_15FunctionContextEiPKS4_'],
[['least'], 'SMALLINT', ['SMALLINT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf11SmallIntValELb1EEET_PNS2_15FunctionContextEiPKS4_'],
[['least'], 'INT', ['INT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf6IntValELb1EEET_PNS2_15FunctionContextEiPKS4_'],
[['least'], 'BIGINT', ['BIGINT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf9BigIntValELb1EEET_PNS2_15FunctionContextEiPKS4_'],
[['least'], 'FLOAT', ['FLOAT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf8FloatValELb1EEET_PNS2_15FunctionContextEiPKS4_'],
[['least'], 'DOUBLE', ['DOUBLE', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf9DoubleValELb1EEET_PNS2_15FunctionContextEiPKS4_'],
[['least'], 'TIMESTAMP', ['TIMESTAMP', '...'],
'_ZN6impala13MathFunctions13LeastGreatestILb1EEEN10impala_udf12TimestampValEPNS2_15FunctionContextEiPKS3_'],
[['least'], 'STRING', ['STRING', '...'],
'_ZN6impala13MathFunctions13LeastGreatestILb1EEEN10impala_udf9StringValEPNS2_15FunctionContextEiPKS3_'],
[['least'], 'DECIMAL', ['DECIMAL', '...'],
'_ZN6impala13MathFunctions13LeastGreatestILb1EEEN10impala_udf10DecimalValEPNS2_15FunctionContextEiPKS3_'],
[['greatest'], 'TINYINT', ['TINYINT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf10TinyIntValELb0EEET_PNS2_15FunctionContextEiPKS4_'],
[['greatest'], 'SMALLINT', ['SMALLINT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf11SmallIntValELb0EEET_PNS2_15FunctionContextEiPKS4_'],
[['greatest'], 'INT', ['INT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf6IntValELb0EEET_PNS2_15FunctionContextEiPKS4_'],
[['greatest'], 'BIGINT', ['BIGINT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf9BigIntValELb0EEET_PNS2_15FunctionContextEiPKS4_'],
[['greatest'], 'FLOAT', ['FLOAT', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf8FloatValELb0EEET_PNS2_15FunctionContextEiPKS4_'],
[['greatest'], 'DOUBLE', ['DOUBLE', '...'],
'_ZN6impala13MathFunctions13LeastGreatestIN10impala_udf9DoubleValELb0EEET_PNS2_15FunctionContextEiPKS4_'],
[['greatest'], 'TIMESTAMP', ['TIMESTAMP', '...'],
'_ZN6impala13MathFunctions13LeastGreatestILb0EEEN10impala_udf12TimestampValEPNS2_15FunctionContextEiPKS3_'],
[['greatest'], 'STRING', ['STRING', '...'],
'_ZN6impala13MathFunctions13LeastGreatestILb0EEEN10impala_udf9StringValEPNS2_15FunctionContextEiPKS3_'],
[['greatest'], 'DECIMAL', ['DECIMAL', '...'],
'_ZN6impala13MathFunctions13LeastGreatestILb0EEEN10impala_udf10DecimalValEPNS2_15FunctionContextEiPKS3_'],
# Decimal Functions
  # TODO: Oracle has decimal support for transcendental functions (e.g. sin()) to very
  # high precision. Do we need them? It's unclear whether other databases do the same.
[['precision'], 'INT', ['DECIMAL'], 'impala::DecimalFunctions::Precision'],
[['scale'], 'INT', ['DECIMAL'], 'impala::DecimalFunctions::Scale'],
[['abs'], 'DECIMAL', ['DECIMAL'], 'impala::DecimalFunctions::Abs'],
[['ceil', 'ceiling'], 'DECIMAL', ['DECIMAL'], 'impala::DecimalFunctions::Ceil'],
[['floor'], 'DECIMAL', ['DECIMAL'], 'impala::DecimalFunctions::Floor'],
[['round'], 'DECIMAL', ['DECIMAL'], 'impala::DecimalFunctions::Round'],
[['round'], 'DECIMAL', ['DECIMAL', 'TINYINT'], 'impala::DecimalFunctions::RoundTo'],
[['round'], 'DECIMAL', ['DECIMAL', 'SMALLINT'], 'impala::DecimalFunctions::RoundTo'],
[['round'], 'DECIMAL', ['DECIMAL', 'INT'], 'impala::DecimalFunctions::RoundTo'],
[['round'], 'DECIMAL', ['DECIMAL', 'BIGINT'], 'impala::DecimalFunctions::RoundTo'],
[['truncate'], 'DECIMAL', ['DECIMAL'], 'impala::DecimalFunctions::Truncate'],
[['truncate'], 'DECIMAL', ['DECIMAL', 'TINYINT'],
'impala::DecimalFunctions::TruncateTo'],
[['truncate'], 'DECIMAL', ['DECIMAL', 'SMALLINT'],
'impala::DecimalFunctions::TruncateTo'],
[['truncate'], 'DECIMAL', ['DECIMAL', 'INT'],
'impala::DecimalFunctions::TruncateTo'],
[['truncate'], 'DECIMAL', ['DECIMAL', 'BIGINT'],
'impala::DecimalFunctions::TruncateTo'],
# String builtin functions
[['substr', 'substring'], 'STRING', ['STRING', 'BIGINT'],
'impala::StringFunctions::Substring'],
[['substr', 'substring'], 'STRING', ['STRING', 'BIGINT', 'BIGINT'],
'impala::StringFunctions::Substring'],
  # left and right are keywords, so leave them out for now.
[['strleft'], 'STRING', ['STRING', 'BIGINT'], 'impala::StringFunctions::Left'],
[['strright'], 'STRING', ['STRING', 'BIGINT'], 'impala::StringFunctions::Right'],
[['space'], 'STRING', ['BIGINT'], 'impala::StringFunctions::Space'],
[['repeat'], 'STRING', ['STRING', 'BIGINT'], 'impala::StringFunctions::Repeat'],
[['lpad'], 'STRING', ['STRING', 'BIGINT', 'STRING'], 'impala::StringFunctions::Lpad'],
[['rpad'], 'STRING', ['STRING', 'BIGINT', 'STRING'], 'impala::StringFunctions::Rpad'],
[['length'], 'INT', ['STRING'], 'impala::StringFunctions::Length'],
[['length'], 'INT', ['CHAR'], 'impala::StringFunctions::CharLength'],
[['char_length'], 'INT', ['STRING'], 'impala::StringFunctions::Length'],
[['character_length'], 'INT', ['STRING'], 'impala::StringFunctions::Length'],
[['lower', 'lcase'], 'STRING', ['STRING'], 'impala::StringFunctions::Lower'],
[['upper', 'ucase'], 'STRING', ['STRING'], 'impala::StringFunctions::Upper'],
[['initcap'], 'STRING', ['STRING'], 'impala::StringFunctions::InitCap'],
[['reverse'], 'STRING', ['STRING'], 'impala::StringFunctions::Reverse'],
[['translate'], 'STRING', ['STRING', 'STRING', 'STRING'],
'impala::StringFunctions::Translate'],
[['trim'], 'STRING', ['STRING'], 'impala::StringFunctions::Trim'],
[['ltrim'], 'STRING', ['STRING'], 'impala::StringFunctions::Ltrim'],
[['rtrim'], 'STRING', ['STRING'], 'impala::StringFunctions::Rtrim'],
[['ascii'], 'INT', ['STRING'], 'impala::StringFunctions::Ascii'],
[['instr'], 'INT', ['STRING', 'STRING'], 'impala::StringFunctions::Instr'],
[['locate'], 'INT', ['STRING', 'STRING'], 'impala::StringFunctions::Locate'],
[['locate'], 'INT', ['STRING', 'STRING', 'BIGINT'],
'impala::StringFunctions::LocatePos'],
[['regexp_extract'], 'STRING', ['STRING', 'STRING', 'BIGINT'],
'impala::StringFunctions::RegexpExtract',
'_ZN6impala15StringFunctions13RegexpPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala15StringFunctions11RegexpCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['regexp_replace'], 'STRING', ['STRING', 'STRING', 'STRING'],
'impala::StringFunctions::RegexpReplace',
'_ZN6impala15StringFunctions13RegexpPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala15StringFunctions11RegexpCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['concat'], 'STRING', ['STRING', '...'], 'impala::StringFunctions::Concat'],
[['concat_ws'], 'STRING', ['STRING', 'STRING', '...'],
'impala::StringFunctions::ConcatWs'],
[['find_in_set'], 'INT', ['STRING', 'STRING'], 'impala::StringFunctions::FindInSet'],
[['parse_url'], 'STRING', ['STRING', 'STRING'], 'impala::StringFunctions::ParseUrl',
'_ZN6impala15StringFunctions15ParseUrlPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala15StringFunctions13ParseUrlCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
[['parse_url'], 'STRING', ['STRING', 'STRING', 'STRING'], 'impala::StringFunctions::ParseUrlKey',
'_ZN6impala15StringFunctions15ParseUrlPrepareEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE',
'_ZN6impala15StringFunctions13ParseUrlCloseEPN10impala_udf15FunctionContextENS2_18FunctionStateScopeE'],
# Conditional Functions
# Some of these have empty symbols because the BE special-cases them based on the
# function name
[['if'], 'BOOLEAN', ['BOOLEAN', 'BOOLEAN', 'BOOLEAN'], ''],
[['if'], 'TINYINT', ['BOOLEAN', 'TINYINT', 'TINYINT'], ''],
[['if'], 'SMALLINT', ['BOOLEAN', 'SMALLINT', 'SMALLINT'], ''],
[['if'], 'INT', ['BOOLEAN', 'INT', 'INT'], ''],
[['if'], 'BIGINT', ['BOOLEAN', 'BIGINT', 'BIGINT'], ''],
[['if'], 'FLOAT', ['BOOLEAN', 'FLOAT', 'FLOAT'], ''],
[['if'], 'DOUBLE', ['BOOLEAN', 'DOUBLE', 'DOUBLE'], ''],
[['if'], 'STRING', ['BOOLEAN', 'STRING', 'STRING'], ''],
[['if'], 'TIMESTAMP', ['BOOLEAN', 'TIMESTAMP', 'TIMESTAMP'], ''],
[['if'], 'DECIMAL', ['BOOLEAN', 'DECIMAL', 'DECIMAL'], ''],
[['nullif'], 'BOOLEAN', ['BOOLEAN', 'BOOLEAN'], ''],
[['nullif'], 'TINYINT', ['TINYINT', 'TINYINT'], ''],
[['nullif'], 'SMALLINT', ['SMALLINT', 'SMALLINT'], ''],
[['nullif'], 'INT', ['INT', 'INT'], ''],
[['nullif'], 'BIGINT', ['BIGINT', 'BIGINT'], ''],
[['nullif'], 'FLOAT', ['FLOAT', 'FLOAT'], ''],
[['nullif'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], ''],
[['nullif'], 'STRING', ['STRING', 'STRING'], ''],
[['nullif'], 'TIMESTAMP', ['TIMESTAMP', 'TIMESTAMP'], ''],
[['nullif'], 'DECIMAL', ['DECIMAL', 'DECIMAL'], ''],
[['zeroifnull'], 'TINYINT', ['TINYINT'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['zeroifnull'], 'SMALLINT', ['SMALLINT'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['zeroifnull'], 'INT', ['INT'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['zeroifnull'], 'BIGINT', ['BIGINT'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['zeroifnull'], 'FLOAT', ['FLOAT'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['zeroifnull'], 'DOUBLE', ['DOUBLE'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['zeroifnull'], 'DECIMAL', ['DECIMAL'], 'impala::ConditionalFunctions::ZeroIfNull'],
[['nullifzero'], 'TINYINT', ['TINYINT'], 'impala::ConditionalFunctions::NullIfZero'],
[['nullifzero'], 'SMALLINT', ['SMALLINT'], 'impala::ConditionalFunctions::NullIfZero'],
[['nullifzero'], 'INT', ['INT'], 'impala::ConditionalFunctions::NullIfZero'],
[['nullifzero'], 'BIGINT', ['BIGINT'], 'impala::ConditionalFunctions::NullIfZero'],
[['nullifzero'], 'FLOAT', ['FLOAT'], 'impala::ConditionalFunctions::NullIfZero'],
[['nullifzero'], 'DOUBLE', ['DOUBLE'], 'impala::ConditionalFunctions::NullIfZero'],
[['nullifzero'], 'DECIMAL', ['DECIMAL'], 'impala::ConditionalFunctions::NullIfZero'],
[['isnull', 'ifnull', 'nvl'], 'BOOLEAN', ['BOOLEAN', 'BOOLEAN'], ''],
[['isnull', 'ifnull', 'nvl'], 'TINYINT', ['TINYINT', 'TINYINT'], ''],
[['isnull', 'ifnull', 'nvl'], 'SMALLINT', ['SMALLINT', 'SMALLINT'], ''],
[['isnull', 'ifnull', 'nvl'], 'INT', ['INT', 'INT'], ''],
[['isnull', 'ifnull', 'nvl'], 'BIGINT', ['BIGINT', 'BIGINT'], ''],
[['isnull', 'ifnull', 'nvl'], 'FLOAT', ['FLOAT', 'FLOAT'], ''],
[['isnull', 'ifnull', 'nvl'], 'DOUBLE', ['DOUBLE', 'DOUBLE'], ''],
[['isnull', 'ifnull', 'nvl'], 'STRING', ['STRING', 'STRING'], ''],
[['isnull', 'ifnull', 'nvl'], 'TIMESTAMP', ['TIMESTAMP', 'TIMESTAMP'], ''],
[['isnull', 'ifnull', 'nvl'], 'DECIMAL', ['DECIMAL', 'DECIMAL'], ''],
[['coalesce'], 'BOOLEAN', ['BOOLEAN', '...'], ''],
[['coalesce'], 'TINYINT', ['TINYINT', '...'], ''],
[['coalesce'], 'SMALLINT', ['SMALLINT', '...'], ''],
[['coalesce'], 'INT', ['INT', '...'], ''],
[['coalesce'], 'BIGINT', ['BIGINT', '...'], ''],
[['coalesce'], 'FLOAT', ['FLOAT', '...'], ''],
[['coalesce'], 'DOUBLE', ['DOUBLE', '...'], ''],
[['coalesce'], 'STRING', ['STRING', '...'], ''],
[['coalesce'], 'TIMESTAMP', ['TIMESTAMP', '...'], ''],
[['coalesce'], 'DECIMAL', ['DECIMAL', '...'], ''],
# Utility functions
[['current_database'], 'STRING', [], 'impala::UtilityFunctions::CurrentDatabase'],
[['user'], 'STRING', [], 'impala::UtilityFunctions::User'],
[['sleep'], 'BOOLEAN', ['INT'], 'impala::UtilityFunctions::Sleep'],
[['pid'], 'INT', [], 'impala::UtilityFunctions::Pid'],
[['version'], 'STRING', [], 'impala::UtilityFunctions::Version'],
[['typeOf'], 'STRING', ['BOOLEAN'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf10BooleanValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['TINYINT'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf10TinyIntValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['SMALLINT'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf11SmallIntValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['INT'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf6IntValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['BIGINT'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf9BigIntValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['FLOAT'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf8FloatValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['DOUBLE'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf9DoubleValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['CHAR'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf9StringValEEES3_PNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['VARCHAR'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf9StringValEEES3_PNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['STRING'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf9StringValEEES3_PNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['TIMESTAMP'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf12TimestampValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['typeOf'], 'STRING', ['DECIMAL'], '_ZN6impala16UtilityFunctions6TypeOfIN10impala_udf10DecimalValEEENS2_9StringValEPNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['TINYINT'],
'_ZN6impala16UtilityFunctions7FnvHashIN10impala_udf10TinyIntValEEENS2_9BigIntValEPNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['SMALLINT'],
'_ZN6impala16UtilityFunctions7FnvHashIN10impala_udf11SmallIntValEEENS2_9BigIntValEPNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['INT'],
'_ZN6impala16UtilityFunctions7FnvHashIN10impala_udf6IntValEEENS2_9BigIntValEPNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['BIGINT'],
'_ZN6impala16UtilityFunctions7FnvHashIN10impala_udf9BigIntValEEES3_PNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['FLOAT'],
'_ZN6impala16UtilityFunctions7FnvHashIN10impala_udf8FloatValEEENS2_9BigIntValEPNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['DOUBLE'],
'_ZN6impala16UtilityFunctions7FnvHashIN10impala_udf9DoubleValEEENS2_9BigIntValEPNS2_15FunctionContextERKT_'],
[['fnv_hash'], 'BIGINT', ['STRING'],
'_ZN6impala16UtilityFunctions13FnvHashStringEPN10impala_udf15FunctionContextERKNS1_9StringValE'],
[['fnv_hash'], 'BIGINT', ['TIMESTAMP'],
'_ZN6impala16UtilityFunctions16FnvHashTimestampEPN10impala_udf15FunctionContextERKNS1_12TimestampValE'],
[['fnv_hash'], 'BIGINT', ['DECIMAL'],
'_ZN6impala16UtilityFunctions14FnvHashDecimalEPN10impala_udf15FunctionContextERKNS1_10DecimalValE'],
]
|
|
from decimal import Decimal
from django.utils.translation import gettext_lazy as _
from django.utils import timezone
from django.conf import settings as django_settings
from oscar.core.loading import get_class, get_model
from oscarapicheckout.methods import PaymentMethod, PaymentMethodSerializer
from oscarapicheckout.states import FormPostRequired, Complete, Declined
from oscarapicheckout import utils
from rest_framework import serializers
from suds import sudsobject
from cybersource.cybersoap import CyberSourceSoap
from .constants import CHECKOUT_FINGERPRINT_SESSION_ID, DECISION_ACCEPT, DECISION_REVIEW
from .models import PaymentToken, CyberSourceReply
from . import actions, signals, settings
import dateutil.parser
import logging
logger = logging.getLogger(__name__)
Transaction = get_model("payment", "Transaction")
InvalidOrderStatus = get_class("order.exceptions", "InvalidOrderStatus")
OrderNote = get_model("order", "OrderNote")
def create_order_note(order, msg):
return OrderNote.objects.create(
note_type=OrderNote.SYSTEM, order=order, message=msg
)
def create_review_order_note(order, transaction_id):
"""If an order is under review, add a note explaining why"""
msg = _(
"Transaction %(transaction_id)s is currently under review. Use Decision Manager to either accept or reject the transaction."
) % dict(transaction_id=transaction_id)
create_order_note(order, msg)
def log_order_exception(order_number, order_status, reply_log_entry):
logger.exception(
"Failed to set Order %s to payment declined. Order is current in status %s. Examine CyberSourceReply[%s]",
order_number,
order_status,
reply_log_entry.pk,
)
def mark_declined(order, request, method_key, reply_log_entry):
amount = Decimal(request.data.get("req_amount", "0.00"))
try:
utils.mark_payment_method_declined(order, request, method_key, amount)
except InvalidOrderStatus:
log_order_exception(order.number, order.status, reply_log_entry)
def sudsobj_to_dict(sudsobj, key_prefix=""):
"""Convert Suds object into a flattened dictionary"""
out = {}
# Handle lists
if isinstance(sudsobj, list):
for i, child in enumerate(sudsobj):
child_key = "{}[{}]".format(key_prefix, i)
out.update(sudsobj_to_dict(child, key_prefix=child_key))
return out
# Handle Primitives
if not hasattr(sudsobj, "__keylist__"):
out[key_prefix] = sudsobj
return out
# Handle Suds Objects
for parent_key, parent_val in sudsobject.asdict(sudsobj).items():
full_parent_key = (
"{}.{}".format(key_prefix, parent_key) if key_prefix else parent_key
)
out.update(sudsobj_to_dict(parent_val, key_prefix=full_parent_key))
return out
class Cybersource(PaymentMethod):
"""
    This is an example of how to implement a payment method that requires some
    off-site interaction, such as Cybersource Secure Acceptance. It initially
    returns a pending status that requires the client app to make a form post,
    which in turn redirects back to us. This is a common pattern for PCI SAQ A-EP
    ecommerce sites.
"""
name = settings.SOURCE_TYPE
code = "cybersource"
serializer_class = PaymentMethodSerializer
def _record_payment(self, request, order, method_key, amount, reference, **kwargs):
"""Payment Step 1: Require form POST to Cybersource"""
# Allow application to include extra, arbitrary fields in the request to CS
extra_fields = {}
signals.pre_build_get_token_request.send(
sender=self.__class__,
extra_fields=extra_fields,
request=request,
order=order,
method_key=method_key,
)
# Build the data for CyberSource transaction
session_id = request.COOKIES.get(django_settings.SESSION_COOKIE_NAME)
operation = actions.CreatePaymentToken(
session_id=session_id,
order=order,
method_key=method_key,
amount=amount,
server_hostname=request.META.get("HTTP_HOST", ""),
customer_ip_address=request.META["REMOTE_ADDR"],
fingerprint_session_id=request.session.get(CHECKOUT_FINGERPRINT_SESSION_ID),
extra_fields=extra_fields,
)
# Return form fields to the browser. The browser then needs to fill in the blank
# fields (like billing data) and submit them as a POST to CyberSource.
url, fields = self._fields(operation)
# Return a response showing we need to post some fields to the given
        # URL to finish processing this payment method
return FormPostRequired(amount=amount, name="get-token", url=url, fields=fields)
def record_created_payment_token(self, reply_log_entry, data):
"""Payment Step 2: Record the generated payment token and require authorization using the token."""
token_string = data.get("payment_token")
card_num = data.get("req_card_number")
card_type = data.get("req_card_type")
try:
token = PaymentToken.objects.filter(token=token_string).get()
except PaymentToken.DoesNotExist:
token = PaymentToken(
log=reply_log_entry,
token=token_string,
masked_card_number=card_num,
card_type=card_type,
)
token.save()
return token, None
def _fields(self, operation):
"""Helper to convert an operation object into a list of fields and a URL"""
fields = []
cs_fields = operation.fields()
editable_fields = cs_fields["unsigned_field_names"].split(",")
for key, value in cs_fields.items():
fields.append(
{
"key": key,
"value": value if isinstance(value, str) else value.decode(),
"editable": (key in editable_fields),
}
)
return operation.url, fields
class BluefinPaymentMethodSerializer(PaymentMethodSerializer):
payment_data = serializers.CharField(max_length=256)
def validate(self, data):
if "payment_data" not in data:
raise serializers.ValidationError(_("Missing encrypted payment data."))
return super().validate(data)
class Bluefin(PaymentMethod):
name = settings.SOURCE_TYPE
code = "bluefin"
serializer_class = BluefinPaymentMethodSerializer
@staticmethod
def log_soap_response(request, order, response, card_expiry_date=None):
reply_data = sudsobj_to_dict(response)
log = CyberSourceReply(
user=request.user if request.user.is_authenticated else None,
order=order,
reply_type=CyberSourceReply.REPLY_TYPE_SOAP,
data=reply_data,
decision=response.decision,
message=None,
reason_code=response.reasonCode,
req_bill_to_address_postal_code=order.billing_address.postcode,
req_bill_to_forename=order.billing_address.first_name,
req_bill_to_surname=order.billing_address.last_name,
req_card_expiry_date=card_expiry_date,
req_reference_number=response.merchantReferenceCode
if "merchantReferenceCode" in response
else None,
req_transaction_type=(
"create_payment_token"
if "paySubscriptionCreateReply" in response
else "authorization"
),
req_transaction_uuid=None,
request_token=response.requestToken,
transaction_id=response.requestID,
)
# These fields may or may not be present
cc_auth_reply = getattr(response, "ccAuthReply", None)
if cc_auth_reply:
log.auth_code = getattr(cc_auth_reply, "authorizationCode", None)
log.auth_response = getattr(cc_auth_reply, "processorResponse", None)
log.auth_trans_ref_no = getattr(cc_auth_reply, "reconciliationID", None)
log.auth_avs_code = getattr(cc_auth_reply, "avsCode", None)
# Save and return log object
log.save()
return log
@staticmethod
def lookup_token_details(request, order, payment_token):
cs = CyberSourceSoap(
settings.CYBERSOURCE_WSDL,
settings.MERCHANT_ID,
settings.CYBERSOURCE_SOAP_KEY,
request,
order,
"",
)
return cs.lookup_payment_token(payment_token)
def record_created_payment_token(self, request, order, reply_log_entry, response):
token_string = response.paySubscriptionCreateReply.subscriptionID
# Lookup more details about the token
token_details = self.__class__.lookup_token_details(
request, order, token_string
)
if token_details is None:
return None, None
# Create the payment token
if not PaymentToken.objects.filter(token=token_string).exists():
token = PaymentToken(
log=reply_log_entry,
token=token_string,
masked_card_number=token_details.paySubscriptionRetrieveReply.cardAccountNumber,
card_type=token_details.paySubscriptionRetrieveReply.cardType,
)
token.save()
return token, token_details
def record_successful_authorization(
self, reply_log_entry, order, token_string, response
):
decision = reply_log_entry.get_decision()
transaction_id = response.requestID
request_token = response.requestToken
signed_date_time = response.ccAuthReply.authorizedDateTime
req_amount = Decimal(response.ccAuthReply.amount)
# assuming these are equal since authorization succeeded
auth_amount = req_amount
source = self.get_source(order, transaction_id)
try:
token = PaymentToken.objects.get(token=token_string)
except PaymentToken.DoesNotExist:
return Declined(req_amount, source_id=source.pk)
source.amount_allocated += auth_amount
source.save()
transaction = Transaction()
transaction.log = reply_log_entry
transaction.source = source
transaction.token = token
transaction.txn_type = Transaction.AUTHORISE
transaction.amount = req_amount
transaction.reference = transaction_id
transaction.status = decision
transaction.request_token = request_token
transaction.processed_datetime = dateutil.parser.parse(signed_date_time)
transaction.save()
event = self.make_authorize_event(order, auth_amount)
for line in order.lines.all():
self.make_event_quantity(event, line, line.quantity)
return Complete(source.amount_allocated, source_id=source.pk)
def record_declined_authorization(
self, reply_log_entry, order, token_string, response, amount
):
decision = reply_log_entry.get_decision()
transaction_id = response.requestID
request_token = response.requestToken
signed_date_time = str(timezone.now()) # not available in response.ccAuthReply
req_amount = amount # not available in response.ccAuthReply
source = self.get_source(order, transaction_id)
transaction = Transaction()
transaction.log = reply_log_entry
transaction.source = source
transaction.token = PaymentToken.objects.filter(token=token_string).first()
transaction.txn_type = Transaction.AUTHORISE
transaction.amount = req_amount
transaction.reference = transaction_id
transaction.status = decision
transaction.request_token = request_token
transaction.processed_datetime = dateutil.parser.parse(signed_date_time)
transaction.save()
return Declined(req_amount, source_id=source.pk)
def _record_payment(self, request, order, method_key, amount, reference, **kwargs):
"""This is the entry point from django-oscar-api-checkout"""
payment_data = kwargs.get("payment_data")
if payment_data is None:
return Declined(amount)
return self.record_bluefin_payment(
request, order, method_key, amount, payment_data
)
def record_bluefin_payment(self, request, order, method_key, amount, payment_data):
# Allow application to include extra, arbitrary fields in the request to CS
extra_fields = {}
signals.pre_build_get_token_request.send(
sender=self.__class__,
extra_fields=extra_fields,
request=request,
order=order,
method_key=method_key,
)
cs = CyberSourceSoap(
settings.CYBERSOURCE_WSDL,
settings.MERCHANT_ID,
settings.CYBERSOURCE_SOAP_KEY,
request,
order,
method_key,
)
# Get token via SOAP
response = cs.get_token_encrypted(payment_data)
reply_log_entry = self.__class__.log_soap_response(request, order, response)
# If token creation was not successful, return declined.
if response.decision != DECISION_ACCEPT:
return Declined(amount)
# Record the new payment token
token, token_details = self.record_created_payment_token(
request, order, reply_log_entry, response
)
if token is None or token_details is None:
return Declined(amount)
expiry_date = "{month}-{year}".format(
month=token_details.paySubscriptionRetrieveReply.cardExpirationMonth,
year=token_details.paySubscriptionRetrieveReply.cardExpirationYear,
)
reply_log_entry.req_card_expiry_date = expiry_date
reply_log_entry.save()
# Attempt to authorize payment
response = cs.authorize_encrypted(payment_data, amount)
reply_log_entry = self.__class__.log_soap_response(
request, order, response, card_expiry_date=expiry_date
)
# If authorization was not successful, log it and redirect to the failed page.
if response.decision not in (DECISION_ACCEPT, DECISION_REVIEW):
new_state = self.record_declined_authorization(
reply_log_entry, order, token.token, response, amount
)
return new_state
        # Authorization was successful! Log it and update the order state
new_state = self.record_successful_authorization(
reply_log_entry, order, token.token, response
)
if response.decision == DECISION_REVIEW:
create_review_order_note(order, response.requestID)
return new_state
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the Bernoulli distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import numpy as np
from tensorflow.python.eager import backprop
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops.distributions import bernoulli
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
def try_import(name): # pylint: disable=invalid-name
module = None
try:
module = importlib.import_module(name)
except ImportError as e:
tf_logging.warning("Could not import %s: %s" % (name, str(e)))
return module
special = try_import("scipy.special")
def make_bernoulli(batch_shape, dtype=dtypes.int32):
p = np.random.uniform(size=list(batch_shape))
p = constant_op.constant(p, dtype=dtypes.float32)
return bernoulli.Bernoulli(probs=p, dtype=dtype)
def entropy(p):
q = 1. - p
return -q * np.log(q) - p * np.log(p)
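# entropy() above is the reference Bernoulli entropy,
# H(p) = -p*log(p) - (1 - p)*log(1 - p); the tests below compare
# dist.entropy() against it.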
class BernoulliTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes
def testP(self):
p = [0.2, 0.4]
dist = bernoulli.Bernoulli(probs=p)
self.assertAllClose(p, self.evaluate(dist.probs))
@test_util.run_in_graph_and_eager_modes
def testLogits(self):
logits = [-42., 42.]
dist = bernoulli.Bernoulli(logits=logits)
self.assertAllClose(logits, self.evaluate(dist.logits))
if not special:
return
self.assertAllClose(special.expit(logits), self.evaluate(dist.probs))
p = [0.01, 0.99, 0.42]
dist = bernoulli.Bernoulli(probs=p)
self.assertAllClose(special.logit(p), self.evaluate(dist.logits))
@test_util.run_in_graph_and_eager_modes
def testInvalidP(self):
invalid_ps = [1.01, 2.]
for p in invalid_ps:
with self.assertRaisesOpError("probs has components greater than 1"):
dist = bernoulli.Bernoulli(probs=p, validate_args=True)
self.evaluate(dist.probs)
invalid_ps = [-0.01, -3.]
for p in invalid_ps:
with self.assertRaisesOpError("Condition x >= 0"):
dist = bernoulli.Bernoulli(probs=p, validate_args=True)
self.evaluate(dist.probs)
valid_ps = [0.0, 0.5, 1.0]
for p in valid_ps:
dist = bernoulli.Bernoulli(probs=p)
self.assertEqual(p, self.evaluate(dist.probs)) # Should not fail
@test_util.run_in_graph_and_eager_modes
def testShapes(self):
for batch_shape in ([], [1], [2, 3, 4]):
dist = make_bernoulli(batch_shape)
self.assertAllEqual(batch_shape, dist.batch_shape.as_list())
self.assertAllEqual(batch_shape, self.evaluate(dist.batch_shape_tensor()))
self.assertAllEqual([], dist.event_shape.as_list())
self.assertAllEqual([], self.evaluate(dist.event_shape_tensor()))
@test_util.run_in_graph_and_eager_modes
def testDtype(self):
dist = make_bernoulli([])
self.assertEqual(dist.dtype, dtypes.int32)
self.assertEqual(dist.dtype, dist.sample(5).dtype)
self.assertEqual(dist.dtype, dist.mode().dtype)
self.assertEqual(dist.probs.dtype, dist.mean().dtype)
self.assertEqual(dist.probs.dtype, dist.variance().dtype)
self.assertEqual(dist.probs.dtype, dist.stddev().dtype)
self.assertEqual(dist.probs.dtype, dist.entropy().dtype)
self.assertEqual(dist.probs.dtype, dist.prob(0).dtype)
self.assertEqual(dist.probs.dtype, dist.prob(0.5).dtype)
self.assertEqual(dist.probs.dtype, dist.log_prob(0).dtype)
self.assertEqual(dist.probs.dtype, dist.log_prob(0.5).dtype)
dist64 = make_bernoulli([], dtypes.int64)
self.assertEqual(dist64.dtype, dtypes.int64)
self.assertEqual(dist64.dtype, dist64.sample(5).dtype)
self.assertEqual(dist64.dtype, dist64.mode().dtype)
@test_util.run_in_graph_and_eager_modes
def _testPmf(self, **kwargs):
dist = bernoulli.Bernoulli(**kwargs)
# pylint: disable=bad-continuation
xs = [
0,
[1],
[1, 0],
[[1, 0]],
[[1, 0], [1, 1]],
]
expected_pmfs = [
[[0.8, 0.6], [0.7, 0.4]],
[[0.2, 0.4], [0.3, 0.6]],
[[0.2, 0.6], [0.3, 0.4]],
[[0.2, 0.6], [0.3, 0.4]],
[[0.2, 0.6], [0.3, 0.6]],
]
# pylint: enable=bad-continuation
for x, expected_pmf in zip(xs, expected_pmfs):
self.assertAllClose(self.evaluate(dist.prob(x)), expected_pmf)
self.assertAllClose(self.evaluate(dist.log_prob(x)), np.log(expected_pmf))
@test_util.run_deprecated_v1
def testPmfCorrectBroadcastDynamicShape(self):
with self.cached_session():
p = array_ops.placeholder(dtype=dtypes.float32)
dist = bernoulli.Bernoulli(probs=p)
event1 = [1, 0, 1]
event2 = [[1, 0, 1]]
self.assertAllClose(
dist.prob(event1).eval({
p: [0.2, 0.3, 0.4]
}), [0.2, 0.7, 0.4])
self.assertAllClose(
dist.prob(event2).eval({
p: [0.2, 0.3, 0.4]
}), [[0.2, 0.7, 0.4]])
@test_util.run_in_graph_and_eager_modes
@test_util.run_deprecated_v1
def testPmfInvalid(self):
p = [0.1, 0.2, 0.7]
dist = bernoulli.Bernoulli(probs=p, validate_args=True)
with self.assertRaisesOpError("must be non-negative."):
self.evaluate(dist.prob([1, 1, -1]))
with self.assertRaisesOpError("Elements cannot exceed 1."):
self.evaluate(dist.prob([2, 0, 1]))
@test_util.run_in_graph_and_eager_modes
def testPmfWithP(self):
p = [[0.2, 0.4], [0.3, 0.6]]
self._testPmf(probs=p)
if not special:
return
self._testPmf(logits=special.logit(p))
@test_util.run_in_graph_and_eager_modes
def testPmfWithFloatArgReturnsXEntropy(self):
p = [[0.2], [0.4], [0.3], [0.6]]
samps = [0, 0.1, 0.8]
self.assertAllClose(
np.float32(samps) * np.log(np.float32(p)) +
(1 - np.float32(samps)) * np.log(1 - np.float32(p)),
self.evaluate(
bernoulli.Bernoulli(probs=p, validate_args=False).log_prob(samps)))
@test_util.run_deprecated_v1
def testBroadcasting(self):
with self.cached_session():
p = array_ops.placeholder(dtypes.float32)
dist = bernoulli.Bernoulli(probs=p)
self.assertAllClose(np.log(0.5), dist.log_prob(1).eval({p: 0.5}))
self.assertAllClose(
np.log([0.5, 0.5, 0.5]), dist.log_prob([1, 1, 1]).eval({
p: 0.5
}))
self.assertAllClose(
np.log([0.5, 0.5, 0.5]), dist.log_prob(1).eval({
p: [0.5, 0.5, 0.5]
}))
@test_util.run_deprecated_v1
def testPmfShapes(self):
with self.cached_session():
p = array_ops.placeholder(dtypes.float32, shape=[None, 1])
dist = bernoulli.Bernoulli(probs=p)
self.assertEqual(2, len(dist.log_prob(1).eval({p: [[0.5], [0.5]]}).shape))
dist = bernoulli.Bernoulli(probs=0.5)
self.assertEqual(2, len(self.evaluate(dist.log_prob([[1], [1]])).shape))
dist = bernoulli.Bernoulli(probs=0.5)
self.assertEqual((), dist.log_prob(1).get_shape())
      self.assertEqual((1,), dist.log_prob([1]).get_shape())
self.assertEqual((2, 1), dist.log_prob([[1], [1]]).get_shape())
dist = bernoulli.Bernoulli(probs=[[0.5], [0.5]])
self.assertEqual((2, 1), dist.log_prob(1).get_shape())
@test_util.run_in_graph_and_eager_modes
def testBoundaryConditions(self):
dist = bernoulli.Bernoulli(probs=1.0)
self.assertAllClose(np.nan, self.evaluate(dist.log_prob(0)))
self.assertAllClose([np.nan], [self.evaluate(dist.log_prob(1))])
@test_util.run_in_graph_and_eager_modes
def testEntropyNoBatch(self):
p = 0.2
dist = bernoulli.Bernoulli(probs=p)
self.assertAllClose(self.evaluate(dist.entropy()), entropy(p))
@test_util.run_in_graph_and_eager_modes
def testEntropyWithBatch(self):
p = [[0.1, 0.7], [0.2, 0.6]]
dist = bernoulli.Bernoulli(probs=p, validate_args=False)
self.assertAllClose(
self.evaluate(dist.entropy()),
[[entropy(0.1), entropy(0.7)], [entropy(0.2),
entropy(0.6)]])
@test_util.run_in_graph_and_eager_modes
def testSampleN(self):
p = [0.2, 0.6]
dist = bernoulli.Bernoulli(probs=p)
n = 100000
samples = dist.sample(n)
samples.set_shape([n, 2])
self.assertEqual(samples.dtype, dtypes.int32)
sample_values = self.evaluate(samples)
self.assertTrue(np.all(sample_values >= 0))
self.assertTrue(np.all(sample_values <= 1))
# Note that the standard error for the sample mean is ~ sqrt(p * (1 - p) /
# n). This means that the tolerance is very sensitive to the value of p
# as well as n.
self.assertAllClose(p, np.mean(sample_values, axis=0), atol=1e-2)
self.assertEqual(set([0, 1]), set(sample_values.flatten()))
# In this test we're just interested in verifying there isn't a crash
# owing to mismatched types. b/30940152
dist = bernoulli.Bernoulli(np.log([.2, .4]))
self.assertAllEqual((1, 2), dist.sample(1, seed=42).get_shape().as_list())
@test_util.run_in_graph_and_eager_modes
def testNotReparameterized(self):
p = constant_op.constant([0.2, 0.6])
with backprop.GradientTape() as tape:
tape.watch(p)
dist = bernoulli.Bernoulli(probs=p)
samples = dist.sample(100)
grad_p = tape.gradient(samples, p)
self.assertIsNone(grad_p)
@test_util.run_deprecated_v1
def testSampleActsLikeSampleN(self):
with self.cached_session() as sess:
p = [0.2, 0.6]
dist = bernoulli.Bernoulli(probs=p)
n = 1000
seed = 42
self.assertAllEqual(
self.evaluate(dist.sample(n, seed)),
self.evaluate(dist.sample(n, seed)))
n = array_ops.placeholder(dtypes.int32)
sample1, sample2 = sess.run([dist.sample(n, seed), dist.sample(n, seed)],
feed_dict={n: 1000})
self.assertAllEqual(sample1, sample2)
@test_util.run_in_graph_and_eager_modes
def testMean(self):
p = np.array([[0.2, 0.7], [0.5, 0.4]], dtype=np.float32)
dist = bernoulli.Bernoulli(probs=p)
self.assertAllEqual(self.evaluate(dist.mean()), p)
@test_util.run_in_graph_and_eager_modes
def testVarianceAndStd(self):
var = lambda p: p * (1. - p)
p = [[0.2, 0.7], [0.5, 0.4]]
dist = bernoulli.Bernoulli(probs=p)
self.assertAllClose(
self.evaluate(dist.variance()),
np.array([[var(0.2), var(0.7)], [var(0.5), var(0.4)]],
dtype=np.float32))
self.assertAllClose(
self.evaluate(dist.stddev()),
np.array([[np.sqrt(var(0.2)), np.sqrt(var(0.7))],
[np.sqrt(var(0.5)), np.sqrt(var(0.4))]],
dtype=np.float32))
@test_util.run_in_graph_and_eager_modes
def testBernoulliBernoulliKL(self):
batch_size = 6
a_p = np.array([0.5] * batch_size, dtype=np.float32)
b_p = np.array([0.4] * batch_size, dtype=np.float32)
a = bernoulli.Bernoulli(probs=a_p)
b = bernoulli.Bernoulli(probs=b_p)
kl = kullback_leibler.kl_divergence(a, b)
kl_val = self.evaluate(kl)
kl_expected = (a_p * np.log(a_p / b_p) + (1. - a_p) * np.log(
(1. - a_p) / (1. - b_p)))
self.assertEqual(kl.get_shape(), (batch_size,))
self.assertAllClose(kl_val, kl_expected)
if __name__ == "__main__":
test.main()
|
|
"""
Test Suites
-----------
Provides a LazySuite, which is a suite whose test list is a generator
function, and ContextSuite, which can run fixtures (setup/teardown
functions or methods) for the context that contains its tests.
"""
from __future__ import generators
import logging
import sys
import unittest
from nose.case import Test
from nose.config import Config
from nose.proxy import ResultProxyFactory
from nose.util import isclass, resolve_name, try_run
if sys.platform == 'cli':
if sys.version_info[:2] < (2, 6):
import clr
clr.AddReference("IronPython")
from IronPython.Runtime.Exceptions import StringException
else:
class StringException(Exception):
pass
log = logging.getLogger(__name__)
#log.setLevel(logging.DEBUG)
# Singleton for default value -- see ContextSuite.__init__ below
_def = object()
def _strclass(cls):
return "%s.%s" % (cls.__module__, cls.__name__)
class MixedContextError(Exception):
"""Error raised when a context suite sees tests from more than
one context.
"""
pass
class LazySuite(unittest.TestSuite):
"""A suite that may use a generator as its list of tests
"""
def __init__(self, tests=()):
"""Initialize the suite. tests may be an iterable or a generator
"""
self._set_tests(tests)
def __iter__(self):
return iter(self._tests)
def __repr__(self):
return "<%s tests=generator (%s)>" % (
_strclass(self.__class__), id(self))
def __hash__(self):
return object.__hash__(self)
__str__ = __repr__
def addTest(self, test):
self._precache.append(test)
def __nonzero__(self):
log.debug("tests in %s?", id(self))
if self._precache:
return True
if self.test_generator is None:
return False
try:
test = self.test_generator.next()
if test is not None:
self._precache.append(test)
return True
except StopIteration:
pass
return False
def _get_tests(self):
log.debug("precache is %s", self._precache)
for test in self._precache:
yield test
if self.test_generator is None:
return
for test in self.test_generator:
yield test
def _set_tests(self, tests):
self._precache = []
is_suite = isinstance(tests, unittest.TestSuite)
if callable(tests) and not is_suite:
self.test_generator = tests()
elif is_suite:
# Suites need special treatment: they must be called like
# tests for their setup/teardown to run (if any)
self.addTests([tests])
self.test_generator = None
else:
self.addTests(tests)
self.test_generator = None
_tests = property(_get_tests, _set_tests, None,
"Access the tests in this suite. Access is through a "
"generator, so iteration may not be repeatable.")
class ContextSuite(LazySuite):
"""A suite with context.
A ContextSuite executes fixtures (setup and teardown functions or
methods) for the context containing its tests.
The context may be explicitly passed. If it is not, a context (or
nested set of contexts) will be constructed by examining the tests
in the suite.
"""
failureException = unittest.TestCase.failureException
was_setup = False
was_torndown = False
classSetup = ('setup_class', 'setup_all', 'setupClass', 'setupAll',
'setUpClass', 'setUpAll')
classTeardown = ('teardown_class', 'teardown_all', 'teardownClass',
'teardownAll', 'tearDownClass', 'tearDownAll')
moduleSetup = ('setup_module', 'setupModule', 'setUpModule', 'setup',
'setUp')
moduleTeardown = ('teardown_module', 'teardownModule', 'tearDownModule',
'teardown', 'tearDown')
packageSetup = ('setup_package', 'setupPackage', 'setUpPackage')
packageTeardown = ('teardown_package', 'teardownPackage',
'tearDownPackage')
def __init__(self, tests=(), context=None, factory=None,
config=None, resultProxy=None, can_split=True):
log.debug("Context suite for %s (%s) (%s)", tests, context, id(self))
self.context = context
self.factory = factory
if config is None:
config = Config()
self.config = config
self.resultProxy = resultProxy
self.has_run = False
self.can_split = can_split
self.error_context = None
LazySuite.__init__(self, tests)
def __repr__(self):
return "<%s context=%s>" % (
_strclass(self.__class__),
getattr(self.context, '__name__', self.context))
__str__ = __repr__
def id(self):
if self.error_context:
return '%s:%s' % (repr(self), self.error_context)
else:
return repr(self)
def __hash__(self):
return object.__hash__(self)
# 2.3 compat -- force 2.4 call sequence
def __call__(self, *arg, **kw):
return self.run(*arg, **kw)
def exc_info(self):
"""Hook for replacing error tuple output
"""
return sys.exc_info()
def _exc_info(self):
"""Bottleneck to fix up IronPython string exceptions
"""
e = self.exc_info()
if sys.platform == 'cli':
if isinstance(e[0], StringException):
# IronPython throws these StringExceptions, but
# traceback checks type(etype) == str. Make a real
# string here.
e = (str(e[0]), e[1], e[2])
return e
def run(self, result):
"""Run tests in suite inside of suite fixtures.
"""
# proxy the result for myself
if self.resultProxy:
result, orig = self.resultProxy(result, self), result
else:
result, orig = result, result
try:
self.setUp()
except KeyboardInterrupt:
raise
except:
self.error_context = 'setup'
result.addError(self, self._exc_info())
return
try:
for test in self._tests:
if result.shouldStop:
log.debug("stopping")
break
# each nose.case.Test will create its own result proxy
# so the cases need the original result, to avoid proxy
# chains
test(orig)
finally:
self.has_run = True
try:
self.tearDown()
except KeyboardInterrupt:
raise
except:
self.error_context = 'teardown'
result.addError(self, self._exc_info())
def hasFixtures(self, ctx_callback=None):
context = self.context
if context is None:
return False
if self.implementsAnyFixture(context, ctx_callback=ctx_callback):
return True
# My context doesn't have any, but its ancestors might
factory = self.factory
if factory:
ancestors = factory.context.get(self, [])
for ancestor in ancestors:
if self.implementsAnyFixture(
ancestor, ctx_callback=ctx_callback):
return True
return False
def implementsAnyFixture(self, context, ctx_callback):
if isclass(context):
names = self.classSetup + self.classTeardown
else:
names = self.moduleSetup + self.moduleTeardown
if hasattr(context, '__path__'):
names += self.packageSetup + self.packageTeardown
# If my context has any fixture attribute, I have fixtures
fixt = False
for m in names:
if hasattr(context, m):
fixt = True
break
if ctx_callback is None:
return fixt
return ctx_callback(context, fixt)
def setUp(self):
log.debug("suite %s setUp called, tests: %s", id(self), self._tests)
if not self:
# I have no tests
log.debug("suite %s has no tests", id(self))
return
if self.was_setup:
log.debug("suite %s already set up", id(self))
return
context = self.context
if context is None:
return
        # before running my own context's setup, I need to ask the factory
        # whether the setups for my context's ancestor contexts have been run
factory = self.factory
if factory:
# get a copy, since we'll be destroying it as we go
ancestors = factory.context.get(self, [])[:]
while ancestors:
ancestor = ancestors.pop()
log.debug("ancestor %s may need setup", ancestor)
if ancestor in factory.was_setup:
continue
log.debug("ancestor %s does need setup", ancestor)
self.setupContext(ancestor)
            if context not in factory.was_setup:
self.setupContext(context)
else:
self.setupContext(context)
self.was_setup = True
log.debug("completed suite setup")
def setupContext(self, context):
self.config.plugins.startContext(context)
log.debug("%s setup context %s", self, context)
if self.factory:
if context in self.factory.was_setup:
return
# note that I ran the setup for this context, so that I'll run
# the teardown in my teardown
self.factory.was_setup[context] = self
if isclass(context):
names = self.classSetup
else:
names = self.moduleSetup
if hasattr(context, '__path__'):
names = self.packageSetup + names
try_run(context, names)
def shortDescription(self):
if self.context is None:
return "test suite"
return "test suite for %s" % self.context
def tearDown(self):
log.debug('context teardown')
if not self.was_setup or self.was_torndown:
log.debug(
"No reason to teardown (was_setup? %s was_torndown? %s)"
% (self.was_setup, self.was_torndown))
return
self.was_torndown = True
context = self.context
if context is None:
log.debug("No context to tear down")
return
# for each ancestor... if the ancestor was setup
# and I did the setup, I can do teardown
factory = self.factory
if factory:
ancestors = factory.context.get(self, []) + [context]
for ancestor in ancestors:
log.debug('ancestor %s may need teardown', ancestor)
                if ancestor not in factory.was_setup:
log.debug('ancestor %s was not setup', ancestor)
continue
if ancestor in factory.was_torndown:
log.debug('ancestor %s already torn down', ancestor)
continue
setup = factory.was_setup[ancestor]
log.debug("%s setup ancestor %s", setup, ancestor)
if setup is self:
self.teardownContext(ancestor)
else:
self.teardownContext(context)
def teardownContext(self, context):
log.debug("%s teardown context %s", self, context)
if self.factory:
if context in self.factory.was_torndown:
return
self.factory.was_torndown[context] = self
if isclass(context):
names = self.classTeardown
else:
names = self.moduleTeardown
if hasattr(context, '__path__'):
names = self.packageTeardown + names
try_run(context, names)
self.config.plugins.stopContext(context)
# FIXME the wrapping has to move to the factory?
def _get_wrapped_tests(self):
for test in self._get_tests():
            if isinstance(test, (Test, unittest.TestSuite)):
yield test
else:
yield Test(test,
config=self.config,
resultProxy=self.resultProxy)
_tests = property(_get_wrapped_tests, LazySuite._set_tests, None,
"Access the tests in this suite. Tests are returned "
"inside of a context wrapper.")
class ContextSuiteFactory(object):
"""Factory for ContextSuites. Called with a collection of tests,
the factory decides on a hierarchy of contexts by introspecting
the collection or the tests themselves to find the objects
containing the test objects. It always returns one suite, but that
suite may consist of a hierarchy of nested suites.
"""
suiteClass = ContextSuite
def __init__(self, config=None, suiteClass=None, resultProxy=_def):
if config is None:
config = Config()
self.config = config
if suiteClass is not None:
self.suiteClass = suiteClass
# Using a singleton to represent default instead of None allows
# passing resultProxy=None to turn proxying off.
if resultProxy is _def:
resultProxy = ResultProxyFactory(config=config)
self.resultProxy = resultProxy
self.suites = {}
self.context = {}
self.was_setup = {}
self.was_torndown = {}
def __call__(self, tests, **kw):
"""Return ``ContextSuite`` for tests. ``tests`` may either
be a callable (in which case the resulting ContextSuite will
have no parent context and be evaluated lazily) or an
        iterable. In that case the tests will be wrapped in
        nose.case.Test, examined, and the context of each found, and a
        suite of suites returned, organized into a stack with the
        outermost suites belonging to the outermost contexts.
"""
log.debug("Create suite for %s", tests)
context = kw.pop('context', getattr(tests, 'context', None))
log.debug("tests %s context %s", tests, context)
if context is None:
tests = self.wrapTests(tests)
try:
context = self.findContext(tests)
except MixedContextError:
return self.makeSuite(self.mixedSuites(tests), None, **kw)
return self.makeSuite(tests, context, **kw)
def ancestry(self, context):
"""Return the ancestry of the context (that is, all of the
packages and modules containing the context), in order of
descent with the outermost ancestor last.
This method is a generator.
"""
log.debug("get ancestry %s", context)
if context is None:
return
# Methods include reference to module they are defined in, we
# don't want that, instead want the module the class is in now
# (classes are re-ancestored elsewhere).
if hasattr(context, 'im_class'):
context = context.im_class
if hasattr(context, '__module__'):
ancestors = context.__module__.split('.')
elif hasattr(context, '__name__'):
ancestors = context.__name__.split('.')[:-1]
else:
raise TypeError("%s has no ancestors?" % context)
while ancestors:
log.debug(" %s ancestors %s", context, ancestors)
yield resolve_name('.'.join(ancestors))
ancestors.pop()
def findContext(self, tests):
if callable(tests) or isinstance(tests, unittest.TestSuite):
return None
context = None
for test in tests:
# Don't look at suites for contexts, only tests
ctx = getattr(test, 'context', None)
if ctx is None:
continue
if context is None:
context = ctx
elif context != ctx:
raise MixedContextError(
"Tests with different contexts in same suite! %s != %s"
% (context, ctx))
return context
def makeSuite(self, tests, context, **kw):
suite = self.suiteClass(
tests, context=context, config=self.config, factory=self,
resultProxy=self.resultProxy, **kw)
if context is not None:
self.suites.setdefault(context, []).append(suite)
self.context.setdefault(suite, []).append(context)
log.debug("suite %s has context %s", suite,
getattr(context, '__name__', None))
for ancestor in self.ancestry(context):
self.suites.setdefault(ancestor, []).append(suite)
self.context[suite].append(ancestor)
log.debug("suite %s has ancestor %s", suite, ancestor.__name__)
return suite
def mixedSuites(self, tests):
"""The complex case where there are tests that don't all share
the same context. Groups tests into suites with common ancestors,
according to the following (essentially tail-recursive) procedure:
Starting with the context of the first test, if it is not
None, look for tests in the remaining tests that share that
ancestor. If any are found, group into a suite with that
ancestor as the context, and replace the current suite with
that suite. Continue this process for each ancestor of the
first test, until all ancestors have been processed. At this
point if any tests remain, recurse with those tests as the
input, returning a list of the common suite (which may be the
suite or test we started with, if no common tests were found)
plus the results of recursion.
"""
if not tests:
return []
head = tests.pop(0)
if not tests:
return [head] # short circuit when none are left to combine
suite = head # the common ancestry suite, so far
tail = tests[:]
context = getattr(head, 'context', None)
if context is not None:
ancestors = [context] + [a for a in self.ancestry(context)]
for ancestor in ancestors:
common = [suite] # tests with ancestor in common, so far
remain = [] # tests that remain to be processed
for test in tail:
found_common = False
test_ctx = getattr(test, 'context', None)
if test_ctx is None:
remain.append(test)
continue
if test_ctx is ancestor:
common.append(test)
continue
for test_ancestor in self.ancestry(test_ctx):
if test_ancestor is ancestor:
common.append(test)
found_common = True
break
if not found_common:
remain.append(test)
if common:
suite = self.makeSuite(common, ancestor)
tail = remain
return [suite] + self.mixedSuites(tail)
def wrapTests(self, tests):
log.debug("wrap %s", tests)
if callable(tests) or isinstance(tests, unittest.TestSuite):
log.debug("I won't wrap")
return tests
wrapped = []
for test in tests:
log.debug("wrapping %s", test)
if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
wrapped.append(test)
elif isinstance(test, ContextList):
wrapped.append(self.makeSuite(test, context=test.context))
else:
wrapped.append(
Test(test, config=self.config, resultProxy=self.resultProxy)
)
return wrapped
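# An illustrative sketch, not part of nose itself (``some_tests`` and the
# package names are assumed for the example): the factory is usually
# invoked directly on a collection of tests gathered from one module,
#
#     factory = ContextSuiteFactory(config=Config())
#     suite = factory(some_tests)
#
# The tests get wrapped in nose.case.Test, findContext() discovers the
# class or module they share, and ancestry() yields the enclosing
# packages outermost-last (for a class defined in pkg.tests.test_mod:
# pkg.tests.test_mod, then pkg.tests, then pkg), which is how makeSuite()
# registers the suite under the context and every one of its ancestors.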
class ContextList(object):
"""Not quite a suite -- a group of tests in a context. This is used
to hint the ContextSuiteFactory about what context the tests
belong to, in cases where it may be ambiguous or missing.
"""
def __init__(self, tests, context=None):
self.tests = tests
self.context = context
def __iter__(self):
return iter(self.tests)
class FinalizingSuiteWrapper(unittest.TestSuite):
"""Wraps suite and calls final function after suite has
executed. Used to call final functions in cases (like running in
the standard test runner) where test running is not under nose's
control.
"""
def __init__(self, suite, finalize):
self.suite = suite
self.finalize = finalize
def __call__(self, *arg, **kw):
return self.run(*arg, **kw)
def run(self, *arg, **kw):
try:
return self.suite(*arg, **kw)
finally:
self.finalize(*arg, **kw)
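# An illustrative sketch (``some_tests`` and ``my_cleanup`` are assumed
# names): FinalizingSuiteWrapper exists for runners nose does not
# control, where the finalize callable still has to fire after the run,
#
#     suite = ContextSuiteFactory()(some_tests)
#     unittest.TextTestRunner().run(
#         FinalizingSuiteWrapper(suite, finalize=my_cleanup))
#
# my_cleanup(result) is invoked from the finally clause, so it runs even
# if the wrapped suite raises.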
# backwards compat -- sort of
class TestDir:
def __init__(*arg, **kw):
raise NotImplementedError(
"TestDir is not usable with nose 0.10. The class is present "
"in nose.suite for backwards compatibility purposes but it "
"may not be used.")
class TestModule:
def __init__(*arg, **kw):
raise NotImplementedError(
"TestModule is not usable with nose 0.10. The class is present "
"in nose.suite for backwards compatibility purposes but it "
"may not be used.")
|
|
#!/usr/bin/env python
# ===================================
# Copyright (c) Microsoft Corporation. All rights reserved.
# See license.txt for license information.
# ===================================
from contextlib import contextmanager
import socket
import os
import sys
import imp
import hashlib
import codecs
import base64
import platform
import shutil
protocol = imp.load_source('protocol', '../protocol.py')
nxDSCLog = imp.load_source('nxDSCLog', '../nxDSCLog.py')
LG = nxDSCLog.DSCLog
# Paths
CONFIG_PATH = '/etc/opt/microsoft/omsagent/conf/'
SERVER_ADDRESS = '/var/opt/microsoft/omsagent/npm_state/npmdagent.sock'
DEST_FILE_NAME = 'npmd_agent_config.xml'
PLUGIN_PATH = '/opt/microsoft/omsagent/plugin/'
PLUGIN_CONF_PATH = '/etc/opt/microsoft/omsagent/conf/omsagent.d/'
RESOURCE_MODULE_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/DSCResources/MSFT_nxOMSAgentNPMConfigResource/NPM/'
DSC_RESOURCE_VERSION_PATH = '/opt/microsoft/omsconfig/modules/nxOMSAgentNPMConfig/VERSION'
AGENT_RESOURCE_VERSION_PATH = '/var/opt/microsoft/omsagent/npm_state/npm_version'
DSC_X64_AGENT_PATH = 'Agent/64/'
DSC_X86_AGENT_PATH = 'Agent/32/'
DSC_PLUGIN_PATH = 'Plugin/plugin/'
DSC_PLUGIN_CONF_PATH = 'Plugin/conf/'
AGENT_BINARY_PATH = '/opt/microsoft/omsagent/plugin/'
AGENT_SCRIPT_PATH = '/opt/microsoft/omsconfig/Scripts/NPMAgentBinaryCap.sh'
# Constants
X64 = '64bit'
AGENT_BINARY_NAME = 'npmd_agent'
def enum(**enums):
return type('Enum', (), enums)
Commands = enum(LogNPM = 'ErrorLog', StartNPM = 'StartNPM', StopNPM = 'StopNPM', Config = 'Config', Purge = 'Purge')
LogType = enum(Error = 'ERROR', Info = 'INFO')
class INPMDiagnosticLog:
def log(self):
pass
class NPMDiagnosticLogUtil(INPMDiagnosticLog):
def log(self, logType, logString):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = Commands.LogNPM + ':' + '[' + logType + ']' + logString
sock.send(message.encode('utf-8'))
except Exception as msg:
LG().Log(LogType.Error, str(msg))
finally:
sock.close()
LOG_ACTION = NPMDiagnosticLogUtil()
class IOMSAgent:
def restart_oms_agent(self):
pass
class OMSAgentUtil(IOMSAgent):
def restart_oms_agent(self):
if os.system('sudo /opt/microsoft/omsagent/bin/service_control restart') == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error restarting omsagent.')
return False
class INPMAgent:
def binary_setcap(self):
pass
class NPMAgentUtil(INPMAgent):
def binary_setcap(self, binaryPath):
if os.path.exists(AGENT_SCRIPT_PATH) and os.system('sudo %s %s' %(AGENT_SCRIPT_PATH, binaryPath)) == 0:
return True
else:
LOG_ACTION.log(LogType.Error, 'Error setting capabilities to npmd agent binary.')
return False
global show_mof
show_mof = False
OMS_ACTION = OMSAgentUtil()
NPM_ACTION = NPMAgentUtil()
# [key] string ConfigType;
# [write] string ConfigID;
# [write] string Contents;
# [write,ValueMap{"Present", "Absent"},Values{"Present", "Absent"}] string Ensure;
# [write] string ContentChecksum;
def init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if ConfigType is not None and ConfigType != '':
ConfigType = ConfigType
else:
ConfigType = 'UpdatedAgentConfig'
if ConfigID is not None:
ConfigID = ConfigID
else:
ConfigID = ''
if Contents is not None:
Contents = base64.b64decode(Contents.encode('utf-8')).decode('ascii', 'ignore')
else:
Contents = ''
if Ensure is not None and Ensure != '':
Ensure = Ensure
else:
Ensure = 'Present'
if ContentChecksum is not None:
ContentChecksum = ContentChecksum
else:
ContentChecksum = ''
return ConfigType, ConfigID, Contents, Ensure, ContentChecksum
def Set_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = hashlib.md5(Contents.encode('utf-8')).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha256')
# validate with sha256
recvdContentChecksum = hashlib.sha256(Contents.encode('utf-8')).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha256, exiting Set')
return [-1]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Test_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
recvdContentChecksum = hashlib.md5(Contents.encode('utf-8')).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
LOG_ACTION.log(LogType.Info, 'Content received did not match checksum with md5, trying with sha256')
# validate with sha256
recvdContentChecksum = hashlib.sha256(Contents.encode('utf-8')).hexdigest().upper()
if recvdContentChecksum != ContentChecksum:
# data is corrupt do not proceed further
LOG_ACTION.log(LogType.Error, 'Content received did not match checksum with sha256, exiting Set')
return [0]
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return retval
def Get_Marshall(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
arg_names = list(locals().keys())
(ConfigType, ConfigID, Contents, Ensure, ContentChecksum) = init_vars(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
retval = Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
ConfigType = protocol.MI_String(ConfigType)
ConfigID = protocol.MI_String(ConfigID)
Ensure = protocol.MI_String(Ensure)
Contents = protocol.MI_String(Contents)
ContentChecksum = protocol.MI_String(ContentChecksum)
retd = {}
ld = locals()
for k in arg_names:
retd[k] = ld[k]
return retval, retd
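# An illustrative sketch, not part of the shipped resource (``xml`` and
# 'cfg-1' are assumed values): Contents arrives base64-encoded and
# ContentChecksum is the uppercase MD5 (or SHA-256) hex digest of that
# still-encoded string, because the Marshall functions hash Contents
# before init_vars() decodes it. Roughly:
#
#     encoded = base64.b64encode(xml.encode('utf-8')).decode('ascii')
#     checksum = hashlib.md5(encoded.encode('utf-8')).hexdigest().upper()
#     Test_Marshall('UpdatedAgentConfig', 'cfg-1', encoded, 'Present', checksum)
#
# Test_Marshall returns [0] when nothing needs to change and [-1] when a
# subsequent Set_Marshall should run.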
############################################################
# Begin user defined DSC functions
############################################################
def SetShowMof(a):
global show_mof
show_mof = a
def ShowMof(op, ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
if not show_mof:
return
mof = ''
mof += op + ' nxOMSAgentNPMConfig MyNPMConfig \n'
mof += '{\n'
mof += ' ConfigType = "' + ConfigType + '"\n'
mof += ' ConfigID = "' + ConfigID + '"\n'
mof += ' Contents = "' + Contents + '"\n'
mof += ' Ensure = "' + Ensure + '"\n'
mof += ' ContentChecksum = "' + ContentChecksum + '"\n'
mof += '}\n'
f = open('./test_mofs.log', 'a')
Print(mof, file=f)
LG().Log(LogType.Info, mof)
f.close()
def Set(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('SET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting set')
return [-1]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, but resource is present, purging')
success = PurgeSolution()
if not success:
retval = -1
return [retval]
if TestConfigUpdate(Contents) != 0:
retval = SetConfigUpdate(Contents)
version = TestResourceVersion()
if version != 0:
retval = SetFilesUpdate(version)
return [retval]
def Test(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
ShowMof('TEST', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
retval = 0
if not os.path.exists(AGENT_SCRIPT_PATH):
LG().Log(LogType.Error, 'npmd set cap script does not exist, exiting test')
return [retval]
if ConfigType != 'UpdatedAgentConfig':
LOG_ACTION.log(LogType.Error, 'Config type did not match, exiting test')
return [retval]
if Ensure == 'Absent':
if os.path.exists(AGENT_RESOURCE_VERSION_PATH):
LG().Log(LogType.Info, 'Ensure is absent, resource is present on the agent, set will purge')
retval = -1
return [retval]
if TestResourceVersion() != 0 or TestConfigUpdate(Contents) != 0:
retval = -1
return [retval]
def Get(ConfigType, ConfigID, Contents, Ensure, ContentChecksum):
retval = 0
ShowMof('GET', ConfigType, ConfigID, Contents, Ensure, ContentChecksum)
return [retval]
def Print(s, file=sys.stdout):
file.write(s + '\n')
# Compare resource version in DSC and agent machine
# Returns
# 0 if version is same
# dsc version number if there is a mismatch or agent config not present
def TestResourceVersion():
retval = 0
dscVersion = ReadFile(DSC_RESOURCE_VERSION_PATH)
if not os.path.exists(AGENT_RESOURCE_VERSION_PATH):
#npmd agent is not present, copy binaries
retval = dscVersion
else:
agentVersion = ReadFile(AGENT_RESOURCE_VERSION_PATH)
if agentVersion != dscVersion:
#version mismatch, copy binaries
retval = dscVersion
return retval
def TestConfigUpdate(Contents):
retval = 0
destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = 0
elif not os.path.exists(destFileFullPath):
# Configuration does not exist, fail
retval = -1
else:
origConfigData = ReadFile(destFileFullPath)
#compare
if origConfigData is None or origConfigData != Contents:
retval = -1
return retval
def SetConfigUpdate(Contents):
destFileFullPath = CONFIG_PATH.__add__(DEST_FILE_NAME)
# Update config after checking if directory exists
if not os.path.exists(CONFIG_PATH):
LOG_ACTION.log(LogType.Error, 'CONFIG_PATH does not exist')
retval = -1
else:
retval = WriteFile(destFileFullPath, Contents)
if retval == 0 and os.path.exists(AGENT_RESOURCE_VERSION_PATH): #notify server only if plugin is present
LG().Log(LogType.Info, 'Updated the file, going to notify server')
NotifyServer(Commands.Config)
return retval
def SetFilesUpdate(newVersion):
retval = UpdateAgentBinary(newVersion)
retval &= UpdatePluginFiles()
if retval:
return 0
return -1
def UpdateAgentBinary(newVersion):
retval = True
arch = platform.architecture()
src = ''
if arch is not None and arch[0] == X64:
src = RESOURCE_MODULE_PATH.__add__(DSC_X64_AGENT_PATH)
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
else:
src = RESOURCE_MODULE_PATH.__add__(DSC_X86_AGENT_PATH)
retval &= DeleteAllFiles(src, AGENT_BINARY_PATH)
retval &= CopyAllFiles(src, AGENT_BINARY_PATH)
        LOG_ACTION.log(LogType.Error, 'npmd agent binary does not support 32-bit.')
#Update version number after deleting and copying new agent files
if retval == True:
WriteFile(AGENT_RESOURCE_VERSION_PATH, newVersion)
# set capabilities to binary
src_files = os.listdir(src)
for file_name in src_files:
if AGENT_BINARY_NAME in file_name:
full_file_name = os.path.join(AGENT_BINARY_PATH, file_name)
break
NPM_ACTION.binary_setcap(full_file_name)
# Notify ruby plugin
#retval &= NotifyServer(Commands.RestartNPM)
return retval
def UpdatePluginFiles():
retval = True
#replace files
retval &= DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_PATH), PLUGIN_PATH)
retval &= DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
retval &= CopyAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_PATH), PLUGIN_PATH)
retval &= CopyAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
# restart oms agent
retval &= OMS_ACTION.restart_oms_agent()
return retval
def CopyAllFiles(src, dest):
try:
src_files = os.listdir(src)
for file_name in src_files:
full_file_name = os.path.join(src, file_name)
if (os.path.isfile(full_file_name)):
shutil.copy(full_file_name, dest)
except:
LOG_ACTION.log(LogType.Error, 'copy_all_files failed for src: ' + src + ' dest: ' + dest)
return False
return True
# Deletes all files present in both directories
def DeleteAllFiles(src, dest):
try:
src_files = os.listdir(src)
for file_name in src_files:
full_file_name = os.path.join(dest, file_name)
if (os.path.isfile(full_file_name)):
os.remove(full_file_name)
except:
LOG_ACTION.log(LogType.Error, 'delete_all_files failed for src: ' + src + ' dest: ' + dest)
return False
return True
def PurgeSolution():
# remove plugin config file so that plugin does not start again
retval = DeleteAllFiles(RESOURCE_MODULE_PATH.__add__(DSC_PLUGIN_CONF_PATH), PLUGIN_CONF_PATH)
# remove resource version file
try:
os.remove(AGENT_RESOURCE_VERSION_PATH)
except:
LOG_ACTION.log(LogType.Error, 'failed to remove version file')
retval = False
# notify ruby plugin to purge agent
NotifyServer(Commands.Purge)
return retval
def NotifyServer(command):
# Create a UDS socket
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
LG().Log(LogType.Info, 'connecting to ' + SERVER_ADDRESS)
try:
# Connect the socket to the port where the server is listening
sock.connect(SERVER_ADDRESS)
# Send data
message = command.encode('utf-8')
LG().Log(LogType.Info, 'sending ' + message.decode('utf-8'))
sock.sendall(message)
except Exception as msg:
LG().Log(LogType.Error, str(msg))
# restart omsagent if command was config update and sock conn failed
if (command == Commands.Config):
OMS_ACTION.restart_oms_agent()
finally:
LG().Log(LogType.Info, 'closing socket')
sock.close()
def WriteFile(path, contents):
retval = 0
try:
with open(path, 'w+') as dFile:
dFile.write(contents)
except IOError as error:
errno, strerror = error.args
print ('Exception opening file ' + path + ' Error Code: ' + str(errno) + ' Error: ' + format(error) + strerror)
retval = -1
return retval
def ReadFile(path):
content = None
try:
with codecs.open (path, encoding = 'utf8', mode = "r") as dFile:
content = dFile.read()
except IOError as error:
errno, strerror = error.args
print ('Exception opening file ' + path + ' Error Code: ' + str(errno) + ' Error: ' + format(error) + strerror)
return content
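# An illustrative note derived from the functions above: both
# NPMDiagnosticLogUtil.log and NotifyServer talk to the npmd agent over
# the Unix domain socket at SERVER_ADDRESS. Control commands are sent as
# the bare command string, while log messages use the form
# '<Command>:[<LEVEL>]<text>', for example:
#
#     NotifyServer(Commands.Config)                    # sends b'Config'
#     LOG_ACTION.log(LogType.Error, 'restart failed')  # sends b'ErrorLog:[ERROR]restart failed'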
|
|
from abc import abstractmethod
from enable.colors import ColorTrait
from traits.api import ABCHasStrictTraits, Any, Event, Instance, Trait, Tuple
from ..core.component import Component
from ..core.container import Container
# XXX: Rename to Window (and subclasses) to WindowCanvas?
class AbstractWindow(ABCHasStrictTraits):
# The top-level component that this window houses
component = Instance(Component)
    # The background color of the window. The entire window first gets
# painted with this color before the component gets to draw.
bgcolor = ColorTrait("sys_window")
# When the underlying toolkit control gets resized, this event gets set
# to the new size of the window, expressed as a tuple (dx, dy).
resized = Event
# Kiva GraphicsContext (XXX: is there a base class?)
_gc = Any
# The previous component that handled an event. Used to generate
# mouse_enter and mouse_leave events. Right now this can only be
# None, self.component, or self.overlay.
_prev_event_handler = Instance(Component)
# Integer size of the Window (width, height).
_size = Trait(None, Tuple)
# --------------------------------------------------------------------------
# Abstract methods
# --------------------------------------------------------------------------
@abstractmethod
def _create_key_event(self, event):
""" Convert a GUI toolkit key event into a KeyEvent. """
@abstractmethod
def _create_mouse_event(self, event):
""" Convert a GUI toolkit mouse event into a MouseEvent. """
@abstractmethod
def _get_control_size(self):
""" Get the size of the underlying toolkit control. """
@abstractmethod
def _create_gc(self, size, pix_format="bgra32"):
""" Create a Kiva graphics context of a specified size.
This method only gets called when the size of the window itself has
changed. To perform pre-draw initialization every time in the paint
loop, use `_init_gc()`.
"""
@abstractmethod
def _render(self, event):
""" Do a GUI toolkit specific screen update. """
@abstractmethod
def set_pointer(self, pointer):
""" Sets the current cursor shape. """
@abstractmethod
def set_tooltip(self, tooltip):
""" Sets the current tooltip. """
@abstractmethod
def _set_focus(self):
""" Sets this window to have keyboard focus. """
@abstractmethod
def _get_event_size(self, event):
""" Return width and height of event. """
@abstractmethod
def redraw(self, rect=None):
""" Request a redraw of the window.
If `rect` is provided, draw within just the (x, y, w, h) rectangle.
Otherwise, draw over the entire window.
"""
# -----------------------------------------------------------------------
# Public methods
# -----------------------------------------------------------------------
def __init__(self, **traits):
self._gc = None
        super(AbstractWindow, self).__init__(**traits)
# Create a default component (if necessary):
if self.component is None:
self.component = Container()
def _component_changed(self, old, new):
if old is not None:
old.window = None
if new is None:
self.component = Container()
return
new.window = self
# If possible, size the new component according to the size of the
# toolkit control
size = self._get_control_size()
if (size is not None) and hasattr(self.component, "size"):
self.component.origin = [0, 0]
self.component.size = list(size)
self.redraw()
# --------------------------------------------------------------------------
# Generic keyboard event handler:
# --------------------------------------------------------------------------
def handle_key_event(self, event_type, event):
""" **event** should be a toolkit-specific opaque object that will
be passed in to the backend's _create_key_event() method. It can
        be None if the toolkit lacks a native "key event" object.
Returns True if the event has been handled within the Enable object
hierarchy, or False otherwise.
"""
# Generate the Enable event
key_event = self._create_key_event(event_type, event)
if key_event is None:
return False
# Normal event handling loop
if (not key_event.handled) and (self.component is not None):
if self.component.is_in(key_event.x, key_event.y):
# Fire the actual event
self.component.dispatch(key_event, event_type)
return key_event.handled
# --------------------------------------------------------------------------
# Generic mouse event handler:
# --------------------------------------------------------------------------
def handle_mouse_event(self, event_type, event, set_focus=False):
""" **event** should be a toolkit-specific opaque object that will
be passed in to the backend's _create_mouse_event() method. It can
        be None if the toolkit lacks a native "mouse event" object.
Returns True if the event has been handled within the Enable object
hierarchy, or False otherwise.
"""
if self._size is None:
return False
mouse_event = self._create_mouse_event(event)
xy = (mouse_event.x, mouse_event.y)
if (not mouse_event.handled) and (self.component is not None):
# Test to see if we need to generate a mouse_leave event
if self._prev_event_handler:
if not self._prev_event_handler.is_in(*xy):
mouse_event.handled = False
self._prev_event_handler.dispatch(mouse_event,
"mouse_leave")
self._prev_event_handler = None
if self.component.is_in(*xy):
# Test to see if we need to generate a mouse_enter event
if self._prev_event_handler != self.component:
self._prev_event_handler = self.component
mouse_event.handled = False
self.component.dispatch(mouse_event, "mouse_enter")
# Fire the actual event
mouse_event.handled = False
self.component.dispatch(mouse_event, event_type)
return mouse_event.handled
def cleanup(self):
""" Clean up after ourselves.
"""
if self.component is not None:
self.component.cleanup(self)
self.component.window = None
self.component = None
self.control = None
if self._gc is not None:
self._gc.window = None
self._gc = None
def render(self, event=None):
""" This method is called directly by the UI toolkit's callback
mechanism on the paint event.
"""
# Create a new GC if necessary
size = self._get_control_size()
if (self._size != tuple(size)) or (self._gc is None):
self._size = tuple(size)
self._gc = self._create_gc(size)
# Always give the GC a chance to initialize
self._init_gc()
# Layout components and draw
if hasattr(self.component, "do_layout"):
self.component.do_layout()
gc = self._gc
self.component.render(gc, view_rect=(0, 0, size[0], size[1]))
# Perform a paint of the GC to the window (only necessary on backends
# that render to an off-screen buffer)
self._render(event)
def on_close(self, event):
self.cleanup()
def on_resize(self, event):
width, height = self._get_event_size(event)
self.resized = (width, height)
component = self.component
component.origin = [0, 0]
component.size = [width, height]
# -------------------------------------------------------------------------
# Private interface
# -------------------------------------------------------------------------
def _init_gc(self):
""" Gives a GC a chance to initialize itself before components perform
layout and draw. This is called every time through the paint loop.
"""
gc = self._gc
gc.clear(self.bgcolor_)
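# An illustrative, toolkit-agnostic sketch (the helper and attribute
# names are hypothetical): a concrete backend subclasses AbstractWindow
# and fills in the abstract hooks, roughly:
#
#     class MyToolkitWindow(AbstractWindow):
#         def _get_control_size(self):
#             return self._widget.width(), self._widget.height()
#         def _create_gc(self, size, pix_format="bgra32"):
#             return make_image_graphics_context(size, pix_format)  # assumed helper
#         def _render(self, event):
#             blit_to_screen(self._gc)                              # assumed helper
#         # ...plus _create_key_event, _create_mouse_event, set_pointer,
#         # set_tooltip, _set_focus, _get_event_size and redraw.
#
# The toolkit's paint callback then calls self.render(event) and its
# resize callback calls self.on_resize(event).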
|
|
"""Automatically adapted for numpy Sep 19, 2005 by convertcode.py
"""
from __future__ import division, absolute_import, print_function
__all__ = ['iscomplexobj', 'isrealobj', 'imag', 'iscomplex',
'isreal', 'nan_to_num', 'real', 'real_if_close',
'typename', 'asfarray', 'mintypecode', 'asscalar',
'common_type']
import numpy.core.numeric as _nx
from numpy.core.numeric import asarray, asanyarray, array, isnan, \
obj2sctype, zeros
from .ufunclike import isneginf, isposinf
_typecodes_by_elsize = 'GDFgdfQqLlIiHhBb?'
def mintypecode(typechars,typeset='GDFgdf',default='d'):
"""
Return the character for the minimum-size type to which given types can
be safely cast.
The returned type character must represent the smallest size dtype such
that an array of the returned type can handle the data from an array of
all types in `typechars` (or if `typechars` is an array, then its
dtype.char).
Parameters
----------
typechars : list of str or array_like
If a list of strings, each string should represent a dtype.
If array_like, the character representation of the array dtype is used.
typeset : str or list of str, optional
The set of characters that the returned character is chosen from.
The default set is 'GDFgdf'.
default : str, optional
The default character, this is returned if none of the characters in
`typechars` matches a character in `typeset`.
Returns
-------
typechar : str
The character representing the minimum-size type that was found.
See Also
--------
dtype, sctype2char, maximum_sctype
Examples
--------
>>> np.mintypecode(['d', 'f', 'S'])
'd'
>>> x = np.array([1.1, 2-3.j])
>>> np.mintypecode(x)
'D'
>>> np.mintypecode('abceh', default='G')
'G'
"""
typecodes = [(isinstance(t, str) and t) or asarray(t).dtype.char
for t in typechars]
intersection = [t for t in typecodes if t in typeset]
if not intersection:
return default
if 'F' in intersection and 'd' in intersection:
return 'D'
l = []
for t in intersection:
i = _typecodes_by_elsize.index(t)
l.append((i, t))
l.sort()
return l[0][1]
def asfarray(a, dtype=_nx.float_):
"""
Return an array converted to a float type.
Parameters
----------
a : array_like
The input array.
dtype : str or dtype object, optional
Float type code to coerce input array `a`. If `dtype` is one of the
'int' dtypes, it is replaced with float64.
Returns
-------
out : ndarray
The input `a` as a float ndarray.
Examples
--------
>>> np.asfarray([2, 3])
array([ 2., 3.])
>>> np.asfarray([2, 3], dtype='float')
array([ 2., 3.])
>>> np.asfarray([2, 3], dtype='int8')
array([ 2., 3.])
"""
dtype = _nx.obj2sctype(dtype)
if not issubclass(dtype, _nx.inexact):
dtype = _nx.float_
return asarray(a, dtype=dtype)
def real(val):
"""
Return the real part of the elements of the array.
Parameters
----------
val : array_like
Input array.
Returns
-------
out : ndarray
Output array. If `val` is real, the type of `val` is used for the
output. If `val` has complex elements, the returned type is float.
See Also
--------
real_if_close, imag, angle
Examples
--------
>>> a = np.array([1+2j, 3+4j, 5+6j])
>>> a.real
array([ 1., 3., 5.])
>>> a.real = 9
>>> a
array([ 9.+2.j, 9.+4.j, 9.+6.j])
>>> a.real = np.array([9, 8, 7])
>>> a
array([ 9.+2.j, 8.+4.j, 7.+6.j])
"""
return asanyarray(val).real
def imag(val):
"""
Return the imaginary part of the elements of the array.
Parameters
----------
val : array_like
Input array.
Returns
-------
out : ndarray
Output array. If `val` is real, the type of `val` is used for the
output. If `val` has complex elements, the returned type is float.
See Also
--------
real, angle, real_if_close
Examples
--------
>>> a = np.array([1+2j, 3+4j, 5+6j])
>>> a.imag
array([ 2., 4., 6.])
>>> a.imag = np.array([8, 10, 12])
>>> a
array([ 1. +8.j, 3.+10.j, 5.+12.j])
"""
return asanyarray(val).imag
def iscomplex(x):
"""
Returns a bool array, where True if input element is complex.
What is tested is whether the input has a non-zero imaginary part, not if
the input type is complex.
Parameters
----------
x : array_like
Input array.
Returns
-------
out : ndarray of bools
Output array.
See Also
--------
isreal
iscomplexobj : Return True if x is a complex type or an array of complex
numbers.
Examples
--------
>>> np.iscomplex([1+1j, 1+0j, 4.5, 3, 2, 2j])
array([ True, False, False, False, False, True], dtype=bool)
"""
ax = asanyarray(x)
if issubclass(ax.dtype.type, _nx.complexfloating):
return ax.imag != 0
res = zeros(ax.shape, bool)
    return +res  # convert to array-scalar if needed
def isreal(x):
"""
Returns a bool array, where True if input element is real.
If element has complex type with zero complex part, the return value
for that element is True.
Parameters
----------
x : array_like
Input array.
Returns
-------
out : ndarray, bool
Boolean array of same shape as `x`.
See Also
--------
iscomplex
isrealobj : Return True if x is not a complex type.
Examples
--------
>>> np.isreal([1+1j, 1+0j, 4.5, 3, 2, 2j])
array([False, True, True, True, True, False], dtype=bool)
"""
return imag(x) == 0
def iscomplexobj(x):
"""
Check for a complex type or an array of complex numbers.
The type of the input is checked, not the value. Even if the input
has an imaginary part equal to zero, `iscomplexobj` evaluates to True.
Parameters
----------
x : any
The input can be of any type and shape.
Returns
-------
iscomplexobj : bool
The return value, True if `x` is of a complex type or has at least
one complex element.
See Also
--------
isrealobj, iscomplex
Examples
--------
>>> np.iscomplexobj(1)
False
>>> np.iscomplexobj(1+0j)
True
>>> np.iscomplexobj([3, 1+0j, True])
True
"""
return issubclass(asarray(x).dtype.type, _nx.complexfloating)
def isrealobj(x):
"""
    Return True if x is not a complex type nor an array of complex numbers.
The type of the input is checked, not the value. So even if the input
has an imaginary part equal to zero, `isrealobj` evaluates to False
if the data type is complex.
Parameters
----------
x : any
The input can be of any type and shape.
Returns
-------
y : bool
The return value, False if `x` is of a complex type.
See Also
--------
iscomplexobj, isreal
Examples
--------
>>> np.isrealobj(1)
True
>>> np.isrealobj(1+0j)
False
>>> np.isrealobj([3, 1+0j, True])
False
"""
return not issubclass(asarray(x).dtype.type, _nx.complexfloating)
#-----------------------------------------------------------------------------
def _getmaxmin(t):
from numpy.core import getlimits
f = getlimits.finfo(t)
return f.max, f.min
def nan_to_num(x):
"""
Replace nan with zero and inf with finite numbers.
Returns an array or scalar replacing Not a Number (NaN) with zero,
(positive) infinity with a very large number and negative infinity
with a very small (or negative) number.
Parameters
----------
x : array_like
Input data.
Returns
-------
out : ndarray
New Array with the same shape as `x` and dtype of the element in
`x` with the greatest precision. If `x` is inexact, then NaN is
replaced by zero, and infinity (-infinity) is replaced by the
largest (smallest or most negative) floating point value that fits
in the output dtype. If `x` is not inexact, then a copy of `x` is
returned.
See Also
--------
    isinf : Shows which elements are positive or negative infinity.
isneginf : Shows which elements are negative infinity.
isposinf : Shows which elements are positive infinity.
isnan : Shows which elements are Not a Number (NaN).
isfinite : Shows which elements are finite (not NaN, not infinity)
Notes
-----
Numpy uses the IEEE Standard for Binary Floating-Point for Arithmetic
(IEEE 754). This means that Not a Number is not equivalent to infinity.
Examples
--------
>>> np.set_printoptions(precision=8)
>>> x = np.array([np.inf, -np.inf, np.nan, -128, 128])
>>> np.nan_to_num(x)
array([ 1.79769313e+308, -1.79769313e+308, 0.00000000e+000,
-1.28000000e+002, 1.28000000e+002])
"""
x = _nx.array(x, subok=True)
xtype = x.dtype.type
if not issubclass(xtype, _nx.inexact):
return x
iscomplex = issubclass(xtype, _nx.complexfloating)
isscalar = (x.ndim == 0)
x = x[None] if isscalar else x
dest = (x.real, x.imag) if iscomplex else (x,)
maxf, minf = _getmaxmin(x.real.dtype)
for d in dest:
_nx.copyto(d, 0.0, where=isnan(d))
_nx.copyto(d, maxf, where=isposinf(d))
_nx.copyto(d, minf, where=isneginf(d))
return x[0] if isscalar else x
#-----------------------------------------------------------------------------
def real_if_close(a,tol=100):
"""
If complex input returns a real array if complex parts are close to zero.
"Close to zero" is defined as `tol` * (machine epsilon of the type for
`a`).
Parameters
----------
a : array_like
Input array.
tol : float
Tolerance in machine epsilons for the complex part of the elements
in the array.
Returns
-------
out : ndarray
If `a` is real, the type of `a` is used for the output. If `a`
has complex elements, the returned type is float.
See Also
--------
real, imag, angle
Notes
-----
Machine epsilon varies from machine to machine and between data types
but Python floats on most platforms have a machine epsilon equal to
2.2204460492503131e-16. You can use 'np.finfo(np.float).eps' to print
out the machine epsilon for floats.
Examples
--------
>>> np.finfo(np.float).eps
2.2204460492503131e-16
>>> np.real_if_close([2.1 + 4e-14j], tol=1000)
array([ 2.1])
>>> np.real_if_close([2.1 + 4e-13j], tol=1000)
array([ 2.1 +4.00000000e-13j])
"""
a = asanyarray(a)
if not issubclass(a.dtype.type, _nx.complexfloating):
return a
if tol > 1:
from numpy.core import getlimits
f = getlimits.finfo(a.dtype.type)
tol = f.eps * tol
if _nx.allclose(a.imag, 0, atol=tol):
a = a.real
return a
def asscalar(a):
"""
Convert an array of size 1 to its scalar equivalent.
Parameters
----------
a : ndarray
Input array of size 1.
Returns
-------
out : scalar
Scalar representation of `a`. The output data type is the same type
returned by the input's `item` method.
Examples
--------
>>> np.asscalar(np.array([24]))
24
"""
return a.item()
#-----------------------------------------------------------------------------
_namefromtype = {'S1': 'character',
'?': 'bool',
'b': 'signed char',
'B': 'unsigned char',
'h': 'short',
'H': 'unsigned short',
'i': 'integer',
'I': 'unsigned integer',
'l': 'long integer',
'L': 'unsigned long integer',
'q': 'long long integer',
'Q': 'unsigned long long integer',
'f': 'single precision',
'd': 'double precision',
'g': 'long precision',
'F': 'complex single precision',
'D': 'complex double precision',
'G': 'complex long double precision',
'S': 'string',
'U': 'unicode',
'V': 'void',
'O': 'object'
}
def typename(char):
"""
Return a description for the given data type code.
Parameters
----------
char : str
Data type code.
Returns
-------
out : str
Description of the input data type code.
See Also
--------
dtype, typecodes
Examples
--------
>>> typechars = ['S1', '?', 'B', 'D', 'G', 'F', 'I', 'H', 'L', 'O', 'Q',
... 'S', 'U', 'V', 'b', 'd', 'g', 'f', 'i', 'h', 'l', 'q']
>>> for typechar in typechars:
... print typechar, ' : ', np.typename(typechar)
...
S1 : character
? : bool
B : unsigned char
D : complex double precision
G : complex long double precision
F : complex single precision
I : unsigned integer
H : unsigned short
L : unsigned long integer
O : object
Q : unsigned long long integer
S : string
U : unicode
V : void
b : signed char
d : double precision
g : long precision
f : single precision
i : integer
h : short
l : long integer
q : long long integer
"""
return _namefromtype[char]
#-----------------------------------------------------------------------------
#determine the "minimum common type" for a group of arrays.
array_type = [[_nx.half, _nx.single, _nx.double, _nx.longdouble],
[None, _nx.csingle, _nx.cdouble, _nx.clongdouble]]
array_precision = {_nx.half: 0,
_nx.single: 1,
_nx.double: 2,
_nx.longdouble: 3,
_nx.csingle: 1,
_nx.cdouble: 2,
_nx.clongdouble: 3}
def common_type(*arrays):
"""
Return a scalar type which is common to the input arrays.
The return type will always be an inexact (i.e. floating point) scalar
type, even if all the arrays are integer arrays. If one of the inputs is
an integer array, the minimum precision type that is returned is a
64-bit floating point dtype.
All input arrays can be safely cast to the returned dtype without loss
of information.
Parameters
----------
array1, array2, ... : ndarrays
Input arrays.
Returns
-------
out : data type code
Data type code.
See Also
--------
dtype, mintypecode
Examples
--------
>>> np.common_type(np.arange(2, dtype=np.float32))
<type 'numpy.float32'>
>>> np.common_type(np.arange(2, dtype=np.float32), np.arange(2))
<type 'numpy.float64'>
>>> np.common_type(np.arange(4), np.array([45, 6.j]), np.array([45.0]))
<type 'numpy.complex128'>
"""
is_complex = False
precision = 0
for a in arrays:
t = a.dtype.type
if iscomplexobj(a):
is_complex = True
if issubclass(t, _nx.integer):
p = 2 # array_precision[_nx.double]
else:
p = array_precision.get(t, None)
if p is None:
raise TypeError("can't get common type for non-numeric array")
precision = max(precision, p)
if is_complex:
return array_type[1][precision]
else:
return array_type[0][precision]
|
|
"""Tests for openid.server.
"""
from base64 import b64decode
import unittest
import warnings
from urllib.parse import urlparse, parse_qsl, parse_qs
from openid.server import server
from openid import association, cryptutil, oidutil
from openid.message import Message, OPENID_NS, OPENID2_NS, OPENID1_NS, \
IDENTIFIER_SELECT, no_default, OPENID1_URL_LIMIT
from openid.store import memstore
from openid.test.support import CatchLogs
# In general, if you edit or add tests here, try to move in the direction
# of testing smaller units. For testing the external interfaces, we'll be
# developing an implementation-agnostic testing suite.
# for more, see /etc/ssh/moduli
ALT_MODULUS = 0xCAADDDEC1667FC68B5FA15D53C4E1532DD24561A1A2D47A12C01ABEA1E00731F6921AAC40742311FDF9E634BB7131BEE1AF240261554389A910425E044E88C8359B010F5AD2B80E29CB1A5B027B19D9E01A6F63A6F45E5D7ED2FF6A2A0085050A7D0CF307C3DB51D2490355907B4427C23A98DF1EB8ABEF2BA209BB7AFFE86A7
ALT_GEN = 5
class TestProtocolError(unittest.TestCase):
def test_browserWithReturnTo(self):
return_to = "http://rp.unittest/consumer"
# will be a ProtocolError raised by Decode or CheckIDRequest.answer
args = Message.fromPostArgs({
'openid.mode':
'monkeydance',
'openid.identity':
'http://wagu.unittest/',
'openid.return_to':
return_to,
})
e = server.ProtocolError(args, "plucky")
self.assertTrue(e.hasReturnTo())
expected_args = {
'openid.mode': ['error'],
'openid.error': ['plucky'],
}
rt_base, result_args = e.encodeToURL().split('?', 1)
result_args = parse_qs(result_args)
self.assertEqual(result_args, expected_args)
def test_browserWithReturnTo_OpenID2_GET(self):
return_to = "http://rp.unittest/consumer"
# will be a ProtocolError raised by Decode or CheckIDRequest.answer
args = Message.fromPostArgs({
'openid.ns':
OPENID2_NS,
'openid.mode':
'monkeydance',
'openid.identity':
'http://wagu.unittest/',
'openid.claimed_id':
'http://wagu.unittest/',
'openid.return_to':
return_to,
})
e = server.ProtocolError(args, "plucky")
self.assertTrue(e.hasReturnTo())
expected_args = {
'openid.ns': [OPENID2_NS],
'openid.mode': ['error'],
'openid.error': ['plucky'],
}
rt_base, result_args = e.encodeToURL().split('?', 1)
result_args = parse_qs(result_args)
self.assertEqual(result_args, expected_args)
def test_browserWithReturnTo_OpenID2_POST(self):
return_to = "http://rp.unittest/consumer" + ('x' * OPENID1_URL_LIMIT)
# will be a ProtocolError raised by Decode or CheckIDRequest.answer
args = Message.fromPostArgs({
'openid.ns':
OPENID2_NS,
'openid.mode':
'monkeydance',
'openid.identity':
'http://wagu.unittest/',
'openid.claimed_id':
'http://wagu.unittest/',
'openid.return_to':
return_to,
})
e = server.ProtocolError(args, "plucky")
self.assertTrue(e.hasReturnTo())
expected_args = {
'openid.ns': [OPENID2_NS],
'openid.mode': ['error'],
'openid.error': ['plucky'],
}
self.assertTrue(e.whichEncoding() == server.ENCODE_HTML_FORM)
self.assertTrue(e.toFormMarkup() == e.toMessage().toFormMarkup(
args.getArg(OPENID_NS, 'return_to')))
def test_browserWithReturnTo_OpenID1_exceeds_limit(self):
return_to = "http://rp.unittest/consumer" + ('x' * OPENID1_URL_LIMIT)
# will be a ProtocolError raised by Decode or CheckIDRequest.answer
args = Message.fromPostArgs({
'openid.mode':
'monkeydance',
'openid.identity':
'http://wagu.unittest/',
'openid.return_to':
return_to,
})
e = server.ProtocolError(args, "plucky")
self.assertTrue(e.hasReturnTo())
expected_args = {
'openid.mode': ['error'],
'openid.error': ['plucky'],
}
self.assertTrue(e.whichEncoding() == server.ENCODE_URL)
rt_base, result_args = e.encodeToURL().split('?', 1)
result_args = parse_qs(result_args)
self.assertEqual(result_args, expected_args)
def test_noReturnTo(self):
# will be a ProtocolError raised by Decode or CheckIDRequest.answer
args = Message.fromPostArgs({
'openid.mode': 'zebradance',
'openid.identity': 'http://wagu.unittest/',
})
e = server.ProtocolError(args, "waffles")
self.assertFalse(e.hasReturnTo())
expected = b"""error:waffles
mode:error
"""
self.assertEqual(e.encodeToKVForm(), expected)
def test_noMessage(self):
e = server.ProtocolError(None, "no moar pancakes")
self.assertFalse(e.hasReturnTo())
self.assertEqual(e.whichEncoding(), None)
class TestDecode(unittest.TestCase):
def setUp(self):
self.claimed_id = 'http://de.legating.de.coder.unittest/'
self.id_url = "http://decoder.am.unittest/"
self.rt_url = "http://rp.unittest/foobot/?qux=zam"
self.tr_url = "http://rp.unittest/"
self.assoc_handle = "{assoc}{handle}"
self.op_endpoint = 'http://endpoint.unittest/encode'
self.store = memstore.MemoryStore()
self.server = server.Server(self.store, self.op_endpoint)
self.decode = self.server.decoder.decode
self.decode = server.Decoder(self.server).decode
def test_none(self):
args = {}
r = self.decode(args)
self.assertEqual(r, None)
def test_irrelevant(self):
args = {
'pony': 'spotted',
'sreg.mutant_power': 'decaffinator',
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_bad(self):
args = {
'openid.mode': 'twos-compliment',
'openid.pants': 'zippered',
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_dictOfLists(self):
args = {
'openid.mode': ['checkid_setup'],
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.trust_root': self.tr_url,
}
try:
result = self.decode(args)
except TypeError as err:
self.assertTrue(str(err).find('values') != -1, err)
else:
self.fail("Expected TypeError, but got result %s" % (result, ))
def test_checkidImmediate(self):
args = {
'openid.mode': 'checkid_immediate',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.trust_root': self.tr_url,
# should be ignored
'openid.some.extension': 'junk',
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.CheckIDRequest))
self.assertEqual(r.mode, "checkid_immediate")
self.assertEqual(r.immediate, True)
self.assertEqual(r.identity, self.id_url)
self.assertEqual(r.trust_root, self.tr_url)
self.assertEqual(r.return_to, self.rt_url)
self.assertEqual(r.assoc_handle, self.assoc_handle)
def test_checkidSetup(self):
args = {
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.trust_root': self.tr_url,
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.CheckIDRequest))
self.assertEqual(r.mode, "checkid_setup")
self.assertEqual(r.immediate, False)
self.assertEqual(r.identity, self.id_url)
self.assertEqual(r.trust_root, self.tr_url)
self.assertEqual(r.return_to, self.rt_url)
def test_checkidSetupOpenID2(self):
args = {
'openid.ns': OPENID2_NS,
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.claimed_id': self.claimed_id,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.realm': self.tr_url,
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.CheckIDRequest))
self.assertEqual(r.mode, "checkid_setup")
self.assertEqual(r.immediate, False)
self.assertEqual(r.identity, self.id_url)
self.assertEqual(r.claimed_id, self.claimed_id)
self.assertEqual(r.trust_root, self.tr_url)
self.assertEqual(r.return_to, self.rt_url)
def test_checkidSetupNoClaimedIDOpenID2(self):
args = {
'openid.ns': OPENID2_NS,
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.realm': self.tr_url,
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_checkidSetupNoIdentityOpenID2(self):
args = {
'openid.ns': OPENID2_NS,
'openid.mode': 'checkid_setup',
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.realm': self.tr_url,
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.CheckIDRequest))
self.assertEqual(r.mode, "checkid_setup")
self.assertEqual(r.immediate, False)
self.assertEqual(r.identity, None)
self.assertEqual(r.trust_root, self.tr_url)
self.assertEqual(r.return_to, self.rt_url)
def test_checkidSetupNoReturnOpenID1(self):
"""Make sure an OpenID 1 request cannot be decoded if it lacks
a return_to.
"""
args = {
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.trust_root': self.tr_url,
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_checkidSetupNoReturnOpenID2(self):
"""Make sure an OpenID 2 request with no return_to can be
decoded, and make sure a response to such a request raises
NoReturnToError.
"""
args = {
'openid.ns': OPENID2_NS,
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.claimed_id': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.realm': self.tr_url,
}
self.assertTrue(isinstance(self.decode(args), server.CheckIDRequest))
req = self.decode(args)
self.assertRaises(server.NoReturnToError, req.answer, False)
self.assertRaises(server.NoReturnToError, req.encodeToURL, 'bogus')
self.assertRaises(server.NoReturnToError, req.getCancelURL)
def test_checkidSetupRealmRequiredOpenID2(self):
"""Make sure that an OpenID 2 request which lacks return_to
cannot be decoded if it lacks a realm. Spec: This value
(openid.realm) MUST be sent if openid.return_to is omitted.
"""
args = {
'openid.ns': OPENID2_NS,
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_checkidSetupBadReturn(self):
args = {
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': 'not a url',
}
try:
result = self.decode(args)
except server.ProtocolError as err:
self.assertTrue(err.openid_message)
else:
self.fail("Expected ProtocolError, instead returned with %s" %
(result, ))
def test_checkidSetupUntrustedReturn(self):
args = {
'openid.mode': 'checkid_setup',
'openid.identity': self.id_url,
'openid.assoc_handle': self.assoc_handle,
'openid.return_to': self.rt_url,
'openid.trust_root': 'http://not-the-return-place.unittest/',
}
try:
result = self.decode(args)
except server.UntrustedReturnURL as err:
self.assertTrue(err.openid_message)
else:
self.fail("Expected UntrustedReturnURL, instead returned with %s" %
(result, ))
def test_checkAuth(self):
args = {
'openid.mode': 'check_authentication',
'openid.assoc_handle': '{dumb}{handle}',
'openid.sig': 'sigblob',
'openid.signed': 'identity,return_to,response_nonce,mode',
'openid.identity': 'signedval1',
'openid.return_to': 'signedval2',
'openid.response_nonce': 'signedval3',
'openid.baz': 'unsigned',
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.CheckAuthRequest))
self.assertEqual(r.mode, 'check_authentication')
self.assertEqual(r.sig, 'sigblob')
def test_checkAuthMissingSignature(self):
args = {
'openid.mode': 'check_authentication',
'openid.assoc_handle': '{dumb}{handle}',
'openid.signed': 'foo,bar,mode',
'openid.foo': 'signedval1',
'openid.bar': 'signedval2',
'openid.baz': 'unsigned',
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_checkAuthAndInvalidate(self):
args = {
'openid.mode': 'check_authentication',
'openid.assoc_handle': '{dumb}{handle}',
'openid.invalidate_handle': '[[SMART_handle]]',
'openid.sig': 'sigblob',
'openid.signed': 'identity,return_to,response_nonce,mode',
'openid.identity': 'signedval1',
'openid.return_to': 'signedval2',
'openid.response_nonce': 'signedval3',
'openid.baz': 'unsigned',
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.CheckAuthRequest))
self.assertEqual(r.invalidate_handle, '[[SMART_handle]]')
def test_associateDH(self):
args = {
'openid.mode': 'associate',
'openid.session_type': 'DH-SHA1',
'openid.dh_consumer_public': "Rzup9265tw==",
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.AssociateRequest))
self.assertEqual(r.mode, "associate")
self.assertEqual(r.session.session_type, "DH-SHA1")
self.assertEqual(r.assoc_type, "HMAC-SHA1")
self.assertTrue(r.session.consumer_pubkey)
def test_associateDHMissingKey(self):
"""Trying DH assoc w/o public key"""
args = {
'openid.mode': 'associate',
'openid.session_type': 'DH-SHA1',
}
# Using DH-SHA1 without supplying dh_consumer_public is an error.
self.assertRaises(server.ProtocolError, self.decode, args)
def test_associateDHpubKeyNotB64(self):
args = {
'openid.mode': 'associate',
'openid.session_type': 'DH-SHA1',
'openid.dh_consumer_public': "donkeydonkeydonkey",
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_associateDHModGen(self):
# test dh with non-default but valid values for dh_modulus and dh_gen
args = {
'openid.mode': 'associate',
'openid.session_type': 'DH-SHA1',
'openid.dh_consumer_public': "Rzup9265tw==",
'openid.dh_modulus': cryptutil.longToBase64(ALT_MODULUS),
'openid.dh_gen': cryptutil.longToBase64(ALT_GEN)
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.AssociateRequest))
self.assertEqual(r.mode, "associate")
self.assertEqual(r.session.session_type, "DH-SHA1")
self.assertEqual(r.assoc_type, "HMAC-SHA1")
self.assertEqual(r.session.dh.modulus, ALT_MODULUS)
self.assertEqual(r.session.dh.generator, ALT_GEN)
self.assertTrue(r.session.consumer_pubkey)
def test_associateDHCorruptModGen(self):
# test dh with non-default but valid values for dh_modulus and dh_gen
args = {
'openid.mode': 'associate',
'openid.session_type': 'DH-SHA1',
'openid.dh_consumer_public': "Rzup9265tw==",
'openid.dh_modulus': 'pizza',
'openid.dh_gen': 'gnocchi',
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_associateDHMissingModGen(self):
# test dh with non-default but valid values for dh_modulus and dh_gen
args = {
'openid.mode': 'associate',
'openid.session_type': 'DH-SHA1',
'openid.dh_consumer_public': "Rzup9265tw==",
'openid.dh_modulus': 'pizza',
}
self.assertRaises(server.ProtocolError, self.decode, args)
# def test_associateDHInvalidModGen(self):
# # test dh with properly encoded values that are not a valid
# # modulus/generator combination.
# args = {
# 'openid.mode': 'associate',
# 'openid.session_type': 'DH-SHA1',
# 'openid.dh_consumer_public': "Rzup9265tw==",
# 'openid.dh_modulus': cryptutil.longToBase64(9),
# 'openid.dh_gen': cryptutil.longToBase64(27) ,
# }
# self.assertRaises(server.ProtocolError, self.decode, args)
# test_associateDHInvalidModGen.todo = "low-priority feature"
def test_associateWeirdSession(self):
args = {
'openid.mode': 'associate',
'openid.session_type': 'FLCL6',
'openid.dh_consumer_public': "YQ==\n",
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_associatePlain(self):
args = {
'openid.mode': 'associate',
}
r = self.decode(args)
self.assertTrue(isinstance(r, server.AssociateRequest))
self.assertEqual(r.mode, "associate")
self.assertEqual(r.session.session_type, "no-encryption")
self.assertEqual(r.assoc_type, "HMAC-SHA1")
def test_nomode(self):
args = {
'openid.session_type': 'DH-SHA1',
'openid.dh_consumer_public': "my public keeey",
}
self.assertRaises(server.ProtocolError, self.decode, args)
def test_invalidns(self):
args = {'openid.ns': 'Tuesday', 'openid.mode': 'associate'}
try:
r = self.decode(args)
except server.ProtocolError as err:
# Assert that the ProtocolError does have a Message attached
# to it, even though the request wasn't a well-formed Message.
self.assertTrue(err.openid_message)
# The error message contains the bad openid.ns.
self.assertTrue('Tuesday' in str(err), str(err))
else:
self.fail("Expected ProtocolError but returned with %r" % (r, ))
class TestEncode(unittest.TestCase):
def setUp(self):
self.encoder = server.Encoder()
self.encode = self.encoder.encode
self.op_endpoint = 'http://endpoint.unittest/encode'
self.store = memstore.MemoryStore()
self.server = server.Server(self.store, self.op_endpoint)
def test_id_res_OpenID2_GET(self):
"""
Check that when an OpenID 2 response does not exceed the
OpenID 1 message size, a GET response (i.e., redirect) is
issued.
"""
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'ns':
OPENID2_NS,
'mode':
'id_res',
'identity':
request.identity,
'claimed_id':
request.identity,
'return_to':
request.return_to,
})
self.assertFalse(response.renderAsForm())
self.assertTrue(response.whichEncoding() == server.ENCODE_URL)
webresponse = self.encode(response)
self.assertTrue('location' in webresponse.headers)
def test_id_res_OpenID2_POST(self):
"""
Check that when an OpenID 2 response exceeds the OpenID 1
message size, a POST response (i.e., an HTML form) is
returned.
"""
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'ns':
OPENID2_NS,
'mode':
'id_res',
'identity':
request.identity,
'claimed_id':
request.identity,
'return_to':
'x' * OPENID1_URL_LIMIT,
})
self.assertTrue(response.renderAsForm())
self.assertTrue(len(response.encodeToURL()) > OPENID1_URL_LIMIT)
self.assertTrue(response.whichEncoding() == server.ENCODE_HTML_FORM)
webresponse = self.encode(response)
self.assertTrue(response.toFormMarkup() in webresponse.body)
def test_toFormMarkup(self):
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'ns':
OPENID2_NS,
'mode':
'id_res',
'identity':
request.identity,
'claimed_id':
request.identity,
'return_to':
'x' * OPENID1_URL_LIMIT,
})
form_markup = response.toFormMarkup({'foo': 'bar'})
self.assertTrue(' foo="bar"' in form_markup)
def test_toHTML(self):
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'ns':
OPENID2_NS,
'mode':
'id_res',
'identity':
request.identity,
'claimed_id':
request.identity,
'return_to':
'x' * OPENID1_URL_LIMIT,
})
html = response.toHTML()
self.assertTrue('<html>' in html)
self.assertTrue('</html>' in html)
self.assertTrue('<body onload=' in html)
self.assertTrue('<form' in html)
self.assertTrue('http://bombom.unittest/' in html)
def test_id_res_OpenID1_exceeds_limit(self):
"""
Check that when an OpenID 1 response exceeds the OpenID 1
message size, a GET response is issued. Technically, this
shouldn't be permitted by the library, but this test is in
place to preserve the status quo for OpenID 1.
"""
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'mode':
'id_res',
'identity':
request.identity,
'return_to':
'x' * OPENID1_URL_LIMIT,
})
self.assertFalse(response.renderAsForm())
self.assertTrue(len(response.encodeToURL()) > OPENID1_URL_LIMIT)
self.assertTrue(response.whichEncoding() == server.ENCODE_URL)
webresponse = self.encode(response)
self.assertEqual(webresponse.headers['location'],
response.encodeToURL())
def test_id_res(self):
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'mode':
'id_res',
'identity':
request.identity,
'return_to':
request.return_to,
})
webresponse = self.encode(response)
self.assertEqual(webresponse.code, server.HTTP_REDIRECT)
self.assertTrue('location' in webresponse.headers)
location = webresponse.headers['location']
self.assertTrue(
location.startswith(request.return_to),
"%s does not start with %s" % (location, request.return_to))
# argh.
q2 = dict(parse_qsl(urlparse(location)[4]))
expected = response.fields.toPostArgs()
self.assertEqual(q2, expected)
def test_cancel(self):
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'mode': 'cancel',
})
webresponse = self.encode(response)
self.assertEqual(webresponse.code, server.HTTP_REDIRECT)
self.assertTrue('location' in webresponse.headers)
def test_cancelToForm(self):
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'mode': 'cancel',
})
form = response.toFormMarkup()
self.assertTrue(form)
def test_assocReply(self):
msg = Message(OPENID2_NS)
msg.setArg(OPENID2_NS, 'session_type', 'no-encryption')
request = server.AssociateRequest.fromMessage(msg)
response = server.OpenIDResponse(request)
response.fields = Message.fromPostArgs(
{
'openid.assoc_handle': "every-zig"
})
webresponse = self.encode(response)
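# Direct (non-browser) responses use OpenID key-value form encoding: one
# "key:value" pair per line, each line terminated by a newline.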
body = """assoc_handle:every-zig
"""
self.assertEqual(webresponse.code, server.HTTP_OK)
self.assertEqual(webresponse.headers, {})
self.assertEqual(webresponse.body, body)
def test_checkauthReply(self):
request = server.CheckAuthRequest('a_sock_monkey', 'siggggg', [])
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'is_valid':
'true',
'invalidate_handle':
'xXxX:xXXx'
})
body = """invalidate_handle:xXxX:xXXx
is_valid:true
"""
webresponse = self.encode(response)
self.assertEqual(webresponse.code, server.HTTP_OK)
self.assertEqual(webresponse.headers, {})
self.assertEqual(webresponse.body, body)
def test_unencodableError(self):
args = Message.fromPostArgs({
'openid.identity': 'http://limu.unittest/',
})
e = server.ProtocolError(args, "wet paint")
self.assertRaises(server.EncodingError, self.encode, e)
def test_encodableError(self):
args = Message.fromPostArgs({
'openid.mode': 'associate',
'openid.identity': 'http://limu.unittest/',
})
body = "error:snoot\nmode:error\n"
webresponse = self.encode(server.ProtocolError(args, "snoot"))
self.assertEqual(webresponse.code, server.HTTP_ERROR)
self.assertEqual(webresponse.headers, {})
self.assertEqual(webresponse.body, body)
class TestSigningEncode(unittest.TestCase):
def setUp(self):
self._dumb_key = server.Signatory._dumb_key
self._normal_key = server.Signatory._normal_key
self.store = memstore.MemoryStore()
self.server = server.Server(self.store, "http://signing.unittest/enc")
self.request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
self.request.message = Message(OPENID2_NS)
self.response = server.OpenIDResponse(self.request)
self.response.fields = Message.fromOpenIDArgs({
'mode':
'id_res',
'identity':
self.request.identity,
'return_to':
self.request.return_to,
})
self.signatory = server.Signatory(self.store)
self.encoder = server.SigningEncoder(self.signatory)
self.encode = self.encoder.encode
def test_idres(self):
assoc_handle = '{bicycle}{shed}'
self.store.storeAssociation(
self._normal_key,
association.Association.fromExpiresIn(60, assoc_handle, 'sekrit',
'HMAC-SHA1'))
self.request.assoc_handle = assoc_handle
webresponse = self.encode(self.response)
self.assertEqual(webresponse.code, server.HTTP_REDIRECT)
self.assertTrue('location' in webresponse.headers)
location = webresponse.headers['location']
query = parse_qs(urlparse(location)[4])
self.assertTrue('openid.sig' in query)
self.assertTrue('openid.assoc_handle' in query)
self.assertTrue('openid.signed' in query)
def test_idresDumb(self):
webresponse = self.encode(self.response)
self.assertEqual(webresponse.code, server.HTTP_REDIRECT)
self.assertTrue('location' in webresponse.headers)
location = webresponse.headers['location']
query = parse_qs(urlparse(location)[4])
self.assertTrue('openid.sig' in query)
self.assertTrue('openid.assoc_handle' in query)
self.assertTrue('openid.signed' in query)
def test_forgotStore(self):
self.encoder.signatory = None
self.assertRaises(ValueError, self.encode, self.response)
def test_cancel(self):
request = server.CheckIDRequest(
identity='http://bombom.unittest/',
trust_root='http://burr.unittest/',
return_to='http://burr.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
request.message = Message(OPENID2_NS)
response = server.OpenIDResponse(request)
response.fields.setArg(OPENID_NS, 'mode', 'cancel')
webresponse = self.encode(response)
self.assertEqual(webresponse.code, server.HTTP_REDIRECT)
self.assertTrue('location' in webresponse.headers)
location = webresponse.headers['location']
query = parse_qs(urlparse(location)[4])
self.assertFalse('openid.sig' in query, response.fields.toPostArgs())
def test_assocReply(self):
msg = Message(OPENID2_NS)
msg.setArg(OPENID2_NS, 'session_type', 'no-encryption')
request = server.AssociateRequest.fromMessage(msg)
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({'assoc_handle': "every-zig"})
webresponse = self.encode(response)
body = """assoc_handle:every-zig
"""
self.assertEqual(webresponse.code, server.HTTP_OK)
self.assertEqual(webresponse.headers, {})
self.assertEqual(webresponse.body, body)
def test_alreadySigned(self):
self.response.fields.setArg(OPENID_NS, 'sig', 'priorSig==')
self.assertRaises(server.AlreadySigned, self.encode, self.response)
class TestCheckID(unittest.TestCase):
def setUp(self):
self.op_endpoint = 'http://endpoint.unittest/'
self.store = memstore.MemoryStore()
self.server = server.Server(self.store, self.op_endpoint)
self.request = server.CheckIDRequest(
identity='http://bambam.unittest/',
trust_root='http://bar.unittest/',
return_to='http://bar.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
self.request.message = Message(OPENID2_NS)
def test_trustRootInvalid(self):
self.request.trust_root = "http://foo.unittest/17"
self.request.return_to = "http://foo.unittest/39"
self.assertFalse(self.request.trustRootValid())
def test_trustRootValid(self):
self.request.trust_root = "http://foo.unittest/"
self.request.return_to = "http://foo.unittest/39"
self.assertTrue(self.request.trustRootValid())
def test_malformedTrustRoot(self):
self.request.trust_root = "invalid://trust*root/"
self.request.return_to = "http://foo.unittest/39"
sentinel = object()
self.request.message = sentinel
try:
result = self.request.trustRootValid()
except server.MalformedTrustRoot as why:
self.assertTrue(sentinel is why.openid_message)
else:
self.fail('Expected MalformedTrustRoot exception. Got %r' %
(result, ))
def test_trustRootValidNoReturnTo(self):
request = server.CheckIDRequest(
identity='http://bambam.unittest/',
trust_root='http://bar.unittest/',
return_to=None,
immediate=False,
op_endpoint=self.server.op_endpoint, )
self.assertTrue(request.trustRootValid())
def test_returnToVerified_callsVerify(self):
"""Make sure that verifyReturnTo is calling the trustroot
function verifyReturnTo
"""
def withVerifyReturnTo(new_verify, callable):
old_verify = server.verifyReturnTo
try:
server.verifyReturnTo = new_verify
return callable()
finally:
server.verifyReturnTo = old_verify
# Ensure that exceptions are passed through
sentinel = Exception()
def vrfyExc(trust_root, return_to):
self.assertEqual(self.request.trust_root, trust_root)
self.assertEqual(self.request.return_to, return_to)
raise sentinel
try:
withVerifyReturnTo(vrfyExc, self.request.returnToVerified)
except Exception as e:
self.assertTrue(e is sentinel, e)
# Ensure that True and False are passed through unchanged
def constVerify(val):
def verify(trust_root, return_to):
self.assertEqual(self.request.trust_root, trust_root)
self.assertEqual(self.request.return_to, return_to)
return val
return verify
for val in [True, False]:
self.assertEqual(val,
withVerifyReturnTo(
constVerify(val),
self.request.returnToVerified))
def _expectAnswer(self, answer, identity=None, claimed_id=None):
expected_list = [
('mode', 'id_res'),
('return_to', self.request.return_to),
('op_endpoint', self.op_endpoint),
]
if identity:
expected_list.append(('identity', identity))
if claimed_id:
expected_list.append(('claimed_id', claimed_id))
else:
expected_list.append(('claimed_id', identity))
for k, expected in expected_list:
actual = answer.fields.getArg(OPENID_NS, k)
self.assertEqual(actual, expected,
"%s: expected %s, got %s" % (k, expected, actual))
self.assertTrue(answer.fields.hasKey(OPENID_NS, 'response_nonce'))
self.assertTrue(answer.fields.getOpenIDNamespace() == OPENID2_NS)
# One for nonce, one for ns
self.assertEqual(
len(answer.fields.toPostArgs()),
len(expected_list) + 2, answer.fields.toPostArgs())
def test_answerAllow(self):
"""Check the fields specified by "Positive Assertions"
including mode=id_res, identity, claimed_id, op_endpoint, return_to
"""
answer = self.request.answer(True)
self.assertEqual(answer.request, self.request)
self._expectAnswer(answer, self.request.identity)
def test_answerAllowDelegatedIdentity(self):
self.request.claimed_id = 'http://delegating.unittest/'
answer = self.request.answer(True)
self._expectAnswer(answer, self.request.identity,
self.request.claimed_id)
def test_answerAllowDelegatedIdentity2(self):
# This time with the identity argument explicitly passed in to
# answer()
self.request.claimed_id = 'http://delegating.unittest/'
answer = self.request.answer(True, identity='http://bambam.unittest/')
self._expectAnswer(answer, self.request.identity,
self.request.claimed_id)
def test_answerAllowWithoutIdentityReally(self):
self.request.identity = None
answer = self.request.answer(True)
self.assertEqual(answer.request, self.request)
self._expectAnswer(answer)
def test_answerAllowAnonymousFail(self):
self.request.identity = None
# XXX - Check on this, I think this behavior is legal in OpenID 2.0?
self.assertRaises(ValueError, self.request.answer, True, identity="=V")
def test_answerAllowWithIdentity(self):
self.request.identity = IDENTIFIER_SELECT
selected_id = 'http://anon.unittest/9861'
answer = self.request.answer(True, identity=selected_id)
self._expectAnswer(answer, selected_id)
def test_answerAllowWithDelegatedIdentityOpenID2(self):
"""Answer an IDENTIFIER_SELECT case with a delegated identifier.
"""
# claimed_id delegates to selected_id here.
self.request.identity = IDENTIFIER_SELECT
selected_id = 'http://anon.unittest/9861'
claimed_id = 'http://monkeyhat.unittest/'
answer = self.request.answer(
True, identity=selected_id, claimed_id=claimed_id)
self._expectAnswer(answer, selected_id, claimed_id)
def test_answerAllowWithDelegatedIdentityOpenID1(self):
"""claimed_id parameter doesn't exist in OpenID 1.
"""
self.request.message = Message(OPENID1_NS)
# claimed_id delegates to selected_id here.
self.request.identity = IDENTIFIER_SELECT
selected_id = 'http://anon.unittest/9861'
claimed_id = 'http://monkeyhat.unittest/'
self.assertRaises(
server.VersionError,
self.request.answer,
True,
identity=selected_id,
claimed_id=claimed_id)
def test_answerAllowWithAnotherIdentity(self):
# XXX - Check on this, I think this behavior is legal in OpenID 2.0?
self.assertRaises(
ValueError,
self.request.answer,
True,
identity="http://pebbles.unittest/")
def test_answerAllowWithIdentityNormalization(self):
# The RP has sent us a non-normalized value for openid.identity,
# and the library user is passing an explicit value for identity
# to CheckIDRequest.answer.
non_normalized = 'http://bambam.unittest'
normalized = non_normalized + '/'
self.request.identity = non_normalized
self.request.claimed_id = non_normalized
answer = self.request.answer(True, identity=normalized)
# Expect the values that were sent in the request, even though
# they're not normalized.
self._expectAnswer(
answer, identity=non_normalized, claimed_id=non_normalized)
def test_answerAllowNoIdentityOpenID1(self):
self.request.message = Message(OPENID1_NS)
self.request.identity = None
self.assertRaises(ValueError, self.request.answer, True, identity=None)
def test_answerAllowForgotEndpoint(self):
self.request.op_endpoint = None
self.assertRaises(RuntimeError, self.request.answer, True)
def test_checkIDWithNoIdentityOpenID1(self):
msg = Message(OPENID1_NS)
msg.setArg(OPENID_NS, 'return_to', 'bogus')
msg.setArg(OPENID_NS, 'trust_root', 'bogus')
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'assoc_handle', 'bogus')
self.assertRaises(server.ProtocolError,
server.CheckIDRequest.fromMessage, msg, self.server)
def test_fromMessageClaimedIDWithoutIdentityOpenID2(self):
name = 'https://example.myopenid.com'
msg = Message(OPENID2_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'return_to', 'http://invalid:8000/rt')
msg.setArg(OPENID_NS, 'claimed_id', name)
self.assertRaises(server.ProtocolError,
server.CheckIDRequest.fromMessage, msg, self.server)
def test_fromMessageIdentityWithoutClaimedIDOpenID2(self):
name = 'https://example.myopenid.com'
msg = Message(OPENID2_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'return_to', 'http://invalid:8000/rt')
msg.setArg(OPENID_NS, 'identity', name)
self.assertRaises(server.ProtocolError,
server.CheckIDRequest.fromMessage, msg, self.server)
def test_trustRootOpenID1(self):
"""Ignore openid.realm in OpenID 1"""
msg = Message(OPENID1_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'trust_root', 'http://real_trust_root/')
msg.setArg(OPENID_NS, 'realm', 'http://fake_trust_root/')
msg.setArg(OPENID_NS, 'return_to', 'http://real_trust_root/foo')
msg.setArg(OPENID_NS, 'assoc_handle', 'bogus')
msg.setArg(OPENID_NS, 'identity', 'george')
result = server.CheckIDRequest.fromMessage(msg,
self.server.op_endpoint)
self.assertTrue(result.trust_root == 'http://real_trust_root/')
def test_trustRootOpenID2(self):
"""Ignore openid.trust_root in OpenID 2"""
msg = Message(OPENID2_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'realm', 'http://real_trust_root/')
msg.setArg(OPENID_NS, 'trust_root', 'http://fake_trust_root/')
msg.setArg(OPENID_NS, 'return_to', 'http://real_trust_root/foo')
msg.setArg(OPENID_NS, 'assoc_handle', 'bogus')
msg.setArg(OPENID_NS, 'identity', 'george')
msg.setArg(OPENID_NS, 'claimed_id', 'george')
result = server.CheckIDRequest.fromMessage(msg,
self.server.op_endpoint)
self.assertTrue(result.trust_root == 'http://real_trust_root/')
def test_answerAllowNoTrustRoot(self):
self.request.trust_root = None
answer = self.request.answer(True)
self.assertEqual(answer.request, self.request)
self._expectAnswer(answer, self.request.identity)
def test_fromMessageWithoutTrustRoot(self):
msg = Message(OPENID2_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'return_to', 'http://real_trust_root/foo')
msg.setArg(OPENID_NS, 'assoc_handle', 'bogus')
msg.setArg(OPENID_NS, 'identity', 'george')
msg.setArg(OPENID_NS, 'claimed_id', 'george')
result = server.CheckIDRequest.fromMessage(msg,
self.server.op_endpoint)
self.assertEqual(result.trust_root, 'http://real_trust_root/foo')
def test_fromMessageWithEmptyTrustRoot(self):
return_to = 'http://someplace.invalid/?go=thing'
msg = Message.fromPostArgs({
'openid.assoc_handle':
'{blah}{blah}{OZivdQ==}',
'openid.claimed_id':
'http://delegated.invalid/',
'openid.identity':
'http://op-local.example.com/',
'openid.mode':
'checkid_setup',
'openid.ns':
'http://openid.net/signon/1.0',
'openid.return_to':
return_to,
'openid.trust_root':
''
})
result = server.CheckIDRequest.fromMessage(msg,
self.server.op_endpoint)
self.assertEqual(result.trust_root, return_to)
def test_fromMessageWithoutTrustRootOrReturnTo(self):
msg = Message(OPENID2_NS)
msg.setArg(OPENID_NS, 'mode', 'checkid_setup')
msg.setArg(OPENID_NS, 'assoc_handle', 'bogus')
msg.setArg(OPENID_NS, 'identity', 'george')
msg.setArg(OPENID_NS, 'claimed_id', 'george')
self.assertRaises(server.ProtocolError,
server.CheckIDRequest.fromMessage, msg,
self.server.op_endpoint)
def test_answerAllowNoEndpointOpenID1(self):
"""Test .allow() with an OpenID 1.x Message on a CheckIDRequest
built without an op_endpoint parameter.
"""
identity = 'http://bambam.unittest/'
reqmessage = Message.fromOpenIDArgs({
'identity':
identity,
'trust_root':
'http://bar.unittest/',
'return_to':
'http://bar.unittest/999',
})
self.request = server.CheckIDRequest.fromMessage(reqmessage, None)
answer = self.request.answer(True)
expected_list = [
('mode', 'id_res'),
('return_to', self.request.return_to),
('identity', identity),
]
for k, expected in expected_list:
actual = answer.fields.getArg(OPENID_NS, k)
self.assertEqual(expected, actual,
"%s: expected %s, got %s" % (k, expected, actual))
self.assertTrue(answer.fields.hasKey(OPENID_NS, 'response_nonce'))
self.assertEqual(answer.fields.getOpenIDNamespace(), OPENID1_NS)
self.assertTrue(answer.fields.namespaces.isImplicit(OPENID1_NS))
# One for nonce (OpenID v1 namespace is implicit)
self.assertEqual(
len(answer.fields.toPostArgs()),
len(expected_list) + 1, answer.fields.toPostArgs())
def test_answerImmediateDenyOpenID2(self):
"""Look for mode=setup_needed in checkid_immediate negative
response in OpenID 2 case.
See specification Responding to Authentication Requests /
Negative Assertions / In Response to Immediate Requests.
"""
self.request.mode = 'checkid_immediate'
self.request.immediate = True
self.request.claimed_id = 'http://claimed-id.test/'
server_url = "http://setup-url.unittest/"
# crappiting setup_url, you dirty my interface with your presence!
answer = self.request.answer(False, server_url=server_url)
self.assertEqual(answer.request, self.request)
self.assertEqual(len(answer.fields.toPostArgs()), 3, answer.fields)
self.assertEqual(answer.fields.getOpenIDNamespace(), OPENID2_NS)
self.assertEqual(
answer.fields.getArg(OPENID_NS, 'mode'), 'setup_needed')
usu = answer.fields.getArg(OPENID_NS, 'user_setup_url')
expected_substr = 'openid.claimed_id=http%3A%2F%2Fclaimed-id.test%2F'
self.assertTrue(expected_substr in usu, usu)
def test_answerImmediateDenyOpenID1(self):
"""Look for user_setup_url in checkid_immediate negative
response in OpenID 1 case."""
self.request.message = Message(OPENID1_NS)
self.request.mode = 'checkid_immediate'
self.request.immediate = True
server_url = "http://setup-url.unittest/"
# crappiting setup_url, you dirty my interface with your presence!
answer = self.request.answer(False, server_url=server_url)
self.assertEqual(answer.request, self.request)
self.assertEqual(len(answer.fields.toPostArgs()), 2, answer.fields)
self.assertEqual(answer.fields.getOpenIDNamespace(), OPENID1_NS)
self.assertTrue(answer.fields.namespaces.isImplicit(OPENID1_NS))
self.assertEqual(answer.fields.getArg(OPENID_NS, 'mode'), 'id_res')
self.assertTrue(
answer.fields.getArg(OPENID_NS, 'user_setup_url', '').startswith(
server_url))
def test_answerSetupDeny(self):
answer = self.request.answer(False)
expected = {'mode': 'cancel'}
self.assertEqual(answer.fields.getArgs(OPENID_NS), expected)
def test_encodeToURL(self):
server_url = 'http://openid-server.unittest/'
result = self.request.encodeToURL(server_url)
# How to check? How about a round-trip test.
base, result_args = result.split('?', 1)
result_args = dict(parse_qsl(result_args))
message = Message.fromPostArgs(result_args)
rebuilt_request = server.CheckIDRequest.fromMessage(
message, self.server.op_endpoint)
# argh, lousy hack
self.request.message = message
self.assertEqual(rebuilt_request.__dict__, self.request.__dict__)
def test_getCancelURL(self):
url = self.request.getCancelURL()
rt, query_string = url.split('?')
self.assertEqual(self.request.return_to, rt)
query = dict(parse_qsl(query_string))
self.assertEqual(query,
{'openid.mode': 'cancel',
'openid.ns': OPENID2_NS})
def test_getCancelURLimmed(self):
self.request.mode = 'checkid_immediate'
self.request.immediate = True
self.assertRaises(ValueError, self.request.getCancelURL)
class TestCheckIDExtension(unittest.TestCase):
def setUp(self):
self.op_endpoint = 'http://endpoint.unittest/ext'
self.store = memstore.MemoryStore()
self.server = server.Server(self.store, self.op_endpoint)
self.request = server.CheckIDRequest(
identity='http://bambam.unittest/',
trust_root='http://bar.unittest/',
return_to='http://bar.unittest/999',
immediate=False,
op_endpoint=self.server.op_endpoint, )
self.request.message = Message(OPENID2_NS)
self.response = server.OpenIDResponse(self.request)
self.response.fields.setArg(OPENID_NS, 'mode', 'id_res')
self.response.fields.setArg(OPENID_NS, 'blue', 'star')
def test_addField(self):
namespace = 'something:'
self.response.fields.setArg(namespace, 'bright', 'potato')
self.assertEqual(
self.response.fields.getArgs(OPENID_NS), {
'blue': 'star',
'mode': 'id_res',
})
self.assertEqual(
self.response.fields.getArgs(namespace), {'bright': 'potato'})
def test_addFields(self):
namespace = 'mi5:'
args = {'tangy': 'suspenders', 'bravo': 'inclusion'}
self.response.fields.updateArgs(namespace, args)
self.assertEqual(
self.response.fields.getArgs(OPENID_NS),
{'blue': 'star',
'mode': 'id_res'})
self.assertEqual(self.response.fields.getArgs(namespace), args)
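# Minimal stand-in for server.Signatory, used by TestCheckAuth below.
# Associations are tracked as (dumb, handle) tuples: verify() only succeeds
# for dumb (stateless) associations, like the real Signatory, and
# invalidate() drops the tuple, which is what makes the replay test below
# see is_valid:false on the second answer().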
class MockSignatory(object):
isValid = True
def __init__(self, assoc):
self.assocs = [assoc]
def verify(self, assoc_handle, message):
assert message.hasKey(OPENID_NS, "sig")
if (True, assoc_handle) in self.assocs:
return self.isValid
else:
return False
def getAssociation(self, assoc_handle, dumb):
if (dumb, assoc_handle) in self.assocs:
# This isn't a valid implementation for many uses of this
# function, mind you.
return True
else:
return None
def invalidate(self, assoc_handle, dumb):
if (dumb, assoc_handle) in self.assocs:
self.assocs.remove((dumb, assoc_handle))
class TestCheckAuth(unittest.TestCase):
def setUp(self):
self.assoc_handle = 'mooooooooo'
self.message = Message.fromPostArgs({
'openid.sig': 'signarture',
'one': 'alpha',
'two': 'beta',
})
self.request = server.CheckAuthRequest(self.assoc_handle, self.message)
self.signatory = MockSignatory((True, self.assoc_handle))
def test_valid(self):
r = self.request.answer(self.signatory)
self.assertEqual(r.fields.getArgs(OPENID_NS), {'is_valid': 'true'})
self.assertEqual(r.request, self.request)
def test_invalid(self):
self.signatory.isValid = False
r = self.request.answer(self.signatory)
self.assertEqual(r.fields.getArgs(OPENID_NS), {'is_valid': 'false'})
def test_replay(self):
"""Don't validate the same response twice.
From "Checking the Nonce"::
When using "check_authentication", the OP MUST ensure that an
assertion has not yet been accepted with the same value for
"openid.response_nonce".
In this implementation, the assoc_handle is only valid once. And
nonces are a signed component of the message, so they can't be used
with another handle without breaking the sig.
"""
r = self.request.answer(self.signatory)
r = self.request.answer(self.signatory)
self.assertEqual(r.fields.getArgs(OPENID_NS), {'is_valid': 'false'})
def test_invalidatehandle(self):
self.request.invalidate_handle = "bogusHandle"
r = self.request.answer(self.signatory)
self.assertEqual(
r.fields.getArgs(OPENID_NS),
{'is_valid': 'true',
'invalidate_handle': "bogusHandle"})
self.assertEqual(r.request, self.request)
def test_invalidatehandleNo(self):
assoc_handle = 'goodhandle'
self.signatory.assocs.append((False, 'goodhandle'))
self.request.invalidate_handle = assoc_handle
r = self.request.answer(self.signatory)
self.assertEqual(r.fields.getArgs(OPENID_NS), {'is_valid': 'true'})
class TestAssociate(unittest.TestCase):
# TODO: test DH with non-default values for modulus and gen.
# (important to do because we actually had it broken for a while.)
def setUp(self):
self.request = server.AssociateRequest.fromMessage(
Message.fromPostArgs({}))
self.store = memstore.MemoryStore()
self.signatory = server.Signatory(self.store)
def test_dhSHA1(self):
self.assoc = self.signatory.createAssociation(
dumb=False, assoc_type='HMAC-SHA1')
from openid.dh import DiffieHellman
from openid.server.server import DiffieHellmanSHA1ServerSession
consumer_dh = DiffieHellman.fromDefaults()
cpub = consumer_dh.public
server_dh = DiffieHellman.fromDefaults()
session = DiffieHellmanSHA1ServerSession(server_dh, cpub)
self.request = server.AssociateRequest(session, 'HMAC-SHA1')
response = self.request.answer(self.assoc)
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
self.assertEqual(rfg("assoc_type"), "HMAC-SHA1")
self.assertEqual(rfg("assoc_handle"), self.assoc.handle)
self.assertFalse(rfg("mac_key"))
self.assertEqual(rfg("session_type"), "DH-SHA1")
self.assertTrue(rfg("enc_mac_key"))
self.assertTrue(rfg("dh_server_public"))
enc_key = b64decode(rfg("enc_mac_key").encode("utf-8"))
spub = cryptutil.base64ToLong(rfg("dh_server_public"))
secret = consumer_dh.xorSecret(spub, enc_key, cryptutil.sha1)
self.assertEqual(secret, self.assoc.secret)
if not cryptutil.SHA256_AVAILABLE:
warnings.warn("Not running SHA256 tests.")
else:
def test_dhSHA256(self):
self.assoc = self.signatory.createAssociation(
dumb=False, assoc_type='HMAC-SHA256')
from openid.dh import DiffieHellman
from openid.server.server import DiffieHellmanSHA256ServerSession
consumer_dh = DiffieHellman.fromDefaults()
cpub = consumer_dh.public
server_dh = DiffieHellman.fromDefaults()
session = DiffieHellmanSHA256ServerSession(server_dh, cpub)
self.request = server.AssociateRequest(session, 'HMAC-SHA256')
response = self.request.answer(self.assoc)
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
self.assertEqual(rfg("assoc_type"), "HMAC-SHA256")
self.assertEqual(rfg("assoc_handle"), self.assoc.handle)
self.assertFalse(rfg("mac_key"))
self.assertEqual(rfg("session_type"), "DH-SHA256")
self.assertTrue(rfg("enc_mac_key"))
self.assertTrue(rfg("dh_server_public"))
enc_key = b64decode(rfg("enc_mac_key").encode("utf-8"))
spub = cryptutil.base64ToLong(rfg("dh_server_public"))
secret = consumer_dh.xorSecret(spub, enc_key, cryptutil.sha256)
self.assertEqual(secret, self.assoc.secret)
def test_protoError256(self):
from openid.consumer.consumer import \
DiffieHellmanSHA256ConsumerSession
s256_session = DiffieHellmanSHA256ConsumerSession()
invalid_s256 = {
'openid.assoc_type': 'HMAC-SHA1',
'openid.session_type': 'DH-SHA256'
}
invalid_s256.update(s256_session.getRequest())
invalid_s256_2 = {
'openid.assoc_type': 'MONKEY-PIRATE',
'openid.session_type': 'DH-SHA256'
}
invalid_s256_2.update(s256_session.getRequest())
bad_request_argss = [
invalid_s256,
invalid_s256_2,
]
for request_args in bad_request_argss:
message = Message.fromPostArgs(request_args)
self.assertRaises(server.ProtocolError,
server.AssociateRequest.fromMessage, message)
def test_protoError(self):
from openid.consumer.consumer import DiffieHellmanSHA1ConsumerSession
s1_session = DiffieHellmanSHA1ConsumerSession()
invalid_s1 = {
'openid.assoc_type': 'HMAC-SHA256',
'openid.session_type': 'DH-SHA1'
}
invalid_s1.update(s1_session.getRequest())
invalid_s1_2 = {
'openid.assoc_type': 'ROBOT-NINJA',
'openid.session_type': 'DH-SHA1'
}
invalid_s1_2.update(s1_session.getRequest())
bad_request_argss = [
{
'openid.assoc_type': 'Wha?'
},
invalid_s1,
invalid_s1_2,
]
for request_args in bad_request_argss:
message = Message.fromPostArgs(request_args)
self.assertRaises(server.ProtocolError,
server.AssociateRequest.fromMessage, message)
def test_protoErrorFields(self):
contact = '[email protected]'
reference = 'Trac ticket number MAX_INT'
error = 'poltergeist'
openid1_args = {
'openid.identity': 'invalid',
'openid.mode': 'checkid_setup',
}
openid2_args = dict(openid1_args)
openid2_args.update({'openid.ns': OPENID2_NS})
# Check presence of optional fields in both protocol versions
openid1_msg = Message.fromPostArgs(openid1_args)
p = server.ProtocolError(
openid1_msg, error, contact=contact, reference=reference)
reply = p.toMessage()
self.assertEqual(reply.getArg(OPENID_NS, 'reference'), reference)
self.assertEqual(reply.getArg(OPENID_NS, 'contact'), contact)
openid2_msg = Message.fromPostArgs(openid2_args)
p = server.ProtocolError(
openid2_msg, error, contact=contact, reference=reference)
reply = p.toMessage()
self.assertEqual(reply.getArg(OPENID_NS, 'reference'), reference)
self.assertEqual(reply.getArg(OPENID_NS, 'contact'), contact)
def failUnlessExpiresInMatches(self, msg, expected_expires_in):
expires_in_str = msg.getArg(OPENID_NS, 'expires_in', no_default)
expires_in = int(expires_in_str)
# Slop is necessary because the tests can sometimes get run
# right on a second boundary
slop = 1 # second
difference = expected_expires_in - expires_in
error_message = ('"expires_in" value not within %s of expected: '
'expected=%s, actual=%s' %
(slop, expected_expires_in, expires_in))
self.assertTrue(0 <= difference <= slop, error_message)
def test_plaintext(self):
self.assoc = self.signatory.createAssociation(
dumb=False, assoc_type='HMAC-SHA1')
response = self.request.answer(self.assoc)
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
self.assertEqual(rfg("assoc_type"), "HMAC-SHA1")
self.assertEqual(rfg("assoc_handle"), self.assoc.handle)
self.failUnlessExpiresInMatches(response.fields,
self.signatory.SECRET_LIFETIME)
# remember, oidutil.toBase64 returns bytes...
r_mac_key = rfg("mac_key").encode('utf-8')
self.assertEqual(r_mac_key, oidutil.toBase64(self.assoc.secret))
self.assertFalse(rfg("session_type"))
self.assertFalse(rfg("enc_mac_key"))
self.assertFalse(rfg("dh_server_public"))
def test_plaintext_v2(self):
# The main difference between this and the v1 test is that
# session_type is always returned in v2.
args = {
'openid.ns': OPENID2_NS,
'openid.mode': 'associate',
'openid.assoc_type': 'HMAC-SHA1',
'openid.session_type': 'no-encryption',
}
self.request = server.AssociateRequest.fromMessage(
Message.fromPostArgs(args))
self.assertFalse(self.request.message.isOpenID1())
self.assoc = self.signatory.createAssociation(
dumb=False, assoc_type='HMAC-SHA1')
response = self.request.answer(self.assoc)
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
self.assertEqual(rfg("assoc_type"), "HMAC-SHA1")
self.assertEqual(rfg("assoc_handle"), self.assoc.handle)
self.failUnlessExpiresInMatches(response.fields,
self.signatory.SECRET_LIFETIME)
# rfg gets from the response which will return str; oidutil.toBase64
# returns bytes. Make them comparable by bytes-ifying the mac_key
r_mac_key = rfg("mac_key").encode('utf-8')
self.assertEqual(r_mac_key, oidutil.toBase64(self.assoc.secret))
self.assertEqual(rfg("session_type"), "no-encryption")
self.assertFalse(rfg("enc_mac_key"))
self.assertFalse(rfg("dh_server_public"))
def test_plaintext256(self):
self.assoc = self.signatory.createAssociation(
dumb=False, assoc_type='HMAC-SHA256')
response = self.request.answer(self.assoc)
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
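# Note: setUp() builds the request from an empty message, so the request's
# assoc_type defaults to HMAC-SHA1, and answer() echoes the request's
# assoc_type rather than the association's.  Hence HMAC-SHA1 is expected
# here even though the stored association was created with HMAC-SHA256.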
self.assertEqual(rfg("assoc_type"), "HMAC-SHA1")
self.assertEqual(rfg("assoc_handle"), self.assoc.handle)
self.failUnlessExpiresInMatches(response.fields,
self.signatory.SECRET_LIFETIME)
# remember, oidutil.toBase64 returns bytes...
r_mac_key = rfg("mac_key").encode("utf-8")
self.assertEqual(r_mac_key, oidutil.toBase64(self.assoc.secret))
self.assertFalse(rfg("session_type"))
self.assertFalse(rfg("enc_mac_key"))
self.assertFalse(rfg("dh_server_public"))
def test_unsupportedPrefer(self):
allowed_assoc = 'COLD-PET-RAT'
allowed_sess = 'FROG-BONES'
message = 'This is a unit test'
# Set an OpenID 2 message so answerUnsupported doesn't raise
# ProtocolError.
self.request.message = Message(OPENID2_NS)
response = self.request.answerUnsupported(
message=message,
preferred_session_type=allowed_sess,
preferred_association_type=allowed_assoc, )
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
self.assertEqual(rfg('error_code'), 'unsupported-type')
self.assertEqual(rfg('assoc_type'), allowed_assoc)
self.assertEqual(rfg('error'), message)
self.assertEqual(rfg('session_type'), allowed_sess)
def test_unsupported(self):
message = 'This is a unit test'
# Set an OpenID 2 message so answerUnsupported doesn't raise
# ProtocolError.
self.request.message = Message(OPENID2_NS)
response = self.request.answerUnsupported(message)
rfg = lambda f: response.fields.getArg(OPENID_NS, f)
self.assertEqual(rfg('error_code'), 'unsupported-type')
self.assertEqual(rfg('assoc_type'), None)
self.assertEqual(rfg('error'), message)
self.assertEqual(rfg('session_type'), None)
class Counter(object):
def __init__(self):
self.count = 0
def inc(self):
self.count += 1
class TestServer(unittest.TestCase, CatchLogs):
def setUp(self):
self.store = memstore.MemoryStore()
self.server = server.Server(self.store, "http://server.unittest/endpt")
CatchLogs.setUp(self)
def test_dispatch(self):
monkeycalled = Counter()
def monkeyDo(request):
monkeycalled.inc()
r = server.OpenIDResponse(request)
return r
self.server.openid_monkeymode = monkeyDo
request = server.OpenIDRequest()
request.mode = "monkeymode"
request.namespace = OPENID1_NS
webresult = self.server.handleRequest(request)
self.assertEqual(monkeycalled.count, 1)
def test_associate(self):
request = server.AssociateRequest.fromMessage(Message.fromPostArgs({}))
response = self.server.openid_associate(request)
self.assertTrue(
response.fields.hasKey(OPENID_NS, "assoc_handle"),
"No assoc_handle here: %s" % (response.fields, ))
def test_associate2(self):
"""Associate when the server has no allowed association types
Gives back an error with error_code and no fallback session or
assoc types."""
self.server.negotiator.setAllowedTypes([])
# Set an OpenID 2 message so answerUnsupported doesn't raise
# ProtocolError.
msg = Message.fromPostArgs({
'openid.ns': OPENID2_NS,
'openid.session_type': 'no-encryption',
})
request = server.AssociateRequest.fromMessage(msg)
response = self.server.openid_associate(request)
self.assertTrue(response.fields.hasKey(OPENID_NS, "error"))
self.assertTrue(response.fields.hasKey(OPENID_NS, "error_code"))
self.assertFalse(response.fields.hasKey(OPENID_NS, "assoc_handle"))
self.assertFalse(response.fields.hasKey(OPENID_NS, "assoc_type"))
self.assertFalse(response.fields.hasKey(OPENID_NS, "session_type"))
def test_associate3(self):
"""Request an assoc type that is not supported when there are
supported types.
Should give back an error message with a fallback type.
"""
self.server.negotiator.setAllowedTypes([('HMAC-SHA256', 'DH-SHA256')])
msg = Message.fromPostArgs({
'openid.ns': OPENID2_NS,
'openid.session_type': 'no-encryption',
})
request = server.AssociateRequest.fromMessage(msg)
response = self.server.openid_associate(request)
self.assertTrue(response.fields.hasKey(OPENID_NS, "error"))
self.assertTrue(response.fields.hasKey(OPENID_NS, "error_code"))
self.assertFalse(response.fields.hasKey(OPENID_NS, "assoc_handle"))
self.assertEqual(
response.fields.getArg(OPENID_NS, "assoc_type"), 'HMAC-SHA256')
self.assertEqual(
response.fields.getArg(OPENID_NS, "session_type"), 'DH-SHA256')
if not cryptutil.SHA256_AVAILABLE:
warnings.warn("Not running SHA256 tests.")
else:
def test_associate4(self):
"""DH-SHA256 association session"""
self.server.negotiator.setAllowedTypes(
[('HMAC-SHA256', 'DH-SHA256')])
query = {
'openid.dh_consumer_public':
'ALZgnx8N5Lgd7pCj8K86T/DDMFjJXSss1SKoLmxE72kJTzOtG6I2PaYrHX'
'xku4jMQWSsGfLJxwCZ6280uYjUST/9NWmuAfcrBfmDHIBc3H8xh6RBnlXJ'
'1WxJY3jHd5k1/ZReyRZOxZTKdF/dnIqwF8ZXUwI6peV0TyS/K1fOfF/s',
'openid.assoc_type':
'HMAC-SHA256',
'openid.session_type':
'DH-SHA256',
}
message = Message.fromPostArgs(query)
request = server.AssociateRequest.fromMessage(message)
response = self.server.openid_associate(request)
self.assertTrue(response.fields.hasKey(OPENID_NS, "assoc_handle"))
def test_missingSessionTypeOpenID2(self):
"""Make sure session_type is required in OpenID 2"""
msg = Message.fromPostArgs({
'openid.ns': OPENID2_NS,
})
self.assertRaises(server.ProtocolError,
server.AssociateRequest.fromMessage, msg)
def test_checkAuth(self):
request = server.CheckAuthRequest('arrrrrf', '0x3999', [])
response = self.server.openid_check_authentication(request)
self.assertTrue(response.fields.hasKey(OPENID_NS, "is_valid"))
class TestSignatory(unittest.TestCase, CatchLogs):
def setUp(self):
self.store = memstore.MemoryStore()
self.signatory = server.Signatory(self.store)
self._dumb_key = self.signatory._dumb_key
self._normal_key = self.signatory._normal_key
CatchLogs.setUp(self)
def test_sign(self):
request = server.OpenIDRequest()
assoc_handle = '{assoc}{lookatme}'
self.store.storeAssociation(
self._normal_key,
association.Association.fromExpiresIn(60, assoc_handle, 'sekrit',
'HMAC-SHA1'))
request.assoc_handle = assoc_handle
request.namespace = OPENID1_NS
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'foo': 'amsigned',
'bar': 'notsigned',
'azu': 'alsosigned',
})
sresponse = self.signatory.sign(response)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'assoc_handle'), assoc_handle)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'signed'),
'assoc_handle,azu,bar,foo,signed')
self.assertTrue(sresponse.fields.getArg(OPENID_NS, 'sig'))
self.assertFalse(self.messages, self.messages)
def test_signDumb(self):
request = server.OpenIDRequest()
request.assoc_handle = None
request.namespace = OPENID2_NS
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'foo': 'amsigned',
'bar': 'notsigned',
'azu': 'alsosigned',
'ns': OPENID2_NS,
})
sresponse = self.signatory.sign(response)
assoc_handle = sresponse.fields.getArg(OPENID_NS, 'assoc_handle')
self.assertTrue(assoc_handle)
assoc = self.signatory.getAssociation(assoc_handle, dumb=True)
self.assertTrue(assoc)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'signed'),
'assoc_handle,azu,bar,foo,ns,signed')
self.assertTrue(sresponse.fields.getArg(OPENID_NS, 'sig'))
self.assertFalse(self.messages, self.messages)
def test_signExpired(self):
"""
Sign a response to a message with an expired handle (using
invalidate_handle).
From "Verifying with an Association"::
If an authentication request included an association handle for an
association between the OP and the Relying party, and the OP no
longer wishes to use that handle (because it has expired or the
secret has been compromised, for instance), the OP will send a
response that must be verified directly with the OP, as specified
in Section 11.3.2. In that instance, the OP will include the field
"openid.invalidate_handle" set to the association handle that the
Relying Party included with the original request.
"""
request = server.OpenIDRequest()
request.namespace = OPENID2_NS
assoc_handle = '{assoc}{lookatme}'
self.store.storeAssociation(
self._normal_key,
association.Association.fromExpiresIn(-10, assoc_handle, 'sekrit',
'HMAC-SHA1'))
self.assertTrue(
self.store.getAssociation(self._normal_key, assoc_handle))
request.assoc_handle = assoc_handle
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'foo': 'amsigned',
'bar': 'notsigned',
'azu': 'alsosigned',
})
sresponse = self.signatory.sign(response)
new_assoc_handle = sresponse.fields.getArg(OPENID_NS, 'assoc_handle')
self.assertTrue(new_assoc_handle)
self.assertNotEqual(new_assoc_handle, assoc_handle)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'invalidate_handle'),
assoc_handle)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'signed'),
'assoc_handle,azu,bar,foo,invalidate_handle,signed')
self.assertTrue(sresponse.fields.getArg(OPENID_NS, 'sig'))
# make sure the expired association is gone
self.assertFalse(
self.store.getAssociation(self._normal_key, assoc_handle),
"expired association is still retrievable.")
# make sure the new key is a dumb mode association
self.assertTrue(
self.store.getAssociation(self._dumb_key, new_assoc_handle))
self.assertFalse(
self.store.getAssociation(self._normal_key, new_assoc_handle))
self.assertTrue(self.messages)
def test_signInvalidHandle(self):
request = server.OpenIDRequest()
request.namespace = OPENID2_NS
assoc_handle = '{bogus-assoc}{notvalid}'
request.assoc_handle = assoc_handle
response = server.OpenIDResponse(request)
response.fields = Message.fromOpenIDArgs({
'foo': 'amsigned',
'bar': 'notsigned',
'azu': 'alsosigned',
})
sresponse = self.signatory.sign(response)
new_assoc_handle = sresponse.fields.getArg(OPENID_NS, 'assoc_handle')
self.assertTrue(new_assoc_handle)
self.assertNotEqual(new_assoc_handle, assoc_handle)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'invalidate_handle'),
assoc_handle)
self.assertEqual(
sresponse.fields.getArg(OPENID_NS, 'signed'),
'assoc_handle,azu,bar,foo,invalidate_handle,signed')
self.assertTrue(sresponse.fields.getArg(OPENID_NS, 'sig'))
# make sure the new key is a dumb mode association
self.assertTrue(
self.store.getAssociation(self._dumb_key, new_assoc_handle))
self.assertFalse(
self.store.getAssociation(self._normal_key, new_assoc_handle))
self.assertFalse(self.messages, self.messages)
def test_verify(self):
assoc_handle = '{vroom}{zoom}'
assoc = association.Association.fromExpiresIn(60, assoc_handle,
'sekrit', 'HMAC-SHA1')
self.store.storeAssociation(self._dumb_key, assoc)
signed = Message.fromPostArgs({
'openid.foo':
'bar',
'openid.apple':
'orange',
'openid.assoc_handle':
assoc_handle,
'openid.signed':
'apple,assoc_handle,foo,signed',
'openid.sig':
'uXoT1qm62/BB09Xbj98TQ8mlBco=',
})
verified = self.signatory.verify(assoc_handle, signed)
self.assertFalse(self.messages, self.messages)
self.assertTrue(verified)
def test_verifyBadSig(self):
assoc_handle = '{vroom}{zoom}'
assoc = association.Association.fromExpiresIn(60, assoc_handle,
'sekrit', 'HMAC-SHA1')
self.store.storeAssociation(self._dumb_key, assoc)
signed = Message.fromPostArgs({
'openid.foo':
'bar',
'openid.apple':
'orange',
'openid.assoc_handle':
assoc_handle,
'openid.signed':
'apple,assoc_handle,foo,signed',
'openid.sig':
'uXoT1qm62/BB09Xbj98TQ8mlBco=' [::-1],
})
verified = self.signatory.verify(assoc_handle, signed)
self.assertFalse(self.messages, self.messages)
self.assertFalse(verified)
def test_verifyBadHandle(self):
assoc_handle = '{vroom}{zoom}'
signed = Message.fromPostArgs({
'foo':
'bar',
'apple':
'orange',
'openid.sig':
"Ylu0KcIR7PvNegB/K41KpnRgJl0=",
})
verified = self.signatory.verify(assoc_handle, signed)
self.assertFalse(verified)
self.assertTrue(self.messages)
def test_verifyAssocMismatch(self):
"""Attempt to validate sign-all message with a signed-list assoc."""
assoc_handle = '{vroom}{zoom}'
assoc = association.Association.fromExpiresIn(60, assoc_handle,
'sekrit', 'HMAC-SHA1')
self.store.storeAssociation(self._dumb_key, assoc)
signed = Message.fromPostArgs({
'foo':
'bar',
'apple':
'orange',
'openid.sig':
"d71xlHtqnq98DonoSgoK/nD+QRM=",
})
verified = self.signatory.verify(assoc_handle, signed)
self.assertFalse(verified)
self.assertTrue(self.messages)
def test_getAssoc(self):
assoc_handle = self.makeAssoc(dumb=True)
assoc = self.signatory.getAssociation(assoc_handle, True)
self.assertTrue(assoc)
self.assertEqual(assoc.handle, assoc_handle)
self.assertFalse(self.messages, self.messages)
def test_getAssocExpired(self):
assoc_handle = self.makeAssoc(dumb=True, lifetime=-10)
assoc = self.signatory.getAssociation(assoc_handle, True)
self.assertFalse(assoc, assoc)
self.assertTrue(self.messages)
def test_getAssocInvalid(self):
ah = 'no-such-handle'
self.assertEqual(self.signatory.getAssociation(ah, dumb=False), None)
self.assertFalse(self.messages, self.messages)
def test_getAssocDumbVsNormal(self):
"""getAssociation(dumb=False) cannot get a dumb assoc"""
assoc_handle = self.makeAssoc(dumb=True)
self.assertEqual(
self.signatory.getAssociation(assoc_handle, dumb=False), None)
self.assertFalse(self.messages, self.messages)
def test_getAssocNormalVsDumb(self):
"""getAssociation(dumb=True) cannot get a shared assoc
From "Verifying Directly with the OpenID Provider"::
An OP MUST NOT verify signatures for associations that have shared
MAC keys.
"""
assoc_handle = self.makeAssoc(dumb=False)
self.assertEqual(
self.signatory.getAssociation(assoc_handle, dumb=True), None)
self.assertFalse(self.messages, self.messages)
def test_createAssociation(self):
assoc = self.signatory.createAssociation(dumb=False)
self.assertTrue(
self.signatory.getAssociation(assoc.handle, dumb=False))
self.assertFalse(self.messages, self.messages)
def makeAssoc(self, dumb, lifetime=60):
assoc_handle = '{bling}'
assoc = association.Association.fromExpiresIn(lifetime, assoc_handle,
'sekrit', 'HMAC-SHA1')
self.store.storeAssociation((dumb and self._dumb_key) or
self._normal_key, assoc)
return assoc_handle
def test_invalidate(self):
assoc_handle = '-squash-'
assoc = association.Association.fromExpiresIn(60, assoc_handle,
'sekrit', 'HMAC-SHA1')
self.store.storeAssociation(self._dumb_key, assoc)
assoc = self.signatory.getAssociation(assoc_handle, dumb=True)
self.assertTrue(assoc)
assoc = self.signatory.getAssociation(assoc_handle, dumb=True)
self.assertTrue(assoc)
self.signatory.invalidate(assoc_handle, dumb=True)
assoc = self.signatory.getAssociation(assoc_handle, dumb=True)
self.assertFalse(assoc)
self.assertFalse(self.messages, self.messages)
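# ----------------------------------------------------------------------------
# Illustrative sketch, not part of the test suite above: the tests exercise
# decoding, request handling, signing and encoding in isolation.  A minimal
# OP endpoint would glue the same pieces together roughly as below.  The
# `query` dict and the `is_authorized` callback are hypothetical placeholders
# for whatever the surrounding web framework provides.
def handle_openid_query_sketch(oserver, query, is_authorized):
    request = oserver.decodeRequest(query)
    if request is None:
        # Not an OpenID message; let the web framework serve the page.
        return None
    if request.mode in ('checkid_setup', 'checkid_immediate'):
        # The application decides whether the user is authenticated; the
        # library builds (and the SigningEncoder signs) the assertion.
        response = request.answer(is_authorized(request))
    else:
        # associate and check_authentication are handled by the library.
        response = oserver.handleRequest(request)
    return oserver.encodeResponse(response)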
if __name__ == '__main__':
unittest.main()
|
|
import logging
from django.conf import settings
from django.db.models import Subquery, OuterRef, Sum
from django.db.models.expressions import Value
from django.utils.translation import gettext_lazy as _
from mapentity.views import (MapEntityLayer, MapEntityList, MapEntityJsonList, MapEntityFormat, MapEntityViewSet,
MapEntityDetail, MapEntityDocument, MapEntityCreate, MapEntityUpdate, MapEntityDelete)
from geotrek.altimetry.models import AltimetryMixin
from geotrek.common.mixins import CustomColumnsMixin
from geotrek.common.views import FormsetMixin
from geotrek.authent.decorators import same_structure_required
from .models import Intervention, Project, ManDay
from .filters import InterventionFilterSet, ProjectFilterSet
from .forms import (InterventionForm, ProjectForm,
FundingFormSet, ManDayFormSet)
from .serializers import (InterventionSerializer, ProjectSerializer,
InterventionGeojsonSerializer, ProjectGeojsonSerializer)
from rest_framework import permissions as rest_permissions
logger = logging.getLogger(__name__)
class InterventionLayer(MapEntityLayer):
queryset = Intervention.objects.existing()
filterform = InterventionFilterSet
properties = ['name']
class InterventionList(CustomColumnsMixin, MapEntityList):
queryset = Intervention.objects.existing()
filterform = InterventionFilterSet
mandatory_columns = ['id', 'name']
default_extra_columns = ['date', 'type', 'target', 'status', 'stake']
class InterventionJsonList(MapEntityJsonList, InterventionList):
pass
class InterventionFormatList(MapEntityFormat, InterventionList):
@classmethod
def build_cost_column_name(cls, job_name):
return f"{_('Cost')} {job_name}"
def get_queryset(self):
"""Returns all interventions joined with a new column for each job, to record the total cost of each job in each intervention"""
queryset = Intervention.objects.existing()
if settings.ENABLE_JOBS_COSTS_DETAILED_EXPORT:
# Get all jobs that are used in interventions, as unique names, ids and costs
all_mandays = ManDay.objects.all()
jobs_used_in_interventions = list(
set(all_mandays.values_list("job__job", "job_id", "job__cost"))
)
# Iter over unique jobs
for job_name, job_id, job_cost in jobs_used_in_interventions:
# Create column name for current job cost
column_name = self.build_cost_column_name(job_name)
# Create subquery to retrieve total cost of mandays for a given intervention and a given job
mandays_query = (
ManDay.objects.filter(intervention=OuterRef("pk"), job_id=job_id) # Extract all mandays for a given intervention and a given job
.values("job_id") # Group by job
.annotate(total_days=Sum("nb_days")) # Select number of days worked
.values("total_days") # Rename result as total_days
)
# Use total_days and job cost to calculate total cost for a given intervention and a given job
job_cost_query = Subquery(mandays_query) * Value(job_cost)
# Annotate queryset with this cost query
params = {column_name: job_cost_query}
queryset = queryset.annotate(**params)
return queryset
@classmethod
def get_mandatory_columns(cls):
mandatory_columns = ['id']
if settings.ENABLE_JOBS_COSTS_DETAILED_EXPORT:
all_mandays = ManDay.objects.all() # Used to find all jobs that ARE USED in interventions
# Get all jobs that are used in interventions, as unique names
jobs_as_names = list(
set(all_mandays.values_list("job__job", flat=True))
)
# Create column names for each unique job cost
cost_column_names = list(map(cls.build_cost_column_name, jobs_as_names))
# Add these column names to export
mandatory_columns = mandatory_columns + cost_column_names
return mandatory_columns
default_extra_columns = [
'name', 'date', 'type', 'target', 'status', 'stake',
'disorders', 'total_manday', 'project', 'subcontracting',
'width', 'height', 'area', 'structure',
'description', 'date_insert', 'date_update',
'material_cost', 'heliport_cost', 'subcontract_cost',
'total_cost_mandays', 'total_cost',
'cities', 'districts', 'areas',
] + AltimetryMixin.COLUMNS
class InterventionDetail(MapEntityDetail):
queryset = Intervention.objects.existing()
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['can_edit'] = self.get_object().same_structure(self.request.user)
return context
class InterventionDocument(MapEntityDocument):
model = Intervention
class ManDayFormsetMixin(FormsetMixin):
context_name = 'manday_formset'
formset_class = ManDayFormSet
class InterventionCreate(ManDayFormsetMixin, MapEntityCreate):
model = Intervention
form_class = InterventionForm
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
if 'target_id' in self.request.GET and 'target_type' in self.request.GET:
# Create intervention on an existing infrastructure
kwargs['target_id'] = self.request.GET['target_id']
kwargs['target_type'] = self.request.GET['target_type']
return kwargs
class InterventionUpdate(ManDayFormsetMixin, MapEntityUpdate):
queryset = Intervention.objects.existing()
form_class = InterventionForm
@same_structure_required('maintenance:intervention_detail')
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
class InterventionDelete(MapEntityDelete):
model = Intervention
@same_structure_required('maintenance:intervention_detail')
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
class InterventionViewSet(MapEntityViewSet):
model = Intervention
queryset = Intervention.objects.existing()
serializer_class = InterventionSerializer
geojson_serializer_class = InterventionGeojsonSerializer
permission_classes = [rest_permissions.DjangoModelPermissionsOrAnonReadOnly]
def get_queryset(self):
# Override annotation done by MapEntityViewSet.get_queryset()
return Intervention.objects.all()
class ProjectLayer(MapEntityLayer):
queryset = Project.objects.existing()
properties = ['name']
def get_queryset(self):
nonemptyqs = Intervention.objects.existing().filter(project__isnull=False).values('project')
return super().get_queryset().filter(pk__in=nonemptyqs)
class ProjectList(CustomColumnsMixin, MapEntityList):
queryset = Project.objects.existing()
filterform = ProjectFilterSet
mandatory_columns = ['id', 'name']
default_extra_columns = ['period', 'type', 'domain']
class ProjectJsonList(MapEntityJsonList, ProjectList):
pass
class ProjectFormatList(MapEntityFormat, ProjectList):
mandatory_columns = ['id']
default_extra_columns = [
'structure', 'name', 'period', 'type', 'domain', 'constraint', 'global_cost',
'interventions', 'interventions_total_cost', 'comments', 'contractors',
'project_owner', 'project_manager', 'founders',
'date_insert', 'date_update',
'cities', 'districts', 'areas',
]
class ProjectDetail(MapEntityDetail):
queryset = Project.objects.existing()
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['can_edit'] = self.get_object().same_structure(self.request.user)
context['empty_map_message'] = _("No intervention related.")
return context
class ProjectDocument(MapEntityDocument):
model = Project
class FundingFormsetMixin(FormsetMixin):
context_name = 'funding_formset'
formset_class = FundingFormSet
class ProjectCreate(FundingFormsetMixin, MapEntityCreate):
model = Project
form_class = ProjectForm
class ProjectUpdate(FundingFormsetMixin, MapEntityUpdate):
queryset = Project.objects.existing()
form_class = ProjectForm
@same_structure_required('maintenance:project_detail')
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
class ProjectDelete(MapEntityDelete):
model = Project
@same_structure_required('maintenance:project_detail')
def dispatch(self, *args, **kwargs):
return super().dispatch(*args, **kwargs)
class ProjectViewSet(MapEntityViewSet):
model = Project
queryset = Project.objects.existing()
serializer_class = ProjectSerializer
geojson_serializer_class = ProjectGeojsonSerializer
permission_classes = [rest_permissions.DjangoModelPermissionsOrAnonReadOnly]
def get_queryset(self):
# Override annotation done by MapEntityViewSet.get_queryset()
return Project.objects.all()
|
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test Java compilation with a live Java 1.5 "javac" compiler.
"""
import os
import sys
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
where_javac, java_version = test.java_where_javac('1.5')
test.write('SConstruct', """
env = Environment(tools = ['javac'],
JAVAVERSION = '1.5',
JAVAC = r'%(where_javac)s')
env.Java(target = 'class1', source = 'com/sub/foo')
env.Java(target = 'class2', source = 'com/sub/bar')
env.Java(target = 'class3', source = ['src1', 'src2'])
env.Java(target = 'class4', source = ['src4'])
env.Java(target = 'class5', source = ['src5'])
env.Java(target = 'class6', source = ['src6'])
""" % locals())
test.subdir('com',
['com', 'sub'],
['com', 'sub', 'foo'],
['com', 'sub', 'bar'],
'src1',
'src2',
'src4',
'src5',
'src6')
test.write(['com', 'sub', 'foo', 'Example1.java'], """\
package com.sub.foo;
public class Example1
{
public static void main(String[] args)
{
}
}
""")
test.write(['com', 'sub', 'foo', 'Example2.java'], """\
package com.other;
public class Example2
{
public static void main(String[] args)
{
}
}
""")
test.write(['com', 'sub', 'foo', 'Example3.java'], """\
package com.sub.foo;
public class Example3
{
public static void main(String[] args)
{
}
}
""")
test.write(['com', 'sub', 'bar', 'Example4.java'], """\
package com.sub.bar;
public class Example4
{
public static void main(String[] args)
{
}
}
""")
test.write(['com', 'sub', 'bar', 'Example5.java'], """\
package com.other;
public class Example5
{
public static void main(String[] args)
{
}
}
""")
test.write(['com', 'sub', 'bar', 'Example6.java'], """\
package com.sub.bar;
public class Example6
{
public static void main(String[] args)
{
}
}
""")
test.write(['src1', 'Example7.java'], """\
public class Example7
{
public static void main(String[] args)
{
}
}
""")
# Acid-test file for parsing inner Java classes, courtesy Chad Austin.
test.write(['src2', 'Test.java'], """\
class Empty {
}
interface Listener {
public void execute();
}
public
class
Test {
class Inner {
void go() {
use(new Listener() {
public void execute() {
System.out.println("In Inner");
}
});
}
String s1 = "class A";
String s2 = "new Listener() { }";
/* class B */
/* new Listener() { } */
}
public static void main(String[] args) {
new Test().run();
}
void run() {
use(new Listener() {
public void execute() {
use(new Listener( ) {
public void execute() {
System.out.println("Inside execute()");
}
});
}
});
new Inner().go();
}
void use(Listener l) {
l.execute();
}
}
class Private {
void run() {
new Listener() {
public void execute() {
}
};
}
}
""")
# Testing nested anonymous inner classes, courtesy Brandon Mansfield.
test.write(['src4', 'NestedExample.java'], """\
// import java.util.*;
public class NestedExample
{
public NestedExample()
{
new Thread() {
public void start()
{
new Thread() {
public void start()
{
try {Thread.sleep(200);}
catch (Exception e) {}
}
};
while (true)
{
try {Thread.sleep(200);}
catch (Exception e) {}
}
}
};
}
public static void main(String argv[])
{
new NestedExample();
}
}
""")
# Test not finding an anonymous class when the second token after a
# "new" is a closing brace. This duplicates a test from the unit tests,
# but lets us make sure that we correctly determine that everything is
# up-to-date after the build.
test.write(['src5', 'TestSCons.java'], """\
class TestSCons {
public static void main(String[] args) {
Foo[] fooArray = new Foo[] { new Foo() };
}
}
class Foo { }
""")
# Test private inner class instantiation, courtesy Tilo Prutz:
# http://scons.tigris.org/issues/show_bug.cgi?id=1594
test.write(['src6', 'TestSCons.java'], """\
class test
{
test()
{
super();
new inner();
}
static class inner
{
private inner() {}
}
}
""")
test.run(arguments = '.')
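# Expected .class files for each build target, including the inner and
# anonymous classes that javac generates from the sources written above.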
expect_1 = [
test.workpath('class1', 'com', 'other', 'Example2.class'),
test.workpath('class1', 'com', 'sub', 'foo', 'Example1.class'),
test.workpath('class1', 'com', 'sub', 'foo', 'Example3.class'),
]
expect_2 = [
test.workpath('class2', 'com', 'other', 'Example5.class'),
test.workpath('class2', 'com', 'sub', 'bar', 'Example4.class'),
test.workpath('class2', 'com', 'sub', 'bar', 'Example6.class'),
]
expect_3 = [
test.workpath('class3', 'Empty.class'),
test.workpath('class3', 'Example7.class'),
test.workpath('class3', 'Listener.class'),
test.workpath('class3', 'Private$1.class'),
test.workpath('class3', 'Private.class'),
test.workpath('class3', 'Test$1$1.class'),
test.workpath('class3', 'Test$1.class'),
test.workpath('class3', 'Test$Inner$1.class'),
test.workpath('class3', 'Test$Inner.class'),
test.workpath('class3', 'Test.class'),
]
expect_4 = [
test.workpath('class4', 'NestedExample$1$1.class'),
test.workpath('class4', 'NestedExample$1.class'),
test.workpath('class4', 'NestedExample.class'),
]
expect_5 = [
test.workpath('class5', 'Foo.class'),
test.workpath('class5', 'TestSCons.class'),
]
expect_6 = [
test.workpath('class6', 'test$1.class'),
test.workpath('class6', 'test$inner.class'),
test.workpath('class6', 'test.class'),
]
failed = None
def classes_must_match(dir, expect):
global failed
got = test.java_get_class_files(test.workpath(dir))
if expect != got:
sys.stderr.write("Expected the following class files in '%s':\n" % dir)
for c in expect:
sys.stderr.write(' %s\n' % c)
sys.stderr.write("Got the following class files in '%s':\n" % dir)
for c in got:
sys.stderr.write(' %s\n' % c)
failed = 1
def classes_must_not_exist(dir, expect):
global failed
present = list(filter(os.path.exists, expect))
if present:
sys.stderr.write("Found the following unexpected class files in '%s' after cleaning:\n" % dir)
for c in present:
sys.stderr.write(' %s\n' % c)
failed = 1
classes_must_match('class1', expect_1)
classes_must_match('class2', expect_2)
classes_must_match('class3', expect_3)
classes_must_match('class4', expect_4)
classes_must_match('class5', expect_5)
classes_must_match('class6', expect_6)
test.fail_test(failed)
test.up_to_date(options='--debug=explain', arguments = '.')
test.run(arguments = '-c .')
classes_must_not_exist('class1', expect_1)
classes_must_not_exist('class2', expect_2)
classes_must_not_exist('class3', expect_3)
classes_must_not_exist('class4', expect_4)
classes_must_not_exist('class5', expect_5)
# This test case should pass, but doesn't.
# The expect_6 list contains the class files that the Java compiler
# actually creates, apparently because of the "private" instantiation
# of the "inner" class. Our parser doesn't currently detect this, so
# it doesn't know to remove that generated class file.
#classes_must_not_exist('class6', expect_6)
test.fail_test(failed)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
import pytest
from api.base.settings.defaults import API_BASE
from osf_tests.factories import (
ProjectFactory,
AuthUserFactory,
PrivateLinkFactory,
NodeFactory
)
from osf.utils import permissions
@pytest.fixture()
def url(public_project, view_only_link):
return '/{}nodes/{}/view_only_links/{}/'.format(
API_BASE, public_project._id, view_only_link._id)
@pytest.fixture()
def user():
return AuthUserFactory()
@pytest.fixture()
def read_contrib():
return AuthUserFactory()
@pytest.fixture()
def write_contrib():
return AuthUserFactory()
@pytest.fixture()
def non_contrib():
return AuthUserFactory()
@pytest.fixture()
def public_project(user, read_contrib, write_contrib):
public_project = ProjectFactory(is_public=True, creator=user)
public_project.add_contributor(
read_contrib, permissions=[permissions.READ])
public_project.add_contributor(
write_contrib, permissions=[permissions.READ, permissions.WRITE])
public_project.save()
return public_project
@pytest.fixture()
def view_only_link(public_project):
view_only_link = PrivateLinkFactory(name='testlink')
view_only_link.nodes.add(public_project)
view_only_link.save()
return view_only_link
@pytest.mark.django_db
class TestViewOnlyLinksDetail:
def test_non_mutating_view_only_links_detail_tests(
self, app, user, write_contrib, read_contrib, non_contrib, url):
# test_admin_can_view_vol_detail
res = app.get(url, auth=user.auth)
assert res.status_code == 200
assert res.json['data']['attributes']['name'] == 'testlink'
# test_read_write_cannot_view_vol_detail
res = app.get(url, auth=write_contrib.auth, expect_errors=True)
assert res.status_code == 403
# test_read_only_cannot_view_vol_detail
res = app.get(url, auth=read_contrib.auth, expect_errors=True)
assert res.status_code == 403
# test_logged_in_user_cannot_view_vol_detail
res = app.get(url, auth=non_contrib.auth, expect_errors=True)
assert res.status_code == 403
# test_unauthenticated_user_cannot_view_vol_detail
res = app.get(url, expect_errors=True)
assert res.status_code == 401
def test_deleted_vol_not_returned(
self, app, user, public_project, view_only_link, url):
res = app.get(url, auth=user.auth)
assert res.status_code == 200
assert res.json['data']['attributes']['name'] == 'testlink'
view_only_link.is_deleted = True
view_only_link.save()
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
def test_vol_detail_delete(
self, app, user, url, view_only_link):
# test_admin_can_delete_vol
res = app.delete_json_api(url, auth=user.auth)
view_only_link.reload()
assert res.status_code == 204
assert view_only_link.is_deleted
@pytest.mark.django_db
class TestViewOnlyLinksUpdate:
@pytest.fixture()
def public_project_admin(self, public_project):
return AuthUserFactory()
@pytest.fixture()
def public_project(self, public_project_admin, public_project):
public_project.add_contributor(
public_project_admin, permissions=[
permissions.ADMIN])
return public_project
@pytest.fixture()
def public_project_component(self, user, public_project):
return NodeFactory(is_public=True, creator=user, parent=public_project)
def test_admin_can_update_vol_name(self, app, user, view_only_link, url):
assert view_only_link.name == 'testlink'
assert not view_only_link.anonymous
payload = {
'attributes': {
'name': 'updated vol name'
}
}
res = app.patch_json_api(url, {'data': payload}, auth=user.auth)
assert res.status_code == 200
assert res.json['data']['attributes']['name'] == 'updated vol name'
assert not res.json['data']['attributes']['anonymous']
def test_admin_can_update_vol_anonymous(
self, app, user, view_only_link, url):
assert view_only_link.name == 'testlink'
assert not view_only_link.anonymous
payload = {
'attributes': {
'anonymous': True
}
}
# Set anonymous to True
res = app.put_json_api(url, {'data': payload}, auth=user.auth)
assert res.status_code == 200
assert res.json['data']['attributes']['name'] == 'testlink'
assert res.json['data']['attributes']['anonymous']
# Set anonymous to False
payload = {
'attributes': {
'anonymous': False
}
}
# Send the update setting anonymous back to False
res = app.put_json_api(url, {'data': payload}, auth=user.auth)
assert res.status_code == 200
assert res.json['data']['attributes']['name'] == 'testlink'
assert res.json['data']['attributes']['anonymous'] is False
def test_cannot_update_vol(
self, app, write_contrib, read_contrib, non_contrib, url):
# test_read_write_cannot_update_vol
payload = {
'attributes': {
'name': 'updated vol name',
'anonymous': True,
}
}
res = app.put_json_api(
url,
{'data': payload},
auth=write_contrib.auth,
expect_errors=True)
assert res.status_code == 403
# test_read_only_cannot_update_vol
payload = {
'attributes': {
'name': 'updated vol name',
'anonymous': True,
}
}
res = app.put_json_api(
url,
{'data': payload},
auth=read_contrib.auth,
expect_errors=True)
assert res.status_code == 403
# test_logged_in_user_cannot_update_vol
payload = {
'attributes': {
'name': 'updated vol name',
'anonymous': True,
}
}
res = app.put_json_api(
url,
{'data': payload},
auth=non_contrib.auth,
expect_errors=True)
assert res.status_code == 403
# test_unauthenticated_user_cannot_update_vol
payload = {
'attributes': {
'name': 'updated vol name',
'anonymous': True,
}
}
res = app.put_json_api(url, {'data': payload}, expect_errors=True)
assert res.status_code == 401
@pytest.mark.django_db
class TestViewOnlyLinksDelete:
def test_admin_can_delete_vol(self, app, user, url, view_only_link):
res = app.delete(url, auth=user.auth)
view_only_link.reload()
assert res.status_code == 204
assert view_only_link.is_deleted
def test_vol_delete(
self, app, write_contrib, read_contrib, non_contrib, url):
# test_read_write_cannot_delete_vol
res = app.delete(url, auth=write_contrib.auth, expect_errors=True)
assert res.status_code == 403
# test_read_only_cannot_delete_vol
res = app.delete(url, auth=read_contrib.auth, expect_errors=True)
assert res.status_code == 403
# test_logged_in_user_cannot_delete_vol
res = app.delete(url, auth=non_contrib.auth, expect_errors=True)
assert res.status_code == 403
# test_unauthenticated_user_cannot_delete_vol
res = app.delete(url, expect_errors=True)
assert res.status_code == 401
|
|
"""Implementation of RootOf class and related tools. """
from __future__ import print_function, division
from sympy.core import (S, Expr, Integer, Float, I, Add, Lambda, symbols,
sympify, Rational, Dummy)
from sympy.core.cache import cacheit
from sympy.core.function import AppliedUndef
from sympy.functions.elementary.miscellaneous import root as _root
from sympy.polys.polytools import Poly, PurePoly, factor
from sympy.polys.rationaltools import together
from sympy.polys.polyfuncs import symmetrize, viete
from sympy.polys.rootisolation import (
dup_isolate_complex_roots_sqf,
dup_isolate_real_roots_sqf)
from sympy.polys.polyroots import (
roots_linear, roots_quadratic, roots_binomial,
preprocess_roots, roots)
from sympy.polys.polyerrors import (
MultivariatePolynomialError,
GeneratorsNeeded,
PolynomialError,
DomainError)
from sympy.polys.domains import QQ
from mpmath import mpf, mpc, findroot, workprec
from mpmath.libmp.libmpf import prec_to_dps
from sympy.utilities import lambdify, public
from sympy.core.compatibility import range
from math import log as mathlog
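# Helper used when classifying non-real roots below: returns True if ``i``
# is an exact power of two (e.g. the degree of a binomial a*x**N + b).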
def _ispow2(i):
v = mathlog(i, 2)
return v == int(v)
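# Module-level caches mapping square-free polynomial factors to their
# isolating intervals for real and complex roots, shared by RootOf instances.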
_reals_cache = {}
_complexes_cache = {}
@public
class RootOf(Expr):
"""Represents ``k``-th root of a univariate polynomial. """
__slots__ = ['poly', 'index']
is_complex = True
is_number = True
def __new__(cls, f, x, index=None, radicals=True, expand=True):
"""Construct a new ``RootOf`` object for ``k``-th root of ``f``. """
x = sympify(x)
if index is None and x.is_Integer:
x, index = None, x
else:
index = sympify(index)
if index is not None and index.is_Integer:
index = int(index)
else:
raise ValueError("expected an integer root index, got %s" % index)
poly = PurePoly(f, x, greedy=False, expand=expand)
if not poly.is_univariate:
raise PolynomialError("only univariate polynomials are allowed")
degree = poly.degree()
if degree <= 0:
raise PolynomialError("can't construct RootOf object for %s" % f)
if index < -degree or index >= degree:
raise IndexError("root index out of [%d, %d] range, got %d" %
(-degree, degree - 1, index))
elif index < 0:
index += degree
dom = poly.get_domain()
if not dom.is_Exact:
poly = poly.to_exact()
roots = cls._roots_trivial(poly, radicals)
if roots is not None:
return roots[index]
coeff, poly = preprocess_roots(poly)
dom = poly.get_domain()
if not dom.is_ZZ:
raise NotImplementedError("RootOf is not supported over %s" % dom)
root = cls._indexed_root(poly, index)
return coeff*cls._postprocess_root(root, radicals)
@classmethod
def _new(cls, poly, index):
"""Construct new ``RootOf`` object from raw data. """
obj = Expr.__new__(cls)
obj.poly = PurePoly(poly)
obj.index = index
try:
_reals_cache[obj.poly] = _reals_cache[poly]
_complexes_cache[obj.poly] = _complexes_cache[poly]
except KeyError:
pass
return obj
def _hashable_content(self):
return (self.poly, self.index)
@property
def expr(self):
return self.poly.as_expr()
@property
def args(self):
return (self.expr, Integer(self.index))
@property
def free_symbols(self):
# RootOf currently only works with univariate expressions and although
# the poly attribute is often a PurePoly, sometimes it is a Poly. In
# either case no free symbols should be reported.
return set()
def _eval_is_real(self):
"""Return ``True`` if the root is real. """
return self.index < len(_reals_cache[self.poly])
@classmethod
def real_roots(cls, poly, radicals=True):
"""Get real roots of a polynomial. """
return cls._get_roots("_real_roots", poly, radicals)
@classmethod
def all_roots(cls, poly, radicals=True):
"""Get real and complex roots of a polynomial. """
return cls._get_roots("_all_roots", poly, radicals)
@classmethod
def _get_reals_sqf(cls, factor):
"""Compute real root isolating intervals for a square-free polynomial. """
if factor in _reals_cache:
real_part = _reals_cache[factor]
else:
_reals_cache[factor] = real_part = \
dup_isolate_real_roots_sqf(
factor.rep.rep, factor.rep.dom, blackbox=True)
return real_part
@classmethod
def _get_complexes_sqf(cls, factor):
"""Compute complex root isolating intervals for a square-free polynomial. """
if factor in _complexes_cache:
complex_part = _complexes_cache[factor]
else:
_complexes_cache[factor] = complex_part = \
dup_isolate_complex_roots_sqf(
factor.rep.rep, factor.rep.dom, blackbox=True)
return complex_part
@classmethod
def _get_reals(cls, factors):
"""Compute real root isolating intervals for a list of factors. """
reals = []
for factor, k in factors:
real_part = cls._get_reals_sqf(factor)
reals.extend([ (root, factor, k) for root in real_part ])
return reals
@classmethod
def _get_complexes(cls, factors):
"""Compute complex root isolating intervals for a list of factors. """
complexes = []
for factor, k in factors:
complex_part = cls._get_complexes_sqf(factor)
complexes.extend([ (root, factor, k) for root in complex_part ])
return complexes
@classmethod
def _reals_sorted(cls, reals):
"""Make real isolating intervals disjoint and sort roots. """
cache = {}
for i, (u, f, k) in enumerate(reals):
for j, (v, g, m) in enumerate(reals[i + 1:]):
u, v = u.refine_disjoint(v)
reals[i + j + 1] = (v, g, m)
reals[i] = (u, f, k)
reals = sorted(reals, key=lambda r: r[0].a)
for root, factor, _ in reals:
if factor in cache:
cache[factor].append(root)
else:
cache[factor] = [root]
for factor, roots in cache.items():
_reals_cache[factor] = roots
return reals
@classmethod
def _separate_imaginary_from_complex(cls, complexes):
from sympy.utilities.iterables import sift
def is_imag(c):
'''
return True if all roots are imaginary (ax**2 + b)
return False if no roots are imaginary
return None if 2 of the roots are imaginary (ax**N + b with N a power of 2 and a*b < 0)'''
u, f, k = c
deg = f.degree()
if f.length() == 2:
if deg == 2:
return True # both imag
elif _ispow2(deg):
if f.LC()*f.TC() < 0:
return None # 2 are imag
return False # none are imag
# separate according to the function
sifted = sift(complexes, lambda c: c[1])
del complexes
imag = []
complexes = []
for f in sifted:
isift = sift(sifted[f], lambda c: is_imag(c))
imag.extend(isift.pop(True, []))
complexes.extend(isift.pop(False, []))
mixed = isift.pop(None, [])
assert not isift
if not mixed:
continue
while True:
# the non-imaginary ones will be on one side or the other
# of the y-axis
i = 0
while i < len(mixed):
u, f, k = mixed[i]
if u.ax*u.bx > 0:
complexes.append(mixed.pop(i))
else:
i += 1
if len(mixed) == 2:
imag.extend(mixed)
break
# refine
for i, (u, f, k) in enumerate(mixed):
u = u._inner_refine()
mixed[i] = u, f, k
return imag, complexes
@classmethod
def _refine_complexes(cls, complexes):
"""return complexes such that no bounding rectangles of non-conjugate
roots would intersect if slid horizontally or vertically.
"""
while complexes: # break when all are distinct
# get the intervals pairwise-disjoint. If rectangles were drawn around
# the coordinates of the bounding rectangles, no rectangles would
# intersect after this procedure
for i, (u, f, k) in enumerate(complexes):
for j, (v, g, m) in enumerate(complexes[i + 1:]):
u, v = u.refine_disjoint(v)
complexes[i + j + 1] = (v, g, m)
complexes[i] = (u, f, k)
# Although there are no intersecting rectangles, a given rectangle
# might intersect another when slid horizontally. We have to refine
# intervals until this is not true so we can sort the roots
# unambiguously. Since complex roots come in conjugate pairs, we
# will always have 2 rectangles above each other but we should not
# have more than that.
N = len(complexes)//2 - 1
# check x (real) parts: there must be N + 1 disjoint x ranges, i.e.
# the first one must be different from N others
uu = set([(u.ax, u.bx) for u, _, _ in complexes])
u = uu.pop()
if sum([u[1] <= v[0] or v[1] <= u[0] for v in uu]) < N:
# refine
for i, (u, f, k) in enumerate(complexes):
u = u._inner_refine()
complexes[i] = u, f, k
else:
# intervals with identical x-values have disjoint y-values or
# else they would not be disjoint so there is no need for
# further checks
break
return complexes
@classmethod
def _complexes_sorted(cls, complexes):
"""Make complex isolating intervals disjoint and sort roots. """
if not complexes:
return []
cache = {}
# imaginary roots can cause a problem in terms of sorting since
# their x-intervals will never refine as distinct from others
# so we handle them separately
imag, complexes = cls._separate_imaginary_from_complex(complexes)
complexes = cls._refine_complexes(complexes)
# sort imaginary roots
def key(c):
'''return, for ax**n + b, +/-root(abs(b/a), n) according to the
apparent sign of the imaginary interval, e.g. if the interval
were (0, 3) the positive root would be returned.
'''
u, f, k = c
r = _root(abs(f.TC()/f.LC()), f.degree())
if u.ay < 0 or u.by < 0:
return -r
return r
imag = sorted(imag, key=lambda c: key(c))
# sort complexes and combine with imag
if complexes:
# key is (x1, y1) e.g. (1, 2)x(3, 4) -> (1,3)
complexes = sorted(complexes, key=
lambda c: c[0].a)
# find insertion point for imaginary
for i, c in enumerate(reversed(complexes)):
if c[0].bx <= 0:
break
i = len(complexes) - i - 1
if i:
i += 1
complexes = complexes[:i] + imag + complexes[i:]
else:
complexes = imag
# update cache
for root, factor, _ in complexes:
if factor in cache:
cache[factor].append(root)
else:
cache[factor] = [root]
for factor, roots in cache.items():
_complexes_cache[factor] = roots
return complexes
@classmethod
def _reals_index(cls, reals, index):
"""Map initial real root index to an index in a factor where the root belongs. """
i = 0
for j, (_, factor, k) in enumerate(reals):
if index < i + k:
poly, index = factor, 0
for _, factor, _ in reals[:j]:
if factor == poly:
index += 1
return poly, index
else:
i += k
@classmethod
def _complexes_index(cls, complexes, index):
"""Map initial complex root index to an index in a factor where the root belongs. """
index, i = index, 0
for j, (_, factor, k) in enumerate(complexes):
if index < i + k:
poly, index = factor, 0
for _, factor, _ in complexes[:j]:
if factor == poly:
index += 1
index += len(_reals_cache[poly])
return poly, index
else:
i += k
@classmethod
def _count_roots(cls, roots):
"""Count the number of real or complex roots including multiplicities."""
return sum([ k for _, _, k in roots ])
@classmethod
def _indexed_root(cls, poly, index):
"""Get a root of a composite polynomial by index. """
(_, factors) = poly.factor_list()
reals = cls._get_reals(factors)
reals_count = cls._count_roots(reals)
if index < reals_count:
reals = cls._reals_sorted(reals)
return cls._reals_index(reals, index)
else:
complexes = cls._get_complexes(factors)
complexes = cls._complexes_sorted(complexes)
return cls._complexes_index(complexes, index - reals_count)
@classmethod
def _real_roots(cls, poly):
"""Get real roots of a composite polynomial. """
(_, factors) = poly.factor_list()
reals = cls._get_reals(factors)
reals = cls._reals_sorted(reals)
reals_count = cls._count_roots(reals)
roots = []
for index in range(0, reals_count):
roots.append(cls._reals_index(reals, index))
return roots
@classmethod
def _all_roots(cls, poly):
"""Get real and complex roots of a composite polynomial. """
(_, factors) = poly.factor_list()
reals = cls._get_reals(factors)
reals = cls._reals_sorted(reals)
reals_count = cls._count_roots(reals)
roots = []
for index in range(0, reals_count):
roots.append(cls._reals_index(reals, index))
complexes = cls._get_complexes(factors)
complexes = cls._complexes_sorted(complexes)
complexes_count = cls._count_roots(complexes)
for index in range(0, complexes_count):
roots.append(cls._complexes_index(complexes, index))
return roots
@classmethod
@cacheit
def _roots_trivial(cls, poly, radicals):
"""Compute roots in linear, quadratic and binomial cases. """
if poly.degree() == 1:
return roots_linear(poly)
if not radicals:
return None
if poly.degree() == 2:
return roots_quadratic(poly)
elif poly.length() == 2 and poly.TC():
return roots_binomial(poly)
else:
return None
@classmethod
def _preprocess_roots(cls, poly):
"""Take heroic measures to make ``poly`` compatible with ``RootOf``. """
dom = poly.get_domain()
if not dom.is_Exact:
poly = poly.to_exact()
coeff, poly = preprocess_roots(poly)
dom = poly.get_domain()
if not dom.is_ZZ:
raise NotImplementedError(
"sorted roots not supported over %s" % dom)
return coeff, poly
@classmethod
def _postprocess_root(cls, root, radicals):
"""Return the root if it is trivial or a ``RootOf`` object. """
poly, index = root
roots = cls._roots_trivial(poly, radicals)
if roots is not None:
return roots[index]
else:
return cls._new(poly, index)
@classmethod
def _get_roots(cls, method, poly, radicals):
"""Return postprocessed roots of specified kind. """
if not poly.is_univariate:
raise PolynomialError("only univariate polynomials are allowed")
coeff, poly = cls._preprocess_roots(poly)
roots = []
for root in getattr(cls, method)(poly):
roots.append(coeff*cls._postprocess_root(root, radicals))
return roots
def _get_interval(self):
"""Internal function for retrieving isolation interval from cache. """
if self.is_real:
return _reals_cache[self.poly][self.index]
else:
reals_count = len(_reals_cache[self.poly])
return _complexes_cache[self.poly][self.index - reals_count]
def _set_interval(self, interval):
"""Internal function for updating isolation interval in cache. """
if self.is_real:
_reals_cache[self.poly][self.index] = interval
else:
reals_count = len(_reals_cache[self.poly])
_complexes_cache[self.poly][self.index - reals_count] = interval
def _eval_evalf(self, prec):
"""Evaluate this complex root to the given precision. """
with workprec(prec):
g = self.poly.gen
if not g.is_Symbol:
d = Dummy('x')
func = lambdify(d, self.expr.subs(g, d))
else:
func = lambdify(g, self.expr)
interval = self._get_interval()
if not self.is_real:
# For complex intervals, we need to keep refining until the
# imaginary interval is disjoint from those of other roots, that is,
# until both ends get refined.
ay = interval.ay
by = interval.by
while interval.ay == ay or interval.by == by:
interval = interval.refine()
while True:
if self.is_real:
a = mpf(str(interval.a))
b = mpf(str(interval.b))
if a == b:
root = a
break
x0 = mpf(str(interval.center))
else:
ax = mpf(str(interval.ax))
bx = mpf(str(interval.bx))
ay = mpf(str(interval.ay))
by = mpf(str(interval.by))
if ax == bx and ay == by:
# the sign of the imaginary part will be assigned
# according to the desired index using the fact that
# roots are sorted with negative imag parts coming
# before positive (and all imag roots coming after real
# roots)
deg = self.poly.degree()
i = self.index # a positive attribute after creation
if (deg - i) % 2:
if ay < 0:
ay = -ay
else:
if ay > 0:
ay = -ay
root = mpc(ax, ay)
break
x0 = mpc(*map(str, interval.center))
try:
root = findroot(func, x0)
# If the (real or complex) root is not in the 'interval',
# then keep refining the interval. This happens if findroot
# accidentally finds a different root outside of this
# interval because our initial estimate 'x0' was not close
# enough. It is also possible that the secant method will
# get trapped by a max/min in the interval; the root
# verification by findroot will raise a ValueError in this
# case and the interval will then be tightened -- and
# eventually the root will be found.
if self.is_real:
if (a <= root <= b):
break
elif (ax <= root.real <= bx and ay <= root.imag <= by):
break
except ValueError:
pass
interval = interval.refine()
return Float._new(root.real._mpf_, prec) + I*Float._new(root.imag._mpf_, prec)
def eval_rational(self, tol):
"""
Returns a Rational approximation to ``self`` with the tolerance ``tol``.
This method uses bisection, which is very robust and always converges. The
returned Rational instance will be at most 'tol' from the exact root.
The following example first obtains a Rational approximation to 1e-7
accuracy for all roots of the 4th-order Legendre polynomial, and then
evaluates them to 5 decimal digits (so all digits will be correct,
including rounding):
>>> from sympy import S, legendre_poly, Symbol
>>> x = Symbol("x")
>>> p = legendre_poly(4, x, polys=True)
>>> roots = [r.eval_rational(S(1)/10**7) for r in p.real_roots()]
>>> roots = [str(r.n(5)) for r in roots]
>>> roots
['-0.86114', '-0.33998', '0.33998', '0.86114']
"""
if not self.is_real:
raise NotImplementedError("eval_rational() only works for real polynomials so far")
func = lambdify(self.poly.gen, self.expr)
interval = self._get_interval()
a = Rational(str(interval.a))
b = Rational(str(interval.b))
return bisect(func, a, b, tol)
def _eval_Eq(self, other):
# RootOf represents a Root, so if other is that root, it should set
# the expression to zero *and* it should be in the interval of the
# RootOf instance. It must also be a number that agrees with the
# is_real value of the RootOf instance.
if type(self) == type(other):
return sympify(self.__eq__(other))
if not (other.is_number and not other.has(AppliedUndef)):
return S.false
if not other.is_finite:
return S.false
z = self.expr.subs(self.expr.free_symbols.pop(), other).is_zero
if z is False: # all roots will make z True but we don't know
# whether this is the right root if z is True
return S.false
o = other.is_real, other.is_imaginary
s = self.is_real, self.is_imaginary
if o != s and None not in o and None not in s:
return S.false
i = self._get_interval()
was = i.a, i.b
need = [True]*2
# make sure it would be distinct from others
while any(need):
i = i.refine()
a, b = i.a, i.b
if need[0] and a != was[0]:
need[0] = False
if need[1] and b != was[1]:
need[1] = False
re, im = other.as_real_imag()
if not im:
if self.is_real:
a, b = [Rational(str(i)) for i in (a, b)]
return sympify(a < other and other < b)
return S.false
if self.is_real:
return S.false
z = r1, r2, i1, i2 = [Rational(str(j)) for j in (
i.ax, i.bx, i.ay, i.by)]
return sympify((
r1 < re and re < r2) and (
i1 < im and im < i2))
@public
class RootSum(Expr):
"""Represents a sum of all roots of a univariate polynomial. """
__slots__ = ['poly', 'fun', 'auto']
def __new__(cls, expr, func=None, x=None, auto=True, quadratic=False):
"""Construct a new ``RootSum`` instance carrying all roots of a polynomial. """
coeff, poly = cls._transform(expr, x)
if not poly.is_univariate:
raise MultivariatePolynomialError(
"only univariate polynomials are allowed")
if func is None:
func = Lambda(poly.gen, poly.gen)
else:
try:
is_func = func.is_Function
except AttributeError:
is_func = False
if is_func and 1 in func.nargs:
if not isinstance(func, Lambda):
func = Lambda(poly.gen, func(poly.gen))
else:
raise ValueError(
"expected a univariate function, got %s" % func)
var, expr = func.variables[0], func.expr
if coeff is not S.One:
expr = expr.subs(var, coeff*var)
deg = poly.degree()
if not expr.has(var):
return deg*expr
if expr.is_Add:
add_const, expr = expr.as_independent(var)
else:
add_const = S.Zero
if expr.is_Mul:
mul_const, expr = expr.as_independent(var)
else:
mul_const = S.One
func = Lambda(var, expr)
rational = cls._is_func_rational(poly, func)
(_, factors), terms = poly.factor_list(), []
for poly, k in factors:
if poly.is_linear:
term = func(roots_linear(poly)[0])
elif quadratic and poly.is_quadratic:
term = sum(map(func, roots_quadratic(poly)))
else:
if not rational or not auto:
term = cls._new(poly, func, auto)
else:
term = cls._rational_case(poly, func)
terms.append(k*term)
return mul_const*Add(*terms) + deg*add_const
@classmethod
def _new(cls, poly, func, auto=True):
"""Construct new raw ``RootSum`` instance. """
obj = Expr.__new__(cls)
obj.poly = poly
obj.fun = func
obj.auto = auto
return obj
@classmethod
def new(cls, poly, func, auto=True):
"""Construct new ``RootSum`` instance. """
if not func.expr.has(*func.variables):
return func.expr
rational = cls._is_func_rational(poly, func)
if not rational or not auto:
return cls._new(poly, func, auto)
else:
return cls._rational_case(poly, func)
@classmethod
def _transform(cls, expr, x):
"""Transform an expression to a polynomial. """
poly = PurePoly(expr, x, greedy=False)
return preprocess_roots(poly)
@classmethod
def _is_func_rational(cls, poly, func):
"""Check if a lambda is areational function. """
var, expr = func.variables[0], func.expr
return expr.is_rational_function(var)
@classmethod
def _rational_case(cls, poly, func):
"""Handle the rational function case. """
roots = symbols('r:%d' % poly.degree())
var, expr = func.variables[0], func.expr
f = sum(expr.subs(var, r) for r in roots)
p, q = together(f).as_numer_denom()
domain = QQ[roots]
p = p.expand()
q = q.expand()
try:
p = Poly(p, domain=domain, expand=False)
except GeneratorsNeeded:
p, p_coeff = None, (p,)
else:
p_monom, p_coeff = zip(*p.terms())
try:
q = Poly(q, domain=domain, expand=False)
except GeneratorsNeeded:
q, q_coeff = None, (q,)
else:
q_monom, q_coeff = zip(*q.terms())
coeffs, mapping = symmetrize(p_coeff + q_coeff, formal=True)
formulas, values = viete(poly, roots), []
for (sym, _), (_, val) in zip(mapping, formulas):
values.append((sym, val))
for i, (coeff, _) in enumerate(coeffs):
coeffs[i] = coeff.subs(values)
n = len(p_coeff)
p_coeff = coeffs[:n]
q_coeff = coeffs[n:]
if p is not None:
p = Poly(dict(zip(p_monom, p_coeff)), *p.gens).as_expr()
else:
(p,) = p_coeff
if q is not None:
q = Poly(dict(zip(q_monom, q_coeff)), *q.gens).as_expr()
else:
(q,) = q_coeff
return factor(p/q)
def _hashable_content(self):
return (self.poly, self.fun)
@property
def expr(self):
return self.poly.as_expr()
@property
def args(self):
return (self.expr, self.fun, self.poly.gen)
@property
def free_symbols(self):
return self.poly.free_symbols | self.fun.free_symbols
@property
def is_commutative(self):
return True
def doit(self, **hints):
if not hints.get('roots', True):
return self
_roots = roots(self.poly, multiple=True)
if len(_roots) < self.poly.degree():
return self
else:
return Add(*[ self.fun(r) for r in _roots ])
def _eval_evalf(self, prec):
try:
_roots = self.poly.nroots(n=prec_to_dps(prec))
except (DomainError, PolynomialError):
return self
else:
return Add(*[ self.fun(r) for r in _roots ])
def _eval_derivative(self, x):
var, expr = self.fun.args
func = Lambda(var, expr.diff(x))
return self.new(self.poly, func, self.auto)
def bisect(f, a, b, tol):
"""
Implements bisection. This function is used in RootOf.eval_rational() and
it needs to be robust.
Examples
========
>>> from sympy import S
>>> from sympy.polys.rootoftools import bisect
>>> bisect(lambda x: x**2-1, -10, 0, S(1)/10**2)
-1025/1024
>>> bisect(lambda x: x**2-1, -10, 0, S(1)/10**4)
-131075/131072
"""
a = sympify(a)
b = sympify(b)
fa = f(a)
fb = f(b)
if fa * fb >= 0:
raise ValueError("bisect: f(a) and f(b) must have opposite signs")
while (b - a > tol):
c = (a + b)/2
fc = f(c)
if (fc == 0):
return c # We need to make sure f(c) is not zero below
if (fa * fc < 0):
b = c
fb = fc
else:
a = c
fa = fc
return (a + b)/2
|
|
from control4.cloud.cloud_interface import *
from control4.misc.console_utils import maybe_call_and_print,check_output_and_print,colorize,call_and_print
from control4.config import load_config
import threading
import time,json,re
import pexpect
import os.path as osp
import tempfile
import subprocess
# XXX terminal output from shells is odd
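# Bash fragment run on the instance: sync and rebuild the repo, run the
# experiment commands, then upload results and the startup log to the
# configured GCS bucket.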
RUN_COMMANDS_SCRIPT_TEMPLATE = """
#!/bin/bash
%(basic_setup)s
# Actually run commands
cd
%(cmds_str)s
# Upload files
gsutil cp -R $CTRL_DATA/results/* %(new_data_bucket)s/results/
gsutil cp /var/log/startupscript.log %(new_data_bucket)s/logs/%(instance_name)s.log
"""
BASIC_SETUP_TEMPLATE = """
cd ~/control
git pull
git checkout %(branch_name)s
cd ~/control/cpp/3rdparty/mjc2
git pull
cd ~/Theano
git pull
cd ~/build/control
make -j
"""
STARTUP_SCRIPT_TEMPLATE = """
#!/bin/bash
if [[ $(id -u) -eq 0 ]];
then
sudo cp /var/run/google.startup.script /tmp/blah.bash
sudo chown %(image_username)s /tmp/blah.bash
sudo -u %(image_username)s bash /tmp/blah.bash
else
# bash_profile and bashrc haven't been run yet since this is a root shell
source ~/.bashrc
%(run_commands)s
# Maybe delete instance
%(maybe_delete_str)s
fi
"""
PRINT_LOCK = threading.Lock()
def indent(text,n_tabs):
space = " "*(4*n_tabs)
return "\n".join([space + line for line in text.split("\n")])
# TODO delete startup scripts eventually
class GCECloud(Cloud):
def __init__(self):
pass
def start_instances(self, instance_names, dry=False):
config = load_config()
threads = []
for instance_name in instance_names:
d = dict(
branch_name = config["experiments"]["branch_name"],
image_name = config["cloud"]["gce"]["image_name"],
image_username = config["cloud"]["gce"]["image_username"],
instance_name = instance_name,
machine_type = config["cloud"]["gce"]["machine_type"],
project = config["cloud"]["gce"]["project"],
zone = config["cloud"]["gce"]["zone"],
maybe_delete_str = "",
)
d["run_commands"] = indent(BASIC_SETUP_TEMPLATE%d,1)
startup_script_str = STARTUP_SCRIPT_TEMPLATE%d
with tempfile.NamedTemporaryFile(delete=False) as fh:
fh.write(startup_script_str)
d["startup_script_fname"] = fh.name
startcmd = ('gcloud compute instances create'
' %(instance_name)s'
' --boot-disk-type "pd-standard"'
' --format json'
' --image "https://www.googleapis.com/compute/v1/projects/%(project)s/global/images/%(image_name)s"'
' --scopes "https://www.googleapis.com/auth/devstorage.read_only"'
' --machine-type %(machine_type)s'
' --maintenance-policy "MIGRATE"'
' --metadata-from-file startup-script=%(startup_script_fname)s'
' --network "default"'
' --project "%(project)s"'
' --zone "%(zone)s"'
% d
)
threads.append(threading.Thread(target=maybe_call_and_print,args=(startcmd,dry)))
for thread in threads:
thread.start()
time.sleep(.25)
for thread in threads:
thread.join()
def delete_instances(self,instance_names):
gce_config = load_cloud_config()["gce"]
cmd = "gcloud compute instances delete %(instances)s --zone %(zone)s "%dict(instances=" ".join(instance_names),zone=gce_config['zone'],project=gce_config['project'])
call_and_print(cmd)
def list_instances(self):
config = load_cloud_config()["gce"]
cmd = "gcloud compute instances list --format json --zone %(zone)s"%dict(zone=config['zone'],project=config['project'])
output = check_output_and_print(cmd)
return json.loads(output)
def instance_name(self,info):
return info['name']
def instance_address(self,info):
return "tcp://%s:5555"%info["name"]
def instance_address_local(self,info):
return "tcp://*:5555"
def run_shell_command(self,info,cmd,block=True):
name = self.instance_name(info)
shell = get_gce_shell(name)
shell.runcmd(cmd,block=block)
if not block:
return shell
else:
shell.close()
return None
def _gen_cmd_str(self,cmds):
cmds_str = "" #pylint: disable=W0612
# XXX maybe we should make sure directory structure exists in the script itself
for cmd in cmds:
local_fname, = re.findall(r"--outfile=(\S+)",cmd)
cmds_str += "mkdir -p %s\n"%(osp.dirname(local_fname))
cmds_str += cmd
return cmds_str
def _gen_bash_script(self,cmds,instance_name):
config = load_config()
return RUN_COMMANDS_SCRIPT_TEMPLATE%dict(
branch_name = config["experiments"]["branch_name"],
cmds_str = self._gen_cmd_str(cmds),
instance_name = instance_name,
new_data_bucket = config["cloud"]["gce"]["new_data_bucket"],
basic_setup = BASIC_SETUP_TEMPLATE%config["experiments"]
)
# def run_commands_on_existing_instance(self,cmds, instance_name,dry=False,keep_instance=False):
# better to scp script and then run with nohup
# script_str = self._gen_bash_script(cmds, instance_name)
# with tempfile.NamedTemporaryFile() as fh:
# startup_script_fname=fh.name
# print "running script: "
# print "******************"
# print colorize(script_str,"green")
# print "******************"
# fh.write(script_str)
# cmd = "gcutil ssh %(instance_name)s 'bash -s' < %(local_fname)s"%(instance_name,local_fname)
# maybe_call_and_print(cmd,dry)
def run_commands_on_fresh_instance(self,cmds,instance_name,dry=False,keep_instance=False):
config = load_config()
d = dict(
branch_name = config["experiments"]["branch_name"],
image_name = config["cloud"]["gce"]["image_name"],
image_username = config["cloud"]["gce"]["image_username"],
instance_name = instance_name,
machine_type = config["cloud"]["gce"]["machine_type"],
new_data_bucket = config["cloud"]["gce"]["new_data_bucket"],
project = config["cloud"]["gce"]["project"],
run_commands = indent(self._gen_bash_script(cmds,instance_name),1),
zone = config["cloud"]["gce"]["zone"],
)
d["maybe_delete_str"] = "" if keep_instance else "gcutil deleteinstance --force --delete_boot_pd %(instance_name)s --project=%(project)s --zone=%(zone)s"%d #pylint: disable=W0612
startup_script_str = STARTUP_SCRIPT_TEMPLATE%d
with tempfile.NamedTemporaryFile(delete=False) as fh:
fh.write(startup_script_str)
d["startup_script_fname"] = fh.name
startcmd = ('gcloud compute instances create'
' %(instance_name)s'
' --boot-disk-type "pd-standard"'
' --format json'
' --image "https://www.googleapis.com/compute/v1/projects/%(project)s/global/images/%(image_name)s"'
' --scopes "https://www.googleapis.com/auth/devstorage.read_only"'
' --machine-type %(machine_type)s'
' --maintenance-policy "MIGRATE"'
' --metadata-from-file startup-script=%(startup_script_fname)s'
' --network "default"'
' --project "%(project)s"'
' --zone "%(zone)s"'
%d)
with PRINT_LOCK:
print "**************************************"
print "writing startup script to %s"%fh.name
print "startup script:"
print colorize(startup_script_str,'green')
print "start command:"
print colorize(startcmd,'green')
print "**************************************"
if not dry:
subprocess.check_call(startcmd,shell=True)
### Messy GCE auxiliary stuff
class AbstractShell(object):
def runcmd(self, cmd, block):
raise NotImplementedError
def is_busy(self):
raise NotImplementedError
def close(self):
raise NotImplementedError
def __del__(self):
self.close()
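# Matches a typical remote bash prompt of the form "user@host:~/some/dir$".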
DEFAULT_SHELL_PROMPT = r"\S{1,10}@\S{1,20}\:\~\S{1,30}\$"
class PexpectShell(AbstractShell):
def __init__(self, child, disp_prefix="",shell_prompt=None):
self.shell_prompt = DEFAULT_SHELL_PROMPT if shell_prompt is None else shell_prompt
self.child = child
self.busy = False
self.disp_prefix = disp_prefix
def runcmd(self, cmd, block=True):
self.busy = True
print colorize(self.disp_prefix+cmd,'yellow')
self.child.sendline(cmd)
if block:
self.child.expect(self.shell_prompt,timeout=9999999)
print colorize(self.child.before,'magenta')
self.busy = False
def update(self):
if self.busy:
try:
self.child.expect(self.shell_prompt,timeout=0)
print colorize(self.child.before,'magenta')
self.busy = False
except pexpect.TIMEOUT:
# print colorize(self.child.before,'magenta')
pass
def close(self):
self.update()
if self.is_busy():
self.child.sendcontrol('c')
self.child.close()
def is_busy(self):
return self.busy
def join(self):
while self.busy:
self.update()
time.sleep(.1)
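# Open an interactive `gcloud compute ssh` session to the named instance,
# retrying (up to 10 attempts) until the remote shell prompt appears.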
def get_gce_shell(name):
config = load_cloud_config()["gce"]
username = config['image_username']
for _i_try in xrange(10):
cmd = "gcloud compute ssh %(name)s --zone %(zone)s --project %(project)s"%dict(name=name,zone=config['zone'],project=config['project'])
print colorize(cmd, 'green')
p = pexpect.spawn(cmd)
shell_prompt = username + r"@\S{1,30}\:\~\S{0,30}\$"
try:
p.expect(shell_prompt, timeout=6)
return PexpectShell(p, "" if name is None else (name+": "),shell_prompt=shell_prompt)
except pexpect.TIMEOUT:
p.close()
print "ssh timed out. trying again"
time.sleep(2)
except pexpect.EOF:
print "ssh got eof. trying again"
try:
p.close()
except pexpect.ExceptionPexpect:
print colorize("pexpect couldn't close child", "red")
time.sleep(5)
raise ValueError("couldn't connect by ssh")
|
|
# -*- coding: utf-8 -*-
'''
Created on 09.09.2016
@author: gongy
'''
import io
import struct
from reflexif.models.jpeg import SZ_VARIABLE, SZ_ZERO, SEGMENTS
from reflexif import errors, compat
from reflexif.models import jpeg
from reflexif.io import readexactly
class SegmentData(object):
def __init__(self, segment, offset, size, payload, incomplete=False):
self.segment = segment
self.offset = offset
self.size = size
self.payload = payload
self.incomplete = incomplete
def __repr__(self):
return '<SegmentData: %s offset=%d, size=%d>' % (
self.segment.name, self.offset, self.size)
@property
def payload_offset(self):
if self.size == 0:
raise ValueError
return self.offset + 4
class NoMarkerError(Exception):
pass
class JPEGStreamParser(object):
segment_by_marker = {s.marker: s for s in SEGMENTS}
uint16 = struct.Struct('>H')
def __init__(self, fd, search_markers=False, read_trailer=False):
self.fd = fd
self.read_trailer = read_trailer
self.search_markers = search_markers
self.trailer = None
self.segments = []
def read_checked(self, n):
return readexactly(self.fd, n)
def parse_marker(self):
data = self.read_checked(2)
if data[0:1] != b'\xff':
self.fd.seek(-2, io.SEEK_CUR)
raise NoMarkerError
while data[1:2] == b'\xff':
data = data[1:2] + self.read_checked(1)
return self.uint16.unpack(data)[0]
def parse_uint16(self):
return self.uint16.unpack(self.read_checked(2))[0]
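# Scan the first `length` bytes of `buffer` for a marker candidate: skip
# byte-stuffed 0xFF 0x00 pairs, return the offset of the 0xFF that starts a
# marker, -1 if the buffer ends on a bare 0xFF, or None if none is found.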
def check_marker(self, buffer, length):
offset = 0
while offset < length:
try:
index = buffer.index(b'\xff', offset)
except ValueError:
return None
index += 1
if index == length:
return -1
marker = buffer[index]
if marker == 0:
offset = index + 1
else:
return index - 1
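# Consume entropy-coded scan data in 4 KiB chunks until the next marker,
# then seek back so the stream is positioned at that marker's 0xFF byte.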
def skip_scan(self):
buf = bytearray(4096)
while True:
r = self.fd.readinto(buf)
if r == 0:
raise EOFError
marker_offset = self.check_marker(buf, r)
if marker_offset is None:
continue
if marker_offset == -1:
self.fd.seek(-1, io.SEEK_CUR)
else:
self.fd.seek(marker_offset - r, io.SEEK_CUR)
break
# self.state = ST_SEG
#def parse_scan(self):
# while True:
# b = self.read_checked(1)
# if b == b'\xff':
# b2 = self.read_checked(1)
# if b2 == b'\x00':
# continue
#
# offset = self.fd.tell() - 2
# self.fd.seek(offset)
# break
def search_marker(self):
while True:
b = self.read_checked(1)
if b != b'\xff':
continue
data = b + self.read_checked(1)
return self.uint16.unpack(data)[0]
#if self.state == ST_SCAN and not segment.is_rst and not segment is jpeg.Segments.EOI:
# raise errors.JPEGError('unexpected segment in ST_SCAN: %r' % segment)
#if segment.is_rst or segment is jpeg.Segments.SOS:
# self.state = ST_SCAN
# self.skip_scan()
#else:
# self.state = ST_SEG
def parse_segments(self):
for seg in self._parse_segments():
self.segments.append(seg)
yield seg
def parse_segments2(self):
for seg in self._parse_segments():
self.segments.append(seg)
def _parse_segments(self):
scan = False
while True:
seg = self.parse_segment()
if not seg.segment.is_rst:
yield seg
if seg.segment == jpeg.Segments.EOI:
break
elif seg.segment == jpeg.Segments.SOS:
scan = True
elif scan and seg.segment.is_rst:
pass
else:
scan = False
if scan:
self.skip_scan()
if self.read_trailer:
self.parse_trailer()
#trailer_data = self.fd.read()
#self.trailer = TrailerData(trailer_data)
def parse_trailer(self):
trailer_data = self.fd.read()
self.trailer = TrailerData(trailer_data)
return self.trailer
def parse_segment(self):
offset = self.fd.tell()
try:
marker = self.parse_marker()
except NoMarkerError:
if not self.search_markers:
raise
marker = self.search_marker()
segment = self.segment_by_marker.get(marker)
if segment is None:
raise errors.JPEGError('unknown marker %04X' % marker)
if segment.sizing == SZ_ZERO:
return SegmentData(segment, offset, 0, None)
else:
size = self.parse_uint16()
if size in (0, 1):
raise errors.JPEGError('illegal segment size %d, segment=%r' % (size, segment))
try:
incomplete = False
payload = self.read_checked(size-2)
except EOFError as e:
incomplete = True
payload = e.args[0]
return SegmentData(segment, offset, size, payload, incomplete)
TR_ALL_ZEROS = 1
TR_ALL_FF = 2
TR_JPEG = 3
TR_UNKNOWN = 4
class TrailerData(object):
def __init__(self, data):
self.data = data
self.type = self._analyze()
def get_jpeg_data(self):
if self.type != TR_JPEG:
raise ValueError
return self.data.lstrip(b'\x00')
def parse_trailer_segments(self, read_trailer=True):
with io.BytesIO(self.get_jpeg_data()) as fd:
p = JPEGStreamParser(fd)
p.parse_segments2()
return p
def _analyze(self):
d = self.data
if not d.strip(b'\x00'):
return TR_ALL_ZEROS
elif not d.strip(b'\xff'):
return TR_ALL_FF
elif d.lstrip(b'\00').startswith(b'\xff\xd8'):
return TR_JPEG
def __bool__(self):
return self.type not in (TR_ALL_FF, TR_ALL_ZEROS)
if compat.PY2:
__nonzero__ = __bool__
|
|
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
from collections import namedtuple
import json
import os
try:
import koji
except ImportError:
import inspect
import sys
# Find our mocked koji module
import tests.koji as koji
mock_koji_path = os.path.dirname(inspect.getfile(koji.ClientSession))
if mock_koji_path not in sys.path:
sys.path.append(os.path.dirname(mock_koji_path))
# Now load it properly, the same way the plugin will
del koji
import koji
from osbs.build.build_response import BuildResponse
from atomic_reactor.core import DockerTasker
from atomic_reactor.plugins.post_fetch_worker_metadata import FetchWorkerMetadataPlugin
from atomic_reactor.plugins.build_orchestrate_build import (OrchestrateBuildPlugin,
WORKSPACE_KEY_UPLOAD_DIR,
WORKSPACE_KEY_BUILD_INFO)
from atomic_reactor.plugins.exit_koji_import import KojiImportPlugin
from atomic_reactor.plugins.exit_koji_tag_build import KojiTagBuildPlugin
from atomic_reactor.plugins.post_rpmqa import PostBuildRPMqaPlugin
from atomic_reactor.plugins.pre_check_and_set_rebuild import CheckAndSetRebuildPlugin
from atomic_reactor.plugins.pre_add_filesystem import AddFilesystemPlugin
from atomic_reactor.plugin import ExitPluginsRunner, PluginFailedException
from atomic_reactor.inner import DockerBuildWorkflow, TagConf, PushConf
from atomic_reactor.util import ImageName, ManifestDigest
from atomic_reactor.source import GitSource, PathSource
from atomic_reactor.build import BuildResult
from atomic_reactor.constants import (PLUGIN_PULP_PULL_KEY, PLUGIN_PULP_SYNC_KEY,
PLUGIN_GROUP_MANIFESTS_KEY)
from tests.constants import SOURCE, MOCK
from flexmock import flexmock
import pytest
from tests.docker_mock import mock_docker
import subprocess
from osbs.api import OSBS
from osbs.exceptions import OsbsException
from six import string_types
LogEntry = namedtuple('LogEntry', ['platform', 'line'])
NAMESPACE = 'mynamespace'
BUILD_ID = 'build-1'
class X(object):
pass
class MockedPodResponse(object):
def get_container_image_ids(self):
return {'buildroot:latest': '0123456'}
class MockedClientSession(object):
TAG_TASK_ID = 1234
DEST_TAG = 'images-candidate'
def __init__(self, hub, opts=None, task_states=None):
self.uploaded_files = []
self.build_tags = {}
self.task_states = task_states or ['FREE', 'ASSIGNED', 'CLOSED']
self.task_states = list(self.task_states)
self.task_states.reverse()
self.tag_task_state = self.task_states.pop()
def krb_login(self, principal=None, keytab=None, proxyuser=None):
return True
def ssl_login(self, cert, ca, serverca, proxyuser=None):
return True
def logout(self):
pass
def uploadWrapper(self, localfile, path, name=None, callback=None,
blocksize=1048576, overwrite=True):
self.uploaded_files.append(path)
self.blocksize = blocksize
def CGImport(self, metadata, server_dir):
self.metadata = metadata
self.server_dir = server_dir
return {"id": "123"}
def getBuildTarget(self, target):
return {'dest_tag_name': self.DEST_TAG}
def tagBuild(self, tag, build, force=False, fromtag=None):
self.build_tags[build] = tag
return self.TAG_TASK_ID
def getTaskInfo(self, task_id, request=False):
assert task_id == self.TAG_TASK_ID
# For extra code coverage, imagine Koji denies the task ever
# existed.
if self.tag_task_state is None:
return None
return {'state': koji.TASK_STATES[self.tag_task_state]}
def taskFinished(self, task_id):
try:
self.tag_task_state = self.task_states.pop()
except IndexError:
# No more state changes
pass
return self.tag_task_state in ['CLOSED', 'FAILED', 'CANCELED', None]
FAKE_SIGMD5 = b'0' * 32
FAKE_RPM_OUTPUT = (
b'name1;1.0;1;x86_64;0;' + FAKE_SIGMD5 + b';(none);'
b'RSA/SHA256, Mon 29 Jun 2015 13:58:22 BST, Key ID abcdef01234567\n'
b'gpg-pubkey;01234567;01234567;(none);(none);(none);(none);(none)\n'
b'gpg-pubkey-doc;01234567;01234567;noarch;(none);' + FAKE_SIGMD5 +
b';(none);(none)\n'
b'name2;2.0;2;x86_64;0;' + FAKE_SIGMD5 + b';' +
b'RSA/SHA256, Mon 29 Jun 2015 13:58:22 BST, Key ID bcdef012345678;(none)\n'
b'\n')
FAKE_OS_OUTPUT = 'fedora-22'
def fake_subprocess_output(cmd):
if cmd.startswith('/bin/rpm'):
return FAKE_RPM_OUTPUT
elif 'os-release' in cmd:
return FAKE_OS_OUTPUT
else:
raise RuntimeError
class MockedPopen(object):
def __init__(self, cmd, *args, **kwargs):
self.cmd = cmd
def wait(self):
return 0
def communicate(self):
return (fake_subprocess_output(self.cmd), '')
def fake_Popen(cmd, *args, **kwargs):
return MockedPopen(cmd, *args, **kwargs)
def fake_digest(image):
tag = image.to_str(registry=False)
return 'sha256:{0:032x}'.format(len(tag))
def is_string_type(obj):
return any(isinstance(obj, strtype)
for strtype in string_types)
class BuildInfo(object):
def __init__(self, help_file=None, help_valid=True, media_types=None):
annotations = {}
if media_types:
annotations['media-types'] = json.dumps(media_types)
if help_valid:
annotations['help_file'] = json.dumps(help_file)
self.build = BuildResponse({'metadata': {'annotations': annotations}})
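# Build a fake DockerTasker/DockerBuildWorkflow pair, pre-populated with tag
# and push configuration, build results and per-arch worker metadata, for the
# Koji import/tag plugin tests below.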
def mock_environment(tmpdir, session=None, name=None,
component=None, version=None, release=None,
source=None, build_process_failed=False,
is_rebuild=True, docker_registry=True,
pulp_registries=0, blocksize=None,
task_states=None, additional_tags=None,
has_config=None):
if session is None:
session = MockedClientSession('', task_states=None)
if source is None:
source = GitSource('git', 'git://hostname/path')
if MOCK:
mock_docker()
tasker = DockerTasker()
workflow = DockerBuildWorkflow(SOURCE, "test-image")
base_image_id = '123456parent-id'
setattr(workflow, '_base_image_inspect', {'Id': base_image_id})
setattr(workflow, 'builder', X())
setattr(workflow.builder, 'image_id', '123456imageid')
setattr(workflow.builder, 'base_image', ImageName(repo='Fedora', tag='22'))
setattr(workflow.builder, 'source', X())
setattr(workflow.builder, 'built_image_info', {'ParentId': base_image_id})
setattr(workflow.builder.source, 'dockerfile_path', None)
setattr(workflow.builder.source, 'path', None)
setattr(workflow, 'tag_conf', TagConf())
with open(os.path.join(str(tmpdir), 'Dockerfile'), 'wt') as df:
df.write('FROM base\n'
'LABEL BZComponent={component} com.redhat.component={component}\n'
'LABEL Version={version} version={version}\n'
'LABEL Release={release} release={release}\n'
.format(component=component, version=version, release=release))
setattr(workflow.builder, 'df_path', df.name)
if name and version:
workflow.tag_conf.add_unique_image('user/test-image:{v}-timestamp'
.format(v=version))
if name and version and release:
workflow.tag_conf.add_primary_images(["{0}:{1}-{2}".format(name,
version,
release),
"{0}:{1}".format(name, version),
"{0}:latest".format(name)])
if additional_tags:
workflow.tag_conf.add_primary_images(["{0}:{1}".format(name, tag)
for tag in additional_tags])
flexmock(subprocess, Popen=fake_Popen)
flexmock(koji, ClientSession=lambda hub, opts: session)
flexmock(GitSource)
logs = [LogEntry('x86_64', 'Hurray for bacon: \u2017')]
(flexmock(OSBS)
.should_receive('get_orchestrator_build_logs')
.with_args(BUILD_ID)
.and_return(logs))
(flexmock(OSBS)
.should_receive('get_pod_for_build')
.with_args(BUILD_ID)
.and_return(MockedPodResponse()))
setattr(workflow, 'source', source)
setattr(workflow.source, 'lg', X())
setattr(workflow.source.lg, 'commit_id', '123456')
setattr(workflow, 'push_conf', PushConf())
if docker_registry:
docker_reg = workflow.push_conf.add_docker_registry('docker.example.com')
for image in workflow.tag_conf.images:
tag = image.to_str(registry=False)
if pulp_registries:
docker_reg.digests[tag] = ManifestDigest(v1=fake_digest(image),
v2='sha256:not-used')
else:
docker_reg.digests[tag] = ManifestDigest(v1='sha256:not-used',
v2=fake_digest(image))
if has_config:
docker_reg.config = {
'config': {'architecture': 'x86_64'},
'container_config': {}
}
for pulp_registry in range(pulp_registries):
workflow.push_conf.add_pulp_registry('env', 'pulp.example.com')
with open(os.path.join(str(tmpdir), 'image.tar.xz'), 'wt') as fp:
fp.write('x' * 2**12)
setattr(workflow, 'exported_image_sequence', [{'path': fp.name}])
annotations = {
'worker-builds': {
'x86_64': {
'build': {
'build-name': 'build-1-x86_64',
},
'metadata_fragment': 'configmap/build-1-x86_64-md',
'metadata_fragment_key': 'metadata.json',
},
'ppc64le': {
'build': {
'build-name': 'build-1-ppc64le',
},
'metadata_fragment': 'configmap/build-1-ppc64le-md',
'metadata_fragment_key': 'metadata.json',
}
},
'repositories': {
'unique': ['brew-pulp-docker:8888/myproject/hello-world:0.0.1-9'],
'primary': []
}
}
if build_process_failed:
workflow.build_result = BuildResult(logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
fail_reason="not built")
else:
workflow.build_result = BuildResult(logs=["docker build log - \u2018 \u2017 \u2019 \n'"],
image_id="id1234",
annotations=annotations)
workflow.prebuild_plugins_conf = {}
workflow.prebuild_results[CheckAndSetRebuildPlugin.key] = is_rebuild
workflow.postbuild_results[PostBuildRPMqaPlugin.key] = [
"name1;1.0;1;x86_64;0;2000;" + FAKE_SIGMD5.decode() + ";23000;"
"RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abc;(none)",
"name2;2.0;1;x86_64;0;3000;" + FAKE_SIGMD5.decode() + ";24000"
"RSA/SHA256, Tue 30 Aug 2016 00:00:00, Key ID 01234567890abd;(none)",
]
workflow.postbuild_results[FetchWorkerMetadataPlugin.key] = {
'x86_64': {
'buildroots': [
{
'container': {
'type': 'docker',
'arch': 'x86_64'
},
'extra': {
'osbs': {
'build_id': '12345',
'builder_image_id': '67890'
}
},
'content_generator': {
'version': '1.6.23',
'name': 'atomic-reactor'
},
'host': {
'os': 'Red Hat Enterprise Linux Server 7.3 (Maipo)',
'arch': 'x86_64'
},
'components': [
{
'name': 'perl-Net-LibIDN',
'sigmd5': '1dba38d073ea8f3e6c99cfe32785c81e',
'arch': 'x86_64',
'epoch': None,
'version': '0.12',
'signature': '199e2f91fd431d51',
'release': '15.el7',
'type': 'rpm'
},
{
'name': 'tzdata',
'sigmd5': '2255a5807ca7e4d7274995db18e52bea',
'arch': 'noarch',
'epoch': None,
'version': '2017b',
'signature': '199e2f91fd431d51',
'release': '1.el7',
'type': 'rpm'
},
],
'tools': [
{
'version': '1.12.6',
'name': 'docker'
}
],
'id': 1
}
],
'metadata_version': 0,
'output': [
{
'type': 'log',
'arch': 'noarch',
'filename': 'openshift-final.log',
'filesize': 106690,
'checksum': '2efa754467c0d2ea1a98fb8bfe435955',
'checksum_type': 'md5',
'buildroot_id': 1
},
{
'type': 'log',
'arch': 'noarch',
'filename': 'build.log',
'filesize': 1660,
'checksum': '8198de09fc5940cf7495e2657039ee72',
'checksum_type': 'md5',
'buildroot_id': 1
},
{
'extra': {
'image': {
'arch': 'x86_64'
},
'docker': {
'repositories': [
'docker-registry.example.com:8888/ns/name:1.0-1',
'docker-registry.example.com:8888/ns/name@sha256:...',
],
'parent_id': 'sha256:bf203442',
'id': '123456',
}
},
'checksum': '58a52e6f3ed52818603c2744b4e2b0a2',
'filename': 'test.x86_64.tar.gz',
'buildroot_id': 1,
'components': [
{
'name': 'tzdata',
'sigmd5': 'd9dc4e4f205428bc08a52e602747c1e9',
'arch': 'noarch',
'epoch': None,
'version': '2016d',
'signature': '199e2f91fd431d51',
'release': '1.el7',
'type': 'rpm'
},
{
'name': 'setup',
'sigmd5': 'b1e5ca72c71f94112cd9fb785b95d4da',
'arch': 'noarch',
'epoch': None,
'version': '2.8.71',
'signature': '199e2f91fd431d51',
'release': '6.el7',
'type': 'rpm'
},
],
'type': 'docker-image',
'checksum_type': 'md5',
'arch': 'x86_64',
'filesize': 71268781
}
]
}
}
workflow.plugin_workspace = {
OrchestrateBuildPlugin.key: {
WORKSPACE_KEY_UPLOAD_DIR: 'test-dir',
WORKSPACE_KEY_BUILD_INFO: {
'x86_64': BuildInfo('help.md')
}
}
}
return tasker, workflow
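# Fixture: provide the BUILD and OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE environment
# variables that the plugins under test expect from the OpenShift environment.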
@pytest.fixture
def os_env(monkeypatch):
monkeypatch.setenv('BUILD', json.dumps({
"metadata": {
"creationTimestamp": "2015-07-27T09:24:00Z",
"namespace": NAMESPACE,
"name": BUILD_ID,
}
}))
monkeypatch.setenv('OPENSHIFT_CUSTOM_BUILD_BASE_IMAGE', 'buildroot:latest')
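# Configure an ExitPluginsRunner that runs koji_import (and, when requested,
# koji_tag_build) with the given koji authentication and target options.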
def create_runner(tasker, workflow, ssl_certs=False, principal=None,
keytab=None, target=None, tag_later=False):
args = {
'kojihub': '',
'url': '/',
}
if ssl_certs:
args['koji_ssl_certs'] = '/'
if principal:
args['koji_principal'] = principal
if keytab:
args['koji_keytab'] = keytab
if target:
args['target'] = target
args['poll_interval'] = 0
plugins_conf = [
{'name': KojiImportPlugin.key, 'args': args},
]
if target and tag_later:
plugins_conf.append({'name': KojiTagBuildPlugin.key,
'args': {
'kojihub': '',
'target': target,
'poll_interval': 0.01}})
workflow.exit_plugins_conf = plugins_conf
runner = ExitPluginsRunner(tasker, workflow, plugins_conf)
return runner
class TestKojiImport(object):
def test_koji_import_get_buildroot(self, tmpdir, os_env):
metadatas = {
'ppc64le': {
'buildroots': [
{
'container': {
'type': 'docker',
'arch': 'ppc64le'
},
'id': 1
}
],
},
'x86_64': {
'buildroots': [
{
'container': {
'type': 'docker',
'arch': 'x86_64'
},
'id': 1
}
],
},
}
results = [
{
'container': {
'arch': 'ppc64le',
'type': 'docker',
},
'id': 'ppc64le-1',
},
{
'container': {
'arch': 'x86_64',
'type': 'docker',
},
'id': 'x86_64-1',
},
]
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
session=session,
build_process_failed=True,
name='ns/name',
version='1.0',
release='1')
plugin = KojiImportPlugin(tasker, workflow, '', '/')
assert plugin.get_buildroot(metadatas) == results
def test_koji_import_failed_build(self, tmpdir, os_env):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
session=session,
build_process_failed=True,
name='ns/name',
version='1.0',
release='1')
runner = create_runner(tasker, workflow)
runner.run()
        # Must not have imported this build
assert not hasattr(session, 'metadata')
def test_koji_import_no_build_env(self, tmpdir, monkeypatch, os_env):
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1')
runner = create_runner(tasker, workflow)
# No BUILD environment variable
monkeypatch.delenv("BUILD", raising=False)
with pytest.raises(PluginFailedException) as exc:
runner.run()
assert "plugin 'koji_import' raised an exception: KeyError" in str(exc)
def test_koji_import_no_build_metadata(self, tmpdir, monkeypatch, os_env):
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1')
runner = create_runner(tasker, workflow)
# No BUILD metadata
monkeypatch.setenv("BUILD", json.dumps({}))
with pytest.raises(PluginFailedException):
runner.run()
def test_koji_import_wrong_source_type(self, tmpdir, os_env):
source = PathSource('path', 'file:///dev/null')
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1',
source=source)
runner = create_runner(tasker, workflow)
with pytest.raises(PluginFailedException) as exc:
runner.run()
assert "plugin 'koji_import' raised an exception: RuntimeError" in str(exc)
@pytest.mark.parametrize(('koji_task_id', 'expect_success'), [
(12345, True),
('x', False),
])
def test_koji_import_log_task_id(self, tmpdir, monkeypatch, os_env,
caplog, koji_task_id, expect_success):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
session=session,
name='ns/name',
version='1.0',
release='1')
runner = create_runner(tasker, workflow)
monkeypatch.setenv("BUILD", json.dumps({
'metadata': {
'creationTimestamp': '2015-07-27T09:24:00Z',
'namespace': NAMESPACE,
'name': BUILD_ID,
'labels': {
'koji-task-id': str(koji_task_id),
},
}
}))
runner.run()
metadata = session.metadata
assert 'build' in metadata
build = metadata['build']
assert isinstance(build, dict)
assert 'extra' in build
extra = build['extra']
assert isinstance(extra, dict)
if expect_success:
assert "Koji Task ID {}".format(koji_task_id) in caplog.text()
assert 'container_koji_task_id' in extra
extra_koji_task_id = extra['container_koji_task_id']
assert isinstance(extra_koji_task_id, int)
assert extra_koji_task_id == koji_task_id
else:
assert "invalid task ID" in caplog.text()
assert 'container_koji_task_id' not in extra
@pytest.mark.parametrize('params', [
{
'should_raise': False,
'principal': None,
'keytab': None,
},
{
'should_raise': False,
'principal': '[email protected]',
'keytab': 'FILE:/var/run/secrets/mysecret',
},
{
'should_raise': True,
'principal': '[email protected]',
'keytab': None,
},
{
'should_raise': True,
'principal': None,
'keytab': 'FILE:/var/run/secrets/mysecret',
},
])
def test_koji_import_krb_args(self, tmpdir, params, os_env):
session = MockedClientSession('')
expectation = flexmock(session).should_receive('krb_login').and_return(True)
name = 'name'
version = '1.0'
release = '1'
tasker, workflow = mock_environment(tmpdir,
session=session,
name=name,
version=version,
release=release)
runner = create_runner(tasker, workflow,
principal=params['principal'],
keytab=params['keytab'])
if params['should_raise']:
expectation.never()
with pytest.raises(PluginFailedException):
runner.run()
else:
expectation.once()
runner.run()
def test_koji_import_krb_fail(self, tmpdir, os_env):
session = MockedClientSession('')
(flexmock(session)
.should_receive('krb_login')
.and_raise(RuntimeError)
.once())
tasker, workflow = mock_environment(tmpdir,
session=session,
name='ns/name',
version='1.0',
release='1')
runner = create_runner(tasker, workflow)
with pytest.raises(PluginFailedException):
runner.run()
def test_koji_import_ssl_fail(self, tmpdir, os_env):
session = MockedClientSession('')
(flexmock(session)
.should_receive('ssl_login')
.and_raise(RuntimeError)
.once())
tasker, workflow = mock_environment(tmpdir,
session=session,
name='ns/name',
version='1.0',
release='1')
runner = create_runner(tasker, workflow, ssl_certs=True)
with pytest.raises(PluginFailedException):
runner.run()
@pytest.mark.parametrize('fail_method', [
'get_orchestrator_build_logs',
'get_pod_for_build',
])
def test_koji_import_osbs_fail(self, tmpdir, os_env, fail_method):
tasker, workflow = mock_environment(tmpdir,
name='name',
version='1.0',
release='1')
(flexmock(OSBS)
.should_receive(fail_method)
.and_raise(OsbsException))
runner = create_runner(tasker, workflow)
runner.run()
@staticmethod
def check_components(components):
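        """Every component entry must look like a signed, non-gpg-pubkey RPM."""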
assert isinstance(components, list)
assert len(components) > 0
for component_rpm in components:
assert isinstance(component_rpm, dict)
assert set(component_rpm.keys()) == set([
'type',
'name',
'version',
'release',
'epoch',
'arch',
'sigmd5',
'signature',
])
assert component_rpm['type'] == 'rpm'
assert component_rpm['name']
assert is_string_type(component_rpm['name'])
assert component_rpm['name'] != 'gpg-pubkey'
assert component_rpm['version']
assert is_string_type(component_rpm['version'])
assert component_rpm['release']
epoch = component_rpm['epoch']
assert epoch is None or isinstance(epoch, int)
assert is_string_type(component_rpm['arch'])
assert component_rpm['signature'] != '(none)'
def validate_buildroot(self, buildroot):
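        """Validate a single buildroot entry of the submitted CG metadata."""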
assert isinstance(buildroot, dict)
assert set(buildroot.keys()) == set([
'id',
'host',
'content_generator',
'container',
'tools',
'components',
'extra',
])
host = buildroot['host']
assert isinstance(host, dict)
assert set(host.keys()) == set([
'os',
'arch',
])
assert host['os']
assert is_string_type(host['os'])
assert host['arch']
assert is_string_type(host['arch'])
assert host['arch'] != 'amd64'
content_generator = buildroot['content_generator']
assert isinstance(content_generator, dict)
assert set(content_generator.keys()) == set([
'name',
'version',
])
assert content_generator['name']
assert is_string_type(content_generator['name'])
assert content_generator['version']
assert is_string_type(content_generator['version'])
container = buildroot['container']
assert isinstance(container, dict)
assert set(container.keys()) == set([
'type',
'arch',
])
assert container['type'] == 'docker'
assert container['arch']
assert is_string_type(container['arch'])
assert isinstance(buildroot['tools'], list)
assert len(buildroot['tools']) > 0
for tool in buildroot['tools']:
assert isinstance(tool, dict)
assert set(tool.keys()) == set([
'name',
'version',
])
assert tool['name']
assert is_string_type(tool['name'])
assert tool['version']
assert is_string_type(tool['version'])
self.check_components(buildroot['components'])
extra = buildroot['extra']
assert isinstance(extra, dict)
assert set(extra.keys()) == set([
'osbs',
])
assert 'osbs' in extra
osbs = extra['osbs']
assert isinstance(osbs, dict)
assert set(osbs.keys()) == set([
'build_id',
'builder_image_id',
])
assert is_string_type(osbs['build_id'])
assert is_string_type(osbs['builder_image_id'])
def validate_output(self, output, has_config, expect_digest):
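        """Validate one output entry (log or docker-image) of the metadata."""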
assert isinstance(output, dict)
assert 'buildroot_id' in output
assert 'filename' in output
assert output['filename']
assert is_string_type(output['filename'])
assert 'filesize' in output
assert int(output['filesize']) > 0
assert 'arch' in output
assert output['arch']
assert is_string_type(output['arch'])
assert 'checksum' in output
assert output['checksum']
assert is_string_type(output['checksum'])
assert 'checksum_type' in output
assert output['checksum_type'] == 'md5'
assert is_string_type(output['checksum_type'])
assert 'type' in output
if output['type'] == 'log':
assert set(output.keys()) == set([
'buildroot_id',
'filename',
'filesize',
'arch',
'checksum',
'checksum_type',
'type',
])
assert output['arch'] == 'noarch'
else:
assert set(output.keys()) == set([
'buildroot_id',
'filename',
'filesize',
'arch',
'checksum',
'checksum_type',
'type',
'components',
'extra',
])
assert output['type'] == 'docker-image'
assert is_string_type(output['arch'])
assert output['arch'] != 'noarch'
assert output['arch'] in output['filename']
self.check_components(output['components'])
extra = output['extra']
assert isinstance(extra, dict)
assert set(extra.keys()) == set([
'image',
'docker',
])
image = extra['image']
assert isinstance(image, dict)
assert set(image.keys()) == set([
'arch',
])
assert image['arch'] == output['arch'] # what else?
assert 'docker' in extra
docker = extra['docker']
assert isinstance(docker, dict)
expected_keys_set = set([
'parent_id',
'id',
'repositories',
# 'tags',
])
if has_config:
expected_keys_set.add('config')
assert set(docker.keys()) == expected_keys_set
assert is_string_type(docker['parent_id'])
assert is_string_type(docker['id'])
repositories = docker['repositories']
assert isinstance(repositories, list)
repositories_digest = list(filter(lambda repo: '@sha256' in repo, repositories))
assert sorted(repositories_digest) == sorted(set(repositories_digest))
# if has_config:
# config = docker['config']
# assert isinstance(config, dict)
# assert 'container_config' not in [x.lower() for x in config.keys()]
# assert all(is_string_type(entry) for entry in config)
def test_koji_import_import_fail(self, tmpdir, os_env, caplog):
session = MockedClientSession('')
(flexmock(session)
.should_receive('CGImport')
.and_raise(RuntimeError))
name = 'ns/name'
version = '1.0'
release = '1'
target = 'images-docker-candidate'
tasker, workflow = mock_environment(tmpdir,
name=name,
version=version,
release=release,
session=session)
runner = create_runner(tasker, workflow, target=target)
with pytest.raises(PluginFailedException):
runner.run()
assert 'metadata:' in caplog.text()
@pytest.mark.parametrize(('task_id', 'expect_success'), [
(1234, True),
('x', False),
])
def test_koji_import_filesystem_koji_task_id(self, tmpdir, os_env, caplog, task_id,
expect_success):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1',
session=session)
workflow.prebuild_results[AddFilesystemPlugin.key] = {
'base-image-id': 'abcd',
'filesystem-koji-task-id': task_id,
}
runner = create_runner(tasker, workflow)
runner.run()
data = session.metadata
assert 'build' in data
build = data['build']
assert isinstance(build, dict)
assert 'extra' in build
extra = build['extra']
assert isinstance(extra, dict)
if expect_success:
assert 'filesystem_koji_task_id' in extra
filesystem_koji_task_id = extra['filesystem_koji_task_id']
assert isinstance(filesystem_koji_task_id, int)
assert filesystem_koji_task_id == task_id
else:
assert 'invalid task ID' in caplog.text()
assert 'filesystem_koji_task_id' not in extra
def test_koji_import_filesystem_koji_task_id_missing(self, tmpdir, os_env, caplog):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1',
session=session)
workflow.prebuild_results[AddFilesystemPlugin.key] = {
'base-image-id': 'abcd',
}
runner = create_runner(tasker, workflow)
runner.run()
data = session.metadata
assert 'build' in data
build = data['build']
assert isinstance(build, dict)
assert 'extra' in build
extra = build['extra']
assert isinstance(extra, dict)
assert 'filesystem_koji_task_id' not in extra
assert AddFilesystemPlugin.key in caplog.text()
@pytest.mark.parametrize(('apis',
'docker_registry',
'pulp_registries',
'target'), [
('v1-only',
False,
1,
'images-docker-candidate'),
('v1+v2',
True,
2,
None),
('v2-only',
True,
1,
None),
('v1+v2',
True,
0,
None),
])
@pytest.mark.parametrize(('has_config', 'is_autorebuild'), [
# (True,
# True),
(False,
False),
])
@pytest.mark.parametrize('tag_later', (True, False))
@pytest.mark.parametrize('pulp_pull,expect_id', (
(("abcdef01234567", ['v1']), 'abcdef01234567'),
((None, ['v1', 'v2']), '123456'),
(False, '123456'),
))
def test_koji_import_success(self, tmpdir, apis, docker_registry,
pulp_registries,
target, os_env, has_config, is_autorebuild,
tag_later, pulp_pull, expect_id):
session = MockedClientSession('')
        # When a target is provided, the koji build will always be tagged,
        # either by koji_import or by koji_tag_build.
(flexmock(session)
.should_call('tagBuild')
.with_args('images-candidate', '123')
) # .times(1 if target else 0))
component = 'component'
name = 'ns/name'
version = '1.0'
release = '1'
if has_config and not docker_registry:
# Not a valid combination
has_config = False
tasker, workflow = mock_environment(tmpdir,
session=session,
name=name,
component=component,
version=version,
release=release,
docker_registry=docker_registry,
pulp_registries=pulp_registries,
has_config=has_config)
workflow.prebuild_results[CheckAndSetRebuildPlugin.key] = is_autorebuild
if pulp_pull:
workflow.exit_results[PLUGIN_PULP_PULL_KEY] = pulp_pull
runner = create_runner(tasker, workflow, target=target, tag_later=tag_later)
runner.run()
data = session.metadata
assert set(data.keys()) == set([
'metadata_version',
'build',
'buildroots',
'output',
])
assert data['metadata_version'] in ['0', 0]
build = data['build']
assert isinstance(build, dict)
buildroots = data['buildroots']
assert isinstance(buildroots, list)
assert len(buildroots) > 0
output_files = data['output']
assert isinstance(output_files, list)
if pulp_pull:
for output in output_files:
if 'extra' in output:
assert output['extra']['docker']['id'] == expect_id
assert set(build.keys()) == set([
'name',
'version',
'release',
'source',
'start_time',
'end_time',
'extra', # optional but always supplied
])
assert build['name'] == component
assert build['version'] == version
assert build['release'] == release
assert build['source'] == 'git://hostname/path#123456'
start_time = build['start_time']
assert isinstance(start_time, int) and start_time
end_time = build['end_time']
assert isinstance(end_time, int) and end_time
extra = build['extra']
assert isinstance(extra, dict)
assert 'image' in extra
image = extra['image']
assert isinstance(image, dict)
assert 'autorebuild' in image
autorebuild = image['autorebuild']
assert isinstance(autorebuild, bool)
assert autorebuild == is_autorebuild
for buildroot in buildroots:
self.validate_buildroot(buildroot)
# Unique within buildroots in this metadata
assert len([b for b in buildroots
if b['id'] == buildroot['id']]) == 1
for output in output_files:
self.validate_output(output, has_config, expect_digest=docker_registry)
buildroot_id = output['buildroot_id']
# References one of the buildroots
assert len([buildroot for buildroot in buildroots
if buildroot['id'] == buildroot_id]) == 1
build_id = runner.plugins_results[KojiImportPlugin.key]
assert build_id == "123"
# if target is not None:
# assert session.build_tags[build_id] == session.DEST_TAG
# assert session.tag_task_state == 'CLOSED'
@pytest.mark.parametrize('use_pulp', [False, True])
def test_koji_import_pullspec(self, tmpdir, os_env, use_pulp):
session = MockedClientSession('')
name = 'ns/name'
version = '1.0'
release = '1'
tasker, workflow = mock_environment(tmpdir,
session=session,
name=name,
version=version,
release=release,
pulp_registries=1,
)
if use_pulp:
workflow.postbuild_results[PLUGIN_PULP_SYNC_KEY] = [
ImageName.parse('crane.example.com/ns/name:1.0-1'),
]
runner = create_runner(tasker, workflow)
runner.run()
log_outputs = [
output
for output in session.metadata['output']
if output['type'] == 'log'
]
assert log_outputs
docker_outputs = [
output
for output in session.metadata['output']
if output['type'] == 'docker-image'
]
assert len(docker_outputs) == 1
docker_output = docker_outputs[0]
digest_pullspecs = [
repo
for repo in docker_output['extra']['docker']['repositories']
if '@sha256' in repo
]
assert len(digest_pullspecs) == 1
tag_pullspecs = [
repo
for repo in docker_output['extra']['docker']['repositories']
if '@sha256' not in repo
]
assert len(tag_pullspecs) == 1
pullspec = tag_pullspecs[0]
nvr_tag = '{}:{}-{}'.format(name, version, release)
assert pullspec.endswith(nvr_tag)
# Check registry
reg = set(ImageName.parse(repo).registry
for repo in docker_output['extra']['docker']['repositories'])
assert len(reg) == 1
if use_pulp:
assert reg == set(['crane.example.com'])
else:
assert reg == set(['docker-registry.example.com:8888'])
def test_koji_import_without_build_info(self, tmpdir, os_env):
class LegacyCGImport(MockedClientSession):
def CGImport(self, *args, **kwargs):
super(LegacyCGImport, self).CGImport(*args, **kwargs)
return
session = LegacyCGImport('')
name = 'ns/name'
version = '1.0'
release = '1'
tasker, workflow = mock_environment(tmpdir,
session=session,
name=name,
version=version,
release=release)
runner = create_runner(tasker, workflow)
runner.run()
assert runner.plugins_results[KojiImportPlugin.key] is None
@pytest.mark.parametrize('expect_result', [
'empty_config',
'no_help_file',
'skip',
'pass'
])
def test_koji_import_add_help(self, tmpdir, os_env, expect_result):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1',
session=session)
if expect_result == 'pass':
workflow.plugin_workspace[OrchestrateBuildPlugin.key][WORKSPACE_KEY_BUILD_INFO]['x86_64'] = BuildInfo('foo.md') # noqa
elif expect_result == 'empty_config':
workflow.plugin_workspace[OrchestrateBuildPlugin.key][WORKSPACE_KEY_BUILD_INFO]['x86_64'] = BuildInfo('help.md') # noqa
elif expect_result == 'no_help_file':
workflow.plugin_workspace[OrchestrateBuildPlugin.key][WORKSPACE_KEY_BUILD_INFO]['x86_64'] = BuildInfo(None) # noqa
elif expect_result == 'skip':
workflow.plugin_workspace[OrchestrateBuildPlugin.key][WORKSPACE_KEY_BUILD_INFO]['x86_64'] = BuildInfo(None, False) # noqa
runner = create_runner(tasker, workflow)
runner.run()
data = session.metadata
assert 'build' in data
build = data['build']
assert isinstance(build, dict)
assert 'extra' in build
extra = build['extra']
assert isinstance(extra, dict)
assert 'image' in extra
image = extra['image']
assert isinstance(image, dict)
if expect_result == 'pass':
assert 'help' in image.keys()
assert image['help'] == 'foo.md'
elif expect_result == 'empty_config':
assert 'help' in image.keys()
assert image['help'] == 'help.md'
elif expect_result == 'no_help_file':
assert 'help' in image.keys()
assert image['help'] is None
elif expect_result in ['skip', 'unknown_status']:
assert 'help' not in image.keys()
@pytest.mark.parametrize(('config', 'expected'), [
({'pulp_push': False,
'pulp_pull_in_worker': False,
'pulp_pull_in_orchestrator': False,
'schema1': False,
'schema2': False,
'list': False},
None),
({'pulp_push': True,
'pulp_pull_in_worker': False,
'pulp_pull_in_orchestrator': False,
'schema1': False,
'schema2': False,
'list': False},
["application/json"]),
({'pulp_push': True,
'pulp_pull_in_worker': True,
'pulp_pull_in_orchestrator': False,
'schema1': True,
'schema2': False,
'list': False},
["application/json",
"application/vnd.docker.distribution.manifest.v1+json"]),
({'pulp_push': False,
'pulp_pull_in_worker': True,
'pulp_pull_in_orchestrator': False,
'schema1': True,
'schema2': False,
'list': False},
["application/vnd.docker.distribution.manifest.v1+json"]),
({'pulp_push': True,
'pulp_pull_in_worker': True,
'pulp_pull_in_orchestrator': False,
'schema1': True,
'schema2': True,
'list': False},
["application/json",
"application/vnd.docker.distribution.manifest.v1+json",
"application/vnd.docker.distribution.manifest.v2+json"]),
({'pulp_push': True,
'pulp_pull_in_worker': False,
'pulp_pull_in_orchestrator': True,
'schema1': True,
'schema2': True,
'list': False},
["application/json",
"application/vnd.docker.distribution.manifest.v1+json",
"application/vnd.docker.distribution.manifest.v2+json"]),
({'pulp_push': True,
'pulp_pull_in_worker': False,
'pulp_pull_in_orchestrator': True,
'schema1': True,
'schema2': True,
'list': True},
["application/json",
"application/vnd.docker.distribution.manifest.v1+json",
"application/vnd.docker.distribution.manifest.v2+json",
"application/vnd.docker.distribution.manifest.list.v2+json"]),
({'pulp_push': True,
'pulp_pull_in_worker': False,
'pulp_pull_in_orchestrator': True,
'schema1': False,
'schema2': True,
'list': True},
["application/json",
"application/vnd.docker.distribution.manifest.v2+json",
"application/vnd.docker.distribution.manifest.list.v2+json"]),
({'pulp_push': False,
'pulp_pull_in_worker': False,
'pulp_pull_in_orchestrator': True,
'schema1': True,
'schema2': True,
'list': True},
["application/vnd.docker.distribution.manifest.v1+json",
"application/vnd.docker.distribution.manifest.v2+json",
"application/vnd.docker.distribution.manifest.list.v2+json"]),
])
def test_koji_import_set_media_types(self, tmpdir, os_env, config, expected):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1',
session=session)
worker_media_types = []
if config['pulp_push']:
worker_media_types += ["application/json"]
pulp_pull_media_types = []
if config['schema1']:
pulp_pull_media_types += ['application/vnd.docker.distribution.manifest.v1+json']
if config['schema2']:
pulp_pull_media_types += ['application/vnd.docker.distribution.manifest.v2+json']
if config['list']:
pulp_pull_media_types += ['application/vnd.docker.distribution.manifest.list.v2+json']
if config['pulp_pull_in_worker']:
worker_media_types += pulp_pull_media_types
if worker_media_types:
build_info = BuildInfo(media_types=worker_media_types)
orchestrate_plugin = workflow.plugin_workspace[OrchestrateBuildPlugin.key]
orchestrate_plugin[WORKSPACE_KEY_BUILD_INFO]['x86_64'] = build_info
workflow.postbuild_results[PLUGIN_PULP_SYNC_KEY] = [
ImageName.parse('crane.example.com/ns/name:1.0-1'),
]
if config['pulp_pull_in_orchestrator']:
workflow.exit_results[PLUGIN_PULP_PULL_KEY] = ("acbdef1234", pulp_pull_media_types)
runner = create_runner(tasker, workflow)
runner.run()
data = session.metadata
assert 'build' in data
build = data['build']
assert isinstance(build, dict)
assert 'extra' in build
extra = build['extra']
assert isinstance(extra, dict)
assert 'image' in extra
image = extra['image']
assert isinstance(image, dict)
if expected:
assert 'media_types' in image.keys()
assert sorted(image['media_types']) == sorted(expected)
else:
assert 'media_types' not in image.keys()
@pytest.mark.parametrize('digests', [
[],
[ManifestDigest(v2_list='sha256:e6593f3e')],
])
def test_koji_import_set_manifest_list_info(self, tmpdir, os_env, digests):
session = MockedClientSession('')
tasker, workflow = mock_environment(tmpdir,
name='ns/name',
version='1.0',
release='1',
session=session,
docker_registry=True)
workflow.postbuild_results[PLUGIN_GROUP_MANIFESTS_KEY] = digests
orchestrate_plugin = workflow.plugin_workspace[OrchestrateBuildPlugin.key]
orchestrate_plugin[WORKSPACE_KEY_BUILD_INFO]['x86_64'] = BuildInfo()
runner = create_runner(tasker, workflow)
runner.run()
data = session.metadata
assert 'build' in data
build = data['build']
assert isinstance(build, dict)
assert 'extra' in build
extra = build['extra']
assert isinstance(extra, dict)
assert 'image' in extra
image = extra['image']
assert isinstance(image, dict)
expected_results = {}
expected_results['tags'] = [tag.tag for tag in workflow.tag_conf.images]
if digests:
assert 'index' in image.keys()
pullspec = "docker.example.com/myproject/hello-world@{0}".format(digests[0].v2_list)
expected_results['pull'] = [pullspec]
for tag in expected_results['tags']:
if '-' in tag:
pullspec = "docker.example.com/myproject/hello-world:{0}".format(tag)
expected_results['pull'].append(pullspec)
break
assert image['index'] == expected_results
else:
assert 'index' not in image.keys()
assert 'output' in data
for output in data['output']:
if output['type'] == 'log':
continue
assert 'extra' in output
extra = output['extra']
assert 'docker' in extra
assert 'tags' in extra['docker']
assert sorted(expected_results['tags']) == sorted(extra['docker']['tags'])
"""
:created: 2018-01
:author: Alex BROSSARD <[email protected]>
"""
from PySide2 import QtWidgets, QtCore, QtGui
from pymel import core as pmc
from auri.auri_lib import AuriScriptView, AuriScriptController, AuriScriptModel, is_checked, grpbox
from auri.scripts.Maya_Scripts import rig_lib
from auri.scripts.Maya_Scripts.rig_lib import RigController
reload(rig_lib)
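# Auri "horizontal_spine" module: builds an IK-spline spine with FK controls,
# optional start/end IK controls, stretch/squash and local-space switching.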
class View(AuriScriptView):
def __init__(self, *args, **kwargs):
self.modules_cbbox = QtWidgets.QComboBox()
self.outputs_cbbox = QtWidgets.QComboBox()
self.refresh_btn = QtWidgets.QPushButton("Refresh")
self.prebuild_btn = QtWidgets.QPushButton("Prebuild")
self.how_many_jnts = QtWidgets.QSpinBox()
self.how_many_ctrls = QtWidgets.QSpinBox()
self.ik_creation_switch = QtWidgets.QCheckBox()
self.stretch_creation_switch = QtWidgets.QCheckBox()
self.refresh_spaces_btn = QtWidgets.QPushButton("Refresh")
self.add_space_btn = QtWidgets.QPushButton("Add")
self.remove_space_btn = QtWidgets.QPushButton("Remove")
self.space_modules_cbbox = QtWidgets.QComboBox()
self.spaces_cbbox = QtWidgets.QComboBox()
self.selected_space_module = "No_space_module"
self.selected_space = "no_space"
self.space_list_view = QtWidgets.QListView()
self.space_list = QtGui.QStringListModel()
super(View, self).__init__(*args, **kwargs)
def set_controller(self):
self.ctrl = Controller(self.model, self)
def set_model(self):
self.model = Model()
def refresh_view(self):
self.ik_creation_switch.setChecked(self.model.ik_creation_switch)
self.stretch_creation_switch.setChecked(self.model.stretch_creation_switch)
self.how_many_ctrls.setValue(self.model.how_many_ctrls)
self.how_many_jnts.setValue(self.model.how_many_jnts)
self.ctrl.look_for_parent()
self.space_list.setStringList(self.model.space_list)
self.ctrl.look_for_parent(l_cbbox_stringlist=self.ctrl.modules_with_spaces,
l_cbbox_selection=self.selected_space_module,
l_cbbox=self.space_modules_cbbox, r_cbbox_stringlist=self.ctrl.spaces_model,
r_cbbox_selection=self.selected_space, r_cbbox=self.spaces_cbbox)
def setup_ui(self):
self.modules_cbbox.setModel(self.ctrl.modules_with_output)
self.modules_cbbox.currentTextChanged.connect(self.ctrl.on_modules_cbbox_changed)
self.outputs_cbbox.setModel(self.ctrl.outputs_model)
self.outputs_cbbox.currentTextChanged.connect(self.ctrl.on_outputs_cbbox_changed)
self.how_many_jnts.setMinimum(1)
self.how_many_jnts.valueChanged.connect(self.ctrl.on_how_many_jnts_changed)
self.how_many_ctrls.setMinimum(2)
self.how_many_ctrls.valueChanged.connect(self.ctrl.on_how_many_ctrls_changed)
self.ik_creation_switch.stateChanged.connect(self.ctrl.on_ik_creation_switch_changed)
self.stretch_creation_switch.stateChanged.connect(self.ctrl.on_stretch_creation_switch_changed)
self.refresh_btn.clicked.connect(self.ctrl.look_for_parent)
self.prebuild_btn.clicked.connect(self.ctrl.prebuild)
self.space_modules_cbbox.setModel(self.ctrl.modules_with_spaces)
self.space_modules_cbbox.currentTextChanged.connect(self.ctrl.on_space_modules_cbbox_changed)
self.spaces_cbbox.setModel(self.ctrl.spaces_model)
self.spaces_cbbox.currentTextChanged.connect(self.ctrl.on_spaces_cbbox_changed)
self.space_list_view.setSelectionMode(QtWidgets.QAbstractItemView.ExtendedSelection)
self.space_list.setStringList(self.model.space_list)
self.space_list_view.setModel(self.space_list)
self.add_space_btn.clicked.connect(self.ctrl.add_space_to_list)
self.remove_space_btn.clicked.connect(self.ctrl.remove_space_from_list)
self.refresh_spaces_btn.clicked.connect(self.ctrl.look_for_spaces)
main_layout = QtWidgets.QVBoxLayout()
select_parent_layout = QtWidgets.QVBoxLayout()
select_parent_grp = grpbox("Select parent", select_parent_layout)
cbbox_layout = QtWidgets.QHBoxLayout()
cbbox_layout.addWidget(self.modules_cbbox)
cbbox_layout.addWidget(self.outputs_cbbox)
select_parent_layout.addLayout(cbbox_layout)
select_parent_layout.addWidget(self.refresh_btn)
select_spaces_layout = QtWidgets.QVBoxLayout()
select_spaces_grp = grpbox("Select local spaces :", select_spaces_layout)
spaces_cbbox_layout = QtWidgets.QHBoxLayout()
spaces_cbbox_layout.addWidget(self.space_modules_cbbox)
spaces_cbbox_layout.addWidget(self.spaces_cbbox)
btn_layout = QtWidgets.QVBoxLayout()
btn_layout.addWidget(self.refresh_spaces_btn)
btn_layout.addWidget(self.add_space_btn)
select_spaces_layout.addLayout(spaces_cbbox_layout)
select_spaces_layout.addLayout(btn_layout)
space_list_layout = QtWidgets.QVBoxLayout()
space_list_grp = grpbox("local spaces :", space_list_layout)
space_list_layout.addWidget(self.space_list_view)
space_list_layout.addWidget(self.remove_space_btn)
options_layout = QtWidgets.QVBoxLayout()
options_grp = grpbox("Options", options_layout)
how_many_layout = QtWidgets.QVBoxLayout()
jnts_layout = QtWidgets.QVBoxLayout()
jnts_text = QtWidgets.QLabel("How many jnts :")
jnts_layout.addWidget(jnts_text)
jnts_layout.addWidget(self.how_many_jnts)
ctrls_layout = QtWidgets.QVBoxLayout()
ctrls_text = QtWidgets.QLabel("How many ctrls :")
ctrls_layout.addWidget(ctrls_text)
ctrls_layout.addWidget(self.how_many_ctrls)
how_many_layout.addLayout(jnts_layout)
how_many_layout.addLayout(ctrls_layout)
checkbox_layout = QtWidgets.QVBoxLayout()
ik_layout = QtWidgets.QHBoxLayout()
ik_text = QtWidgets.QLabel("IK ctrls :")
ik_layout.addWidget(ik_text)
ik_layout.addWidget(self.ik_creation_switch)
stretch_layout = QtWidgets.QHBoxLayout()
stretch_text = QtWidgets.QLabel("stretch/squash :")
stretch_layout.addWidget(stretch_text)
stretch_layout.addWidget(self.stretch_creation_switch)
checkbox_layout.addLayout(ik_layout)
checkbox_layout.addLayout(stretch_layout)
options_layout.addLayout(how_many_layout)
options_layout.addLayout(checkbox_layout)
main_layout.addWidget(select_parent_grp)
main_layout.addWidget(options_grp)
main_layout.addWidget(select_spaces_grp)
main_layout.addWidget(space_list_grp)
main_layout.addWidget(self.prebuild_btn)
self.setLayout(main_layout)
class Controller(RigController):
def __init__(self, model, view):
"""
Args:
model (Model):
view (View):
"""
self.guides_grp = None
self.guides = []
self.guide_names = []
self.created_spine_jnts = []
self.created_pelvis_jnt = None
self.ik_spline = None
self.created_locs = []
# self.tangent_locs = []
self.created_fk_ctrls = []
self.created_pelvis_ctrl = None
self.created_ik_ctrls = []
self.jnts_to_skin = []
RigController.__init__(self, model, view)
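    # Create (or conform) the pelvis locator and spine curve guides the user
    # positions before building; the curve is rebuilt to match the ctrl count.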
def prebuild(self):
temp_outputs = ["pelvis_OUTPUT", "start_OUTPUT", "end_OUTPUT"]
for i in xrange(self.model.how_many_jnts):
temp_output = "jnt_{0}_OUTPUT".format(i)
temp_outputs.append(temp_output)
self.create_temporary_outputs(temp_outputs)
self.guide_names = ["{0}_pelvis_GUIDE".format(self.model.module_name),
"{0}_spine_GUIDE".format(self.model.module_name)]
d = 3
nb_points = self.model.how_many_ctrls - 2
if self.model.how_many_ctrls < 4:
d = 3 + self.model.how_many_ctrls - 4
nb_points = 2
if self.guide_check(self.guide_names):
self.guides = pmc.ls(self.guide_names)
if d != 2 and (self.guides[1].getShape().getAttr("spans") != nb_points - 1 or
self.guides[1].getShape().getAttr("degree") != d):
self.guides[1] = pmc.rebuildCurve(self.guide_names[1], rpo=0, rt=0, end=1, kr=0, kep=1, kt=0,
s=(nb_points - 1), d=d, ch=0, replaceOriginal=1)[0]
elif self.guides[1].getShape().getAttr("spans") != nb_points - 1 or self.guides[1].getShape().getAttr("degree") != d:
self.guides[1] = pmc.rebuildCurve(self.guide_names[1], rpo=0, rt=0, end=1, kr=0, kep=1, kt=0,
s=3, d=d, ch=0, replaceOriginal=1)[0]
pmc.delete(self.guides[1].cv[-2])
pmc.delete(self.guides[1].cv[1])
self.guides_grp = pmc.ls("{0}_guides".format(self.model.module_name))[0]
self.guides_grp.setAttr("visibility", 1)
self.view.refresh_view()
pmc.select(cl=1)
return
pelvis_guides = pmc.spaceLocator(p=(0, 0, 0), n=self.guide_names[0])
spine_guide = rig_lib.create_curve_guide(d=d, number_of_points=nb_points, name=self.guide_names[1],
hauteur_curve=8, front_axe="z")
self.guides = [pelvis_guides, spine_guide]
self.guides_grp = self.group_guides(self.guides)
self.guides[0].setAttr("translate", (0, 7, -1))
self.guides[1].setAttr("translate", (0, 8, 0))
self.view.refresh_view()
pmc.select(cl=1)
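    # Full build: joints from the guide curve, IK spline, FK/IK controls,
    # twist, optional stretch/squash, outputs, local spaces, then cleanup.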
def execute(self):
self.created_locs = []
self.created_fk_ctrls = []
self.prebuild()
self.delete_existing_objects()
self.connect_to_parent()
self.create_jnts()
self.create_ikspline()
self.create_fk()
self.activate_twist()
if self.model.stretch_creation_switch == 1 and self.model.how_many_ctrls > 2:
self.connect_z_ik_spline_stretch(self.ik_spline, self.created_spine_jnts, measure_type="accurate")
elif self.model.stretch_creation_switch == 1:
self.connect_z_ik_spline_stretch(self.ik_spline, self.created_spine_jnts, measure_type="average")
if self.model.ik_creation_switch == 1:
self.create_ik()
self.create_outputs()
self.create_local_spaces()
self.clean_rig()
pmc.select(cl=1)
def create_jnts(self):
guide_rebuilded = pmc.rebuildCurve(self.guides[1], rpo=0, rt=0, end=1, kr=0, kep=1, kt=0,
s=self.model.how_many_jnts, d=1, ch=0, replaceOriginal=0)[0]
if self.model.how_many_jnts == 2:
pmc.delete(guide_rebuilded.cv[-2])
pmc.delete(guide_rebuilded.cv[1])
guide_rebuilded.rename("{0}_temp_rebuilded_GUIDE".format(self.model.module_name))
vertex_list = guide_rebuilded.cv[:]
self.created_spine_jnts = rig_lib.create_jnts_from_cv_list_and_return_jnts_list(vertex_list,
self.model.module_name,
forward_axis="z")
pmc.parent(self.created_spine_jnts[0], self.jnt_input_grp, r=0)
rig_lib.change_jnt_chain_suffix(self.created_spine_jnts, new_suffix="SKN")
pmc.delete(guide_rebuilded)
pmc.select(cl=1)
self.created_pelvis_jnt = pmc.joint(p=(pmc.xform(self.guides[0], q=1, ws=1, translation=1)),
n="{0}_pelvis_SKN".format(self.model.module_name))
self.created_pelvis_jnt.setAttr("rotateOrder", 2)
pmc.parent(self.created_pelvis_jnt, self.jnt_input_grp)
self.jnts_to_skin = self.created_spine_jnts[:]
self.jnts_to_skin.append(self.created_pelvis_jnt)
def create_ikspline(self):
# if self.model.ik_creation_switch:
# self.ik_spline = pmc.rebuildCurve(self.guides[1], rpo=0, rt=0, end=1, kr=0, kep=1, kt=0,
# s=self.model.how_many_ctrls - 1, d=3, ch=0, replaceOriginal=0,
# n="{0}_ik_CRV".format(self.model.module_name))[0]
# else:
# self.ik_spline = pmc.duplicate(self.guides[1], n="{0}_ik_CRV".format(self.model.module_name))[0]
#
self.ik_spline = pmc.duplicate(self.guides[1], n="{0}_ik_CRV".format(self.model.module_name))[0]
ik_handle = pmc.ikHandle(n=("{0}_ik_HDL".format(self.model.module_name)), startJoint=self.created_spine_jnts[0],
endEffector=self.created_spine_jnts[-1], solver="ikSplineSolver", curve=self.ik_spline,
createCurve=False, parentCurve=False)[0]
pmc.parent(self.ik_spline, self.parts_grp, r=1)
pmc.parent(ik_handle, self.parts_grp, r=1)
ik_effector = pmc.listRelatives(self.created_spine_jnts[-2], children=1)[1]
ik_effector.rename("{0}_ik_EFF".format(self.model.module_name))
# pmc.parent(ik_effector, self.created_spine_jnts[-1])
def create_fk(self):
ik_spline_cv_list = []
ik_spline_controlpoints_list = []
for i, cv in enumerate(self.guides[1].cv):
ik_spline_cv_list.append(cv)
for i, cp in enumerate(self.ik_spline.controlPoints):
ik_spline_controlpoints_list.append(cp)
for i, cv in enumerate(ik_spline_cv_list):
self.created_locs.append(self.create_locators(i, cv, ik_spline_controlpoints_list))
self.create_ctrls(i, self.created_locs[i])
# for i, cv in enumerate(self.ik_spline.cv):
# ik_spline_cv_list.append(cv)
#
# for i, cp in enumerate(self.ik_spline.controlPoints):
# ik_spline_controlpoints_list.append(cp)
#
# if not self.model.ik_creation_switch:
# for i, cv in enumerate(ik_spline_cv_list):
# cv_loc = self.create_locators(i, cv, ik_spline_controlpoints_list)
# self.create_ctrls(i, cv_loc)
# self.created_locs.append(cv_loc)
# else:
# ik_spline_none_tangent_cv_list = [cv for cv in ik_spline_cv_list if cv != ik_spline_cv_list[1] and cv != ik_spline_cv_list[-2]]
#
# ik_spline_none_tangent_controlpoints_list = [cp for cp in ik_spline_controlpoints_list if cp != ik_spline_controlpoints_list[1] and cp != ik_spline_controlpoints_list[-2]]
#
# for i, cv in enumerate(ik_spline_none_tangent_cv_list):
# cv_loc = self.create_locators(i, cv, ik_spline_none_tangent_controlpoints_list)
# self.create_ctrls(i, cv_loc)
# self.created_locs.append(cv_loc)
#
# start_tangent_loc = self.create_locators(i=1, cv=ik_spline_cv_list[1],
# ik_spline_controlpoints=ik_spline_controlpoints_list)
# end_tangent_loc = self.create_locators(i=-2, cv=ik_spline_cv_list[-2],
# ik_spline_controlpoints=ik_spline_controlpoints_list)
# start_tangent_loc.rename("{0}_start_tangent_pos".format(self.model.module_name))
# end_tangent_loc.rename("{0}_end_tangent_pos".format(self.model.module_name))
# self.tangent_locs = [start_tangent_loc, end_tangent_loc]
self.ik_spline.setAttr("translate", (0, 0, 0))
self.ik_spline.setAttr("rotate", (0, 0, 0))
self.ik_spline.setAttr("scale", (1, 1, 1))
# pmc.parentConstraint(self.created_fk_ctrls[-1], self.created_spine_jnts[-1], maintainOffset=1, skipTranslate=("x", "y", "z"))
pmc.select(cl=1)
pelvis_ctrl_shape = pmc.circle(c=(0, 0, 0), nr=(0, 1, 0), sw=360, r=2.5, d=3, s=8,
n="{0}_pelvis_CTRL_shape".format(self.model.module_name), ch=0)[0]
self.created_pelvis_ctrl = rig_lib.create_jnttype_ctrl(name="{0}_pelvis_CTRL".format(self.model.module_name),
shape=pelvis_ctrl_shape, drawstyle=2, rotateorder=2)
self.created_pelvis_ctrl.setAttr("translate", pmc.xform(self.created_pelvis_jnt, q=1, ws=1, translation=1))
pmc.parent(self.created_pelvis_ctrl, self.ctrl_input_grp)
# pmc.pointConstraint(self.created_pelvis_ctrl, self.created_pelvis_jnt, maintainOffset=1)
# self.created_pelvis_ctrl.rotate >> self.created_pelvis_jnt.rotate
pmc.parentConstraint(self.created_pelvis_ctrl, self.created_pelvis_jnt, maintainOffset=1)
self.created_pelvis_ctrl.scale >> self.created_pelvis_jnt.scale
def create_locators(self, i, cv, ik_spline_controlpoints):
cv_loc = pmc.spaceLocator(p=(0, 0, 0), n="{0}_{1}_pos".format(self.model.module_name, (i + 1)))
cv_loc.setAttr("translate", pmc.xform(cv, q=1, ws=1, translation=1))
cv_loc_shape = cv_loc.getShape()
cv_loc_shape.worldPosition >> ik_spline_controlpoints[i]
return cv_loc
def create_ctrls(self, i, cv_loc):
pmc.select(cl=1)
ctrl_shape = pmc.circle(c=(0, 0, 0), nr=(0, 0, 1), sw=360, r=3, d=3, s=8,
n="{0}_{1}_fk_CTRL_shape".format(self.model.module_name, (i + 1)), ch=0)[0]
ctrl = rig_lib.create_jnttype_ctrl(name="{0}_{1}_fk_CTRL".format(self.model.module_name, (i + 1)), shape=ctrl_shape,
drawstyle=2, rotateorder=2)
nearest_point_on_curve = pmc.createNode("nearestPointOnCurve", n="temp_NPOC")
self.guides[1].worldSpace >> nearest_point_on_curve.inputCurve
cv_loc.getShape().worldPosition >> nearest_point_on_curve.inPosition
ctrl.setAttr("translate", nearest_point_on_curve.getAttr("position"))
# nearest_point_on_curve.position >> ctrl.translate
pmc.delete(nearest_point_on_curve)
# ctrl.setAttr("translate", pmc.xform(cv_loc, q=1, ws=1, translation=1))
if i == 0:
pmc.parent(ctrl, self.ctrl_input_grp, r=0)
else:
pmc.parent(ctrl, "{0}_{1}_fk_CTRL".format(self.model.module_name, i), r=0)
pmc.reorder(ctrl, front=1)
pmc.parent(cv_loc, ctrl, r=0)
self.created_fk_ctrls.append(ctrl)
def create_ik(self):
start_shape = rig_lib.z_box_curve("{0}_start_ik_CTRL_shape".format(self.model.module_name))
start_ctrl = rig_lib.create_jnttype_ctrl(name="{0}_start_ik_CTRL".format(self.model.module_name), shape=start_shape,
drawstyle=2, rotateorder=2)
end_shape = rig_lib.z_box_curve("{0}_end_ik_CTRL_shape".format(self.model.module_name))
end_ctrl = rig_lib.create_jnttype_ctrl(name="{0}_end_ik_CTRL".format(self.model.module_name), shape=end_shape,
drawstyle=2, rotateorder=2)
start_ofs = pmc.group(start_ctrl, n="{0}_start_ik_ctrl_OFS".format(self.model.module_name))
start_ofs.setAttr("rotateOrder", 2)
end_ofs = pmc.group(end_ctrl, n="{0}_end_ik_ctrl_OFS".format(self.model.module_name))
end_ofs.setAttr("rotateOrder", 2)
start_ofs.setAttr("translate", pmc.xform(self.created_fk_ctrls[0], q=1, ws=1, translation=1))
start_ofs.setAttr("rotate", pmc.xform(self.created_fk_ctrls[0], q=1, ws=1, rotation=1))
end_ofs.setAttr("translate", pmc.xform(self.created_fk_ctrls[-1], q=1, ws=1, translation=1))
end_ofs.setAttr("rotate", pmc.xform(self.created_fk_ctrls[-1], q=1, ws=1, rotation=1))
pmc.parent(start_ofs, self.ctrl_input_grp, r=0)
pmc.parent(end_ofs, self.created_fk_ctrls[-2], r=0)
pmc.parent(self.created_fk_ctrls[-1], end_ctrl, r=0)
pmc.parent(self.created_locs[0], start_ctrl, r=0)
self.created_fk_ctrls[-1].setAttr("visibility", 0)
self.created_ik_ctrls = [start_ctrl, end_ctrl]
if not self.model.how_many_ctrls == 2:
center_ctrl = (self.model.how_many_ctrls / 2.0) - 0.5
for i, loc in enumerate(self.created_locs):
if i == center_ctrl:
const = pmc.parentConstraint(start_ctrl, end_ctrl, loc, maintainOffset=1,
skipRotate=["x", "y", "z"])
const.setAttr("{0}W0".format(start_ctrl), 1)
const.setAttr("{0}W1".format(end_ctrl), 1)
elif i < center_ctrl:
const = pmc.parentConstraint(start_ctrl, end_ctrl, loc, maintainOffset=1,
skipRotate=["x", "y", "z"])
const.setAttr("{0}W0".format(start_ctrl), 1)
const.setAttr("{0}W1".format(end_ctrl), ((1 / (self.model.how_many_ctrls / 2.0)) * (i / 2.0)))
elif i > center_ctrl:
const = pmc.parentConstraint(start_ctrl, end_ctrl, loc, maintainOffset=1,
skipRotate=["x", "y", "z"])
const.setAttr("{0}W0".format(start_ctrl), ((1 / (self.model.how_many_ctrls / 2.0)) *
(((len(self.created_locs)-1) - i) / 2.0)))
const.setAttr("{0}W1".format(end_ctrl), 1)
pmc.parent(self.created_pelvis_ctrl, start_ctrl)
pmc.parentConstraint(end_ctrl, self.created_spine_jnts[-1], maintainOffset=1, skipTranslate=["x", "y", "z"])
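    # Enable the ikSplineSolver advanced twist, driven by the first and last
    # cv locators (object-rotation-up start/end world matrices).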
def activate_twist(self):
ik_handle = pmc.ls("{0}_ik_HDL".format(self.model.module_name))[0]
ik_handle.setAttr("dTwistControlEnable", 1)
ik_handle.setAttr("dWorldUpType", 4)
ik_handle.setAttr("dForwardAxis", 4)
ik_handle.setAttr("dWorldUpAxis", 0)
ik_handle.setAttr("dWorldUpVectorX", 0)
ik_handle.setAttr("dWorldUpVectorY", 1)
ik_handle.setAttr("dWorldUpVectorZ", 0)
ik_handle.setAttr("dWorldUpVectorEndX", 0)
ik_handle.setAttr("dWorldUpVectorEndY", 1)
ik_handle.setAttr("dWorldUpVectorEndZ", 0)
self.created_locs[0].worldMatrix[0] >> ik_handle.dWorldUpMatrix
self.created_locs[-1].worldMatrix[0] >> ik_handle.dWorldUpMatrixEnd
def clean_rig(self):
self.jnt_input_grp.setAttr("visibility", 0)
self.parts_grp.setAttr("visibility", 0)
self.guides_grp.setAttr("visibility", 0)
for loc in self.created_locs:
loc_shape = loc.getShape()
loc_shape.setAttr("visibility", 0)
for ctrl in self.created_fk_ctrls:
rig_lib.clean_ctrl(ctrl, 14, trs="ts")
for ctrl in self.created_ik_ctrls:
rig_lib.clean_ctrl(ctrl, 17, trs="s")
rig_lib.clean_ctrl(self.created_pelvis_ctrl, 14, trs="t")
if len(self.model.space_list) > 0:
if self.model.ik_creation_switch == 0:
self.created_fk_ctrls[-1].setAttr("space", len(self.model.space_list))
else:
self.created_ik_ctrls[-1].setAttr("space", len(self.model.space_list))
info_crv = rig_lib.signature_shape_curve("{0}_INFO".format(self.model.module_name))
info_crv.getShape().setAttr("visibility", 0)
info_crv.setAttr("hiddenInOutliner", 1)
info_crv.setAttr("translateX", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("translateY", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("translateZ", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("rotateX", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("rotateY", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("rotateZ", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("scaleX", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("scaleY", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("scaleZ", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("visibility", lock=True, keyable=False, channelBox=False)
info_crv.setAttr("overrideEnabled", 1)
info_crv.setAttr("overrideDisplayType", 2)
pmc.parent(info_crv, self.parts_grp)
rig_lib.add_parameter_as_extra_attr(info_crv, "Module", "horizontal_spine")
rig_lib.add_parameter_as_extra_attr(info_crv, "parent_Module", self.model.selected_module)
rig_lib.add_parameter_as_extra_attr(info_crv, "parent_output", self.model.selected_output)
rig_lib.add_parameter_as_extra_attr(info_crv, "how_many_jnts", self.model.how_many_jnts)
rig_lib.add_parameter_as_extra_attr(info_crv, "how_many_ctrls", self.model.how_many_ctrls)
rig_lib.add_parameter_as_extra_attr(info_crv, "ik_creation", self.model.ik_creation_switch)
rig_lib.add_parameter_as_extra_attr(info_crv, "stretch_creation", self.model.stretch_creation_switch)
rig_lib.add_parameter_as_extra_attr(info_crv, "local_spaces", self.model.space_list)
if not pmc.objExists("jnts_to_SKN_SET"):
skn_set = pmc.createNode("objectSet", n="jnts_to_SKN_SET")
else:
skn_set = pmc.ls("jnts_to_SKN_SET", type="objectSet")[0]
for jnt in self.jnts_to_skin:
if type(jnt) == list:
for obj in jnt:
skn_set.add(obj)
else:
skn_set.add(jnt)
def create_outputs(self):
rig_lib.create_output(name="{0}_pelvis_OUTPUT".format(self.model.module_name), parent=self.created_pelvis_jnt)
rig_lib.create_output(name="{0}_start_OUTPUT".format(self.model.module_name), parent=self.created_locs[0])
rig_lib.create_output(name="{0}_end_OUTPUT".format(self.model.module_name), parent=self.created_spine_jnts[-1])
for i, jnt in enumerate(self.created_spine_jnts):
if jnt != self.created_spine_jnts[-1]:
name = "{0}_jnt_{1}_OUTPUT".format(self.model.module_name, i)
rig_lib.create_output(name=name, parent=jnt)
def create_local_spaces(self):
spaces_names = []
space_locs = []
for space in self.model.space_list:
name = str(space).replace("_OUTPUT", "")
if "local_ctrl" in name:
name = "world"
spaces_names.append(name)
if pmc.objExists("{0}_{1}_SPACELOC".format(self.model.module_name, name)):
pmc.delete("{0}_{1}_SPACELOC".format(self.model.module_name, name))
space_loc = pmc.spaceLocator(p=(0, 0, 0), n="{0}_{1}_SPACELOC".format(self.model.module_name, name))
space_locs.append(space_loc)
spaces_names.append("local")
if len(self.model.space_list) > 0:
if self.model.ik_creation_switch == 0:
self.created_fk_ctrls[-1].addAttr("space", attributeType="enum", enumName=spaces_names, hidden=0, keyable=1)
pmc.group(self.created_fk_ctrls[-1], p=self.created_fk_ctrls[-2],
n="{0}_CONSTGRP".format(self.created_fk_ctrls[-1]))
else:
self.created_ik_ctrls[-1].addAttr("space", attributeType="enum", enumName=spaces_names, hidden=0, keyable=1)
for i, space in enumerate(self.model.space_list):
space_locs[i].setAttr("translate", pmc.xform(self.created_spine_jnts[-1], q=1, ws=1, translation=1))
pmc.parent(space_locs[i], space)
if self.model.ik_creation_switch == 0:
fk_space_const = pmc.orientConstraint(space_locs[i], self.created_fk_ctrls[-1].getParent(),
maintainOffset=1)
rig_lib.connect_condition_to_constraint("{0}.{1}W{2}".format(fk_space_const, space_locs[i], i),
self.created_fk_ctrls[-1].space, i,
"{0}_{1}Space_COND".format(self.created_fk_ctrls[-1],
spaces_names[i]))
else:
ik_space_const = pmc.parentConstraint(space_locs[i], self.created_ik_ctrls[-1].getParent(),
maintainOffset=1)
rig_lib.connect_condition_to_constraint("{0}.{1}W{2}".format(ik_space_const, space_locs[i], i),
self.created_ik_ctrls[-1].space, i,
"{0}_{1}Space_COND".format(self.created_ik_ctrls[-1],
spaces_names[i]))
class Model(AuriScriptModel):
def __init__(self):
AuriScriptModel.__init__(self)
self.selected_module = None
self.selected_output = None
self.how_many_jnts = 10
self.how_many_ctrls = 4
self.ik_creation_switch = True
self.stretch_creation_switch = True
self.space_list = []
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import girder_client
import json
import mock
import os
import shutil
import six
import time
from girder import config, events
from tests import base
os.environ['GIRDER_PORT'] = os.environ.get('GIRDER_TEST_PORT', '20200')
config.loadConfig() # Must reload config to pickup correct port
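# Start the Girder test server once for this module (with any extra plugins
# listed in ENABLED_PLUGINS) and stop it when the module is done.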
def setUpModule():
plugins = os.environ.get('ENABLED_PLUGINS', '')
if plugins:
base.enabledPlugins.extend(plugins.split())
base.startServer(False)
def tearDownModule():
base.stopServer()
class PythonClientTestCase(base.TestCase):
def setUp(self):
base.TestCase.setUp(self)
def writeFile(dirName):
filename = os.path.join(dirName, 'f')
            with open(filename, 'w') as f:
                f.write(filename)
# make some temp dirs and files
self.libTestDir = os.path.join(os.path.dirname(__file__),
'_libTestDir')
# unlink old temp dirs and files first
shutil.rmtree(self.libTestDir, ignore_errors=True)
os.mkdir(self.libTestDir)
writeFile(self.libTestDir)
for subDir in range(0, 3):
subDirName = os.path.join(self.libTestDir, 'sub'+str(subDir))
os.mkdir(subDirName)
writeFile(subDirName)
def tearDown(self):
shutil.rmtree(self.libTestDir, ignore_errors=True)
base.TestCase.tearDown(self)
def testRestCore(self):
client = girder_client.GirderClient(port=os.environ['GIRDER_PORT'])
# Register a user
user = client.createResource('user', params={
'firstName': 'First',
'lastName': 'Last',
'login': 'mylogin',
'password': 'password',
'email': '[email protected]'
})
self.assertTrue(user['admin'])
# Test authentication with bad args
flag = False
try:
client.authenticate()
except Exception:
flag = True
self.assertTrue(flag)
# Test authentication failure
flag = False
try:
client.authenticate(username=user['login'], password='wrong')
except girder_client.AuthenticationError:
flag = True
self.assertTrue(flag)
# Interactive login (successfully)
with mock.patch('six.moves.input', return_value=user['login']),\
mock.patch('getpass.getpass', return_value='password'):
client.authenticate(interactive=True)
# /user/me should now return our user info
user = client.getResource('user/me')
self.assertEqual(user['login'], 'mylogin')
# Test HTTP error case
flag = False
try:
client.getResource('user/badId')
except girder_client.HttpError as e:
self.assertEqual(e.status, 400)
self.assertEqual(e.method, 'GET')
resp = json.loads(e.responseText)
self.assertEqual(resp['type'], 'validation')
self.assertEqual(resp['field'], 'id')
self.assertEqual(resp['message'], 'Invalid ObjectId: badId')
flag = True
self.assertTrue(flag)
# Test some folder routes
folders = client.listFolder(
parentId=user['_id'], parentFolderType='user')
self.assertEqual(len(folders), 2)
privateFolder = publicFolder = None
for folder in folders:
if folder['name'] == 'Public':
publicFolder = folder
elif folder['name'] == 'Private':
privateFolder = folder
self.assertNotEqual(privateFolder, None)
self.assertNotEqual(publicFolder, None)
self.assertEqual(client.getFolder(privateFolder['_id']), privateFolder)
acl = client.getFolderAccess(privateFolder['_id'])
self.assertIn('users', acl)
self.assertIn('groups', acl)
client.setFolderAccess(privateFolder['_id'], json.dumps(acl),
public=False)
self.assertEqual(acl, client.getFolderAccess(privateFolder['_id']))
# Test recursive ACL propagation (not very robust test yet)
client.createFolder(privateFolder['_id'], name='Subfolder')
client.inheritAccessControlRecursive(privateFolder['_id'])
def testUploadCallbacks(self):
callbackUser = self.model('user').createUser(
firstName='Callback', lastName='Last', login='callback',
password='password', email='[email protected]')
callbackPublicFolder = six.next(self.model('folder').childFolders(
parentType='user', parent=callbackUser, user=None, limit=1))
callback_counts = {'folder': 0, 'item': 0}
folders = {}
items = {}
folders[self.libTestDir] = False
folder_count = 1 # 1 for self.libTestDir
item_count = 0
for root, dirs, files in os.walk(self.libTestDir):
for name in files:
items[os.path.join(root, name)] = False
item_count += 1
for name in dirs:
folders[os.path.join(root, name)] = False
folder_count += 1
def folder_callback(folder, filepath):
self.assertIn(filepath, six.viewkeys(folders))
folders[filepath] = True
callback_counts['folder'] += 1
def item_callback(item, filepath):
self.assertIn(filepath, six.viewkeys(items))
items[filepath] = True
callback_counts['item'] += 1
client = girder_client.GirderClient(port=os.environ['GIRDER_PORT'])
client.authenticate('callback', 'password')
client.add_folder_upload_callback(folder_callback)
client.add_item_upload_callback(item_callback)
client.upload(self.libTestDir, callbackPublicFolder['_id'])
# make sure counts are the same (callbacks not called more than once)
# and that all folders and files have callbacks called on them
self.assertEqual(folder_count, callback_counts['folder'])
self.assertEqual(item_count, callback_counts['item'])
self.assertTrue(all(six.viewvalues(items)))
self.assertTrue(all(six.viewvalues(folders)))
# Upload again with reuse_existing on
existingList = list(self.model('folder').childFolders(
parentType='folder', parent=callbackPublicFolder,
user=callbackUser, limit=0))
client.upload(self.libTestDir, callbackPublicFolder['_id'],
reuse_existing=True)
newList = list(self.model('folder').childFolders(
parentType='folder', parent=callbackPublicFolder,
user=callbackUser, limit=0))
self.assertEqual(existingList, newList)
self.assertEqual(len(newList), 1)
self.assertEqual([f['name'] for f in self.model('folder').childFolders(
parentType='folder', parent=newList[0],
user=callbackUser, limit=0)], ['sub0', 'sub1', 'sub2'])
# Test upload via a file-like object into a folder
callbacks = []
path = os.path.join(self.libTestDir, 'sub0', 'f')
size = os.path.getsize(path)
def progressCallback(info):
callbacks.append(info)
with open(path) as f:
with self.assertRaises(girder_client.IncorrectUploadLengthError):
try:
client.uploadFile(
callbackPublicFolder['_id'], stream=f, name='test',
size=size + 1, parentType='folder')
except girder_client.IncorrectUploadLengthError as exc:
self.assertEqual(
exc.upload['received'], exc.upload['size'] - 1)
upload = self.model('upload').load(exc.upload['_id'])
self.assertEqual(upload, None)
raise
with open(path) as f:
file = client.uploadFile(
callbackPublicFolder['_id'], stream=f, name='test', size=size,
parentType='folder', progressCallback=progressCallback)
self.assertEqual(len(callbacks), 1)
self.assertEqual(callbacks[0]['current'], size)
self.assertEqual(callbacks[0]['total'], size)
self.assertEqual(file['name'], 'test')
self.assertEqual(file['size'], size)
self.assertEqual(file['mimeType'], 'application/octet-stream')
items = list(
self.model('folder').childItems(folder=callbackPublicFolder))
self.assertEqual(len(items), 1)
self.assertEqual(items[0]['name'], 'test')
files = list(self.model('item').childFiles(items[0]))
self.assertEqual(len(files), 1)
def testUploadReference(self):
eventList = []
client = girder_client.GirderClient(port=os.environ['GIRDER_PORT'])
# Register a user
user = client.createResource('user', params={
'firstName': 'First',
'lastName': 'Last',
'login': 'mylogin',
'password': 'password',
'email': '[email protected]'
})
client.authenticate('mylogin', 'password')
folders = client.listFolder(
parentId=user['_id'], parentFolderType='user', name='Public')
publicFolder = folders[0]
def processEvent(event):
eventList.append(event.info)
events.bind('data.process', 'lib_test', processEvent)
path = os.path.join(self.libTestDir, 'sub0', 'f')
size = os.path.getsize(path)
client.uploadFile(publicFolder['_id'], open(path), name='test1',
size=size, parentType='folder',
reference='test1_reference')
starttime = time.time()
while (not events.daemon.eventQueue.empty() and
time.time() - starttime < 5):
time.sleep(0.05)
self.assertEqual(len(eventList), 1)
self.assertEqual(eventList[0]['reference'], 'test1_reference')
client.uploadFileToItem(str(eventList[0]['file']['itemId']), path,
reference='test2_reference')
while (not events.daemon.eventQueue.empty() and
time.time() - starttime < 5):
time.sleep(0.05)
self.assertEqual(len(eventList), 2)
self.assertEqual(eventList[1]['reference'], 'test2_reference')
self.assertNotEqual(eventList[0]['file']['_id'],
eventList[1]['file']['_id'])
open(path, 'ab').write(b'test')
size = os.path.getsize(path)
client.uploadFileToItem(str(eventList[0]['file']['itemId']), path,
reference='test3_reference')
while (not events.daemon.eventQueue.empty() and
time.time() - starttime < 5):
time.sleep(0.05)
self.assertEqual(len(eventList), 3)
self.assertEqual(eventList[2]['reference'], 'test3_reference')
self.assertNotEqual(eventList[0]['file']['_id'],
eventList[2]['file']['_id'])
self.assertEqual(eventList[1]['file']['_id'],
eventList[2]['file']['_id'])
|
|
"""Robinhood.py: a collection of utilities for working with Robinhood's Private API """
#Standard libraries
import logging
import warnings
from enum import Enum
#External dependencies
from six.moves.urllib.parse import unquote # pylint: disable=E0401
from six.moves.urllib.request import getproxies # pylint: disable=E0401
from six.moves import input
import getpass
import requests
import six
import dateutil
#Application-specific imports
from . import exceptions as RH_exception
from . import endpoints
class Bounds(Enum):
"""Enum for bounds in `historicals` endpoint """
REGULAR = 'regular'
EXTENDED = 'extended'
class Transaction(Enum):
"""Enum for buy/sell orders """
BUY = 'buy'
SELL = 'sell'
class Robinhood:
"""Wrapper class for fetching/parsing Robinhood endpoints """
session = None
username = None
password = None
headers = None
auth_token = None
refresh_token = None
logger = logging.getLogger('Robinhood')
logger.addHandler(logging.NullHandler())
client_id = "c82SH0WZOsabOXGP2sxqcj34FxkvfnWRZBKlBjFS"
###########################################################################
# Logging in and initializing
###########################################################################
def __init__(self):
self.session = requests.session()
self.session.proxies = getproxies()
self.headers = {
"Accept": "*/*",
"Accept-Encoding": "gzip, deflate",
"Accept-Language": "en;q=1, fr;q=0.9, de;q=0.8, ja;q=0.7, nl;q=0.6, it;q=0.5",
"Content-Type": "application/x-www-form-urlencoded; charset=utf-8",
"X-Robinhood-API-Version": "1.0.0",
"Connection": "keep-alive",
"User-Agent": "Robinhood/823 (iPhone; iOS 7.1.2; Scale/2.00)"
}
self.session.headers = self.headers
self.auth_method = self.login_prompt
def login_required(function): # pylint: disable=E0213
""" Decorator function that prompts user for login if they are not logged in already. Can be applied to any function using the @ notation. """
def wrapper(self, *args, **kwargs):
if 'Authorization' not in self.headers:
self.auth_method()
return function(self, *args, **kwargs) # pylint: disable=E1102
return wrapper
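    # Hedged usage sketch (not part of the original class): any endpoint
    # method can be guarded with the decorator above, e.g.
    #
    #     @login_required
    #     def get_transfers(self):
    #         ...
    #
    # If no 'Authorization' header is present, self.auth_method() (the
    # interactive login_prompt by default) runs before the wrapped call.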
def login_prompt(self): # pragma: no cover
"""Prompts user for username and password and calls login() """
username = input("Username: ")
password = getpass.getpass()
return self.login(username=username, password=password)
def login(self,
username,
password,
mfa_code=None):
"""Save and test login info for Robinhood accounts
Args:
            username (str): username
            password (str): password
            mfa_code (str, optional): MFA code, required if two-factor
                authentication is enabled on the account
Returns:
(bool): received valid auth token
"""
self.username = username
self.password = password
payload = {
'password': self.password,
'username': self.username,
'grant_type': 'password',
'client_id': self.client_id
}
if mfa_code:
payload['mfa_code'] = mfa_code
try:
res = self.session.post(endpoints.login(), data=payload, timeout=15)
res.raise_for_status()
data = res.json()
except requests.exceptions.HTTPError:
raise RH_exception.LoginFailed()
if 'mfa_required' in data.keys(): # pragma: no cover
            raise RH_exception.TwoFactorRequired()  # a second login() call with mfa_code is required
if 'access_token' in data.keys() and 'refresh_token' in data.keys():
self.auth_token = data['access_token']
self.refresh_token = data['refresh_token']
self.headers['Authorization'] = 'Bearer ' + self.auth_token
return True
return False
def logout(self):
"""Logout from Robinhood
Returns:
(:obj:`requests.request`) result from logout endpoint
"""
try:
payload = {
'client_id': self.client_id,
'token': self.refresh_token
}
req = self.session.post(endpoints.logout(), data=payload, timeout=15)
req.raise_for_status()
except requests.exceptions.HTTPError as err_msg:
warnings.warn('Failed to log out ' + repr(err_msg))
self.headers['Authorization'] = None
self.auth_token = None
return req
###########################################################################
# GET DATA
###########################################################################
def investment_profile(self):
"""Fetch investment_profile """
res = self.session.get(endpoints.investment_profile(), timeout=15)
res.raise_for_status() # will throw without auth
data = res.json()
return data
def instruments(self, stock):
"""Fetch instruments endpoint
Args:
stock (str): stock ticker
Returns:
(:obj:`dict`): JSON contents from `instruments` endpoint
"""
res = self.session.get(endpoints.instruments(), params={'query': stock.upper()}, timeout=15)
res.raise_for_status()
res = res.json()
        # if requesting all instruments, return the entire object so the caller can paginate via ['next']
if (stock == ""):
return res
return res['results']
def instrument(self, id):
"""Fetch instrument info
Args:
id (str): instrument id
Returns:
(:obj:`dict`): JSON dict of instrument
"""
url = str(endpoints.instruments()) + "?symbol=" + str(id)
try:
req = requests.get(url, timeout=15)
req.raise_for_status()
data = req.json()
except requests.exceptions.HTTPError:
raise RH_exception.InvalidInstrumentId()
return data['results']
def quote_data(self, stock=''):
"""Fetch stock quote
Args:
            stock (str): stock ticker (or comma-separated tickers)
Returns:
(:obj:`dict`): JSON contents from `quotes` endpoint
"""
url = None
if stock.find(',') == -1:
url = str(endpoints.quotes()) + str(stock) + "/"
else:
url = str(endpoints.quotes()) + "?symbols=" + str(stock)
#Check for validity of symbol
try:
req = self.session.get(url, timeout=15)
req.raise_for_status()
data = req.json()
except requests.exceptions.HTTPError:
raise RH_exception.InvalidTickerSymbol()
return data
    # Kept for backwards compatibility until the next major release
def quotes_data(self, stocks):
"""Fetch quote for multiple stocks, in one single Robinhood API call
Args:
stocks (list<str>): stock tickers
Returns:
(:obj:`list` of :obj:`dict`): List of JSON contents from `quotes` endpoint, in the
same order of input args. If any ticker is invalid, a None will occur at that position.
"""
url = str(endpoints.quotes()) + "?symbols=" + ",".join(stocks)
try:
req = self.session.get(url, timeout=15)
req.raise_for_status()
data = req.json()
except requests.exceptions.HTTPError:
raise RH_exception.InvalidTickerSymbol()
return data["results"]
def get_quote_list(self,
stock='',
key=''):
"""Returns multiple stock info and keys from quote_data (prompt if blank)
Args:
stock (str): stock ticker (or tickers separated by a comma)
, prompt if blank
key (str): key attributes that the function should return
Returns:
(:obj:`list`): Returns values from each stock or empty list
if none of the stocks were valid
"""
        #Builds a list containing the requested quote values for a single stock
def append_stock(stock):
keys = key.split(',')
myStr = ''
for item in keys:
myStr += stock[item] + ","
return (myStr.split(','))
#Prompt for stock if not entered
if not stock: # pragma: no cover
stock = input("Symbol: ")
data = self.quote_data(stock)
res = []
        # Handles the case of multiple tickers
if stock.find(',') != -1:
for stock in data['results']:
if stock is None:
continue
res.append(append_stock(stock))
else:
res.append(append_stock(data))
return res
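    # Illustrative call (ticker and prices are made up): requesting two keys
    # for two tickers returns one list per valid ticker, e.g.
    #   rh.get_quote_list('AAPL,MSFT', 'symbol,last_trade_price')
    #   -> [['AAPL', '189.70', ''], ['MSFT', '415.10', '']]
    # (the trailing empty string comes from append_stock joining values with a
    # trailing comma before splitting).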
def get_quote(self, stock=''):
"""Wrapper for quote_data """
data = self.quote_data(stock)
return data
def get_historical_quotes(self, stock, interval, span, bounds=Bounds.REGULAR):
"""Fetch historical data for stock
Note: valid interval/span configs
interval = 5minute | 10minute + span = day, week
interval = day + span = year
interval = week
TODO: NEEDS TESTS
Args:
stock (str): stock ticker
interval (str): resolution of data
span (str): length of data
bounds (:enum:`Bounds`, optional): 'extended' or 'regular' trading hours
Returns:
(:obj:`dict`) values returned from `historicals` endpoint
"""
if type(stock) is str:
stock = [stock]
if isinstance(bounds, str): # recast to Enum
bounds = Bounds(bounds)
historicals = endpoints.historicals() + "/?symbols=" + ','.join(stock).upper() + "&interval=" + interval + "&span=" + span + "&bounds=" + bounds.name.lower()
res = self.session.get(historicals, timeout=15)
return res.json()['results'][0]
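    # Usage sketch (ticker is illustrative); interval/span must be one of the
    # valid pairings listed in the docstring above:
    #   rh.get_historical_quotes('AAPL', '5minute', 'day')
    #   rh.get_historical_quotes('AAPL', 'day', 'year', bounds=Bounds.EXTENDED)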
def get_news(self, stock):
"""Fetch news endpoint
Args:
stock (str): stock ticker
Returns:
(:obj:`dict`) values returned from `news` endpoint
"""
return self.session.get(endpoints.news(stock.upper()), timeout=15).json()
def print_quote(self, stock=''): # pragma: no cover
"""Print quote information
Args:
stock (str): ticker to fetch
Returns:
None
"""
data = self.get_quote_list(stock, 'symbol,last_trade_price')
for item in data:
quote_str = item[0] + ": $" + item[1]
self.logger.info(quote_str)
def print_quotes(self, stocks): # pragma: no cover
"""Print a collection of stocks
Args:
            stocks (:obj:`list`): list of stocks to print
Returns:
None
"""
if stocks is None:
return
for stock in stocks:
self.print_quote(stock)
def ask_price(self, stock=''):
"""Get asking price for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(float): ask price
"""
return self.get_quote_list(stock, 'ask_price')
def ask_size(self, stock=''):
"""Get ask size for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(int): ask size
"""
return self.get_quote_list(stock, 'ask_size')
def bid_price(self, stock=''):
"""Get bid price for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(float): bid price
"""
return self.get_quote_list(stock, 'bid_price')
def bid_size(self, stock=''):
"""Get bid size for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(int): bid size
"""
return self.get_quote_list(stock, 'bid_size')
def last_trade_price(self, stock=''):
"""Get last trade price for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(float): last trade price
"""
return self.get_quote_list(stock, 'last_trade_price')
def previous_close(self, stock=''):
"""Get previous closing price for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(float): previous closing price
"""
return self.get_quote_list(stock, 'previous_close')
def previous_close_date(self, stock=''):
"""Get previous closing date for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(str): previous close date
"""
return self.get_quote_list(stock, 'previous_close_date')
def adjusted_previous_close(self, stock=''):
"""Get adjusted previous closing price for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(float): adjusted previous closing price
"""
return self.get_quote_list(stock, 'adjusted_previous_close')
def symbol(self, stock=''):
"""Get symbol for a stock
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(str): stock symbol
"""
return self.get_quote_list(stock, 'symbol')
def last_updated_at(self, stock=''):
"""Get last update datetime
Note:
queries `quote` endpoint, dict wrapper
Args:
stock (str): stock ticker
Returns:
(str): last update datetime
"""
return self.get_quote_list(stock, 'last_updated_at')
def last_updated_at_datetime(self, stock=''):
"""Get last updated datetime
Note:
queries `quote` endpoint, dict wrapper
`self.last_updated_at` returns time as `str` in format: 'YYYY-MM-ddTHH:mm:ss:000Z'
Args:
stock (str): stock ticker
Returns:
(datetime): last update datetime
"""
#Will be in format: 'YYYY-MM-ddTHH:mm:ss:000Z'
datetime_string = self.last_updated_at(stock)
result = dateutil.parser.parse(datetime_string)
return result
def get_account(self):
"""Fetch account information
Returns:
(:obj:`dict`): `accounts` endpoint payload
"""
res = self.session.get(endpoints.accounts(), timeout=15)
res.raise_for_status() # auth required
res = res.json()
return res['results'][0]
def get_url(self, url):
"""
Flat wrapper for fetching URL directly
"""
return self.session.get(url, timeout=15).json()
def get_popularity(self, stock=''):
"""Get the number of robinhood users who own the given stock
Args:
stock (str): stock ticker
Returns:
(int): number of users who own the stock
"""
stock_instrument = self.get_url(self.quote_data(stock)["instrument"])["id"]
return self.get_url(endpoints.instruments(stock_instrument, "popularity"))["num_open_positions"]
def get_tickers_by_tag(self, tag=None):
"""Get a list of instruments belonging to a tag
Args: tag - Tags may include but are not limited to:
* top-movers
* etf
* 100-most-popular
* mutual-fund
* finance
* cap-weighted
* investment-trust-or-fund
Returns:
(List): a list of Ticker strings
"""
instrument_list = self.get_url(endpoints.tags(tag))["instruments"]
return [self.get_url(instrument)["symbol"] for instrument in instrument_list]
@login_required
def get_transfers(self):
"""Returns a page of list of transfers made to/from the Bank.
Note that this is a paginated response. The consumer will have to look
at 'next' key in the JSON and make a subsequent request for the next
page.
Returns:
(list): List of all transfers to/from the bank.
"""
res = self.session.get(endpoints.ach('transfers'), timeout=15)
res.raise_for_status()
return res.json()
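    # Pagination sketch (hedged; assumes the standard paginated response shape
    # with 'results' and 'next' keys, as described in the docstring above):
    #   page = rh.get_transfers()
    #   transfers = page['results']
    #   while page.get('next'):
    #       page = rh.get_url(page['next'])
    #       transfers += page['results']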
###########################################################################
# GET OPTIONS INFO
###########################################################################
def get_options(self, stock, expiration_dates, option_type):
"""Get a list (chain) of options contracts belonging to a particular stock
        Args: stock ticker (str), list of expiration dates to filter on (YYYY-MM-DD), and whether it's a 'put' or a 'call' option type (str).
Returns:
Options Contracts (List): a list (chain) of contracts for a given underlying equity instrument
"""
instrument_id = self.get_url(self.quote_data(stock)["instrument"])["id"]
if (type(expiration_dates) == list):
_expiration_dates_string = ",".join(expiration_dates)
else:
_expiration_dates_string = expiration_dates
chain_id = self.get_url(endpoints.chain(instrument_id))["results"][0]["id"]
return [contract for contract in self.get_url(endpoints.options(chain_id, _expiration_dates_string, option_type))["results"]]
@login_required
def get_option_market_data(self, optionid):
"""Gets a list of market data for a given optionid.
Args: (str) option id
Returns: dictionary of options market data.
"""
market_data = {}
try:
market_data = self.get_url(endpoints.market_data(optionid)) or {}
except requests.exceptions.HTTPError:
raise RH_exception.InvalidOptionId()
return market_data
###########################################################################
# GET FUNDAMENTALS
###########################################################################
def get_fundamentals(self, stock=''):
"""Find stock fundamentals data
Args:
(str): stock ticker
Returns:
(:obj:`dict`): contents of `fundamentals` endpoint
"""
#Prompt for stock if not entered
if not stock: # pragma: no cover
stock = input("Symbol: ")
url = str(endpoints.fundamentals(str(stock.upper())))
#Check for validity of symbol
try:
req = self.session.get(url, timeout=15)
req.raise_for_status()
data = req.json()
except requests.exceptions.HTTPError:
raise RH_exception.InvalidTickerSymbol()
return data
def fundamentals(self, stock=''):
"""Wrapper for get_fundamentlals function """
return self.get_fundamentals(stock)
###########################################################################
# PORTFOLIOS DATA
###########################################################################
def portfolios(self):
"""Returns the user's portfolio data """
req = self.session.get(endpoints.portfolios(), timeout=15)
req.raise_for_status()
return req.json()['results'][0]
def adjusted_equity_previous_close(self):
"""Wrapper for portfolios
Returns:
(float): `adjusted_equity_previous_close` value
"""
return float(self.portfolios()['adjusted_equity_previous_close'])
def equity(self):
"""Wrapper for portfolios
Returns:
(float): `equity` value
"""
return float(self.portfolios()['equity'])
def equity_previous_close(self):
"""Wrapper for portfolios
Returns:
(float): `equity_previous_close` value
"""
return float(self.portfolios()['equity_previous_close'])
def excess_margin(self):
"""Wrapper for portfolios
Returns:
(float): `excess_margin` value
"""
return float(self.portfolios()['excess_margin'])
def extended_hours_equity(self):
"""Wrapper for portfolios
Returns:
(float): `extended_hours_equity` value
"""
try:
return float(self.portfolios()['extended_hours_equity'])
except TypeError:
return None
def extended_hours_market_value(self):
"""Wrapper for portfolios
Returns:
(float): `extended_hours_market_value` value
"""
try:
return float(self.portfolios()['extended_hours_market_value'])
except TypeError:
return None
def last_core_equity(self):
"""Wrapper for portfolios
Returns:
(float): `last_core_equity` value
"""
return float(self.portfolios()['last_core_equity'])
def last_core_market_value(self):
"""Wrapper for portfolios
Returns:
(float): `last_core_market_value` value
"""
return float(self.portfolios()['last_core_market_value'])
def market_value(self):
"""Wrapper for portfolios
Returns:
(float): `market_value` value
"""
return float(self.portfolios()['market_value'])
@login_required
def order_history(self, orderId=None):
"""Wrapper for portfolios
Optional Args: add an order ID to retrieve information about a single order.
Returns:
(:obj:`dict`): JSON dict from getting orders
"""
return self.session.get(endpoints.orders(orderId), timeout=15).json()
def dividends(self):
"""Wrapper for portfolios
Returns:
(:obj: `dict`): JSON dict from getting dividends
"""
return self.session.get(endpoints.dividends(), timeout=15).json()
###########################################################################
# POSITIONS DATA
###########################################################################
def positions(self):
"""Returns the user's positions data
Returns:
            (:obj:`dict`): JSON dict from getting positions
"""
return self.session.get(endpoints.positions(), timeout=15).json()
def securities_owned(self):
"""Returns list of securities' symbols that the user has shares in
Returns:
            (:obj:`dict`): Non-zero positions
"""
return self.session.get(endpoints.positions() + '?nonzero=true', timeout=15).json()
###########################################################################
# PLACE ORDER
###########################################################################
def place_order(self,
instrument,
quantity=1,
price=0.0,
transaction=None,
trigger='immediate',
order='market',
time_in_force='gfd'):
"""Place an order with Robinhood
Notes:
OMFG TEST THIS PLEASE!
Just realized this won't work since if type is LIMIT you need to use "price" and if
a STOP you need to use "stop_price". Oops.
Reference: https://github.com/sanko/Robinhood/blob/master/Order.md#place-an-order
Args:
instrument (dict): the RH URL and symbol in dict for the instrument to be traded
quantity (int): quantity of stocks in order
            price (float): price for order
transaction (:enum:`Transaction`): BUY or SELL enum
trigger (:enum:`Trigger`): IMMEDIATE or STOP enum
order (:enum:`Order`): MARKET or LIMIT
time_in_force (:enum:`TIME_IN_FORCE`): GFD or GTC (day or until cancelled)
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
if isinstance(transaction, str):
transaction = Transaction(transaction)
if not price:
price = self.quote_data(instrument['symbol'])['bid_price']
payload = {
'account': self.get_account()['url'],
'instrument': unquote(instrument['url']),
'quantity': quantity,
'side': transaction.name.lower(),
'symbol': instrument['symbol'],
'time_in_force': time_in_force.lower(),
'trigger': trigger,
'type': order.lower()
}
if order.lower() == "stop":
payload['stop_price'] = float(price)
else:
payload['price'] = float(price)
res = self.session.post(endpoints.orders(), data=payload, timeout=15)
res.raise_for_status()
return res
def place_buy_order(self,
instrument,
quantity,
bid_price=0.0):
"""Wrapper for placing buy orders
Args:
instrument (dict): the RH URL and symbol in dict for the instrument to be traded
quantity (int): quantity of stocks in order
bid_price (float): price for order
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
transaction = Transaction.BUY
return self.place_order(instrument, quantity, bid_price, transaction)
def place_sell_order(self,
instrument,
quantity,
bid_price=0.0):
"""Wrapper for placing sell orders
Args:
instrument (dict): the RH URL and symbol in dict for the instrument to be traded
quantity (int): quantity of stocks in order
bid_price (float): price for order
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
transaction = Transaction.SELL
return self.place_order(instrument, quantity, bid_price, transaction)
# Methods below here are a complete rewrite for buying and selling
# These are new. Use at your own risk!
def place_market_buy_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
quantity=None):
"""Wrapper for placing market buy orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
quantity (int): Number of shares to buy
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='market',
trigger='immediate',
side='buy',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
quantity=quantity))
def place_limit_buy_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
price=None,
quantity=None):
"""Wrapper for placing limit buy orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
price (float): The max price you're willing to pay per share
quantity (int): Number of shares to buy
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='limit',
trigger='immediate',
side='buy',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
price=price,
quantity=quantity))
def place_stop_loss_buy_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
stop_price=None,
quantity=None):
"""Wrapper for placing stop loss buy orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
stop_price (float): The price at which this becomes a market order
quantity (int): Number of shares to buy
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='market',
trigger='stop',
side='buy',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
stop_price=stop_price,
quantity=quantity))
def place_stop_limit_buy_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
stop_price=None,
price=None,
quantity=None):
"""Wrapper for placing stop limit buy orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
stop_price (float): The price at which this becomes a limit order
price (float): The max price you're willing to pay per share
quantity (int): Number of shares to buy
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='limit',
trigger='stop',
side='buy',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
stop_price=stop_price,
price=price,
quantity=quantity))
def place_market_sell_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
quantity=None):
"""Wrapper for placing market sell orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
quantity (int): Number of shares to sell
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='market',
trigger='immediate',
side='sell',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
quantity=quantity))
def place_limit_sell_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
price=None,
quantity=None):
"""Wrapper for placing limit sell orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
price (float): The minimum price you're willing to get per share
quantity (int): Number of shares to sell
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='limit',
trigger='immediate',
side='sell',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
price=price,
quantity=quantity))
def place_stop_loss_sell_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
stop_price=None,
quantity=None):
"""Wrapper for placing stop loss sell orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
stop_price (float): The price at which this becomes a market order
quantity (int): Number of shares to sell
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='market',
trigger='stop',
side='sell',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
stop_price=stop_price,
quantity=quantity))
def place_stop_limit_sell_order(self,
instrument_URL=None,
symbol=None,
time_in_force=None,
price=None,
stop_price=None,
quantity=None):
"""Wrapper for placing stop limit sell orders
Notes:
If only one of the instrument_URL or symbol are passed as
arguments the other will be looked up automatically.
Args:
instrument_URL (str): The RH URL of the instrument
symbol (str): The ticker symbol of the instrument
time_in_force (str): 'GFD' or 'GTC' (day or until cancelled)
stop_price (float): The price at which this becomes a limit order
            price (float): The minimum price you're willing to accept per share
quantity (int): Number of shares to sell
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
return(self.submit_order(order_type='limit',
trigger='stop',
side='sell',
instrument_URL=instrument_URL,
symbol=symbol,
time_in_force=time_in_force,
stop_price=stop_price,
price=price,
quantity=quantity))
def submit_order(self,
instrument_URL=None,
symbol=None,
order_type=None,
time_in_force=None,
trigger=None,
price=None,
stop_price=None,
quantity=None,
side=None):
"""Submits order to Robinhood
Notes:
This is normally not called directly. Most programs should use
one of the following instead:
place_market_buy_order()
place_limit_buy_order()
place_stop_loss_buy_order()
place_stop_limit_buy_order()
place_market_sell_order()
place_limit_sell_order()
place_stop_loss_sell_order()
place_stop_limit_sell_order()
Args:
instrument_URL (str): the RH URL for the instrument
symbol (str): the ticker symbol for the instrument
order_type (str): 'MARKET' or 'LIMIT'
time_in_force (:enum:`TIME_IN_FORCE`): GFD or GTC (day or
until cancelled)
            trigger (str): 'immediate' or 'stop'
price (float): The share price you'll accept
stop_price (float): The price at which the order becomes a
market or limit order
quantity (int): The number of shares to buy/sell
            side (str): 'buy' or 'sell'
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
# Used for default price input
# Price is required, so we use the current bid price if it is not specified
current_quote = self.get_quote(symbol)
current_bid_price = current_quote['bid_price']
# Start with some parameter checks. I'm paranoid about $.
if(instrument_URL is None):
if(symbol is None):
raise(ValueError('Neither instrument_URL nor symbol were passed to submit_order'))
for result in self.instruments(symbol):
if result['symbol'].upper() == symbol.upper() :
instrument_URL = result['url']
break
if(instrument_URL is None):
raise(ValueError('instrument_URL could not be defined. Symbol %s not found' % symbol))
if(symbol is None):
symbol = self.session.get(instrument_URL, timeout=15).json()['symbol']
if(side is None):
raise(ValueError('Order is neither buy nor sell in call to submit_order'))
if(order_type is None):
if(price is None):
if(stop_price is None):
order_type = 'market'
else:
order_type = 'limit'
symbol = str(symbol).upper()
order_type = str(order_type).lower()
time_in_force = str(time_in_force).lower()
trigger = str(trigger).lower()
side = str(side).lower()
if(order_type != 'market') and (order_type != 'limit'):
raise(ValueError('Invalid order_type in call to submit_order'))
if(order_type == 'limit'):
if(price is None):
raise(ValueError('Limit order has no price in call to submit_order'))
if(price <= 0):
raise(ValueError('Price must be positive number in call to submit_order'))
if(trigger == 'stop'):
if(stop_price is None):
raise(ValueError('Stop order has no stop_price in call to submit_order'))
if(stop_price <= 0):
raise(ValueError('Stop_price must be positive number in call to submit_order'))
if(stop_price is not None):
if(trigger != 'stop'):
raise(ValueError('Stop price set for non-stop order in call to submit_order'))
if(price is None):
if(order_type == 'limit'):
raise(ValueError('Limit order has no price in call to submit_order'))
if(price is not None):
if(order_type.lower() == 'market'):
raise(ValueError('Market order has price limit in call to submit_order'))
price = float(price)
else:
price = current_bid_price # default to current bid price
if(quantity is None):
raise(ValueError('No quantity specified in call to submit_order'))
quantity = int(quantity)
if(quantity <= 0):
raise(ValueError('Quantity must be positive number in call to submit_order'))
payload = {}
for field, value in [
('account', self.get_account()['url']),
('instrument', instrument_URL),
('symbol', symbol),
('type', order_type),
('time_in_force', time_in_force),
('trigger', trigger),
('price', price),
('stop_price', stop_price),
('quantity', quantity),
('side', side)
]:
if(value is not None):
payload[field] = value
        self.logger.debug(payload)
res = self.session.post(endpoints.orders(), data=payload, timeout=15)
res.raise_for_status()
return res
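    # Usage sketch (hedged; symbol and prices are illustrative). Most callers
    # should go through one of the wrappers listed in the docstring above
    # rather than calling submit_order() directly:
    #   rh.place_limit_buy_order(symbol='MSFT', time_in_force='gfd',
    #                            price=100.00, quantity=1)
    #   rh.place_stop_loss_sell_order(symbol='MSFT', time_in_force='gtc',
    #                                 stop_price=90.00, quantity=1)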
##############################
# CANCEL ORDER
##############################
def cancel_order(
self,
order_id):
"""
Cancels specified order and returns the response (results from `orders` command).
If order cannot be cancelled, `None` is returned.
Args:
order_id (str or dict): Order ID string that is to be cancelled or open order dict returned from
order get.
Returns:
(:obj:`requests.request`): result from `orders` put command
"""
if isinstance(order_id, str):
try:
order = self.session.get(endpoints.orders() + order_id, timeout=15).json()
except (requests.exceptions.HTTPError) as err_msg:
raise ValueError('Failed to get Order for ID: ' + order_id
+ '\n Error message: '+ repr(err_msg))
if order.get('cancel') is not None:
try:
res = self.session.post(order['cancel'], timeout=15)
res.raise_for_status()
return res
except (requests.exceptions.HTTPError) as err_msg:
raise ValueError('Failed to cancel order ID: ' + order_id
+ '\n Error message: '+ repr(err_msg))
return None
if isinstance(order_id, dict):
order_id = order_id['id']
try:
order = self.session.get(endpoints.orders() + order_id, timeout=15).json()
except (requests.exceptions.HTTPError) as err_msg:
raise ValueError('Failed to get Order for ID: ' + order_id
+ '\n Error message: '+ repr(err_msg))
if order.get('cancel') is not None:
try:
res = self.session.post(order['cancel'], timeout=15)
res.raise_for_status()
return res
except (requests.exceptions.HTTPError) as err_msg:
raise ValueError('Failed to cancel order ID: ' + order_id
+ '\n Error message: '+ repr(err_msg))
return None
        elif not isinstance(order_id, str) and not isinstance(order_id, dict):
raise ValueError('Cancelling orders requires a valid order_id string or open order dictionary')
# Order type cannot be cancelled without a valid cancel link
else:
raise ValueError('Unable to cancel order ID: ' + order_id)
|
|
# -*- coding: utf-8 -*-
"""
celery.events.state
~~~~~~~~~~~~~~~~~~~
This module implements a datastructure used to keep
track of the state of a cluster of workers and the tasks
it is working on (by consuming events).
For every event consumed the state is updated,
so the state represents the state of the cluster
at the time of the last event.
Snapshots (:mod:`celery.events.snapshot`) can be used to
take "pictures" of this state at regular intervals
to e.g. store that in a database.
:copyright: (c) 2009 - 2012 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from __future__ import with_statement
import heapq
from threading import Lock
from time import time
from kombu.utils import kwdict
from celery import states
from celery.datastructures import AttributeDict, LRUCache
# The window (in percentage) is added to the worker's heartbeat
# frequency.  If the time since the last heartbeat exceeds that expanded
# frequency, the worker is considered to be offline.
HEARTBEAT_EXPIRE_WINDOW = 200
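# Worked example: with the default heartbeat frequency of 60 seconds (see
# Worker.__init__ below) and HEARTBEAT_EXPIRE_WINDOW = 200, a worker is
# treated as offline once 60 * (200 / 100) = 120 seconds have passed since
# its last heartbeat (see Worker.heartbeat_expires).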
class Element(AttributeDict):
"""Base class for worker state elements."""
class Worker(Element):
"""Worker State."""
heartbeat_max = 4
expire_window = HEARTBEAT_EXPIRE_WINDOW
def __init__(self, **fields):
fields.setdefault("freq", 60)
super(Worker, self).__init__(**fields)
self.heartbeats = []
def on_online(self, timestamp=None, **kwargs):
"""Callback for the `worker-online` event."""
self.update(**kwargs)
self._heartpush(timestamp)
def on_offline(self, **kwargs):
"""Callback for the `worker-offline` event."""
self.update(**kwargs)
self.heartbeats = []
def on_heartbeat(self, timestamp=None, **kwargs):
"""Callback for the `worker-heartbeat` event."""
self.update(**kwargs)
self._heartpush(timestamp)
def _heartpush(self, timestamp):
if timestamp:
heapq.heappush(self.heartbeats, timestamp)
if len(self.heartbeats) > self.heartbeat_max:
self.heartbeats = self.heartbeats[self.heartbeat_max:]
def __repr__(self):
return "<Worker: %s (%s)" % (self.hostname,
self.alive and "ONLINE" or "OFFLINE")
@property
def heartbeat_expires(self):
return self.heartbeats[-1] + self.freq * (self.expire_window / 1e2)
@property
def alive(self):
return (self.heartbeats and time() < self.heartbeat_expires)
class Task(Element):
"""Task State."""
#: How to merge out of order events.
#: Disorder is detected by logical ordering (e.g. task-received must have
#: happened before a task-failed event).
#:
#: A merge rule consists of a state and a list of fields to keep from
#: that state. ``(RECEIVED, ("name", "args")``, means the name and args
#: fields are always taken from the RECEIVED state, and any values for
#: these fields received before or after is simply ignored.
merge_rules = {states.RECEIVED: ("name", "args", "kwargs",
"retries", "eta", "expires")}
    #: :meth:`info` displays these fields by default.
_info_fields = ("args", "kwargs", "retries", "result",
"eta", "runtime", "expires", "exception")
#: Default values.
_defaults = dict(uuid=None, name=None, state=states.PENDING,
received=False, sent=False, started=False,
succeeded=False, failed=False, retried=False,
revoked=False, args=None, kwargs=None, eta=None,
expires=None, retries=None, worker=None, result=None,
exception=None, timestamp=None, runtime=None,
traceback=None)
def __init__(self, **fields):
super(Task, self).__init__(**dict(self._defaults, **fields))
def update(self, state, timestamp, fields):
"""Update state from new event.
:param state: State from event.
:param timestamp: Timestamp from event.
:param fields: Event data.
"""
if self.worker:
self.worker.on_heartbeat(timestamp=timestamp)
if state != states.RETRY and self.state != states.RETRY and \
states.state(state) < states.state(self.state):
# this state logically happens-before the current state, so merge.
self.merge(state, timestamp, fields)
else:
self.state = state
self.timestamp = timestamp
super(Task, self).update(fields)
def merge(self, state, timestamp, fields):
"""Merge with out of order event."""
keep = self.merge_rules.get(state)
if keep is not None:
fields = dict((key, fields.get(key)) for key in keep)
super(Task, self).update(fields)
def on_sent(self, timestamp=None, **fields):
"""Callback for the ``task-sent`` event."""
self.sent = timestamp
self.update(states.PENDING, timestamp, fields)
def on_received(self, timestamp=None, **fields):
"""Callback for the ``task-received`` event."""
self.received = timestamp
self.update(states.RECEIVED, timestamp, fields)
def on_started(self, timestamp=None, **fields):
"""Callback for the ``task-started`` event."""
self.started = timestamp
self.update(states.STARTED, timestamp, fields)
def on_failed(self, timestamp=None, **fields):
"""Callback for the ``task-failed`` event."""
self.failed = timestamp
self.update(states.FAILURE, timestamp, fields)
def on_retried(self, timestamp=None, **fields):
"""Callback for the ``task-retried`` event."""
self.retried = timestamp
self.update(states.RETRY, timestamp, fields)
def on_succeeded(self, timestamp=None, **fields):
"""Callback for the ``task-succeeded`` event."""
self.succeeded = timestamp
self.update(states.SUCCESS, timestamp, fields)
def on_revoked(self, timestamp=None, **fields):
"""Callback for the ``task-revoked`` event."""
self.revoked = timestamp
self.update(states.REVOKED, timestamp, fields)
def on_unknown_event(self, type, timestamp=None, **fields):
self.update(type.upper(), timestamp, fields)
def info(self, fields=None, extra=[]):
"""Information about this task suitable for on-screen display."""
if fields is None:
fields = self._info_fields
return dict((key, getattr(self, key, None))
for key in list(fields) + list(extra)
if getattr(self, key, None) is not None)
def __repr__(self):
return "<Task: %s(%s) %s>" % (self.name, self.uuid, self.state)
@property
def ready(self):
return self.state in states.READY_STATES
class State(object):
"""Records clusters state."""
event_count = 0
task_count = 0
def __init__(self, callback=None,
max_workers_in_memory=5000, max_tasks_in_memory=10000):
self.workers = LRUCache(limit=max_workers_in_memory)
self.tasks = LRUCache(limit=max_tasks_in_memory)
self.event_callback = callback
self.group_handlers = {"worker": self.worker_event,
"task": self.task_event}
self._mutex = Lock()
def freeze_while(self, fun, *args, **kwargs):
clear_after = kwargs.pop("clear_after", False)
with self._mutex:
try:
return fun(*args, **kwargs)
finally:
if clear_after:
self._clear()
def clear_tasks(self, ready=True):
with self._mutex:
return self._clear_tasks(ready)
def _clear_tasks(self, ready=True):
if ready:
in_progress = dict((uuid, task) for uuid, task in self.itertasks()
if task.state not in states.READY_STATES)
self.tasks.clear()
self.tasks.update(in_progress)
else:
self.tasks.clear()
def _clear(self, ready=True):
self.workers.clear()
self._clear_tasks(ready)
self.event_count = 0
self.task_count = 0
def clear(self, ready=True):
with self._mutex:
return self._clear(ready)
def get_or_create_worker(self, hostname, **kwargs):
"""Get or create worker by hostname."""
try:
worker = self.workers[hostname]
worker.update(kwargs)
except KeyError:
worker = self.workers[hostname] = Worker(
hostname=hostname, **kwargs)
return worker
def get_or_create_task(self, uuid):
"""Get or create task by uuid."""
try:
return self.tasks[uuid]
except KeyError:
task = self.tasks[uuid] = Task(uuid=uuid)
return task
def worker_event(self, type, fields):
"""Process worker event."""
hostname = fields.pop("hostname", None)
if hostname:
worker = self.get_or_create_worker(hostname)
handler = getattr(worker, "on_%s" % type, None)
if handler:
handler(**fields)
def task_event(self, type, fields):
"""Process task event."""
uuid = fields.pop("uuid")
hostname = fields.pop("hostname")
worker = self.get_or_create_worker(hostname)
task = self.get_or_create_task(uuid)
handler = getattr(task, "on_%s" % type, None)
if type == "received":
self.task_count += 1
if handler:
handler(**fields)
else:
task.on_unknown_event(type, **fields)
task.worker = worker
def event(self, event):
with self._mutex:
return self._dispatch_event(event)
def _dispatch_event(self, event):
self.event_count += 1
event = kwdict(event)
group, _, type = event.pop("type").partition("-")
self.group_handlers[group](type, event)
if self.event_callback:
self.event_callback(self, event)
def itertasks(self, limit=None):
for index, row in enumerate(self.tasks.iteritems()):
yield row
if limit and index + 1 >= limit:
break
def tasks_by_timestamp(self, limit=None):
"""Get tasks by timestamp.
Returns a list of `(uuid, task)` tuples.
"""
return self._sort_tasks_by_time(self.itertasks(limit))
def _sort_tasks_by_time(self, tasks):
"""Sort task items by time."""
return sorted(tasks, key=lambda t: t[1].timestamp,
reverse=True)
def tasks_by_type(self, name, limit=None):
"""Get all tasks by type.
Returns a list of `(uuid, task)` tuples.
"""
return self._sort_tasks_by_time([(uuid, task)
for uuid, task in self.itertasks(limit)
if task.name == name])
def tasks_by_worker(self, hostname, limit=None):
"""Get all tasks by worker.
Returns a list of `(uuid, task)` tuples.
"""
return self._sort_tasks_by_time([(uuid, task)
for uuid, task in self.itertasks(limit)
if task.worker.hostname == hostname])
def task_types(self):
"""Returns a list of all seen task types."""
return list(sorted(set(task.name for task in self.tasks.itervalues())))
def alive_workers(self):
"""Returns a list of (seemingly) alive workers."""
return [w for w in self.workers.values() if w.alive]
def __repr__(self):
return "<ClusterState: events=%s tasks=%s>" % (self.event_count,
self.task_count)
state = State()
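if __name__ == "__main__":  # pragma: no cover
    # Minimal usage sketch (not part of the original module): feed a couple of
    # synthetic events into a fresh State and inspect the result. The dicts
    # follow the standard celery event schema ("type" plus per-event fields);
    # the hostname and uuid below are made up for illustration.
    demo = State()
    now = time()
    demo.event({"type": "worker-online",
                "hostname": "worker1.example.com",
                "timestamp": now})
    demo.event({"type": "task-received",
                "uuid": "d3adb33f-0000-0000-0000-000000000000",
                "name": "tasks.add", "args": "(2, 2)", "kwargs": "{}",
                "retries": 0, "eta": None, "expires": None,
                "hostname": "worker1.example.com", "timestamp": now})
    print(demo)                  # <ClusterState: events=2 tasks=1>
    print(demo.alive_workers())  # the example worker should still be alive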
|
|
"""The test for binary_sensor device automation."""
from datetime import timedelta
from unittest.mock import patch
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.binary_sensor import DEVICE_CLASSES, DOMAIN
from homeassistant.components.binary_sensor.device_condition import ENTITY_CONDITIONS
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a binary_sensor."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
for device_class in DEVICE_CLASSES:
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES[device_class].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": condition["type"],
"device_id": device_entry.id,
"entity_id": platform.ENTITIES[device_class].entity_id,
}
for device_class in DEVICE_CLASSES
for condition in ENTITY_CONDITIONS[device_class]
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_condition_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a binary_sensor condition."""
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.async_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_bat_low",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_not_bat_low",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on event - test_event1"
hass.states.async_set(sensor1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off event - test_event2"
async def test_if_fires_on_for_condition(hass, calls):
"""Test for firing if condition is on with delay."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=10)
point3 = point2 + timedelta(seconds=10)
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
mock_utcnow.return_value = point1
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": {
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_not_bat_low",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
("platform", "event.event_type")
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 10 secs into the future
mock_utcnow.return_value = point2
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
hass.states.async_set(sensor1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 0
# Time travel 20 secs into the future
mock_utcnow.return_value = point3
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_off event - test_event1"
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_solve."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import benchmark
from tensorflow.python.platform import test
class MatrixSolveOpTest(test.TestCase):
def _verifySolve(self, x, y, batch_dims=None):
for np_type in [np.float32, np.float64, np.complex64, np.complex128]:
if np_type == np.float32 or np_type == np.complex64:
tol = 1e-5
else:
tol = 1e-12
for adjoint in False, True:
        if np_type in (np.float32, np.float64):
          a = x.real.astype(np_type)
          b = y.real.astype(np_type)
else:
a = x.astype(np_type)
b = y.astype(np_type)
a_np = np.conj(np.transpose(a)) if adjoint else a
if batch_dims is not None:
a = np.tile(a, batch_dims + [1, 1])
a_np = np.tile(a_np, batch_dims + [1, 1])
b = np.tile(b, batch_dims + [1, 1])
np_ans = np.linalg.solve(a_np, b)
for use_placeholder in False, True:
with self.cached_session(use_gpu=True) as sess:
if use_placeholder:
a_ph = array_ops.placeholder(dtypes.as_dtype(np_type))
b_ph = array_ops.placeholder(dtypes.as_dtype(np_type))
tf_ans = linalg_ops.matrix_solve(a_ph, b_ph, adjoint=adjoint)
out = sess.run(tf_ans, {a_ph: a, b_ph: b})
else:
tf_ans = linalg_ops.matrix_solve(a, b, adjoint=adjoint)
out = tf_ans.eval()
self.assertEqual(tf_ans.get_shape(), out.shape)
self.assertEqual(np_ans.shape, out.shape)
self.assertAllClose(np_ans, out, atol=tol, rtol=tol)
def _generateMatrix(self, m, n):
matrix = (np.random.normal(-5, 5,
m * n).astype(np.complex128).reshape([m, n]))
matrix.imag = (np.random.normal(-5, 5, m * n).astype(np.complex128).reshape(
[m, n]))
return matrix
def testSolve(self):
for n in 1, 2, 4, 9:
matrix = self._generateMatrix(n, n)
for nrhs in 1, 2, n:
rhs = self._generateMatrix(n, nrhs)
self._verifySolve(matrix, rhs)
def testSolveBatch(self):
for n in 2, 5:
matrix = self._generateMatrix(n, n)
for nrhs in 1, n:
rhs = self._generateMatrix(n, nrhs)
for batch_dims in [[2], [2, 2], [7, 4]]:
self._verifySolve(matrix, rhs, batch_dims=batch_dims)
def testNonSquareMatrix(self):
    # Attempting to solve a non-square matrix should raise an error.
with self.session(use_gpu=True):
with self.assertRaises(ValueError):
matrix = constant_op.constant([[1., 2., 3.], [3., 4., 5.]])
linalg_ops.matrix_solve(matrix, matrix)
def testWrongDimensions(self):
# The matrix and right-hand sides should have the same number of rows.
with self.session(use_gpu=True):
matrix = constant_op.constant([[1., 0.], [0., 1.]])
rhs = constant_op.constant([[1., 0.]])
with self.assertRaises(ValueError):
linalg_ops.matrix_solve(matrix, rhs)
def testNotInvertible(self):
# The input should be invertible.
with self.session(use_gpu=True):
with self.assertRaisesOpError("Input matrix is not invertible."):
# All rows of the matrix below add to zero
matrix = constant_op.constant([[1., 0., -1.], [-1., 1., 0.],
[0., -1., 1.]])
linalg_ops.matrix_solve(matrix, matrix).eval()
def testConcurrent(self):
with self.session(use_gpu=True) as sess:
all_ops = []
for adjoint_ in False, True:
lhs1 = random_ops.random_normal([3, 3], seed=42)
lhs2 = random_ops.random_normal([3, 3], seed=42)
rhs1 = random_ops.random_normal([3, 3], seed=42)
rhs2 = random_ops.random_normal([3, 3], seed=42)
s1 = linalg_ops.matrix_solve(lhs1, rhs1, adjoint=adjoint_)
s2 = linalg_ops.matrix_solve(lhs2, rhs2, adjoint=adjoint_)
all_ops += [s1, s2]
val = sess.run(all_ops)
self.assertAllEqual(val[0], val[1])
self.assertAllEqual(val[2], val[3])
class MatrixSolveBenchmark(test.Benchmark):
matrix_shapes = [
(4, 4),
(10, 10),
(16, 16),
(101, 101),
(256, 256),
(1001, 1001),
(1024, 1024),
(2048, 2048),
(513, 4, 4),
(513, 16, 16),
(513, 256, 256),
]
def _GenerateTestData(self, matrix_shape, num_rhs):
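    # Build a well-conditioned system: ones/(2n) plus the identity matrix
    # (strictly diagonally dominant), tiled over any leading batch
    # dimensions, with an all-ones right-hand side.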
batch_shape = matrix_shape[:-2]
matrix_shape = matrix_shape[-2:]
assert matrix_shape[0] == matrix_shape[1]
n = matrix_shape[0]
matrix = (np.ones(matrix_shape).astype(np.float32) /
(2.0 * n) + np.diag(np.ones(n).astype(np.float32)))
rhs = np.ones([n, num_rhs]).astype(np.float32)
matrix = variables.Variable(
np.tile(matrix, batch_shape + (1, 1)), trainable=False)
rhs = variables.Variable(
np.tile(rhs, batch_shape + (1, 1)), trainable=False)
return matrix, rhs
def benchmarkMatrixSolveOp(self):
run_gpu_test = test.is_gpu_available(True)
for adjoint in False, True:
for matrix_shape in self.matrix_shapes:
for num_rhs in 1, 2, matrix_shape[-1]:
with ops.Graph().as_default(), \
session.Session(config=benchmark.benchmark_config()) as sess, \
ops.device("/cpu:0"):
matrix, rhs = self._GenerateTestData(matrix_shape, num_rhs)
x = linalg_ops.matrix_solve(matrix, rhs, adjoint=adjoint)
variables.global_variables_initializer().run()
self.run_op_benchmark(
sess,
control_flow_ops.group(x),
min_iters=25,
store_memory_usage=False,
name=("matrix_solve_cpu_shape_{matrix_shape}_num_rhs_{num_rhs}_"
"adjoint_{adjoint}").format(
matrix_shape=matrix_shape,
num_rhs=num_rhs,
adjoint=adjoint))
if run_gpu_test:
with ops.Graph().as_default(), \
session.Session(config=benchmark.benchmark_config()) as sess, \
ops.device("/gpu:0"):
matrix, rhs = self._GenerateTestData(matrix_shape, num_rhs)
x = linalg_ops.matrix_solve(matrix, rhs, adjoint=adjoint)
variables.global_variables_initializer().run()
self.run_op_benchmark(
sess,
control_flow_ops.group(x),
min_iters=25,
store_memory_usage=False,
name=("matrix_solve_gpu_shape_{matrix_shape}_num_rhs_"
"{num_rhs}_adjoint_{adjoint}").format(
matrix_shape=matrix_shape, num_rhs=num_rhs,
adjoint=adjoint))
if __name__ == "__main__":
test.main()
|
|
import os
import contextlib
import os.path
import errno
import stat
import time
import mmap
from functools import partial
import logging
import fnmatch
from ftsdb import re # re or re2
from ftsdb import update_document, add_document, remove_document
from ftsdb import prefix_expr, logger, Cursor
# Only index the first MAX_FSIZE bytes of a file
MAX_FSIZE = 1024*1024
@contextlib.contextmanager
def get_bytes(fname, size):
"""
    Yield a read-only buffer over the first min(size, MAX_FSIZE) bytes of
    the given file (an empty string if the file is empty).
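
    Usage (illustrative, mirroring sync() below):

        st = os.stat(fname)
        with get_bytes(fname, st[stat.ST_SIZE]) as bb:
            add_document(cu, dbpath, int(st[stat.ST_MTIME]), bb)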
"""
size = min(size, MAX_FSIZE)
if size == 0:
yield ''
return
# try to save some memory by using the OS buffers instead of copying
# the file contents
with open(fname, 'rb') as f:
with contextlib.closing(mmap.mmap(f.fileno(), size, access=mmap.ACCESS_READ)) as mm:
yield buffer(mm, 0, size)
def should_allow(exclusions, basename, dbpath):
"""
returns whether a given file should be allowed to exist based on our
exclusion list
"""
# exclusions =:= [{type, pattern}]
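    # e.g. (illustrative):
    #   exclusions = [('simple', '.DS_Store'),
    #                 ('glob', '*.pyc'),
    #                 ('re', re.compile(r'(^|/)\.git(/|$)'))]
    #   should_allow(exclusions, 'foo.pyc', 'src/foo.pyc')  # -> False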
for typ, pattern in exclusions:
if typ == 'simple':
if basename == pattern:
return False
elif typ == 'glob':
# on basename only?
if fnmatch.fnmatch(basename, pattern):
return False
elif typ == 're':
if pattern.search(dbpath):
# match or search? basename or full path or dbpath?
return False
return True
def visitor(path, prefix, exclusions, cu, dirname, fnames):
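    """
    os.path.walk-style callback: stat every regular file under dirname and
    record (path, dbpath, last_modified, size) in the temporary ondisk
    table via cursor cu. Excluded entries are pruned from fnames in place
    so the walk does not descend into them.
    """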
if logger.getEffectiveLevel() <= logging.DEBUG:
logger.debug("Walking %s", dirname)
fnames.sort() # makes the child 'walking' messages come out in an order the user expects
remove = []
for basename in fnames:
fname = os.path.join(dirname, basename)
assert fname.startswith(path)
if prefix:
assert fname.startswith(os.path.join(path, prefix))
dbfname = fname[len(path)+1:]
if exclusions and not should_allow(exclusions, basename, dbfname):
remove.append(basename)
continue
try:
st = os.stat(fname)
mode = st.st_mode
size = st[stat.ST_SIZE]
if stat.S_ISDIR(mode):
continue
if not stat.S_ISREG(mode):
logger.warn("Skipping non-regular file %s (%s)", dbfname, stat.S_IFMT(mode))
continue
except IOError as e:
if e.errno == errno.ENOENT:
# it was deleted in between
continue
raise
cu.execute("INSERT INTO ondisk(path, dbpath, last_modified, size) VALUES (?, ?, ?, ?)",
(fname, dbfname, int(st[stat.ST_MTIME]), size))
if remove and logger.getEffectiveLevel() <= logging.DEBUG:
logger.debug("Removing %r from walk", list(remove))
for r in remove:
fnames.remove(r)
def tcount(c, tname):
return c.execute("SELECT COUNT(*) FROM %s;" % tname).fetchone()[0]
def sync(conn, path, prefix, files = None):
# path must be a full path on disk
# prefix must be the full path on disk that we're syncing (or empty)
start = time.time()
news = updates = deletes = 0
tnews = tupdates = tdeletes = 0 # for debug printing
with Cursor(conn) as c, Cursor(conn) as cu:
c.execute("""
CREATE TEMPORARY TABLE
ondisk (
path TEXT PRIMARY KEY COLLATE BINARY,
dbpath TEXT COLLATE BINARY,
last_modified INTEGER,
size INTEGER
);
""")
exclusions = []
c.execute("SELECT type AS typ, expression AS e FROM exclusions;")
for typ, expression in c:
if typ == 're':
expression = re.compile(expression)
exclusions.append((typ, expression))
wpath = path
if prefix:
wpath = os.path.join(path, prefix)
if files is None:
os.path.walk(wpath, partial(visitor, path, prefix, exclusions), cu)
else:
visitor(path, prefix, exclusions, cu, wpath, files)
logger.debug("Creating temporary index on ondisk(dbpath)")
c.execute("CREATE INDEX tmp_ondisk_dbpath_idx ON ondisk(dbpath)")
if logger.getEffectiveLevel() <= logging.DEBUG:
logger.debug("Found %d files on disk", tcount(cu, "ondisk"))
# now build three groups: new files to be added, missing files to be
# deleted, and old files to be updated
# updated ones
cu.execute("""
CREATE TEMPORARY VIEW updated_files AS
SELECT f.docid AS docid,
od.path AS path,
od.last_modified AS last_modified,
od.size AS size
FROM ondisk od, files f
WHERE od.dbpath = f.path
AND f.last_modified < od.last_modified
""")
if logger.getEffectiveLevel() <= logging.DEBUG:
tupdates = tcount(cu, "updated_files")
logger.debug("Prepared %d files for updating", tupdates)
# new files to create
cu.execute("""
CREATE TEMPORARY VIEW created_files AS
SELECT od.path AS path,
od.dbpath AS dbpath,
od.last_modified,
od.size AS size
FROM ondisk od
WHERE NOT EXISTS(SELECT 1 FROM files f1 WHERE od.dbpath = f1.path)
""")
if logger.getEffectiveLevel() <= logging.DEBUG:
tnews = tcount(cu, "created_files")
logger.debug("Prepared %d files for creation", tnews)
# files that we've indexed in the past but don't exist anymore
if files is None:
# has to be a table instead of a view because parameters aren't allowed in views
cu.execute("""
CREATE TEMPORARY TABLE deletedocs AS
SELECT f.docid AS docid,
f.path AS path
FROM files f
WHERE (? = '' OR f.path LIKE ? ESCAPE '\\') -- ESCAPE disables the LIKE optimization :(
AND NOT EXISTS(SELECT 1 FROM ondisk od WHERE od.dbpath = f.path)
""", (prefix, prefix_expr(prefix)))
if logger.getEffectiveLevel() <= logging.DEBUG:
tdeletes = tcount(cu, "deletedocs")
logger.debug("Prepared %d files for deletion", tdeletes)
# set up our debugging progress-printing closure
def printprogress(*a):
pass
if logger.getEffectiveLevel() <= logging.INFO:
progresstotal = tnews + tupdates + tdeletes
if progresstotal > 0:
def printprogress(s, fname):
total = updates+news+deletes
percent = float(total)/progresstotal*100
logger.info("%d/%d (%.1f%%) %s: %s", total, progresstotal, percent, s, fname)
# files that we've indexed in the past but don't exist anymore
if files is None:
c.execute("SELECT docid, path FROM deletedocs");
for (docid, fname) in c:
printprogress("Deleting", fname)
remove_document(cu, docid)
deletes += 1
c.execute("SELECT docid, path, last_modified, size FROM updated_files;")
for (docid, fname, last_modified, size) in c:
printprogress("Updating %.2f" % (size/1024.0), fname)
try:
with get_bytes(fname, size) as bb:
update_document(cu, docid, last_modified, bb)
except IOError as e:
if e.errno in (errno.ENOENT, errno.EPERM):
logger.warning("Skipping %s: %s", fname, os.strerror(e.errno))
else:
raise
continue
updates += 1
# new files to create
c.execute("SELECT path, dbpath, last_modified, size FROM created_files;")
for (fname, dbpath, last_modified, size) in c:
# is it safe to re-use the last_modified that we got before, or do
# we need to re-stat() the file? reusing it like this could make a
# race-condition whereby we never re-update that file
printprogress("Adding %.1fk" % (size/1024.0), fname)
try:
with get_bytes(fname, size) as bb:
add_document(cu, dbpath, last_modified, bb)
except IOError as e:
if e.errno in (errno.ENOENT, errno.EPERM):
logger.warning("Skipping %s: %s", fname, os.strerror(e.errno))
else:
raise
continue
news += 1
logger.info("%d new documents, %d deletes, %d updates in %.2fs", news, deletes, updates, time.time()-start)
cu.execute("DROP VIEW updated_files;")
cu.execute("DROP VIEW created_files;")
cu.execute("DROP TABLE IF EXISTS deletedocs;")
cu.execute("DROP TABLE ondisk;")
|
|
# Copyright 2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This class contains the basic functionality needed to run any interpreter
# or an interpreter-based tool.
from .common import CMakeException
from .generator import parse_generator_expressions
from .. import mlog
from ..mesonlib import version_compare
import typing as T
from pathlib import Path
import re
import json
import textwrap
class CMakeTraceLine:
def __init__(self, file: Path, line: int, func: str, args: T.List[str]) -> None:
self.file = file
self.line = line
self.func = func.lower()
self.args = args
def __repr__(self) -> str:
s = 'CMake TRACE: {0}:{1} {2}({3})'
return s.format(self.file, self.line, self.func, self.args)
class CMakeTarget:
def __init__(
self,
name: str,
target_type: str,
properties: T.Optional[T.Dict[str, T.List[str]]] = None,
imported: bool = False,
tline: T.Optional[CMakeTraceLine] = None
):
if properties is None:
properties = {}
self.name = name
self.type = target_type
self.properties = properties
self.imported = imported
self.tline = tline
self.depends = [] # type: T.List[str]
self.current_bin_dir = None # type: T.Optional[Path]
self.current_src_dir = None # type: T.Optional[Path]
def __repr__(self) -> str:
s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}'
propSTR = ''
for i in self.properties:
propSTR += " '{}': {}\n".format(i, self.properties[i])
return s.format(self.name, self.type, self.imported, propSTR, self.tline)
def strip_properties(self) -> None:
# Strip the strings in the properties
if not self.properties:
return
for key, val in self.properties.items():
self.properties[key] = [x.strip() for x in val]
assert all([';' not in x for x in self.properties[key]])
class CMakeGeneratorTarget(CMakeTarget):
def __init__(self, name: str) -> None:
super().__init__(name, 'CUSTOM', {})
self.outputs = [] # type: T.List[Path]
self.command = [] # type: T.List[T.List[str]]
self.working_dir = None # type: T.Optional[Path]
class CMakeTraceParser:
def __init__(self, cmake_version: str, build_dir: Path, permissive: bool = True) -> None:
self.vars = {} # type: T.Dict[str, T.List[str]]
self.targets = {} # type: T.Dict[str, CMakeTarget]
self.explicit_headers = set() # type: T.Set[Path]
        # T.List of targets that were added with add_custom_command to generate files
self.custom_targets = [] # type: T.List[CMakeGeneratorTarget]
self.permissive = permissive # type: bool
self.cmake_version = cmake_version # type: str
self.trace_file = 'cmake_trace.txt'
self.trace_file_path = build_dir / self.trace_file
self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human'
# State for delayed command execution. Delayed command execution is realised
# with a custom CMake file that overrides some functions and adds some
# introspection information to the trace.
self.delayed_commands = [] # type: T.List[str]
self.stored_commands = [] # type: T.List[CMakeTraceLine]
# All supported functions
self.functions = {
'set': self._cmake_set,
'unset': self._cmake_unset,
'add_executable': self._cmake_add_executable,
'add_library': self._cmake_add_library,
'add_custom_command': self._cmake_add_custom_command,
'add_custom_target': self._cmake_add_custom_target,
'set_property': self._cmake_set_property,
'set_target_properties': self._cmake_set_target_properties,
'target_compile_definitions': self._cmake_target_compile_definitions,
'target_compile_options': self._cmake_target_compile_options,
'target_include_directories': self._cmake_target_include_directories,
'target_link_libraries': self._cmake_target_link_libraries,
'target_link_options': self._cmake_target_link_options,
'add_dependencies': self._cmake_add_dependencies,
# Special functions defined in the preload script.
# These functions do nothing in the CMake code, but have special
# meaning here in the trace parser.
'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls,
'meson_ps_reload_vars': self._meson_ps_reload_vars,
'meson_ps_disabled_function': self._meson_ps_disabled_function,
} # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]]
def trace_args(self) -> T.List[str]:
arg_map = {
'human': ['--trace', '--trace-expand'],
'json-v1': ['--trace-expand', '--trace-format=json-v1'],
}
base_args = ['--no-warn-unused-cli']
if not self.requires_stderr():
base_args += [f'--trace-redirect={self.trace_file}']
return arg_map[self.trace_format] + base_args
def requires_stderr(self) -> bool:
return version_compare(self.cmake_version, '<3.16')
def parse(self, trace: T.Optional[str] = None) -> None:
# First load the trace (if required)
if not self.requires_stderr():
            if not self.trace_file_path.exists():
raise CMakeException('CMake: Trace file "{}" not found'.format(str(self.trace_file_path)))
trace = self.trace_file_path.read_text(errors='ignore')
if not trace:
raise CMakeException('CMake: The CMake trace was not provided or is empty')
# Second parse the trace
lexer1 = None
if self.trace_format == 'human':
lexer1 = self._lex_trace_human(trace)
elif self.trace_format == 'json-v1':
lexer1 = self._lex_trace_json(trace)
else:
raise CMakeException(f'CMake: Internal error: Invalid trace format {self.trace_format}. Expected [human, json-v1]')
# Primary pass -- parse everything
for l in lexer1:
# store the function if its execution should be delayed
if l.func in self.delayed_commands:
self.stored_commands += [l]
continue
# "Execute" the CMake function if supported
fn = self.functions.get(l.func, None)
            if fn:
fn(l)
# Postprocess
for tgt in self.targets.values():
tgt.strip_properties()
def get_first_cmake_var_of(self, var_list: T.List[str]) -> T.List[str]:
# Return the first found CMake variable in list var_list
for i in var_list:
if i in self.vars:
return self.vars[i]
return []
def get_cmake_var(self, var: str) -> T.List[str]:
# Return the value of the CMake variable var or an empty list if var does not exist
if var in self.vars:
return self.vars[var]
return []
def var_to_str(self, var: str) -> T.Optional[str]:
if var in self.vars and self.vars[var]:
return self.vars[var][0]
return None
def _str_to_bool(self, expr: T.Union[str, T.List[str]]) -> bool:
if not expr:
return False
if isinstance(expr, list):
expr_str = expr[0]
else:
expr_str = expr
expr_str = expr_str.upper()
return expr_str not in ['0', 'OFF', 'NO', 'FALSE', 'N', 'IGNORE'] and not expr_str.endswith('NOTFOUND')
def var_to_bool(self, var: str) -> bool:
return self._str_to_bool(self.vars.get(var, []))
def _gen_exception(self, function: str, error: str, tline: CMakeTraceLine) -> None:
# Generate an exception if the parser is not in permissive mode
if self.permissive:
mlog.debug(f'CMake trace warning: {function}() {error}\n{tline}')
return None
raise CMakeException(f'CMake: {function}() {error}\n{tline}')
def _cmake_set(self, tline: CMakeTraceLine) -> None:
"""Handler for the CMake set() function in all variaties.
comes in three flavors:
set(<var> <value> [PARENT_SCOPE])
set(<var> <value> CACHE <type> <docstring> [FORCE])
set(ENV{<var>} <value>)
We don't support the ENV variant, and any uses of it will be ignored
        silently. The other two variants are supported, with some caveats:
- we don't properly handle scoping, so calls to set() inside a
function without PARENT_SCOPE set could incorrectly shadow the
outer scope.
- We don't honor the type of CACHE arguments
"""
# DOC: https://cmake.org/cmake/help/latest/command/set.html
# 1st remove PARENT_SCOPE and CACHE from args
args = []
for i in tline.args:
if not i or i == 'PARENT_SCOPE':
continue
# Discard everything after the CACHE keyword
if i == 'CACHE':
break
args.append(i)
if len(args) < 1:
return self._gen_exception('set', 'requires at least one argument', tline)
# Now that we've removed extra arguments all that should be left is the
# variable identifier and the value, join the value back together to
# ensure spaces in the value are correctly handled. This assumes that
# variable names don't have spaces. Please don't do that...
identifier = args.pop(0)
value = ' '.join(args)
if not value:
# Same as unset
if identifier in self.vars:
del self.vars[identifier]
else:
self.vars[identifier] = value.split(';')
def _cmake_unset(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/unset.html
if len(tline.args) < 1:
return self._gen_exception('unset', 'requires at least one argument', tline)
if tline.args[0] in self.vars:
del self.vars[tline.args[0]]
def _cmake_add_executable(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/add_executable.html
args = list(tline.args) # Make a working copy
# Make sure the exe is imported
is_imported = True
if 'IMPORTED' not in args:
            return self._gen_exception('add_executable', 'non-imported executables are not supported', tline)
args.remove('IMPORTED')
if len(args) < 1:
return self._gen_exception('add_executable', 'requires at least 1 argument', tline)
self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}, tline=tline, imported=is_imported)
def _cmake_add_library(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/add_library.html
args = list(tline.args) # Make a working copy
# Make sure the lib is imported
if 'INTERFACE' in args:
args.remove('INTERFACE')
if len(args) < 1:
return self._gen_exception('add_library', 'interface library name not specified', tline)
self.targets[args[0]] = CMakeTarget(args[0], 'INTERFACE', {}, tline=tline, imported='IMPORTED' in args)
elif 'IMPORTED' in args:
args.remove('IMPORTED')
# Now, only look at the first two arguments (target_name and target_type) and ignore the rest
if len(args) < 2:
return self._gen_exception('add_library', 'requires at least 2 arguments', tline)
self.targets[args[0]] = CMakeTarget(args[0], args[1], {}, tline=tline, imported=True)
elif 'ALIAS' in args:
args.remove('ALIAS')
# Now, only look at the first two arguments (target_name and target_ref) and ignore the rest
if len(args) < 2:
return self._gen_exception('add_library', 'requires at least 2 arguments', tline)
# Simulate the ALIAS with INTERFACE_LINK_LIBRARIES
self.targets[args[0]] = CMakeTarget(args[0], 'ALIAS', {'INTERFACE_LINK_LIBRARIES': [args[1]]}, tline=tline)
elif 'OBJECT' in args:
return self._gen_exception('add_library', 'OBJECT libraries are not supported', tline)
else:
self.targets[args[0]] = CMakeTarget(args[0], 'NORMAL', {}, tline=tline)
def _cmake_add_custom_command(self, tline: CMakeTraceLine, name: T.Optional[str] = None) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/add_custom_command.html
args = self._flatten_args(list(tline.args)) # Commands can be passed as ';' separated lists
if not args:
return self._gen_exception('add_custom_command', 'requires at least 1 argument', tline)
# Skip the second function signature
if args[0] == 'TARGET':
return self._gen_exception('add_custom_command', 'TARGET syntax is currently not supported', tline)
magic_keys = ['OUTPUT', 'COMMAND', 'MAIN_DEPENDENCY', 'DEPENDS', 'BYPRODUCTS',
'IMPLICIT_DEPENDS', 'WORKING_DIRECTORY', 'COMMENT', 'DEPFILE',
'JOB_POOL', 'VERBATIM', 'APPEND', 'USES_TERMINAL', 'COMMAND_EXPAND_LISTS']
target = CMakeGeneratorTarget(name)
def handle_output(key: str, target: CMakeGeneratorTarget) -> None:
target.outputs += [Path(key)]
def handle_command(key: str, target: CMakeGeneratorTarget) -> None:
if key == 'ARGS':
return
target.command[-1] += [key]
def handle_depends(key: str, target: CMakeGeneratorTarget) -> None:
target.depends += [key]
working_dir = None
def handle_working_dir(key: str, target: CMakeGeneratorTarget) -> None:
nonlocal working_dir
if working_dir is None:
working_dir = key
else:
working_dir += ' '
working_dir += key
fn = None
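        # Keyword-driven dispatch: each magic key selects the handler used
        # for the plain arguments that follow it, until the next magic key.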
for i in args:
if i in magic_keys:
if i == 'OUTPUT':
fn = handle_output
elif i == 'DEPENDS':
fn = handle_depends
elif i == 'WORKING_DIRECTORY':
fn = handle_working_dir
elif i == 'COMMAND':
fn = handle_command
target.command += [[]]
else:
fn = None
continue
if fn is not None:
fn(i, target)
cbinary_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR')
csource_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR')
target.working_dir = Path(working_dir) if working_dir else None
target.current_bin_dir = Path(cbinary_dir) if cbinary_dir else None
target.current_src_dir = Path(csource_dir) if csource_dir else None
target.outputs = [Path(x) for x in self._guess_files([str(y) for y in target.outputs])]
target.depends = self._guess_files(target.depends)
target.command = [self._guess_files(x) for x in target.command]
self.custom_targets += [target]
if name:
self.targets[name] = target
def _cmake_add_custom_target(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html
        # Only the first parameter (the target name) is interesting
if len(tline.args) < 1:
return self._gen_exception('add_custom_target', 'requires at least one argument', tline)
# It's pretty much the same as a custom command
self._cmake_add_custom_command(tline, tline.args[0])
def _cmake_set_property(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/set_property.html
args = list(tline.args)
scope = args.pop(0)
append = False
targets = []
while args:
curr = args.pop(0)
# XXX: APPEND_STRING is specifically *not* supposed to create a
# list, is treating them as aliases really okay?
if curr == 'APPEND' or curr == 'APPEND_STRING':
append = True
continue
if curr == 'PROPERTY':
break
targets += curr.split(';')
if not args:
            return self._gen_exception('set_property', 'failed to parse argument list', tline)
if len(args) == 1:
# Tries to set property to nothing so nothing has to be done
return
identifier = args.pop(0)
if self.trace_format == 'human':
value = ' '.join(args).split(';')
else:
value = [y for x in args for y in x.split(';')]
if not value:
return
        def do_target(tgt: str) -> None:
            if tgt not in self.targets:
                return self._gen_exception('set_property', f'TARGET {tgt} not found', tline)
            if identifier not in self.targets[tgt].properties:
                self.targets[tgt].properties[identifier] = []
            if append:
                self.targets[tgt].properties[identifier] += value
            else:
                self.targets[tgt].properties[identifier] = value
def do_source(src: str) -> None:
if identifier != 'HEADER_FILE_ONLY' or not self._str_to_bool(value):
return
current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR')
if not current_src_dir:
mlog.warning(textwrap.dedent('''\
CMake trace: set_property(SOURCE) called before the preload script was loaded.
Unable to determine CMAKE_CURRENT_SOURCE_DIR. This can lead to build errors.
'''))
current_src_dir = '.'
cur_p = Path(current_src_dir)
src_p = Path(src)
if not src_p.is_absolute():
src_p = cur_p / src_p
self.explicit_headers.add(src_p)
if scope == 'TARGET':
for i in targets:
do_target(i)
elif scope == 'SOURCE':
files = self._guess_files(targets)
for i in files:
do_source(i)
def _cmake_set_target_properties(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html
args = list(tline.args)
targets = []
while args:
curr = args.pop(0)
if curr == 'PROPERTIES':
break
targets.append(curr)
        # Now we need to try to reconstitute the original quoted format of the
# arguments, as a property value could have spaces in it. Unlike
# set_property() this is not context free. There are two approaches I
# can think of, both have drawbacks:
#
# 1. Assume that the property will be capitalized ([A-Z_]), this is
# convention but cmake doesn't require it.
# 2. Maintain a copy of the list here: https://cmake.org/cmake/help/latest/manual/cmake-properties.7.html#target-properties
#
# Neither of these is awesome for obvious reasons. I'm going to try
# option 1 first and fall back to 2, as 1 requires less code and less
        # synchronization for cmake changes.
#
# With the JSON output format, introduced in CMake 3.17, spaces are
        # handled properly and we don't have to do either option.
arglist = [] # type: T.List[T.Tuple[str, T.List[str]]]
if self.trace_format == 'human':
name = args.pop(0)
values = [] # type: T.List[str]
prop_regex = re.compile(r'^[A-Z_]+$')
for a in args:
if prop_regex.match(a):
if values:
arglist.append((name, ' '.join(values).split(';')))
name = a
values = []
else:
values.append(a)
if values:
arglist.append((name, ' '.join(values).split(';')))
else:
arglist = [(x[0], x[1].split(';')) for x in zip(args[::2], args[1::2])]
for name, value in arglist:
for i in targets:
if i not in self.targets:
return self._gen_exception('set_target_properties', f'TARGET {i} not found', tline)
self.targets[i].properties[name] = value
def _cmake_add_dependencies(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/add_dependencies.html
args = list(tline.args)
if len(args) < 2:
return self._gen_exception('add_dependencies', 'takes at least 2 arguments', tline)
target = self.targets.get(args[0])
if not target:
return self._gen_exception('add_dependencies', 'target not found', tline)
for i in args[1:]:
target.depends += i.split(';')
def _cmake_target_compile_definitions(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_compile_definitions.html
self._parse_common_target_options('target_compile_definitions', 'COMPILE_DEFINITIONS', 'INTERFACE_COMPILE_DEFINITIONS', tline)
def _cmake_target_compile_options(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_compile_options.html
self._parse_common_target_options('target_compile_options', 'COMPILE_OPTIONS', 'INTERFACE_COMPILE_OPTIONS', tline)
def _cmake_target_include_directories(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_include_directories.html
self._parse_common_target_options('target_include_directories', 'INCLUDE_DIRECTORIES', 'INTERFACE_INCLUDE_DIRECTORIES', tline, ignore=['SYSTEM', 'BEFORE'], paths=True)
def _cmake_target_link_options(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_link_options.html
self._parse_common_target_options('target_link_options', 'LINK_OPTIONS', 'INTERFACE_LINK_OPTIONS', tline)
def _cmake_target_link_libraries(self, tline: CMakeTraceLine) -> None:
# DOC: https://cmake.org/cmake/help/latest/command/target_link_libraries.html
        self._parse_common_target_options('target_link_libraries', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline)
def _parse_common_target_options(self, func: str, private_prop: str, interface_prop: str, tline: CMakeTraceLine, ignore: T.Optional[T.List[str]] = None, paths: bool = False) -> None:
if ignore is None:
ignore = ['BEFORE']
args = list(tline.args)
if len(args) < 1:
return self._gen_exception(func, 'requires at least one argument', tline)
target = args[0]
if target not in self.targets:
return self._gen_exception(func, f'TARGET {target} not found', tline)
interface = []
private = []
mode = 'PUBLIC'
for i in args[1:]:
if i in ignore:
continue
if i in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'PRIVATE', 'LINK_PUBLIC', 'LINK_PRIVATE']:
mode = i
continue
if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']:
interface += i.split(';')
if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']:
private += i.split(';')
if paths:
interface = self._guess_files(interface)
private = self._guess_files(private)
interface = [x for x in interface if x]
private = [x for x in private if x]
for j in [(private_prop, private), (interface_prop, interface)]:
if not j[0] in self.targets[target].properties:
self.targets[target].properties[j[0]] = []
self.targets[target].properties[j[0]] += j[1]
def _meson_ps_execute_delayed_calls(self, tline: CMakeTraceLine) -> None:
for l in self.stored_commands:
fn = self.functions.get(l.func, None)
            if fn:
fn(l)
# clear the stored commands
self.stored_commands = []
def _meson_ps_reload_vars(self, tline: CMakeTraceLine) -> None:
self.delayed_commands = self.get_cmake_var('MESON_PS_DELAYED_CALLS')
def _meson_ps_disabled_function(self, tline: CMakeTraceLine) -> None:
args = list(tline.args)
if not args:
mlog.error('Invalid preload.cmake script! At least one argument to `meson_ps_disabled_function` is expected')
return
        mlog.warning('The CMake function "{}" was disabled to avoid compatibility issues with Meson.'.format(args[0]))
def _lex_trace_human(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]:
# The trace format is: '<file>(<line>): <func>(<args -- can contain \n> )\n'
reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) ?\)\s*\n', re.MULTILINE)
reg_other = re.compile(r'[^\n]*\n')
loc = 0
while loc < len(trace):
mo_file_line = reg_tline.match(trace, loc)
if not mo_file_line:
skip_match = reg_other.match(trace, loc)
if not skip_match:
print(trace[loc:])
raise CMakeException('Failed to parse CMake trace')
loc = skip_match.end()
continue
loc = mo_file_line.end()
file = mo_file_line.group(1)
line = mo_file_line.group(3)
func = mo_file_line.group(4)
args = mo_file_line.group(5)
args = parse_generator_expressions(args)
argl = args.split(' ')
argl = list(map(lambda x: x.strip(), argl))
yield CMakeTraceLine(Path(file), int(line), func, argl)
def _lex_trace_json(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]:
lines = trace.splitlines(keepends=False)
lines.pop(0) # The first line is the version
for i in lines:
data = json.loads(i)
assert isinstance(data['file'], str)
assert isinstance(data['line'], int)
assert isinstance(data['cmd'], str)
assert isinstance(data['args'], list)
args = data['args']
for j in args:
assert isinstance(j, str)
args = [parse_generator_expressions(x) for x in args]
yield CMakeTraceLine(Path(data['file']), data['line'], data['cmd'], args)
def _flatten_args(self, args: T.List[str]) -> T.List[str]:
# Split lists in arguments
res = [] # type: T.List[str]
for i in args:
res += i.split(';')
return res
def _guess_files(self, broken_list: T.List[str]) -> T.List[str]:
# Nothing has to be done for newer formats
if self.trace_format != 'human':
return broken_list
# Try joining file paths that contain spaces
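        # (the human-readable trace splits arguments on spaces, so e.g.
        #  ['/dir/with', 'space/file.c', '-DFOO'] should be re-joined into
        #  ['/dir/with space/file.c', '-DFOO'])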
reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')
reg_end = re.compile(r'^.*\.[a-zA-Z]+$')
fixed_list = [] # type: T.List[str]
curr_str = None # type: T.Optional[str]
path_found = False # type: bool
for i in broken_list:
if curr_str is None:
curr_str = i
path_found = False
elif Path(curr_str).is_file():
# Abort concatenation if curr_str is an existing file
fixed_list += [curr_str]
curr_str = i
path_found = False
elif not reg_start.match(curr_str):
# Abort concatenation if curr_str no longer matches the regex
fixed_list += [curr_str]
curr_str = i
path_found = False
elif reg_end.match(i):
# File detected
curr_str = f'{curr_str} {i}'
fixed_list += [curr_str]
curr_str = None
path_found = False
elif Path(f'{curr_str} {i}').exists():
# Path detected
curr_str = f'{curr_str} {i}'
path_found = True
elif path_found:
# Add path to fixed_list after ensuring the whole path is in curr_str
fixed_list += [curr_str]
curr_str = i
path_found = False
else:
curr_str = f'{curr_str} {i}'
path_found = False
if curr_str:
fixed_list += [curr_str]
return fixed_list
|
|
import requests
import unittest
import time
import json
import sys
import shutil
import getpass
from zrest.server import App
from zrest.datamodels.shelvemodels import ShelveModel, ShelveForeign
from urllib import request
def set_proxies(cls, done=list()): #This is going to be moved, it's useful
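    # The mutable default argument `done` acts as a run-once guard: the
    # proxy settings are detected (and optionally rewritten to the
    # user:password@host form) only on the first call.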
if len(done) == 0:
cls.proxies = request.getproxies()
if len(cls.proxies) > 0:
answer = input("Is it a authentyfied proxy? (Y/n) ")
if not "n" in answer.lower():
pwd = input("Type your password in: ")
usr = getpass.getuser()
final_proxies = dict()
for item in cls.proxies:
final_string = cls.proxies[item] # Dangerous
final_string = "{}://{}".format(item, final_string.replace("{}://".format(item),
"{}:{}@".format(usr, pwd)))
final_proxies[item] = final_string
cls.proxies = final_proxies
done.append(1)
#TODO: Implement HAL HATEOAS http://stateless.co/hal_specification.html
class App_Test_0(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.app = App()
cls.path = "extrafiles/app_test/model1"
cls.app.set_model(ShelveModel(cls.path,
2,
index_fields=["a", "b", "c"],
headers=["a", "b", "c"]),
"model1",
"^/model1/<_id>$",)
cls.app.run_thread("127.0.0.1", 9000)
@classmethod
def tearDownClass(cls):
cls.app.shutdown()
time.sleep(0.5)
shutil.rmtree(cls.path)
def setUp(self):
self.data1 = {"a": 1, "b": 2, "c": 3}
self.data1_id = self.data1.copy()
self.data1_id.update({"_id": 1,
"_links": {"self": {"href": "/model1/0"}}})
self.datas = [dict(zip(("a", "b", "c"), data)) for data in [[4, 5, 6],
[7, 8, 9],
[10, 11, 12],
[13, 14, 15]]]
self.datas_id = self.datas.copy()
[data.update({"_id": item+1,
"_links": {"self": {"href": "/model1/{}".format(item+1)}}})
for item, data in enumerate(self.datas_id)]
self.gets1 = ["0", "?a=1", "?b=2", "?c=3",
"?a=1&b=2", "?b=2&c=3",
"?a=1&b=2&c=3"]
self.data2id = {"a":4, "b":5, "c":9, "_id":1, "_links": {"self": {"href": "/model1/1"}}}
self.data3 = {"a":16, "b":17, "c":18}
self.data3id = self.data3.copy()
self.data3id.update({"_id": 5,
"_links": {"self": {"href": "/model1/4"}}})
def test_0_post(self):
req = requests.post("http://localhost:9000/model1",
json = self.data1)
self.assertEqual(req.status_code, 201)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
self.assertEqual(req.headers["Location"], "/model1/1")
self.assertEqual(json.loads(req.text), self.data1_id)
datas_id = self.datas.copy()
for index, data in enumerate(self.datas):
req = requests.post("http://localhost:9000/model1",
json=data)
self.assertEqual(req.status_code, 201)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
self.assertEqual(req.headers["Location"], "/model1/{}".format(index+1))
datas_id[index].update({"_id": json.loads(req.text)["_id"]})
self.assertEqual(json.loads(req.text), datas_id[index+1])
def test_1_get(self):
for query in self.gets1:
req = requests.get("http://localhost:9000/model1/{}".format(query))
self.assertEqual(req.status_code, 200)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
self.assertEqual(json.loads(req.text), self.data1_id)
req = requests.get("http://localhost:9000/model1")
self.assertEqual(req.status_code, 200)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
print("JSON: {}".format(req.text))
self.assertEqual(len(json.loads(req.text)["_embedded"]["model1"]), 5)
def test_2_delete(self):
req = requests.delete("http://localhost:9000/model1/0")
self.assertEqual(req.status_code, 200)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
def test_3_patch(self):
req = requests.patch("http://localhost:9000/model1?a=4", json={"c": 9})
self.assertEqual(req.status_code, 200)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
print("JSON: {}".format(req.text))
self.assertEqual(json.loads(req.text), self.data2id)
def test_4_put(self):
req = requests.put("http://localhost:9000/model1?a=13", json=self.data3)
self.assertEqual(req.status_code, 200)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
#self.assertEqual(json.loads(req.text), self.data3id)
req = requests.put("http://localhost:9000/model1/2", json=self.data3)
self.assertEqual(req.status_code, 200)
self.assertEqual(req.headers["Content-Type"], "application/json; charset=utf-8")
self.data3id.update({"_id": 2, "_links": {"self": {"href": "/model1/2"}}})
self.assertEqual(json.loads(req.text), self.data3id) # Returns the current data
class App_Test_1(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.app = App()
cls.path_customers = "extrafiles/app_test/customers"
cls.path_invoices = "extrafiles/app_test/invoices"
cls.app.set_model(ShelveModel(cls.path_customers,
2,
index_fields=["nombre", "dni"],
headers=["nombre", "dni"]),
"customers",
"^/customers/<dni>$",)
cls.app.set_model(ShelveModel(cls.path_invoices,
2,
index_fields=["cliente", "fecha", "importe"],
headers=["cliente", "fecha", "importe"]),
"invoices",
"^/invoices/<_id>$", )
cls.app.set_model(ShelveForeign(cls.app._models["customers"],
cls.app._models["invoices"],
"cliente"),
"customers/invoices",
"^/customers/<customers_dni>/invoices/<invoices__id>$")
cls.app.run_thread("127.0.0.1", 9001)
@classmethod
def tearDownClass(cls):
cls.app.shutdown()
time.sleep(0.05)
shutil.rmtree(cls.path_customers)
shutil.rmtree(cls.path_invoices)
def test_00_post(self):
print("posting")
req = requests.post("http://localhost:9001/customers",
json={"dni": "12345678H", "nombre": "Yo, yo mismo"})
print("???")
print(req.headers["Location"])
print(req.text)
req = requests.post("http://localhost:9001/customers",
json={"dni": "11111111J", "nombre": "Nanai"})
req = requests.get("http://localhost:9001/customers/11111111J")
print("HERE AGAIN ", req.text)
def test_01_post(self):
req = requests.post("http://localhost:9001/invoices",
json={"cliente":1, "fecha": "01/01/2017", "importe": "10.25"})
print(req.headers["Location"])
print(req.text)
req = requests.post("http://localhost:9001/invoices",
json={"cliente": 1, "fecha": "01/01/2017", "importe": "20.55"})
print(req.headers["Location"])
print(req.text)
req = requests.patch("http://localhost:9001/invoices/1",
json={"cliente": 1, "fecha": "01/01/2017", "importe": "10.55"})
print("HERE ", req.text)
req = requests.get("http://localhost:9001/invoices/1")
print("AND HERE ", req.text)
def test_1_post_foreign(self):
req = requests.post("http://localhost:9001/customers/12345678H/invoices",
json={"fecha": "01/02/2017", "importe": "16.30"})
print(req.headers["Location"]) #Mal
print(req.text) #Mal
req = requests.post("http://localhost:9001/customers/11111111J/invoices",
json={"fecha": "01/02/2017", "importe": "18.30"})
print(req.headers["Location"]) # Mal
print(req.text) # Mal
req = requests.get("http://localhost:9001/customers/11111111J/invoices")
print("SO HERE ", req.text)
def test_2_get(self):
req = requests.get("http://localhost:9001/customers/12345678H/invoices")
print(req.text)
def test_3_put(self):
req = requests.put("http://localhost:9001/customers/12345678H/invoices/1",
json={"fecha": "01/03/2017", "importe": "18.00"})
print(req.text)
req = requests.get("http://localhost:9001/invoices")
print(req.text)
def test_4_patch(self):
req = requests.patch("http://localhost:9001/customers/12345678H/invoices/1",
json={"importe": "00.00"})
print(req.text)
req = requests.get("http://localhost:9001/invoices")
print(req.text)
def test_5_delete(self):
req = requests.delete("http://localhost:9001/customers/12345678H/invoices/1")
print(req.text)
def test_6_get_single(self):
req = requests.get("http://localhost:9001/customers/12345678H/invoices")
print(req.text)
def test_7_get_invoices(self):
req = requests.get("http://localhost:9001/invoices")
print(req.text)
def test_8_get_customers(self):
req = requests.get("http://localhost:9001/customers")
print(req.text)
def test_9_get_all(self):
req = requests.get("http://localhost:9001/customers//invoices")
print(req.text)
if __name__ == "__main__":
unittest.main()
|
|
#!/usr/bin/env python2.7
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from argparse import ArgumentParser
from collections import OrderedDict
from contextlib import contextmanager
import json
import logging
import multiprocessing
import os
import shutil
import subprocess
import sys
import cv2
import numpy
LOG_LEVELS = (
logging.CRITICAL,
logging.ERROR,
logging.WARNING,
logging.INFO,
logging.DEBUG
)
LOG_LEVEL_TO_NAMES = OrderedDict((level, logging.getLevelName(level).lower())
for level in LOG_LEVELS)
LOG_NAME_TO_LEVEL = OrderedDict((name, level)
for level, name in LOG_LEVEL_TO_NAMES.items())
VIDEO_EXTENSION = 'webm'
YOUTUBE_VIDEO_FORMAT = '242'
YOUTUBE_AUDIO_FORMAT = '171'
THRESHOLD = 30
CLIPS_OUTPUT_DIR = os.path.join('html', 'clips')
MIN_CLIP_LENGTH = 1
MAX_CLIP_LENGTH = 5
LISTINGS_PATH = os.path.join('html', 'listings.json')
def main(argv=None):
args = parse_args(argv=argv)
configure_logger(args)
command = args.command
if command == 'bulk':
bulk(args)
elif command == 'download':
download_trailer(args)
elif command == 'find':
find_scenes(args)
elif command == 'render':
render_clips(args)
elif command == 'listing':
make_listing(args)
else:
raise RuntimeError('Invalid command {}'.format(args.command))
def parse_args(argv=None):
if argv is None:
argv = sys.argv
parser = ArgumentParser()
parser.add_argument('-l', '--log-level', choices=LOG_NAME_TO_LEVEL.keys(),
default=LOG_LEVEL_TO_NAMES[logging.INFO])
subparsers = parser.add_subparsers(dest='command')
bulk = subparsers.add_parser('bulk')
bulk.add_argument('-m', '--max-length', default=MAX_CLIP_LENGTH, type=float)
bulk.add_argument('-n', '--min-length', default=MIN_CLIP_LENGTH, type=float)
bulk.add_argument('-c', '--trailers_config_path', default='trailers.json')
bulk.add_argument('-l', '--listings_path', default=LISTINGS_PATH)
bulk.add_argument('-o', '--trailers_output_dir', default='trailers')
bulk.add_argument('-s', '--scenes_output_dir', default='scenes')
bulk.add_argument('-t', '--clips_output_dir', default=CLIPS_OUTPUT_DIR)
bulk.add_argument('-d', '--download', dest='download', action='store_true')
bulk.add_argument('-D', '--skip-download', dest='download',
action='store_false')
bulk.set_defaults(download=True)
bulk.add_argument('-u', '--search-scenes', dest='search_scenes',
action='store_true')
bulk.add_argument('-U', '--skip-search-scenes', dest='search_scenes',
action='store_false')
bulk.set_defaults(search_scenes=True)
bulk.add_argument('-r', '--render', dest='render', action='store_true')
bulk.add_argument('-R', '--skip-render', dest='render',
action='store_false')
bulk.set_defaults(render=True)
download = subparsers.add_parser('download')
download.add_argument('youtube_id')
download.add_argument('output_filename')
download.add_argument('-v', '--video_format', default=YOUTUBE_VIDEO_FORMAT)
download.add_argument('-a', '--audio_format', default=YOUTUBE_AUDIO_FORMAT)
find = subparsers.add_parser('find')
find.add_argument('-t', '--threshold', default=THRESHOLD, type=int)
find.add_argument('video_path')
find.add_argument('output_dir')
render = subparsers.add_parser('render')
render.add_argument('-m', '--max-length', default=MAX_CLIP_LENGTH,
type=float)
render.add_argument('-n', '--min-length', default=MIN_CLIP_LENGTH,
type=float)
render.add_argument('scenes_path')
render.add_argument('video_path')
render.add_argument('output_dir')
listing = subparsers.add_parser('listing')
listing.add_argument('clips_dir')
listing.add_argument('listing_path')
return parser.parse_args(args=argv[1:])
def configure_logger(args):
global logger
logging.basicConfig(datefmt='%H:%M:%S',
format='[%(levelname).1s %(asctime)s] %(message)s',
level=LOG_NAME_TO_LEVEL[args.log_level])
logger = logging.getLogger(__name__)
def bulk(args):
with open(args.trailers_config_path) as trailers_config_file:
trailers_config = json.load(trailers_config_file)
trailers_output_dir = args.trailers_output_dir
ensure_dir(trailers_output_dir)
scenes_output_dir = args.scenes_output_dir
ensure_dir(scenes_output_dir)
clips_output_dir = args.clips_output_dir
ensure_dir(clips_output_dir)
    # XXX: Only run one task per worker so OpenCV doesn't corrupt itself;
    # we had problems when opening another video in the same process, where it
    # would open the video and then immediately close.
pool = multiprocessing.Pool(maxtasksperchild=1)
for trailer in trailers_config['trailers']:
pool.apply_async(create_clips_for_trailer,
[trailer, trailers_output_dir, scenes_output_dir,
clips_output_dir, args.download, args.search_scenes])
pool.close()
pool.join()
for trailer in trailers_config['trailers']:
video_path = get_video_file_name(trailers_output_dir, trailer['name'])
scene_file = get_scenes_file_name(video_path, scenes_output_dir)
if args.render:
_render_clips(video_path, clips_output_dir, scene_file,
min_length=args.min_length,
max_length=args.max_length)
_make_listing(os.path.join(clips_output_dir, '..'))
def get_video_file_name(output_dir, name):
return os.path.join(output_dir, name)
def create_clips_for_trailer(trailer, trailers_output_dir, scenes_output_dir,
clips_output_dir, download=True,
search_scenes=True):
output_path = get_video_file_name(trailers_output_dir, trailer['name'])
if download:
_download_trailer(output_path, trailer['youtube_id'])
logger.info('Searching %s', output_path)
if search_scenes:
_find_scenes(output_path, scenes_output_dir)
def download_trailer(args):
_download_trailer(args.output_filename,
args.youtube_id,
video_format=args.video_format,
audio_format=args.audio_format)
def _download_trailer(
output_filename,
youtube_id,
video_format=YOUTUBE_VIDEO_FORMAT,
audio_format=YOUTUBE_AUDIO_FORMAT):
logger.info('Downloading %s ...', output_filename)
subprocess.check_call([
'youtube-dl',
'-o', '{}'.format(output_filename),
'https://www.youtube.com/watch?v={}'.format(youtube_id),
'-f', '{}+{}'.format(video_format, audio_format)
])
# XXX: youtube-dl leaves some artifacts of the audio and video streams it
# downloaded so we'll delete them.
def unlink_download_artifacts(output_filename, dl_format):
extension = os.path.splitext(output_filename)[1]
output_dir = os.path.dirname(os.path.realpath(output_filename))
output_basename = os.path.basename(os.path.realpath(output_filename))
basename = os.path.splitext(output_basename)[0]
artifact = '{}.f{}{}'.format(basename, dl_format, extension)
os.unlink(os.path.join(output_dir, artifact))
unlink_download_artifacts(output_filename, video_format)
unlink_download_artifacts(output_filename, audio_format)
def find_scenes(args):
_find_scenes(args.video_path, args.output_dir, threshold=args.threshold)
def get_scenes_file_name(video_path, output_dir):
video_name = os.path.basename(video_path)
video_stem, video_ext = os.path.splitext(video_name)
scenes_name = '{stem}.json'.format(stem=video_stem)
return os.path.join(output_dir, scenes_name)
def _find_scenes(video_path, output_dir, threshold=THRESHOLD):
ensure_dir(output_dir)
scenes_path = get_scenes_file_name(video_path, output_dir)
with video_capture(video_path) as cap:
scene_splitter = SceneFinder(cap, threshold)
scenes = scene_splitter.find_scenes()
if len(scenes) == 0:
logger.error('No scenes found for %s' % video_path)
with open(scenes_path, 'w') as scenes_file:
json.dump(scenes, scenes_file)
return scenes_path
@contextmanager
def video_capture(video_path):
    cap = cv2.VideoCapture(video_path)
yield cap
cap.release()
class SceneFinder(object):
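    """Find scene boundaries by detecting fade-to-black frames.

    A frame counts as "black" when no pixel is brighter than `threshold`;
    each run of non-black frames between two fades is recorded as a
    (start_seconds, stop_seconds) tuple.
    """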
def __init__(self, cap, threshold):
self._cap = cap
self._threshold = threshold
self._find_scenes_called = False
self._in_fade = False
self._scenes = []
self._start_index = 0
self._video_width = self._get_int_prop('FRAME_WIDTH')
self._video_height = self._get_int_prop('FRAME_HEIGHT')
self._video_fps = self._get_int_prop('FPS')
def _get_int_prop(self, prop_name):
name = 'CV_CAP_PROP_{prop_name}'.format(prop_name=prop_name)
return int(self._cap.get(getattr(cv2.cv, name)))
def find_scenes(self):
if not self._find_scenes_called:
self._find_scenes_called = True
for index, frame in enumerate(self._frames()):
self._check_frame(index, frame)
return self._scenes
def _frames(self):
while True:
ret, frame = self._cap.read()
if not ret:
logger.info('Stopping on frame %d' % self._start_index)
if self._start_index == 0:
logger.error('Not able to read any frames')
raise StopIteration
yield cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
def _check_frame(self, index, frame):
if self._count_light_pixels(frame) == 0:
if not self._in_fade:
self._in_fade = True
self._add_frame(self._start_index, index)
elif self._in_fade:
self._in_fade = False
self._start_index = index
def _count_light_pixels(self, frame):
return numpy.count_nonzero(frame > self._threshold)
def _add_frame(self, start_index, stop_index):
def timestamp(index):
return index / self._video_fps
scene = (timestamp(start_index), timestamp(stop_index))
logger.info('Scene: %.1f %.1f', *scene)
self._scenes.append(scene)
def render_clips(args):
_render_clips(
args.video_path,
args.output_dir,
args.scenes_path,
min_length=args.min_length,
max_length=args.max_length
)
def _render_clips(video_path, output_dir, scenes_path,
min_length=MIN_CLIP_LENGTH, max_length=MAX_CLIP_LENGTH):
video_name = os.path.basename(video_path)
video_stem, video_ext = os.path.splitext(video_name)
clips_dir = os.path.join(output_dir, video_stem)
ensure_dir(output_dir)
if os.path.isdir(clips_dir):
shutil.rmtree(clips_dir)
os.mkdir(clips_dir)
with open(scenes_path) as scenes_file:
scenes = json.load(scenes_file)
def min_max_length(scene):
return min_length < scene[1] - scene[0] < max_length
scenes = filter(min_max_length, scenes)
pool = multiprocessing.Pool()
for index, (start_time, stop_time) in enumerate(scenes):
clip_name = '{}-{}.{}'.format(video_stem, index, VIDEO_EXTENSION)
clip_path = os.path.join(clips_dir, clip_name)
if os.path.exists(clip_path):
os.remove(clip_path)
pool.apply_async(render_clip, [video_path, clip_path, start_time,
stop_time])
pool.close()
pool.join()
def render_clip(video_path, clip_path, start_time, stop_time):
logger.info('Rendering %s ...', clip_path)
subprocess.check_call([
'/usr/bin/ffmpeg',
'-ss', str(start_time),
'-t', str(stop_time - start_time),
'-i', video_path,
'-c:v', 'libvpx',
'-c:a', 'libvorbis',
clip_path,
])
def ensure_dir(path):
if not os.path.exists(path):
os.mkdir(path)
def make_listing(args):
_make_listing(args.clips_dir, listing_path=args.listing_path)
def _make_listing(clips_dir, listing_path=LISTINGS_PATH):
listing = {'videos': []}
for root, dirs, files in os.walk(clips_dir):
for file_ in files:
if os.path.splitext(file_)[1] != '.{}'.format(VIDEO_EXTENSION):
continue
common_prefix = os.path.commonprefix([clips_dir, root])
path = os.path.join(root[len(common_prefix) + 1:], file_)
listing['videos'].append(path)
with open(listing_path, 'w') as listing_file:
json.dump(listing, listing_file)
if __name__ == '__main__':
sys.exit(main())
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
from selenium.common import exceptions
from selenium.webdriver.common import by
from openstack_dashboard.test.integration_tests.regions import baseregion
NORMAL_COLUMN_CLASS = 'normal_column'
class RowRegion(baseregion.BaseRegion):
"""Classic table row."""
_cell_locator = (by.By.CSS_SELECTOR, 'td.%s' % NORMAL_COLUMN_CLASS)
_row_checkbox_locator = (
by.By.CSS_SELECTOR,
'td .themable-checkbox [type="checkbox"] + label'
)
def __init__(self, driver, conf, src_elem, column_names):
self.column_names = column_names
super(RowRegion, self).__init__(driver, conf, src_elem)
@property
def cells(self):
elements = self._get_elements(*self._cell_locator)
return {column_name: elements[i]
for i, column_name in enumerate(self.column_names)}
def mark(self):
chck_box = self._get_element(*self._row_checkbox_locator)
chck_box.click()
class TableRegion(baseregion.BaseRegion):
"""Basic class representing table object."""
_heading_locator = (by.By.CSS_SELECTOR, 'h3.table_title')
_columns_names_locator = (by.By.CSS_SELECTOR, 'thead > tr > th')
_footer_locator = (by.By.CSS_SELECTOR, 'tfoot > tr > td > span')
_rows_locator = (by.By.CSS_SELECTOR, 'tbody > tr')
_empty_table_locator = (by.By.CSS_SELECTOR, 'tbody > tr.empty')
_search_field_locator = (by.By.CSS_SELECTOR,
'div.table_search input.form-control')
_search_button_locator = (by.By.CSS_SELECTOR,
'div.table_search > button')
_search_option_locator = (by.By.CSS_SELECTOR,
'div.table_search select.form-control')
marker_name = 'marker'
prev_marker_name = 'prev_marker'
def _table_locator(self, table_name):
return by.By.CSS_SELECTOR, 'table#%s' % table_name
@property
def _next_locator(self):
return by.By.CSS_SELECTOR, 'a[href^="?%s"]' % self.marker_name
@property
def _prev_locator(self):
return by.By.CSS_SELECTOR, 'a[href^="?%s"]' % self.prev_marker_name
def __init__(self, driver, conf):
self._default_src_locator = self._table_locator(self.__class__.name)
super(TableRegion, self).__init__(driver, conf)
@property
def heading(self):
return self._get_element(*self._heading_locator)
@property
def rows(self):
if self._is_element_present(*self._empty_table_locator):
return []
else:
return self._get_rows()
@property
def column_names(self):
names = []
for element in self._get_elements(*self._columns_names_locator):
classes = element.get_attribute('class').split()
if NORMAL_COLUMN_CLASS in classes:
names.append(element.get_attribute('data-selenium'))
return names
@property
def column_heads(self):
col_heads = {}
for element in self._get_elements(*self._columns_names_locator):
classes = element.get_attribute('class').split()
if NORMAL_COLUMN_CLASS in classes:
name = element.get_attribute('data-selenium')
col_heads[name] = element
return col_heads
@property
def footer(self):
return self._get_element(*self._footer_locator)
def filter(self, value):
self._set_search_field(value)
self._click_search_btn()
def set_filter_value(self, value):
srch_option = self._get_element(*self._search_option_locator)
return self._select_dropdown_by_value(value, srch_option)
def get_row(self, column_name, text, exact_match=True):
"""Get row that contains specified text in specified column.
In case exact_match is set to True, text contained in row must equal
searched text, otherwise occurrence of searched text in the column
text will result in row match.
"""
def get_text(element):
text = element.get_attribute('data-selenium')
return text or element.text
for row in self.rows:
try:
cell = row.cells[column_name]
if exact_match and text == get_text(cell):
return row
if not exact_match and text in get_text(cell):
return row
# NOTE(tsufiev): if a row was deleted during iteration
except exceptions.StaleElementReferenceException:
pass
return None
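    # Illustrative usage (hypothetical table and values, not from the original
    # code): table.get_row('name', 'demo-volume') returns the RowRegion whose
    # 'name' cell text equals "demo-volume"; with exact_match=False the same
    # call would also match a row named "demo-volume-2".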
def _set_search_field(self, value):
srch_field = self._get_element(*self._search_field_locator)
srch_field.clear()
srch_field.send_keys(value)
def _click_search_btn(self):
btn = self._get_element(*self._search_button_locator)
btn.click()
def _get_rows(self, *args):
return [RowRegion(self.driver, self.conf, elem, self.column_names)
for elem in self._get_elements(*self._rows_locator)]
def is_row_deleted(self, row_getter):
def predicate(driver):
if self._is_element_present(*self._empty_table_locator):
return True
with self.waits_disabled():
return not self._is_element_displayed(row_getter())
try:
self._wait_until(predicate)
except exceptions.TimeoutException:
return False
except IndexError:
return True
return True
def wait_cell_status(self, cell_getter, statuses):
if not isinstance(statuses, (list, tuple)):
statuses = (statuses,)
try:
return self._wait_till_text_present_in_element(cell_getter,
statuses)
except exceptions.TimeoutException:
return False
def is_next_link_available(self):
try:
self._turn_off_implicit_wait()
return self._is_element_visible(*self._next_locator)
finally:
self._turn_on_implicit_wait()
def is_prev_link_available(self):
try:
self._turn_off_implicit_wait()
return self._is_element_visible(*self._prev_locator)
finally:
self._turn_on_implicit_wait()
def turn_next_page(self):
if self.is_next_link_available():
lnk = self._get_element(*self._next_locator)
lnk.click()
def turn_prev_page(self):
if self.is_prev_link_available():
lnk = self._get_element(*self._prev_locator)
lnk.click()
def assert_definition(self, expected_table_definition, sorting=False):
"""Checks that actual table is expected one.
Items to compare: 'next' and 'prev' links, count of rows and names of
elements in list
:param expected_table_definition: expected values (dictionary)
:param sorting: boolean arg specifying whether to sort actual names
:return:
"""
names = [row.cells['name'].text for row in self.rows]
if sorting:
names.sort()
actual_table = {'Next': self.is_next_link_available(),
'Prev': self.is_prev_link_available(),
'Count': len(self.rows),
'Names': names}
self.assertDictEqual(actual_table, expected_table_definition)
def bind_table_action(action_name):
"""A decorator to bind table region method to an actual table action
button.
Many table actions when started (by clicking a corresponding button
in UI) lead to some form showing up. To further interact with this form,
    a Python/Selenium wrapper needs to be created for it. It is very
convenient to return this newly created wrapper in the same method that
initiates clicking an actual table action button. Binding the method to a
button is performed behind the scenes in this decorator.
.. param:: action_name
Part of the action button id which is specific to action itself. It
is safe to use action `name` attribute from the dashboard tables.py
code.
"""
_actions_locator = (by.By.CSS_SELECTOR, 'div.table_actions > button,'
' div.table_actions > a')
def decorator(method):
@functools.wraps(method)
def wrapper(table):
actions = table._get_elements(*_actions_locator)
action_element = None
for action in actions:
target_action_id = '%s__action_%s' % (table.name, action_name)
if action.get_attribute('id').lower() == \
target_action_id.lower():
action_element = action
break
if action_element is None:
msg = "Could not bind method '%s' to action control '%s'" % (
method.__name__, action_name)
raise ValueError(msg)
return method(table, action_element)
return wrapper
return decorator
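# A minimal usage sketch for bind_table_action (the table name 'instances',
# the 'launch' action and the FormRegion wrapper are assumptions made purely
# for illustration, not part of this module):
#
#     class InstancesTable(TableRegion):
#         name = "instances"
#
#         @bind_table_action('launch')
#         def launch_instance(self, launch_button):
#             launch_button.click()
#             return forms.FormRegion(self.driver, self.conf)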
def bind_row_action(action_name):
"""A decorator to bind table region method to an actual row action button.
Many table actions when started (by clicking a corresponding button
in UI) lead to some form showing up. To further interact with this form,
    a Python/Selenium wrapper needs to be created for it. It is very
convenient to return this newly created wrapper in the same method that
initiates clicking an actual action button. Row action could be
either primary (if its name is written right away on row action
button) or secondary (if its name is inside of a button drop-down). Binding
the method to a button and toggling the button drop-down open (in case
a row action is secondary) is performed behind the scenes in this
decorator.
.. param:: action_name
Part of the action button id which is specific to action itself. It
is safe to use action `name` attribute from the dashboard tables.py
code.
"""
# NOTE(tsufiev): button tag could be either <a> or <button> - target
# both with *. Also primary action could be single as well, do not use
# .btn-group because of that
primary_action_locator = (
by.By.CSS_SELECTOR, 'td.actions_column *.btn:nth-child(1)')
secondary_actions_opener_locator = (
by.By.CSS_SELECTOR,
'td.actions_column > .btn-group > *.btn:nth-child(2)')
secondary_actions_locator = (
by.By.CSS_SELECTOR,
'td.actions_column > .btn-group > ul.row_actions > li > a, button')
def decorator(method):
@functools.wraps(method)
def wrapper(table, row):
def find_action(element):
pattern = "__action_%s" % action_name
return element.get_attribute('id').endswith(pattern)
action_element = row._get_element(*primary_action_locator)
if not find_action(action_element):
action_element = None
row._get_element(*secondary_actions_opener_locator).click()
for element in row._get_elements(*secondary_actions_locator):
if find_action(element):
action_element = element
break
if action_element is None:
msg = "Could not bind method '%s' to action control '%s'" % (
method.__name__, action_name)
raise ValueError(msg)
return method(table, action_element, row)
return wrapper
return decorator
def bind_row_anchor_column(column_name):
"""A decorator to bind table region method to a anchor in a column.
Typical examples of such tables are Project -> Compute -> Images, Admin
-> System -> Flavors, Project -> Compute -> Instancies.
The method can be used to follow the link in the anchor by the click.
"""
def decorator(method):
@functools.wraps(method)
def wrapper(table, row):
cell = row.cells[column_name]
action_element = cell.find_element(
by.By.CSS_SELECTOR, 'td.%s > a' % NORMAL_COLUMN_CLASS)
return method(table, action_element, row)
return wrapper
return decorator
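# A companion sketch for the row-level decorators above (again with
# hypothetical names; the confirmation form wrapper is an assumption made for
# illustration only):
#
#     class InstancesTable(TableRegion):
#         name = "instances"
#
#         @bind_row_action('delete')
#         def delete_instance(self, delete_button, row):
#             delete_button.click()
#             return forms.BaseFormRegion(self.driver, self.conf)
#
#         @bind_row_anchor_column('name')
#         def go_to_instance_details(self, row_link, row):
#             row_link.click()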
|
|
# python imports
import json
# django imports
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template.loader import render_to_string
from django.template import RequestContext
from django.utils.translation import ugettext as _
# lfs imports
import lfs.cart.utils
import lfs.catalog.utils
from lfs.payment.models import PaymentMethod
from lfs.shipping.models import ShippingMethod
import lfs.voucher.utils
import lfs.discounts.utils
from lfs.caching.utils import lfs_get_object_or_404
from lfs.core.signals import cart_changed
from lfs.core import utils as core_utils
from lfs.catalog.models import Product, PropertyGroup
from lfs.catalog.models import Property
from lfs.cart import utils as cart_utils
from lfs.cart.models import CartItem
from lfs.core.models import Country
from lfs.core.utils import LazyEncoder
from lfs.shipping import utils as shipping_utils
from lfs.payment import utils as payment_utils
from lfs.customer import utils as customer_utils
from lfs.voucher.models import Voucher
from lfs.voucher.settings import MESSAGES
def cart(request, template_name="lfs/cart/cart.html"):
"""
The main view of the cart.
"""
return render_to_response(template_name, RequestContext(request, {
"voucher_number": lfs.voucher.utils.get_current_voucher_number(request),
"cart_inline": cart_inline(request),
}))
def cart_inline(request, template_name="lfs/cart/cart_inline.html"):
"""
The actual content of the cart.
This is factored out to be reused within 'normal' and ajax requests.
"""
cart = cart_utils.get_cart(request)
shopping_url = lfs.cart.utils.get_go_on_shopping_url(request)
if cart is None:
return render_to_string(template_name, RequestContext(request, {
"shopping_url": shopping_url,
}))
shop = core_utils.get_default_shop(request)
countries = shop.shipping_countries.all()
selected_country = shipping_utils.get_selected_shipping_country(request)
    # Get the default shipping method, so that we have one in any case.
selected_shipping_method = shipping_utils.get_selected_shipping_method(request)
selected_payment_method = payment_utils.get_selected_payment_method(request)
shipping_costs = shipping_utils.get_shipping_costs(request, selected_shipping_method)
# Payment
payment_costs = payment_utils.get_payment_costs(request, selected_payment_method)
# Cart costs
cart_price = cart.get_price_gross(request) + shipping_costs["price_gross"] + payment_costs["price"]
cart_tax = cart.get_tax(request) + shipping_costs["tax"] + payment_costs["tax"]
# Discounts
discounts = lfs.discounts.utils.get_valid_discounts(request)
for discount in discounts:
cart_price = cart_price - discount["price_gross"]
cart_tax = cart_tax - discount["tax"]
# Voucher
voucher_number = lfs.voucher.utils.get_current_voucher_number(request)
try:
voucher = Voucher.objects.get(number=voucher_number)
except Voucher.DoesNotExist:
display_voucher = False
voucher_value = 0
voucher_tax = 0
voucher_message = MESSAGES[6]
else:
lfs.voucher.utils.set_current_voucher_number(request, voucher_number)
is_voucher_effective, voucher_message = voucher.is_effective(request, cart)
if is_voucher_effective:
display_voucher = True
voucher_value = voucher.get_price_gross(request, cart)
cart_price = cart_price - voucher_value
voucher_tax = voucher.get_tax(request, cart)
cart_tax = cart_tax - voucher_tax
else:
display_voucher = False
voucher_value = 0
voucher_tax = 0
# Calc delivery time for cart (which is the maximum of all cart items)
max_delivery_time = cart.get_delivery_time(request)
cart_items = []
for cart_item in cart.get_items():
product = cart_item.product
quantity = product.get_clean_quantity(cart_item.amount)
cart_items.append({
"obj": cart_item,
"quantity": quantity,
"product": product,
"product_price_net": cart_item.get_price_net(request),
"product_price_gross": cart_item.get_price_gross(request),
"product_tax": cart_item.get_tax(request),
})
return render_to_string(template_name, RequestContext(request, {
"cart": cart,
"cart_items": cart_items,
"cart_price": cart_price,
"cart_tax": cart_tax,
"shipping_methods": shipping_utils.get_valid_shipping_methods(request),
"selected_shipping_method": selected_shipping_method,
"shipping_costs": shipping_costs,
"payment_methods": payment_utils.get_valid_payment_methods(request),
"selected_payment_method": selected_payment_method,
"payment_price": payment_costs["price"],
"countries": countries,
"selected_country": selected_country,
"max_delivery_time": max_delivery_time,
"shopping_url": shopping_url,
"discounts": discounts,
"display_voucher": display_voucher,
"voucher_number": voucher_number,
"voucher_value": voucher_value,
"voucher_tax": voucher_tax,
"voucher_message": voucher_message,
}))
def added_to_cart(request, template_name="lfs/cart/added_to_cart.html"):
"""
Displays the product that has been added to the cart along with the
selected accessories.
"""
cart_items = request.session.get("cart_items", [])
try:
accessories = cart_items[0].product.get_accessories()
except IndexError:
accessories = []
cart_items_count = len(cart_items)
return render_to_response(template_name, RequestContext(request, {
"plural": cart_items_count > 1,
"cart_items_count": cart_items_count,
"shopping_url": request.META.get("HTTP_REFERER", "/"),
"product_accessories": accessories,
"product": cart_items[0].product if cart_items else None,
"cart_items": added_to_cart_items(request),
}))
def added_to_cart_items(request, template_name="lfs/cart/added_to_cart_items.html"):
"""
Displays the added items for the added-to-cart view.
"""
total = 0
cart_items = []
for cart_item in request.session.get("cart_items", []):
total += cart_item.get_price_gross(request)
product = cart_item.product
quantity = product.get_clean_quantity(cart_item.amount)
cart_items.append({
"product": product,
"obj": cart_item,
"quantity": quantity,
"product_price_net": cart_item.get_price_net(request),
"product_price_gross": cart_item.get_price_gross(request),
"product_tax": cart_item.get_tax(request),
})
return render_to_string(template_name, RequestContext(request, {
"total": total,
"cart_items": cart_items,
}))
# Actions
def add_accessory_to_cart(request, product_id, quantity=1):
"""
Adds the product with passed product_id as an accessory to the cart and
updates the added-to-cart view.
"""
product = lfs_get_object_or_404(Product, pk=product_id)
# for product with variants add default variant
if product.is_product_with_variants():
variant = product.get_default_variant()
if variant:
product = variant
else:
return HttpResponse(added_to_cart_items(request))
quantity = product.get_clean_quantity_value(quantity)
session_cart_items = request.session.get("cart_items", [])
cart = cart_utils.get_cart(request)
cart_item = cart.add(product=product, amount=quantity)
# Update session
if cart_item not in session_cart_items:
session_cart_items.append(cart_item)
else:
for session_cart_item in session_cart_items:
if cart_item.product == session_cart_item.product:
session_cart_item.amount += quantity
request.session["cart_items"] = session_cart_items
cart_changed.send(cart, request=request)
return HttpResponse(added_to_cart_items(request))
def add_to_cart(request, product_id=None):
"""
    Adds the product with the passed product_id to the cart after some
    validations have taken place. The amount is taken from the request data.
"""
if product_id is None:
product_id = (request.POST if request.method == 'POST' else request.GET).get("product_id")
product = lfs_get_object_or_404(Product, pk=product_id)
# Only active and deliverable products can be added to the cart.
if not (product.is_active() and product.is_deliverable()):
raise Http404()
quantity = request.POST.get("quantity", "1.0")
quantity = product.get_clean_quantity_value(quantity)
# Validate properties (They are added below)
properties_dict = {}
if product.is_configurable_product():
for key, value in request.POST.items():
if key.startswith("property-"):
                try:
                    property_group_id, property_id = key.split("-")[1:]
                except ValueError:
                    # e.g. "property-12" without a property id part
                    continue
try:
prop = Property.objects.get(pk=property_id)
except Property.DoesNotExist:
continue
property_group = None
if property_group_id != '0':
try:
property_group = PropertyGroup.objects.get(pk=property_group_id)
except PropertyGroup.DoesNotExist:
continue
if prop.is_number_field:
try:
value = lfs.core.utils.atof(value)
except ValueError:
value = 0.0
key = '{0}_{1}'.format(property_group_id, property_id)
properties_dict[key] = {'value': unicode(value),
'property_group_id': property_group_id,
'property_id': property_id}
# validate property's value
if prop.is_number_field:
if (value < prop.unit_min) or (value > prop.unit_max):
msg = _(u"%(name)s must be between %(min)s and %(max)s %(unit)s.") % {"name": prop.title, "min": prop.unit_min, "max": prop.unit_max, "unit": prop.unit}
return lfs.core.utils.set_message_cookie(
product.get_absolute_url(), msg)
# calculate valid steps
steps = []
x = prop.unit_min
while x < prop.unit_max:
steps.append("%.2f" % x)
x += prop.unit_step
steps.append("%.2f" % prop.unit_max)
value = "%.2f" % value
if value not in steps:
msg = _(u"Your entered value for %(name)s (%(value)s) is not in valid step width, which is %(step)s.") % {"name": prop.title, "value": value, "step": prop.unit_step}
return lfs.core.utils.set_message_cookie(
product.get_absolute_url(), msg)
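                    # Illustrative example (not from the original code): with
                    # unit_min=1, unit_max=2 and unit_step=0.5 the accepted
                    # steps are ["1.00", "1.50", "2.00"], so an entered value
                    # of 1.25 is rejected with the message above.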
if product.get_active_packing_unit():
quantity = product.get_amount_by_packages(quantity)
cart = cart_utils.get_or_create_cart(request)
cart_item = cart.add(product, properties_dict, quantity)
cart_items = [cart_item]
# Check stock amount
message = ""
if product.manage_stock_amount and cart_item.amount > product.stock_amount and not product.order_time:
if product.stock_amount == 0:
message = _(u"Sorry, but '%(product)s' is not available anymore.") % {"product": product.name}
elif product.stock_amount == 1:
message = _(u"Sorry, but '%(product)s' is only one time available.") % {"product": product.name}
else:
message = _(u"Sorry, but '%(product)s' is only %(amount)s times available.") % {"product": product.name, "amount": product.stock_amount}
cart_item.amount = product.stock_amount
cart_item.save()
# Add selected accessories to cart
for key, value in request.POST.items():
if key.startswith("accessory"):
accessory_id = key.split("-")[1]
try:
accessory = Product.objects.get(pk=accessory_id)
except ObjectDoesNotExist:
continue
# for product with variants add default variant
if accessory.is_product_with_variants():
accessory_variant = accessory.get_default_variant()
if accessory_variant:
accessory = accessory_variant
else:
continue
# Get quantity
quantity = request.POST.get("quantity-%s" % accessory_id, 0)
quantity = accessory.get_clean_quantity_value(quantity)
cart_item = cart.add(product=accessory, amount=quantity)
cart_items.append(cart_item)
# Store cart items for retrieval within added_to_cart.
request.session["cart_items"] = cart_items
cart_changed.send(cart, request=request)
# Update the customer's shipping method (if appropriate)
customer = customer_utils.get_or_create_customer(request)
shipping_utils.update_to_valid_shipping_method(request, customer, save=True)
# Update the customer's payment method (if appropriate)
payment_utils.update_to_valid_payment_method(request, customer, save=True)
# Save the cart to update modification date
cart.save()
try:
url_name = settings.LFS_AFTER_ADD_TO_CART
except AttributeError:
url_name = "lfs_added_to_cart"
if message:
return lfs.core.utils.set_message_cookie(reverse(url_name), message)
else:
return HttpResponseRedirect(reverse(url_name))
def delete_cart_item(request, cart_item_id):
"""
Deletes the cart item with the given id.
"""
cart = cart_utils.get_cart(request)
if not cart:
raise Http404
item = lfs_get_object_or_404(CartItem, pk=cart_item_id)
if item.cart.id != cart.id:
raise Http404
item.delete()
cart_changed.send(cart, request=request)
return HttpResponse(cart_inline(request))
def refresh_cart(request):
"""
    Refreshes the cart after some changes have taken place, e.g. the amount
    of a product or the shipping/payment method.
"""
cart = cart_utils.get_cart(request)
if not cart:
raise Http404
customer = customer_utils.get_or_create_customer(request)
# Update country
country_iso = request.POST.get("country")
if country_iso:
selected_country = Country.objects.get(code=country_iso.lower())
customer.selected_country_id = selected_country.id
        if customer.selected_shipping_address:
            customer.selected_shipping_address.country = selected_country
            customer.selected_shipping_address.save()
        if customer.selected_invoice_address:
            customer.selected_invoice_address.country = selected_country
            customer.selected_invoice_address.save()
    # NOTE: The customer has to be saved already here in order to calculate
    # a possible new valid shipping method below, which could be triggered by
    # the changing of the shipping country.
customer.save()
# Update Amounts
message = ""
for item in cart.get_items():
amount = request.POST.get("amount-cart-item_%s" % item.id, "0.0")
amount = item.product.get_clean_quantity_value(amount, allow_zero=True)
if item.product.manage_stock_amount and amount > item.product.stock_amount and not item.product.order_time:
amount = item.product.stock_amount
if amount < 0:
amount = 0
            if amount == 0:
                message = _(u"Sorry, but '%(product)s' is not available anymore.") % {"product": item.product.name}
            elif amount == 1:
                message = _(u"Sorry, but '%(product)s' is only one time available.") % {"product": item.product.name}
            else:
                message = _(u"Sorry, but '%(product)s' is only %(amount)s times available.") % {"product": item.product.name, "amount": amount}
if item.product.get_active_packing_unit():
item.amount = item.product.get_amount_by_packages(float(amount))
else:
item.amount = amount
if amount == 0:
item.delete()
else:
item.save()
# IMPORTANT: We have to send the signal already here, because the valid
# shipping methods might be dependent on the price.
cart_changed.send(cart, request=request)
# Update shipping method
shipping_method = get_object_or_404(ShippingMethod, pk=request.POST.get("shipping_method"))
customer.selected_shipping_method = shipping_method
valid_shipping_methods = shipping_utils.get_valid_shipping_methods(request)
if customer.selected_shipping_method not in valid_shipping_methods:
customer.selected_shipping_method = shipping_utils.get_default_shipping_method(request)
# Update payment method
payment_method = get_object_or_404(PaymentMethod, pk=request.POST.get("payment_method"))
customer.selected_payment_method = payment_method
# Last but not least we save the customer ...
customer.save()
result = json.dumps({
"html": cart_inline(request),
"message": message,
}, cls=LazyEncoder)
return HttpResponse(result, content_type='application/json')
def check_voucher(request):
"""
Updates the cart after the voucher number has been changed.
"""
voucher_number = lfs.voucher.utils.get_current_voucher_number(request)
lfs.voucher.utils.set_current_voucher_number(request, voucher_number)
result = json.dumps({
"html": (("#cart-inline", cart_inline(request)),)
})
return HttpResponse(result, content_type='application/json')
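# Note for illustration (not part of the original code): the JSON produced by
# refresh_cart() and check_voucher() is meant for the client-side script that
# updates the page. check_voucher(), for example, responds with a payload of
# the form
#
#     {"html": [["#cart-inline", "<rendered cart markup>"]]}
#
# i.e. a list of (CSS selector, replacement markup) pairs.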
|
|
# -*- test-case-name: twisted.test.test_compat -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Compatibility module to provide backwards compatibility for useful Python
features.
This is mainly for use of internal Twisted code. We encourage you to use
the latest version of Python directly from your code, if possible.
@var unicode: The type of Unicode strings, C{unicode} on Python 2 and C{str}
on Python 3.
@var NativeStringIO: An in-memory file-like object that operates on the native
string type (bytes in Python 2, unicode in Python 3).
@var urllib_parse: a URL-parsing module (urlparse on Python 2, urllib.parse on
Python 3)
"""
from __future__ import absolute_import, division
import inspect
import os
import platform
import socket
import struct
import sys
import tokenize
from types import MethodType as _MethodType
from io import TextIOBase, IOBase
if sys.version_info < (3, 0):
_PY3 = False
else:
_PY3 = True
if platform.python_implementation() == 'PyPy':
_PYPY = True
else:
_PYPY = False
def _shouldEnableNewStyle():
"""
Returns whether or not we should enable the new-style conversion of
old-style classes. It inspects the environment for C{TWISTED_NEWSTYLE},
accepting an empty string, C{no}, C{false}, C{False}, and C{0} as falsey
values and everything else as a truthy value.
@rtype: L{bool}
"""
value = os.environ.get('TWISTED_NEWSTYLE', '')
if value in ['', 'no', 'false', 'False', '0']:
return False
else:
return True
_EXPECT_NEWSTYLE = _PY3 or _shouldEnableNewStyle()
def currentframe(n=0):
"""
In Python 3, L{inspect.currentframe} does not take a stack-level argument.
Restore that functionality from Python 2 so we don't have to re-implement
the C{f_back}-walking loop in places where it's called.
@param n: The number of stack levels above the caller to walk.
@type n: L{int}
@return: a frame, n levels up the stack from the caller.
@rtype: L{types.FrameType}
"""
f = inspect.currentframe()
for x in range(n + 1):
f = f.f_back
return f
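# A minimal sketch (not from the original module) of the stack-level argument:
#
#     def g():
#         return currentframe(1)
#
#     def f():
#         return g()
#
# Here f() returns f's own frame, because currentframe(1) returns the frame
# one level above its caller g(), i.e. the frame of f().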
def inet_pton(af, addr):
"""
Emulator of L{socket.inet_pton}.
@param af: An address family to parse; C{socket.AF_INET} or
C{socket.AF_INET6}.
@type af: L{int}
@param addr: An address.
@type addr: native L{str}
@return: The binary packed version of the passed address.
@rtype: L{bytes}
"""
if not addr:
raise ValueError("illegal IP address string passed to inet_pton")
if af == socket.AF_INET:
return socket.inet_aton(addr)
elif af == getattr(socket, 'AF_INET6', 'AF_INET6'):
if '%' in addr and (addr.count('%') > 1 or addr.index("%") == 0):
raise ValueError("illegal IP address string passed to inet_pton")
addr = addr.split('%')[0]
parts = addr.split(':')
elided = parts.count('')
ipv4Component = '.' in parts[-1]
if len(parts) > (8 - ipv4Component) or elided > 3:
raise ValueError("Syntactically invalid address")
if elided == 3:
return '\x00' * 16
if elided:
zeros = ['0'] * (8 - len(parts) - ipv4Component + elided)
if addr.startswith('::'):
parts[:2] = zeros
elif addr.endswith('::'):
parts[-2:] = zeros
else:
idx = parts.index('')
parts[idx:idx+1] = zeros
if len(parts) != 8 - ipv4Component:
raise ValueError("Syntactically invalid address")
else:
if len(parts) != (8 - ipv4Component):
raise ValueError("Syntactically invalid address")
if ipv4Component:
if parts[-1].count('.') != 3:
raise ValueError("Syntactically invalid address")
rawipv4 = socket.inet_aton(parts[-1])
unpackedipv4 = struct.unpack('!HH', rawipv4)
parts[-1:] = [hex(x)[2:] for x in unpackedipv4]
parts = [int(x, 16) for x in parts]
return struct.pack('!8H', *parts)
else:
raise socket.error(97, 'Address family not supported by protocol')
def inet_ntop(af, addr):
if af == socket.AF_INET:
return socket.inet_ntoa(addr)
elif af == socket.AF_INET6:
if len(addr) != 16:
raise ValueError("address length incorrect")
parts = struct.unpack('!8H', addr)
curBase = bestBase = None
for i in range(8):
if not parts[i]:
if curBase is None:
curBase = i
curLen = 0
curLen += 1
else:
if curBase is not None:
if bestBase is None or curLen > bestLen:
bestBase = curBase
bestLen = curLen
curBase = None
if curBase is not None and (bestBase is None or curLen > bestLen):
bestBase = curBase
bestLen = curLen
parts = [hex(x)[2:] for x in parts]
if bestBase is not None:
parts[bestBase:bestBase + bestLen] = ['']
if parts[0] == '':
parts.insert(0, '')
if parts[-1] == '':
parts.insert(len(parts) - 1, '')
return ':'.join(parts)
else:
raise socket.error(97, 'Address family not supported by protocol')
try:
socket.AF_INET6
except AttributeError:
socket.AF_INET6 = 'AF_INET6'
try:
socket.inet_pton(socket.AF_INET6, "::")
except (AttributeError, NameError, socket.error):
socket.inet_pton = inet_pton
socket.inet_ntop = inet_ntop
adict = dict
if _PY3:
# These are actually useless in Python 2 as well, but we need to go
# through deprecation process there (ticket #5895):
del adict, inet_pton, inet_ntop
set = set
frozenset = frozenset
try:
from functools import reduce
except ImportError:
reduce = reduce
def execfile(filename, globals, locals=None):
"""
Execute a Python script in the given namespaces.
Similar to the execfile builtin, but a namespace is mandatory, partly
because that's a sensible thing to require, and because otherwise we'd
have to do some frame hacking.
This is a compatibility implementation for Python 3 porting, to avoid the
use of the deprecated builtin C{execfile} function.
"""
if locals is None:
locals = globals
with open(filename, "rb") as fin:
source = fin.read()
code = compile(source, filename, "exec")
exec(code, globals, locals)
try:
cmp = cmp
except NameError:
def cmp(a, b):
"""
Compare two objects.
Returns a negative number if C{a < b}, zero if they are equal, and a
positive number if C{a > b}.
"""
if a < b:
return -1
elif a == b:
return 0
else:
return 1
def comparable(klass):
"""
Class decorator that ensures support for the special C{__cmp__} method.
On Python 2 this does nothing.
On Python 3, C{__eq__}, C{__lt__}, etc. methods are added to the class,
relying on C{__cmp__} to implement their comparisons.
"""
# On Python 2, __cmp__ will just work, so no need to add extra methods:
if not _PY3:
return klass
def __eq__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c == 0
def __ne__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c != 0
def __lt__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c < 0
def __le__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c <= 0
def __gt__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c > 0
def __ge__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c >= 0
klass.__lt__ = __lt__
klass.__gt__ = __gt__
klass.__le__ = __le__
klass.__ge__ = __ge__
klass.__eq__ = __eq__
klass.__ne__ = __ne__
return klass
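# A minimal illustration (not part of the original module) of how comparable()
# is used: the decorated class defines only __cmp__ and, on Python 3, the
# decorator fills in the rich-comparison methods from it.
#
#     @comparable
#     class Version(object):
#         def __init__(self, number):
#             self.number = number
#
#         def __cmp__(self, other):
#             if not isinstance(other, Version):
#                 return NotImplemented
#             return cmp(self.number, other.number)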
if _PY3:
unicode = str
long = int
else:
unicode = unicode
long = long
def ioType(fileIshObject, default=unicode):
"""
Determine the type which will be returned from the given file object's
read() and accepted by its write() method as an argument.
In other words, determine whether the given file is 'opened in text mode'.
@param fileIshObject: Any object, but ideally one which resembles a file.
@type fileIshObject: L{object}
@param default: A default value to return when the type of C{fileIshObject}
cannot be determined.
@type default: L{type}
    @return: There are 4 possible return values:
1. L{unicode}, if the file is unambiguously opened in text mode.
2. L{bytes}, if the file is unambiguously opened in binary mode.
3. L{basestring}, if we are on python 2 (the L{basestring} type
does not exist on python 3) and the file is opened in binary
mode, but has an encoding and can therefore accept both bytes
and text reliably for writing, but will return L{bytes} from
read methods.
4. The C{default} parameter, if the given type is not understood.
@rtype: L{type}
"""
if isinstance(fileIshObject, TextIOBase):
# If it's for text I/O, then it's for text I/O.
return unicode
if isinstance(fileIshObject, IOBase):
# If it's for I/O but it's _not_ for text I/O, it's for bytes I/O.
return bytes
encoding = getattr(fileIshObject, 'encoding', None)
import codecs
if isinstance(fileIshObject, (codecs.StreamReader, codecs.StreamWriter)):
# On StreamReaderWriter, the 'encoding' attribute has special meaning;
# it is unambiguously unicode.
if encoding:
return unicode
else:
return bytes
if not _PY3:
# Special case: if we have an encoding file, we can *give* it unicode,
# but we can't expect to *get* unicode.
if isinstance(fileIshObject, file):
if encoding is not None:
return basestring
else:
return bytes
from cStringIO import InputType, OutputType
from StringIO import StringIO
if isinstance(fileIshObject, (StringIO, InputType, OutputType)):
return bytes
return default
def nativeString(s):
"""
Convert C{bytes} or C{unicode} to the native C{str} type, using ASCII
encoding if conversion is necessary.
@raise UnicodeError: The input string is not ASCII encodable/decodable.
@raise TypeError: The input is neither C{bytes} nor C{unicode}.
"""
if not isinstance(s, (bytes, unicode)):
raise TypeError("%r is neither bytes nor unicode" % s)
if _PY3:
if isinstance(s, bytes):
return s.decode("ascii")
else:
# Ensure we're limited to ASCII subset:
s.encode("ascii")
else:
if isinstance(s, unicode):
return s.encode("ascii")
else:
# Ensure we're limited to ASCII subset:
s.decode("ascii")
return s
def _matchingString(constantString, inputString):
"""
Some functions, such as C{os.path.join}, operate on string arguments which
may be bytes or text, and wish to return a value of the same type. In
those cases you may wish to have a string constant (in the case of
C{os.path.join}, that constant would be C{os.path.sep}) involved in the
parsing or processing, that must be of a matching type in order to use
string operations on it. L{_matchingString} will take a constant string
(either L{bytes} or L{unicode}) and convert it to the same type as the
input string. C{constantString} should contain only characters from ASCII;
to ensure this, it will be encoded or decoded regardless.
@param constantString: A string literal used in processing.
@type constantString: L{unicode} or L{bytes}
@param inputString: A byte string or text string provided by the user.
@type inputString: L{unicode} or L{bytes}
@return: C{constantString} converted into the same type as C{inputString}
@rtype: the type of C{inputString}
"""
if isinstance(constantString, bytes):
otherType = constantString.decode("ascii")
else:
otherType = constantString.encode("ascii")
if type(constantString) == type(inputString):
return constantString
else:
return otherType
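# Illustrative only (hypothetical variables): building a path suffix whose
# type matches a user-supplied string that may be either bytes or text.
#
#     sep = _matchingString("/", userPath)          # b"/" if userPath is bytes
#     fullPath = userPath + sep + _matchingString("index.html", userPath)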
if _PY3:
def reraise(exception, traceback):
raise exception.with_traceback(traceback)
else:
exec("""def reraise(exception, traceback):
raise exception.__class__, exception, traceback""")
reraise.__doc__ = """
Re-raise an exception, with an optional traceback, in a way that is compatible
with both Python 2 and Python 3.
Note that on Python 3, re-raised exceptions will be mutated, with their
C{__traceback__} attribute being set.
@param exception: The exception instance.
@param traceback: The traceback to use, or L{None} indicating a new traceback.
"""
if _PY3:
from io import StringIO as NativeStringIO
else:
from io import BytesIO as NativeStringIO
# Functions for dealing with Python 3's bytes type, which is somewhat
# different than Python 2's:
if _PY3:
def iterbytes(originalBytes):
for i in range(len(originalBytes)):
yield originalBytes[i:i+1]
def intToBytes(i):
return ("%d" % i).encode("ascii")
# Ideally we would use memoryview, but it has a number of differences from
# the Python 2 buffer() that make that impractical
# (http://bugs.python.org/issue15945, incompatibility with pyOpenSSL due to
# PyArg_ParseTuple differences.)
def lazyByteSlice(object, offset=0, size=None):
"""
Return a copy of the given bytes-like object.
If an offset is given, the copy starts at that offset. If a size is
given, the copy will only be of that length.
@param object: C{bytes} to be copied.
@param offset: C{int}, starting index of copy.
@param size: Optional, if an C{int} is given limit the length of copy
to this size.
"""
if size is None:
return object[offset:]
else:
return object[offset:(offset + size)]
def networkString(s):
if not isinstance(s, unicode):
raise TypeError("Can only convert text to bytes on Python 3")
return s.encode('ascii')
else:
def iterbytes(originalBytes):
return originalBytes
def intToBytes(i):
return b"%d" % i
lazyByteSlice = buffer
def networkString(s):
if not isinstance(s, str):
raise TypeError("Can only pass-through bytes on Python 2")
# Ensure we're limited to ASCII subset:
s.decode('ascii')
return s
iterbytes.__doc__ = """
Return an iterable wrapper for a C{bytes} object that provides the behavior of
iterating over C{bytes} on Python 2.
In particular, the results of iteration are the individual bytes (rather than
integers as on Python 3).
@param originalBytes: A C{bytes} object that will be wrapped.
"""
intToBytes.__doc__ = """
Convert the given integer into C{bytes}, as an ASCII-encoded Arabic numeral.
In other words, this is equivalent to calling C{bytes} in Python 2 on an
integer.
@param i: The C{int} to convert to C{bytes}.
@rtype: C{bytes}
"""
networkString.__doc__ = """
Convert the native string type to C{bytes} if it is not already C{bytes} using
ASCII encoding if conversion is necessary.
This is useful for sending text-like bytes that are constructed using string
interpolation. For example, this is safe on Python 2 and Python 3:
networkString("Hello %d" % (n,))
@param s: A native string to convert to bytes if necessary.
@type s: C{str}
@raise UnicodeError: The input string is not ASCII encodable/decodable.
@raise TypeError: The input is neither C{bytes} nor C{unicode}.
@rtype: C{bytes}
"""
try:
StringType = basestring
except NameError:
# Python 3+
StringType = str
try:
from types import InstanceType
except ImportError:
# Python 3+
InstanceType = object
try:
from types import FileType
except ImportError:
# Python 3+
FileType = IOBase
if _PY3:
import urllib.parse as urllib_parse
from html import escape
from urllib.parse import quote as urlquote
from urllib.parse import unquote as urlunquote
from http import cookiejar as cookielib
else:
import urlparse as urllib_parse
from cgi import escape
from urllib import quote as urlquote
from urllib import unquote as urlunquote
import cookielib
# Dealing with the differences in items/iteritems
if _PY3:
def iteritems(d):
return d.items()
def itervalues(d):
return d.values()
def items(d):
return list(d.items())
xrange = range
izip = zip
else:
def iteritems(d):
return d.iteritems()
def itervalues(d):
return d.itervalues()
def items(d):
return d.items()
xrange = xrange
from itertools import izip
izip # shh pyflakes
iteritems.__doc__ = """
Return an iterable of the items of C{d}.
@type d: L{dict}
@rtype: iterable
"""
itervalues.__doc__ = """
Return an iterable of the values of C{d}.
@type d: L{dict}
@rtype: iterable
"""
items.__doc__ = """
Return a list of the items of C{d}.
@type d: L{dict}
@rtype: L{list}
"""
def _keys(d):
"""
Return a list of the keys of C{d}.
@type d: L{dict}
@rtype: L{list}
"""
if _PY3:
return list(d.keys())
else:
return d.keys()
def bytesEnviron():
"""
Return a L{dict} of L{os.environ} where all text-strings are encoded into
L{bytes}.
This function is POSIX only; environment variables are always text strings
on Windows.
"""
if not _PY3:
# On py2, nothing to do.
return dict(os.environ)
target = dict()
for x, y in os.environ.items():
target[os.environ.encodekey(x)] = os.environ.encodevalue(y)
return target
def _constructMethod(cls, name, self):
"""
Construct a bound method.
@param cls: The class that the method should be bound to.
@type cls: L{types.ClassType} or L{type}.
@param name: The name of the method.
@type name: native L{str}
@param self: The object that the method is bound to.
@type self: any object
@return: a bound method
@rtype: L{types.MethodType}
"""
func = cls.__dict__[name]
if _PY3:
return _MethodType(func, self)
return _MethodType(func, self, cls)
from incremental import Version
from twisted.python.deprecate import deprecatedModuleAttribute
from collections import OrderedDict
deprecatedModuleAttribute(
Version("Twisted", 15, 5, 0),
"Use collections.OrderedDict instead.",
"twisted.python.compat",
"OrderedDict")
if _PY3:
from base64 import encodebytes as _b64encodebytes
from base64 import decodebytes as _b64decodebytes
else:
from base64 import encodestring as _b64encodebytes
from base64 import decodestring as _b64decodebytes
def _bytesChr(i):
"""
Like L{chr} but always works on ASCII, returning L{bytes}.
@param i: The ASCII code point to return.
@type i: L{int}
@rtype: L{bytes}
"""
if _PY3:
return bytes([i])
else:
return chr(i)
try:
from sys import intern
except ImportError:
intern = intern
def _coercedUnicode(s):
"""
Coerce ASCII-only byte strings into unicode for Python 2.
In Python 2 C{unicode(b'bytes')} returns a unicode string C{'bytes'}. In
Python 3, the equivalent C{str(b'bytes')} will return C{"b'bytes'"}
instead. This function mimics the behavior for Python 2. It will decode the
byte string as ASCII. In Python 3 it simply raises a L{TypeError} when
passing a byte string. Unicode strings are returned as-is.
@param s: The string to coerce.
@type s: L{bytes} or L{unicode}
@raise UnicodeError: The input L{bytes} is not ASCII decodable.
@raise TypeError: The input is L{bytes} on Python 3.
"""
if isinstance(s, bytes):
if _PY3:
raise TypeError("Expected str not %r (bytes)" % (s,))
else:
return s.decode('ascii')
else:
return s
if _PY3:
unichr = chr
raw_input = input
else:
unichr = unichr
raw_input = raw_input
def _bytesRepr(bytestring):
"""
Provide a repr for a byte string that begins with 'b' on both
Python 2 and 3.
@param bytestring: The string to repr.
@type bytestring: L{bytes}
@raise TypeError: The input is not L{bytes}.
@return: The repr with a leading 'b'.
@rtype: L{bytes}
"""
if not isinstance(bytestring, bytes):
raise TypeError("Expected bytes not %r" % (bytestring,))
if _PY3:
return repr(bytestring)
else:
return 'b' + repr(bytestring)
if _PY3:
_tokenize = tokenize.tokenize
else:
_tokenize = tokenize.generate_tokens
__all__ = [
"reraise",
"execfile",
"frozenset",
"reduce",
"set",
"cmp",
"comparable",
"OrderedDict",
"nativeString",
"NativeStringIO",
"networkString",
"unicode",
"iterbytes",
"intToBytes",
"lazyByteSlice",
"StringType",
"InstanceType",
"FileType",
"items",
"iteritems",
"itervalues",
"xrange",
"urllib_parse",
"bytesEnviron",
"escape",
"urlquote",
"urlunquote",
"cookielib",
"_keys",
"_b64encodebytes",
"_b64decodebytes",
"_bytesChr",
"_coercedUnicode",
"_bytesRepr",
"intern",
"unichr",
"raw_input",
"_tokenize"
]
|
|
"""Base class for sparse matrices"""
from __future__ import division, print_function, absolute_import
__all__ = ['spmatrix', 'isspmatrix', 'issparse',
'SparseWarning','SparseEfficiencyWarning']
import sys
import numpy as np
from scipy.lib.six import xrange
from .sputils import isdense, isscalarlike, isintlike
class SparseWarning(Warning):
pass
class SparseFormatWarning(SparseWarning):
pass
class SparseEfficiencyWarning(SparseWarning):
pass
# The formats that we might potentially understand.
_formats = {'csc':[0, "Compressed Sparse Column"],
'csr':[1, "Compressed Sparse Row"],
'dok':[2, "Dictionary Of Keys"],
'lil':[3, "LInked List"],
'dod':[4, "Dictionary of Dictionaries"],
'sss':[5, "Symmetric Sparse Skyline"],
'coo':[6, "COOrdinate"],
'lba':[7, "Linpack BAnded"],
'egd':[8, "Ellpack-itpack Generalized Diagonal"],
'dia':[9, "DIAgonal"],
'bsr':[10, "Block Sparse Row"],
'msr':[11, "Modified compressed Sparse Row"],
'bsc':[12, "Block Sparse Column"],
'msc':[13, "Modified compressed Sparse Column"],
'ssk':[14, "Symmetric SKyline"],
'nsk':[15, "Nonsymmetric SKyline"],
'jad':[16, "JAgged Diagonal"],
'uss':[17, "Unsymmetric Sparse Skyline"],
'vbr':[18, "Variable Block Row"],
'und':[19, "Undefined"]
}
# These univariate ufuncs preserve zeros.
_ufuncs_with_fixed_point_at_zero = frozenset([
np.sin, np.tan, np.arcsin, np.arctan, np.sinh, np.tanh, np.arcsinh,
np.arctanh, np.rint, np.sign, np.expm1, np.log1p, np.deg2rad,
np.rad2deg, np.floor, np.ceil, np.trunc, np.sqrt])
MAXPRINT = 50
class spmatrix(object):
""" This class provides a base class for all sparse matrices. It
cannot be instantiated. Most of the work is provided by subclasses.
"""
__array_priority__ = 10.1
ndim = 2
def __init__(self, maxprint=MAXPRINT):
self.format = self.__class__.__name__[:3]
self._shape = None
if self.format == 'spm':
raise ValueError("This class is not intended"
" to be instantiated directly.")
self.maxprint = maxprint
def set_shape(self,shape):
shape = tuple(shape)
if len(shape) != 2:
raise ValueError("Only two-dimensional sparse arrays "
"are supported.")
        try:
            shape = int(shape[0]), int(shape[1])  # floats, other weirdness
        except Exception:
            raise TypeError('invalid shape')
if not (shape[0] >= 0 and shape[1] >= 0):
raise ValueError('invalid shape')
if (self._shape != shape) and (self._shape is not None):
try:
self = self.reshape(shape)
except NotImplementedError:
raise NotImplementedError("Reshaping not implemented for %s." %
self.__class__.__name__)
self._shape = shape
def get_shape(self):
return self._shape
shape = property(fget=get_shape, fset=set_shape)
def reshape(self,shape):
raise NotImplementedError
def astype(self, t):
return self.tocsr().astype(t).asformat(self.format)
def asfptype(self):
"""Upcast matrix to a floating point format (if necessary)"""
fp_types = ['f','d','F','D']
if self.dtype.char in fp_types:
return self
else:
for fp_type in fp_types:
if self.dtype <= np.dtype(fp_type):
return self.astype(fp_type)
raise TypeError('cannot upcast [%s] to a floating '
'point format' % self.dtype.name)
def __iter__(self):
for r in xrange(self.shape[0]):
yield self[r,:]
def getmaxprint(self):
try:
maxprint = self.maxprint
except AttributeError:
maxprint = MAXPRINT
return maxprint
# def typecode(self):
# try:
# typ = self.dtype.char
# except AttributeError:
# typ = None
# return typ
def getnnz(self):
try:
return self.nnz
except AttributeError:
raise AttributeError("nnz not defined")
def getformat(self):
try:
format = self.format
except AttributeError:
format = 'und'
return format
def __repr__(self):
nnz = self.getnnz()
format = self.getformat()
return "<%dx%d sparse matrix of type '%s'\n" \
"\twith %d stored elements in %s format>" % \
(self.shape + (self.dtype.type, nnz, _formats[format][1]))
def __str__(self):
maxprint = self.getmaxprint()
A = self.tocoo()
nnz = self.getnnz()
# helper function, outputs "(i,j) v"
def tostr(row,col,data):
triples = zip(list(zip(row,col)),data)
return '\n'.join([(' %s\t%s' % t) for t in triples])
if nnz > maxprint:
half = maxprint // 2
out = tostr(A.row[:half], A.col[:half], A.data[:half])
out += "\n :\t:\n"
half = maxprint - maxprint//2
out += tostr(A.row[-half:], A.col[-half:], A.data[-half:])
else:
out = tostr(A.row, A.col, A.data)
return out
def __bool__(self): # Simple -- other ideas?
if self.shape == (1, 1):
return True if self.nnz == 1 else False
else:
raise ValueError("The truth value of an array with more than one "
"element is ambiguous. Use a.any() or a.all().")
__nonzero__ = __bool__
# What should len(sparse) return? For consistency with dense matrices,
# perhaps it should be the number of rows? But for some uses the number of
# non-zeros is more important. For now, raise an exception!
def __len__(self):
# return self.getnnz()
raise TypeError("sparse matrix length is ambiguous; use getnnz()"
" or shape[0]")
def asformat(self, format):
"""Return this matrix in a given sparse format
Parameters
----------
format : {string, None}
desired sparse matrix format
- None for no format conversion
- "csr" for csr_matrix format
- "csc" for csc_matrix format
- "lil" for lil_matrix format
- "dok" for dok_matrix format and so on
"""
if format is None or format == self.format:
return self
else:
return getattr(self,'to' + format)()
###################################################################
# NOTE: All arithmetic operations use csr_matrix by default.
# Therefore a new sparse matrix format just needs to define a
# .tocsr() method to provide arithmetic support. Any of these
# methods can be overridden for efficiency.
####################################################################
def multiply(self, other):
"""Point-wise multiplication by another matrix
"""
return self.tocsr().multiply(other)
def maximum(self, other):
return self.tocsr().maximum(other)
def minimum(self, other):
return self.tocsr().minimum(other)
def dot(self, other):
"""Ordinary dot product
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import csr_matrix
>>> A = csr_matrix([[1, 2, 0], [0, 0, 3], [4, 0, 5]])
>>> v = np.array([1, 0, -1])
>>> A.dot(v)
array([ 1, -3, -1], dtype=int64)
"""
return self * other
def __eq__(self, other):
return self.tocsr().__eq__(other)
def __ne__(self, other):
return self.tocsr().__ne__(other)
def __lt__(self,other):
return self.tocsr().__lt__(other)
def __gt__(self,other):
return self.tocsr().__gt__(other)
def __le__(self,other):
return self.tocsr().__le__(other)
def __ge__(self,other):
return self.tocsr().__ge__(other)
def __abs__(self):
return abs(self.tocsr())
def __add__(self, other): # self + other
return self.tocsr().__add__(other)
def __radd__(self, other): # other + self
return self.tocsr().__radd__(other)
def __sub__(self, other): # self - other
# note: this can't be replaced by self + (-other) for unsigned types
return self.tocsr().__sub__(other)
def __rsub__(self, other): # other - self
return self.tocsr().__rsub__(other)
def __mul__(self, other):
"""interpret other and call one of the following
self._mul_scalar()
self._mul_vector()
self._mul_multivector()
self._mul_sparse_matrix()
"""
M,N = self.shape
if other.__class__ is np.ndarray:
# Fast path for the most common case
if other.shape == (N,):
return self._mul_vector(other)
elif other.shape == (N, 1):
return self._mul_vector(other.ravel()).reshape(M, 1)
elif other.ndim == 2 and other.shape[0] == N:
return self._mul_multivector(other)
if isscalarlike(other):
# scalar value
return self._mul_scalar(other)
if issparse(other):
if self.shape[1] != other.shape[0]:
raise ValueError('dimension mismatch')
return self._mul_sparse_matrix(other)
try:
other.shape
except AttributeError:
# If it's a list or whatever, treat it like a matrix
other_a = np.asanyarray(other)
if other_a.ndim == 0 and other_a.dtype == np.object_:
# Not interpretable as an array; return NotImplemented so that
# other's __rmul__ can kick in if that's implemented.
return NotImplemented
other = other_a
if other.ndim == 1 or other.ndim == 2 and other.shape[1] == 1:
# dense row or column vector
if other.shape != (N,) and other.shape != (N,1):
raise ValueError('dimension mismatch')
result = self._mul_vector(np.ravel(other))
if isinstance(other, np.matrix):
result = np.asmatrix(result)
if other.ndim == 2 and other.shape[1] == 1:
# If 'other' was an (nx1) column vector, reshape the result
result = result.reshape(-1,1)
return result
elif other.ndim == 2:
##
# dense 2D array or matrix ("multivector")
if other.shape[0] != self.shape[1]:
raise ValueError('dimension mismatch')
result = self._mul_multivector(np.asarray(other))
if isinstance(other, np.matrix):
result = np.asmatrix(result)
return result
else:
raise ValueError('could not interpret dimensions')
# by default, use CSR for __mul__ handlers
def _mul_scalar(self, other):
return self.tocsr()._mul_scalar(other)
def _mul_vector(self, other):
return self.tocsr()._mul_vector(other)
def _mul_multivector(self, other):
return self.tocsr()._mul_multivector(other)
def _mul_sparse_matrix(self, other):
return self.tocsr()._mul_sparse_matrix(other)
def __rmul__(self, other): # other * self
if isscalarlike(other):
return self.__mul__(other)
else:
# Don't use asarray unless we have to
try:
tr = other.transpose()
except AttributeError:
tr = np.asarray(other).transpose()
return (self.transpose() * tr).transpose()
####################
# Other Arithmetic #
####################
def _divide(self, other, true_divide=False, rdivide=False):
if isscalarlike(other):
if rdivide:
if true_divide:
return np.true_divide(other, self.todense())
else:
return np.divide(other, self.todense())
if true_divide and np.can_cast(self.dtype, np.float_):
return self.astype(np.float_)._mul_scalar(1./other)
else:
r = self._mul_scalar(1./other)
scalar_dtype = np.asarray(other).dtype
if (np.issubdtype(self.dtype, np.integer)
and np.issubdtype(scalar_dtype, np.integer)):
return r.astype(self.dtype)
else:
return r
elif isdense(other):
if not rdivide:
if true_divide:
return np.true_divide(self.todense(), other)
else:
return np.divide(self.todense(), other)
else:
if true_divide:
return np.true_divide(other, self.todense())
else:
return np.divide(other, self.todense())
elif isspmatrix(other):
if rdivide:
return other._divide(self, true_divide, rdivide=False)
self_csr = self.tocsr()
if true_divide and np.can_cast(self.dtype, np.float_):
return self_csr.astype(np.float_)._divide_sparse(other)
else:
return self_csr._divide_sparse(other)
else:
return NotImplemented
def __truediv__(self, other):
return self._divide(other, true_divide=True)
def __div__(self, other):
# Always do true division
return self._divide(other, true_divide=True)
def __rtruediv__(self, other):
# Implementing this as the inverse would be too magical -- bail out
return NotImplemented
def __rdiv__(self, other):
# Implementing this as the inverse would be too magical -- bail out
return NotImplemented
def __neg__(self):
return -self.tocsr()
def __iadd__(self, other):
return NotImplemented
def __isub__(self, other):
return NotImplemented
def __imul__(self, other):
return NotImplemented
def __idiv__(self, other):
return self.__itruediv__(other)
def __itruediv__(self, other):
return NotImplemented
def __pow__(self, other):
if self.shape[0] != self.shape[1]:
raise TypeError('matrix is not square')
if isintlike(other):
other = int(other)
if other < 0:
raise ValueError('exponent must be >= 0')
if other == 0:
from .construct import eye
return eye(self.shape[0], dtype=self.dtype)
elif other == 1:
return self.copy()
else:
tmp = self.__pow__(other//2)
if (other % 2):
return self * tmp * tmp
else:
return tmp * tmp
elif isscalarlike(other):
raise ValueError('exponent must be an integer')
else:
raise NotImplementedError
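    # Illustrative note (not from the original source): powers are computed by
    # repeated squaring, so A ** 4 evaluates as (A * A) * (A * A), using two
    # sparse products instead of three. For A = csr_matrix([[2, 0], [0, 3]])
    # the result is the diagonal matrix with entries 16 and 81.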
def __getattr__(self, attr):
if attr == 'A':
return self.toarray()
elif attr == 'T':
return self.transpose()
elif attr == 'H':
return self.getH()
elif attr == 'real':
return self._real()
elif attr == 'imag':
return self._imag()
elif attr == 'size':
return self.getnnz()
else:
raise AttributeError(attr + " not found")
def transpose(self):
return self.tocsr().transpose()
def conj(self):
return self.tocsr().conj()
def conjugate(self):
return self.conj()
# Renamed conjtranspose() -> getH() for compatibility with dense matrices
def getH(self):
return self.transpose().conj()
def _real(self):
return self.tocsr()._real()
def _imag(self):
return self.tocsr()._imag()
def nonzero(self):
"""nonzero indices
Returns a tuple of arrays (row,col) containing the indices
of the non-zero elements of the matrix.
Examples
--------
>>> from scipy.sparse import csr_matrix
>>> A = csr_matrix([[1,2,0],[0,0,3],[4,0,5]])
>>> A.nonzero()
(array([0, 0, 1, 2, 2]), array([0, 1, 2, 0, 2]))
"""
# convert to COOrdinate format
A = self.tocoo()
nz_mask = A.data != 0
return (A.row[nz_mask],A.col[nz_mask])
def getcol(self, j):
"""Returns a copy of column j of the matrix, as an (m x 1) sparse
matrix (column vector).
"""
# Spmatrix subclasses should override this method for efficiency.
# Post-multiply by a (n x 1) column vector 'a' containing all zeros
# except for a_j = 1
from .csc import csc_matrix
n = self.shape[1]
if j < 0:
j += n
if j < 0 or j >= n:
raise IndexError("index out of bounds")
col_selector = csc_matrix(([1], [[j], [0]]), shape=(n,1), dtype=self.dtype)
return self * col_selector
def getrow(self, i):
"""Returns a copy of row i of the matrix, as a (1 x n) sparse
matrix (row vector).
"""
# Spmatrix subclasses should override this method for efficiency.
# Pre-multiply by a (1 x m) row vector 'a' containing all zeros
# except for a_i = 1
from .csr import csr_matrix
m = self.shape[0]
if i < 0:
i += m
if i < 0 or i >= m:
raise IndexError("index out of bounds")
row_selector = csr_matrix(([1], [[0], [i]]), shape=(1,m), dtype=self.dtype)
return row_selector * self
# def __array__(self):
# return self.toarray()
def todense(self, order=None, out=None):
"""
Return a dense matrix representation of this matrix.
Parameters
----------
order : {'C', 'F'}, optional
Whether to store multi-dimensional data in C (row-major)
or Fortran (column-major) order in memory. The default
is 'None', indicating the NumPy default of C-ordered.
Cannot be specified in conjunction with the `out`
argument.
out : ndarray, 2-dimensional, optional
If specified, uses this array (or `numpy.matrix`) as the
output buffer instead of allocating a new array to
return. The provided array must have the same shape and
dtype as the sparse matrix on which you are calling the
method.
Returns
-------
arr : numpy.matrix, 2-dimensional
A NumPy matrix object with the same shape and containing
the same data represented by the sparse matrix, with the
requested memory order. If `out` was passed and was an
array (rather than a `numpy.matrix`), it will be filled
with the appropriate values and returned wrapped in a
`numpy.matrix` object that shares the same memory.
"""
return np.asmatrix(self.toarray(order=order, out=out))
def toarray(self, order=None, out=None):
"""
Return a dense ndarray representation of this matrix.
Parameters
----------
order : {'C', 'F'}, optional
Whether to store multi-dimensional data in C (row-major)
or Fortran (column-major) order in memory. The default
is 'None', indicating the NumPy default of C-ordered.
Cannot be specified in conjunction with the `out`
argument.
out : ndarray, 2-dimensional, optional
If specified, uses this array as the output buffer
instead of allocating a new array to return. The provided
array must have the same shape and dtype as the sparse
matrix on which you are calling the method. For most
sparse types, `out` is required to be memory contiguous
(either C or Fortran ordered).
Returns
-------
arr : ndarray, 2-dimensional
An array with the same shape and containing the same
data represented by the sparse matrix, with the requested
memory order. If `out` was passed, the same object is
returned after being modified in-place to contain the
appropriate values.
"""
return self.tocoo().toarray(order=order, out=out)
def todok(self):
return self.tocoo().todok()
def tocoo(self):
return self.tocsr().tocoo()
def tolil(self):
return self.tocsr().tolil()
def todia(self):
return self.tocoo().todia()
def tobsr(self, blocksize=None):
return self.tocsr().tobsr(blocksize=blocksize)
def copy(self):
return self.__class__(self,copy=True)
def sum(self, axis=None):
"""Sum the matrix over the given axis. If the axis is None, sum
over both rows and columns, returning a scalar.
"""
# We use multiplication by an array of ones to achieve this.
# For some sparse matrix formats more efficient methods are
# possible -- these should override this function.
m, n = self.shape
# Mimic numpy's casting.
if np.issubdtype(self.dtype, np.float_):
res_dtype = np.float_
elif (np.issubdtype(self.dtype, np.int_) or
np.issubdtype(self.dtype, np.bool_)):
res_dtype = np.int_
elif np.issubdtype(self.dtype, np.complex_):
res_dtype = np.complex_
else:
res_dtype = self.dtype
if axis is None:
# sum over rows and columns
return (self * np.asmatrix(np.ones((n, 1), dtype=res_dtype))).sum()
if axis < 0:
axis += 2
if axis == 0:
# sum over columns
return np.asmatrix(np.ones((1, m), dtype=res_dtype)) * self
elif axis == 1:
# sum over rows
return self * np.asmatrix(np.ones((n, 1), dtype=res_dtype))
else:
raise ValueError("axis out of bounds")
def mean(self, axis=None):
"""Average the matrix over the given axis. If the axis is None,
average over both rows and columns, returning a scalar.
"""
# Mimic numpy's casting.
if (np.issubdtype(self.dtype, np.float_) or
np.issubdtype(self.dtype, np.integer) or
np.issubdtype(self.dtype, np.bool_)):
res_dtype = np.float_
elif np.issubdtype(self.dtype, np.complex_):
res_dtype = np.complex_
else:
res_dtype = self.dtype
if axis is None:
return self.sum(None) * 1.0 / (self.shape[0]*self.shape[1])
if axis < 0:
axis += 2
if axis == 0:
mean = self.astype(res_dtype).sum(0)
mean *= 1.0 / self.shape[0]
return mean
elif axis == 1:
mean = self.astype(res_dtype).sum(1)
mean *= 1.0 / self.shape[1]
return mean
else:
raise ValueError("axis out of bounds")
def diagonal(self):
"""Returns the main diagonal of the matrix
"""
# TODO support k != 0
return self.tocsr().diagonal()
def setdiag(self, values, k=0):
"""
Set diagonal or off-diagonal elements of the array.
Parameters
----------
values : array_like
New values of the diagonal elements.
Values may have any length. If the diagonal is longer than values,
then the remaining diagonal entries will not be set. If values is
longer than the diagonal, then the remaining values are ignored.
If a scalar value is given, all of the diagonal is set to it.
k : int, optional
Which off-diagonal to set, corresponding to elements a[i,i+k].
Default: 0 (the main diagonal).
"""
M, N = self.shape
if (k > 0 and k >= N) or (k < 0 and -k >= M):
raise ValueError("k exceeds matrix dimensions")
self._setdiag(np.asarray(values), k)
def _setdiag(self, values, k):
M, N = self.shape
if k < 0:
if values.ndim == 0:
# broadcast
max_index = min(M+k, N)
for i in xrange(max_index):
self[i - k, i] = values
else:
max_index = min(M+k, N, len(values))
if max_index <= 0:
return
for i,v in enumerate(values[:max_index]):
self[i - k, i] = v
else:
if values.ndim == 0:
# broadcast
max_index = min(M, N-k)
for i in xrange(max_index):
self[i, i + k] = values
else:
max_index = min(M, N-k, len(values))
if max_index <= 0:
return
for i,v in enumerate(values[:max_index]):
self[i, i + k] = v
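# Illustrative sketch (not part of the original class): setdiag broadcasts a
# scalar over the whole diagonal and truncates arrays that are shorter or
# longer than the selected diagonal, e.g. with the lil_matrix format (which
# supports cheap item assignment):
#   from scipy.sparse import lil_matrix
#   A = lil_matrix((3, 3))
#   A.setdiag(7)             # main diagonal becomes [7, 7, 7]
#   A.setdiag([1, 2], k=1)   # entries (0, 1) and (1, 2) become 1 and 2
#   A.toarray()              # -> [[7., 1., 0.], [0., 7., 2.], [0., 0., 7.]]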
def _process_toarray_args(self, order, out):
if out is not None:
if order is not None:
raise ValueError('order cannot be specified if out '
'is not None')
if out.shape != self.shape or out.dtype != self.dtype:
raise ValueError('out array must be same dtype and shape as '
'sparse matrix')
out[...] = 0.
return out
else:
return np.zeros(self.shape, dtype=self.dtype, order=order)
def __numpy_ufunc__(self, func, method, pos, inputs, **kwargs):
"""Method for compatibility with NumPy's ufuncs and dot
functions.
"""
if any(not isinstance(x, spmatrix) and np.asarray(x).dtype == object
for x in inputs):
# preserve previous behavior with object arrays
with_self = list(inputs)
with_self[pos] = np.asarray(self, dtype=object)
return getattr(func, method)(*with_self, **kwargs)
out = kwargs.pop('out', None)
if method != '__call__' or kwargs:
return NotImplemented
without_self = list(inputs)
del without_self[pos]
without_self = tuple(without_self)
if func is np.multiply:
result = self.multiply(*without_self)
elif func is np.add:
result = self.__add__(*without_self)
elif func is np.dot:
if pos == 0:
result = self.__mul__(inputs[1])
else:
result = self.__rmul__(inputs[0])
elif func is np.subtract:
if pos == 0:
result = self.__sub__(inputs[1])
else:
result = self.__rsub__(inputs[0])
elif func is np.divide:
true_divide = (sys.version_info[0] >= 3)
rdivide = (pos == 1)
result = self._divide(*without_self,
true_divide=true_divide,
rdivide=rdivide)
elif func is np.true_divide:
rdivide = (pos == 1)
result = self._divide(*without_self, true_divide=True, rdivide=rdivide)
elif func is np.maximum:
result = self.maximum(*without_self)
elif func is np.minimum:
result = self.minimum(*without_self)
elif func is np.absolute:
result = abs(self)
elif func in _ufuncs_with_fixed_point_at_zero:
func_name = func.__name__
if hasattr(self, func_name):
result = getattr(self, func_name)()
else:
result = getattr(self.tocsr(), func_name)()
else:
return NotImplemented
if out is not None:
if not isinstance(out, spmatrix) and isinstance(result, spmatrix):
out[...] = result.todense()
else:
out[...] = result
result = out
return result
def isspmatrix(x):
return isinstance(x, spmatrix)
issparse = isspmatrix
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
# Variable definition
makefile = 'Makefile'
executable = 'chimera'
extend_library = 'module'
extend_library_name = 'lib' + extend_library + '.so'
core_library = 'chimera'
core_library_name = 'lib' + core_library + '.so'
runtime_library = 'runtime'
runtime_library_name = 'lib' + runtime_library + '.so'
lua_path = 'lua-5.3.3'
lua_src_path = os.path.join(lua_path, 'src')
lua_lib = 'liblua.so.5.3.3'
lua_lib_ln = 'liblua.so'
part_main = 'main'
part_runtime = 'runtime'
part_extend = 'extend'
part_core = 'core'
part_module = 'module'
flags = {
part_main: {
'inc': ['-Ilua-5.3.3/src'],
'cflags': ['-Wall', '-fexceptions', '-std=c++11'],
'resinc': [],
'libdir': [],
'lib': [],
'ldflags': ['-ldl', '-lboost_system', '-lboost_filesystem']
},
part_runtime: {
'inc': ['-Ilua-5.3.3/src'],
'cflags': ['-Wall', '-fexceptions', '-fPIC', '-std=c++11'],
'resinc': [],
'libdir': [],
'lib': [],
'ldflags': ['-ldl', '-lboost_system', '-lboost_filesystem', '-shared', '-Wl,-soname']
},
part_extend: {
'inc': ['-Ilua-5.3.3/src'],
'cflags': ['-Wall', '-fexceptions', '-fPIC', '-std=c++11'],
'resinc': [],
'libdir': [],
'lib': [],
'ldflags': ['-lboost_system', '-shared', '-Wl,-soname']
},
part_module: {
'inc': ['-Ilua-5.3.3/src'],
'cflags': ['-Wall', '-fexceptions', '-fPIC', '-std=c++11'],
'resinc': [],
'libdir': [],
'lib': [],
'ldflags': ['-lboost_system', '-shared', '-Wl,-soname']
},
part_core: {
'inc': ['-Ilua-5.3.3/src'],
'cflags': ['-Wall', '-fexceptions', '-fPIC', '-std=c++11'],
'resinc': [],
'libdir': [],
'lib': [],
'ldflags': ['-Lbin/Debug', '-llua', '-lboost_system', '-ldl', '-shared', '-Wl,-soname']
}
}
built_names = {
part_main: '$(EXE_NAME)',
part_runtime: '$(RUN_NAME)',
part_extend: '$(MOD_NAME)',
part_core: '$(CORE_NAME)',
part_module: ''
}
path_names = {
part_runtime: 'runtime',
part_extend: 'extend',
part_main: 'main',
part_core: 'core',
part_module: 'modules'
}
relate_names = {
part_main: 'out',
part_runtime: 'run',
part_extend: 'mod',
part_core: 'core',
part_module: ''
}
inc_names = {
part_runtime: runtime_library,
part_extend: extend_library,
part_core: core_library
}
dependencies = {
part_main: [part_runtime, part_extend, part_core],
part_runtime: [part_extend, part_core],
part_extend: [part_core],
part_core: [],
part_module: [part_extend, part_core]
}
phony = {
'unique': ['clean', 'install', 'uninstall'],
'target': ['before', 'after', 'clean']
}
# Add include paths for all dependencies
for fname in flags:
flags[fname]['inc'].append('-I' + path_names[fname] + '/include')
for d in dependencies[fname]:
flags[fname]['inc'].append('-I' + path_names[d] + '/include')
default_target = 'release'
targets = ['debug', 'release']
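# Illustrative sketch (not part of the generator): after the dependency loop
# above, each part's 'inc' list holds its own include directory plus those of
# its dependencies, e.g.
#   flags[part_main]['inc']
#   # -> ['-Ilua-5.3.3/src', '-Imain/include',
#   #     '-Iruntime/include', '-Iextend/include', '-Icore/include']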
def createTarget(f, target, files, modules):
f.write('{0}: before_{0} lua_{0} {3}_{0} $({2}_OBJ_{1}) after_{0}\n\n'.format(target, target.upper(), part_module.upper(), relate_names[part_main]))
beforeTarget(f, target)
luaTarget(f, target)
libTarget(f, target, part_runtime)
libTarget(f, target, part_extend)
libTarget(f, target, part_core)
for m in modules:
moduleTarget(f, target, m, modules[m])
outTarget(f, target)
for ft in files:
for filedict in files[ft]:
srcPath = filedict['src']
objPath = filedict['obj']
cxxFileTarget(f, target, srcPath, objPath, ft)
#if ft == part_module:
# ldFileTarget(f, target, filedict['name'], objPath, ft)
f.write('\n')
afterTarget(f, target)
cleanTarget(f, target)
def beforeTarget(f, target):
f.write('before_' + target + ':\n')
for c in path_names:
f.write('\ttest -d $(' + c.upper() + '_OUT_' + target.upper() + ') || mkdir -p $(' + c.upper() + '_OUT_' + target.upper() + ')\n')
f.write('\ttest -d $(' + c.upper() + '_OBJDIR_' + target.upper() + ') || mkdir -p $(' + c.upper() + '_OBJDIR_' + target.upper() + ')\n')
f.write('\n')
def luaTarget(f, target):
f.write('lua_{0}: lua\n'.format(target))
f.write('\tcp -u {2} $({1}_OUT_{0})\n'.format(target.upper(), part_main.upper(), os.path.join(lua_src_path, lua_lib)))
f.write('\tcd $({1}_OUT_{0}) && ln -sf {2} {3}\n'.format(target.upper(), part_main.upper(), lua_lib, lua_lib_ln))
f.write('\n')
def libTarget(f, target, part_name):
str_dep = ''
str_inc = ''
for dep_n in dependencies[part_name]:
str_dep += '{1}_{0} '.format(target, relate_names[dep_n])
#str_inc += '-l' + inc_names[part_name] + ' '
f.write(('{3}_{0}: before_{0} ' + str_dep + ' $({2}_OBJ_{1})\n').format(target, target.upper(), part_name.upper(), relate_names[part_name]))
f.write(('\t$(LD) $({1}_LIBDIR_{0}) -o $({1}_OUTFILE_{0}) $({1}_OBJ_{0}) ' + str_inc + ' $({1}_LDFLAGS_{0}),{2} $({1}_LIB_{0})\n').format(target.upper(), part_name.upper(), built_names[part_name]))
f.write('\n')
def outTarget(f, target):
str_dep = ''
str_inc = ''
for dep_n in dependencies[part_main]:
str_dep += '{1}_{0} '.format(target, relate_names[dep_n])
str_inc += '-l' + inc_names[dep_n] + ' '
f.write(('{3}_{0}: before_{0} ' + str_dep + ' $({2}_OBJ_{1})\n').format(target, target.upper(), part_main.upper(), relate_names[part_main]))
f.write(('\t$(LD) $({1}_LIBDIR_{0}) -o $({1}_OUTFILE_{0}) $({1}_OBJ_{0}) ' + str_inc + ' $({1}_LDFLAGS_{0}) $({1}_LIB_{0})\n').format(target.upper(), part_main.upper()))
f.write('\n')
def cxxFileTarget(f, target, src_file, obj_file, part):
f.write('$({1}_OBJDIR_{0})/{3}: {4}/src/{2}\n'.format(target.upper(), part.upper(), src_file, obj_file, path_names[part]))
f.write('\t$(CXX) $({1}_CFLAGS_{0}) $({1}_INC_{0}) -c {4}/src/{2} -o $({1}_OBJDIR_{0})/{3}\n'.format(target.upper(), part.upper(), src_file, obj_file, path_names[part]))
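# Illustrative sketch (not part of the generator): for target 'release',
# part 'core' and a hypothetical source file 'Foo.cpp', cxxFileTarget emits a
# rule of the form (the recipe line is tab-indented in the Makefile):
#   $(CORE_OBJDIR_RELEASE)/Foo.o: core/src/Foo.cpp
#           $(CXX) $(CORE_CFLAGS_RELEASE) $(CORE_INC_RELEASE) -c core/src/Foo.cpp -o $(CORE_OBJDIR_RELEASE)/Foo.o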
def ldFileTarget(f, target, name, obj_file, part):
f.write('\t$(LD) $({1}_LIBDIR_{0}) -o $({1}_OUT_{0})/{2}.mod $({1}_OBJDIR_{0})/{3} $({1}_LDFLAGS_{0}),{2}.mod $({1}_LIB_{0})\n'.format(target.upper(), part.upper(), name, obj_file))
def moduleTarget(f, target, name, files):
str_objs = ''
for fi in files:
str_objs += ('$({1}_OBJDIR_{0})/{2} ').format(target.upper(), part_module.upper(), fi['obj'])
target_name = part_module.lower() + '_' + name.lower() + '_' + target.lower()
f.write(target_name + ': ' + str_objs + '\n')
f.write(('\t$(LD) $({1}_LIBDIR_{0}) -o $({1}_OUT_{0})/{2}.mod ' + str_objs + ' $({1}_LDFLAGS_{0}),{2}.mod $({1}_LIB_{0})\n\n').format(target.upper(), part_module.upper(), name))
def afterTarget(f, target):
f.write('after_' + target + ':\n')
f.write('\tcp test.lua $({1}_OUT_{0})/test.lua\n'.format(target.upper(), part_runtime.upper()))
f.write('\tcp run.sh $({1}_OUT_{0})/run.sh\n'.format(target.upper(), part_runtime.upper()))
f.write('\n')
def cleanTarget(f, target):
f.write('clean_' + target + ':\n')
for c in path_names:
f.write('\trm -rf $({1}_OBJDIR_{0})\n'.format(target.upper(), c.upper()))
f.write('\trm -rf $({1}_OUT_{0})\n'.format(target.upper(), c.upper()))
f.write('\n')
with open(makefile, 'w') as f:
f.write('#------------------------------------------------------------------------------#\n')
f.write('# This makefile was created with python #\n')
f.write('#------------------------------------------------------------------------------#\n')
f.write('\n')
f.write('WORKDIR = `pwd`\n')
f.write('\n')
f.write('CC = gcc\n')
f.write('CXX = g++\n')
f.write('AR = ar\n')
f.write('LD = g++\n')
f.write('WINDRES = windres\n')
f.write('\n')
# set all common definitions
for c in flags:
for item in flags[c]:
f.write(c.upper() + '_' + item.upper() + ' =')
for flag in flags[c][item]:
f.write(' ' + flag)
f.write('\n')
f.write('\n')
# built names
f.write('EXE_NAME = ' + executable + '\n')
f.write('MOD_NAME = ' + extend_library_name + '\n')
f.write('RUN_NAME = ' + runtime_library_name + '\n')
f.write('CORE_NAME = ' + core_library_name + '\n')
f.write('\n')
# Target definitions
for t in targets:
targetName = t[0].upper() + t[1:].lower()
for c in flags:
for item in flags[c]:
f.write(c.upper() + '_' + item.upper() + '_' + t.upper() + ' =')
if t.lower() == 'release' and item.lower() == 'ldflags':
f.write(' -s')
f.write(' $(' + c.upper() + '_' + item.upper() + ')')
if t.lower() == 'debug' and item.lower() == 'cflags':
f.write(' -g')
elif t.lower() == 'release' and item.lower() == 'cflags':
f.write(' -O2')
if item.lower() == 'ldflags' and c == part_main:
f.write(' -Lbin/' + targetName + ' -l' +extend_library + ' -llua')
#if item.lower() == 'ldflags' and c == part_extend:
# f.write(' -Lbin/' + targetName + ' -llua')
f.write('\n')
f.write(c.upper() + '_OBJDIR_' + t.upper() + ' = obj/' + targetName + '/' + c + '\n')
if c != part_module:
f.write(c.upper() + '_OUT_' + t.upper() + ' = bin/' + targetName + '\n')
else:
f.write(c.upper() + '_OUT_' + t.upper() + ' = bin/' + targetName + '/modules\n')
f.write(c.upper() + '_OUTFILE_' + t.upper() + ' = bin/' + targetName + '/' + built_names[c] + '\n')
f.write('\n')
f.write('\n')
# Find all files recursively
src_files = {}
def checkSrcPath(root, path, files):
lpath = root if path is None else os.path.join(root, path)
for o in os.listdir(lpath):
if os.path.isfile(os.path.join(lpath, o)) and o.rfind('.cpp') == len(o) - 4:
on = o.replace('.cpp', '')
files.append({
'name': on,
'path': "" if path is None else path,
'src': o if path is None else os.path.join(path, o),
'obj': on + ".o"
})
if os.path.isdir(os.path.join(lpath, o)):
dpath = o if path is None else os.path.join(path, o)
checkSrcPath(root, dpath, files)
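# Illustrative sketch (not part of the generator): for a hypothetical file
# core/src/util/Foo.cpp, checkSrcPath('core/src', None, files) appends
#   {'name': 'Foo', 'path': 'util', 'src': 'util/Foo.cpp', 'obj': 'Foo.o'}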
# Find all files for built and libs
for pn in path_names:
src_files[pn] = []
checkSrcPath(os.path.join(path_names[pn], 'src'), None, src_files[pn])
# find structure for all modules
module_names = {}
lpath = os.path.join(path_names[part_module], 'src')
for o in os.listdir(lpath):
if os.path.isfile(os.path.join(lpath, o)) and o.rfind('.cpp') == len(o) - 4:
on = o.replace('.cpp', '')
module_names[on] = [{
'name': on,
'path': lpath,
'src': os.path.join(lpath, o),
'obj': on + ".o"
}]
if os.path.isdir(os.path.join(lpath, o)):
module_names[o] = []
checkSrcPath(lpath, o, module_names[o])
for t in targets:
for c in src_files:
f.write(c.upper() + '_OBJ_' + t.upper() + ' =')
objdir = '$(' + c.upper() + '_OBJDIR_' + t.upper() + ')/'
if c != part_module:
for sf in src_files[c]:
f.write(' ' + objdir + sf['obj'])
f.write('\n')
else:
for mn in module_names:
f.write(' ' + c.lower() + '_' + mn.lower() + '_' + t.lower())
f.write('\n')
f.write('\n')
f.write('\n')
createTarget(f, default_target, src_files, module_names)
for t in targets:
if t == default_target:
continue
createTarget(f, t, src_files, module_names)
f.write('lua:\n')
f.write('\tcd lua-5.3.3 && $(MAKE) linux\n\n')
f.write('clean:')
for t in targets:
f.write(' clean_' + t)
f.write('\n\n')
f.write('install:\n')
f.write('\ttest -d bin/Release || exit 1\n')
f.write('\ttest -d /usr/local/bin || mkdir -p /usr/local/bin\n')
f.write('\tcp bin/Release/$(EXE_NAME) /usr/local/bin/$(EXE_NAME)\n')
f.write('\tchmod +x /usr/local/bin/$(EXE_NAME)\n')
f.write('\ttest -d /usr/local/lib || mkdir -p /usr/local/lib\n')
f.write('\tcp bin/Release/*.so /usr/local/lib/\n')
f.write('\tldconfig\n')
f.write('\tcp -R bin/Release/modules /usr/local/lib/chimera-modules\n')
f.write('\ttest -d /etc/chimera || mkdir -p /etc/chimera\n')
f.write('\techo "LogLevel=Error" > /etc/chimera/solver.ini\n')
f.write('\techo "" >> /etc/chimera/solver.ini\n')
f.write('\techo "[Filesystem]" >> /etc/chimera/solver.ini\n')
f.write('\techo "type=filesystem" >> /etc/chimera/solver.ini\n')
f.write('\techo "path=/usr/local/lib/chimera-modules" >> /etc/chimera/solver.ini\n')
f.write('\n')
f.write('uninstall:\n')
f.write('\trm -rf /usr/local/bin/chimera\n')
f.write('\trm -rf /usr/local/lib/chimera-modules\n')
f.write('\trm -rf /usr/local/lib/libchimera.so\n')
f.write('\trm -rf /usr/local/lib/liblua.so\n')
f.write('\trm -rf /usr/local/lib/libmodule.so\n')
f.write('\trm -rf /usr/local/lib/libruntime.so\n')
f.write('\tldconfig\n')
f.write('\n')
f.write('.PHONY:')
for pt in phony['unique']:
f.write(' ' + pt)
for t in targets:
for pt in phony['target']:
f.write(' ' + pt + '_' + t)
|
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import ad_group_feed
from google.ads.googleads.v8.services.types import ad_group_feed_service
from google.rpc import status_pb2 # type: ignore
from .transports.base import AdGroupFeedServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import AdGroupFeedServiceGrpcTransport
class AdGroupFeedServiceClientMeta(type):
"""Metaclass for the AdGroupFeedService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[AdGroupFeedServiceTransport]]
_transport_registry["grpc"] = AdGroupFeedServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[AdGroupFeedServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class AdGroupFeedServiceClient(metaclass=AdGroupFeedServiceClientMeta):
"""Service to manage ad group feeds."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AdGroupFeedServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AdGroupFeedServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> AdGroupFeedServiceTransport:
"""Return the transport used by the client instance.
Returns:
AdGroupFeedServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def ad_group_path(customer_id: str, ad_group_id: str,) -> str:
"""Return a fully-qualified ad_group string."""
return "customers/{customer_id}/adGroups/{ad_group_id}".format(
customer_id=customer_id, ad_group_id=ad_group_id,
)
@staticmethod
def parse_ad_group_path(path: str) -> Dict[str, str]:
"""Parse a ad_group path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/adGroups/(?P<ad_group_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def ad_group_feed_path(
customer_id: str, ad_group_id: str, feed_id: str,
) -> str:
"""Return a fully-qualified ad_group_feed string."""
return "customers/{customer_id}/adGroupFeeds/{ad_group_id}~{feed_id}".format(
customer_id=customer_id, ad_group_id=ad_group_id, feed_id=feed_id,
)
@staticmethod
def parse_ad_group_feed_path(path: str) -> Dict[str, str]:
"""Parse a ad_group_feed path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/adGroupFeeds/(?P<ad_group_id>.+?)~(?P<feed_id>.+?)$",
path,
)
return m.groupdict() if m else {}
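# Illustrative sketch (not part of the generated client): the path helpers
# build and parse resource names, e.g.
#   ad_group_feed_path("123", "456", "789")
#   # -> "customers/123/adGroupFeeds/456~789"
#   parse_ad_group_feed_path("customers/123/adGroupFeeds/456~789")
#   # -> {"customer_id": "123", "ad_group_id": "456", "feed_id": "789"}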
@staticmethod
def feed_path(customer_id: str, feed_id: str,) -> str:
"""Return a fully-qualified feed string."""
return "customers/{customer_id}/feeds/{feed_id}".format(
customer_id=customer_id, feed_id=feed_id,
)
@staticmethod
def parse_feed_path(path: str) -> Dict[str, str]:
"""Parse a feed path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/feeds/(?P<feed_id>.+?)$", path
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, AdGroupFeedServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the ad group feed service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.AdGroupFeedServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
util.strtobool(
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
)
)
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, AdGroupFeedServiceTransport):
# transport is an AdGroupFeedServiceTransport instance.
if credentials:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = AdGroupFeedServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def get_ad_group_feed(
self,
request: ad_group_feed_service.GetAdGroupFeedRequest = None,
*,
resource_name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> ad_group_feed.AdGroupFeed:
r"""Returns the requested ad group feed in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.GetAdGroupFeedRequest`):
The request object. Request message for
[AdGroupFeedService.GetAdGroupFeed][google.ads.googleads.v8.services.AdGroupFeedService.GetAdGroupFeed].
resource_name (:class:`str`):
Required. The resource name of the ad
group feed to fetch.
This corresponds to the ``resource_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.resources.types.AdGroupFeed:
An ad group feed.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([resource_name]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in an ad_group_feed_service.GetAdGroupFeedRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, ad_group_feed_service.GetAdGroupFeedRequest):
request = ad_group_feed_service.GetAdGroupFeedRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if resource_name is not None:
request.resource_name = resource_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.get_ad_group_feed
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("resource_name", request.resource_name),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
def mutate_ad_group_feeds(
self,
request: ad_group_feed_service.MutateAdGroupFeedsRequest = None,
*,
customer_id: str = None,
operations: Sequence[ad_group_feed_service.AdGroupFeedOperation] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> ad_group_feed_service.MutateAdGroupFeedsResponse:
r"""Creates, updates, or removes ad group feeds. Operation statuses
are returned.
List of thrown errors: `AdGroupFeedError <>`__
`AuthenticationError <>`__ `AuthorizationError <>`__
`CollectionSizeError <>`__ `DatabaseError <>`__
`DistinctError <>`__ `FieldError <>`__ `FunctionError <>`__
`FunctionParsingError <>`__ `HeaderError <>`__ `IdError <>`__
`InternalError <>`__ `MutateError <>`__ `NotEmptyError <>`__
`NullError <>`__ `OperatorError <>`__ `QuotaError <>`__
`RangeError <>`__ `RequestError <>`__ `SizeLimitError <>`__
`StringFormatError <>`__ `StringLengthError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.MutateAdGroupFeedsRequest`):
The request object. Request message for
[AdGroupFeedService.MutateAdGroupFeeds][google.ads.googleads.v8.services.AdGroupFeedService.MutateAdGroupFeeds].
customer_id (:class:`str`):
Required. The ID of the customer
whose ad group feeds are being modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (:class:`Sequence[google.ads.googleads.v8.services.types.AdGroupFeedOperation]`):
Required. The list of operations to
perform on individual ad group feeds.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.services.types.MutateAdGroupFeedsResponse:
Response message for an ad group feed
mutate.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([customer_id, operations]):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in an ad_group_feed_service.MutateAdGroupFeedsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, ad_group_feed_service.MutateAdGroupFeedsRequest
):
request = ad_group_feed_service.MutateAdGroupFeedsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.mutate_ad_group_feeds
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
__all__ = ("AdGroupFeedServiceClient",)
|
|
"""The nut component."""
from homeassistant.components.sensor import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_TEMPERATURE,
)
from homeassistant.const import (
ELECTRICAL_CURRENT_AMPERE,
ELECTRICAL_VOLT_AMPERE,
FREQUENCY_HERTZ,
POWER_WATT,
TEMP_CELSIUS,
TIME_SECONDS,
UNIT_PERCENTAGE,
VOLT,
)
DOMAIN = "nut"
PLATFORMS = ["sensor"]
UNDO_UPDATE_LISTENER = "undo_update_listener"
DEFAULT_NAME = "NUT UPS"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 3493
KEY_STATUS = "ups.status"
KEY_STATUS_DISPLAY = "ups.status.display"
COORDINATOR = "coordinator"
DEFAULT_SCAN_INTERVAL = 60
PYNUT_DATA = "data"
PYNUT_UNIQUE_ID = "unique_id"
PYNUT_MANUFACTURER = "manufacturer"
PYNUT_MODEL = "model"
PYNUT_FIRMWARE = "firmware"
PYNUT_NAME = "name"
SENSOR_TYPES = {
"ups.status.display": ["Status", "", "mdi:information-outline", None],
"ups.status": ["Status Data", "", "mdi:information-outline", None],
"ups.alarm": ["Alarms", "", "mdi:alarm", None],
"ups.temperature": [
"UPS Temperature",
TEMP_CELSIUS,
"mdi:thermometer",
DEVICE_CLASS_TEMPERATURE,
],
"ups.load": ["Load", UNIT_PERCENTAGE, "mdi:gauge", None],
"ups.load.high": ["Overload Setting", UNIT_PERCENTAGE, "mdi:gauge", None],
"ups.id": ["System identifier", "", "mdi:information-outline", None],
"ups.delay.start": ["Load Restart Delay", TIME_SECONDS, "mdi:timer", None],
"ups.delay.reboot": ["UPS Reboot Delay", TIME_SECONDS, "mdi:timer", None],
"ups.delay.shutdown": ["UPS Shutdown Delay", TIME_SECONDS, "mdi:timer", None],
"ups.timer.start": ["Load Start Timer", TIME_SECONDS, "mdi:timer", None],
"ups.timer.reboot": ["Load Reboot Timer", TIME_SECONDS, "mdi:timer", None],
"ups.timer.shutdown": ["Load Shutdown Timer", TIME_SECONDS, "mdi:timer", None],
"ups.test.interval": ["Self-Test Interval", TIME_SECONDS, "mdi:timer", None],
"ups.test.result": ["Self-Test Result", "", "mdi:information-outline", None],
"ups.test.date": ["Self-Test Date", "", "mdi:calendar", None],
"ups.display.language": ["Language", "", "mdi:information-outline", None],
"ups.contacts": ["External Contacts", "", "mdi:information-outline", None],
"ups.efficiency": ["Efficiency", UNIT_PERCENTAGE, "mdi:gauge", None],
"ups.power": ["Current Apparent Power", ELECTRICAL_VOLT_AMPERE, "mdi:flash", None],
"ups.power.nominal": ["Nominal Power", ELECTRICAL_VOLT_AMPERE, "mdi:flash", None],
"ups.realpower": [
"Current Real Power",
POWER_WATT,
"mdi:flash",
DEVICE_CLASS_POWER,
],
"ups.realpower.nominal": [
"Nominal Real Power",
POWER_WATT,
"mdi:flash",
DEVICE_CLASS_POWER,
],
"ups.beeper.status": ["Beeper Status", "", "mdi:information-outline", None],
"ups.type": ["UPS Type", "", "mdi:information-outline", None],
"ups.watchdog.status": ["Watchdog Status", "", "mdi:information-outline", None],
"ups.start.auto": ["Start on AC", "", "mdi:information-outline", None],
"ups.start.battery": ["Start on Battery", "", "mdi:information-outline", None],
"ups.start.reboot": ["Reboot on Battery", "", "mdi:information-outline", None],
"ups.shutdown": ["Shutdown Ability", "", "mdi:information-outline", None],
"battery.charge": [
"Battery Charge",
UNIT_PERCENTAGE,
"mdi:gauge",
DEVICE_CLASS_BATTERY,
],
"battery.charge.low": ["Low Battery Setpoint", UNIT_PERCENTAGE, "mdi:gauge", None],
"battery.charge.restart": [
"Minimum Battery to Start",
UNIT_PERCENTAGE,
"mdi:gauge",
None,
],
"battery.charge.warning": [
"Warning Battery Setpoint",
UNIT_PERCENTAGE,
"mdi:gauge",
None,
],
"battery.charger.status": ["Charging Status", "", "mdi:information-outline", None],
"battery.voltage": ["Battery Voltage", VOLT, "mdi:flash", None],
"battery.voltage.nominal": ["Nominal Battery Voltage", VOLT, "mdi:flash", None],
"battery.voltage.low": ["Low Battery Voltage", VOLT, "mdi:flash", None],
"battery.voltage.high": ["High Battery Voltage", VOLT, "mdi:flash", None],
"battery.capacity": ["Battery Capacity", "Ah", "mdi:flash", None],
"battery.current": [
"Battery Current",
ELECTRICAL_CURRENT_AMPERE,
"mdi:flash",
None,
],
"battery.current.total": [
"Total Battery Current",
ELECTRICAL_CURRENT_AMPERE,
"mdi:flash",
None,
],
"battery.temperature": [
"Battery Temperature",
TEMP_CELSIUS,
"mdi:thermometer",
DEVICE_CLASS_TEMPERATURE,
],
"battery.runtime": ["Battery Runtime", TIME_SECONDS, "mdi:timer", None],
"battery.runtime.low": ["Low Battery Runtime", TIME_SECONDS, "mdi:timer", None],
"battery.runtime.restart": [
"Minimum Battery Runtime to Start",
TIME_SECONDS,
"mdi:timer",
None,
],
"battery.alarm.threshold": [
"Battery Alarm Threshold",
"",
"mdi:information-outline",
None,
],
"battery.date": ["Battery Date", "", "mdi:calendar", None],
"battery.mfr.date": ["Battery Manuf. Date", "", "mdi:calendar", None],
"battery.packs": ["Number of Batteries", "", "mdi:information-outline", None],
"battery.packs.bad": [
"Number of Bad Batteries",
"",
"mdi:information-outline",
None,
],
"battery.type": ["Battery Chemistry", "", "mdi:information-outline", None],
"input.sensitivity": [
"Input Power Sensitivity",
"",
"mdi:information-outline",
None,
],
"input.transfer.low": ["Low Voltage Transfer", VOLT, "mdi:flash", None],
"input.transfer.high": ["High Voltage Transfer", VOLT, "mdi:flash", None],
"input.transfer.reason": [
"Voltage Transfer Reason",
"",
"mdi:information-outline",
None,
],
"input.voltage": ["Input Voltage", VOLT, "mdi:flash", None],
"input.voltage.nominal": ["Nominal Input Voltage", VOLT, "mdi:flash", None],
"input.frequency": ["Input Line Frequency", FREQUENCY_HERTZ, "mdi:flash", None],
"input.frequency.nominal": [
"Nominal Input Line Frequency",
FREQUENCY_HERTZ,
"mdi:flash",
None,
],
"input.frequency.status": [
"Input Frequency Status",
"",
"mdi:information-outline",
None,
],
"output.current": ["Output Current", ELECTRICAL_CURRENT_AMPERE, "mdi:flash", None],
"output.current.nominal": [
"Nominal Output Current",
ELECTRICAL_CURRENT_AMPERE,
"mdi:flash",
None,
],
"output.voltage": ["Output Voltage", VOLT, "mdi:flash", None],
"output.voltage.nominal": ["Nominal Output Voltage", VOLT, "mdi:flash", None],
"output.frequency": ["Output Frequency", FREQUENCY_HERTZ, "mdi:flash", None],
"output.frequency.nominal": [
"Nominal Output Frequency",
FREQUENCY_HERTZ,
"mdi:flash",
None,
],
}
STATE_TYPES = {
"OL": "Online",
"OB": "On Battery",
"LB": "Low Battery",
"HB": "High Battery",
"RB": "Battery Needs Replaced",
"CHRG": "Battery Charging",
"DISCHRG": "Battery Discharging",
"BYPASS": "Bypass Active",
"CAL": "Runtime Calibration",
"OFF": "Offline",
"OVER": "Overloaded",
"TRIM": "Trimming Voltage",
"BOOST": "Boosting Voltage",
"FSD": "Forced Shutdown",
"ALARM": "Alarm",
}
SENSOR_NAME = 0
SENSOR_UNIT = 1
SENSOR_ICON = 2
SENSOR_DEVICE_CLASS = 3
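# Illustrative sketch (not part of the component): each SENSOR_TYPES entry is
# indexed with the constants above, e.g. for the "ups.load" sensor:
#   SENSOR_TYPES["ups.load"][SENSOR_NAME]          # -> "Load"
#   SENSOR_TYPES["ups.load"][SENSOR_UNIT]          # -> UNIT_PERCENTAGE
#   SENSOR_TYPES["ups.load"][SENSOR_ICON]          # -> "mdi:gauge"
#   SENSOR_TYPES["ups.load"][SENSOR_DEVICE_CLASS]  # -> None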
|
|
#!/usr/bin/python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import ctypes
import os
import socket
import sys
class SnifferTransport(object):
""" Interface for transport that allows eavesdrop other nodes. """
def open(self):
""" Open transport.
Raises:
RuntimeError: when transport is already opened or when transport opening failed.
"""
raise NotImplementedError
def close(self):
""" Close transport.
Raises:
RuntimeError: when transport is already closed.
"""
raise NotImplementedError
@property
def is_opened(self):
""" Check if transport is opened.
Returns:
bool: True if the transport is opened, False otherwise.
"""
raise NotImplementedError
def send(self, data, nodeid):
""" Send data to the node with nodeid.
Args:
data (bytearray): outgoing data.
nodeid (int): node id
Returns:
int: number of sent bytes
"""
raise NotImplementedError
def recv(self, bufsize):
""" Receive data sent by other node.
Args:
bufsize (int): size of buffer for incoming data.
Returns:
A tuple containing the data and the node id.
For example:
(bytearray([0x00, 0x01, ...]), 1)
"""
raise NotImplementedError
class SnifferSocketTransport(SnifferTransport):
""" Socket based implementation of sniffer transport. """
BASE_PORT = 9000
WELLKNOWN_NODE_ID = 34
PORT_OFFSET = int(os.getenv('PORT_OFFSET', "0"))
def __init__(self, nodeid):
self._nodeid = nodeid
self._socket = None
def __del__(self):
if not self.is_opened:
return
self.close()
def _nodeid_to_address(self, nodeid, ip_address=""):
return ip_address, self.BASE_PORT + (self.PORT_OFFSET * self.WELLKNOWN_NODE_ID) + nodeid
def _address_to_nodeid(self, address):
_, port = address
return (port - self.BASE_PORT - (self.PORT_OFFSET * self.WELLKNOWN_NODE_ID))
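# Illustrative sketch (not part of the transport): with the defaults above
# (BASE_PORT = 9000, WELLKNOWN_NODE_ID = 34 and PORT_OFFSET = 0 when the
# environment variable is unset), node 3 maps to UDP port 9000 + 0*34 + 3:
#   transport = SnifferSocketTransport(nodeid=3)
#   transport._nodeid_to_address(3)            # -> ("", 9003)
#   transport._address_to_nodeid(("", 9003))   # -> 3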
def open(self):
if self.is_opened:
raise RuntimeError("Transport is already opened.")
self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if not self.is_opened:
raise RuntimeError("Transport opening failed.")
self._socket.bind(self._nodeid_to_address(self._nodeid))
def close(self):
if not self.is_opened:
raise RuntimeError("Transport is closed.")
self._socket.close()
self._socket = None
@property
def is_opened(self):
return bool(self._socket is not None)
def send(self, data, nodeid):
address = self._nodeid_to_address(nodeid)
return self._socket.sendto(data, address)
def recv(self, bufsize):
data, address = self._socket.recvfrom(bufsize)
nodeid = self._address_to_nodeid(address)
return bytearray(data), nodeid
class MacFrame(ctypes.Structure):
_fields_ = [("buffer", ctypes.c_ubyte * 128),
("length", ctypes.c_ubyte),
("nodeid", ctypes.c_uint)]
class SnifferVirtualTransport(SnifferTransport):
""" Virtual interface based implementation of sniffer transport. """
def __init__(self, nodeid):
self.Handle = None
# Load the DLL
self.Api = ctypes.WinDLL("otnodeapi.dll")
if self.Api is None:
raise OSError("Failed to load otnodeapi.dll!")
# Define the functions
self.Api.otListenerInit.argtypes = [ctypes.c_uint]
self.Api.otListenerInit.restype = ctypes.c_void_p
self.Api.otListenerFinalize.argtypes = [ctypes.c_void_p]
self.Api.otListenerRead.argtypes = [ctypes.c_void_p, ctypes.POINTER(MacFrame)]
def __del__(self):
if not self.is_opened:
return
self.close()
def open(self):
if self.is_opened:
raise RuntimeError("Transport is already opened.")
# Initialize a listener
self.Handle = self.Api.otListenerInit(0)
if not self.is_opened:
raise RuntimeError("Transport opening failed.")
def close(self):
if not self.is_opened:
raise RuntimeError("Transport is closed.")
self.Api.otListenerFinalize(self.Handle)
self.Handle = None
@property
def is_opened(self):
return bool(self.Handle is not None)
def recv(self, bufsize):
frame = MacFrame()
pFrame = ctypes.pointer(frame)
self.Api.otListenerRead(self.Handle, pFrame)
return bytearray(frame.buffer)[:frame.length], frame.nodeid
class SnifferTransportFactory(object):
def create_transport(self, nodeid):
if sys.platform != "win32":
return SnifferSocketTransport(nodeid)
else:
return SnifferVirtualTransport(nodeid)
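# A minimal usage sketch (assuming a POSIX host, where the factory returns the
# socket-based transport, and that another simulated node is sending frames):
#   factory = SnifferTransportFactory()
#   transport = factory.create_transport(nodeid=1)
#   transport.open()
#   data, nodeid = transport.recv(4096)   # blocks until a datagram arrives
#   transport.close()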
|
|
"""This module is part of the EDA(essential dynamics analysis) program, it
provides several classes to perform the EDA.
Classes:
EDAnalysis -> It is the main class, provides an easy interface to
carry out ED
WrongModeException -> Specific Exception class that indicates a misuse
of the EDA program
"""
import copy
import Bio.PDB as pdb
import numpy as np
import matplotlib
matplotlib.use("TkAgg")
from matplotlib import pyplot as plt
from scipy import linalg
class EDAnalysis:
"""Implements an essential dynamics analysis(EDA).
Methods:
__init__ -> Set the basic attributes of the EDAnalysis
get_id -> Return the id of the object (as PDB code)
get_mode -> Return the mode of the object
get_atom -> Return the atom list chosen for the analysis
superimpose_models -> Superimpose two or more structures
createcordsarray -> Calculate the coordinates for the different models
cal_cov -> Calculate the covariance matrix
plot_eig -> Plot the values of the principal eigenvectors of the
covariance matrix
plot_eig_wosv -> Same as plot_eig but adapted to some requirements of
the Tkinter GUI
is_NMR_struct -> Check if the loaded structure was obtained by NMR
check_mode -> Check if the selected mode is consistent with the data
move_structure -> Move the structure along a certain eigenvector
RMSD_res_plot -> Create a plot with the distance of the residues along
some eigenvectors
Attributes:
PDBid(private) -> String with the PDB code of the structure analyzed
mode(private) -> String with the mode of the analysis (for now NMR or MD)
atom(private) -> List with the atoms to study
structure -> Bio.PDB.Structure object on which the analysis is performed
reference -> Bio.PDB.Structure object with an X-ray structure
that will serve as a reference model, this should only be used
with the MD mode
n -> Number of models or trajectories (in NMR or MD modes,respectively)
N -> Number of coordinates of the analysis
coords_array -> nxN Numpy array with the N coordinates for the n models
means -> 1xN Numpy array with the means of the N coordinates over the n
models
C -> NxN NumPy array that contains the covariance matrix
eigvc -> Numpy array with the eigenvectors of the covariance matrix
eigvl -> Numpy array with the eigenvalues of the covariance matrix
"""
def __init__(self, PDBid, mode, atom, pdbfile, reference=None):
"""Set the basic attributes of the EDAnalysis class.
Set the basic attributes of the EDAnalysis class, calculate the number
of coordinates of interest, the number of models, load the PDB file
with the structure to analyze and check the consistency of the data
provided by the user
Args:
PDBid is a string with a PDB code
mode is a string with value NMR or MD
atom is a list with the particular atoms to focus on the analysis
pdbfile is the path to pdbfile to load
"""
self.__PDBid = PDBid
self.__mode = mode
self.__atom = atom
if reference is not None:
if self.__mode != 'MD':
raise WrongModeException(self.__mode)
self.reference = reference
parser = pdb.PDBParser(QUIET=True)
self.structure = parser.get_structure(self.__PDBid, pdbfile)
N = 0
# Calculate the number of residues that are aa
for residue in self.structure[0].get_residues():
if residue.has_id('CA'):
if self.__atom == []:
N += len(list(residue.get_atom()))
else:
N += 1
# Calculate the number of atoms to study, this will depend on which
# atoms the user wants to focus on (CA, backbone or all)
if self.__atom != []:
N *= len(self.__atom)*3
else:
N *= 3
# Calculate the number of configurations available, in NMR this is the
# number of models, in MD the number of time instants
self.n = len(self.structure)
self.N = N
self.check_mode()
def get_id(self):
"""Return the id of the EDAnalysis as a string."""
return self.__PDBid
def get_mode(self):
"""Return the selected mode of the EDAnalysis as a string."""
return self.__mode
def get_atom(self):
"""Return the list of atoms of interest to the EDAnalysis."""
return self.__atom
def superimpose_models(self, reference_model=0):
"""Superimpose two or more structures.
Superimpose two or more structures by using the Bio.PDB.Superimposer
class.
Args:
reference_model is an int with the reference model (default=0)
"""
ref_model = self.structure[reference_model]
if self.reference is not None:
ref_model = self.reference[0]
for alt_model in self.structure:
ref_atoms = []
alt_atoms = []
# Iterate over the chains and residues to collect all atoms of interest
# for the analysis and superimpose the structures using them
for (ref_chain, alt_chain) in zip(ref_model, alt_model):
for ref_res, alt_res in \
zip(ref_chain, alt_chain):
# assert ref_res.resname == alt_res.resname, \
# "{:s} is not equal to {:s}".format(ref_res.resname,
# alt_res.resname)
# assert ref_res.id == alt_res.id, \
# "{:s} is not equal to {:s}".format(ref_res.id,
# alt_res.id)
# CA = alpha carbon
if ref_res.has_id('CA'):
if self.__atom == []:
ref_atoms.extend(list(ref_res.get_atom()))
alt_atoms.extend(list(alt_res.get_atom()))
else:
for atoms in self.__atom:
try:
ref_atoms.append(ref_res[atoms])
alt_atoms.append(alt_res[atoms])
except KeyError:
raise KeyError(('Your input data is '
'missing information for '
'{:s} atoms. Input more '
'complete data or select a'
' smaller set of '
'atoms').format(atoms))
# Align these paired atom lists:
super_imposer = pdb.Superimposer()
super_imposer.set_atoms(ref_atoms, alt_atoms)
if ref_model.get_full_id() == alt_model.get_full_id():
# Check for self/self get zero RMS, zero translation
# and identity matrix for the rotation.
assert np.abs(super_imposer.rms) < 0.0000001
assert np.max(np.abs(super_imposer.rotran[1])) < 0.000001
assert np.max(np.abs(super_imposer.rotran[0]) -
np.identity(3)) < 0.000001
else:
# Update the structure by moving all the atoms in
# this model (not just the ones used for the alignment)
super_imposer.apply(alt_model.get_atoms())
def createcordsarray(self):
"""Calculate the coordinates for the diferent models."""
array_stored = np.zeros((self.n, self.N))
means = np.zeros((1, self.N))
i = 0
for model in self.structure:
j = 0
for residue in model.get_residues():
if residue.has_id('CA'):
if self.__atom != []:
for atoms in self.__atom:
try:
array_stored[i][j:j+3] = residue[atoms].get_coord()
means[0][j:j+3] += residue[atoms].get_coord()
j += 3
except KeyError:
raise KeyError(('Your input data is missing '
'information for {:s} atoms'
'. Input more complete data '
'or select a smaller set of '
'atoms').format(atoms))
else:
for atoms in residue.get_atom():
array_stored[i][j:j+3] = residue[atoms.id].get_coord()
means[0][j:j+3] += residue[atoms.id].get_coord()
j += 3
i += 1
means *= (1/self.n)
self.coords_array = array_stored
self.means = means
def cal_cov(self):
"""Calculate the covariance matrix.
Calculate the covariance matrix from the coordinates calculated with the
createcordsarray method, then diagonalize the covariance matrix using
scipy.linalg.eigh
"""
C = np.zeros((self.N, self.N))
for x in self.coords_array:
x_prod = x - self.means
C += np.outer(x_prod, x_prod)
C *= (1/self.n)
self.C = C
self.eigvl, self.eigvc = linalg.eigh(C)
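# Illustrative sketch (not part of the class): cal_cov computes the sample
# covariance C = (1/n) * sum_i (x_i - mean)(x_i - mean)^T over the n
# conformations, which for an n x N coordinate array X is equivalent to:
#   import numpy as np
#   X = np.random.rand(5, 9)    # hypothetical 5 models with 9 coordinates each
#   mean = X.mean(axis=0)
#   C = (X - mean).T @ (X - mean) / X.shape[0]
#   # np.allclose(C, np.cov(X, rowvar=False, bias=True)) -> True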
def plot_eig(self, n_plot, pathplots, fig=None):
"""Plot the values of the principal eigenvectors of the cov. matrix.
Args:
n_plot -> Int with the number of eigvalues to plot
pathplots -> Directory where the plot will be stored
fig -> Matplotlib figure handle, the default is None, so a new
figure handle will be created
Returns the figure handle that contains the plot
"""
if fig is None:
fig = plt.figure()
valid_evl = self.eigvl[-1:-n_plot-1:-1] / 100
plt.plot(range(1, n_plot+1), valid_evl)
plt.xlabel('Eigenvector index')
plt.ylabel('Eigenvalue ($nm^2$)')
fig.savefig(pathplots+'eig_'+self.__PDBid+'_plot.png',
bbox_inches='tight', dpi=300)
return fig
def plot_eig_wosv(self, n_plot, fig=None):
"""Essentially the same as plot_eig, just adapted to the Tkinter GUI."""
if fig is None:
fig = plt.figure()
valid_evl = self.eigvl[-1:-n_plot-1:-1] / 100
plt.plot(range(1, n_plot+1), valid_evl)
plt.xlabel('Eigenvector index')
plt.ylabel('Eigenvalue ($nm^2$)')
return fig
def is_NMR_struct(self):
"""Check wether the loaded structure is a NMR"""
return 'nmr' in self.structure.header['structure_method'].lower()
def check_mode(self):
"""Check the consistency betweent the mode chosen by the user and the
data provided.
"""
if self.__mode is None:
raise WrongModeException('empty')
if self.__mode == 'NMR' and not self.is_NMR_struct():
raise WrongModeException(self.__mode)
elif self.__mode == 'MD' and self.is_NMR_struct():
raise WrongModeException(self.__mode)
def move_structure(self, t_max, evc, pathname, t_min=0, step=0.1):
"""Move the structure along a certain eigenvector.
Project the coordinates of the structure onto an eigenvector and then
use this projection to move the structure along this eigenvector,
creating a PDB file with the now moved coordinates
Args:
t_max -> Int with the final time of the trajectory
evc -> Int with the eigenvector to use, raise a ValueError if it is
not between 1 and N
pathname -> Directory where to store the pdb files generated
t_min -> Int with initial time of the trajectory (default=0)
step -> Float with the time-step of the trajectory (default=0.1)
Return the name of the file containing the new coordinates generated
as different models
"""
if evc < 1 or evc > self.N:
raise ValueError('Eigenvector index has to be between 1 and N')
structure_moved = copy.deepcopy(self.structure)
pcord = np.dot(self.eigvc[:, -evc], (self.coords_array[0] -
self.means[0, :]))
nsteps = int((t_max - t_min) / step)
for i in range(self.n):
structure_moved.detach_child(i)
idnum = 0
for t in np.linspace(t_min, t_max, num=nsteps):
new_model = copy.deepcopy(self.structure[0])
new_model.full_id = (self.structure.id, idnum)
new_model.id = idnum
new_model.serial_num = idnum+1
idnum += 1
eig_move = t * pcord * self.eigvc[:, -evc] + self.means
j = 0
for residue in new_model.get_residues():
if residue.has_id('CA'):
if self.__atom != []:
for atoms in self.__atom:
try:
residue[atoms].set_coord(eig_move[0][j:j+3])
j += 3
except KeyError:
raise KeyError(('Your input data is missing '
'information for {:s} atoms'
'. Input more complete data '
'or select a smaller set of '
'atoms').format(atoms))
else:
for atoms in residue.get_atom():
residue[atoms.id].set_coord(eig_move[0][j:j+3])
j += 3
structure_moved.add(new_model)
filename = ''.join([pathname, self.__PDBid, '_traj_evc', str(evc),
'i.pdb'])
self.structure_moved = structure_moved
io = pdb.PDBIO()
io.set_structure(structure_moved)
# io.save(filename, ChainSelect(nsteps))
io.save(filename)
return filename
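# Illustrative usage of move_structure (hypothetical names, not original code):
# assuming `eda` is an instance on which createcordsarray() and cal_cov() have
# already been run,
#   fname = eda.move_structure(t_max=10, evc=1, pathname='output/')
# would write a multi-model PDB trajectory along the first eigenvector and
# return its filename.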
def RMSD_res_plot(self, evc, pathplots, fig=None, origin=None):
"""Create a plot with the distance of the residues along
some eigenvectors
Project the structure over some eigenvectors and calculate new
coordinates, calculate the RMSD between the new and old coordinates by
using a shifting-window RMS method (with a window of 15 residues) and
plot the RMSD against the residue number
Args:
evc -> Int with the maximum eigenvector to include in the plot
pathplots -> Directory to store the plot
fig -> Matplotlib figure handle, the default is None, so a new
figure handle will be created
Returns the figure handle that contains the plot
"""
if fig is None:
fig = plt.figure()
if evc < 1 or evc > self.N:
raise ValueError('Eigenvector index has to be between 1 and N')
step = 1
if self.__atom != []:
step = len(self.__atom)*3
nres = int(self.N / step) - 15 + 1 + 1
for evcn in range(1, evc+1):
pcord = 0
pmin = 10000000
pmax = -1000000
for ind in range(self.n):
# Look for the maximum and minimum translation
pcord = np.dot(self.eigvc[:, -evcn], (self.coords_array[ind] -
self.means[0, :]))
if pcord > pmax:
pmax = pcord
elif pcord < pmin:
pmin = pcord
eig_move_max = pmax * self.eigvc[:, -evcn] + self.means
eig_move_min = pmin * self.eigvc[:, -evcn] + self.means
pcord = np.dot(self.eigvc[:, -evcn], (self.coords_array[0] -
self.means[0, :]))
RMSD_list = np.zeros(nres)
j = 0
i = 0
for residue in self.structure[0].get_residues():
# Calculate the RMSD for each residue using the shifting-window
# RMS method with a window size of 15
if residue.has_id('CA'):
j_final = j+7*step
j_init = j-7*step
if j_final > self.N:
break
elif j_init < 0:
j += step
continue
else:
RMSDvl = eig_move_max[0][j_init:j_final] - \
eig_move_min[0][j_init:j_final]
j += step
RMSD = np.sqrt(np.sum(RMSDvl**2)/int(len(RMSDvl)/step))
RMSD_list[i] = RMSD
i += 1
plt.plot(range(7, nres+7), RMSD_list,
label="EV {:d}".format(evcn))
plt.ylabel(r'RMSD ($\AA$)')
plt.xlabel('Residue number')
plt.legend(loc='best', frameon=False)
filename = ''.join([pathplots, 'eig_', str(evc), '_', self.__PDBid,
'_resplot.png'])
if origin != 'interface':
fig.savefig(filename, bbox_inches='tight', dpi=300)
return fig
class WrongModeException(Exception):
"""Specific Exception class that indicates a misuse of the EDA program."""
def __init__(self, mode):
self.mode = mode
def __str__(self):
return ("Your mode selection is {:s}, please input an appropiate "
"structure").format(self.mode)
|
|
import socket
import telnetlib
import time
import Queue
import unittest
from unittest import TestCase
from test import test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
EOF_sigil = object()
def server(evt, serv, dataq=None):
""" Open a tcp server in three steps
1) set evt to true to let the parent know we are ready
2) [optional] if is not False, write the list of data from dataq.get()
to the socket.
"""
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
if dataq:
data = ''
new_data = dataq.get(True, 0.5)
dataq.task_done()
for item in new_data:
if item == EOF_sigil:
break
if type(item) in [int, float]:
time.sleep(item)
else:
data += item
written = conn.send(data)
data = data[written:]
conn.close()
except socket.timeout:
pass
finally:
serv.close()
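# Added commentary: the tests below drive this helper by putting a list such as
# ['x' * 10, 'match', EOF_sigil] on their data queue, connecting a
# telnetlib.Telnet client, and letting the server stream each string (sleeping
# on numeric items) until the EOF sentinel is reached and the socket closes.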
class GeneralTests(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(60) # Safety net. Look issue 11812
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=server, args=(self.evt,self.sock))
self.thread.setDaemon(True)
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
def testBasic(self):
# connects
telnet = telnetlib.Telnet(HOST, self.port)
telnet.sock.close()
def testTimeoutDefault(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
telnet = telnetlib.Telnet(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testTimeoutNone(self):
# None, having other default
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
telnet = telnetlib.Telnet(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(telnet.sock.gettimeout() is None)
telnet.sock.close()
def testTimeoutValue(self):
telnet = telnetlib.Telnet(HOST, self.port, timeout=30)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testTimeoutOpen(self):
telnet = telnetlib.Telnet()
telnet.open(HOST, self.port, timeout=30)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testGetters(self):
# Test telnet getter methods
telnet = telnetlib.Telnet(HOST, self.port, timeout=30)
t_sock = telnet.sock
self.assertEqual(telnet.get_socket(), t_sock)
self.assertEqual(telnet.fileno(), t_sock.fileno())
telnet.sock.close()
def _read_setUp(self):
self.evt = threading.Event()
self.dataq = Queue.Queue()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(10)
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=server, args=(self.evt,self.sock, self.dataq))
self.thread.start()
self.evt.wait()
def _read_tearDown(self):
self.thread.join()
class ReadTests(TestCase):
setUp = _read_setUp
tearDown = _read_tearDown
# use an approach to testing timeouts similar to the one in test_timeout.py;
# these will never be 100% reliable, but the fuzz margin is big enough that failures are rare
block_long = 0.6
block_short = 0.3
def test_read_until_A(self):
"""
read_until(expected, [timeout])
Read until the expected string has been seen, or a timeout is
hit (default is no timeout); may block.
"""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_until('match')
self.assertEqual(data, ''.join(want[:-2]))
def test_read_until_B(self):
# test the timeout - it does NOT raise socket.timeout
want = ['hello', self.block_long, 'not seen', EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_until('not seen', self.block_short)
self.assertEqual(data, want[0])
self.assertEqual(telnet.read_all(), 'not seen')
def test_read_until_with_poll(self):
"""Use select.poll() to implement telnet.read_until()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
if not telnet._has_poll:
raise unittest.SkipTest('select.poll() is required')
telnet._has_poll = True
self.dataq.join()
data = telnet.read_until('match')
self.assertEqual(data, ''.join(want[:-2]))
def test_read_until_with_select(self):
"""Use select.select() to implement telnet.read_until()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
telnet._has_poll = False
self.dataq.join()
data = telnet.read_until('match')
self.assertEqual(data, ''.join(want[:-2]))
def test_read_all_A(self):
"""
read_all()
Read all data until EOF; may block.
"""
want = ['x' * 500, 'y' * 500, 'z' * 500, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_all()
self.assertEqual(data, ''.join(want[:-1]))
return
def _test_blocking(self, func):
self.dataq.put([self.block_long, EOF_sigil])
self.dataq.join()
start = time.time()
data = func()
self.assertTrue(self.block_short <= time.time() - start)
def test_read_all_B(self):
self._test_blocking(telnetlib.Telnet(HOST, self.port).read_all)
def test_read_all_C(self):
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
telnet.read_all()
telnet.read_all() # shouldn't raise
def test_read_some_A(self):
"""
read_some()
Read at least one byte or EOF; may block.
"""
# test 'at least one byte'
want = ['x' * 500, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_all()
self.assertTrue(len(data) >= 1)
def test_read_some_B(self):
# test EOF
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
self.assertEqual('', telnet.read_some())
def test_read_some_C(self):
self._test_blocking(telnetlib.Telnet(HOST, self.port).read_some)
def _test_read_any_eager_A(self, func_name):
"""
read_very_eager()
Read all data available already queued or on the socket,
without blocking.
"""
want = [self.block_long, 'x' * 100, 'y' * 100, EOF_sigil]
expects = want[1] + want[2]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
func = getattr(telnet, func_name)
data = ''
while True:
try:
data += func()
self.assertTrue(expects.startswith(data))
except EOFError:
break
self.assertEqual(expects, data)
def _test_read_any_eager_B(self, func_name):
# test EOF
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
time.sleep(self.block_short)
func = getattr(telnet, func_name)
self.assertRaises(EOFError, func)
# read_eager and read_very_eager make the same guarantees
# (they behave differently but we only test the guarantees)
def test_read_very_eager_A(self):
self._test_read_any_eager_A('read_very_eager')
def test_read_very_eager_B(self):
self._test_read_any_eager_B('read_very_eager')
def test_read_eager_A(self):
self._test_read_any_eager_A('read_eager')
def test_read_eager_B(self):
self._test_read_any_eager_B('read_eager')
# NB -- we need to test the IAC block which is mentioned in the docstring
# but not in the module docs
def _test_read_any_lazy_B(self, func_name):
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
func = getattr(telnet, func_name)
telnet.fill_rawq()
self.assertRaises(EOFError, func)
def test_read_lazy_A(self):
want = ['x' * 100, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
time.sleep(self.block_short)
self.assertEqual('', telnet.read_lazy())
data = ''
while True:
try:
read_data = telnet.read_lazy()
data += read_data
if not read_data:
telnet.fill_rawq()
except EOFError:
break
self.assertTrue(want[0].startswith(data))
self.assertEqual(data, want[0])
def test_read_lazy_B(self):
self._test_read_any_lazy_B('read_lazy')
def test_read_very_lazy_A(self):
want = ['x' * 100, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
time.sleep(self.block_short)
self.assertEqual('', telnet.read_very_lazy())
data = ''
while True:
try:
read_data = telnet.read_very_lazy()
except EOFError:
break
data += read_data
if not read_data:
telnet.fill_rawq()
self.assertEqual('', telnet.cookedq)
telnet.process_rawq()
self.assertTrue(want[0].startswith(data))
self.assertEqual(data, want[0])
def test_read_very_lazy_B(self):
self._test_read_any_lazy_B('read_very_lazy')
class nego_collector(object):
def __init__(self, sb_getter=None):
self.seen = ''
self.sb_getter = sb_getter
self.sb_seen = ''
def do_nego(self, sock, cmd, opt):
self.seen += cmd + opt
if cmd == tl.SE and self.sb_getter:
sb_data = self.sb_getter()
self.sb_seen += sb_data
tl = telnetlib
class OptionTests(TestCase):
setUp = _read_setUp
tearDown = _read_tearDown
# RFC 854 commands
cmds = [tl.AO, tl.AYT, tl.BRK, tl.EC, tl.EL, tl.GA, tl.IP, tl.NOP]
def _test_command(self, data):
""" helper for testing IAC + cmd """
self.setUp()
self.dataq.put(data)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
nego = nego_collector()
telnet.set_option_negotiation_callback(nego.do_nego)
txt = telnet.read_all()
cmd = nego.seen
self.assertTrue(len(cmd) > 0) # we expect at least one command
self.assertIn(cmd[0], self.cmds)
self.assertEqual(cmd[1], tl.NOOPT)
self.assertEqual(len(''.join(data[:-1])), len(txt + cmd))
nego.sb_getter = None # break the nego => telnet cycle
self.tearDown()
def test_IAC_commands(self):
# reset our setup
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
self.tearDown()
for cmd in self.cmds:
self._test_command(['x' * 100, tl.IAC + cmd, 'y'*100, EOF_sigil])
self._test_command(['x' * 10, tl.IAC + cmd, 'y'*10, EOF_sigil])
self._test_command([tl.IAC + cmd, EOF_sigil])
# all at once
self._test_command([tl.IAC + cmd for (cmd) in self.cmds] + [EOF_sigil])
self.assertEqual('', telnet.read_sb_data())
def test_SB_commands(self):
# RFC 855, subnegotiations portion
send = [tl.IAC + tl.SB + tl.IAC + tl.SE,
tl.IAC + tl.SB + tl.IAC + tl.IAC + tl.IAC + tl.SE,
tl.IAC + tl.SB + tl.IAC + tl.IAC + 'aa' + tl.IAC + tl.SE,
tl.IAC + tl.SB + 'bb' + tl.IAC + tl.IAC + tl.IAC + tl.SE,
tl.IAC + tl.SB + 'cc' + tl.IAC + tl.IAC + 'dd' + tl.IAC + tl.SE,
EOF_sigil,
]
self.dataq.put(send)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
nego = nego_collector(telnet.read_sb_data)
telnet.set_option_negotiation_callback(nego.do_nego)
txt = telnet.read_all()
self.assertEqual(txt, '')
want_sb_data = tl.IAC + tl.IAC + 'aabb' + tl.IAC + 'cc' + tl.IAC + 'dd'
self.assertEqual(nego.sb_seen, want_sb_data)
self.assertEqual('', telnet.read_sb_data())
nego.sb_getter = None # break the nego => telnet cycle
class ExpectTests(TestCase):
def setUp(self):
self.evt = threading.Event()
self.dataq = Queue.Queue()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(10)
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=server, args=(self.evt,self.sock,
self.dataq))
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
# use an approach to testing timeouts similar to the one in test_timeout.py;
# these will never be 100% reliable, but the fuzz margin is big enough that failures are rare
block_long = 0.6
block_short = 0.3
def test_expect_A(self):
"""
expect(expected, [timeout])
Read until the expected string has been seen, or a timeout is
hit (default is no timeout); may block.
"""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
(_,_,data) = telnet.expect(['match'])
self.assertEqual(data, ''.join(want[:-2]))
def test_expect_B(self):
# test the timeout - it does NOT raise socket.timeout
want = ['hello', self.block_long, 'not seen', EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
(_,_,data) = telnet.expect(['not seen'], self.block_short)
self.assertEqual(data, want[0])
self.assertEqual(telnet.read_all(), 'not seen')
def test_expect_with_poll(self):
"""Use select.poll() to implement telnet.expect()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
if not telnet._has_poll:
raise unittest.SkipTest('select.poll() is required')
telnet._has_poll = True
self.dataq.join()
(_,_,data) = telnet.expect(['match'])
self.assertEqual(data, ''.join(want[:-2]))
def test_expect_with_select(self):
"""Use select.select() to implement telnet.expect()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
telnet._has_poll = False
self.dataq.join()
(_,_,data) = telnet.expect(['match'])
self.assertEqual(data, ''.join(want[:-2]))
def test_main(verbose=None):
test_support.run_unittest(GeneralTests, ReadTests, OptionTests,
ExpectTests)
if __name__ == '__main__':
test_main()
|
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields
from openerp.osv import osv
from openerp.tools.translate import _
import openerp
class hr_employee(osv.osv):
_name = "hr.employee"
_inherit = "hr.employee"
_columns = {
'product_id': fields.many2one('product.product', 'Product', help="If you want to reinvoice working time of employees, link this employee to a service to determine the cost price of the job."),
'journal_id': fields.many2one('account.analytic.journal', 'Analytic Journal'),
'uom_id': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Unit of Measure', store=True, readonly=True)
}
def _getAnalyticJournal(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
dummy, res_id = md.get_object_reference(cr, uid, 'hr_timesheet', 'analytic_journal')
#search on id found in result to check if current user has read access right
check_right = self.pool.get('account.analytic.journal').search(cr, uid, [('id', '=', res_id)], context=context)
if check_right:
return res_id
except ValueError:
pass
return False
def _getEmployeeProduct(self, cr, uid, context=None):
md = self.pool.get('ir.model.data')
try:
dummy, res_id = md.get_object_reference(cr, uid, 'product', 'product_product_consultant')
#search on id found in result to check if current user has read access right
check_right = self.pool.get('product.template').search(cr, uid, [('id', '=', res_id)], context=context)
if check_right:
return res_id
except ValueError:
pass
return False
_defaults = {
'journal_id': _getAnalyticJournal,
'product_id': _getEmployeeProduct
}
class hr_analytic_timesheet(osv.osv):
_name = "hr.analytic.timesheet"
_table = 'hr_analytic_timesheet'
_description = "Timesheet Line"
_inherits = {'account.analytic.line': 'line_id'}
_order = "id desc"
_columns = {
'line_id': fields.many2one('account.analytic.line', 'Analytic Line', ondelete='cascade', required=True),
'partner_id': fields.related('account_id', 'partner_id', type='many2one', string='Partner', relation='res.partner', store=True),
}
def unlink(self, cr, uid, ids, context=None):
toremove = {}
for obj in self.browse(cr, uid, ids, context=context):
toremove[obj.line_id.id] = True
super(hr_analytic_timesheet, self).unlink(cr, uid, ids, context=context)
self.pool.get('account.analytic.line').unlink(cr, uid, toremove.keys(), context=context)
return True
def on_change_unit_amount(self, cr, uid, id, prod_id, unit_amount, company_id, unit=False, journal_id=False, context=None):
res = {'value':{}}
if prod_id and unit_amount:
# find company
company_id = self.pool.get('res.company')._company_default_get(cr, uid, 'account.analytic.line', context=context)
r = self.pool.get('account.analytic.line').on_change_unit_amount(cr, uid, id, prod_id, unit_amount, company_id, unit, journal_id, context=context)
if r:
res.update(r)
# update unit of measurement
if prod_id:
uom = self.pool.get('product.product').browse(cr, uid, prod_id, context=context)
if uom.uom_id:
res['value'].update({'product_uom_id': uom.uom_id.id})
else:
res['value'].update({'product_uom_id': False})
return res
def _getEmployeeProduct(self, cr, uid, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.id
return False
def _getEmployeeUnit(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.product_id:
return emp.product_id.uom_id.id
return False
def _getGeneralAccount(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
if emp_id:
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if bool(emp.product_id):
a = emp.product_id.property_account_expense.id
if not a:
a = emp.product_id.categ_id.property_account_expense_categ.id
if a:
return a
return False
def _getAnalyticJournal(self, cr, uid, context=None):
emp_obj = self.pool.get('hr.employee')
if context is None:
context = {}
if context.get('employee_id'):
emp_id = [context.get('employee_id')]
else:
emp_id = emp_obj.search(cr, uid, [('user_id','=',context.get('user_id') or uid)], limit=1, context=context)
if not emp_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'hr', 'open_view_employee_list_my')
msg = _("Employee is not created for this user. Please create one from configuration panel.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
emp = emp_obj.browse(cr, uid, emp_id[0], context=context)
if emp.journal_id:
return emp.journal_id.id
else:
raise osv.except_osv(_('Warning!'), _('No analytic journal defined for \'%s\'.\nYou should assign an analytic journal on the employee form.')%(emp.name))
_defaults = {
'product_uom_id': _getEmployeeUnit,
'product_id': _getEmployeeProduct,
'general_account_id': _getGeneralAccount,
'journal_id': _getAnalyticJournal,
'date': lambda self, cr, uid, ctx: ctx.get('date', fields.date.context_today(self,cr,uid,context=ctx)),
'user_id': lambda obj, cr, uid, ctx: ctx.get('user_id') or uid,
}
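# Added note (not original code): the callables in _defaults are invoked by the
# ORM when a new record is created, so a fresh timesheet line is pre-filled
# from the employee linked to the current user (product, unit of measure,
# expense account and analytic journal).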
def on_change_account_id(self, cr, uid, ids, account_id, context=None):
return {'value':{}}
def on_change_date(self, cr, uid, ids, date):
if ids:
new_date = self.read(cr, uid, ids[0], ['date'])['date']
if date != new_date:
warning = {'title':_('User Alert!'),'message':_('Changing the date will let this entry appear in the timesheet of the new date.')}
return {'value':{},'warning':warning}
return {'value':{}}
def create(self, cr, uid, vals, context=None):
if context is None:
context = {}
emp_obj = self.pool.get('hr.employee')
emp_id = emp_obj.search(cr, uid, [('user_id', '=', context.get('user_id') or uid)], context=context)
ename = ''
if emp_id:
ename = emp_obj.browse(cr, uid, emp_id[0], context=context).name
if not vals.get('journal_id',False):
raise osv.except_osv(_('Warning!'), _('No \'Analytic Journal\' is defined for employee %s \nDefine an employee for the selected user and assign an \'Analytic Journal\'!')%(ename,))
if not vals.get('account_id',False):
raise osv.except_osv(_('Warning!'), _('No analytic account is defined on the project.\nPlease set one or we cannot automatically fill the timesheet.'))
return super(hr_analytic_timesheet, self).create(cr, uid, vals, context=context)
def on_change_user_id(self, cr, uid, ids, user_id):
if not user_id:
return {}
context = {'user_id': user_id}
return {'value': {
'product_id': self._getEmployeeProduct(cr, uid, context),
'product_uom_id': self._getEmployeeUnit(cr, uid, context),
'general_account_id': self._getGeneralAccount(cr, uid, context),
'journal_id': self._getAnalyticJournal(cr, uid, context),
}}
class account_analytic_account(osv.osv):
_inherit = 'account.analytic.account'
_description = 'Analytic Account'
_columns = {
'use_timesheets': fields.boolean('Timesheets', help="Check this field if this project manages timesheets"),
}
def on_change_template(self, cr, uid, ids, template_id, date_start=False, context=None):
res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, date_start=date_start, context=context)
if template_id and 'value' in res:
template = self.browse(cr, uid, template_id, context=context)
res['value']['use_timesheets'] = template.use_timesheets
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
|
"""
Miscellaneous utilities, and serializers.
Pyro - Python Remote Objects. Copyright by Irmen de Jong ([email protected]).
"""
import array
import sys
import zlib
import uuid
import logging
import linecache
import traceback
import inspect
import struct
import datetime
import decimal
import numbers
from Pyro4 import errors
from Pyro4.configuration import config
try:
import copyreg
except ImportError:
import copy_reg as copyreg
log = logging.getLogger("Pyro4.util")
def getPyroTraceback(ex_type=None, ex_value=None, ex_tb=None):
"""Returns a list of strings that form the traceback information of a
Pyro exception. Any remote Pyro exception information is included.
Traceback information is automatically obtained via ``sys.exc_info()`` if
you do not supply the objects yourself."""
def formatRemoteTraceback(remote_tb_lines):
result = [" +--- This exception occured remotely (Pyro) - Remote traceback:"]
for line in remote_tb_lines:
if line.endswith("\n"):
line = line[:-1]
lines = line.split("\n")
for line2 in lines:
result.append("\n | ")
result.append(line2)
result.append("\n +--- End of remote traceback\n")
return result
try:
if ex_type is not None and ex_value is None and ex_tb is None:
# possible old (3.x) call syntax where caller is only providing exception object
if type(ex_type) is not type:
raise TypeError("invalid argument: ex_type should be an exception type, or just supply no arguments at all")
if ex_type is None and ex_tb is None:
ex_type, ex_value, ex_tb = sys.exc_info()
remote_tb = getattr(ex_value, "_pyroTraceback", None)
local_tb = formatTraceback(ex_type, ex_value, ex_tb, config.DETAILED_TRACEBACK)
if remote_tb:
remote_tb = formatRemoteTraceback(remote_tb)
return local_tb + remote_tb
else:
# hmm. no remote tb info, return just the local tb.
return local_tb
finally:
# clean up cycle to traceback, to allow proper GC
del ex_type, ex_value, ex_tb
def formatTraceback(ex_type=None, ex_value=None, ex_tb=None, detailed=False):
"""Formats an exception traceback. If you ask for detailed formatting,
the result will contain info on the variables in each stack frame.
You don't have to provide the exception info objects, if you omit them,
this function will obtain them itself using ``sys.exc_info()``."""
if ex_type is not None and ex_value is None and ex_tb is None:
# possible old (3.x) call syntax where caller is only providing exception object
if type(ex_type) is not type:
raise TypeError("invalid argument: ex_type should be an exception type, or just supply no arguments at all")
if ex_type is None and ex_tb is None:
ex_type, ex_value, ex_tb = sys.exc_info()
if detailed and sys.platform != "cli": # detailed tracebacks don't work in ironpython (most of the local vars are omitted)
def makeStrValue(value):
try:
return repr(value)
except:
try:
return str(value)
except:
return "<ERROR>"
try:
result = ["-" * 52 + "\n"]
result.append(" EXCEPTION %s: %s\n" % (ex_type, ex_value))
result.append(" Extended stacktrace follows (most recent call last)\n")
skipLocals = True # don't print the locals of the very first stack frame
while ex_tb:
frame = ex_tb.tb_frame
sourceFileName = frame.f_code.co_filename
if "self" in frame.f_locals:
location = "%s.%s" % (frame.f_locals["self"].__class__.__name__, frame.f_code.co_name)
else:
location = frame.f_code.co_name
result.append("-" * 52 + "\n")
result.append("File \"%s\", line %d, in %s\n" % (sourceFileName, ex_tb.tb_lineno, location))
result.append("Source code:\n")
result.append(" " + linecache.getline(sourceFileName, ex_tb.tb_lineno).strip() + "\n")
if not skipLocals:
names = set()
names.update(getattr(frame.f_code, "co_varnames", ()))
names.update(getattr(frame.f_code, "co_names", ()))
names.update(getattr(frame.f_code, "co_cellvars", ()))
names.update(getattr(frame.f_code, "co_freevars", ()))
result.append("Local values:\n")
for name2 in sorted(names):
if name2 in frame.f_locals:
value = frame.f_locals[name2]
result.append(" %s = %s\n" % (name2, makeStrValue(value)))
if name2 == "self":
# print the local variables of the class instance
for name3, value in vars(value).items():
result.append(" self.%s = %s\n" % (name3, makeStrValue(value)))
skipLocals = False
ex_tb = ex_tb.tb_next
result.append("-" * 52 + "\n")
result.append(" EXCEPTION %s: %s\n" % (ex_type, ex_value))
result.append("-" * 52 + "\n")
return result
except Exception:
return ["-" * 52 + "\nError building extended traceback!!! :\n",
"".join(traceback.format_exception(*sys.exc_info())) + '-' * 52 + '\n',
"Original Exception follows:\n",
"".join(traceback.format_exception(ex_type, ex_value, ex_tb))]
else:
# default traceback format.
return traceback.format_exception(ex_type, ex_value, ex_tb)
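# Illustrative usage (a sketch, not part of the original module; proxy and
# method name are hypothetical): printing the combined local + remote traceback
# after a failed remote call:
#   try:
#       proxy.some_remote_method()
#   except Exception:
#       print("".join(getPyroTraceback()))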
all_exceptions = {}
if sys.version_info < (3, 0):
import exceptions
for name, t in vars(exceptions).items():
if type(t) is type and issubclass(t, BaseException):
all_exceptions[name] = t
else:
import builtins
for name, t in vars(builtins).items():
if type(t) is type and issubclass(t, BaseException):
all_exceptions[name] = t
buffer = bytearray
for name, t in vars(errors).items():
if type(t) is type and issubclass(t, errors.PyroError):
all_exceptions[name] = t
class SerializerBase(object):
"""Base class for (de)serializer implementations (which must be thread safe)"""
__custom_class_to_dict_registry = {}
__custom_dict_to_class_registry = {}
def serializeData(self, data, compress=False):
"""Serialize the given data object, try to compress if told so.
Returns a tuple of the serialized data (bytes) and a bool indicating if it is compressed or not."""
data = self.dumps(data)
return self.__compressdata(data, compress)
def deserializeData(self, data, compressed=False):
"""Deserializes the given data (bytes). Set compressed to True to decompress the data first."""
if compressed:
if sys.version_info < (3, 0):
data = self._convertToBytes(data)
data = zlib.decompress(data)
return self.loads(data)
def serializeCall(self, obj, method, vargs, kwargs, compress=False):
"""Serialize the given method call parameters, try to compress if told so.
Returns a tuple of the serialized data and a bool indicating if it is compressed or not."""
data = self.dumpsCall(obj, method, vargs, kwargs)
return self.__compressdata(data, compress)
def deserializeCall(self, data, compressed=False):
"""Deserializes the given call data back to (object, method, vargs, kwargs) tuple.
Set compressed to True to decompress the data first."""
if compressed:
if sys.version_info < (3, 0):
data = self._convertToBytes(data)
data = zlib.decompress(data)
return self.loadsCall(data)
def loads(self, data):
raise NotImplementedError("implement in subclass")
def loadsCall(self, data):
raise NotImplementedError("implement in subclass")
def dumps(self, data):
raise NotImplementedError("implement in subclass")
def dumpsCall(self, obj, method, vargs, kwargs):
raise NotImplementedError("implement in subclass")
def _convertToBytes(self, data):
t = type(data)
if t is not bytes:
if t in (bytearray, buffer):
return bytes(data)
if t is memoryview:
return data.tobytes()
return data
def __compressdata(self, data, compress):
if not compress or len(data) < 200:
return data, False # don't waste time compressing small messages
compressed = zlib.compress(data)
if len(compressed) < len(data):
return compressed, True
return data, False
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
raise NotImplementedError("implement in subclass")
@classmethod
def register_class_to_dict(cls, clazz, converter, serpent_too=True):
"""Registers a custom function that returns a dict representation of objects of the given class.
The function is called with a single parameter: the object to be converted to a dict."""
cls.__custom_class_to_dict_registry[clazz] = converter
if serpent_too:
try:
get_serializer_by_id(SerpentSerializer.serializer_id)
import serpent # @todo not needed?
def serpent_converter(obj, serializer, stream, level):
d = converter(obj)
serializer.ser_builtins_dict(d, stream, level)
serpent.register_class(clazz, serpent_converter)
except errors.ProtocolError:
pass
@classmethod
def unregister_class_to_dict(cls, clazz):
"""Removes the to-dict conversion function registered for the given class. Objects of the class
will be serialized by the default mechanism again."""
if clazz in cls.__custom_class_to_dict_registry:
del cls.__custom_class_to_dict_registry[clazz]
try:
get_serializer_by_id(SerpentSerializer.serializer_id)
import serpent # @todo not needed?
serpent.unregister_class(clazz)
except errors.ProtocolError:
pass
@classmethod
def register_dict_to_class(cls, classname, converter):
"""
Registers a custom converter function that creates objects from a dict with the given classname tag in it.
The function is called with two parameters: the classname and the dictionary to convert to an instance of the class.
This mechanism is not used for the pickle serializer.
"""
cls.__custom_dict_to_class_registry[classname] = converter
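# Hedged example of this hook (class and field names are illustrative only):
#   SerializerBase.register_dict_to_class(
#       "mymodule.Point", lambda classname, d: Point(d["x"], d["y"]))
# after which serialized dicts tagged with "__class__": "mymodule.Point" are
# rebuilt through the converter instead of the default mechanism.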
@classmethod
def unregister_dict_to_class(cls, classname):
"""
Removes the converter registered for the given classname. Dicts with that classname tag
will be deserialized by the default mechanism again.
This mechanism is not used for the pickle serializer.
"""
if classname in cls.__custom_dict_to_class_registry:
del cls.__custom_dict_to_class_registry[classname]
@classmethod
def class_to_dict(cls, obj):
"""
Convert a non-serializable object to a dict. Partly borrowed from serpent.
Not used for the pickle serializer.
"""
for clazz in cls.__custom_class_to_dict_registry:
if isinstance(obj, clazz):
return cls.__custom_class_to_dict_registry[clazz](obj)
if type(obj) in (set, dict, tuple, list):
# we use a ValueError to mirror the exception type returned by serpent and other serializers
raise ValueError("can't serialize type " + str(obj.__class__) + " into a dict")
if hasattr(obj, "_pyroDaemon"):
obj._pyroDaemon = None
if isinstance(obj, BaseException):
# special case for exceptions
return {
"__class__": obj.__class__.__module__ + "." + obj.__class__.__name__,
"__exception__": True,
"args": obj.args,
"attributes": vars(obj) # add custom exception attributes
}
try:
value = obj.__getstate__()
except AttributeError:
pass
else:
if isinstance(value, dict):
return value
try:
value = dict(vars(obj)) # make sure we can serialize anything that resembles a dict
value["__class__"] = obj.__class__.__module__ + "." + obj.__class__.__name__
return value
except TypeError:
if hasattr(obj, "__slots__"):
# use the __slots__ instead of the vars dict
value = {}
for slot in obj.__slots__:
value[slot] = getattr(obj, slot)
value["__class__"] = obj.__class__.__module__ + "." + obj.__class__.__name__
return value
else:
raise errors.SerializeError("don't know how to serialize class " + str(obj.__class__) +
" using serializer " + str(cls.__name__) +
". Give it vars() or an appropriate __getstate__")
@classmethod
def dict_to_class(cls, data):
"""
Recreate an object out of a dict containing the class name and the attributes.
Only a fixed set of classes are recognized.
Not used for the pickle serializer.
"""
from Pyro4 import core, futures # XXX circular
classname = data.get("__class__", "<unknown>")
if isinstance(classname, bytes):
classname = classname.decode("utf-8")
if classname in cls.__custom_dict_to_class_registry:
converter = cls.__custom_dict_to_class_registry[classname]
return converter(classname, data)
if "__" in classname:
raise errors.SecurityError("refused to deserialize types with double underscores in their name: " + classname)
# for performance, the constructors below are hardcoded here instead of added on a per-class basis to the dict-to-class registry
if classname.startswith("Pyro4.core."):
if classname == "Pyro4.core.URI":
uri = core.URI.__new__(core.URI)
uri.__setstate_from_dict__(data["state"])
return uri
elif classname == "Pyro4.core.Proxy":
proxy = core.Proxy.__new__(core.Proxy)
proxy.__setstate_from_dict__(data["state"])
return proxy
elif classname == "Pyro4.core.Daemon":
daemon = core.Daemon.__new__(core.Daemon)
daemon.__setstate_from_dict__(data["state"])
return daemon
elif classname.startswith("Pyro4.util."):
if classname == "Pyro4.util.SerpentSerializer":
return SerpentSerializer()
elif classname == "Pyro4.util.PickleSerializer":
return PickleSerializer()
elif classname == "Pyro4.util.MarshalSerializer":
return MarshalSerializer()
elif classname == "Pyro4.util.JsonSerializer":
return JsonSerializer()
elif classname == "Pyro4.util.MsgpackSerializer":
return MsgpackSerializer()
elif classname == "Pyro4.util.CloudpickleSerializer":
return CloudpickleSerializer()
elif classname == "Pyro4.util.DillSerializer":
return DillSerializer()
elif classname.startswith("Pyro4.errors."):
errortype = getattr(errors, classname.split('.', 2)[2])
if issubclass(errortype, errors.PyroError):
return SerializerBase.make_exception(errortype, data)
elif classname == "Pyro4.futures._ExceptionWrapper":
ex = data["exception"]
if isinstance(ex, dict) and "__class__" in ex:
ex = SerializerBase.dict_to_class(ex)
return futures._ExceptionWrapper(ex)
elif data.get("__exception__", False):
if classname in all_exceptions:
return SerializerBase.make_exception(all_exceptions[classname], data)
# python 2.x: exceptions.ValueError
# python 3.x: builtins.ValueError
# translate to the appropriate namespace...
namespace, short_classname = classname.split('.', 1)
if namespace in ("builtins", "exceptions"):
if sys.version_info < (3, 0):
exceptiontype = getattr(exceptions, short_classname)
if issubclass(exceptiontype, BaseException):
return SerializerBase.make_exception(exceptiontype, data)
else:
exceptiontype = getattr(builtins, short_classname)
if issubclass(exceptiontype, BaseException):
return SerializerBase.make_exception(exceptiontype, data)
elif namespace == "sqlite3" and short_classname.endswith("Error"):
import sqlite3
exceptiontype = getattr(sqlite3, short_classname)
if issubclass(exceptiontype, BaseException):
return SerializerBase.make_exception(exceptiontype, data)
log.warning("unsupported serialized class: " + classname)
raise errors.SerializeError("unsupported serialized class: " + classname)
@staticmethod
def make_exception(exceptiontype, data):
ex = exceptiontype(*data["args"])
if "attributes" in data:
# restore custom attributes on the exception object
for attr, value in data["attributes"].items():
setattr(ex, attr, value)
return ex
def recreate_classes(self, literal):
t = type(literal)
if t is set:
return {self.recreate_classes(x) for x in literal}
if t is list:
return [self.recreate_classes(x) for x in literal]
if t is tuple:
return tuple(self.recreate_classes(x) for x in literal)
if t is dict:
if "__class__" in literal:
return self.dict_to_class(literal)
result = {}
for key, value in literal.items():
result[key] = self.recreate_classes(value)
return result
return literal
def __eq__(self, other):
"""this equality method is only to support the unit tests of this class"""
return isinstance(other, SerializerBase) and vars(self) == vars(other)
def __ne__(self, other):
return not self.__eq__(other)
__hash__ = object.__hash__
class PickleSerializer(SerializerBase):
"""
A (de)serializer that wraps the Pickle serialization protocol.
It can optionally compress the serialized data, and is thread safe.
"""
serializer_id = 4 # never change this
def dumpsCall(self, obj, method, vargs, kwargs):
return pickle.dumps((obj, method, vargs, kwargs), config.PICKLE_PROTOCOL_VERSION)
def dumps(self, data):
return pickle.dumps(data, config.PICKLE_PROTOCOL_VERSION)
def loadsCall(self, data):
data = self._convertToBytes(data)
return pickle.loads(data)
def loads(self, data):
data = self._convertToBytes(data)
return pickle.loads(data)
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
def copyreg_function(obj):
return replacement_function(obj).__reduce__()
if object_type is type or not inspect.isclass(object_type):
raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
try:
copyreg.pickle(object_type, copyreg_function)
except TypeError:
pass
class CloudpickleSerializer(SerializerBase):
"""
A (de)serializer that wraps the Cloudpickle serialization protocol.
It can optionally compress the serialized data, and is thread safe.
"""
serializer_id = 7 # never change this
def dumpsCall(self, obj, method, vargs, kwargs):
return cloudpickle.dumps((obj, method, vargs, kwargs), config.PICKLE_PROTOCOL_VERSION)
def dumps(self, data):
return cloudpickle.dumps(data, config.PICKLE_PROTOCOL_VERSION)
def loadsCall(self, data):
return cloudpickle.loads(data)
def loads(self, data):
return cloudpickle.loads(data)
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
def copyreg_function(obj):
return replacement_function(obj).__reduce__()
if object_type is type or not inspect.isclass(object_type):
raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
try:
copyreg.pickle(object_type, copyreg_function)
except TypeError:
pass
class DillSerializer(SerializerBase):
"""
A (de)serializer that wraps the Dill serialization protocol.
It can optionally compress the serialized data, and is thread safe.
"""
serializer_id = 5 # never change this
def dumpsCall(self, obj, method, vargs, kwargs):
return dill.dumps((obj, method, vargs, kwargs), config.DILL_PROTOCOL_VERSION)
def dumps(self, data):
return dill.dumps(data, config.DILL_PROTOCOL_VERSION)
def loadsCall(self, data):
return dill.loads(data)
def loads(self, data):
return dill.loads(data)
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
def copyreg_function(obj):
return replacement_function(obj).__reduce__()
if object_type is type or not inspect.isclass(object_type):
raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
try:
copyreg.pickle(object_type, copyreg_function)
except TypeError:
pass
class MarshalSerializer(SerializerBase):
"""(de)serializer that wraps the marshal serialization protocol."""
serializer_id = 3 # never change this
def dumpsCall(self, obj, method, vargs, kwargs):
vargs = [self.convert_obj_into_marshallable(value) for value in vargs]
kwargs = {key: self.convert_obj_into_marshallable(value) for key, value in kwargs.items()}
return marshal.dumps((obj, method, vargs, kwargs))
def dumps(self, data):
return marshal.dumps(self.convert_obj_into_marshallable(data))
if sys.platform == "cli":
def loadsCall(self, data):
if type(data) is not str:
# Ironpython's marshal expects str...
data = str(data)
obj, method, vargs, kwargs = marshal.loads(data)
vargs = self.recreate_classes(vargs)
kwargs = self.recreate_classes(kwargs)
return obj, method, vargs, kwargs
def loads(self, data):
if type(data) is not str:
# Ironpython's marshal expects str...
data = str(data)
return self.recreate_classes(marshal.loads(data))
else:
def loadsCall(self, data):
data = self._convertToBytes(data)
obj, method, vargs, kwargs = marshal.loads(data)
vargs = self.recreate_classes(vargs)
kwargs = self.recreate_classes(kwargs)
return obj, method, vargs, kwargs
def loads(self, data):
data = self._convertToBytes(data)
return self.recreate_classes(marshal.loads(data))
marshalable_types = (str, int, float, type(None), bool, complex, bytes, bytearray,
tuple, set, frozenset, list, dict)
if sys.version_info < (3, 0):
marshalable_types += (unicode,)
def convert_obj_into_marshallable(self, obj):
if isinstance(obj, self.marshalable_types):
return obj
if isinstance(obj, array.array):
if obj.typecode == 'c':
return obj.tostring()
if obj.typecode == 'u':
return obj.tounicode()
return obj.tolist()
return self.class_to_dict(obj)
@classmethod
def class_to_dict(cls, obj):
if isinstance(obj, uuid.UUID):
return str(obj)
return super(MarshalSerializer, cls).class_to_dict(obj)
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
pass # marshal serializer doesn't support per-type hooks
class SerpentSerializer(SerializerBase):
"""(de)serializer that wraps the serpent serialization protocol."""
serializer_id = 1 # never change this
def dumpsCall(self, obj, method, vargs, kwargs):
return serpent.dumps((obj, method, vargs, kwargs), module_in_classname=True)
def dumps(self, data):
return serpent.dumps(data, module_in_classname=True)
def loadsCall(self, data):
obj, method, vargs, kwargs = serpent.loads(data)
vargs = self.recreate_classes(vargs)
kwargs = self.recreate_classes(kwargs)
return obj, method, vargs, kwargs
def loads(self, data):
return self.recreate_classes(serpent.loads(data))
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
def custom_serializer(object, serpent_serializer, outputstream, indentlevel):
replaced = replacement_function(object)
if replaced is object:
serpent_serializer.ser_default_class(replaced, outputstream, indentlevel)
else:
serpent_serializer._serialize(replaced, outputstream, indentlevel)
if object_type is type or not inspect.isclass(object_type):
raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
serpent.register_class(object_type, custom_serializer)
@classmethod
def dict_to_class(cls, data):
if data.get("__class__") == "float":
return float(data["value"]) # serpent encodes a float nan as a special class dict like this
return super(SerpentSerializer, cls).dict_to_class(data)
class JsonSerializer(SerializerBase):
"""(de)serializer that wraps the json serialization protocol."""
serializer_id = 2 # never change this
__type_replacements = {}
def dumpsCall(self, obj, method, vargs, kwargs):
data = {"object": obj, "method": method, "params": vargs, "kwargs": kwargs}
data = json.dumps(data, ensure_ascii=False, default=self.default)
return data.encode("utf-8")
def dumps(self, data):
data = json.dumps(data, ensure_ascii=False, default=self.default)
return data.encode("utf-8")
def loadsCall(self, data):
data = self._convertToBytes(data).decode("utf-8")
data = json.loads(data)
vargs = self.recreate_classes(data["params"])
kwargs = self.recreate_classes(data["kwargs"])
return data["object"], data["method"], vargs, kwargs
def loads(self, data):
data = self._convertToBytes(data).decode("utf-8")
return self.recreate_classes(json.loads(data))
def default(self, obj):
replacer = self.__type_replacements.get(type(obj), None)
if replacer:
obj = replacer(obj)
if isinstance(obj, set):
return tuple(obj) # json module can't deal with sets so we make a tuple out of it
if isinstance(obj, uuid.UUID):
return str(obj)
if isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, decimal.Decimal):
return str(obj)
if isinstance(obj, array.array):
if obj.typecode == 'c':
return obj.tostring()
if obj.typecode == 'u':
return obj.tounicode()
return obj.tolist()
return self.class_to_dict(obj)
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
if object_type is type or not inspect.isclass(object_type):
raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
cls.__type_replacements[object_type] = replacement_function
class MsgpackSerializer(SerializerBase):
"""(de)serializer that wraps the msgpack serialization protocol."""
serializer_id = 6 # never change this
__type_replacements = {}
def dumpsCall(self, obj, method, vargs, kwargs):
return msgpack.packb((obj, method, vargs, kwargs), use_bin_type=True, default=self.default)
def dumps(self, data):
return msgpack.packb(data, use_bin_type=True, default=self.default)
def loadsCall(self, data):
data = self._convertToBytes(data)
obj, method, vargs, kwargs = msgpack.unpackb(data, raw=False, object_hook=self.object_hook)
return obj, method, vargs, kwargs
def loads(self, data):
data = self._convertToBytes(data)
return msgpack.unpackb(data, raw=False, object_hook=self.object_hook, ext_hook=self.ext_hook)
def default(self, obj):
replacer = self.__type_replacements.get(type(obj), None)
if replacer:
obj = replacer(obj)
if isinstance(obj, set):
return tuple(obj) # msgpack module can't deal with sets so we make a tuple out of it
if isinstance(obj, uuid.UUID):
return str(obj)
if isinstance(obj, bytearray):
return bytes(obj)
if isinstance(obj, complex):
return msgpack.ExtType(0x30, struct.pack("dd", obj.real, obj.imag))
if isinstance(obj, datetime.datetime):
if obj.tzinfo:
raise errors.SerializeError("msgpack cannot serialize datetime with timezone info")
return msgpack.ExtType(0x32, struct.pack("d", obj.timestamp()))
if isinstance(obj, datetime.date):
return msgpack.ExtType(0x33, struct.pack("l", obj.toordinal()))
if isinstance(obj, decimal.Decimal):
return str(obj)
if isinstance(obj, numbers.Number):
return msgpack.ExtType(0x31, str(obj).encode("ascii")) # long
if isinstance(obj, array.array):
if obj.typecode == 'c':
return obj.tostring()
if obj.typecode == 'u':
return obj.tounicode()
return obj.tolist()
return self.class_to_dict(obj)
def object_hook(self, obj):
if "__class__" in obj:
return self.dict_to_class(obj)
return obj
def ext_hook(self, code, data):
if code == 0x30:
real, imag = struct.unpack("dd", data)
return complex(real, imag)
if code == 0x31:
return int(data)
if code == 0x32:
return datetime.datetime.fromtimestamp(struct.unpack("d", data)[0])
if code == 0x33:
return datetime.date.fromordinal(struct.unpack("l", data)[0])
raise errors.SerializeError("invalid ext code for msgpack: " + str(code))
@classmethod
def register_type_replacement(cls, object_type, replacement_function):
if object_type is type or not inspect.isclass(object_type):
raise ValueError("refusing to register replacement for a non-type or the type 'type' itself")
cls.__type_replacements[object_type] = replacement_function
"""The various serializers that are supported"""
_serializers = {}
_serializers_by_id = {}
def get_serializer(name):
try:
return _serializers[name]
except KeyError:
raise errors.SerializeError("serializer '%s' is unknown or not available" % name)
def get_serializer_by_id(sid):
try:
return _serializers_by_id[sid]
except KeyError:
raise errors.SerializeError("no serializer available for id %d" % sid)
# determine the serializers that are supported
try:
import cPickle as pickle
except ImportError:
import pickle
assert config.PICKLE_PROTOCOL_VERSION >= 2, "pickle protocol needs to be 2 or higher"
_ser = PickleSerializer()
_serializers["pickle"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
import marshal
_ser = MarshalSerializer()
_serializers["marshal"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
try:
import cloudpickle
_ser = CloudpickleSerializer()
_serializers["cloudpickle"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
pass
try:
import dill
_ser = DillSerializer()
_serializers["dill"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
pass
try:
try:
import importlib
json = importlib.import_module(config.JSON_MODULE)
except ImportError:
json = __import__(config.JSON_MODULE)
_ser = JsonSerializer()
_serializers["json"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
pass
try:
import serpent
_ser = SerpentSerializer()
_serializers["serpent"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
log.warning("serpent serializer is not available")
try:
import msgpack
_ser = MsgpackSerializer()
_serializers["msgpack"] = _ser
_serializers_by_id[_ser.serializer_id] = _ser
except ImportError:
pass
del _ser
def getAttribute(obj, attr):
"""
Resolves an attribute name to an object. Raises
an AttributeError if any attribute in the chain starts with a '``_``'.
Doesn't resolve a dotted name, because that is a security vulnerability.
It treats it as a single attribute name (and the lookup will likely fail).
"""
if is_private_attribute(attr):
raise AttributeError("attempt to access private attribute '%s'" % attr)
else:
obj = getattr(obj, attr)
if not config.REQUIRE_EXPOSE or getattr(obj, "_pyroExposed", False):
return obj
raise AttributeError("attempt to access unexposed attribute '%s'" % attr)
def excepthook(ex_type, ex_value, ex_tb):
"""An exception hook you can use for ``sys.excepthook``, to automatically print remote Pyro tracebacks"""
traceback = "".join(getPyroTraceback(ex_type, ex_value, ex_tb))
sys.stderr.write(traceback)
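# Typical installation of the hook above (a sketch, not original code):
#   import sys
#   import Pyro4.util
#   sys.excepthook = Pyro4.util.excepthook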
def fixIronPythonExceptionForPickle(exceptionObject, addAttributes):
"""
Function to hack around a bug in IronPython where it doesn't pickle
exception attributes. We piggyback them into the exception's args.
Bug report is at https://github.com/IronLanguages/main/issues/943
Bug is still present in Ironpython 2.7.7
"""
if hasattr(exceptionObject, "args"):
if addAttributes:
# piggyback the attributes on the exception args instead.
ironpythonArgs = vars(exceptionObject)
ironpythonArgs["__ironpythonargs__"] = True
exceptionObject.args += (ironpythonArgs,)
else:
# check if there is a piggybacked object in the args
# if there is, extract the exception attributes from it.
if len(exceptionObject.args) > 0:
piggyback = exceptionObject.args[-1]
if type(piggyback) is dict and piggyback.get("__ironpythonargs__"):
del piggyback["__ironpythonargs__"]
exceptionObject.args = exceptionObject.args[:-1]
exceptionObject.__dict__.update(piggyback)
__exposed_member_cache = {}
def reset_exposed_members(obj, only_exposed=True, as_lists=False):
"""Delete any cached exposed members forcing recalculation on next request"""
if not inspect.isclass(obj):
obj = obj.__class__
cache_key = (obj, only_exposed, as_lists)
__exposed_member_cache.pop(cache_key, None)
def get_exposed_members(obj, only_exposed=True, as_lists=False, use_cache=True):
"""
Return public and exposed members of the given object's class.
You can also provide a class directly.
Private members are ignored no matter what (names starting with underscore).
If only_exposed is True, only members tagged with the @expose decorator are
returned. If it is False, all public members are returned.
The return value consists of the exposed methods, exposed attributes, and methods
tagged as @oneway.
(All this is used as meta data that Pyro sends to the proxy if it asks for it)
as_lists is meant for python 2 compatibility.
"""
if not inspect.isclass(obj):
obj = obj.__class__
cache_key = (obj, only_exposed, as_lists)
if use_cache and cache_key in __exposed_member_cache:
return __exposed_member_cache[cache_key]
methods = set() # all methods
oneway = set() # oneway methods
attrs = set() # attributes
for m in dir(obj): # also lists names inherited from super classes
if is_private_attribute(m):
continue
v = getattr(obj, m)
if inspect.ismethod(v) or inspect.isfunction(v) or inspect.ismethoddescriptor(v):
if getattr(v, "_pyroExposed", not only_exposed):
methods.add(m)
# check if the method is marked with the 'oneway' decorator:
if getattr(v, "_pyroOneway", False):
oneway.add(m)
elif inspect.isdatadescriptor(v):
func = getattr(v, "fget", None) or getattr(v, "fset", None) or getattr(v, "fdel", None)
if func is not None and getattr(func, "_pyroExposed", not only_exposed):
attrs.add(m)
# Note that we don't expose plain class attributes no matter what.
# it is a syntax error to add a decorator on them, and it is not possible
# to give them a _pyroExposed tag either.
# The way to expose attributes is by using properties for them.
# This automatically solves the protection/security issue: you have to
# explicitly decide to make an attribute into a @property (and to @expose it
    # if REQUIRE_EXPOSE=True) before it is remotely accessible.
if as_lists:
methods = list(methods)
oneway = list(oneway)
attrs = list(attrs)
result = {
"methods": methods,
"oneway": oneway,
"attrs": attrs
}
__exposed_member_cache[cache_key] = result
return result
def get_exposed_property_value(obj, propname, only_exposed=True):
"""
Return the value of an @exposed @property.
If the requested property is not a @property or not exposed,
an AttributeError is raised instead.
"""
v = getattr(obj.__class__, propname)
if inspect.isdatadescriptor(v):
if v.fget and getattr(v.fget, "_pyroExposed", not only_exposed):
return v.fget(obj)
raise AttributeError("attempt to access unexposed or unknown remote attribute '%s'" % propname)
def set_exposed_property_value(obj, propname, value, only_exposed=True):
"""
Sets the value of an @exposed @property.
If the requested property is not a @property or not exposed,
an AttributeError is raised instead.
"""
v = getattr(obj.__class__, propname)
if inspect.isdatadescriptor(v):
pfunc = v.fget or v.fset or v.fdel
if v.fset and getattr(pfunc, "_pyroExposed", not only_exposed):
return v.fset(obj, value)
raise AttributeError("attempt to access unexposed or unknown remote attribute '%s'" % propname)
_private_dunder_methods = frozenset([
"__init__", "__init_subclass__", "__class__", "__module__", "__weakref__",
"__call__", "__new__", "__del__", "__repr__", "__unicode__",
"__str__", "__format__", "__nonzero__", "__bool__", "__coerce__",
"__cmp__", "__eq__", "__ne__", "__hash__", "__ge__", "__gt__", "__le__", "__lt__",
"__dir__", "__enter__", "__exit__", "__copy__", "__deepcopy__", "__sizeof__",
"__getattr__", "__setattr__", "__hasattr__", "__getattribute__", "__delattr__",
"__instancecheck__", "__subclasscheck__", "__getinitargs__", "__getnewargs__",
"__getstate__", "__setstate__", "__reduce__", "__reduce_ex__",
"__getstate_for_dict__", "__setstate_from_dict__", "__subclasshook__"
])
def is_private_attribute(attr_name):
    """Return True if the attribute name is to be considered private, False otherwise."""
if attr_name in _private_dunder_methods:
return True
if not attr_name.startswith('_'):
return False
if len(attr_name) > 4 and attr_name.startswith("__") and attr_name.endswith("__"):
return False
return True
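# --- Usage sketch (not part of the original module) ---------------------------
# A minimal, hedged illustration of the helpers above.  Instead of Pyro's real
# @expose decorator we set the _pyroExposed tag on a method by hand (that tag
# is what get_exposed_members() looks for); the Thermometer class itself is
# purely hypothetical.
if __name__ == "__main__":
    class Thermometer(object):
        def read(self):
            return 21.5
        read._pyroExposed = True      # normally set by the @expose decorator

        def _calibrate(self):         # leading underscore: treated as private
            pass

    print(is_private_attribute("_calibrate"))               # True
    print(get_exposed_members(Thermometer, as_lists=True))
    # -> {'methods': ['read'], 'oneway': [], 'attrs': []}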
|
|
"""
Support for an interface to work with a remote instance of Home Assistant.
If a connection error occurs while communicating with the API a
HomeAssistantError will be raised.
For more details about the Python API, please refer to the documentation at
https://home-assistant.io/developers/python_api/
"""
import asyncio
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
import enum
import json
import logging
import time
import threading
import urllib.parse
from typing import Optional
import requests
import homeassistant.bootstrap as bootstrap
import homeassistant.core as ha
from homeassistant.const import (
HTTP_HEADER_HA_AUTH, SERVER_PORT, URL_API, URL_API_EVENT_FORWARD,
URL_API_EVENTS, URL_API_EVENTS_EVENT, URL_API_SERVICES, URL_API_CONFIG,
URL_API_SERVICES_SERVICE, URL_API_STATES, URL_API_STATES_ENTITY,
HTTP_HEADER_CONTENT_TYPE, CONTENT_TYPE_JSON)
from homeassistant.exceptions import HomeAssistantError
METHOD_GET = "get"
METHOD_POST = "post"
METHOD_DELETE = "delete"
_LOGGER = logging.getLogger(__name__)
class APIStatus(enum.Enum):
"""Represent API status."""
# pylint: disable=no-init, invalid-name
OK = "ok"
INVALID_PASSWORD = "invalid_password"
CANNOT_CONNECT = "cannot_connect"
UNKNOWN = "unknown"
def __str__(self) -> str:
"""Return the state."""
return self.value
class API(object):
"""Object to pass around Home Assistant API location and credentials."""
def __init__(self, host: str, api_password: Optional[str]=None,
                 port: Optional[int]=None, use_ssl: bool=False) -> None:
        """Initialize the API."""
self.host = host
self.port = port or SERVER_PORT
self.api_password = api_password
if use_ssl:
self.base_url = "https://{}:{}".format(host, self.port)
else:
self.base_url = "http://{}:{}".format(host, self.port)
self.status = None
self._headers = {
HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_JSON,
}
if api_password is not None:
self._headers[HTTP_HEADER_HA_AUTH] = api_password
def validate_api(self, force_validate: bool=False) -> bool:
"""Test if we can communicate with the API."""
if self.status is None or force_validate:
self.status = validate_api(self)
return self.status == APIStatus.OK
def __call__(self, method, path, data=None, timeout=5):
"""Make a call to the Home Assistant API."""
if data is not None:
data = json.dumps(data, cls=JSONEncoder)
url = urllib.parse.urljoin(self.base_url, path)
try:
if method == METHOD_GET:
return requests.get(
url, params=data, timeout=timeout, headers=self._headers)
else:
return requests.request(
method, url, data=data, timeout=timeout,
headers=self._headers)
except requests.exceptions.ConnectionError:
_LOGGER.exception("Error connecting to server")
raise HomeAssistantError("Error connecting to server")
except requests.exceptions.Timeout:
error = "Timeout when talking to {}".format(self.host)
_LOGGER.exception(error)
raise HomeAssistantError(error)
def __repr__(self) -> str:
"""Return the representation of the API."""
return "API({}, {}, {})".format(
self.host, self.api_password, self.port)
class HomeAssistant(ha.HomeAssistant):
"""Home Assistant that forwards work."""
# pylint: disable=super-init-not-called
    def __init__(self, remote_api, local_api=None, loop=None):
        """Initialize the forwarding instance."""
if not remote_api.validate_api():
raise HomeAssistantError(
"Remote API at {}:{} not valid: {}".format(
remote_api.host, remote_api.port, remote_api.status))
self.remote_api = remote_api
self.loop = loop or asyncio.get_event_loop()
self.executor = ThreadPoolExecutor(max_workers=5)
self.loop.set_default_executor(self.executor)
self.loop.set_exception_handler(self._async_exception_handler)
self._pending_tasks = []
self._pending_sheduler = None
self.bus = EventBus(remote_api, self)
self.services = ha.ServiceRegistry(self)
self.states = StateMachine(self.bus, self.loop, self.remote_api)
self.config = ha.Config()
# This is a dictionary that any component can store any data on.
self.data = {}
self.state = ha.CoreState.not_running
self.exit_code = None
self._websession = None
self.config.api = local_api
def start(self):
"""Start the instance."""
# Ensure a local API exists to connect with remote
if 'api' not in self.config.components:
if not bootstrap.setup_component(self, 'api'):
raise HomeAssistantError(
'Unable to setup local API to receive events')
self.state = ha.CoreState.starting
# pylint: disable=protected-access
ha._async_create_timer(self)
self.bus.fire(ha.EVENT_HOMEASSISTANT_START,
origin=ha.EventOrigin.remote)
# Ensure local HTTP is started
self.block_till_done()
self.state = ha.CoreState.running
time.sleep(0.05)
# Setup that events from remote_api get forwarded to local_api
# Do this after we are running, otherwise HTTP is not started
# or requests are blocked
if not connect_remote_events(self.remote_api, self.config.api):
raise HomeAssistantError((
'Could not setup event forwarding from api {} to '
'local api {}').format(self.remote_api, self.config.api))
    def stop(self):
        """Stop Home Assistant and shut down all threads."""
_LOGGER.info("Stopping")
self.state = ha.CoreState.stopping
self.bus.fire(ha.EVENT_HOMEASSISTANT_STOP,
origin=ha.EventOrigin.remote)
# Disconnect master event forwarding
disconnect_remote_events(self.remote_api, self.config.api)
self.state = ha.CoreState.not_running
class EventBus(ha.EventBus):
"""EventBus implementation that forwards fire_event to remote API."""
    def __init__(self, api, hass):
        """Initialize the event bus."""
super().__init__(hass)
self._api = api
def fire(self, event_type, event_data=None, origin=ha.EventOrigin.local):
"""Forward local events to remote target.
Handles remote event as usual.
"""
# All local events that are not TIME_CHANGED are forwarded to API
if origin == ha.EventOrigin.local and \
event_type != ha.EVENT_TIME_CHANGED:
fire_event(self._api, event_type, event_data)
else:
super().fire(event_type, event_data, origin)
class EventForwarder(object):
"""Listens for events and forwards to specified APIs."""
    def __init__(self, hass, restrict_origin=None):
        """Initialize the event forwarder."""
self.hass = hass
self.restrict_origin = restrict_origin
# We use a tuple (host, port) as key to ensure
# that we do not forward to the same host twice
self._targets = {}
self._lock = threading.Lock()
self._async_unsub_listener = None
@ha.callback
def async_connect(self, api):
"""Attach to a Home Assistant instance and forward events.
Will overwrite old target if one exists with same host/port.
"""
if self._async_unsub_listener is None:
self._async_unsub_listener = self.hass.bus.async_listen(
ha.MATCH_ALL, self._event_listener)
key = (api.host, api.port)
self._targets[key] = api
@ha.callback
def async_disconnect(self, api):
"""Remove target from being forwarded to."""
key = (api.host, api.port)
        did_remove = self._targets.pop(key, None) is not None
if len(self._targets) == 0:
# Remove event listener if no forwarding targets present
self._async_unsub_listener()
self._async_unsub_listener = None
return did_remove
def _event_listener(self, event):
"""Listen and forward all events."""
with self._lock:
# We don't forward time events or, if enabled, non-local events
if event.event_type == ha.EVENT_TIME_CHANGED or \
(self.restrict_origin and event.origin != self.restrict_origin):
return
for api in self._targets.values():
fire_event(api, event.event_type, event.data)
class StateMachine(ha.StateMachine):
"""Fire set events to an API. Uses state_change events to track states."""
    def __init__(self, bus, loop, api):
        """Initialize the state machine."""
super().__init__(bus, loop)
self._api = api
self.mirror()
bus.listen(ha.EVENT_STATE_CHANGED, self._state_changed_listener)
def remove(self, entity_id):
"""Remove the state of an entity.
Returns boolean to indicate if an entity was removed.
"""
return remove_state(self._api, entity_id)
def set(self, entity_id, new_state, attributes=None, force_update=False):
"""Call set_state on remote API."""
set_state(self._api, entity_id, new_state, attributes, force_update)
    def mirror(self):
        """Discard current data and mirror the remote state machine."""
self._states = {state.entity_id: state for state
in get_states(self._api)}
    def _state_changed_listener(self, event):
        """Listen for state changed events and apply them."""
if event.data['new_state'] is None:
self._states.pop(event.data['entity_id'], None)
else:
self._states[event.data['entity_id']] = event.data['new_state']
class JSONEncoder(json.JSONEncoder):
"""JSONEncoder that supports Home Assistant objects."""
# pylint: disable=method-hidden
def default(self, obj):
"""Convert Home Assistant objects.
Hand other objects to the original method.
"""
if isinstance(obj, datetime):
return obj.isoformat()
elif hasattr(obj, 'as_dict'):
return obj.as_dict()
try:
return json.JSONEncoder.default(self, obj)
except TypeError:
# If the JSON serializer couldn't serialize it
# it might be a generator, convert it to a list
try:
return [self.default(child_obj)
for child_obj in obj]
except TypeError:
# Ok, we're lost, cause the original error
return json.JSONEncoder.default(self, obj)
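# Hedged, illustrative example of the encoder above: json.dumps can now handle
# objects the stock encoder rejects, e.g. datetimes:
#
#     json.dumps({'when': datetime(2017, 1, 1)}, cls=JSONEncoder)
#     # -> '{"when": "2017-01-01T00:00:00"}'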
def validate_api(api):
"""Make a call to validate API."""
try:
req = api(METHOD_GET, URL_API)
if req.status_code == 200:
return APIStatus.OK
elif req.status_code == 401:
return APIStatus.INVALID_PASSWORD
else:
return APIStatus.UNKNOWN
except HomeAssistantError:
return APIStatus.CANNOT_CONNECT
def connect_remote_events(from_api, to_api):
"""Setup from_api to forward all events to to_api."""
data = {
'host': to_api.host,
'api_password': to_api.api_password,
'port': to_api.port
}
try:
req = from_api(METHOD_POST, URL_API_EVENT_FORWARD, data)
if req.status_code == 200:
return True
else:
_LOGGER.error(
"Error setting up event forwarding: %s - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error setting up event forwarding")
return False
def disconnect_remote_events(from_api, to_api):
"""Disconnect forwarding events from from_api to to_api."""
data = {
'host': to_api.host,
'port': to_api.port
}
try:
req = from_api(METHOD_DELETE, URL_API_EVENT_FORWARD, data)
if req.status_code == 200:
return True
else:
_LOGGER.error(
"Error removing event forwarding: %s - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error removing an event forwarder")
return False
def get_event_listeners(api):
    """Return the events that are being listened for."""
try:
req = api(METHOD_GET, URL_API_EVENTS)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Unexpected result retrieving event listeners")
return {}
def fire_event(api, event_type, data=None):
"""Fire an event at remote API."""
try:
req = api(METHOD_POST, URL_API_EVENTS_EVENT.format(event_type), data)
if req.status_code != 200:
_LOGGER.error("Error firing event: %d - %s",
req.status_code, req.text)
except HomeAssistantError:
_LOGGER.exception("Error firing event")
def get_state(api, entity_id):
"""Query given API for state of entity_id."""
try:
req = api(METHOD_GET, URL_API_STATES_ENTITY.format(entity_id))
# req.status_code == 422 if entity does not exist
return ha.State.from_dict(req.json()) \
if req.status_code == 200 else None
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Error fetching state")
return None
def get_states(api):
"""Query given API for all states."""
try:
req = api(METHOD_GET,
URL_API_STATES)
return [ha.State.from_dict(item) for
item in req.json()]
except (HomeAssistantError, ValueError, AttributeError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Error fetching states")
return []
def remove_state(api, entity_id):
"""Call API to remove state for entity_id.
Return True if entity is gone (removed/never existed).
"""
try:
req = api(METHOD_DELETE, URL_API_STATES_ENTITY.format(entity_id))
if req.status_code in (200, 404):
return True
_LOGGER.error("Error removing state: %d - %s",
req.status_code, req.text)
return False
except HomeAssistantError:
_LOGGER.exception("Error removing state")
return False
def set_state(api, entity_id, new_state, attributes=None, force_update=False):
"""Tell API to update state for entity_id.
Return True if success.
"""
attributes = attributes or {}
data = {'state': new_state,
'attributes': attributes,
'force_update': force_update}
try:
req = api(METHOD_POST,
URL_API_STATES_ENTITY.format(entity_id),
data)
if req.status_code not in (200, 201):
_LOGGER.error("Error changing state: %d - %s",
req.status_code, req.text)
return False
else:
return True
except HomeAssistantError:
_LOGGER.exception("Error setting state")
return False
def is_state(api, entity_id, state):
"""Query API to see if entity_id is specified state."""
cur_state = get_state(api, entity_id)
return cur_state and cur_state.state == state
def get_services(api):
"""Return a list of dicts.
Each dict has a string "domain" and a list of strings "services".
"""
try:
req = api(METHOD_GET, URL_API_SERVICES)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the json
_LOGGER.exception("Got unexpected services result")
return {}
def call_service(api, domain, service, service_data=None, timeout=5):
"""Call a service at the remote API."""
try:
req = api(METHOD_POST,
URL_API_SERVICES_SERVICE.format(domain, service),
service_data, timeout=timeout)
if req.status_code != 200:
_LOGGER.error("Error calling service: %d - %s",
req.status_code, req.text)
except HomeAssistantError:
_LOGGER.exception("Error calling service")
def get_config(api):
"""Return configuration."""
try:
req = api(METHOD_GET, URL_API_CONFIG)
return req.json() if req.status_code == 200 else {}
except (HomeAssistantError, ValueError):
# ValueError if req.json() can't parse the JSON
_LOGGER.exception("Got unexpected configuration results")
return {}
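# --- Usage sketch (not part of the original module) ---------------------------
# A hedged example of driving a remote instance with the helpers above; the
# host, password and entity ids below are hypothetical.
#
#     import homeassistant.remote as remote
#
#     api = remote.API('192.168.1.10', api_password='secret')
#     if api.validate_api():
#         kitchen = remote.get_state(api, 'light.kitchen')
#         print(kitchen.state if kitchen else 'unknown')
#         remote.set_state(api, 'sensor.demo', '42',
#                          attributes={'unit_of_measurement': 'items'})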
|
|
import asyncio
import curses
import random
import math
import time
DEFAULT_COLOR = 1
class Window:
"""
    Represents a window space on the screen in which text can be placed.
"""
contents = None
def __init__(self, parent, height=-1, width=-1, y_start=0, x_start=0):
if height == -1:
height = parent.getmaxyx()[0]
if width == -1:
width = parent.getmaxyx()[1]
self.parent = parent
self.parent.nodelay(True)
self.text_area = self.parent.derwin(height, width, y_start, x_start)
self.text_area.scrollok(True)
self.text_area.nodelay(True)
self.Contents = ''
@property
    def contents(self):
        ''' getter for contents (mainly meant to create a property for the following setter)'''
return self.Contents
@contents.setter
    def contents(self, value):
        ''' setter for the contents of the text area '''
self.clear_text()
self.Contents = str(value)
self.text_area.addstr(0,0, self.Contents)
self.text_area.noutrefresh()
def add(self, text, color=DEFAULT_COLOR):
self.text_area.addstr(text, curses.color_pair(color))
self.text_area.noutrefresh()
def clear_text(self):
''' clears the window '''
        # this is probably a bit of a hack but ...
# self.text_area.clear() makes everything blink
maxy, maxx = self.text_area.getmaxyx()
self.text_area.addstr(0,0, ' '*maxy*maxx)
self.text_area.noutrefresh()
@property
def maxyx(self):
return self.text_area.getmaxyx()
class BorderedWindow(Window):
'''
    A subclass of Window that simply has a border around it.
'''
def __init__(self, parent, height=-1, width=-1, y_start=0, x_start=0):
if height == -1:
height = parent.getmaxyx()[0]
if width == -1:
width = parent.getmaxyx()[1]
self.parent = parent
self.parent.nodelay(True)
self.border = self.parent.derwin(height, width, y_start, x_start)
self.border.box()
self.border.noutrefresh()
self.text_area = self.parent.derwin(height-2, width-2, y_start+1, x_start+1)
self.text_area.scrollok(True)
self.text_area.nodelay(True)
self.Contents = ''
class TableLayout:
'''
    Used to create multiple windows of a specified type that are equally spaced
    into the given number of rows and cols. I have it as a class, but it may
    be better to change it to simply a function that returns a '2D' list.
'''
def __init__(self, parent, rows, cols, window_type=Window):
self.parent = parent
self.parent.nodelay(True)
self.sub_windows = [ [ None for i in range(cols) ] for j in range(rows) ]
maxy, maxx = self.parent.getmaxyx()
height = math.floor(maxy/rows)
width = math.floor(maxx/cols)
y_start = 0
x_start = 0
for row_num in range(rows):
y_start = height*row_num
for col_num in range(cols):
x_start = width*col_num
self.sub_windows[row_num][col_num] = window_type(self.parent, height, width, y_start, x_start)
class BaseUI:
'''
    A class that represents the UI and is used as the base for specific UIs.
'''
def __init__(self, rate=1):
'''
        Creates the UI and sets its frame rate (rate is the number of seconds between screen refreshes).
'''
self.rate = rate
self.close = False
def __enter__(self):
'''
        initializes this UI for use as a context manager
'''
self.main_window = curses.initscr()
self.main_window.nodelay(True)
self.main_window.keypad(True)
curses.noecho()
curses.start_color()
self.setup()
return self
def setup(self):
'''
this is where you create all the sub windows/etc for the class
'''
pass
def cleanup(self):
'''
this is where you do any special clean up when closing down your UI
'''
pass
def __exit__(self, *args):
'''
        used to return the console back to normal after leaving the context
'''
curses.nocbreak()
curses.endwin()
@property
def maxyx(self):
return self.main_window.getmaxyx()
async def screen_updater(self):
'''
a Job that is used to refresh the screen at the specified frame rate
'''
try:
while True:
await asyncio.sleep(self.rate)
if self.close:
return
else:
self.pre_update_work()
curses.doupdate()
except KeyboardInterrupt:
self.cleanup()
self.close = True
return
def pre_update_work(self):
pass
async def keyboard_listener(self):
while True:
key = await self.get_key()
if key is None:
continue
else:
self.key_stroke_handler(key)
async def get_key(self):
await asyncio.sleep(0.1)
ch = None
try:
ch = self.main_window.get_wch()
except curses.error as e:
return None
return ch
def key_stroke_handler(self, key):
pass
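# A minimal, hedged pattern for driving a BaseUI subclass (the test classes
# below follow the same shape); MyUI is hypothetical and `rate` is the number
# of seconds between screen refreshes:
#
#     with MyUI(rate=0.5) as ui:
#         loop = asyncio.get_event_loop()
#         asyncio.ensure_future(ui.keyboard_listener())
#         loop.run_until_complete(ui.screen_updater())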
class TestingUI_1(BaseUI):
'''
    My first test creates a single bordered window and randomly changes the message in it.
'''
def setup(self):
maxy, maxx = self.main_window.getmaxyx()
self.textarea = BorderedWindow(self.main_window, maxy, maxx, 0, 0)
async def test_worker(self):
try:
while True:
await asyncio.sleep(0)
if self.close:
return
messages = ['this is a test',
'this is really a test',
'another',
'test']
self.textarea.contents = random.choice(messages)
except KeyboardInterrupt:
self.cleanup()
self.close = True
return
class TestingUI_2(BaseUI):
'''
    My second test creates two non-bordered windows and randomly places messages in each.
'''
def setup(self):
maxy, maxx = self.main_window.getmaxyx()
self.textarea1 = Window(self.main_window, math.floor(maxy/2), maxx, 0, 0)
self.textarea2 = Window(self.main_window, math.floor(maxy/2), maxx, math.floor(maxy/2), 0)
async def test_worker_1(self):
try:
while True:
await asyncio.sleep(0)
if self.close:
return
messages = ['this is a test',
'this is really a test',
'another',
'test']
self.textarea1.contents = random.choice(messages)+" "+str(random.randint(0, 99999))
except KeyboardInterrupt:
self.cleanup()
self.close = True
return
async def test_worker_2(self):
try:
while True:
await asyncio.sleep(0)
if self.close:
return
messages = ['this is a test',
'this is really a test',
'another',
'test']
self.textarea2.contents = random.choice(messages)+" "+str(random.randint(0, 99999))
except KeyboardInterrupt:
self.cleanup()
self.close = True
return
class TestingUI_3(BaseUI):
'''
    My third test creates 2 rows of 3 bordered windows and randomly
    selects a window and changes its text.
'''
def setup(self):
self.rows = 2
self.cols = 3
self.layout = TableLayout(self.main_window, self.rows, self.cols, BorderedWindow)
async def test_worker(self):
try:
while True:
await asyncio.sleep(0.01)
r = random.randint(0, self.rows-1)
c = random.randint(0, self.cols-1)
textarea = self.layout.sub_windows[r][c]
if self.close:
return
messages = ['this is a test',
'this is really a test',
'another',
'test',
'this is a really really long test to see what happens when it gets to the end of the line']
textarea.contents = random.choice(messages)
except KeyboardInterrupt:
self.cleanup()
self.close = True
return
class TestingUI_4(BaseUI):
    '''
    My fourth test echoes each key stroke into a single bordered window.
    '''
def setup(self):
self.text_window = BorderedWindow(self.main_window)
def key_stroke_handler(self, key):
self.text_window.add(str(key))
def run_testing_ui():
try:
'''
        with TestingUI_1(rate=1) as ui:
loop = asyncio.get_event_loop()
asyncio.ensure_future(ui.test_worker())
loop.run_until_complete(ui.screen_updater())
        with TestingUI_2(rate=1) as ui:
loop = asyncio.get_event_loop()
asyncio.ensure_future(ui.test_worker_1())
asyncio.ensure_future(ui.test_worker_2())
loop.run_until_complete(ui.screen_updater())
        with TestingUI_3(rate=1) as ui:
loop = asyncio.get_event_loop()
asyncio.ensure_future(ui.test_worker())
loop.run_until_complete(ui.screen_updater())
'''
        with TestingUI_4(rate=10) as ui:
loop = asyncio.get_event_loop()
asyncio.ensure_future(ui.keyboard_listener())
loop.run_until_complete(ui.screen_updater())
except KeyboardInterrupt:
print('ending program')
if __name__=='__main__':
run_testing_ui()
|
|
import os
import io
import codecs
import yaml
import jenkins
from jenkins_jobs import cmd
from jenkins_jobs.errors import JenkinsJobsException
from tests.base import mock
from tests.cmd.test_cmd import CmdTestsBase
from tests.cmd.test_recurse_path import fake_os_walk
os_walk_return_values = {
'/jjb_projects': [
('/jjb_projects', (['dir1', 'dir2', 'dir3'], ())),
('/jjb_projects/dir1', (['bar'], ())),
('/jjb_projects/dir2', (['baz'], ())),
('/jjb_projects/dir3', ([], ())),
('/jjb_projects/dir1/bar', ([], ())),
('/jjb_projects/dir2/baz', ([], ())),
],
'/jjb_templates': [
('/jjb_templates', (['dir1', 'dir2', 'dir3'], ())),
('/jjb_templates/dir1', (['bar'], ())),
('/jjb_templates/dir2', (['baz'], ())),
('/jjb_templates/dir3', ([], ())),
('/jjb_templates/dir1/bar', ([], ())),
('/jjb_templates/dir2/baz', ([], ())),
],
'/jjb_macros': [
('/jjb_macros', (['dir1', 'dir2', 'dir3'], ())),
('/jjb_macros/dir1', (['bar'], ())),
('/jjb_macros/dir2', (['baz'], ())),
('/jjb_macros/dir3', ([], ())),
('/jjb_macros/dir1/bar', ([], ())),
('/jjb_macros/dir2/baz', ([], ())),
],
}
def os_walk_side_effects(path_name, topdown):
return fake_os_walk(os_walk_return_values[path_name])(path_name, topdown)
@mock.patch('jenkins_jobs.builder.Jenkins.get_plugins_info', mock.MagicMock)
class TestTests(CmdTestsBase):
def test_non_existing_config_dir(self):
"""
Run test mode and pass a non-existing configuration directory
"""
args = self.parser.parse_args(['test', 'foo'])
self.assertRaises(IOError, cmd.execute, args, self.config)
def test_non_existing_config_file(self):
"""
Run test mode and pass a non-existing configuration file
"""
args = self.parser.parse_args(['test', 'non-existing.yaml'])
self.assertRaises(IOError, cmd.execute, args, self.config)
def test_non_existing_job(self):
"""
Run test mode and pass a non-existing job name
(probably better to fail here)
"""
args = self.parser.parse_args(['test',
os.path.join(self.fixtures_path,
'cmd-001.yaml'),
'invalid'])
args.output_dir = mock.MagicMock()
cmd.execute(args, self.config) # probably better to fail here
def test_valid_job(self):
"""
Run test mode and pass a valid job name
"""
args = self.parser.parse_args(['test',
os.path.join(self.fixtures_path,
'cmd-001.yaml'),
'foo-job'])
args.output_dir = mock.MagicMock()
cmd.execute(args, self.config) # probably better to fail here
@mock.patch('jenkins_jobs.cmd.Builder.update_job')
def test_multi_path(self, update_job_mock):
"""
Run test mode and pass multiple paths.
"""
path_list = list(os_walk_return_values.keys())
multipath = os.pathsep.join(path_list)
args = self.parser.parse_args(['test', multipath])
args.output_dir = mock.MagicMock()
cmd.execute(args, self.config)
self.assertEqual(args.path, path_list)
update_job_mock.assert_called_with(path_list, [],
output=args.output_dir)
@mock.patch('jenkins_jobs.cmd.Builder.update_job')
@mock.patch('jenkins_jobs.cmd.os.path.isdir')
@mock.patch('jenkins_jobs.cmd.os.walk')
def test_recursive_multi_path(self, os_walk_mock, isdir_mock,
update_job_mock):
"""
Run test mode and pass multiple paths with recursive path option.
"""
os_walk_mock.side_effect = os_walk_side_effects
isdir_mock.return_value = True
path_list = os_walk_return_values.keys()
paths = []
for path in path_list:
paths.extend([p for p, _ in os_walk_return_values[path]])
multipath = os.pathsep.join(path_list)
args = self.parser.parse_args(['test', '-r', multipath])
args.output_dir = mock.MagicMock()
cmd.execute(args, self.config)
update_job_mock.assert_called_with(paths, [], output=args.output_dir)
args = self.parser.parse_args(['test', multipath])
self.config.set('job_builder', 'recursive', 'True')
cmd.execute(args, self.config)
update_job_mock.assert_called_with(paths, [], output=args.output_dir)
@mock.patch('jenkins_jobs.cmd.Builder.update_job')
@mock.patch('jenkins_jobs.cmd.os.path.isdir')
@mock.patch('jenkins_jobs.cmd.os.walk')
def test_recursive_multi_path_with_excludes(self, os_walk_mock, isdir_mock,
update_job_mock):
"""
Run test mode and pass multiple paths with recursive path option.
"""
os_walk_mock.side_effect = os_walk_side_effects
isdir_mock.return_value = True
path_list = os_walk_return_values.keys()
paths = []
for path in path_list:
paths.extend([p for p, __ in os_walk_return_values[path]
if 'dir1' not in p and 'dir2' not in p])
multipath = os.pathsep.join(path_list)
args = self.parser.parse_args(['test', '-r', multipath, '-x',
'dir1:dir2'])
args.output_dir = mock.MagicMock()
cmd.execute(args, self.config)
update_job_mock.assert_called_with(paths, [], output=args.output_dir)
def test_console_output(self):
"""
Run test mode and verify that resulting XML gets sent to the console.
"""
console_out = io.BytesIO()
with mock.patch('sys.stdout', console_out):
cmd.main(['test', os.path.join(self.fixtures_path,
'cmd-001.yaml')])
xml_content = codecs.open(os.path.join(self.fixtures_path,
'cmd-001.xml'),
'r', 'utf-8').read()
self.assertEqual(console_out.getvalue().decode('utf-8'), xml_content)
def test_config_with_test(self):
"""
Run test mode and pass a config file
"""
args = self.parser.parse_args(['--conf',
os.path.join(self.fixtures_path,
'cmd-001.conf'),
'test',
os.path.join(self.fixtures_path,
'cmd-001.yaml'),
'foo-job'])
config = cmd.setup_config_settings(args)
self.assertEqual(config.get('jenkins', 'url'),
"http://test-jenkins.with.non.default.url:8080/")
@mock.patch('jenkins_jobs.builder.YamlParser.generateXML')
@mock.patch('jenkins_jobs.builder.ModuleRegistry')
def test_plugins_info_stub_option(self, registry_mock, generateXML_mock):
"""
Test handling of plugins_info stub option.
"""
plugins_info_stub_yaml_file = os.path.join(self.fixtures_path,
'plugins-info.yaml')
args = ['--conf',
os.path.join(self.fixtures_path, 'cmd-001.conf'),
'test',
'-p',
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
args = self.parser.parse_args(args)
with mock.patch('sys.stdout'):
cmd.execute(args, self.config) # probably better to fail here
with open(plugins_info_stub_yaml_file, 'r') as yaml_file:
plugins_info_list = yaml.load(yaml_file)
registry_mock.assert_called_with(self.config, plugins_info_list)
@mock.patch('jenkins_jobs.builder.YamlParser.generateXML')
@mock.patch('jenkins_jobs.builder.ModuleRegistry')
def test_bogus_plugins_info_stub_option(self, registry_mock,
generateXML_mock):
"""
Verify that a JenkinsJobException is raised if the plugins_info stub
file does not yield a list as its top-level object.
"""
plugins_info_stub_yaml_file = os.path.join(self.fixtures_path,
'bogus-plugins-info.yaml')
args = ['--conf',
os.path.join(self.fixtures_path, 'cmd-001.conf'),
'test',
'-p',
plugins_info_stub_yaml_file,
os.path.join(self.fixtures_path, 'cmd-001.yaml')]
args = self.parser.parse_args(args)
with mock.patch('sys.stdout'):
e = self.assertRaises(JenkinsJobsException, cmd.execute,
args, self.config)
self.assertIn("must contain a Yaml list", str(e))
class TestJenkinsGetPluginInfoError(CmdTestsBase):
""" This test class is used for testing the 'test' subcommand when we want
to validate its behavior without mocking
jenkins_jobs.builder.Jenkins.get_plugins_info
"""
@mock.patch('jenkins.Jenkins.get_plugins_info')
def test_console_output_jenkins_connection_failure_warning(
self, get_plugins_info_mock):
"""
Run test mode and verify that failed Jenkins connection attempt
exception does not bubble out of cmd.main. Ideally, we would also test
that an appropriate message is logged to stderr but it's somewhat
difficult to figure out how to actually enable stderr in this test
suite.
"""
get_plugins_info_mock.side_effect = \
jenkins.JenkinsException("Connection refused")
with mock.patch('sys.stdout'):
try:
cmd.main(['test', os.path.join(self.fixtures_path,
'cmd-001.yaml')])
except jenkins.JenkinsException:
self.fail("jenkins.JenkinsException propagated to main")
except:
pass # only care about jenkins.JenkinsException for now
|
|
# Powered by Python 2.7
# the order is:
# fp7graphBuilder
# fp7projector
# fp7filter1st2yrs
# fp7dropOldOrgs
# fp7edgeStacker
# fp7stablePartnership
# fp7findIntermediatedOrgs
# deathStarPrint
# To cancel the modifications performed by the script
# on the current graph, click on the undo button.
# Some useful keyboards shortcuts :
# * Ctrl + D : comment selected lines.
# * Ctrl + Shift + D : uncomment selected lines.
# * Ctrl + I : indent selected lines.
# * Ctrl + Shift + I : unindent selected lines.
# * Ctrl + Return : run script.
# * Ctrl + F : find selected text.
# * Ctrl + R : replace selected text.
# * Ctrl + Space : show auto-completion dialog.
import time
import datetime
import csv
from tulip import *
# the updateVisualization(centerViews = True) function can be called
# during script execution to update the opened views
# the pauseScript() function can be called to pause the script execution.
# To resume the script execution, you will have to click on the "Run script " button.
# the runGraphScript(scriptFile, graph) function can be called to launch another edited script on a tlp.Graph object.
# The scriptFile parameter defines the script name to call (in the form [a-zA-Z0-9_]+.py)
# the main(graph) function must be defined
# to run the script on the current graph
# start the clock
start_script = datetime.datetime.now()
# initialize variables
dirPath = '/Users/albertocottica/github/local/eu-research-funding-network/FP7/FP7Data/'
def loadProjects():
    ''' loads the projects CSV file and returns the rows as a list of dicts'''
projectsFile = dirPath + 'fp7projects.csv'
projects = csv.DictReader(open(projectsFile, 'rb'), delimiter=';')
projectsList = []
for line in projects:
projectsList.append(line)
return projectsList
def loadOrgs():
    ''' loads the orgs CSV file and returns the rows as a list of dicts'''
orgsFile = dirPath + 'fp7orgs.csv'
orgs = csv.DictReader(open(orgsFile, 'rb'), delimiter=';') # in FP7 data it's comma separated
orgsList = []
for line in orgs:
orgsList.append(line)
return orgsList
def loadEdges():
    '''loads the edges file and returns the rows as a list of dicts'''
edgesFile = dirPath + 'fp7edges.csv'
edges = csv.DictReader(open(edgesFile, 'rb'), delimiter = '\t')
edgesList = []
for edge in edges:
edgesList.append(edge)
return edgesList
def cleanAmount(dirtyString):
'''
(str) => float
takes an amount given as a string in the CSV, cleans the string and
converts it to float
'''
if dirtyString == '':
print ('Missing value!')
return 0
else:
replaceComma = dirtyString.replace(',', '.', 1)
stripEnd = replaceComma.strip('.')
return float(stripEnd)
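# Worked examples (hedged, for illustration only):
#   cleanAmount('1234,56')  ->  1234.56   (decimal comma converted to a dot)
#   cleanAmount('')         ->  0         (missing values are reported and treated as zero)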
def main(graph):
viewBorderColor = graph.getColorProperty("viewBorderColor")
viewBorderWidth = graph.getDoubleProperty("viewBorderWidth")
viewColor = graph.getColorProperty("viewColor")
viewFont = graph.getStringProperty("viewFont")
viewFontSize = graph.getIntegerProperty("viewFontSize")
viewLabel = graph.getStringProperty("viewLabel")
viewLabelBorderColor = graph.getColorProperty("viewLabelBorderColor")
viewLabelBorderWidth = graph.getDoubleProperty("viewLabelBorderWidth")
viewLabelColor = graph.getColorProperty("viewLabelColor")
viewLabelPosition = graph.getIntegerProperty("viewLabelPosition")
viewLayout = graph.getLayoutProperty("viewLayout")
viewMetaGraph = graph.getGraphProperty("viewMetaGraph")
viewMetric = graph.getDoubleProperty("viewMetric")
viewRotation = graph.getDoubleProperty("viewRotation")
viewSelection = graph.getBooleanProperty("viewSelection")
viewShape = graph.getIntegerProperty("viewShape")
viewSize = graph.getSizeProperty("viewSize")
viewSrcAnchorShape = graph.getIntegerProperty("viewSrcAnchorShape")
viewSrcAnchorSize = graph.getSizeProperty("viewSrcAnchorSize")
viewTexture = graph.getStringProperty("viewTexture")
viewTgtAnchorShape = graph.getIntegerProperty("viewTgtAnchorShape")
viewTgtAnchorSize = graph.getSizeProperty("viewTgtAnchorSize")
# for n in graph.getNodes():
# print n
print ('Initializing graph...')
projectNode = graph.getBooleanProperty('projectNode') # this property is common to both node types
# create node properties: start with properties related to projects
rcn = graph.getStringProperty('rcn')
reference = graph.getStringProperty('reference')
programmeCode = graph.getStringProperty('programme')
topics = graph.getStringProperty('topics')
subprogramme = graph.getStringProperty('subprogramme')
title = graph.getStringProperty('acronym')
startDate = graph.getStringProperty('startDate')
endDate = graph.getStringProperty('endDate')
totalCost = graph.getDoubleProperty('totalCost')
ecMaxContribution = graph.getDoubleProperty('ecMaxContribution')
call = graph.getStringProperty('call')
fundingScheme = graph.getStringProperty('fundingScheme')
coordinator = graph.getStringProperty('coordinator')
coordinatorCountry = graph.getStringProperty('coordinatorCountry')
# now create the properties of org-type nodes
newOrgId = graph.getStringProperty('new_org_id')
projectReference = graph.getStringProperty('projectReference')
organisationName = graph.getStringProperty('name')
organisationShortName = graph.getStringProperty('ShortName')
organisationNameNormal = graph.getStringProperty('name_normal')
organisationCountry = graph.getStringProperty('country')
organisationNumProj = graph.getDoubleProperty('numProj')
organisationTotalContribution = graph.getDoubleProperty('totContribution')
# now create the properties of edges
sourceId = graph.getStringProperty('sourceId')
targetId = graph.getStringProperty('targetId')
projectsList = loadProjects() # we execute the functions to run the files
orgsList = loadOrgs()
edgesList = loadEdges()
# add the projects as nodes from a list of projects, assigning their characteristics to node properties
# requires plenty of OpenRefine
# create project-type nodes and populate their properties
print ('Adding project nodes...')
p2n = {} # Ben's map trick. This maps the rcn property of projects to their node objects.
counter = 0
for p in projectsList:
if p['startDate'] < '2009-04-15':
counter += 1
# print ('Adding project ' + str(counter))
n = graph.addNode()
projectNode[n] = True # this is set to True for all projects!
rcnCode = p['rcn']
rcn[n] = rcnCode
p2n[rcnCode] = n # update the map
reference[n] = p['reference']
programmeCode[n] = p['programme']
title[n] = p['acronym']
startDate[n] = p['startDate']
endDate[n] = p['endDate']
totalCost[n] = cleanAmount(p['totalCost'])
ecMaxContribution[n] = cleanAmount(p['ecMaxContribution'])
call[n] = p['call']
fundingScheme[n] = p['fundingScheme']
coordinator[n] = p['coordinator']
coordinatorCountry[n] = p['coordinatorCountry']
# now add the org-type nodes and populate their properties
print ('Adding organisation nodes...')
counter = 0
o2n = {} # Ben's map trick
for o in orgsList:
counter += 1
# print ('Adding org ' + str(counter))
# check that the organisation has not already been added
oNewOrgId = str(o['new_org_id'])
oName = o['name']
n = graph.addNode()
projectNode[n] = False # Set to False for all orgs!
organisationName[n] = oName
organisationShortName[n] = o['shortName']
newOrgId[n] = oNewOrgId
# organisationNameNormal[n] = o['name_normal'] missing in FP7 data
organisationCountry[n] = o['country']
# organisationNumProj[n] = float(o['numProj']) # this field is not present in FP7 data
# organisationTotalContribution[n] = cleanAmount(o['totContribution']) missing from FP7 data
o2n[oNewOrgId] = n #update the map
# create an edge
missing = 0
for e in edgesList:
# source = o2n [e['new_org_id']]
# target = p2n [e['projectRcn']]
# newEdge = graph.addEdge(source, target)
try:
sourceString = e['new_org_id'].strip() # this is needed because PIC numbers in the original file start with a space, example ' 986355365'
source = o2n [sourceString]
target = p2n [e['projectRcn']]
newEdge = graph.addEdge(source, target)
sourceId[newEdge] = e['new_org_id']
targetId[newEdge] = e['projectRcn']
except:
missing += 1
print ('Could not add ' + str(missing) + ' edges.')
end_script = datetime.datetime.now()
running_time = end_script - start_script
print ('Executed in ' + str(running_time))
|
|
import codecs
import errno
import os
import sys
import time
from typing import Iterable, Optional
from .helpers import reraise
from .languages.javascript.generator import Generator as JavascriptGenerator
from .languages.python.generator import Generator as PythonGenerator
from .runtime import python, exceptions
from .syntaxes.chameleon import parse as parse_chameleon
from .syntaxes.tonnikala import (parse as parse_tonnikala,
parse_js as parse_js_tonnikala)
_make_traceback = None
MIN_CHECK_INTERVAL = 0.25
try: # pragma: python3
import builtins as __builtin__
except ImportError: # pragma: python2
# noinspection PyUnresolvedReferences,PyCompatibility
exec('import __builtin__')
class Helpers():
pass
escape = python.escape
helpers = Helpers()
helpers.literal = lambda x: x
helpers.gettext = lambda x: x
helpers.egettext = lambda x: escape(x)
def get_builtins_with_chain(chain=(helpers,)):
builtins = {}
for i in [__builtin__] + list(reversed(chain)):
for j in dir(i):
if not j.startswith('__') and not j.endswith('__'):
builtins[j] = getattr(i, j)
return builtins
_builtins = None
def get_builtins():
global _builtins
if _builtins is None:
_builtins = get_builtins_with_chain()
return _builtins
NOT_FOUND = object()
def make_template_context(context):
rv = get_builtins().copy()
rv.update(context)
return rv
_NO = object()
def handle_exception(exc_info=None, source_hint=None, tb_override=_NO):
"""Exception handling helper. This is used internally to either raise
rewritten exceptions or return a rendered traceback for the template.
"""
global _make_traceback
if exc_info is None: # pragma: no cover
exc_info = sys.exc_info()
# the debugging module is imported when it's used for the first time.
# we're doing a lot of stuff there and for applications that do not
# get any exceptions in template rendering there is no need to load
# all of that.
if _make_traceback is None:
from .runtime.debug import make_traceback as _make_traceback
exc_type, exc_value, tb = exc_info
if tb_override is not _NO: # pragma: no cover
tb = tb_override
traceback = _make_traceback((exc_type, exc_value, tb), source_hint)
exc_type, exc_value, tb = traceback.standard_exc_info
reraise(exc_type, exc_value, tb)
class Template(object):
handle_exception = staticmethod(handle_exception)
def __init__(self, binder):
self.binder_func = binder
def bind(self, context):
self.binder_func(context)
def render_to_buffer(self, context, funcname='__main__'):
try:
context = make_template_context(context)
self.bind(context)
return context[funcname]()
except Exception as e:
exc_info = sys.exc_info()
try:
self.handle_exception(exc_info)
finally:
del exc_info
def render(self, context, funcname='__main__'):
return self.render_to_buffer(context, funcname).join()
parsers = {
'tonnikala': parse_tonnikala,
'js_tonnikala': parse_js_tonnikala,
'chameleon': parse_chameleon,
}
class TemplateInfo(object):
def __init__(self, filename, lnotab):
self.filename = filename
self.lnotab = lnotab
def get_corresponding_lineno(self, line):
return self.lnotab.get(line, line)
def _new_globals(runtime):
return {
'__TK__runtime': runtime,
'__TK__mkbuffer': runtime.Buffer,
'__TK__escape': runtime.escape,
'__TK__output_attrs': runtime.output_attrs,
'literal': helpers.literal
}
class Loader(object):
handle_exception = staticmethod(handle_exception)
runtime = python.TonnikalaRuntime
def __init__(self, debug=False, syntax='tonnikala', translatable=False):
self.debug = debug
self.syntax = syntax
self.translatable = translatable
def load_string(self, string, filename="<string>"):
parser_func = parsers.get(self.syntax)
if not parser_func:
            raise ValueError("Invalid parser syntax %s: valid syntaxes: %r"
                             % (self.syntax, sorted(parsers.keys())))
try:
tree = parser_func(filename, string, translatable=self.translatable)
gen = PythonGenerator(tree)
code = gen.generate_ast()
exc_info = None
except exceptions.TemplateSyntaxError as e:
if e.source is None:
e.source = string
if e.filename is None:
e.filename = filename
exc_info = sys.exc_info()
if exc_info:
self.handle_exception(exc_info, string, tb_override=None)
return
if self.debug:
import ast
print(ast.dump(code, True, True))
try:
import astor
print(astor.codegen.to_source(code))
except ImportError:
print("Not reversing AST to source as astor was not installed")
runtime = self.runtime()
runtime.loader = self
glob = _new_globals(runtime)
compiled = compile(code, filename, 'exec')
glob['__TK_template_info__'] = TemplateInfo(filename, gen.lnotab_info())
exec(compiled, glob, glob)
template_func = glob['__TK__binder']
return Template(template_func)
class FileLoader(Loader):
def __init__(
self,
paths: Iterable[str] = (),
debug: bool = False,
syntax: str = 'tonnikala',
*args,
**kwargs
):
super(FileLoader, self).__init__(*args, debug=debug, syntax=syntax,
**kwargs)
self.cache = {}
self.paths = list(paths)
self.reload = False
self._last_reload_check = time.time()
def add_path(self, *a: str) -> None:
self.paths.extend(a)
def resolve(self, name: str) -> Optional[str]:
if os.path.isabs(name):
if os.path.exists(name):
return name
for i in self.paths:
path = os.path.abspath(os.path.join(i, name))
if os.path.exists(path):
return path
return None
def set_reload(self, flag: bool) -> None:
self.reload = flag
def _maybe_purge_cache(self):
"""
If enough time since last check has elapsed, check if any
of the cached templates has changed. If any of the template
files were deleted, remove that file only. If any were
changed, then purge the entire cache.
"""
if self._last_reload_check + MIN_CHECK_INTERVAL > time.time():
return
for name, tmpl in list(self.cache.items()):
            if not os.path.exists(tmpl.path):
self.cache.pop(name)
continue
if os.stat(tmpl.path).st_mtime > tmpl.mtime:
self.cache.clear()
break
self._last_reload_check = time.time()
def load(self, name):
"""
        If not yet in the cache, load the named template and compile it,
        placing it into the cache.
        If in the cache, return the cached template.
"""
if self.reload:
self._maybe_purge_cache()
template = self.cache.get(name)
if template:
return template
path = self.resolve(name)
if not path:
raise OSError(errno.ENOENT, "File not found: %s" % name)
with codecs.open(path, 'r', encoding='UTF-8') as f:
contents = f.read()
mtime = os.fstat(f.fileno()).st_mtime
template = self.load_string(contents, filename=path)
template.mtime = mtime
template.path = path
self.cache[name] = template
return template
class JSLoader(object):
def __init__(
self,
debug: bool = False,
syntax: str = 'js_tonnikala',
minify: bool = False
):
self.debug = debug
self.syntax = syntax
self.minify = minify
def load_string(self, string: str, filename: str = "<string>"):
parser_func = parsers.get(self.syntax)
if not parser_func:
raise ValueError("Invalid parser syntax %s: valid syntaxes: %r"
% (self.syntax, sorted(parsers.keys())))
tree = parser_func(filename, string)
code = JavascriptGenerator(tree).generate_ast()
if self.debug:
print("JS template output code for %s" % filename)
print(code)
if self.minify:
from slimit import minify
code = minify(code, mangle=True)
return code
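# --- Usage sketch (not part of the original module) ---------------------------
# A hedged example of the file-based loader above; the search path and the
# template file name are hypothetical.
#
#     loader = FileLoader(paths=['templates'])
#     loader.set_reload(True)                  # re-check mtimes while developing
#     template = loader.load('index.tk')       # compiled once, then cached
#     markup = template.render({'title': 'Hello'})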
|
|
"""Affinity Propagation clustering algorithm.
This module is an extension of sklearn's Affinity Propagation to take into
account sparse matrices, which are not currently supported.
Author: Federico Tomasi
Copyright (c) 2016, Federico Tomasi.
Licensed under the FreeBSD license (see LICENSE.txt).
Original authors for sklearn:
# Author: Alexandre Gramfort [email protected]
# Gael Varoquaux [email protected]
# License: BSD 3 clause
"""
import numpy as np
from scipy.sparse import coo_matrix
from scipy.sparse import issparse
from sklearn.cluster import AffinityPropagation
from sklearn.utils import as_float_array, check_array
from sklearn.metrics import euclidean_distances
from ._sparse_affinity_propagation import (
parse_preference, _set_sparse_diagonal,
_sparse_row_maxindex, _sparse_row_sum_update,
_update_r_max_row, remove_single_samples)
def sparse_ap(S, preference=None, convergence_iter=15, max_iter=200,
damping=0.5, copy=True, verbose=False,
return_n_iter=False, convergence_percentage=0.999999):
"""Perform Affinity Propagation Clustering of data.
    Same as affinity_propagation(), but assumes S is a sparse matrix.
Parameters
----------
S : array-like, shape (n_samples, n_samples)
Matrix of similarities between points
preference : array-like, shape (n_samples,) or float, optional
Preferences for each point - points with larger values of
preferences are more likely to be chosen as exemplars. The number of
exemplars, i.e. of clusters, is influenced by the input preferences
value. If the preferences are not passed as arguments, they will be
set to the median of the input similarities (resulting in a moderate
number of clusters). For a smaller amount of clusters, this can be set
to the minimum value of the similarities.
convergence_iter : int, optional, default: 15
Number of iterations with no change in the number
of estimated clusters that stops the convergence.
max_iter : int, optional, default: 200
Maximum number of iterations
damping : float, optional, default: 0.5
Damping factor between 0.5 and 1.
copy : boolean, optional, default: True
If copy is False, the affinity matrix is modified inplace by the
algorithm, for memory efficiency.
Unused, but left for sklearn affinity propagation consistency.
verbose : boolean, optional, default: False
The verbosity level
return_n_iter : bool, default False
Whether or not to return the number of iterations.
Returns
-------
cluster_centers_indices : array, shape (n_clusters,)
index of clusters centers
labels : array, shape (n_samples,)
cluster labels for each point
n_iter : int
number of iterations run. Returned only if `return_n_iter` is
set to True.
Notes
-----
See examples/cluster/plot_affinity_propagation.py for an example.
References
----------
Brendan J. Frey and Delbert Dueck, "Clustering by Passing Messages
Between Data Points", Science Feb. 2007
"""
# rows, cols, data = matrix_to_row_col_data(S)
rows, cols, data = S.row, S.col, as_float_array(S.data, copy=copy)
n_samples = S.shape[0]
preferences = parse_preference(preference, n_samples, data)
if damping < 0.5 or damping >= 1:
raise ValueError('damping must be >= 0.5 and < 1')
if convergence_percentage is None:
convergence_percentage = 1
random_state = np.random.RandomState(0)
# Place preference on the diagonal of S
rows, cols, data = _set_sparse_diagonal(rows, cols, data, preferences)
rows, cols, data, left_ori_dict, idx_single_samples, n_samples = \
remove_single_samples(rows, cols, data, n_samples)
data_len = data.shape[0]
row_indptr = np.append(
np.insert(np.where(np.diff(rows) > 0)[0] + 1, 0, 0), data_len)
row_to_col_idx = np.lexsort((rows, cols))
rows_colbased = rows[row_to_col_idx]
cols_colbased = cols[row_to_col_idx]
col_to_row_idx = np.lexsort((cols_colbased, rows_colbased))
col_indptr = np.append(
np.insert(np.where(np.diff(cols_colbased) > 0)[0] + 1, 0, 0), data_len)
kk_row_index = np.where(rows == cols)[0]
kk_col_index = np.where(rows_colbased == cols_colbased)[0]
# Initialize messages
A = np.zeros(data_len, dtype=float)
R = np.zeros(data_len, dtype=float)
# Intermediate results
tmp = np.zeros(data_len, dtype=float)
# Remove degeneracies
data += ((np.finfo(np.double).eps * data + np.finfo(np.double).tiny * 100)
* random_state.randn(data_len))
labels = np.empty(0, dtype=np.int)
last_labels = None
convergence_count = 0
for it in range(max_iter):
last_labels = labels.copy()
# tmp = A + S; compute responsibilities
np.add(A, data, tmp)
np.subtract(data, _update_r_max_row(tmp, row_indptr), tmp)
# Damping
tmp *= 1. - damping
R *= damping
R += tmp
# tmp = Rp; compute availabilities
np.maximum(R, 0, tmp)
tmp[kk_row_index] = R[kk_row_index]
# tmp = -Anew
tmp = tmp[row_to_col_idx]
_sparse_row_sum_update(tmp, col_indptr, kk_col_index)
tmp = tmp[col_to_row_idx]
# Damping
tmp *= 1. - damping
A *= damping
A -= tmp
# Check for convergence
labels = _sparse_row_maxindex(A + R, row_indptr)
if labels.shape[0] == 0 or np.sum(last_labels == labels) / \
float(labels.shape[0]) < convergence_percentage:
convergence_count = 0
else:
convergence_count += 1
if convergence_count == convergence_iter:
if verbose:
print("Converged after %d iterations." % it)
break
else:
if verbose:
print("Did not converge")
if idx_single_samples is None or len(idx_single_samples) == 0:
cluster_centers_indices = cols[labels]
else:
cluster_centers_indices = [left_ori_dict[el] for el in cols[labels]]
for ind in sorted(idx_single_samples):
cluster_centers_indices.insert(ind, ind)
cluster_centers_indices = np.asarray(cluster_centers_indices)
_centers = np.unique(cluster_centers_indices)
lookup = {v: i for i, v in enumerate(_centers)}
labels = np.array([lookup[x] for x in cluster_centers_indices], dtype=int)
if return_n_iter:
return cluster_centers_indices, labels, it + 1
else:
return cluster_centers_indices, labels
###############################################################################
class AffinityPropagation(AffinityPropagation):
"""Perform Affinity Propagation Clustering of data.
Read more in the :ref:`User Guide <affinity_propagation>`.
Parameters
----------
damping : float, optional, default: 0.5
Damping factor between 0.5 and 1.
convergence_iter : int, optional, default: 15
Number of iterations with no change in the number
of estimated clusters that stops the convergence.
max_iter : int, optional, default: 200
Maximum number of iterations.
copy : boolean, optional, default: True
Make a copy of input data.
preference : array-like, shape (n_samples,) or float, optional
Preferences for each point - points with larger values of
preferences are more likely to be chosen as exemplars. The number
of exemplars, ie of clusters, is influenced by the input
preferences value. If the preferences are not passed as arguments,
they will be set to the median of the input similarities.
affinity : string, optional, default=``euclidean``
Which affinity to use. At the moment ``precomputed`` and
``euclidean`` are supported. ``euclidean`` uses the
negative squared euclidean distance between points.
verbose : boolean, optional, default: False
Whether to be verbose.
Attributes
----------
cluster_centers_indices_ : array, shape (n_clusters,)
Indices of cluster centers
cluster_centers_ : array, shape (n_clusters, n_features)
Cluster centers (if affinity != ``precomputed``).
labels_ : array, shape (n_samples,)
Labels of each point
affinity_matrix_ : array, shape (n_samples, n_samples)
Stores the affinity matrix used in ``fit``.
n_iter_ : int
Number of iterations taken to converge.
Notes
-----
See examples/cluster/plot_affinity_propagation.py for an example.
The algorithmic complexity of affinity propagation is quadratic
in the number of points.
References
----------
Brendan J. Frey and Delbert Dueck, "Clustering by Passing Messages
Between Data Points", Science Feb. 2007
"""
def __init__(self, damping=.5, max_iter=200, convergence_iter=15,
copy=True, preference=None, affinity='euclidean',
verbose=False, convergence_percentage=0.999999):
super(AffinityPropagation, self).__init__(
damping=damping,
max_iter=max_iter,
convergence_iter=convergence_iter,
copy=copy,
verbose=verbose,
preference=preference,
affinity=affinity)
self.convergence_percentage = convergence_percentage
def fit(self, X, **kwargs):
"""Apply affinity propagation clustering.
Create affinity matrix from negative euclidean distances if required.
Parameters
----------
X: array-like or sparse matrix,
shape (n_samples, n_features) or (n_samples, n_samples)
Data matrix or, if affinity is ``precomputed``, matrix of
similarities / affinities.
"""
if not issparse(X):
return super(AffinityPropagation, self).fit(X, **kwargs)
# Since X is sparse, this converts it in a coo_matrix if required
X = check_array(X, accept_sparse='coo')
if self.affinity == "precomputed":
self.affinity_matrix_ = X
elif self.affinity == "euclidean":
self.affinity_matrix_ = coo_matrix(
-euclidean_distances(X, squared=True))
else:
raise ValueError("Affinity must be 'precomputed' or "
"'euclidean'. Got %s instead"
% str(self.affinity))
self.cluster_centers_indices_, self.labels_, self.n_iter_ = \
sparse_ap(
self.affinity_matrix_, self.preference, max_iter=self.max_iter,
convergence_iter=self.convergence_iter, damping=self.damping,
copy=self.copy, verbose=self.verbose, return_n_iter=True,
convergence_percentage=self.convergence_percentage)
if self.affinity != "precomputed":
self.cluster_centers_ = X.data[self.cluster_centers_indices_].copy()
return self
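# --- Usage sketch (not part of the original module) ---------------------------
# A hedged example of clustering a precomputed sparse similarity matrix with
# the class above; the data below is random and purely illustrative.
#
#     import numpy as np
#     from scipy.sparse import coo_matrix
#
#     similarities = coo_matrix(-np.random.rand(20, 20))   # negative "distances"
#     ap = AffinityPropagation(affinity='precomputed', damping=0.6)
#     ap.fit(similarities)
#     print(ap.cluster_centers_indices_, ap.labels_)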
|
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Contains the base Layer class, from which all layers inherit."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.eager import context
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.util import function_utils
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
InputSpec = base_layer.InputSpec # pylint: disable=invalid-name
@tf_export('layers.Layer')
class Layer(base_layer.Layer):
"""Base layer class.
It is considered legacy, and we recommend the use of `tf.keras.layers.Layer`
instead.
Arguments:
trainable: Boolean, whether the layer's variables should be trainable.
name: String name of the layer.
dtype: Default dtype of the layer's weights (default of `None` means use the
type of the first input).
Read-only properties:
name: The name of the layer (string).
dtype: Default dtype of the layer's weights (default of `None` means use the
type of the first input).
trainable_variables: List of trainable variables.
non_trainable_variables: List of non-trainable variables.
variables: List of all variables of this layer, trainable and
non-trainable.
updates: List of update ops of this layer.
losses: List of losses added by this layer.
trainable_weights: List of variables to be included in backprop.
non_trainable_weights: List of variables that should not be
included in backprop.
weights: The concatenation of the lists trainable_weights and
non_trainable_weights (in this order).
Mutable properties:
trainable: Whether the layer should be trained (boolean).
input_spec: Optional (list of) `InputSpec` object(s) specifying the
constraints on inputs that can be accepted by the layer.
"""
def __init__(self, trainable=True, name=None, dtype=None,
**kwargs):
# For backwards compatibility, legacy layers do not use `ResourceVariable`
# by default.
self._use_resource_variables = False
scope = kwargs.pop('_scope', None)
self._reuse = kwargs.pop('_reuse', None)
# Avoid an incorrect lint error
self._trainable_weights = []
self.built = False
super(Layer, self).__init__(trainable=trainable, name=name, dtype=dtype,
**kwargs)
self._graph = None
self._call_has_scope_arg = 'scope' in self._call_fn_args
if scope:
with vs.variable_scope(scope) as captured_scope:
self._scope = captured_scope
else:
self._scope = None
self._current_scope = None
@property
def graph(self):
if context.executing_eagerly():
raise RuntimeError('Layer.graph not supported when executing eagerly.')
return self._graph
def _init_set_name(self, name):
# Determine layer name (non-unique).
if isinstance(name, vs.VariableScope):
base_name = name.name
else:
base_name = name
self._name = name
if not name:
self._name, base_name = self._make_unique_name()
self._base_name = base_name
def _make_unique_name(self, name_uid_map=None, avoid_names=None,
namespace='', zero_based=False):
base_name = base_layer.to_snake_case(self.__class__.__name__)
name = base_layer.unique_layer_name(base_name,
name_uid_map=name_uid_map,
avoid_names=avoid_names,
namespace=namespace,
zero_based=zero_based)
return (name, base_name)
@property
def scope_name(self):
if not self._scope:
raise ValueError('No name available for layer scope because the layer "' +
self._name + '" has not been used yet. The scope name ' +
                       'is determined the first time the layer instance is ' +
'called. You must therefore call the layer before ' +
'querying `scope_name`.')
return self._scope.name
def add_loss(self, losses, inputs=None):
previous_losses_length = len(self._losses)
super(Layer, self).add_loss(losses, inputs=inputs)
# TODO(fchollet): deprecate collection below.
new_losses = self._losses[previous_losses_length:]
_add_elements_to_collection(new_losses, ops.GraphKeys.REGULARIZATION_LOSSES)
def _name_scope(self):
"""Determines op naming for the Layer."""
return self._current_scope.original_name_scope
def _set_scope(self, scope=None):
if self._scope is None:
# If constructed with _scope=None, lazy setting of scope.
if self._reuse:
with vs.variable_scope(
scope if scope is not None else self._base_name) as captured_scope:
self._scope = captured_scope
else:
with vs.variable_scope(
scope, default_name=self._base_name) as captured_scope:
self._scope = captured_scope
def add_weight(self,
name,
shape,
dtype=None,
initializer=None,
regularizer=None,
trainable=None,
constraint=None,
use_resource=None,
synchronization=vs.VariableSynchronization.AUTO,
aggregation=vs.VariableAggregation.NONE,
partitioner=None):
"""Adds a new variable to the layer, or gets an existing one; returns it.
Arguments:
name: variable name.
shape: variable shape.
dtype: The type of the variable. Defaults to `self.dtype` or `float32`.
initializer: initializer instance (callable).
regularizer: regularizer instance (callable).
trainable: whether the variable should be part of the layer's
"trainable_variables" (e.g. variables, biases)
or "non_trainable_variables" (e.g. BatchNorm mean, stddev).
Note, if the current variable scope is marked as non-trainable
then this parameter is ignored and any added variables are also
marked as non-trainable. `trainable` defaults to `True` unless
`synchronization` is set to `ON_READ`.
constraint: constraint instance (callable).
use_resource: Whether to use `ResourceVariable`.
      synchronization: Indicates when a distributed variable will be
aggregated. Accepted values are constants defined in the class
`tf.VariableSynchronization`. By default the synchronization is set to
`AUTO` and the current `DistributionStrategy` chooses
when to synchronize. If `synchronization` is set to `ON_READ`,
`trainable` must not be set to `True`.
aggregation: Indicates how a distributed variable will be aggregated.
Accepted values are constants defined in the class
`tf.VariableAggregation`.
partitioner: (optional) partitioner instance (callable). If
provided, when the requested variable is created it will be split
into multiple partitions according to `partitioner`. In this case,
an instance of `PartitionedVariable` is returned. Available
partitioners include `tf.fixed_size_partitioner` and
`tf.variable_axis_size_partitioner`. For more details, see the
documentation of `tf.get_variable` and the "Variable Partitioners
and Sharding" section of the API guide.
Returns:
The created variable. Usually either a `Variable` or `ResourceVariable`
instance. If `partitioner` is not `None`, a `PartitionedVariable`
instance is returned.
Raises:
      RuntimeError: If called with partitioned variable regularization and
eager execution is enabled.
ValueError: When trainable has been set to True with synchronization
set as `ON_READ`.
"""
if synchronization == vs.VariableSynchronization.ON_READ:
if trainable:
raise ValueError(
'Synchronization value can be set to '
'VariableSynchronization.ON_READ only for non-trainable variables. '
'You have specified trainable=True and '
'synchronization=VariableSynchronization.ON_READ.')
else:
# Set trainable to be false when variable is to be synced on read.
trainable = False
elif trainable is None:
trainable = True
def _should_add_regularizer(variable, existing_variable_set):
if isinstance(variable, tf_variables.PartitionedVariable):
for var in variable:
if var in existing_variable_set:
return False
return True
else:
return variable not in existing_variable_set
init_graph = None
if not context.executing_eagerly():
default_graph = ops.get_default_graph()
if default_graph.building_function:
with ops.init_scope():
# Retrieve the variables from the graph into which variables
# will be lifted; if initialization ops will be lifted into
# the eager context, then there is nothing to retrieve, since variable
# collections are not supported when eager execution is enabled.
if not context.executing_eagerly():
init_graph = ops.get_default_graph()
existing_variables = set(tf_variables.global_variables())
else:
# Initialization ops will not be lifted out of the default graph.
init_graph = default_graph
existing_variables = set(tf_variables.global_variables())
if dtype is None:
dtype = self.dtype or dtypes.float32
self._set_scope(None)
reuse = self.built or self._reuse
prev_len_trainable = len(self._trainable_weights)
with vs.variable_scope(
self._scope, reuse=reuse, auxiliary_name_scope=False) as scope:
self._current_scope = scope
with ops.name_scope(self._name_scope()):
use_resource = (use_resource or
self._use_resource_variables or
scope.use_resource)
if initializer is None:
initializer = scope.initializer
variable = super(Layer, self).add_weight(
name,
shape,
dtype=dtypes.as_dtype(dtype),
initializer=initializer,
trainable=trainable,
constraint=constraint,
partitioner=partitioner,
use_resource=use_resource,
synchronization=synchronization,
aggregation=aggregation,
getter=vs.get_variable)
if regularizer:
if context.executing_eagerly() or _should_add_regularizer(
variable, existing_variables):
self._handle_weight_regularization(name, variable, regularizer)
if init_graph is not None:
# Handle edge case where a custom getter has overridden `trainable`.
# There is one known occurrence of this, in unit test
# testBasicRNNCellNotTrainable in
# contrib.rnn.python.kernel_tests.core_rnn_cell_test
with init_graph.as_default():
trainable_variables = tf_variables.trainable_variables()
if (trainable and self.trainable and
variable not in trainable_variables):
# A custom getter / variable scope overrode the trainable flag.
extra_trainable_vars = self._trainable_weights[prev_len_trainable:]
self._trainable_weights = self._trainable_weights[
:prev_len_trainable]
self._non_trainable_weights += extra_trainable_vars
return variable
def __call__(self, inputs, *args, **kwargs):
"""Wraps `call`, applying pre- and post-processing steps.
Arguments:
inputs: input tensor(s).
*args: additional positional arguments to be passed to `self.call`.
**kwargs: additional keyword arguments to be passed to `self.call`.
**Note**: kwarg `scope` is reserved for use by the layer.
Returns:
Output tensor(s).
Note:
- If the layer's `call` method takes a `scope` keyword argument,
this argument will be automatically set to the current variable scope.
- If the layer's `call` method takes a `mask` argument (as some Keras
layers do), its default value will be set to the mask generated
        for `inputs` by the previous layer (if `inputs` did come from
        a layer that generated a corresponding mask, i.e. if it came from
        a Keras layer with masking support).
Raises:
ValueError: if the layer's `call` method returns None (an invalid value).
"""
self._set_scope(kwargs.pop('scope', None))
if not context.executing_eagerly():
try:
# Set layer's "graph" at build time
self._graph = ops._get_graph_from_inputs(nest.flatten(inputs), # pylint: disable=protected-access
graph=self._graph)
except ValueError as e:
raise ValueError('Input graph and Layer graph are not the same: %s' % e)
if self.built:
try:
# Some classes which inherit from Layer do not use its constructor, so
# rather than initializing to None we check for an AttributeError.
scope_context_manager = self._always_reuse_variable_scope
except AttributeError:
# From this point we will always set reuse=True, so create a "final"
# variable scope with this setting. We avoid re-creating variable scopes
# after this point as an optimization.
self._always_reuse_variable_scope = vs.variable_scope(
self._scope, reuse=True, auxiliary_name_scope=False)
scope_context_manager = self._always_reuse_variable_scope
else:
scope_context_manager = vs.variable_scope(
self._scope, reuse=self._reuse, auxiliary_name_scope=False)
with scope_context_manager as scope:
self._current_scope = scope
try:
call_has_scope_arg = self._call_has_scope_arg
except AttributeError:
self._call_fn_args = function_utils.fn_args(self.call)
self._call_has_scope_arg = 'scope' in self._call_fn_args
call_has_scope_arg = self._call_has_scope_arg
if call_has_scope_arg:
kwargs['scope'] = scope
# Actually call layer
outputs = super(Layer, self).__call__(inputs, *args, **kwargs)
if not context.executing_eagerly():
# Update global default collections.
_add_elements_to_collection(self.updates, ops.GraphKeys.UPDATE_OPS)
return outputs
def __deepcopy__(self, memo):
no_copy = set(['_graph'])
shallow_copy = set(['_scope', '_always_reuse_variable_scope'])
cls = self.__class__
result = cls.__new__(cls)
memo[id(self)] = result
for k, v in self.__dict__.items():
if k in no_copy:
setattr(result, k, v)
elif k in shallow_copy:
setattr(result, k, copy.copy(v))
elif base_layer.is_tensor_or_tensor_list(v):
setattr(result, k, v)
else:
setattr(result, k, copy.deepcopy(v, memo))
return result
def _add_elements_to_collection(elements, collection_list):
if context.executing_eagerly():
raise RuntimeError('Using collections from Layers not supported in Eager '
'mode. Tried to add %s to %s' % (elements,
collection_list))
elements = nest.flatten(elements)
collection_list = nest.flatten(collection_list)
for name in collection_list:
collection = ops.get_collection_ref(name)
collection_set = set(collection)
for element in elements:
if element not in collection_set:
collection.append(element)
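# A minimal sketch of a layer built on the legacy base class above: `build`
# creates a variable through `add_weight` and `call` applies it. The
# `_ExampleDense` name is illustrative only and is not part of the TensorFlow
# API; the math_ops import is done lazily so the sketch stays out of this
# module's import graph.
class _ExampleDense(Layer):
  """Toy dense layer demonstrating `add_weight` and variable scoping."""
  def __init__(self, units, **kwargs):
    super(_ExampleDense, self).__init__(**kwargs)
    self.units = units
  def build(self, input_shape):
    # Created under this layer's variable scope and reused on later calls.
    self.kernel = self.add_weight(
        'kernel', shape=[int(input_shape[-1]), self.units])
    super(_ExampleDense, self).build(input_shape)
  def call(self, inputs):
    from tensorflow.python.ops import math_ops  # local import, see note above
    return math_ops.matmul(inputs, self.kernel)
# Usage sketch (graph mode):
#   from tensorflow.python.ops import array_ops
#   x = array_ops.placeholder(dtypes.float32, [None, 4])
#   y = _ExampleDense(3)(x)  # variables live under the layer's variable scope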
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b (http://hl7.org/fhir/StructureDefinition/Immunization) on 2019-05-07.
# 2019, SMART Health IT.
from . import domainresource
class Immunization(domainresource.DomainResource):
""" Immunization event information.
Describes the event of a patient being administered a vaccine or a record
of an immunization as reported by a patient, a clinician or another party.
"""
resource_type = "Immunization"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.doseQuantity = None
""" Amount of vaccine administered.
Type `Quantity` (represented as `dict` in JSON). """
self.education = None
""" Educational material presented to patient.
List of `ImmunizationEducation` items (represented as `dict` in JSON). """
self.encounter = None
""" Encounter immunization was part of.
Type `FHIRReference` (represented as `dict` in JSON). """
self.expirationDate = None
""" Vaccine expiration date.
Type `FHIRDate` (represented as `str` in JSON). """
self.fundingSource = None
""" Funding source for the vaccine.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.identifier = None
""" Business identifier.
List of `Identifier` items (represented as `dict` in JSON). """
self.isSubpotent = None
""" Dose potency.
Type `bool`. """
self.location = None
""" Where immunization occurred.
Type `FHIRReference` (represented as `dict` in JSON). """
self.lotNumber = None
""" Vaccine lot number.
Type `str`. """
self.manufacturer = None
""" Vaccine manufacturer.
Type `FHIRReference` (represented as `dict` in JSON). """
self.note = None
""" Additional immunization notes.
List of `Annotation` items (represented as `dict` in JSON). """
self.occurrenceDateTime = None
""" Vaccine administration date.
Type `FHIRDate` (represented as `str` in JSON). """
self.occurrenceString = None
""" Vaccine administration date.
Type `str`. """
self.patient = None
""" Who was immunized.
Type `FHIRReference` (represented as `dict` in JSON). """
self.performer = None
""" Who performed event.
List of `ImmunizationPerformer` items (represented as `dict` in JSON). """
self.primarySource = None
""" Indicates context the data was recorded in.
Type `bool`. """
self.programEligibility = None
""" Patient eligibility for a vaccination program.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.protocolApplied = None
""" Protocol followed by the provider.
List of `ImmunizationProtocolApplied` items (represented as `dict` in JSON). """
self.reaction = None
""" Details of a reaction that follows immunization.
List of `ImmunizationReaction` items (represented as `dict` in JSON). """
self.reasonCode = None
""" Why immunization occurred.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.reasonReference = None
""" Why immunization occurred.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.recorded = None
""" When the immunization was first captured in the subject's record.
Type `FHIRDate` (represented as `str` in JSON). """
self.reportOrigin = None
""" Indicates the source of a secondarily reported record.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.route = None
""" How vaccine entered body.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.site = None
""" Body site vaccine was administered.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.status = None
""" completed | entered-in-error | not-done.
Type `str`. """
self.statusReason = None
""" Reason not done.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.subpotentReason = None
""" Reason for being subpotent.
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.vaccineCode = None
""" Vaccine product administered.
Type `CodeableConcept` (represented as `dict` in JSON). """
super(Immunization, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Immunization, self).elementProperties()
js.extend([
("doseQuantity", "doseQuantity", quantity.Quantity, False, None, False),
("education", "education", ImmunizationEducation, True, None, False),
("encounter", "encounter", fhirreference.FHIRReference, False, None, False),
("expirationDate", "expirationDate", fhirdate.FHIRDate, False, None, False),
("fundingSource", "fundingSource", codeableconcept.CodeableConcept, False, None, False),
("identifier", "identifier", identifier.Identifier, True, None, False),
("isSubpotent", "isSubpotent", bool, False, None, False),
("location", "location", fhirreference.FHIRReference, False, None, False),
("lotNumber", "lotNumber", str, False, None, False),
("manufacturer", "manufacturer", fhirreference.FHIRReference, False, None, False),
("note", "note", annotation.Annotation, True, None, False),
("occurrenceDateTime", "occurrenceDateTime", fhirdate.FHIRDate, False, "occurrence", True),
("occurrenceString", "occurrenceString", str, False, "occurrence", True),
("patient", "patient", fhirreference.FHIRReference, False, None, True),
("performer", "performer", ImmunizationPerformer, True, None, False),
("primarySource", "primarySource", bool, False, None, False),
("programEligibility", "programEligibility", codeableconcept.CodeableConcept, True, None, False),
("protocolApplied", "protocolApplied", ImmunizationProtocolApplied, True, None, False),
("reaction", "reaction", ImmunizationReaction, True, None, False),
("reasonCode", "reasonCode", codeableconcept.CodeableConcept, True, None, False),
("reasonReference", "reasonReference", fhirreference.FHIRReference, True, None, False),
("recorded", "recorded", fhirdate.FHIRDate, False, None, False),
("reportOrigin", "reportOrigin", codeableconcept.CodeableConcept, False, None, False),
("route", "route", codeableconcept.CodeableConcept, False, None, False),
("site", "site", codeableconcept.CodeableConcept, False, None, False),
("status", "status", str, False, None, True),
("statusReason", "statusReason", codeableconcept.CodeableConcept, False, None, False),
("subpotentReason", "subpotentReason", codeableconcept.CodeableConcept, True, None, False),
("vaccineCode", "vaccineCode", codeableconcept.CodeableConcept, False, None, True),
])
return js
from . import backboneelement
class ImmunizationEducation(backboneelement.BackboneElement):
""" Educational material presented to patient.
Educational material presented to the patient (or guardian) at the time of
vaccine administration.
"""
resource_type = "ImmunizationEducation"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.documentType = None
""" Educational material document identifier.
Type `str`. """
self.presentationDate = None
""" Educational material presentation date.
Type `FHIRDate` (represented as `str` in JSON). """
self.publicationDate = None
""" Educational material publication date.
Type `FHIRDate` (represented as `str` in JSON). """
self.reference = None
""" Educational material reference pointer.
Type `str`. """
super(ImmunizationEducation, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ImmunizationEducation, self).elementProperties()
js.extend([
("documentType", "documentType", str, False, None, False),
("presentationDate", "presentationDate", fhirdate.FHIRDate, False, None, False),
("publicationDate", "publicationDate", fhirdate.FHIRDate, False, None, False),
("reference", "reference", str, False, None, False),
])
return js
class ImmunizationPerformer(backboneelement.BackboneElement):
""" Who performed event.
Indicates who performed the immunization event.
"""
resource_type = "ImmunizationPerformer"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.actor = None
""" Individual or organization who was performing.
Type `FHIRReference` (represented as `dict` in JSON). """
self.function = None
""" What type of performance was done.
Type `CodeableConcept` (represented as `dict` in JSON). """
super(ImmunizationPerformer, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ImmunizationPerformer, self).elementProperties()
js.extend([
("actor", "actor", fhirreference.FHIRReference, False, None, True),
("function", "function", codeableconcept.CodeableConcept, False, None, False),
])
return js
class ImmunizationProtocolApplied(backboneelement.BackboneElement):
""" Protocol followed by the provider.
The protocol (set of recommendations) being followed by the provider who
administered the dose.
"""
resource_type = "ImmunizationProtocolApplied"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.authority = None
""" Who is responsible for publishing the recommendations.
Type `FHIRReference` (represented as `dict` in JSON). """
self.doseNumberPositiveInt = None
""" Dose number within series.
Type `int`. """
self.doseNumberString = None
""" Dose number within series.
Type `str`. """
self.series = None
""" Name of vaccine series.
Type `str`. """
self.seriesDosesPositiveInt = None
""" Recommended number of doses for immunity.
Type `int`. """
self.seriesDosesString = None
""" Recommended number of doses for immunity.
Type `str`. """
self.targetDisease = None
""" Vaccine preventatable disease being targetted.
List of `CodeableConcept` items (represented as `dict` in JSON). """
super(ImmunizationProtocolApplied, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ImmunizationProtocolApplied, self).elementProperties()
js.extend([
("authority", "authority", fhirreference.FHIRReference, False, None, False),
("doseNumberPositiveInt", "doseNumberPositiveInt", int, False, "doseNumber", True),
("doseNumberString", "doseNumberString", str, False, "doseNumber", True),
("series", "series", str, False, None, False),
("seriesDosesPositiveInt", "seriesDosesPositiveInt", int, False, "seriesDoses", False),
("seriesDosesString", "seriesDosesString", str, False, "seriesDoses", False),
("targetDisease", "targetDisease", codeableconcept.CodeableConcept, True, None, False),
])
return js
class ImmunizationReaction(backboneelement.BackboneElement):
""" Details of a reaction that follows immunization.
Categorical data indicating that an adverse event is associated in time to
an immunization.
"""
resource_type = "ImmunizationReaction"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.date = None
""" When reaction started.
Type `FHIRDate` (represented as `str` in JSON). """
self.detail = None
""" Additional information on reaction.
Type `FHIRReference` (represented as `dict` in JSON). """
self.reported = None
""" Indicates self-reported reaction.
Type `bool`. """
super(ImmunizationReaction, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(ImmunizationReaction, self).elementProperties()
js.extend([
("date", "date", fhirdate.FHIRDate, False, None, False),
("detail", "detail", fhirreference.FHIRReference, False, None, False),
("reported", "reported", bool, False, None, False),
])
return js
import sys
try:
from . import annotation
except ImportError:
annotation = sys.modules[__package__ + '.annotation']
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import fhirdate
except ImportError:
fhirdate = sys.modules[__package__ + '.fhirdate']
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
try:
from . import quantity
except ImportError:
quantity = sys.modules[__package__ + '.quantity']
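# A minimal construction sketch (the field values below are illustrative and
# not taken from any real immunization record):
#
#     immunization = Immunization({
#         "resourceType": "Immunization",
#         "status": "completed",
#         "vaccineCode": {"text": "Influenza, seasonal, injectable"},
#         "patient": {"reference": "Patient/example"},
#         "occurrenceDateTime": "2019-01-10",
#     })
#     immunization.status       # "completed"
#     immunization.as_json()    # round-trips back to a JSON dictionary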
|
|
##########################################################################
#
# Copyright (c) 2012-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferScene
##########################################################################
# Metadata
##########################################################################
def __drawingSummary( plug ) :
info = []
for name, label in (
( "Solid", "Shaded" ),
( "Wireframe", "Wireframe" ),
( "Outline", "Outline" ),
( "Point", "Point" ),
( "Bound", "Bound" ),
) :
values = []
if plug["primitive"+name]["enabled"].getValue() :
values.append( "On" if plug["primitive"+name]["value"].getValue() else "Off" )
if name != "Solid" and plug["primitive"+name+"Color"]["enabled"].getValue() :
values.append( "Color" )
if name != "Solid" and name != "Bound" and plug["primitive"+name+"Width"]["enabled"].getValue() :
values.append( "%0gpx" % plug["primitive"+name+"Width"]["value"].getValue() )
if values :
info.append( label + " : " + "/".join( values ) )
return ", ".join( info )
def __pointsPrimitivesSummary( plug ) :
info = []
if plug["pointsPrimitiveUseGLPoints"]["enabled"].getValue() :
info.append( "Points On" if plug["pointsPrimitiveUseGLPoints"]["value"].getValue() else "Points Off" )
if plug["pointsPrimitiveGLPointWidth"]["enabled"].getValue() :
info.append( "Width %0gpx" % plug["pointsPrimitiveGLPointWidth"]["value"].getValue() )
return ", ".join( info )
def __curvesPrimitivesSummary( plug ) :
info = []
if plug["curvesPrimitiveUseGLLines"]["enabled"].getValue() :
info.append( "Lines On" if plug["curvesPrimitiveUseGLLines"]["value"].getValue() else "Lines Off" )
if plug["curvesPrimitiveGLLineWidth"]["enabled"].getValue() :
info.append( "Width %0gpx" % plug["curvesPrimitiveGLLineWidth"]["value"].getValue() )
if plug["curvesPrimitiveIgnoreBasis"]["enabled"].getValue() :
info.append( "Basis Ignored" if plug["curvesPrimitiveIgnoreBasis"]["value"].getValue() else "Basis On" )
return ", ".join( info )
Gaffer.Metadata.registerNode(
GafferScene.OpenGLAttributes,
"description",
"""
Applies attributes to modify the appearance of objects in
the viewport and in renders done by the OpenGLRender node.
""",
plugs = {
# Section summaries
"attributes" : [
"layout:section:Drawing:summary", __drawingSummary,
"layout:section:Points Primitives:summary", __pointsPrimitivesSummary,
"layout:section:Curves Primitives:summary", __curvesPrimitivesSummary,
],
# General drawing plugs
"attributes.primitiveSolid" : [
"description",
"""
Whether or not the object is rendered solid, in which
case the assigned GLSL shader will be used to perform
the shading.
""",
"layout:section", "Drawing",
"label", "Shaded",
],
"attributes.primitiveWireframe" : [
"description",
"""
Whether or not the object is rendered as a wireframe.
Use the primitiveWireframeColor and primitiveWireframeWidth
plugs for finer control of the wireframe appearance.
""",
"layout:section", "Drawing",
"label", "Wireframe",
],
"attributes.primitiveWireframeColor" : [
"description",
"""
The colour to use for the wireframe rendering. Only
meaningful if wireframe rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Wireframe Color",
],
"attributes.primitiveWireframeWidth" : [
"description",
"""
The width in pixels of the wireframe rendering. Only
meaningful if wireframe rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Wireframe Width",
],
"attributes.primitiveOutline" : [
"description",
"""
Whether or not an outline is drawn around the object.
Use the primitiveOutlineColor and primitiveOutlineWidth
plugs for finer control of the outline.
""",
"layout:section", "Drawing",
"label", "Outline",
],
"attributes.primitiveOutlineColor" : [
"description",
"""
The colour to use for the outline. Only
meaningful if outline rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Outline Color",
],
"attributes.primitiveOutlineWidth" : [
"description",
"""
The width in pixels of the outline. Only
meaningful if outline rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Outline Width",
],
"attributes.primitivePoint" : [
"description",
"""
Whether or not the individual points (vertices) of the
object are drawn. Use the primitivePointColor and primitivePointWidth
plugs for finer control of the point rendering.
""",
"layout:section", "Drawing",
"label", "Points",
],
"attributes.primitivePointColor" : [
"description",
"""
The colour to use for the point rendering. Only
meaningful if point rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Point Color",
],
"attributes.primitivePointWidth" : [
"description",
"""
The width in pixels of the points. Only
meaningful if point rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Point Width",
],
"attributes.primitiveBound" : [
"description",
"""
Whether or not the bounding box of the object is drawn.
This is in addition to any drawing of unexpanded bounding
boxes that the viewer performs. Use the primitiveBoundColor
plug to change the colour of the bounding box.
""",
"layout:section", "Drawing",
"label", "Bound",
],
"attributes.primitiveBoundColor" : [
"description",
"""
The colour to use for the bounding box rendering. Only
meaningful if bounding box rendering is turned on.
""",
"layout:section", "Drawing",
"label", "Bound Color",
],
# Points primitive drawing plugs
"attributes.pointsPrimitiveUseGLPoints" : [
"description",
"""
Points primitives have a render type (set by the PointsType
node) which allows them to be rendered as particles, disks,
spheres etc. This attribute overrides that type for OpenGL
only, allowing a much faster rendering as raw OpenGL points.
""",
"layout:section", "Points Primitives",
"label", "Use GL Points",
],
"attributes.pointsPrimitiveUseGLPoints.value" : [
"preset:For GL Points", "forGLPoints",
"preset:For Particles And Disks", "forParticlesAndDisks",
"preset:For All", "forAll",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
],
"attributes.pointsPrimitiveGLPointWidth" : [
"description",
"""
The width in pixels of the GL points rendered when
the pointsPrimitiveUseGLPoints plug has overridden
the point type.
""",
"layout:section", "Points Primitives",
"label", "GL Point Width",
],
# Curves primitive drawing plugs
"attributes.curvesPrimitiveUseGLLines" : [
"description",
"""
Curves primitives are typically rendered as ribbons
and as such have an associated width in object space.
This attribute overrides that for OpenGL only, allowing
a much faster rendering as raw OpenGL lines.
""",
"layout:section", "Curves Primitives",
"label", "Use GL Lines",
],
"attributes.curvesPrimitiveGLLineWidth" : [
"description",
"""
The width in pixels of the GL lines rendered when
the curvesPrimitiveUseGLLines plug has overridden
the drawing to use lines.
""",
"layout:section", "Curves Primitives",
"label", "GL Line Width",
],
"attributes.curvesPrimitiveIgnoreBasis" : [
"description",
"""
Turns off interpolation for cubic curves, just
rendering straight lines between the vertices
instead.
""",
"layout:section", "Curves Primitives",
"label", "Ignore Basis",
],
}
)
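##########################################################################
# A self-contained sketch of the text produced by __drawingSummary above. The
# stubs only mimic the ["enabled"]/["value"].getValue() plug interface used by
# the summary functions; they are not part of Gaffer.
#
#	class _StubValue( object ) :
#		def __init__( self, v ) :
#			self.__v = v
#		def getValue( self ) :
#			return self.__v
#
#	def _stubSwitch( enabled, value ) :
#		return { "enabled" : _StubValue( enabled ), "value" : _StubValue( value ) }
#
#	plug = {}
#	for n in ( "Solid", "Wireframe", "Outline", "Point", "Bound" ) :
#		plug["primitive" + n] = _stubSwitch( False, False )
#	for n in ( "Wireframe", "Outline", "Point", "Bound" ) :
#		plug["primitive" + n + "Color"] = _stubSwitch( False, None )
#	for n in ( "Wireframe", "Outline", "Point" ) :
#		plug["primitive" + n + "Width"] = _stubSwitch( False, 1.0 )
#	plug["primitiveWireframe"] = _stubSwitch( True, True )
#	plug["primitiveWireframeWidth"] = _stubSwitch( True, 2.0 )
#
#	__drawingSummary( plug )	# -> "Wireframe : On/2px"
##########################################################################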
|
|
"""
The itty-bitty Python web framework.
Totally ripping off Sinatra, the Python way. Very useful for small applications,
especially web services. Handles basic HTTP methods (PUT/DELETE too!). Errs on
the side of fun and terse.
Example Usage::
from itty import get, run_itty
@get('/')
def index(request):
return 'Hello World!'
run_itty()
Thanks go out to Matt Croydon & Christian Metts for putting me up to this late
at night. The joking around has become reality. :)
"""
import cgi
import mimetypes
import os
import re
import StringIO
import sys
import traceback
try:
from urlparse import parse_qs
except ImportError:
from cgi import parse_qs
__author__ = 'Daniel Lindsley'
__version__ = ('0', '8', '1')
__license__ = 'BSD'
REQUEST_MAPPINGS = {
'GET': [],
'POST': [],
'PUT': [],
'DELETE': [],
}
ERROR_HANDLERS = {}
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media')
HTTP_MAPPINGS = {
100: 'CONTINUE',
101: 'SWITCHING PROTOCOLS',
200: 'OK',
201: 'CREATED',
202: 'ACCEPTED',
203: 'NON-AUTHORITATIVE INFORMATION',
204: 'NO CONTENT',
205: 'RESET CONTENT',
206: 'PARTIAL CONTENT',
300: 'MULTIPLE CHOICES',
301: 'MOVED PERMANENTLY',
302: 'FOUND',
303: 'SEE OTHER',
304: 'NOT MODIFIED',
305: 'USE PROXY',
306: 'RESERVED',
307: 'TEMPORARY REDIRECT',
400: 'BAD REQUEST',
401: 'UNAUTHORIZED',
402: 'PAYMENT REQUIRED',
403: 'FORBIDDEN',
404: 'NOT FOUND',
405: 'METHOD NOT ALLOWED',
406: 'NOT ACCEPTABLE',
407: 'PROXY AUTHENTICATION REQUIRED',
408: 'REQUEST TIMEOUT',
409: 'CONFLICT',
410: 'GONE',
411: 'LENGTH REQUIRED',
412: 'PRECONDITION FAILED',
413: 'REQUEST ENTITY TOO LARGE',
414: 'REQUEST-URI TOO LONG',
415: 'UNSUPPORTED MEDIA TYPE',
416: 'REQUESTED RANGE NOT SATISFIABLE',
417: 'EXPECTATION FAILED',
500: 'INTERNAL SERVER ERROR',
501: 'NOT IMPLEMENTED',
502: 'BAD GATEWAY',
503: 'SERVICE UNAVAILABLE',
504: 'GATEWAY TIMEOUT',
505: 'HTTP VERSION NOT SUPPORTED',
}
class RequestError(Exception):
"""A base exception for HTTP errors to inherit from."""
status = 404
def __init__(self, message, hide_traceback=False):
super(RequestError, self).__init__(message)
self.hide_traceback = hide_traceback
class Forbidden(RequestError):
status = 403
class NotFound(RequestError):
status = 404
def __init__(self, message, hide_traceback=True):
super(NotFound, self).__init__(message)
self.hide_traceback = hide_traceback
class AppError(RequestError):
status = 500
class Redirect(RequestError):
"""
Redirects the user to a different URL.
    Slightly different from the other HTTP errors, the Redirect is less
'OMG Error Occurred' and more 'let's do something exceptional'. When you
redirect, you break out of normal processing anyhow, so it's a very similar
case."""
status = 302
url = ''
def __init__(self, url):
self.url = url
self.args = ["Redirecting to '%s'..." % self.url]
class lazyproperty(object):
"""A property whose value is computed only once. """
def __init__(self, function):
self._function = function
def __get__(self, obj, _=None):
if obj is None:
return self
value = self._function(obj)
setattr(obj, self._function.func_name, value)
return value
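# A tiny illustration of lazyproperty (the class below is hypothetical): the
# wrapped function runs only once, after which the computed value shadows the
# descriptor on the instance.
#
#     class Answer(object):
#         @lazyproperty
#         def value(self):
#             print 'computing...'
#             return 42
#
#     a = Answer()
#     a.value  # prints 'computing...' and returns 42
#     a.value  # returns 42 without recomputing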
class Request(object):
"""An object to wrap the environ bits in a friendlier way."""
GET = {}
def __init__(self, environ, start_response):
self._environ = environ
self._start_response = start_response
self.setup_self()
def setup_self(self):
self.path = add_slash(self._environ.get('PATH_INFO', ''))
self.method = self._environ.get('REQUEST_METHOD', 'GET').upper()
self.query = self._environ.get('QUERY_STRING', '')
self.content_length = 0
try:
self.content_length = int(self._environ.get('CONTENT_LENGTH', '0'))
except ValueError:
pass
self.GET = self.build_get_dict()
def __getattr__(self, name):
"""
        Allow access to the environment for names we don't already have an
        attribute for. This lets you do things like::
script_name = request.SCRIPT_NAME
"""
return self._environ[name]
@lazyproperty
def POST(self):
return self.build_complex_dict()
@lazyproperty
def PUT(self):
return self.build_complex_dict()
@lazyproperty
def body(self):
"""Content of the request."""
return self._environ['wsgi.input'].read(self.content_length)
def build_get_dict(self):
"""Takes GET data and rips it apart into a dict."""
raw_query_dict = parse_qs(self.query, keep_blank_values=1)
query_dict = {}
for key, value in raw_query_dict.items():
if len(value) <= 1:
query_dict[key] = value[0]
else:
# Since it's a list of multiple items, we must have seen more than
# one item of the same name come in. Store all of them.
query_dict[key] = value
return query_dict
def build_complex_dict(self):
"""Takes POST/PUT data and rips it apart into a dict."""
raw_data = cgi.FieldStorage(fp=StringIO.StringIO(self.body), environ=self._environ)
query_dict = {}
for field in raw_data:
if isinstance(raw_data[field], list):
# Since it's a list of multiple items, we must have seen more than
# one item of the same name come in. Store all of them.
query_dict[field] = [fs.value for fs in raw_data[field]]
elif raw_data[field].filename:
# We've got a file.
query_dict[field] = raw_data[field]
else:
query_dict[field] = raw_data[field].value
return query_dict
class Response(object):
headers = []
def __init__(self, output, headers=None, status=200, content_type='text/html'):
self.output = output
self.content_type = content_type
self.status = status
self.headers = []
if headers and isinstance(headers, list):
self.headers = headers
def add_header(self, key, value):
self.headers.append((key, value))
def send(self, start_response):
status = "%d %s" % (self.status, HTTP_MAPPINGS.get(self.status))
headers = [('Content-Type', "%s; charset=utf-8" % self.content_type)] + self.headers
final_headers = []
# Because Unicode is unsupported...
for header in headers:
final_headers.append((self.convert_to_ascii(header[0]), self.convert_to_ascii(header[1])))
start_response(status, final_headers)
if isinstance(self.output, unicode):
return self.output.encode('utf-8')
else:
return self.output
def convert_to_ascii(self, data):
if isinstance(data, unicode):
try:
return data.encode('us-ascii')
except UnicodeError, e:
raise
else:
return str(data)
def handle_request(environ, start_response):
"""The main handler. Dispatches to the user's code."""
try:
request = Request(environ, start_response)
except Exception, e:
return handle_error(e)
try:
(re_url, url, callback), kwargs = find_matching_url(request)
response = callback(request, **kwargs)
except Exception, e:
return handle_error(e, request)
if not isinstance(response, Response):
response = Response(response)
return response.send(start_response)
def handle_error(exception, request=None):
"""If an exception is thrown, deal with it and present an error page."""
if request is None:
request = {'_environ': {'PATH_INFO': ''}}
if not getattr(exception, 'hide_traceback', False):
(e_type, e_value, e_tb) = sys.exc_info()
message = "%s occurred on '%s': %s\nTraceback: %s" % (
exception.__class__,
request._environ['PATH_INFO'],
exception,
''.join(traceback.format_exception(e_type, e_value, e_tb))
)
request._environ['wsgi.errors'].write(message)
if isinstance(exception, RequestError):
status = getattr(exception, 'status', 404)
else:
status = 500
if status in ERROR_HANDLERS:
return ERROR_HANDLERS[status](request, exception)
return not_found(request, exception)
def find_matching_url(request):
"""Searches through the methods who've registed themselves with the HTTP decorators."""
if not request.method in REQUEST_MAPPINGS:
raise NotFound("The HTTP request method '%s' is not supported." % request.method)
for url_set in REQUEST_MAPPINGS[request.method]:
match = url_set[0].search(request.path)
if match is not None:
return (url_set, match.groupdict())
raise NotFound("Sorry, nothing here.")
def add_slash(url):
"""Adds a trailing slash for consistency in urls."""
if not url.endswith('/'):
url = url + '/'
return url
def content_type(filename):
"""
Takes a guess at what the desired mime type might be for the requested file.
Mostly only useful for static media files.
"""
ct = 'text/plain'
ct_guess = mimetypes.guess_type(filename)
if ct_guess[0] is not None:
ct = ct_guess[0]
return ct
def static_file(filename, root=MEDIA_ROOT):
"""
Fetches a static file from the filesystem, relative to either the given
MEDIA_ROOT or from the provided root directory.
"""
if filename is None:
raise Forbidden("You must specify a file you'd like to access.")
# Strip the '/' from the beginning/end.
valid_path = filename.strip('/')
# Kill off any character trying to work their way up the filesystem.
valid_path = valid_path.replace('//', '/').replace('/./', '/').replace('/../', '/')
desired_path = os.path.join(root, valid_path)
if not os.path.exists(desired_path):
raise NotFound("File does not exist.")
if not os.access(desired_path, os.R_OK):
raise Forbidden("You do not have permission to access this file.")
ct = str(content_type(desired_path))
# Do the text types as a non-binary read.
if ct.startswith('text') or ct.endswith('xml') or ct.endswith('json'):
return open(desired_path, 'r').read()
# Fall back to binary for everything else.
return open(desired_path, 'rb').read()
# Static file handler
def serve_static_file(request, filename, root=MEDIA_ROOT, force_content_type=None):
"""
Basic handler for serving up static media files.
Accepts an optional ``root`` (filepath string, defaults to ``MEDIA_ROOT``) parameter.
Accepts an optional ``force_content_type`` (string, guesses if ``None``) parameter.
"""
file_contents = static_file(filename, root)
if force_content_type is None:
ct = content_type(filename)
else:
ct = force_content_type
return Response(file_contents, content_type=ct)
# Decorators
def get(url):
"""Registers a method as capable of processing GET requests."""
def wrapped(method):
# Register.
re_url = re.compile("^%s$" % add_slash(url))
REQUEST_MAPPINGS['GET'].append((re_url, url, method))
return method
return wrapped
def post(url):
"""Registers a method as capable of processing POST requests."""
def wrapped(method):
# Register.
re_url = re.compile("^%s$" % add_slash(url))
REQUEST_MAPPINGS['POST'].append((re_url, url, method))
return method
return wrapped
def put(url):
"""Registers a method as capable of processing PUT requests."""
def wrapped(method):
# Register.
re_url = re.compile("^%s$" % add_slash(url))
REQUEST_MAPPINGS['PUT'].append((re_url, url, method))
        # Successful PUTs should default to a "201 CREATED" response.
        method.status = 201
return method
return wrapped
def delete(url):
"""Registers a method as capable of processing DELETE requests."""
def wrapped(method):
# Register.
re_url = re.compile("^%s$" % add_slash(url))
REQUEST_MAPPINGS['DELETE'].append((re_url, url, method))
return method
return wrapped
def error(code):
"""Registers a method for processing errors of a certain HTTP code."""
def wrapped(method):
# Register.
ERROR_HANDLERS[code] = method
return method
return wrapped
# Error handlers
@error(403)
def forbidden(request, exception):
response = Response('Forbidden', status=403, content_type='text/plain')
return response.send(request._start_response)
@error(404)
def not_found(request, exception):
response = Response('Not Found', status=404, content_type='text/plain')
return response.send(request._start_response)
@error(500)
def app_error(request, exception):
response = Response('Application Error', status=500, content_type='text/plain')
return response.send(request._start_response)
@error(302)
def redirect(request, exception):
response = Response('', status=302, content_type='text/plain', headers=[('Location', exception.url)])
return response.send(request._start_response)
# Servers Adapters
def wsgiref_adapter(host, port):
from wsgiref.simple_server import make_server
srv = make_server(host, port, handle_request)
srv.serve_forever()
def appengine_adapter(host, port):
from google.appengine.ext.webapp import util
util.run_wsgi_app(handle_request)
def cherrypy_adapter(host, port):
# Experimental (Untested).
from cherrypy import wsgiserver
server = wsgiserver.CherryPyWSGIServer((host, port), handle_request)
server.start()
def flup_adapter(host, port):
# Experimental (Untested).
from flup.server.fcgi import WSGIServer
WSGIServer(handle_request, bindAddress=(host, port)).run()
def paste_adapter(host, port):
# Experimental (Untested).
from paste import httpserver
httpserver.serve(handle_request, host=host, port=str(port))
def twisted_adapter(host, port):
from twisted.web import server, wsgi
from twisted.python.threadpool import ThreadPool
from twisted.internet import reactor
thread_pool = ThreadPool()
thread_pool.start()
reactor.addSystemEventTrigger('after', 'shutdown', thread_pool.stop)
ittyResource = wsgi.WSGIResource(reactor, thread_pool, handle_request)
site = server.Site(ittyResource)
reactor.listenTCP(port, site)
reactor.run()
def diesel_adapter(host, port):
# Experimental (Mostly untested).
from diesel.protocols.wsgi import WSGIApplication
app = WSGIApplication(handle_request, port=int(port))
app.run()
def tornado_adapter(host, port):
# Experimental (Mostly untested).
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
container = WSGIContainer(handle_request)
http_server = HTTPServer(container)
http_server.listen(port)
IOLoop.instance().start()
def gunicorn_adapter(host, port):
from gunicorn import version_info
if version_info < (0, 9, 0):
from gunicorn.arbiter import Arbiter
from gunicorn.config import Config
arbiter = Arbiter(Config({'bind': "%s:%d" % (host, int(port)), 'workers': 4}), handle_request)
arbiter.run()
else:
from gunicorn.app.base import Application
class IttyApplication(Application):
def init(self, parser, opts, args):
return {
'bind': '{0}:{1}'.format(host, port),
'workers': 4
}
def load(self):
return handle_request
IttyApplication().run()
def gevent_adapter(host, port):
from gevent import pywsgi
pywsgi.WSGIServer((host, int(port)), handle_request).serve_forever()
def eventlet_adapter(host, port):
from eventlet import wsgi, listen
wsgi.server(listen((host, int(port))), handle_request)
WSGI_ADAPTERS = {
'wsgiref': wsgiref_adapter,
'appengine': appengine_adapter,
'cherrypy': cherrypy_adapter,
'flup': flup_adapter,
'paste': paste_adapter,
'twisted': twisted_adapter,
'diesel': diesel_adapter,
'tornado': tornado_adapter,
'gunicorn': gunicorn_adapter,
'gevent': gevent_adapter,
'eventlet': eventlet_adapter,
}
# Server
def run_itty(server='wsgiref', host='localhost', port=8080, config=None):
"""
Runs the itty web server.
Accepts an optional host (string), port (integer), server (string) and
config (python module name/path as a string) parameters.
By default, uses Python's built-in wsgiref implementation. Specify a server
name from WSGI_ADAPTERS to use an alternate WSGI server.
"""
if not server in WSGI_ADAPTERS:
raise RuntimeError("Server '%s' is not a valid server. Please choose a different server." % server)
if config is not None:
# We'll let ImportErrors bubble up.
config_options = __import__(config)
host = getattr(config_options, 'host', host)
port = getattr(config_options, 'port', port)
server = getattr(config_options, 'server', server)
    # AppEngine seems to echo everything, even though it shouldn't. Accommodate.
if server != 'appengine':
print 'itty starting up (using %s)...' % server
print 'Listening on http://%s:%s...' % (host, port)
print 'Use Ctrl-C to quit.'
print
try:
WSGI_ADAPTERS[server](host, port)
except KeyboardInterrupt:
print 'Shutting down. Have a nice day!'
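# A small usage sketch building on the decorators above (routes, names and the
# port are illustrative only). Named groups in the URL regex become keyword
# arguments to the handler via find_matching_url:
#
#     @get(r'/hello/(?P<name>\w+)')
#     def hello(request, name):
#         return 'Hello, %s!' % name
#
#     @post('/echo')
#     def echo(request):
#         return Response(request.POST.get('message', ''), content_type='text/plain')
#
#     if __name__ == '__main__':
#         run_itty(server='wsgiref', host='localhost', port=8080)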
|
|
from collections import defaultdict
from relational_data import (
CHANGED_ROWS,
COMPARAND_MISSING_ROWS,
RelationalData,
MISSING_ROWS,
)
from sqlalchemy import (
create_engine,
inspect,
MetaData,
)
from sqlalchemy.orm import sessionmaker
# 'description', # I don't care about description right now.
TABLE_KEYS = [
'fullname',
'name',
'primary_key',
]
# I know default is on the SA side, and I suspect onupdate is the same since there is
# also a server_onupdate. I don't really care about doc for now, and I don't know what
# the rest of these are.
# 'default', 'doc', 'system', 'dispatch', 'onupdate'
# TODO Could we just look at all of the column attributes instead of enumerating them here?
# Omitted keys because they are redundant with the table metadata. TODO confirm the redundancy.
# 'constraints',
# 'foreign_keys',
# 'index',
# 'primary_key',
COLUMN_KEYS = [
'autoincrement',
'is_literal',
'key',
'name',
'nullable',
'server_default',
'server_onupdate',
'type',
'unique',
]
IGNORE_ITEMS = [
'PrimaryKeyConstraint',
]
class SQLDatabase(object):
def __init__(self, conn_string, schema=None):
# How should we name the class considering it can either refer to
# a schema or a database?
# is there any reason to create the engine separately?
self.schema = schema
self.engine = create_engine(conn_string)
self.conn = self.engine.connect()
Session = sessionmaker(bind=self.engine)
self.session = Session(bind=self.conn)
# The differences are a nesting of dicts and lists. The first level has table names, then a
# general entry and column specific entries. If the table data or column list is empty,
# that means the corresponding database does not have that table or column at all. To illustrate:
# {
# table1:
# {
# general: [difference1, difference2],
# col1: [difference3, difference4],
# col2: [difference5]},
# table2:
# {
# general: [],
# col3: [difference6],
# col4: [difference7, difference8],
# col5: [],
# },
# table3: {},
# }
self.differences = defaultdict(dict)
# TODO
        # Use the inspector instead: http://docs.sqlalchemy.org/en/latest/core/reflection.html
self.metadata = MetaData(bind=self.engine, schema=schema)
self.metadata.reflect()
self.inspector = inspect(self.engine)
@property
def tables(self):
"""Return the table objects of all tables in this database."""
return self.metadata.tables
@property
def table_names(self):
"""Return the names of the tables in this database (or database schema)."""
return self.inspector.get_table_names(schema=self.schema)
def table_pk_col_names(self, table):
"""Return the primary key column names for the given table."""
return [c.name for c in table.columns if c.primary_key]
def table_from_name(self, table_name):
"""Return the table object from the table name."""
if self.schema:
            # MetaData keys schema-qualified tables as "<schema>.<table_name>".
            table_name = self.schema + '.' + table_name
return self.tables[table_name]
def column_from_table(self, table, column_name):
"""Return the column object from a table and column name."""
if isinstance(table, str):
table = self.table_from_name(table)
return table.columns[column_name]
# has tables with columns with constraints and sequences
# has rows with values
# We want to be able to apply these functions to full databases and
# to tables. Can we make that possible?
# This will require
# For getting common table names, common columns, etc.
# def common_elements(db_attr, attr_attr):
# return set(getattr(a, attr_attr) for a in getattr(db_a, db_attr) if getattr(db_b, db_attr))
def compare_schemas(self, comparand):
"""Compare the schemas of the two given databases.
Compare the schema of the database associated with self with the schema of
the comparand."""
self.comparand = comparand
common_table_names = set(self.table_names) & set(comparand.table_names)
self.dual_set_compare(self.table_names, comparand.table_names)
# add a and b not found tables to however we end up reporting errors
# a_not_b_tables = set(db_a.tables) - common_table_names and vice versa
map(self.compare_table_schemas, common_table_names)
def dual_set_compare(self, a, b, diff_key=None):
"""Compare both directions of differences between two sets-like objects.
TODO give a concrete example of how this is used."""
self.single_set_compare(a, b, 'a', diff_key)
self.single_set_compare(b, a, 'b', diff_key)
def single_set_compare(self, a, b, prefix, diff_key):
"""Compare a single direction of two set-like objects."""
for i in remove_ignore(set(a) - set(b)):
if diff_key:
self.differences[diff_key]['{}_{}'.format(i, prefix)] = {}
else:
self.differences['{}_{}'.format(i, prefix)] = {}
def compare_table_schemas(self, table_name):
"""Compare the general and column specific schemas of each table."""
ta = self.table_from_name(table_name)
tb = self.comparand.table_from_name(table_name)
self.dual_set_compare(ta.constraints, tb.constraints, table_name)
self.dual_set_compare(ta.foreign_keys, tb.foreign_keys, table_name)
self.dual_set_compare(ta.indexes, tb.indexes, table_name)
self.differences[table_name]['general'] = compare(TABLE_KEYS, ta, tb)
self.compare_table_columns(ta, tb)
def compare_table_columns(self, ta, tb):
"""Record the columns present in one table and not the other."""
ta_col_names = set(col.name for col in ta.columns)
tb_col_names = set(col.name for col in tb.columns)
for col_name in ta_col_names - tb_col_names:
self.differences[ta.name]['{}_a'.format(col_name)] = {}
for col_name in tb_col_names - ta_col_names:
self.differences[tb.name]['{}_b'.format(col_name)] = {}
for col_name in ta_col_names & tb_col_names:
col_a = self.column_from_table(ta, col_name)
col_b = self.comparand.column_from_table(tb, col_name)
results = compare(COLUMN_KEYS, col_a, col_b)
if results:
self.differences[ta.name][col_name] = results
def relational_data_for_table(self, table):
"""Return a RelationalData object for the given table."""
data = self.session.query(table).all()
headers = [tuple(c.name for c in table.columns)]
pks = self.table_pk_col_names(table)
assert len(pks) == 1, \
"Compare data only works with data having exactly one primary key column."
return RelationalData(headers + data, pks[0])
def compare_data(self):
"""Compare the data of the two given databases.
Compare the data of database a (db_a) with the data of
database b (db_b). Only show the differences unless
show_equivalence is True.
Differences can be different data in one or more columns
of the same row (as identified by the primary key), missing
rows (rows db_a has, but db_b does not), or added rows (rows
that db_b has, but db_a does not).
A data comparison necessarily includes a schema comparison."""
self.data_diffs = defaultdict(dict)
for table in self.tables.values():
data = self.relational_data_for_table(table)
comparand_table = self.comparand.table_from_name(table.name)
data.compare(self.comparand.relational_data_for_table(comparand_table))
self.data_diffs[table.name] = data.errors
return self.data_diffs
def update_data_to_match_comparand(self):
"""Insert, remove, and update data to match the comparand."""
data_diffs = self.compare_data()
sorted_table_names = [t.name for t in self.metadata.sorted_tables if t.name in data_diffs.keys()]
# Due to foreign key constraints, we must iterate twice through the tables.
# Iterate in forward order so inserts do not violate fk constraints.
for table_name in sorted_table_names:
table = self.table_from_name(table_name)
active_diffs = data_diffs[table_name]
self.add_missing_rows(table, active_diffs[COMPARAND_MISSING_ROWS].keys())
self.update_changed_values(table, active_diffs[CHANGED_ROWS])
# Iterate through the tables in reversed order so row deletion does not
# violate foreign key constraints.
for table_name in reversed(sorted_table_names):
table = self.table_from_name(table_name)
active_diffs = data_diffs[table_name]
self.delete_missing_rows(table, active_diffs[MISSING_ROWS].keys())
self.session.commit()
def add_missing_rows(self, table, rows):
"""Insert the given rows into the given table."""
table_col_names = [c.name for c in table.columns]
comparand_table = self.comparand.table_from_name(table.name)
comparand_col_names = [c.name for c in comparand_table.columns]
for row in rows:
# TODO check the column vs header order assumption
insert_values = dict(zip(comparand_col_names, row))
for col_name in set(comparand_col_names) - set(table_col_names):
del insert_values[col_name]
self.session.execute(table.insert().values(**insert_values))
def update_changed_values(self, table, values):
"""Update the given table with the given values."""
for id in values:
changed_columns = values[id]
for col in changed_columns:
self.session.execute(table.update().
values({col: changed_columns[col][1]}).where(table.c.id == id))
def delete_missing_rows(self, table, rows):
"""Remove rows that are present in this db but missing from the comparand."""
for row in rows:
# TODO don't forget that assuming the pk is the first item isn't a valid
# assumption. Definitely refactor all of this.
pk = self.table_pk_col_names(table)[0]
pk_val = row[0]
self.session.execute(table.delete(getattr(table.c, pk) == pk_val))
def compare_sequences(self):
pass
def update_schema_b_from_a(self):
"""Update the schema of db_b to that found in db_a."""
pass
def update_b_sequences_from_a(self):
pass
# We need type equivalences.
# Type + flavor is equivalent to another type + flavor
def print_differences(self):
"""Print the differences in a (hopefully) human understandable format."""
print self.differences
def compare(attrs, compare_a, compare_b):
"""Return the unequal attributes between a and b.
Given a set of attributes and two arbitrary objects, compare the values of
those attributes of each object. Return the comparisons that were unequal.
An example use case might be tables as the objects and schema parameters as
the attributes. """
errors = {}
for attr in attrs:
a_val = getattr(compare_a, attr)
b_val = getattr(compare_b, attr)
# Allow for string equivalence as otherwise things like VARCHAR() do not match each other
# TODO investigate if this inequality is because of SQL flavor and possibly use that
# route to fix.
if not a_val == b_val and not (str(a_val) == str(b_val)):
errors['{}_a'.format(attr)] = a_val
errors['{}_b'.format(attr)] = b_val
return errors
def remove_ignore(diff_set):
"""Remove items that begin with ignore strings."""
return_set = set(diff_set)
for item in diff_set:
for ignore in IGNORE_ITEMS:
if ignore in str(item):
return_set.discard(item)  # discard() tolerates an item matching more than one ignore string
return return_set
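# A minimal, self-contained sketch of how the module-level compare() helper above
# behaves; the _Col stand-in class and the attribute names ('type', 'nullable') are
# invented for illustration and are not part of this module.
def _compare_example():
    class _Col(object):
        def __init__(self, type_, nullable):
            self.type = type_
            self.nullable = nullable
    col_a = _Col('VARCHAR(32)', True)
    col_b = _Col('VARCHAR(64)', True)
    # Only the unequal attributes are returned, suffixed with _a and _b:
    # {'type_a': 'VARCHAR(32)', 'type_b': 'VARCHAR(64)'}
    return compare(('type', 'nullable'), col_a, col_b)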
|
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing instances.
"""
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django import http
from django import shortcuts
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from horizon import tabs
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances \
import console as project_console
from openstack_dashboard.dashboards.project.instances \
import forms as project_forms
from openstack_dashboard.dashboards.project.instances \
import tables as project_tables
from openstack_dashboard.dashboards.project.instances \
import tabs as project_tabs
from openstack_dashboard.dashboards.project.instances \
import workflows as project_workflows
class IndexView(tables.DataTableView):
table_class = project_tables.InstancesTable
template_name = 'project/instances/index.html'
def has_more_data(self, table):
return self._more
def get_data(self):
marker = self.request.GET.get(
project_tables.InstancesTable._meta.pagination_param, None)
search_opts = self.get_filters({'marker': marker, 'paginate': True})
# Gather our instances
try:
instances, self._more = api.nova.server_list(
self.request,
search_opts=search_opts)
except Exception:
self._more = False
instances = []
exceptions.handle(self.request,
_('Unable to retrieve instances.'))
if instances:
try:
api.network.servers_update_addresses(self.request, instances)
except Exception:
exceptions.handle(
self.request,
message=_('Unable to retrieve IP addresses from Neutron.'),
ignore=True)
# Gather our flavors and images and correlate our instances to them
try:
flavors = api.nova.flavor_list(self.request)
except Exception:
flavors = []
exceptions.handle(self.request, ignore=True)
try:
# TODO(gabriel): Handle pagination.
images, more, prev = api.glance.image_list_detailed(
self.request)
except Exception:
images = []
exceptions.handle(self.request, ignore=True)
full_flavors = SortedDict([(str(flavor.id), flavor)
for flavor in flavors])
image_map = SortedDict([(str(image.id), image)
for image in images])
# Loop through instances to get flavor info.
for instance in instances:
if hasattr(instance, 'image'):
# Instance from image returns dict
if isinstance(instance.image, dict):
if instance.image.get('id') in image_map:
instance.image = image_map[instance.image['id']]
try:
flavor_id = instance.flavor["id"]
if flavor_id in full_flavors:
instance.full_flavor = full_flavors[flavor_id]
else:
# If the flavor_id is not in full_flavors list,
# get it via nova api.
instance.full_flavor = api.nova.flavor_get(
self.request, flavor_id)
except Exception:
msg = _('Unable to retrieve instance size information.')
exceptions.handle(self.request, msg)
return instances
def get_filters(self, filters):
filter_field = self.table.get_filter_field()
filter_action = self.table._meta._filter_action
if filter_action.is_api_filter(filter_field):
filter_string = self.table.get_filter_string()
if filter_field and filter_string:
filters[filter_field] = filter_string
return filters
class MigrateView(forms.ModalFormView):
form_class = project_forms.MigrateInstanceForm
template_name = 'project/instances/migrate.html'
success_url = reverse_lazy('horizon:project:instances:index')
def get_initial(self):
return {"instance_id": self.kwargs["instance_id"]}
def get_context_data(self, **kwargs):
context = super(MigrateView, self).get_context_data(**kwargs)
context['instance_id'] = self.kwargs['instance_id']
return context
class LaunchInstanceView(workflows.WorkflowView):
workflow_class = project_workflows.LaunchInstance
def get_initial(self):
initial = super(LaunchInstanceView, self).get_initial()
initial['project_id'] = self.request.user.tenant_id
initial['user_id'] = self.request.user.id
return initial
def console(request, instance_id):
try:
# TODO(jakedahn): clean this up once the api supports tailing.
tail = request.GET.get('length', None)
data = api.nova.server_console_output(request,
instance_id,
tail_length=tail)
except Exception:
data = _('Unable to get log for instance "%s".') % instance_id
exceptions.handle(request, ignore=True)
response = http.HttpResponse(content_type='text/plain')
response.write(data)
response.flush()
return response
def vnc(request, instance_id):
try:
instance = api.nova.server_get(request, instance_id)
console_url = project_console.get_console(request, 'VNC', instance)
return shortcuts.redirect(console_url)
except Exception:
redirect = reverse("horizon:project:instances:index")
msg = _('Unable to get VNC console for instance "%s".') % instance_id
exceptions.handle(request, msg, redirect=redirect)
def spice(request, instance_id):
try:
instance = api.nova.server_get(request, instance_id)
console_url = project_console.get_console(request, 'SPICE', instance)
return shortcuts.redirect(console_url)
except Exception:
redirect = reverse("horizon:project:instances:index")
msg = _('Unable to get SPICE console for instance "%s".') % instance_id
exceptions.handle(request, msg, redirect=redirect)
def rdp(request, instance_id):
try:
instance = api.nova.server_get(request, instance_id)
console_url = project_console.get_console(request, 'RDP', instance)
return shortcuts.redirect(console_url)
except Exception:
redirect = reverse("horizon:project:instances:index")
msg = _('Unable to get RDP console for instance "%s".') % instance_id
exceptions.handle(request, msg, redirect=redirect)
class UpdateView(workflows.WorkflowView):
workflow_class = project_workflows.UpdateInstance
success_url = reverse_lazy("horizon:project:instances:index")
def get_context_data(self, **kwargs):
context = super(UpdateView, self).get_context_data(**kwargs)
context["instance_id"] = self.kwargs['instance_id']
return context
@memoized.memoized_method
def get_object(self, *args, **kwargs):
instance_id = self.kwargs['instance_id']
try:
return api.nova.server_get(self.request, instance_id)
except Exception:
redirect = reverse("horizon:project:instances:index")
msg = _('Unable to retrieve instance details.')
exceptions.handle(self.request, msg, redirect=redirect)
def get_initial(self):
initial = super(UpdateView, self).get_initial()
initial.update({'instance_id': self.kwargs['instance_id'],
'name': getattr(self.get_object(), 'name', '')})
return initial
class RebuildView(forms.ModalFormView):
form_class = project_forms.RebuildInstanceForm
template_name = 'project/instances/rebuild.html'
success_url = reverse_lazy('horizon:project:instances:index')
def get_context_data(self, **kwargs):
context = super(RebuildView, self).get_context_data(**kwargs)
context['instance_id'] = self.kwargs['instance_id']
context['can_set_server_password'] = api.nova.can_set_server_password()
return context
def get_initial(self):
return {'instance_id': self.kwargs['instance_id']}
class DecryptPasswordView(forms.ModalFormView):
form_class = project_forms.DecryptPasswordInstanceForm
template_name = 'project/instances/decryptpassword.html'
success_url = reverse_lazy('horizon:project:instances:index')
def get_context_data(self, **kwargs):
context = super(DecryptPasswordView, self).get_context_data(**kwargs)
context['instance_id'] = self.kwargs['instance_id']
context['keypair_name'] = self.kwargs['keypair_name']
return context
def get_initial(self):
return {'instance_id': self.kwargs['instance_id'],
'keypair_name': self.kwargs['keypair_name']}
class DetailView(tabs.TabView):
tab_group_class = project_tabs.InstanceDetailTabs
template_name = 'project/instances/detail.html'
redirect_url = 'horizon:project:instances:index'
def get_context_data(self, **kwargs):
context = super(DetailView, self).get_context_data(**kwargs)
context["instance"] = self.get_data()
return context
@memoized.memoized_method
def get_data(self):
try:
instance_id = self.kwargs['instance_id']
instance = api.nova.server_get(self.request, instance_id)
instance.volumes = api.nova.instance_volumes_list(self.request,
instance_id)
# Sort by device name
instance.volumes.sort(key=lambda vol: vol.device)
instance.full_flavor = api.nova.flavor_get(
self.request, instance.flavor["id"])
instance.security_groups = api.network.server_security_groups(
self.request, instance_id)
except Exception:
redirect = reverse(self.redirect_url)
exceptions.handle(self.request,
_('Unable to retrieve details for '
'instance "%s".') % instance_id,
redirect=redirect)
# Not all exception types handled above will result in a redirect.
# Need to raise here just in case.
raise exceptions.Http302(redirect)
try:
api.network.servers_update_addresses(self.request, [instance])
except Exception:
exceptions.handle(
self.request,
_('Unable to retrieve IP addresses from Neutron for instance '
'"%s".') % instance_id, ignore=True)
return instance
def get_tabs(self, request, *args, **kwargs):
instance = self.get_data()
return self.tab_group_class(request, instance=instance, **kwargs)
class ResizeView(workflows.WorkflowView):
workflow_class = project_workflows.ResizeInstance
success_url = reverse_lazy("horizon:project:instances:index")
def get_context_data(self, **kwargs):
context = super(ResizeView, self).get_context_data(**kwargs)
context["instance_id"] = self.kwargs['instance_id']
return context
@memoized.memoized_method
def get_object(self, *args, **kwargs):
instance_id = self.kwargs['instance_id']
try:
instance = api.nova.server_get(self.request, instance_id)
flavor_id = instance.flavor['id']
flavors = self.get_flavors()
if flavor_id in flavors:
instance.flavor_name = flavors[flavor_id].name
else:
flavor = api.nova.flavor_get(self.request, flavor_id)
instance.flavor_name = flavor.name
except Exception:
redirect = reverse("horizon:project:instances:index")
msg = _('Unable to retrieve instance details.')
exceptions.handle(self.request, msg, redirect=redirect)
return instance
@memoized.memoized_method
def get_flavors(self, *args, **kwargs):
try:
flavors = api.nova.flavor_list(self.request)
return SortedDict((str(flavor.id), flavor) for flavor in flavors)
except Exception:
redirect = reverse("horizon:project:instances:index")
exceptions.handle(self.request,
_('Unable to retrieve flavors.'), redirect=redirect)
def get_initial(self):
initial = super(ResizeView, self).get_initial()
_object = self.get_object()
if _object:
initial.update({'instance_id': self.kwargs['instance_id'],
'name': getattr(_object, 'name', None),
'old_flavor_id': _object.flavor['id'],
'old_flavor_name': getattr(_object, 'flavor_name', ''),
'flavors': self.get_flavors()})
return initial
|
|
from __future__ import absolute_import, division, print_function
import argparse
import os
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from PyAstronomy import pyasl
'''
Remove spikes (tellurics) from continuum-normalized apVisit APOGEE spectra.
Typically you will run this after apVisit2input.py, which finds the apVisit
spectra downloaded via the python apogee module for one target and
continuum-normalizes them. If you somehow have an apVisit-style FITS file
that is continuum-normalized already (or close), this can despike it too.
First, read_infiles reads in wavelengths and fluxes for a list of spectra;
Then, despike_spectra despikes the spectra with one of two techniques.
Usage
-----
python despike.py -d datapath -i filelist
datapath: /path/to/filelist/and/files/listed/in/filelist
filelist: Single-column text file with list of *continuum-normalized*
single-visit APOGEE files you want to despike.
The files in this list can either be two-column text files
(wave, flux) or FITS files in the apVisit format.
Result
------
The new despiked spectra are written to two-column (wavelength, flux) files
with similar names as the original, but they now end in '_despiked.txt'.
'''
def main():
'''
Parse arguments, despike a set of spectra, and write the results to files.
'''
# parse command line arguments with argparse
parser = argparse.ArgumentParser()
parser.add_argument('-i', dest='filelist', required=True,
help='text file containing a list of spectra')
parser.add_argument('-d', dest='datapath', required=True,
help='path to where filelist and files in filelist live')
args = parser.parse_args()
filelist = args.filelist
datapath = args.datapath
if not os.path.exists(os.path.join(datapath, filelist)):
raise argparse.ArgumentTypeError("{0} does not exist".format(filelist))
infilelist, wavelist, speclist = read_infiles(datapath, filelist)
newwavelist, newspeclist = despike_spectra(wavelist, speclist)
# write out a set of two-column text files,
# each containing one element of newwavelist and one element of newspeclist
for file, newwave, newspec in zip(infilelist, newwavelist, newspeclist):
# create outfile based on infile name
outfile = os.path.splitext(file)[0] + '_despiked.txt'
with open(outfile, 'w') as f:
for wentry, sentry in zip(newwave, newspec):
print(wentry, sentry, file=f)
return
def read_infiles(datapath, filelist, isFits=False):
'''
Load a text file containing a list of continuum-normalized spectra
Parameters
----------
datapath: `str`
Path to the directory containing both filelist and the files therein
filelist: `str`
Name of a text file containing a list of continuum-normalized apVisit
spectra you want to despike. The files in this list can either be
two-column text files (wave, flux) or FITS files in the apVisit format.
isFits: `bool`
True if the files listed in filelist are FITS files, else False.
Returns
-------
infilelist: `list`
The full path to each spectrum file in filelist
wavelist: `list`
A list of lists containing wavelength values for each spectrum
speclist: `list`
A list of lists containing flux values for the same spectra
'''
print(datapath, filelist)
wavelist = []
speclist = []
with open(os.path.join(datapath, filelist)) as f1:
infilelist = [] # for use later to make outfiles
if isFits: # it's a FITS file
for line in f1:
infile = line.rstrip()
infile = os.path.join(datapath, infile)
infilelist.append(infile)
with fits.open(infile) as hdu:
# APOGEE! the data is in a funny place and backwards
wave = hdu[4].data.flatten()
wave = wave[::-1]
spec = hdu[1].data
spec = spec.flatten()
spec = spec[::-1]
spec = spec / np.median(spec) # put the continuum roughly near 1
wavelist.append(wave)
speclist.append(spec)
else: # it's a text file
for line in f1:
infile = line.rstrip()
infile = os.path.join(datapath, infile)
infilelist.append(infile)
wave, spec = np.loadtxt(infile, usecols=(0, 1), unpack=True)
wavelist.append(wave)
speclist.append(spec)
return infilelist, wavelist, speclist
def simpledespike(wave, spec, delwindow=6, stdfactorup=0.7, stdfactordown=3, plot=True):
'''
Implement a simple despiking routine based on the stdev of 1D fluxes
All outlier points are deleted from wave, spec to yield newwave, newspec.
Parameters
----------
wave: `list`
A 1D list of wavelength values for one spectrum
spec: `list`
A 1D list of flux values for the same spectrum
delwindow: `int`
Around each outlier (upward or downward spike), adjacent points in a window
of +/- delwindow are also flagged as outliers
stdfactorup: `float`
Outliers (upward spikes) are identified as exceeding stdfactorup*sigma
above the continuum
stdfactordown: `float`
Additional outliers (downward spikes) are identified as exceeding
stdfactordown*sigma below the continuum
plot: `bool`
True = show an interactive plot of each spectrum as it is despiked
Returns
-------
newwave: `list`
A 1D list of wavelength values for one despiked spectrum
newspec: `list`
A 1D list of flux values for the same despiked spectrum
'''
pointstodelete = []
outliers = (np.where((spec > 1.0 + stdfactorup*np.std(spec)) |
(spec < 1.0 - stdfactordown*np.std(spec))))[0]
for point in outliers: # add +/- delwindow points around each outlier
pointstodelete.extend(range(point-delwindow, point+delwindow+1))
pointstodelete = [point for point in pointstodelete if 0 <= point < len(spec)]
newwave, newspec = np.delete(wave, pointstodelete), np.delete(spec, pointstodelete)
if plot:
plt.plot(wave, spec)
plt.plot(newwave, newspec, color='r')
plt.xlabel(r'Wavelength ($\AA$)')
plt.ylabel('Normalized flux')
plt.axhline(y=(1 + stdfactorup*np.std(spec)), ls=':', color='g')
plt.axhline(y=(1 - stdfactordown*np.std(spec)), ls=':', color='g')
plt.show()
return newwave, newspec
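# Illustrative sketch only (not part of the original pipeline): despiking a synthetic
# continuum-normalized spectrum with simpledespike. The wavelength grid and the
# injected spike are invented for the demonstration; plot=False keeps it
# non-interactive.
def _simpledespike_demo():
    wave = np.linspace(15100, 16900, 500)  # roughly the APOGEE wavelength span, Angstroms
    spec = np.ones_like(wave) + np.random.normal(0, 0.01, wave.size)
    spec[250] = 1.8  # fake telluric spike
    newwave, newspec = simpledespike(wave, spec, delwindow=6, stdfactorup=0.7,
                                     stdfactordown=3, plot=False)
    # The spike (plus any other flagged points) and +/- delwindow neighbours are
    # removed from both arrays.
    return newwave, newspec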
def generalizedESDdespike(wave, spec, maxOLs=1000, alpha=5000):
'''
Implement a complicated despiking routine from PyAstronomy
(this function is not tested)
'''
r = pyasl.pointDistGESD(spec, maxOLs, alpha)
# r[0] is number of outliers found, r[i] is indices of outliers
# maxOLs is max outliers that may be identified; increase alpha to find more
newwave, newspec = np.delete(wave, r[1]), np.delete(spec, r[1])
return newwave, newspec
def despike_spectra(wavelist, speclist, type='simple', plot=True):
'''
Use one of two techniques to remove spikes from a series of spectra.
Parameters
----------
wavelist: `list`
Input list of wavelength arrays
speclist: `list`
Input list of corresponding flux arrays (for 1D spectra)
type: `str`
type='simple' is recommended (see simpledespike)
type=<anything not 'simple'> will use generalizedESDdespike instead
plot: `bool`
True = show an interactive plot of each spectrum as it is despiked
Returns
-------
newwavelist: `list`
A list of lists containing wavelength values for each despiked spectrum
newspeclist: `list`
A list of lists containing flux values for the same despiked spectra
'''
newwavelist = []
newspeclist = []
for wave, spec in zip(wavelist, speclist):
if type == 'simple':
delwindow = 6
stdfactorup = 0.7
stdfactordown = 3
newwave, newspec = simpledespike(wave, spec,
delwindow=delwindow,
stdfactorup=stdfactorup,
stdfactordown=stdfactordown,
plot=plot)
else:
newwave, newspec = generalizedESDdespike(wave, spec, maxOLs=1000, alpha=5000)
newwavelist.append(newwave)
newspeclist.append(newspec)
return newwavelist, newspeclist
if __name__ == '__main__':
main()
|
|
from urllib.parse import parse_qs
import aiohttp
import discord
from discord.ext import commands
from lxml import etree
class Google(commands.Cog):
def __init__(self, bot):
self.bot = bot
def parse_google_card(self, node):
if node is None:
return None
e = discord.Embed(colour=0x738bd7)
# check if it's a calculator card:
calculator = node.find(".//table/tr/td/span[@class='nobr']/h2[@class='r']")
if calculator is not None:
e.title = 'Calculator'
e.description = ''.join(calculator.itertext())
return e
parent = node.getparent()
# check for unit conversion card
unit = parent.find(".//ol//div[@class='_Tsb']")
if unit is not None:
e.title = 'Unit Conversion'
e.description = ''.join(''.join(n.itertext()) for n in unit)
return e
# check for currency conversion card
currency = parent.find(".//ol/table[@class='std _tLi']/tr/td/h2")
if currency is not None:
e.title = 'Currency Conversion'
e.description = ''.join(currency.itertext())
return e
# check for release date card
release = parent.find(".//div[@id='_vBb']")
if release is not None:
try:
e.description = ''.join(release[0].itertext()).strip()
e.title = ''.join(release[1].itertext()).strip()
return e
except: # noqa
return None
# check for translation card
words = parent.find(".//ol/div[@class='g']/div/table/tr/td/h3[@class='r']")
if words is not None:
e.title = 'Google Translate'
e.add_field(name='Input', value=words[0].text, inline=True)
e.add_field(name='Output', value=words[1].text, inline=True)
return e
# check for definition card
words = parent.find(".//ol/div[@class='g']/div/h3[@class='r']/div")
if words is not None:
try:
definition_info = words.getparent().getparent()[1] # yikes
except: # noqa
pass
else:
try:
# inside is a <div> with two <span>
# the first is the actual word, the second is the pronunciation
e.title = words[0].text
e.description = words[1].text
except: # noqa
return None
# inside the table there's the actual definitions
# they're separated as noun/verb/adjective with a list
# of definitions
for row in definition_info:
if len(row.attrib) != 0:
# definitions are empty <tr>
# if there is something in the <tr> then we're done
# with the definitions
break
try:
data = row[0]
lexical_category = data[0].text
body = []
for index, definition in enumerate(data[1], 1):
body.append('%s. %s' % (index, definition.text))
e.add_field(name=lexical_category, value='\n'.join(body), inline=False)
except: # noqa
continue
return e
# check for "time in" card
time_in = parent.find(".//ol//div[@class='_Tsb _HOb _Qeb']")
if time_in is not None:
try:
time_place = ''.join(time_in.find("span[@class='_HOb _Qeb']").itertext()).strip()
the_time = ''.join(time_in.find("div[@class='_rkc _Peb']").itertext()).strip()
the_date = ''.join(time_in.find("div[@class='_HOb _Qeb']").itertext()).strip()
except: # noqa
return None
else:
e.title = time_place
e.description = '%s\n%s' % (the_time, the_date)
return e
# check for weather card
# this one is the most complicated of the group lol
# everything is under a <div class="e"> which has a
# <h3>{{ weather for place }}</h3>
# string, the rest is fucking table fuckery.
weather = parent.find(".//ol//div[@class='e']")
if weather is None:
return None
location = weather.find('h3')
if location is None:
return None
e.title = ''.join(location.itertext())
table = weather.find('table')
if table is None:
return None
# This is gonna be a bit fucky.
# So the part we care about is on the second data
# column of the first tr
try:
tr = table[0]
img = tr[0].find('img')
category = img.get('alt')
image = 'https:' + img.get('src')
temperature = tr[1].xpath("./span[@class='wob_t']//text()")[0]
except: # noqa
return None # RIP
else:
e.set_thumbnail(url=image)
e.description = '*%s*' % category
e.add_field(name='Temperature', value=temperature)
# On the 4th column it tells us our wind speeds
try:
wind = ''.join(table[3].itertext()).replace('Wind: ', '')
except: # noqa
return None
else:
e.add_field(name='Wind', value=wind)
# On the 5th column it tells us our humidity
try:
humidity = ''.join(table[4][0].itertext()).replace('Humidity: ', '')
except: # noqa
return None
else:
e.add_field(name='Humidity', value=humidity)
return e
async def get_google_entries(self, query):
params = {
'q': query,
'safe': 'on'
}
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64)'
}
# list of URLs
entries = []
# the result of a google card, an embed
card = None
async with aiohttp.ClientSession() as cs:
async with cs.get('https://www.google.com/search', params=params, headers=headers) as resp:
if resp.status != 200:
link = "https://www.google.com/search?q=" + query.replace(" ", "+")
raise RuntimeError('Google decided to ignore the bot.\n'
'Use the following link to achieve the same result:\n' + link)
root = etree.fromstring(await resp.text(), etree.HTMLParser())
# with open('google.html', 'w', encoding='utf-8') as f:
# f.write(etree.tostring(root, pretty_print=True).decode('utf-8'))
"""
Tree looks like this.. sort of..
<div class="g">
...
<h3>
<a href="/url?q=<url>" ...>title</a>
</h3>
...
<span class="st">
<span class="f">date here</span>
summary here, can contain <em>tag</em>
</span>
</div>
"""
card_node = root.find(".//div[@id='topstuff']")
card = self.parse_google_card(card_node)
search_nodes = root.findall(".//div[@class='g']")
for node in search_nodes:
url_node = node.find('.//h3/a')
if url_node is None:
continue
url = url_node.attrib['href']
if not url.startswith('/url?'):
continue
url = parse_qs(url[5:])['q'][0] # get the URL from ?q query string
# if I ever cared about the description, this is how
entries.append(url)
# short = node.find(".//span[@class='st']")
# if short is None:
# entries.append((url, ''))
# else:
# text = ''.join(short.itertext())
# entries.append((url, text.replace('...', '')))
return card, entries
@commands.command(aliases=['google', 'search'])
async def g(self, ctx, *, query):
"""Searches google and gives you top result."""
await ctx.trigger_typing()
try:
card, entries = await self.get_google_entries(query)
except RuntimeError as e:
await ctx.send(str(e))
else:
if card:
value = '\n'.join(entries[:3])
if value:
card.add_field(name='Search Results', value=value, inline=False)
return await ctx.send(embed=card)
if len(entries) == 0:
return await ctx.send('No results found... sorry.')
icon = "https://cdn.discordapp.com/attachments/246291440106340352/293036111373139969/google_logo1600.png"
emb = discord.Embed(colour=0x77EE00, timestamp=ctx.message.created_at)
emb.set_author(name="Google Search", url="https://www.google.com/search?q=" + query.replace(" ", "+"),
icon_url=icon)
next_two = entries[1:3]
first_entry = entries[0]
if first_entry[-1] == ')':
first_entry = first_entry[:-1] + '%29'
if next_two:
formatted = '\n'.join(map(lambda x: '<%s>' % x, next_two))
emb.add_field(name="Search Result", value=first_entry)
emb.add_field(name="More", value=formatted, inline=False)
else:
emb.add_field(name="Search Result", value=first_entry)
await ctx.send(embed=emb)
def setup(bot):
bot.add_cog(Google(bot))
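# Minimal standalone sketch of the link unwrapping done in get_google_entries above:
# result links come back as "/url?q=<real url>&...", so the real URL is recovered by
# parsing the query string. The example href in the comment is made up.
def _unwrap_google_href(href):
    if not href.startswith('/url?'):
        return href
    return parse_qs(href[5:])['q'][0]
# _unwrap_google_href('/url?q=https://example.com/page&sa=U') -> 'https://example.com/page'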
|
|
"""
Generic tag bot, yay.
"""
import asyncio
import copy
import shlex
import traceback
import discord
from discord.ext import commands
from discord.ext.commands import CommandError, CommandInvokeError, CommandNotFound
from joku.cogs._common import Cog
from joku.core.bot import Context, Jokusoramame
from joku.core.tagengine import TagEngine
class Tags(Cog):
def __init__(self, bot: Jokusoramame):
super().__init__(bot)
self.engine = TagEngine(self.bot)
def _sanitize_name(self, name: str) -> str:
return name.replace("@everyone", "@\u200beveryone").replace("@here", "@\u200bhere")
@commands.group(pass_context=True, invoke_without_command=True,
aliases=["tags"])
async def tag(self, ctx: Context, *, name: str):
"""
Tags are like aliases or autoresponses that can be added to the bot.
They are made with `tag create`, and deleted with `tag delete`.
To be accessed, they are simply called like commands are.
"""
tag, alias = await ctx.bot.database.get_tag(ctx.message.guild, name, return_alias=True)
if not tag:
await ctx.channel.send(":x: Tag not found.")
return
if alias is not None:
em = discord.Embed(title=alias.alias_name, description="Alias for `{}`".format(tag.name))
owner = ctx.bot.get_member(alias.user_id)
else:
em = discord.Embed(title=tag.name, description="```{}```".format(tag.content))
owner = ctx.bot.get_member(tag.user_id)
em.add_field(name="Owner", value=owner.mention if owner else "<Unknown>")
em.add_field(name="Last Modified", value=tag.last_modified.isoformat())
# Display the tag info.
await ctx.channel.send(embed=em)
@tag.command()
async def copy(self, ctx: Context, guild_id: int, *, tag_name: str):
"""
Copies a tag from another guild.
This requires the guild ID of the guild to copy.
To get this, enable Developer Mode and Copy ID.
"""
guild = self.bot.get_guild(guild_id)
if not guild:
await ctx.send(":x: I am not in that guild.")
return
server_tags = await ctx.bot.database.get_all_tags_for_guild(ctx.message.guild)
other_tags = await ctx.bot.database.get_all_tags_for_guild(guild)
if tag_name not in [tag.name for tag in other_tags]:
await ctx.send(":x: That tag does not exist in the other server.")
elif tag_name in [tag.name for tag in server_tags]:
await ctx.send(":x: That tag already exists in this server.")
else:
tag_object = next(filter(lambda tag: tag.name == tag_name, other_tags))
await ctx.bot.database.save_tag(ctx.guild, tag_object.name, tag_object.content,
owner=ctx.author, lua=tag_object.lua)
await ctx.send(":heavy_check_mark: Copied tag `{}`.".format(tag_name))
@tag.command()
async def alias(self, ctx: Context, tag_name: str, *, alias_name: str):
"""
Creates an alias to a tag.
"""
tag, alias = await ctx.bot.database.get_tag(ctx.guild, alias_name, return_alias=True)
if alias is not None:
await ctx.send(":x: That tag already has an alias by that name.")
return
tag = await ctx.bot.database.get_tag(ctx.guild, tag_name)
if tag is None:
await ctx.send(":x: That tag does not exist.")
return
await ctx.bot.database.create_tag_alias(ctx.guild, tag, alias_name, ctx.author)
await ctx.send(":heavy_check_mark: Created alias `{}` for `{}`.".format(alias_name, tag.name))
@tag.command()
async def unalias(self, ctx: Context, alias_name: str):
"""
Removes an alias to a tag.
You must be the owner of this alias.
"""
tag, alias = await ctx.bot.database.get_tag(ctx.guild, alias_name, return_alias=True)
if alias is None:
await ctx.send(":x: That alias does not exist.")
return
if alias.user_id != ctx.author.id and not ctx.message.author.guild_permissions.administrator:
await ctx.send(":x: Cannot remove somebody else's alias.")
return
await ctx.bot.database.remove_tag_alias(ctx.guild, alias)
await ctx.send(":heavy_check_mark: Removed tag alias.")
@tag.command(pass_context=True, aliases=["list"])
async def all(self, ctx: Context):
"""
Shows all the tags for the current server
"""
server_tags = await ctx.bot.database.get_all_tags_for_guild(ctx.message.guild)
if not server_tags:
await ctx.channel.send(":x: This server has no tags.")
return
await ctx.channel.send("Tags: " + ", ".join([self._sanitize_name(x.name) for x in server_tags]))
@tag.command(pass_context=True, aliases=["edit"])
async def create(self, ctx: Context, name: str, *, content: str):
"""
Creates a new tag.
This will overwrite other tags with the same name,
if you are the owner or an administrator.
Tags use Lua scripting to generate the final output.
"""
existing_tag = await ctx.bot.database.get_tag(ctx.message.guild, name)
if existing_tag:
# Check for the admin perm.
if not ctx.message.author.guild_permissions.administrator:
# Check if the owner_id matches the author id.
if ctx.message.author.id != existing_tag.user_id:
await ctx.channel.send(":x: You cannot edit somebody else's tag.")
return
# Don't overwrite the owner_id.
owner = None
else:
owner = ctx.message.author
# Replace stuff in content.
content = content.replace("@everyone", "@\u200beveryone").replace("@here", "@\u200bhere")
# Set the tag.
await ctx.bot.database.save_tag(ctx.message.guild, name, content, owner=owner, lua=True)
await ctx.channel.send(":heavy_check_mark: Tag **{}** saved.".format(self._sanitize_name(name)))
@tag.command(pass_context=True, aliases=["remove"])
async def delete(self, ctx: Context, *, name: str):
"""
Deletes a tag.
You must be the owner of the tag, or an administrator.
"""
existing_tag = await ctx.bot.database.get_tag(ctx.message.guild, name)
if not existing_tag:
await ctx.channel.send(":x: This tag does not exist.")
return
# Check the owner_id
if not ctx.message.author.guild_permissions.administrator:
if existing_tag.user_id != ctx.message.author.id:
await ctx.channel.send(":x: You do not have permission to delete this tag.")
return
# Now, delete the tag.
await ctx.bot.database.delete_tag(ctx.message.guild, name)
await ctx.channel.send(":put_litter_in_its_place: Tag **{}** deleted.".format(self._sanitize_name(name)))
# Unlike other bots, tags are registered like full commands.
# So, they're entirely handled inside on_command_error.
# This will catch the CommandNotFound, and try and find the tag.
async def on_command_error(self, exc: CommandError, ctx: Context):
if not isinstance(exc, CommandNotFound):
# We don't want to catch any non-command not found errors.
return
# Extract the tag from the message content.
cmd = ctx.message.content[len(ctx.prefix):]
cmd = cmd.split(" ")[0]
# Create the arguments for the template.
guild = {"name": ctx.message.guild.name, "icon_url": ctx.message.guild.icon_url,
"id": ctx.message.guild.id, "member_count": ctx.message.guild.member_count,
"created_at": ctx.message.guild.created_at}
author = {"name": ctx.message.author.name, "nick": ctx.message.author.nick,
"discriminator": ctx.message.author.discriminator, "id": ctx.message.author.id,
"colour": ctx.message.author.colour, "mention": ctx.message.author.mention,
"permissions": ctx.message.channel.permissions_for(ctx.message.author),
"guild_permissions": ctx.message.author.guild_permissions,
"joined_at": ctx.message.author.joined_at, "created_at": ctx.message.author.created_at,
"guild": guild}
channel = {"name": ctx.message.channel.name, "id": ctx.message.channel.id,
"mention": ctx.message.channel.mention, "guild": guild}
message = {"id": ctx.message.id, "content": ctx.message.content, "clean_content": ctx.message.clean_content,
"channel": channel, "guild": guild, "author": author}
args = {
"args": shlex.split(ctx.message.content[len(ctx.prefix):])[1:],
"clean_args": shlex.split(ctx.message.clean_content[len(ctx.prefix):])[1:],
"message": message,
"channel": channel,
"author": author,
"server": guild
}
# Render the template, using the args.
try:
coro = self.engine.render_template(cmd, ctx=ctx, **args)
rendered = await coro
except (asyncio.CancelledError, asyncio.TimeoutError) as e:
rendered = "**Timed out waiting for template to render.**"
except Exception as e:
traceback.print_exception(type(e), e, e.__traceback__)
rendered = "**Error when compiling template:**\n`{}`".format(repr(e))
else:
if not rendered:
return
try:
rendered = rendered.replace("@everyone", "@\u200beveryone")
rendered = rendered.replace("@here", "@\u200bhere")
await ctx.message.channel.send(rendered)
except Exception as e:
# Panic, and dispatch on_command_error.
new_e = CommandInvokeError(e)
new_e.__cause__ = e
ctx.bot.dispatch("command_error", new_e, ctx)
def setup(bot):
bot.add_cog(Tags(bot))
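# Small standalone illustration of the tag-name extraction used in on_command_error
# above: the prefix is stripped from the raw message content and the first token is
# treated as the tag name. The prefix and content shown in the comment are invented.
def _extract_tag_name(content, prefix):
    return content[len(prefix):].split(" ")[0]
# _extract_tag_name("!greet everyone", "!") -> "greet"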
|
|
import pytest
import io
import json
from http import client
import aiohttpretty
from waterbutler.core import streams
from waterbutler.core import exceptions
from waterbutler.core.path import WaterButlerPath
from waterbutler.providers.dataverse import settings as dvs
from waterbutler.providers.dataverse import DataverseProvider
from waterbutler.providers.dataverse.metadata import DataverseFileMetadata, DataverseRevision
from tests.providers.dataverse.fixtures import (
native_file_metadata,
native_dataset_metadata,
empty_native_dataset_metadata,
checksum_mismatch_dataset_metadata,
auth,
credentials,
settings
)
@pytest.fixture
def provider(auth, credentials, settings):
return DataverseProvider(auth, credentials, settings)
@pytest.fixture
def file_content():
return b'SLEEP IS FOR THE WEAK GO SERVE STREAMS'
@pytest.fixture
def file_like(file_content):
return io.BytesIO(file_content)
@pytest.fixture
def file_stream(file_like):
return streams.FileStreamReader(file_like)
class TestValidatePath:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_file(self, provider, native_dataset_metadata):
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
draft_url,
status=200,
body=native_dataset_metadata)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
path = '/21'
try:
wb_path_v1 = await provider.validate_v1_path(path)
except Exception as exc:
pytest.fail(str(exc))
with pytest.raises(exceptions.NotFoundError) as exc:
await provider.validate_v1_path(path + '/')
assert exc.value.code == client.NOT_FOUND
wb_path_v0 = await provider.validate_path(path)
assert wb_path_v1 == wb_path_v0
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_folder(self, provider):
try:
wb_path_v1 = await provider.validate_v1_path('/')
except Exception as exc:
pytest.fail(str(exc))
wb_path_v0 = await provider.validate_path('/')
assert wb_path_v1 == wb_path_v0
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_revalidate_path(self, provider, native_dataset_metadata):
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
draft_url,
status=200,
body=native_dataset_metadata)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
base = await provider.validate_v1_path('/')
wb_path = await provider.revalidate_path(base, '/thefile.txt')
assert wb_path.name == 'thefile.txt'
wb_path = await provider.revalidate_path(base, '/new_path')
assert wb_path.name == 'new_path'
class TestCRUD:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_download(self, provider, native_dataset_metadata):
path = '/21'
url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token)
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_uri('GET', url, body=b'better', auto_length=True)
aiohttpretty.register_json_uri('GET',
draft_url,
status=200,
body=native_dataset_metadata)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
path = await provider.validate_path(path)
result = await provider.download(path)
content = await result.read()
assert content == b'better'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_download_not_found(self, provider, native_dataset_metadata):
path = '/21'
url = provider.build_url(dvs.DOWN_BASE_URL, path, key=provider.token)
aiohttpretty.register_uri('GET', url, status=404)
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
path = await provider.validate_path(path)
with pytest.raises(exceptions.DownloadError):
await provider.download(path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_download_invalid_path(self, provider, native_dataset_metadata):
path = '/50'
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
path = await provider.validate_path(path)
with pytest.raises(exceptions.NotFoundError):
await provider.download(path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_create(self, provider, file_stream, native_file_metadata,
empty_native_dataset_metadata, native_dataset_metadata):
path = '/thefile.txt'
url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
aiohttpretty.register_uri('POST', url, status=201)
latest_url = provider.build_url(
dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token
)
latest_published_url = provider.build_url(
dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token
)
aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
aiohttpretty.register_uri('GET', latest_url, responses=[
{
'status': 200,
'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'),
'headers': {'Content-Type': 'application/json'},
},
{
'status': 200,
'body': json.dumps(native_dataset_metadata).encode('utf-8'),
'headers': {'Content-Type': 'application/json'},
},
])
path = await provider.validate_path(path)
metadata, created = await provider.upload(file_stream, path)
entry = native_file_metadata['datafile']
expected = DataverseFileMetadata(entry, 'latest')
assert created is True
assert metadata == expected
assert aiohttpretty.has_call(method='POST', uri=url)
assert aiohttpretty.has_call(method='GET', uri=latest_url)
assert aiohttpretty.has_call(method='GET', uri=latest_published_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_updates(self, provider,
file_stream,
native_file_metadata,
native_dataset_metadata):
path = '/20'
url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
aiohttpretty.register_uri('POST', url, status=201)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
delete_url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', '/20') # Old file id
aiohttpretty.register_json_uri('DELETE', delete_url, status=204)
latest_published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
path = await provider.validate_path(path)
metadata, created = await provider.upload(file_stream, path)
entry = native_file_metadata['datafile']
expected = DataverseFileMetadata(entry, 'latest')
assert metadata == expected
assert created is False
assert aiohttpretty.has_call(method='POST', uri=url)
assert aiohttpretty.has_call(method='GET', uri=published_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_upload_checksum_mismatch(self, provider, file_stream,
empty_native_dataset_metadata,
checksum_mismatch_dataset_metadata):
path = '/thefile.txt'
url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'study', provider.doi)
aiohttpretty.register_uri('POST', url, status=201)
latest_url = provider.build_url(
dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token
)
latest_published_url = provider.build_url(
dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token
)
aiohttpretty.register_json_uri('GET', latest_published_url, body={'data': {'files': []}})
aiohttpretty.register_uri('GET', latest_url, responses=[
{
'status': 200,
'body': json.dumps(empty_native_dataset_metadata).encode('utf-8'),
'headers': {'Content-Type': 'application/json'},
},
{
'status': 200,
'body': json.dumps(checksum_mismatch_dataset_metadata).encode('utf-8'),
'headers': {'Content-Type': 'application/json'},
},
])
path = await provider.validate_path(path)
with pytest.raises(exceptions.UploadChecksumMismatchError) as exc:
await provider.upload(file_stream, path)
assert aiohttpretty.has_call(method='POST', uri=url)
assert aiohttpretty.has_call(method='GET', uri=latest_url)
assert aiohttpretty.has_call(method='GET', uri=latest_published_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_delete_file(self, provider, native_dataset_metadata):
path = '21'
url = provider.build_url(dvs.EDIT_MEDIA_BASE_URL, 'file', path)
aiohttpretty.register_json_uri('DELETE', url, status=204)
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
path = await provider.validate_path(path)
await provider.delete(path)
assert aiohttpretty.has_call(method='DELETE', uri=url)
class TestRevisions:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_revisions(self, provider, native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
path = WaterButlerPath('/thefile.txt', _ids=('?', '19'))
result = await provider.revisions(path, version='latest')
assert isinstance(result[0], DataverseRevision)
assert result[0].raw == 'latest-published'
class TestMetadata:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata(self, provider, native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
path = WaterButlerPath('/thefile.txt', _ids=('?', '19'))
result = await provider.metadata(path, version='latest')
assert result.provider == 'dataverse'
assert result.kind == 'file'
assert result.name == 'UnZip.java'
assert result.path == '/19'
assert result.extra['fileId'] == '19'
assert result.materialized_path == '/UnZip.java'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_root(self, provider, native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
path = await provider.validate_path('/')
result = await provider.metadata(path, version='latest')
assert len(result) == 3
assert result[0].provider == 'dataverse'
assert result[0].kind == 'file'
assert result[0].name == 'UnZip.java'
assert result[0].path == '/19'
assert result[0].extra['fileId'] == '19'
assert result[0].materialized_path == '/UnZip.java'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_no_files(self, provider, empty_native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=empty_native_dataset_metadata)
path = await provider.validate_path('/')
result = await provider.metadata(path, version='latest')
assert result == []
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_404(self, provider, native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=404, body=native_dataset_metadata)
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
path = WaterButlerPath('/thefilenotfound.txt', _ids=('?', 'nobody has this fileId'))
with pytest.raises(exceptions.MetadataError):
await provider.metadata(path, version='latest')
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_published(self, provider, native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=native_dataset_metadata)
path = await provider.validate_path('/')
result = await provider.metadata(path, version='latest-published')
assert len(result) == 3
assert result[0].provider == 'dataverse'
assert result[0].kind == 'file'
assert result[0].name == 'UnZip.java'
assert result[0].path == '/19'
assert result[0].extra['fileId'] == '19'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_published_no_files(self, provider, empty_native_dataset_metadata):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=200, body=empty_native_dataset_metadata)
path = await provider.validate_path('/')
result = await provider.metadata(path, version='latest-published')
assert result == []
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_draft_metadata_missing(self, provider):
url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', url, status=404)
path = await provider.validate_path('/')
with pytest.raises(exceptions.MetadataError):
await provider.metadata(path, version='latest')
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_draft_metadata_no_state_catches_all(self, provider, native_dataset_metadata):
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id, 'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata)
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET',
published_url,
status=200,
body=native_dataset_metadata)
path = await provider.validate_path('/')
result = await provider.metadata(path)
assert isinstance(result, list)
assert len(result) == 6
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_never_published(self, provider, native_dataset_metadata):
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', published_url, status=404)
draft_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest'),
key=provider.token)
aiohttpretty.register_json_uri('GET', draft_url, status=200, body=native_dataset_metadata)
path = await provider.validate_path('/')
result = await provider.metadata(path)
assert len(result) == 3
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_metadata_never_published_raises_errors(self, provider):
published_url = provider.build_url(dvs.JSON_BASE_URL.format(provider._id,
'latest-published'),
key=provider.token)
aiohttpretty.register_json_uri('GET', published_url, status=400)
path = await provider.validate_path('/')
with pytest.raises(exceptions.MetadataError) as e:
result = await provider.metadata(path)
assert e.value.code == 400
class TestUtils:
def test_utils(self, provider):
assert not provider.can_duplicate_names()
|
|
from __future__ import absolute_import
import sys
import os
import errno
import types
import gc
import signal
import traceback
from gevent.event import AsyncResult
from gevent.hub import get_hub, linkproxy, sleep, getcurrent
from gevent.fileobject import FileObject
from gevent.greenlet import Greenlet, joinall
spawn = Greenlet.spawn
import subprocess as __subprocess__
# Standard functions and classes that this module re-implements in a gevent-aware way.
__implements__ = ['Popen',
'call',
'check_call',
'check_output']
# Standard functions and classes that this module re-imports.
__imports__ = ['PIPE',
'STDOUT',
'CalledProcessError',
# Windows:
'CREATE_NEW_CONSOLE',
'CREATE_NEW_PROCESS_GROUP',
'STD_INPUT_HANDLE',
'STD_OUTPUT_HANDLE',
'STD_ERROR_HANDLE',
'SW_HIDE',
'STARTF_USESTDHANDLES',
'STARTF_USESHOWWINDOW']
__extra__ = ['MAXFD',
'_eintr_retry_call',
'STARTUPINFO',
'pywintypes',
'list2cmdline',
'_subprocess',
# Python 2.5 does not have _subprocess, so we don't use it
'WAIT_OBJECT_0',
'WaitForSingleObject',
'GetExitCodeProcess',
'GetStdHandle',
'CreatePipe',
'DuplicateHandle',
'GetCurrentProcess',
'DUPLICATE_SAME_ACCESS',
'GetModuleFileName',
'GetVersion',
'CreateProcess',
'INFINITE',
'TerminateProcess']
for name in __imports__[:]:
try:
value = getattr(__subprocess__, name)
globals()[name] = value
except AttributeError:
__imports__.remove(name)
__extra__.append(name)
if sys.version_info[:2] <= (2, 6):
__implements__.remove('check_output')
__extra__.append('check_output')
_subprocess = getattr(__subprocess__, '_subprocess', None)
_NONE = object()
for name in __extra__[:]:
if name in globals():
continue
value = _NONE
try:
value = getattr(__subprocess__, name)
except AttributeError:
if _subprocess is not None:
try:
value = getattr(_subprocess, name)
except AttributeError:
pass
if value is _NONE:
__extra__.remove(name)
else:
globals()[name] = value
__all__ = __implements__ + __imports__
mswindows = sys.platform == 'win32'
if mswindows:
import msvcrt
else:
import fcntl
import pickle
from gevent import monkey
fork = monkey.get_original('os', 'fork')
def call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete, then
return the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
retcode = call(["ls", "-l"])
"""
return Popen(*popenargs, **kwargs).wait()
def check_call(*popenargs, **kwargs):
"""Run command with arguments. Wait for command to complete. If
the exit code was zero then return, otherwise raise
CalledProcessError. The CalledProcessError object will have the
return code in the returncode attribute.
The arguments are the same as for the Popen constructor. Example:
check_call(["ls", "-l"])
"""
retcode = call(*popenargs, **kwargs)
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd)
return 0
def check_output(*popenargs, **kwargs):
r"""Run command with arguments and return its output as a byte string.
If the exit code was non-zero it raises a CalledProcessError. The
CalledProcessError object will have the return code in the returncode
attribute and output in the output attribute.
The arguments are the same as for the Popen constructor. Example:
>>> check_output(["ls", "-1", "/dev/null"])
'/dev/null\n'
The stdout argument is not allowed as it is used internally.
To capture standard error in the result, use stderr=STDOUT.
>>> check_output(["/bin/sh", "-c", "echo hello world"], stderr=STDOUT)
'hello world\n'
"""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output = process.communicate()[0]
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
ex = CalledProcessError(retcode, cmd)
# on Python 2.6 and older CalledProcessError does not accept 'output' argument
ex.output = output
raise ex
return output
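# Gevent-cooperative replacement for subprocess.Popen. On POSIX it relies on the
# hub's SIGCHLD child watcher, and on Windows it waits on the process handle in
# the hub's threadpool, so wait()/communicate() block only the calling greenlet
# rather than the whole event loop.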
class Popen(object):
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0, threadpool=None):
"""Create new Popen instance."""
if not isinstance(bufsize, (int, long)):
raise TypeError("bufsize must be an integer")
hub = get_hub()
if mswindows:
if preexec_fn is not None:
raise ValueError("preexec_fn is not supported on Windows "
"platforms")
if close_fds and (stdin is not None or stdout is not None or
stderr is not None):
raise ValueError("close_fds is not supported on Windows "
"platforms if you redirect stdin/stdout/stderr")
if threadpool is None:
threadpool = hub.threadpool
self.threadpool = threadpool
self._waiting = False
else:
# POSIX
if startupinfo is not None:
raise ValueError("startupinfo is only supported on Windows "
"platforms")
if creationflags != 0:
raise ValueError("creationflags is only supported on Windows "
"platforms")
assert threadpool is None
self._loop = hub.loop
self.stdin = None
self.stdout = None
self.stderr = None
self.pid = None
self.returncode = None
self.universal_newlines = universal_newlines
self.result = AsyncResult()
# Input and output objects. The general principle is like
# this:
#
# Parent Child
# ------ -----
# p2cwrite ---stdin---> p2cread
# c2pread <--stdout--- c2pwrite
# errread <--stderr--- errwrite
#
# On POSIX, the child objects are file descriptors. On
# Windows, these are Windows file handles. The parent objects
# are file descriptors on both platforms. The parent objects
# are None when not using PIPEs. The child objects are None
# when not redirecting.
(p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite) = self._get_handles(stdin, stdout, stderr)
self._execute_child(args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
if mswindows:
if p2cwrite is not None:
p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
if c2pread is not None:
c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
if errread is not None:
errread = msvcrt.open_osfhandle(errread.Detach(), 0)
if p2cwrite is not None:
self.stdin = FileObject(p2cwrite, 'wb')
if c2pread is not None:
if universal_newlines:
self.stdout = FileObject(c2pread, 'rU')
else:
self.stdout = FileObject(c2pread, 'rb')
if errread is not None:
if universal_newlines:
self.stderr = FileObject(errread, 'rU')
else:
self.stderr = FileObject(errread, 'rb')
def __repr__(self):
return '<%s at 0x%x pid=%r returncode=%r>' % (self.__class__.__name__, id(self), self.pid, self.returncode)
def _on_child(self, watcher):
watcher.stop()
status = watcher.rstatus
if os.WIFSIGNALED(status):
self.returncode = -os.WTERMSIG(status)
else:
self.returncode = os.WEXITSTATUS(status)
self.result.set(self.returncode)
def communicate(self, input=None):
"""Interact with process: Send data to stdin. Read data from
stdout and stderr, until end-of-file is reached. Wait for
process to terminate. The optional input argument should be a
string to be sent to the child process, or None, if no data
should be sent to the child.
communicate() returns a tuple (stdout, stderr)."""
greenlets = []
if self.stdin:
greenlets.append(spawn(write_and_close, self.stdin, input))
if self.stdout:
stdout = spawn(self.stdout.read)
greenlets.append(stdout)
else:
stdout = None
if self.stderr:
stderr = spawn(self.stderr.read)
greenlets.append(stderr)
else:
stderr = None
joinall(greenlets)
if self.stdout:
self.stdout.close()
if self.stderr:
self.stderr.close()
self.wait()
return (None if stdout is None else stdout.value or '',
None if stderr is None else stderr.value or '')
def poll(self):
return self._internal_poll()
def rawlink(self, callback):
self.result.rawlink(linkproxy(callback, self))
# XXX unlink
if mswindows:
#
# Windows methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
if stdin is None and stdout is None and stderr is None:
return (None, None, None, None, None, None)
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
p2cread = GetStdHandle(STD_INPUT_HANDLE)
if p2cread is None:
p2cread, _ = CreatePipe(None, 0)
elif stdin == PIPE:
p2cread, p2cwrite = CreatePipe(None, 0)
elif isinstance(stdin, int):
p2cread = msvcrt.get_osfhandle(stdin)
else:
# Assuming file-like object
p2cread = msvcrt.get_osfhandle(stdin.fileno())
p2cread = self._make_inheritable(p2cread)
if stdout is None:
c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
if c2pwrite is None:
_, c2pwrite = CreatePipe(None, 0)
elif stdout == PIPE:
c2pread, c2pwrite = CreatePipe(None, 0)
elif isinstance(stdout, int):
c2pwrite = msvcrt.get_osfhandle(stdout)
else:
# Assuming file-like object
c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
c2pwrite = self._make_inheritable(c2pwrite)
if stderr is None:
errwrite = GetStdHandle(STD_ERROR_HANDLE)
if errwrite is None:
_, errwrite = CreatePipe(None, 0)
elif stderr == PIPE:
errread, errwrite = CreatePipe(None, 0)
elif stderr == STDOUT:
errwrite = c2pwrite
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
# Assuming file-like object
errwrite = msvcrt.get_osfhandle(stderr.fileno())
errwrite = self._make_inheritable(errwrite)
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _make_inheritable(self, handle):
"""Return a duplicate of handle, which is inheritable"""
return DuplicateHandle(GetCurrentProcess(),
handle, GetCurrentProcess(), 0, 1,
DUPLICATE_SAME_ACCESS)
def _find_w9xpopen(self):
"""Find and return absolut path to w9xpopen.exe"""
w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
# Eeek - file-not-found - possibly an embedding
# situation - see if we can locate it in sys.exec_prefix
w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
"w9xpopen.exe")
if not os.path.exists(w9xpopen):
raise RuntimeError("Cannot locate w9xpopen.exe, which is "
"needed for Popen to work with your "
"shell or platform.")
return w9xpopen
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program (MS Windows version)"""
if not isinstance(args, types.StringTypes):
args = list2cmdline(args)
# Process startup details
if startupinfo is None:
startupinfo = STARTUPINFO()
if None not in (p2cread, c2pwrite, errwrite):
startupinfo.dwFlags |= STARTF_USESTDHANDLES
startupinfo.hStdInput = p2cread
startupinfo.hStdOutput = c2pwrite
startupinfo.hStdError = errwrite
if shell:
startupinfo.dwFlags |= STARTF_USESHOWWINDOW
startupinfo.wShowWindow = SW_HIDE
comspec = os.environ.get("COMSPEC", "cmd.exe")
args = '{} /c "{}"'.format(comspec, args)
if GetVersion() >= 0x80000000 or os.path.basename(comspec).lower() == "command.com":
# Win9x, or using command.com on NT. We need to
# use the w9xpopen intermediate program. For more
# information, see KB Q150956
# (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
w9xpopen = self._find_w9xpopen()
args = '"%s" %s' % (w9xpopen, args)
# Not passing CREATE_NEW_CONSOLE has been known to
# cause random failures on win9x. Specifically a
# dialog: "Your program accessed mem currently in
# use at xxx" and a hopeful warning about the
# stability of your system. The cost is that Ctrl+C won't
# kill children.
creationflags |= CREATE_NEW_CONSOLE
# Start the process
try:
hp, ht, pid, tid = CreateProcess(executable, args,
# no special security
None, None,
int(not close_fds),
creationflags,
env,
cwd,
startupinfo)
except pywintypes.error, e:
# Translate pywintypes.error to WindowsError, which is
# a subclass of OSError. FIXME: We should really
# translate errno using _sys_errlist (or similar), but
# how can this be done from Python?
raise WindowsError(*e.args)
finally:
# Child is launched. Close the parent's copy of those pipe
# handles that only the child should have open. You need
# to make sure that no handles to the write end of the
# output pipe are maintained in this process or else the
# pipe will not close when the child process exits and the
# ReadFile will hang.
if p2cread is not None:
p2cread.Close()
if c2pwrite is not None:
c2pwrite.Close()
if errwrite is not None:
errwrite.Close()
# Retain the process handle, but close the thread handle
self._handle = hp
self.pid = pid
ht.Close()
def _internal_poll(self):
"""Check if child process has terminated. Returns returncode
attribute.
"""
if self.returncode is None:
if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
self.returncode = GetExitCodeProcess(self._handle)
self.result.set(self.returncode)
return self.returncode
def rawlink(self, callback):
if not self.result.ready() and not self._waiting:
self._waiting = True
Greenlet.spawn(self._wait)
self.result.rawlink(linkproxy(callback, self))
# XXX unlink
def _blocking_wait(self):
WaitForSingleObject(self._handle, INFINITE)
self.returncode = GetExitCodeProcess(self._handle)
return self.returncode
def _wait(self):
self.threadpool.spawn(self._blocking_wait).rawlink(self.result)
def wait(self, timeout=None):
"""Wait for child process to terminate. Returns returncode
attribute."""
if self.returncode is None:
if not self._waiting:
self._waiting = True
self._wait()
return self.result.wait(timeout=timeout)
def send_signal(self, sig):
"""Send a signal to the process
"""
if sig == signal.SIGTERM:
self.terminate()
elif sig == signal.CTRL_C_EVENT:
os.kill(self.pid, signal.CTRL_C_EVENT)
elif sig == signal.CTRL_BREAK_EVENT:
os.kill(self.pid, signal.CTRL_BREAK_EVENT)
else:
raise ValueError("Unsupported signal: {}".format(sig))
def terminate(self):
"""Terminates the process
"""
TerminateProcess(self._handle, 1)
kill = terminate
else:
#
# POSIX methods
#
def _get_handles(self, stdin, stdout, stderr):
"""Construct and return tuple with IO objects:
p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
"""
p2cread, p2cwrite = None, None
c2pread, c2pwrite = None, None
errread, errwrite = None, None
if stdin is None:
pass
elif stdin == PIPE:
p2cread, p2cwrite = self.pipe_cloexec()
elif isinstance(stdin, int):
p2cread = stdin
else:
# Assuming file-like object
p2cread = stdin.fileno()
if stdout is None:
pass
elif stdout == PIPE:
c2pread, c2pwrite = self.pipe_cloexec()
elif isinstance(stdout, int):
c2pwrite = stdout
else:
# Assuming file-like object
c2pwrite = stdout.fileno()
if stderr is None:
pass
elif stderr == PIPE:
errread, errwrite = self.pipe_cloexec()
elif stderr == STDOUT:
errwrite = c2pwrite
elif isinstance(stderr, int):
errwrite = stderr
else:
# Assuming file-like object
errwrite = stderr.fileno()
return (p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite)
def _set_cloexec_flag(self, fd, cloexec=True):
try:
cloexec_flag = fcntl.FD_CLOEXEC
except AttributeError:
cloexec_flag = 1
old = fcntl.fcntl(fd, fcntl.F_GETFD)
if cloexec:
fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
else:
fcntl.fcntl(fd, fcntl.F_SETFD, old & ~cloexec_flag)
def _remove_nonblock_flag(self, fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL) & (~os.O_NONBLOCK)
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def pipe_cloexec(self):
"""Create a pipe with FDs set CLOEXEC."""
# Pipes' FDs are set CLOEXEC by default because we don't want them
# to be inherited by other subprocesses: the CLOEXEC flag is removed
# from the child's FDs by _dup2(), between fork() and exec().
# This is not atomic: we would need the pipe2() syscall for that.
r, w = os.pipe()
self._set_cloexec_flag(r)
self._set_cloexec_flag(w)
return r, w
def _close_fds(self, but):
if hasattr(os, 'closerange'):
os.closerange(3, but)
os.closerange(but + 1, MAXFD)
else:
for i in xrange(3, MAXFD):
if i == but:
continue
try:
os.close(i)
except:
pass
def _execute_child(self, args, executable, preexec_fn, close_fds,
cwd, env, universal_newlines,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite):
"""Execute program (POSIX version)"""
if isinstance(args, types.StringTypes):
args = [args]
else:
args = list(args)
if shell:
args = ["/bin/sh", "-c"] + args
if executable:
args[0] = executable
if executable is None:
executable = args[0]
self._loop.install_sigchld()
# For transferring possible exec failure from child to parent
# The first char specifies the exception type: 0 means
# OSError, 1 means some other error.
errpipe_read, errpipe_write = self.pipe_cloexec()
try:
try:
gc_was_enabled = gc.isenabled()
# Disable gc to avoid bug where gc -> file_dealloc ->
# write to stderr -> hang. http://bugs.python.org/issue1336
gc.disable()
try:
self.pid = fork()
except:
if gc_was_enabled:
gc.enable()
raise
if self.pid == 0:
# Child
try:
# Close parent's pipe ends
if p2cwrite is not None:
os.close(p2cwrite)
if c2pread is not None:
os.close(c2pread)
if errread is not None:
os.close(errread)
os.close(errpipe_read)
# When duping fds, if there arises a situation
# where one of the fds is either 0, 1 or 2, it
# is possible that it is overwritten (#12607).
if c2pwrite == 0:
c2pwrite = os.dup(c2pwrite)
if errwrite == 0 or errwrite == 1:
errwrite = os.dup(errwrite)
# Dup fds for child
def _dup2(a, b):
# dup2() removes the CLOEXEC flag but
# we must do it ourselves if dup2()
# would be a no-op (issue #10806).
if a == b:
self._set_cloexec_flag(a, False)
elif a is not None:
os.dup2(a, b)
self._remove_nonblock_flag(b)
_dup2(p2cread, 0)
_dup2(c2pwrite, 1)
_dup2(errwrite, 2)
# Close pipe fds. Make sure we don't close the
# same fd more than once, or standard fds.
closed = set([None])
for fd in [p2cread, c2pwrite, errwrite]:
if fd not in closed and fd > 2:
os.close(fd)
closed.add(fd)
# Close all other fds, if asked for
if close_fds:
self._close_fds(but=errpipe_write)
if cwd is not None:
os.chdir(cwd)
if preexec_fn:
preexec_fn()
if env is None:
os.execvp(executable, args)
else:
os.execvpe(executable, args, env)
except:
exc_type, exc_value, tb = sys.exc_info()
# Save the traceback and attach it to the exception object
exc_lines = traceback.format_exception(exc_type,
exc_value,
tb)
exc_value.child_traceback = ''.join(exc_lines)
os.write(errpipe_write, pickle.dumps(exc_value))
finally:
# Make sure that the process exits no matter what.
# The return code does not matter much as it won't be
# reported to the application
os._exit(1)
# Parent
self._watcher = self._loop.child(self.pid)
self._watcher.start(self._on_child, self._watcher)
if gc_was_enabled:
gc.enable()
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)
if p2cread is not None and p2cwrite is not None:
os.close(p2cread)
if c2pwrite is not None and c2pread is not None:
os.close(c2pwrite)
if errwrite is not None and errread is not None:
os.close(errwrite)
# Wait for exec to fail or succeed; possibly raising exception
errpipe_read = FileObject(errpipe_read, 'rb')
data = errpipe_read.read()
finally:
if hasattr(errpipe_read, 'close'):
errpipe_read.close()
else:
os.close(errpipe_read)
if data != "":
self.wait()
child_exception = pickle.loads(data)
for fd in (p2cwrite, c2pread, errread):
if fd is not None:
os.close(fd)
raise child_exception
def _handle_exitstatus(self, sts):
if os.WIFSIGNALED(sts):
self.returncode = -os.WTERMSIG(sts)
elif os.WIFEXITED(sts):
self.returncode = os.WEXITSTATUS(sts)
else:
# Should never happen
raise RuntimeError("Unknown child exit status!")
def _internal_poll(self):
"""Check if child process has terminated. Returns returncode
attribute.
"""
if self.returncode is None:
if get_hub() is not getcurrent():
sig_pending = getattr(self._loop, 'sig_pending', True)
if sig_pending:
sleep(0.00001)
return self.returncode
def wait(self, timeout=None):
"""Wait for child process to terminate. Returns returncode
attribute."""
return self.result.wait(timeout=timeout)
def send_signal(self, sig):
"""Send a signal to the process
"""
os.kill(self.pid, sig)
def terminate(self):
"""Terminate the process with SIGTERM
"""
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the process with SIGKILL
"""
self.send_signal(signal.SIGKILL)
def write_and_close(fobj, data):
try:
if data:
fobj.write(data)
except (OSError, IOError), ex:
if ex.errno != errno.EPIPE and ex.errno != errno.EINVAL:
raise
finally:
try:
fobj.close()
except EnvironmentError:
pass
|
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from datetime import datetime
from xml.dom import minidom
from django.conf import settings
from django.core import serializers
from django.db import transaction, connection
from django.test import TestCase, TransactionTestCase, Approximate
from django.utils import six
from django.utils.six import StringIO
from django.utils import unittest
from .models import (Category, Author, Article, AuthorProfile, Actor, Movie,
Score, Player, Team)
class SerializerRegistrationTests(unittest.TestCase):
def setUp(self):
self.old_SERIALIZATION_MODULES = getattr(settings, 'SERIALIZATION_MODULES', None)
self.old_serializers = serializers._serializers
serializers._serializers = {}
settings.SERIALIZATION_MODULES = {
"json2" : "django.core.serializers.json",
}
def tearDown(self):
serializers._serializers = self.old_serializers
if self.old_SERIALIZATION_MODULES:
settings.SERIALIZATION_MODULES = self.old_SERIALIZATION_MODULES
else:
delattr(settings, 'SERIALIZATION_MODULES')
def test_register(self):
"Registering a new serializer populates the full registry. Refs #14823"
serializers.register_serializer('json3', 'django.core.serializers.json')
public_formats = serializers.get_public_serializer_formats()
self.assertIn('json3', public_formats)
self.assertIn('json2', public_formats)
self.assertIn('xml', public_formats)
def test_unregister(self):
"Unregistering a serializer doesn't cause the registry to be repopulated. Refs #14823"
serializers.unregister_serializer('xml')
serializers.register_serializer('json3', 'django.core.serializers.json')
public_formats = serializers.get_public_serializer_formats()
self.assertNotIn('xml', public_formats)
self.assertIn('json3', public_formats)
def test_builtin_serializers(self):
"Requesting a list of serializer formats popuates the registry"
all_formats = set(serializers.get_serializer_formats())
public_formats = set(serializers.get_public_serializer_formats())
self.assertIn('xml', all_formats)
self.assertIn('xml', public_formats)
self.assertIn('json2', all_formats)
self.assertIn('json2', public_formats)
self.assertIn('python', all_formats)
self.assertNotIn('python', public_formats)
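# Format-agnostic mixin: concrete test cases combine it with TestCase, set
# serializer_name, and supply the _validate_output/_get_pk_values/
# _get_field_values helpers for their particular format.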
class SerializersTestBase(object):
@staticmethod
def _comparison_value(value):
return value
def setUp(self):
sports = Category.objects.create(name="Sports")
music = Category.objects.create(name="Music")
op_ed = Category.objects.create(name="Op-Ed")
self.joe = Author.objects.create(name="Joe")
self.jane = Author.objects.create(name="Jane")
self.a1 = Article(
author=self.jane,
headline="Poker has no place on ESPN",
pub_date=datetime(2006, 6, 16, 11, 00)
)
self.a1.save()
self.a1.categories = [sports, op_ed]
self.a2 = Article(
author=self.joe,
headline="Time to reform copyright",
pub_date=datetime(2006, 6, 16, 13, 00, 11, 345)
)
self.a2.save()
self.a2.categories = [music, op_ed]
def test_serialize(self):
"""Tests that basic serialization works."""
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all())
self.assertTrue(self._validate_output(serial_str))
def test_serializer_roundtrip(self):
"""Tests that serialized content can be deserialized."""
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all())
models = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertEqual(len(models), 2)
def test_altering_serialized_output(self):
"""
Tests the ability to create new objects by
modifying serialized content.
"""
old_headline = "Poker has no place on ESPN"
new_headline = "Poker has no place on television"
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all())
serial_str = serial_str.replace(old_headline, new_headline)
models = list(serializers.deserialize(self.serializer_name, serial_str))
# Prior to saving, old headline is in place
self.assertTrue(Article.objects.filter(headline=old_headline))
self.assertFalse(Article.objects.filter(headline=new_headline))
for model in models:
model.save()
# After saving, new headline is in place
self.assertTrue(Article.objects.filter(headline=new_headline))
self.assertFalse(Article.objects.filter(headline=old_headline))
def test_one_to_one_as_pk(self):
"""
Tests that if you use your own primary key field
(such as a OneToOneField), it doesn't appear in the
serialized field list - it replaces the pk identifier.
"""
profile = AuthorProfile(author=self.joe,
date_of_birth=datetime(1970, 1, 1))
profile.save()
serial_str = serializers.serialize(self.serializer_name,
AuthorProfile.objects.all())
self.assertFalse(self._get_field_values(serial_str, 'author'))
for obj in serializers.deserialize(self.serializer_name, serial_str):
self.assertEqual(obj.object.pk, self._comparison_value(self.joe.pk))
def test_serialize_field_subset(self):
"""Tests that output can be restricted to a subset of fields"""
valid_fields = ('headline','pub_date')
invalid_fields = ("author", "categories")
serial_str = serializers.serialize(self.serializer_name,
Article.objects.all(),
fields=valid_fields)
for field_name in invalid_fields:
self.assertFalse(self._get_field_values(serial_str, field_name))
for field_name in valid_fields:
self.assertTrue(self._get_field_values(serial_str, field_name))
def test_serialize_unicode(self):
"""Tests that unicode makes the roundtrip intact"""
actor_name = "Za\u017c\u00f3\u0142\u0107"
movie_title = 'G\u0119\u015bl\u0105 ja\u017a\u0144'
ac = Actor(name=actor_name)
mv = Movie(title=movie_title, actor=ac)
ac.save()
mv.save()
serial_str = serializers.serialize(self.serializer_name, [mv])
self.assertEqual(self._get_field_values(serial_str, "title")[0], movie_title)
self.assertEqual(self._get_field_values(serial_str, "actor")[0], actor_name)
obj_list = list(serializers.deserialize(self.serializer_name, serial_str))
mv_obj = obj_list[0].object
self.assertEqual(mv_obj.title, movie_title)
def test_serialize_superfluous_queries(self):
"""Ensure no superfluous queries are made when serializing ForeignKeys
#17602
"""
ac = Actor(name='Actor name')
ac.save()
mv = Movie(title='Movie title', actor_id=ac.pk)
mv.save()
with self.assertNumQueries(0):
serial_str = serializers.serialize(self.serializer_name, [mv])
def test_serialize_with_null_pk(self):
"""
Tests that serialized data with no primary key results
in a model instance with no id
"""
category = Category(name="Reference")
serial_str = serializers.serialize(self.serializer_name, [category])
pk_value = self._get_pk_values(serial_str)[0]
self.assertFalse(pk_value)
cat_obj = list(serializers.deserialize(self.serializer_name,
serial_str))[0].object
self.assertEqual(cat_obj.id, None)
def test_float_serialization(self):
"""Tests that float values serialize and deserialize intact"""
sc = Score(score=3.4)
sc.save()
serial_str = serializers.serialize(self.serializer_name, [sc])
deserial_objs = list(serializers.deserialize(self.serializer_name,
serial_str))
self.assertEqual(deserial_objs[0].object.score, Approximate(3.4, places=1))
def test_custom_field_serialization(self):
"""Tests that custom fields serialize and deserialize intact"""
team_str = "Spartak Moskva"
player = Player()
player.name = "Soslan Djanaev"
player.rank = 1
player.team = Team(team_str)
player.save()
serial_str = serializers.serialize(self.serializer_name,
Player.objects.all())
team = self._get_field_values(serial_str, "team")
self.assertTrue(team)
self.assertEqual(team[0], team_str)
deserial_objs = list(serializers.deserialize(self.serializer_name, serial_str))
self.assertEqual(deserial_objs[0].object.team.to_string(),
player.team.to_string())
def test_pre_1000ad_date(self):
"""Tests that year values before 1000AD are properly formatted"""
# Regression for #12524 -- dates before 1000AD get prefixed
# 0's on the year
a = Article.objects.create(
author = self.jane,
headline = "Nobody remembers the early years",
pub_date = datetime(1, 2, 3, 4, 5, 6))
serial_str = serializers.serialize(self.serializer_name, [a])
date_values = self._get_field_values(serial_str, "pub_date")
self.assertEqual(date_values[0].replace('T', ' '), "0001-02-03 04:05:06")
def test_pkless_serialized_strings(self):
"""
Tests that serialized strings without PKs
can be turned into models
"""
deserial_objs = list(serializers.deserialize(self.serializer_name,
self.pkless_str))
for obj in deserial_objs:
self.assertFalse(obj.object.id)
obj.save()
self.assertEqual(Category.objects.all().count(), 5)
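# Mixin for forward-reference tests: subclasses pair it with TransactionTestCase
# and supply fwd_ref_str, serialized data that references objects before they
# are defined.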
class SerializersTransactionTestBase(object):
def test_forward_refs(self):
"""
Tests that object ids can be referenced before they are
defined in the serialization data.
"""
# The deserialization process needs to be contained
# within a transaction in order to test forward reference
# handling.
transaction.enter_transaction_management()
objs = serializers.deserialize(self.serializer_name, self.fwd_ref_str)
with connection.constraint_checks_disabled():
for obj in objs:
obj.save()
transaction.commit()
transaction.leave_transaction_management()
for model_cls in (Category, Author, Article):
self.assertEqual(model_cls.objects.all().count(), 1)
art_obj = Article.objects.all()[0]
self.assertEqual(art_obj.categories.all().count(), 1)
self.assertEqual(art_obj.author.name, "Agnes")
class XmlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "xml"
pkless_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object model="serializers.category">
<field type="CharField" name="name">Reference</field>
</object>
<object model="serializers.category">
<field type="CharField" name="name">Non-fiction</field>
</object>
</django-objects>"""
@staticmethod
def _comparison_value(value):
# The XML serializer handles everything as strings, so comparisons
# need to be performed on the stringified value
return six.text_type(value)
@staticmethod
def _validate_output(serial_str):
try:
minidom.parseString(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
dom = minidom.parseString(serial_str)
fields = dom.getElementsByTagName("object")
for field in fields:
ret_list.append(field.getAttribute("pk"))
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
dom = minidom.parseString(serial_str)
fields = dom.getElementsByTagName("field")
for field in fields:
if field.getAttribute("name") == field_name:
temp = []
for child in field.childNodes:
temp.append(child.nodeValue)
ret_list.append("".join(temp))
return ret_list
class XmlSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
serializer_name = "xml"
fwd_ref_str = """<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="1" model="serializers.article">
<field to="serializers.author" name="author" rel="ManyToOneRel">1</field>
<field type="CharField" name="headline">Forward references pose no problem</field>
<field type="DateTimeField" name="pub_date">2006-06-16T15:00:00</field>
<field to="serializers.category" name="categories" rel="ManyToManyRel">
<object pk="1"></object>
</field>
</object>
<object pk="1" model="serializers.author">
<field type="CharField" name="name">Agnes</field>
</object>
<object pk="1" model="serializers.category">
<field type="CharField" name="name">Reference</field></object>
</django-objects>"""
class JsonSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "json"
pkless_str = """[
{
"pk": null,
"model": "serializers.category",
"fields": {"name": "Reference"}
}, {
"model": "serializers.category",
"fields": {"name": "Non-fiction"}
}]"""
@staticmethod
def _validate_output(serial_str):
try:
json.loads(serial_str)
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
serial_list = json.loads(serial_str)
for obj_dict in serial_list:
ret_list.append(obj_dict["pk"])
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
serial_list = json.loads(serial_str)
for obj_dict in serial_list:
if field_name in obj_dict["fields"]:
ret_list.append(obj_dict["fields"][field_name])
return ret_list
class JsonSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
serializer_name = "json"
fwd_ref_str = """[
{
"pk": 1,
"model": "serializers.article",
"fields": {
"headline": "Forward references pose no problem",
"pub_date": "2006-06-16T15:00:00",
"categories": [1],
"author": 1
}
},
{
"pk": 1,
"model": "serializers.category",
"fields": {
"name": "Reference"
}
},
{
"pk": 1,
"model": "serializers.author",
"fields": {
"name": "Agnes"
}
}]"""
try:
import yaml
except ImportError:
pass
else:
class YamlSerializerTestCase(SerializersTestBase, TestCase):
serializer_name = "yaml"
fwd_ref_str = """- fields:
headline: Forward references pose no problem
pub_date: 2006-06-16 15:00:00
categories: [1]
author: 1
pk: 1
model: serializers.article
- fields:
name: Reference
pk: 1
model: serializers.category
- fields:
name: Agnes
pk: 1
model: serializers.author"""
pkless_str = """- fields:
name: Reference
pk: null
model: serializers.category
- fields:
name: Non-fiction
model: serializers.category"""
@staticmethod
def _validate_output(serial_str):
try:
yaml.safe_load(StringIO(serial_str))
except Exception:
return False
else:
return True
@staticmethod
def _get_pk_values(serial_str):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
ret_list.append(obj_dict["pk"])
return ret_list
@staticmethod
def _get_field_values(serial_str, field_name):
ret_list = []
stream = StringIO(serial_str)
for obj_dict in yaml.safe_load(stream):
if "fields" in obj_dict and field_name in obj_dict["fields"]:
field_value = obj_dict["fields"][field_name]
# yaml.safe_load will return non-string objects for some
# of the fields we are interested in, this ensures that
# everything comes back as a string
if isinstance(field_value, six.string_types):
ret_list.append(field_value)
else:
ret_list.append(str(field_value))
return ret_list
class YamlSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
serializer_name = "yaml"
fwd_ref_str = """- fields:
headline: Forward references pose no problem
pub_date: 2006-06-16 15:00:00
categories: [1]
author: 1
pk: 1
model: serializers.article
- fields:
name: Reference
pk: 1
model: serializers.category
- fields:
name: Agnes
pk: 1
model: serializers.author"""
|
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Task towers for PixelDA model."""
import tensorflow as tf
slim = tf.contrib.slim
def add_task_specific_model(images,
hparams,
num_classes=10,
is_training=False,
reuse_private=False,
private_scope=None,
reuse_shared=False,
shared_scope=None):
"""Create a classifier for the given images.
The classifier is composed of a few 'private' layers followed by a few
'shared' layers. This lets us account for different image 'style', while
sharing the last few layers as 'content' layers.
Args:
images: A `Tensor` of size [batch_size, height, width, 3].
hparams: model hparams
num_classes: The number of output classes.
is_training: whether model is training
reuse_private: Whether or not to reuse the private weights, which are the
first few layers in the classifier
private_scope: The name of the variable_scope for the private (unshared)
components of the classifier.
reuse_shared: Whether or not to reuse the shared weights, which are the last
few layers in the classifier
shared_scope: The name of the variable_scope for the shared components of
the classifier.
Returns:
The logits, a `Tensor` of shape [batch_size, num_classes].
Raises:
ValueError: If hparams.task_tower is an unknown value
"""
model = hparams.task_tower
# Make sure the classifier name shows up in graph
shared_scope = shared_scope or (model + '_shared')
kwargs = {
'num_classes': num_classes,
'is_training': is_training,
'reuse_private': reuse_private,
'reuse_shared': reuse_shared,
}
if private_scope:
kwargs['private_scope'] = private_scope
if shared_scope:
kwargs['shared_scope'] = shared_scope
quaternion_pred = None
with slim.arg_scope(
[slim.conv2d, slim.fully_connected],
activation_fn=tf.nn.relu,
weights_regularizer=tf.contrib.layers.l2_regularizer(
hparams.weight_decay_task_classifier)):
with slim.arg_scope([slim.conv2d], padding='SAME'):
if model == 'doubling_pose_estimator':
logits, quaternion_pred = doubling_cnn_class_and_quaternion(
images, num_private_layers=hparams.num_private_layers, **kwargs)
elif model == 'mnist':
logits, _ = mnist_classifier(images, **kwargs)
elif model == 'svhn':
logits, _ = svhn_classifier(images, **kwargs)
elif model == 'gtsrb':
logits, _ = gtsrb_classifier(images, **kwargs)
elif model == 'pose_mini':
logits, quaternion_pred = pose_mini_tower(images, **kwargs)
else:
raise ValueError('Unknown task classifier %s' % model)
return logits, quaternion_pred
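# A minimal usage sketch (illustrative only; the hparams values below are
# assumptions, not taken from any training configuration):
#
#   hparams = tf.contrib.training.HParams(task_tower='mnist',
#                                         weight_decay_task_classifier=1e-4,
#                                         num_private_layers=1)
#   logits, _ = add_task_specific_model(images, hparams, num_classes=10,
#                                       is_training=True,
#                                       private_scope='source_tower')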
#####################################
# Classifiers used in the DSN paper #
#####################################
def mnist_classifier(images,
is_training=False,
num_classes=10,
reuse_private=False,
private_scope='mnist',
reuse_shared=False,
shared_scope='task_model'):
"""Creates the convolutional MNIST model from the gradient reversal paper.
Note that since the output is a set of 'logits', the values fall in the
interval of (-infinity, infinity). Consequently, to convert the outputs to a
probability distribution over the characters, one will need to convert them
using the softmax function:
logits, endpoints = conv_mnist(images, is_training=False)
predictions = tf.nn.softmax(logits)
Args:
images: the MNIST digits, a tensor of size [batch_size, 28, 28, 1].
is_training: specifies whether or not we're currently training the model.
This variable will determine the behaviour of the dropout layer.
num_classes: the number of output classes to use.
Returns:
the output logits, a tensor of size [batch_size, num_classes].
a dictionary with key/values the layer names and tensors.
"""
net = {}
with tf.variable_scope(private_scope, reuse=reuse_private):
net['conv1'] = slim.conv2d(images, 32, [5, 5], scope='conv1')
net['pool1'] = slim.max_pool2d(net['conv1'], [2, 2], 2, scope='pool1')
with tf.variable_scope(shared_scope, reuse=reuse_shared):
net['conv2'] = slim.conv2d(net['pool1'], 48, [5, 5], scope='conv2')
net['pool2'] = slim.max_pool2d(net['conv2'], [2, 2], 2, scope='pool2')
net['fc3'] = slim.fully_connected(
slim.flatten(net['pool2']), 100, scope='fc3')
net['fc4'] = slim.fully_connected(
slim.flatten(net['fc3']), 100, scope='fc4')
logits = slim.fully_connected(
net['fc4'], num_classes, activation_fn=None, scope='fc5')
return logits, net
def svhn_classifier(images,
is_training=False,
num_classes=10,
reuse_private=False,
private_scope=None,
reuse_shared=False,
shared_scope='task_model'):
"""Creates the convolutional SVHN model from the gradient reversal paper.
Note that since the output is a set of 'logits', the values fall in the
interval of (-infinity, infinity). Consequently, to convert the outputs to a
probability distribution over the characters, one will need to convert them
using the softmax function:
logits, endpoints = svhn_classifier(images, is_training=False)
predictions = tf.nn.softmax(logits)
Args:
images: the SVHN digits, a tensor of size [batch_size, 40, 40, 3].
is_training: specifies whether or not we're currently training the model.
This variable will determine the behaviour of the dropout layer.
num_classes: the number of output classes to use.
Returns:
the output logits, a tensor of size [batch_size, num_classes].
a dictionary with key/values the layer names and tensors.
"""
net = {}
with tf.variable_scope(private_scope, reuse=reuse_private):
net['conv1'] = slim.conv2d(images, 64, [5, 5], scope='conv1')
net['pool1'] = slim.max_pool2d(net['conv1'], [3, 3], 2, scope='pool1')
with tf.variable_scope(shared_scope, reuse=reuse_shared):
net['conv2'] = slim.conv2d(net['pool1'], 64, [5, 5], scope='conv2')
net['pool2'] = slim.max_pool2d(net['conv2'], [3, 3], 2, scope='pool2')
net['conv3'] = slim.conv2d(net['pool2'], 128, [5, 5], scope='conv3')
net['fc3'] = slim.fully_connected(
slim.flatten(net['conv3']), 3072, scope='fc3')
net['fc4'] = slim.fully_connected(
slim.flatten(net['fc3']), 2048, scope='fc4')
logits = slim.fully_connected(
net['fc4'], num_classes, activation_fn=None, scope='fc5')
return logits, net
def gtsrb_classifier(images,
is_training=False,
num_classes=43,
reuse_private=False,
private_scope='gtsrb',
reuse_shared=False,
shared_scope='task_model'):
"""Creates the convolutional GTSRB model from the gradient reversal paper.
Note that since the output is a set of 'logits', the values fall in the
interval of (-infinity, infinity). Consequently, to convert the outputs to a
probability distribution over the characters, one will need to convert them
using the softmax function:
logits, endpoints = gtsrb_classifier(images, is_training=False)
predictions = tf.nn.softmax(logits)
Args:
images: the GTSRB images, a tensor of size [batch_size, 40, 40, 3].
is_training: specifies whether or not we're currently training the model.
This variable will determine the behaviour of the dropout layer.
num_classes: the number of output classes to use.
reuse_private: Whether or not to reuse the private components of the model.
private_scope: The name of the private scope.
reuse_shared: Whether or not to reuse the shared components of the model.
shared_scope: The name of the shared scope.
Returns:
the output logits, a tensor of size [batch_size, num_classes].
a dictionary with key/values the layer names and tensors.
"""
net = {}
with tf.variable_scope(private_scope, reuse=reuse_private):
net['conv1'] = slim.conv2d(images, 96, [5, 5], scope='conv1')
net['pool1'] = slim.max_pool2d(net['conv1'], [2, 2], 2, scope='pool1')
with tf.variable_scope(shared_scope, reuse=reuse_shared):
net['conv2'] = slim.conv2d(net['pool1'], 144, [3, 3], scope='conv2')
net['pool2'] = slim.max_pool2d(net['conv2'], [2, 2], 2, scope='pool2')
net['conv3'] = slim.conv2d(net['pool2'], 256, [5, 5], scope='conv3')
net['pool3'] = slim.max_pool2d(net['conv3'], [2, 2], 2, scope='pool3')
net['fc3'] = slim.fully_connected(
slim.flatten(net['pool3']), 512, scope='fc3')
logits = slim.fully_connected(
net['fc3'], num_classes, activation_fn=None, scope='fc4')
return logits, net
#########################
# pose_mini task towers #
#########################
def pose_mini_tower(images,
num_classes=11,
is_training=False,
reuse_private=False,
private_scope='pose_mini',
reuse_shared=False,
shared_scope='task_model'):
"""Task tower for the pose_mini dataset."""
with tf.variable_scope(private_scope, reuse=reuse_private):
net = slim.conv2d(images, 32, [5, 5], scope='conv1')
net = slim.max_pool2d(net, [2, 2], stride=2, scope='pool1')
with tf.variable_scope(shared_scope, reuse=reuse_shared):
net = slim.conv2d(net, 64, [5, 5], scope='conv2')
net = slim.max_pool2d(net, [2, 2], stride=2, scope='pool2')
net = slim.flatten(net)
net = slim.fully_connected(net, 128, scope='fc3')
net = slim.dropout(net, 0.5, is_training=is_training, scope='dropout')
with tf.variable_scope('quaternion_prediction'):
quaternion_pred = slim.fully_connected(
net, 4, activation_fn=tf.tanh, scope='fc_q')
quaternion_pred = tf.nn.l2_normalize(quaternion_pred, 1)
logits = slim.fully_connected(
net, num_classes, activation_fn=None, scope='fc4')
return logits, quaternion_pred
def doubling_cnn_class_and_quaternion(images,
num_private_layers=1,
num_classes=10,
is_training=False,
reuse_private=False,
private_scope='doubling_cnn',
reuse_shared=False,
shared_scope='task_model'):
"""Alternate conv, pool while doubling filter count."""
net = images
depth = 32
layer_id = 1
with tf.variable_scope(private_scope, reuse=reuse_private):
while num_private_layers > 0 and net.shape.as_list()[1] > 5:
net = slim.conv2d(net, depth, [3, 3], scope='conv%s' % layer_id)
net = slim.max_pool2d(net, [2, 2], stride=2, scope='pool%s' % layer_id)
depth *= 2
layer_id += 1
num_private_layers -= 1
with tf.variable_scope(shared_scope, reuse=reuse_shared):
while net.shape.as_list()[1] > 5:
net = slim.conv2d(net, depth, [3, 3], scope='conv%s' % layer_id)
net = slim.max_pool2d(net, [2, 2], stride=2, scope='pool%s' % layer_id)
depth *= 2
layer_id += 1
net = slim.flatten(net)
net = slim.fully_connected(net, 100, scope='fc1')
net = slim.dropout(net, 0.5, is_training=is_training, scope='dropout')
quaternion_pred = slim.fully_connected(
net, 4, activation_fn=tf.tanh, scope='fc_q')
quaternion_pred = tf.nn.l2_normalize(quaternion_pred, 1)
logits = slim.fully_connected(
net, num_classes, activation_fn=None, scope='fc_logits')
return logits, quaternion_pred
|
|
#
# Copyright 2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
vOLT-HA DHCP Test Case module
"""
import os
import testCaseUtils
import logging
import subprocess
class DHCP(object):
"""
This class implements voltha DHCP test case
"""
CHECK_IP_FILENAME = 'voltha_check_ip.log'
DE_ASSIGN_IP_FILENAME = 'voltha_de-assign_ip.log'
ASSIGN_DHCP_IP_FILENAME = 'voltha_assign_dhcp_ip.log'
CHECK_ASSIGNED_IP_FILENAME = 'voltha_check_assigned_dhcp_ip.log'
def __init__(self):
self.dirs = dict()
self.dirs['log'] = None
self.dirs['root'] = None
self.dirs['voltha'] = None
self.__rgName = testCaseUtils.discover_rg_pod_name()
self.__onuCount = None
self.__fields = None
self.__deviceId = None
self.__portNumber = None
def h_set_log_dirs(self, root_dir, voltha_dir, log_dir):
testCaseUtils.config_dirs(self, log_dir, root_dir, voltha_dir)
def h_configure(self, onu_count):
self.__onuCount = onu_count
def discover_authorized_users(self):
testCaseUtils.send_command_to_onos_cli(testCaseUtils.get_dir(self, 'log'),
'voltha_onos_users.log', 'aaa-users')
def extract_authorized_user_device_id_and_port_number(self):
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'AUTHORIZED', 'voltha_onos_users.log')
assert statusLines, 'No ONU has been authenticated'
self.__deviceId, self.__portNumber = testCaseUtils.retrieve_authorized_users_device_id_and_port_number(statusLines)
def add_onu_bound_dhcp_flows(self):
testCaseUtils.add_subscriber_access(self, self.__deviceId, self.__portNumber)
def should_now_have_two_dhcp_flows(self):
testCaseUtils.send_command_to_onos_cli(testCaseUtils.get_dir(self, 'log'),
'voltha_onos_flows.log', 'flows -s')
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'IP_PROTO:17', 'voltha_onos_flows.log')
assert statusLines, 'No DHCP Detection flows'
lines = statusLines.splitlines()
assert len(lines) >= 2, 'Expected at least 2 DHCP Detection Flows but found %s' % len(lines)
for line in lines:
self.__fields = testCaseUtils.parse_fields(line, ',')
inPortStr = self.__fields[5].strip()
selector, delimiter, inPort = inPortStr.partition('=[')
assert (inPort == 'IN_PORT:2' or inPort == 'IN_PORT:128'), 'DHCP detection flows not associated with expected ports'
def add_dhcp_server_configuration_data_in_onos(self):
logging.info('Adding DHCP Configuration Data to Onos NetCfg')
logging.debug('curl --user karaf:karaf -X POST -H "Content-Type: application/json" '
'http://localhost:30120/onos/v1/network/configuration/apps/ -d @%s/tests/atests/build/dhcp_json'
% testCaseUtils.get_dir(self, 'voltha'))
os.system('curl --user karaf:karaf -X POST -H "Content-Type: application/json" '
'http://localhost:30120/onos/v1/network/configuration/apps/ -d @%s/tests/atests/build/dhcp_json'
% testCaseUtils.get_dir(self, 'voltha'))
def activate_dhcp_server_in_onos(self):
logging.info('Activating DHCP server on Onos')
testCaseUtils.send_command_to_onos_cli(testCaseUtils.get_dir(self, 'log'),
'voltha_dhcp_server_activate.log', 'app activate dhcp')
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'Activated', 'voltha_dhcp_server_activate.log')
assert statusLines, 'DHCP server failed to be Activated'
def deactivate_dhcp_server_in_onos(self):
logging.info('Deactivating DHCP server on Onos')
testCaseUtils.send_command_to_onos_cli(testCaseUtils.get_dir(self, 'log'),
'voltha_dhcp_server_deactivate.log', 'app deactivate dhcp')
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'Deactivated', 'voltha_dhcp_server_deactivate.log')
assert statusLines, 'DHCP server failed to be Deactivated'
def query_for_default_ip_on_rg(self):
logging.info('De-assigning default IP on RG')
process_output = open('%s/%s' % (testCaseUtils.get_dir(self, 'log'), self.CHECK_IP_FILENAME), 'w')
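# Pipe `kubectl exec ... ifconfig` into `grep -e eth0 -A1` and write the
# filtered output to the log file for later inspection.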
ifconfigCheck1 = subprocess.Popen(['/usr/bin/kubectl', 'exec', '-n', 'voltha', self.__rgName, '--', 'bash', '-c',
'ifconfig'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
ifconfigCheck2 = subprocess.Popen(['grep', '-e', 'eth0', '-A1'], stdin=ifconfigCheck1.stdout,
stdout=process_output,
stderr=process_output)
ifconfigCheck1.wait()
ifconfigCheck1.stdout.close()
ifconfigCheck2.wait()
process_output.close()
testCaseUtils.print_log_file(self, self.CHECK_IP_FILENAME)
def de_assign_default_ip_on_rg(self):
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'inet', self.CHECK_IP_FILENAME)
if statusLines:
process_output = open('%s/%s' % (testCaseUtils.get_dir(self, 'log'), self.DE_ASSIGN_IP_FILENAME), 'w')
os.system('/usr/bin/kubectl exec -n voltha %s -- bash -c "ifconfig eth0 0.0.0.0"' % self.__rgName)
ifconfigDeassign1 = subprocess.Popen(['/usr/bin/kubectl', 'exec', '-n', 'voltha', self.__rgName, '--', 'bash', '-c',
'ifconfig'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
ifconfigDeassign2 = subprocess.Popen(['grep', '-e', 'eth0', '-A1'], stdin=ifconfigDeassign1.stdout,
stdout=process_output,
stderr=process_output)
ifconfigDeassign1.wait()
ifconfigDeassign1.stdout.close()
ifconfigDeassign2.wait()
process_output.close()
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'inet', self.DE_ASSIGN_IP_FILENAME)
assert not statusLines, 'IP addr not de-assigned'
else:
logging.info('No default IP addr assigned to eth0')
def assign_dhcp_ip_addr_to_rg(self):
logging.info('Assigning IP addr on RG using DHCP')
process_output = open('%s/%s' % (testCaseUtils.get_dir(self, 'log'), self.ASSIGN_DHCP_IP_FILENAME), 'w')
dhcpAssignIp1 = subprocess.Popen(['/usr/bin/kubectl', 'exec', '-it', '-n', 'voltha', self.__rgName, '--',
'dhclient', '-v', 'eth0'],
stdout=process_output,
stderr=process_output)
dhcpAssignIp1.wait()
process_output.close()
testCaseUtils.print_log_file(self, self.ASSIGN_DHCP_IP_FILENAME)
procPidDhclient1 = subprocess.Popen(['/usr/bin/kubectl', 'exec', '-n', 'voltha', self.__rgName, '--', 'ps', '-ef'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
procPidDhclient2 = subprocess.Popen(['grep', '-e', 'dhclient'], stdin=procPidDhclient1.stdout,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
procPidDhclient3 = subprocess.Popen(['awk', "{print $2}"], stdin=procPidDhclient2.stdout,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
procPidDhclient1.stdout.close()
procPidDhclient2.stdout.close()
out, err = procPidDhclient3.communicate()
dhclientPid = out.strip()
if dhclientPid:
procKillDhclient = subprocess.Popen(['/usr/bin/kubectl', 'exec', '-n', 'voltha', self.__rgName, '--', 'kill', dhclientPid],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = procKillDhclient.communicate()
assert not err, 'Killing dhclient returned %s' % err
def should_have_dhcp_assigned_ip(self):
process_output = open('%s/%s' % (testCaseUtils.get_dir(self, 'log'), self.CHECK_ASSIGNED_IP_FILENAME), 'w')
ifConfigCheck1 = subprocess.Popen(['/usr/bin/kubectl', 'exec', '-n', 'voltha', self.__rgName, '--', 'bash', '-c',
'ifconfig'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
ifConfigCheck2 = subprocess.Popen(['grep', '-e', 'eth0', '-A1'], stdin=ifConfigCheck1.stdout,
stdout=process_output,
stderr=process_output)
ifConfigCheck1.wait()
ifConfigCheck1.stdout.close()
ifConfigCheck2.wait()
process_output.close()
testCaseUtils.print_log_file(self, self.CHECK_ASSIGNED_IP_FILENAME)
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'inet', self.CHECK_ASSIGNED_IP_FILENAME)
assert statusLines, 'DHCP IP addr not assigned'
def should_have_ips_assigned_to_all_onus(self):
logging.info('Verifying IP Address assignment on all ONUs')
testCaseUtils.send_command_to_onos_cli(testCaseUtils.get_dir(self, 'log'),
'_voltha_onos_dhcpl2relay_allocations.log', 'dhcpl2relay-allocations')
statusLines = testCaseUtils.get_fields_from_grep_command(self, 'DHCPACK', '_voltha_onos_dhcpl2relay_allocations.log')
assert statusLines, 'No DHCP addresses allocated'
lines = statusLines.splitlines()
assert len(lines) == self.__onuCount, 'Number of allocated IPs does not match ONU count; got %s' % len(lines)
for line in lines:
self.__fields = testCaseUtils.parse_fields(line, ',')
allocIp = self.__fields[5].strip()
allocated, delimiter, ipAddr = allocIp.partition('=')
assert ipAddr != '0.0.0.0', 'Invalid IP Address Allocated'
def set_firewall_rules():
logging.info('Setting Firewall rules for DHCP test')
os.system('sudo iptables -P FORWARD ACCEPT')
def run_test(onu_count, root_dir, voltha_dir, log_dir, simtype):
dhcp = DHCP()
dhcp.h_set_log_dirs(root_dir, voltha_dir, log_dir)
dhcp.h_configure(onu_count)
if simtype == 'ponsim':
set_firewall_rules()
dhcp.discover_authorized_users()
dhcp.extract_authorized_user_device_id_and_port_number()
dhcp.add_onu_bound_dhcp_flows()
dhcp.should_now_have_two_dhcp_flows()
dhcp.deactivate_dhcp_server_in_onos()
dhcp.add_dhcp_server_configuration_data_in_onos()
dhcp.activate_dhcp_server_in_onos()
dhcp.query_for_default_ip_on_rg()
dhcp.de_assign_default_ip_on_rg()
dhcp.assign_dhcp_ip_addr_to_rg()
dhcp.should_have_dhcp_assigned_ip()
elif simtype == 'bbsim':
dhcp.should_have_ips_assigned_to_all_onus()
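# Illustrative invocation (a sketch; the paths below are placeholders, not the
# harness's real directories):
#   run_test(onu_count=1, root_dir='/home/tester', voltha_dir='/home/tester/voltha',
#            log_dir='/tmp/dhcp_test_logs', simtype='ponsim')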
|
|
"""
Runner to provide F5 Load Balancer functionality
:depends: - pycontrol Python module
:configuration: In order to connect to an F5 Load Balancer, you must specify
the available load balancers in the Salt master configuration
.. code-block:: yaml
load_balancers:
bigip1.example.com:
username: admin
password: secret
bigip2.example.com:
username: admin
password: secret
"""
from salt.exceptions import CommandExecutionError
try:
import pycontrol.pycontrol as f5
HAS_PYCONTROL = True
except ImportError:
HAS_PYCONTROL = False
def __virtual__():
if not HAS_PYCONTROL:
return False
return True
class F5Mgmt:
def __init__(self, lb, username, password):
self.lb = lb
self.username = username
self.password = password
self._connect()
def _connect(self):
"""
Connect to F5
"""
try:
self.bigIP = f5.BIGIP(
hostname=self.lb,
username=self.username,
password=self.password,
fromurl=True,
wsdls=["LocalLB.VirtualServer", "LocalLB.Pool"],
)
except Exception: # pylint: disable=broad-except
raise Exception("Unable to connect to {}".format(self.lb))
return True
def create_vs(self, name, ip, port, protocol, profile, pool_name):
"""
Create a virtual server
"""
vs = self.bigIP.LocalLB.VirtualServer
vs_def = vs.typefactory.create("Common.VirtualServerDefinition")
vs_def.name = name
vs_def.address = ip
vs_def.port = port
common_protocols = vs.typefactory.create("Common.ProtocolType")
p = [i[0] for i in common_protocols if i[0].split("_")[1] == protocol.upper()]
if p:
vs_def.protocol = p
else:
raise CommandExecutionError("Unknown protocol")
vs_def_seq = vs.typefactory.create("Common.VirtualServerSequence")
vs_def_seq.item = [vs_def]
vs_type = vs.typefactory.create("LocalLB.VirtualServer.VirtualServerType")
vs_resource = vs.typefactory.create(
"LocalLB.VirtualServer.VirtualServerResource"
)
vs_resource.type = vs_type.RESOURCE_TYPE_POOL
vs_resource.default_pool_name = pool_name
resource_seq = vs.typefactory.create(
"LocalLB.VirtualServer.VirtualServerResourceSequence"
)
resource_seq.item = [vs_resource]
vs_context = vs.typefactory.create("LocalLB.ProfileContextType")
vs_profile = vs.typefactory.create("LocalLB.VirtualServer.VirtualServerProfile")
vs_profile.profile_context = vs_context.PROFILE_CONTEXT_TYPE_ALL
vs_profile.profile_name = protocol
vs_profile_http = vs.typefactory.create(
"LocalLB.VirtualServer.VirtualServerProfile"
)
vs_profile_http.profile_name = profile
vs_profile_conn = vs.typefactory.create(
"LocalLB.VirtualServer.VirtualServerProfile"
)
vs_profile_conn.profile_name = "oneconnect"
vs_profile_seq = vs.typefactory.create(
"LocalLB.VirtualServer.VirtualServerProfileSequence"
)
vs_profile_seq.item = [vs_profile, vs_profile_http, vs_profile_conn]
try:
vs.create(
definitions=vs_def_seq,
wildmasks=["255.255.255.255"],
resources=resource_seq,
profiles=[vs_profile_seq],
)
except Exception as e: # pylint: disable=broad-except
raise Exception(
"Unable to create `{}` virtual server\n\n{}".format(name, e)
)
return True
def create_pool(self, name, method="ROUND_ROBIN"):
"""
Create a pool on the F5 load balancer
"""
lbmethods = self.bigIP.LocalLB.Pool.typefactory.create("LocalLB.LBMethod")
supported_method = [
i[0] for i in lbmethods if (i[0].split("_", 2)[-1] == method.upper())
]
if supported_method and not self.check_pool(name):
try:
self.bigIP.LocalLB.Pool.create(
pool_names=[name], lb_methods=[supported_method], members=[[]]
)
except Exception as e: # pylint: disable=broad-except
raise Exception("Unable to create `{}` pool\n\n{}".format(name, e))
else:
raise Exception("Unsupported method")
return True
def add_pool_member(self, name, port, pool_name):
"""
Add a node to a pool
"""
if not self.check_pool(pool_name):
raise CommandExecutionError("{} pool does not exists".format(pool_name))
members_seq = self.bigIP.LocalLB.Pool.typefactory.create(
"Common.IPPortDefinitionSequence"
)
members_seq.items = []
member = self.bigIP.LocalLB.Pool.typefactory.create("Common.IPPortDefinition")
member.address = name
member.port = port
members_seq.items.append(member)
try:
self.bigIP.LocalLB.Pool.add_member(
pool_names=[pool_name], members=[members_seq]
)
except Exception as e: # pylint: disable=broad-except
raise Exception(
"Unable to add `{}` to `{}`\n\n{}".format(name, pool_name, e)
)
return True
def check_pool(self, name):
"""
Check to see if a pool exists
"""
pools = self.bigIP.LocalLB.Pool
for pool in pools.get_list():
if pool.split("/")[-1] == name:
return True
return False
def check_virtualserver(self, name):
"""
Check to see if a virtual server exists
"""
vs = self.bigIP.LocalLB.VirtualServer
for v in vs.get_list():
if v.split("/")[-1] == name:
return True
return False
def check_member_pool(self, member, pool_name):
"""
Check a pool member exists in a specific pool
"""
members = self.bigIP.LocalLB.Pool.get_member(pool_names=[pool_name])[0]
for mem in members:
if member == mem.address:
return True
return False
def lbmethods(self):
"""
List all the load balancer methods
"""
methods = self.bigIP.LocalLB.Pool.typefactory.create("LocalLB.LBMethod")
return [method[0].split("_", 2)[-1] for method in methods]
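# The runner functions below are thin wrappers around F5Mgmt: each one looks up
# the credentials for the named load balancer in the master's `load_balancers`
# configuration and delegates to the matching F5Mgmt method.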
def create_vs(lb, name, ip, port, protocol, profile, pool_name):
"""
Create a virtual server
CLI Examples:
.. code-block:: bash
salt-run f5.create_vs lbalancer vs_name 10.0.0.1 80 tcp http poolname
"""
if __opts__["load_balancers"].get(lb, None):
(username, password) = list(__opts__["load_balancers"][lb].values())
else:
raise Exception("Unable to find `{}` load balancer".format(lb))
F5 = F5Mgmt(lb, username, password)
F5.create_vs(name, ip, port, protocol, profile, pool_name)
return True
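# A minimal sketch of the master configuration these runner functions assume
# (key names other than 'load_balancers' are illustrative; the code only relies
# on an lb -> {username, password} mapping with exactly those two values):
#
#   load_balancers:
#     bigip1.example.com:
#       username: admin
#       password: secret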
def create_pool(lb, name, method="ROUND_ROBIN"):
"""
Create a pool on the F5 load balancer
CLI Examples:
.. code-block:: bash
salt-run f5.create_pool load_balancer pool_name loadbalance_method
salt-run f5.create_pool load_balancer my_pool ROUND_ROBIN
"""
if __opts__["load_balancers"].get(lb, None):
(username, password) = list(__opts__["load_balancers"][lb].values())
else:
raise Exception("Unable to find `{}` load balancer".format(lb))
F5 = F5Mgmt(lb, username, password)
F5.create_pool(name, method)
return True
def add_pool_member(lb, name, port, pool_name):
"""
Add a node to a pool
CLI Examples:
.. code-block:: bash
salt-run f5.add_pool_member load_balancer 10.0.0.1 80 my_pool
"""
if __opts__["load_balancers"].get(lb, None):
(username, password) = list(__opts__["load_balancers"][lb].values())
else:
raise Exception("Unable to find `{}` load balancer".format(lb))
F5 = F5Mgmt(lb, username, password)
F5.add_pool_member(name, port, pool_name)
return True
def check_pool(lb, name):
"""
Check to see if a pool exists
CLI Examples:
.. code-block:: bash
salt-run f5.check_pool load_balancer pool_name
"""
if __opts__["load_balancers"].get(lb, None):
(username, password) = list(__opts__["load_balancers"][lb].values())
else:
raise Exception("Unable to find `{}` load balancer".format(lb))
F5 = F5Mgmt(lb, username, password)
return F5.check_pool(name)
def check_virtualserver(lb, name):
"""
Check to see if a virtual server exists
CLI Examples:
.. code-block:: bash
salt-run f5.check_virtualserver load_balancer virtual_server
"""
if __opts__["load_balancers"].get(lb, None):
(username, password) = list(__opts__["load_balancers"][lb].values())
else:
raise Exception("Unable to find `{}` load balancer".format(lb))
F5 = F5Mgmt(lb, username, password)
return F5.check_virtualserver(name)
def check_member_pool(lb, member, pool_name):
"""
Check a pool member exists in a specific pool
CLI Examples:
.. code-block:: bash
salt-run f5.check_member_pool load_balancer 10.0.0.1 my_pool
"""
if __opts__["load_balancers"].get(lb, None):
(username, password) = list(__opts__["load_balancers"][lb].values())
else:
raise Exception("Unable to find `{}` load balancer".format(lb))
F5 = F5Mgmt(lb, username, password)
return F5.check_member_pool(member, pool_name)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Common functions
"""
#Import Local Modules
import marvin
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.remoteSSHClient import remoteSSHClient
from utils import *
from base import *
#Import System modules
import time
def get_domain(apiclient, services=None):
"Returns a default domain"
cmd = listDomains.listDomainsCmd()
if services:
if "domainid" in services:
cmd.id = services["domainid"]
domains = apiclient.listDomains(cmd)
if isinstance(domains, list):
return domains[0]
else:
raise Exception("Failed to find specified domain.")
def get_zone(apiclient, services=None):
"Returns a default zone"
cmd = listZones.listZonesCmd()
if services:
if "zoneid" in services:
cmd.id = services["zoneid"]
zones = apiclient.listZones(cmd)
if isinstance(zones, list):
return zones[0]
else:
raise Exception("Failed to find specified zone.")
def get_pod(apiclient, zoneid, services=None):
"Returns a default pod for specified zone"
cmd = listPods.listPodsCmd()
cmd.zoneid = zoneid
if services:
if "podid" in services:
cmd.id = services["podid"]
pods = apiclient.listPods(cmd)
if isinstance(pods, list):
return pods[0]
else:
raise Exception("Exception: Failed to find specified pod.")
def get_template(apiclient, zoneid, ostypeid=12, services=None):
"Returns a template"
cmd = listTemplates.listTemplatesCmd()
cmd.templatefilter = 'featured'
cmd.zoneid = zoneid
if services:
if "template" in services:
cmd.id = services["template"]
list_templates = apiclient.listTemplates(cmd)
for template in list_templates:
if template.ostypeid == ostypeid:
return template
raise Exception("Exception: Failed to find template with OSTypeID: %s" %
ostypeid)
return
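# A rough usage sketch for the helpers above (apiclient and services are built
# by the marvin test harness and omitted here; values are illustrative):
#   zone = get_zone(apiclient, services)
#   template = get_template(apiclient, zone.id, ostypeid=12)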
def download_systemplates_sec_storage(server, services):
"""Download System templates on sec storage"""
try:
# Login to management server
ssh = remoteSSHClient.remoteSSHClient(
server["ipaddress"],
server["port"],
server["username"],
server["password"]
)
except Exception:
raise Exception("SSH access failted for server with IP address: %s" %
server["ipaddess"])
# Mount Secondary Storage on Management Server
cmds = [
"mkdir -p %s" % services["mnt_dir"],
"mount -t nfs %s:/%s %s" % (
services["sec_storage"],
services["path"],
services["mnt_dir"]
),
"%s -m %s -u %s -h %s -F" % (
services["command"],
services["mnt_dir"],
services["download_url"],
services["hypervisor"]
)
]
for c in cmds:
result = ssh.execute(c)
res = str(result)
# Unmount the Secondary storage
ssh.execute("umount %s" % (services["mnt_dir"]))
if res.count("Successfully installed system VM template") == 1:
return
else:
raise Exception("Failed to download System Templates on Sec Storage")
return
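# The ``services`` dict this helper reads looks roughly like the following
# (keys taken from the code above, values purely illustrative):
#   services = {
#       "mnt_dir": "/mnt/secondary",
#       "sec_storage": "192.168.0.2",
#       "path": "export/secondary",
#       "command": "<path to the system VM template install script>",
#       "download_url": "<system VM template URL>",
#       "hypervisor": "xenserver",
#   }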
def wait_for_ssvms(apiclient, zoneid, podid, interval=60):
"""After setup wait for SSVMs to come Up"""
time.sleep(interval)
timeout = 40
while True:
list_ssvm_response = list_ssvms(
apiclient,
systemvmtype='secondarystoragevm',
zoneid=zoneid,
podid=podid
)
ssvm = list_ssvm_response[0]
        if ssvm.state == 'Running':
            break
        if timeout == 0:
            raise Exception("SSVM failed to come up")
        # Sleep to ensure SSVMs are Up and Running
        time.sleep(interval)
        timeout = timeout - 1
timeout = 40
while True:
list_ssvm_response = list_ssvms(
apiclient,
systemvmtype='consoleproxy',
zoneid=zoneid,
podid=podid
)
cpvm = list_ssvm_response[0]
        if cpvm.state == 'Running':
            break
        if timeout == 0:
            raise Exception("CPVM failed to come up")
        # Sleep to ensure CPVMs are Up and Running
        time.sleep(interval)
        timeout = timeout - 1
return
def download_builtin_templates(apiclient, zoneid, hypervisor, host,
linklocalip, interval=60):
"""After setup wait till builtin templates are downloaded"""
# Change IPTABLES Rules
get_process_status(
host["ipaddress"],
host["port"],
host["username"],
host["password"],
linklocalip,
"iptables -P INPUT ACCEPT"
)
time.sleep(interval)
# Find the BUILTIN Templates for given Zone, Hypervisor
list_template_response = list_templates(
apiclient,
hypervisor=hypervisor,
zoneid=zoneid,
templatefilter='self'
)
if not isinstance(list_template_response, list):
raise Exception("Failed to download BUILTIN templates")
# Ensure all BUILTIN templates are downloaded
templateid = None
for template in list_template_response:
if template.templatetype == "BUILTIN":
templateid = template.id
# Sleep to ensure that template is in downloading state after adding
# Sec storage
time.sleep(interval)
while True:
template_response = list_templates(
apiclient,
id=templateid,
zoneid=zoneid,
templatefilter='self'
)
template = template_response[0]
# If template is ready,
# template.status = Download Complete
# Downloading - x% Downloaded
# Error - Any other string
if template.status == 'Download Complete':
break
elif 'Downloaded' in template.status:
time.sleep(interval)
elif 'Installing' not in template.status:
raise Exception("ErrorInDownload")
return
def update_resource_limit(apiclient, resourcetype, account=None,
domainid=None, max=None, projectid=None):
"""Updates the resource limit to 'max' for given account"""
cmd = updateResourceLimit.updateResourceLimitCmd()
cmd.resourcetype = resourcetype
if account:
cmd.account = account
if domainid:
cmd.domainid = domainid
if max:
cmd.max = max
if projectid:
cmd.projectid = projectid
apiclient.updateResourceLimit(cmd)
return
def list_routers(apiclient, **kwargs):
"""List all Routers matching criteria"""
cmd = listRouters.listRoutersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listRouters(cmd))
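# Every list_* helper below follows the same pattern: keyword arguments are
# copied verbatim onto the API command object, so any parameter supported by
# the corresponding list API can be used as a filter, e.g. (values illustrative):
#   routers = list_routers(apiclient, zoneid=zone.id, state='Running')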
def list_zones(apiclient, **kwargs):
"""List all Zones matching criteria"""
cmd = listZones.listZonesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listZones(cmd))
def list_networks(apiclient, **kwargs):
"""List all Networks matching criteria"""
cmd = listNetworks.listNetworksCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listNetworks(cmd))
def list_clusters(apiclient, **kwargs):
"""List all Clusters matching criteria"""
cmd = listClusters.listClustersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listClusters(cmd))
def list_ssvms(apiclient, **kwargs):
"""List all SSVMs matching criteria"""
cmd = listSystemVms.listSystemVmsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listSystemVms(cmd))
def list_storage_pools(apiclient, **kwargs):
"""List all storage pools matching criteria"""
cmd = listStoragePools.listStoragePoolsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listStoragePools(cmd))
def list_virtual_machines(apiclient, **kwargs):
"""List all VMs matching criteria"""
cmd = listVirtualMachines.listVirtualMachinesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listVirtualMachines(cmd))
def list_hosts(apiclient, **kwargs):
"""List all Hosts matching criteria"""
cmd = listHosts.listHostsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listHosts(cmd))
def list_configurations(apiclient, **kwargs):
"""List configuration with specified name"""
cmd = listConfigurations.listConfigurationsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listConfigurations(cmd))
def list_publicIP(apiclient, **kwargs):
"""List all Public IPs matching criteria"""
cmd = listPublicIpAddresses.listPublicIpAddressesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listPublicIpAddresses(cmd))
def list_nat_rules(apiclient, **kwargs):
"""List all NAT rules matching criteria"""
cmd = listPortForwardingRules.listPortForwardingRulesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listPortForwardingRules(cmd))
def list_lb_rules(apiclient, **kwargs):
"""List all Load balancing rules matching criteria"""
cmd = listLoadBalancerRules.listLoadBalancerRulesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listLoadBalancerRules(cmd))
def list_lb_instances(apiclient, **kwargs):
"""List all Load balancing instances matching criteria"""
cmd = listLoadBalancerRuleInstances.listLoadBalancerRuleInstancesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listLoadBalancerRuleInstances(cmd))
def list_firewall_rules(apiclient, **kwargs):
"""List all Firewall Rules matching criteria"""
cmd = listFirewallRules.listFirewallRulesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listFirewallRules(cmd))
def list_volumes(apiclient, **kwargs):
"""List all volumes matching criteria"""
cmd = listVolumes.listVolumesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listVolumes(cmd))
def list_isos(apiclient, **kwargs):
"""Lists all available ISO files."""
cmd = listIsos.listIsosCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listIsos(cmd))
def list_snapshots(apiclient, **kwargs):
"""List all snapshots matching criteria"""
cmd = listSnapshots.listSnapshotsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listSnapshots(cmd))
def list_templates(apiclient, **kwargs):
"""List all templates matching criteria"""
cmd = listTemplates.listTemplatesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listTemplates(cmd))
def list_domains(apiclient, **kwargs):
"""Lists domains"""
cmd = listDomains.listDomainsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listDomains(cmd))
def list_accounts(apiclient, **kwargs):
"""Lists accounts and provides detailed account information for
listed accounts"""
cmd = listAccounts.listAccountsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listAccounts(cmd))
def list_users(apiclient, **kwargs):
"""Lists users and provides detailed account information for
listed users"""
cmd = listUsers.listUsersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listUsers(cmd))
def list_snapshot_policy(apiclient, **kwargs):
"""Lists snapshot policies."""
cmd = listSnapshotPolicies.listSnapshotPoliciesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listSnapshotPolicies(cmd))
def list_events(apiclient, **kwargs):
"""Lists events"""
cmd = listEvents.listEventsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listEvents(cmd))
def list_disk_offering(apiclient, **kwargs):
"""Lists all available disk offerings."""
cmd = listDiskOfferings.listDiskOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listDiskOfferings(cmd))
def list_service_offering(apiclient, **kwargs):
"""Lists all available service offerings."""
cmd = listServiceOfferings.listServiceOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listServiceOfferings(cmd))
def list_vlan_ipranges(apiclient, **kwargs):
"""Lists all VLAN IP ranges."""
cmd = listVlanIpRanges.listVlanIpRangesCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listVlanIpRanges(cmd))
def list_usage_records(apiclient, **kwargs):
"""Lists usage records for accounts"""
cmd = listUsageRecords.listUsageRecordsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listUsageRecords(cmd))
def list_nw_service_prividers(apiclient, **kwargs):
"""Lists Network service providers"""
cmd = listNetworkServiceProviders.listNetworkServiceProvidersCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listNetworkServiceProviders(cmd))
def list_virtual_router_elements(apiclient, **kwargs):
"""Lists Virtual Router elements"""
cmd = listVirtualRouterElements.listVirtualRouterElementsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listVirtualRouterElements(cmd))
def list_network_offerings(apiclient, **kwargs):
"""Lists network offerings"""
cmd = listNetworkOfferings.listNetworkOfferingsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listNetworkOfferings(cmd))
def list_resource_limits(apiclient, **kwargs):
"""Lists resource limits"""
cmd = listResourceLimits.listResourceLimitsCmd()
[setattr(cmd, k, v) for k, v in kwargs.items()]
return(apiclient.listResourceLimits(cmd))
|
|
#!/usr/bin/env python
#
# BitBake Graphical GTK User Interface
#
# Copyright (C) 2012 Intel Corporation
#
# Authored by Dongxiao Xu <[email protected]>
# Authored by Shane Wang <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import gtk
import glib
from bb.ui.crumbs.hobcolor import HobColors
from bb.ui.crumbs.hobwidget import HobViewTable, HobNotebook, HobAltButton, HobButton
from bb.ui.crumbs.hoblistmodel import RecipeListModel
from bb.ui.crumbs.hobpages import HobPage
#
# RecipeSelectionPage
#
class RecipeSelectionPage (HobPage):
pages = [
{
'name' : 'Included recipes',
'tooltip' : 'The recipes currently included for your image',
'filter' : { RecipeListModel.COL_INC : [True],
RecipeListModel.COL_TYPE : ['recipe', 'packagegroup'] },
'search' : 'Search recipes by name',
'searchtip' : 'Enter a recipe name to find it',
'columns' : [{
'col_name' : 'Recipe name',
'col_id' : RecipeListModel.COL_NAME,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Group',
'col_id' : RecipeListModel.COL_GROUP,
'col_style': 'text',
'col_min' : 100,
'col_max' : 300,
'expand' : 'True'
}, {
'col_name' : 'Brought in by (+others)',
'col_id' : RecipeListModel.COL_BINB,
'col_style': 'binb',
'col_min' : 100,
'col_max' : 500,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : RecipeListModel.COL_INC,
'col_style': 'check toggle',
'col_min' : 100,
'col_max' : 100
}]
}, {
'name' : 'All recipes',
'tooltip' : 'All recipes in your configured layers',
'filter' : { RecipeListModel.COL_TYPE : ['recipe'] },
'search' : 'Search recipes by name',
'searchtip' : 'Enter a recipe name to find it',
'columns' : [{
'col_name' : 'Recipe name',
'col_id' : RecipeListModel.COL_NAME,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Group',
'col_id' : RecipeListModel.COL_GROUP,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'License',
'col_id' : RecipeListModel.COL_LIC,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : RecipeListModel.COL_INC,
'col_style': 'check toggle',
'col_min' : 100,
'col_max' : 100
}]
}, {
'name' : 'Package Groups',
'tooltip' : 'All package groups in your configured layers',
'filter' : { RecipeListModel.COL_TYPE : ['packagegroup'] },
'search' : 'Search package groups by name',
'searchtip' : 'Enter a package group name to find it',
'columns' : [{
'col_name' : 'Package group name',
'col_id' : RecipeListModel.COL_NAME,
'col_style': 'text',
'col_min' : 100,
'col_max' : 400,
'expand' : 'True'
}, {
'col_name' : 'Included',
'col_id' : RecipeListModel.COL_INC,
'col_style': 'check toggle',
'col_min' : 100,
'col_max' : 100
}]
}
]
(INCLUDED,
ALL,
TASKS) = range(3)
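    # These indices correspond, in order, to the entries of ``pages`` above:
    # INCLUDED -> 'Included recipes', ALL -> 'All recipes', TASKS -> 'Package Groups'.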
def __init__(self, builder = None):
super(RecipeSelectionPage, self).__init__(builder, "Step 1 of 2: Edit recipes")
# set invisible members
self.recipe_model = self.builder.recipe_model
# create visual elements
self.create_visual_elements()
def included_clicked_cb(self, button):
self.ins.set_current_page(self.INCLUDED)
def create_visual_elements(self):
self.eventbox = self.add_onto_top_bar(None, 73)
self.pack_start(self.eventbox, expand=False, fill=False)
self.pack_start(self.group_align, expand=True, fill=True)
# set visible members
self.ins = HobNotebook()
self.tables = [] # we need modify table when the dialog is shown
search_names = []
search_tips = []
# append the tabs in order
for page in self.pages:
columns = page['columns']
name = page['name']
tab = HobViewTable(columns, name)
search_names.append(page['search'])
search_tips.append(page['searchtip'])
filter = page['filter']
sort_model = self.recipe_model.tree_model(filter, initial=True)
tab.set_model(sort_model)
tab.connect("toggled", self.table_toggled_cb, name)
if name == "Included recipes":
tab.connect("button-release-event", self.button_click_cb)
tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
if name == "Package Groups":
tab.connect("button-release-event", self.button_click_cb)
tab.connect("cell-fadeinout-stopped", self.after_fadeout_checkin_include)
if name == "All recipes":
tab.connect("button-release-event", self.button_click_cb)
tab.connect("cell-fadeinout-stopped", self.button_click_cb)
self.ins.append_page(tab, page['name'], page['tooltip'])
self.tables.append(tab)
self.ins.set_entry(search_names, search_tips)
self.ins.search.connect("changed", self.search_entry_changed)
# add all into the window
self.box_group_area.pack_start(self.ins, expand=True, fill=True)
button_box = gtk.HBox(False, 6)
self.box_group_area.pack_end(button_box, expand=False, fill=False)
self.build_packages_button = HobButton('Build packages')
#self.build_packages_button.set_size_request(205, 49)
self.build_packages_button.set_tooltip_text("Build selected recipes into packages")
self.build_packages_button.set_flags(gtk.CAN_DEFAULT)
self.build_packages_button.grab_default()
self.build_packages_button.connect("clicked", self.build_packages_clicked_cb)
button_box.pack_end(self.build_packages_button, expand=False, fill=False)
self.back_button = HobAltButton('Cancel')
self.back_button.connect("clicked", self.back_button_clicked_cb)
button_box.pack_end(self.back_button, expand=False, fill=False)
def search_entry_changed(self, entry):
text = entry.get_text()
if self.ins.search_focus:
self.ins.search_focus = False
elif self.ins.page_changed:
self.ins.page_change = False
self.filter_search(entry)
elif text not in self.ins.search_names:
self.filter_search(entry)
def filter_search(self, entry):
text = entry.get_text()
current_tab = self.ins.get_current_page()
filter = self.pages[current_tab]['filter']
filter[RecipeListModel.COL_NAME] = text
self.tables[current_tab].set_model(self.recipe_model.tree_model(filter, search_data=text))
if self.recipe_model.filtered_nb == 0:
if not self.ins.get_nth_page(current_tab).top_bar:
self.ins.get_nth_page(current_tab).add_no_result_bar(entry)
self.ins.get_nth_page(current_tab).top_bar.show()
self.ins.get_nth_page(current_tab).scroll.hide()
else:
if self.ins.get_nth_page(current_tab).top_bar:
self.ins.get_nth_page(current_tab).top_bar.hide()
self.ins.get_nth_page(current_tab).scroll.show()
if entry.get_text() == '':
entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, False)
else:
entry.set_icon_sensitive(gtk.ENTRY_ICON_SECONDARY, True)
def button_click_cb(self, widget, event):
path, col = widget.table_tree.get_cursor()
tree_model = widget.table_tree.get_model()
if path and col.get_title() != 'Included': # else activation is likely a removal
properties = {'summary': '', 'name': '', 'version': '', 'revision': '', 'binb': '', 'group': '', 'license': '', 'homepage': '', 'bugtracker': '', 'description': ''}
properties['summary'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_SUMMARY)
properties['name'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_NAME)
properties['version'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_VERSION)
properties['revision'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_REVISION)
properties['binb'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BINB)
properties['group'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_GROUP)
properties['license'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_LIC)
properties['homepage'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_HOMEPAGE)
properties['bugtracker'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_BUGTRACKER)
properties['description'] = tree_model.get_value(tree_model.get_iter(path), RecipeListModel.COL_DESC)
self.builder.show_recipe_property_dialog(properties)
def build_packages_clicked_cb(self, button):
self.builder.build_packages()
def back_button_clicked_cb(self, button):
self.builder.recipe_model.set_selected_image(self.builder.configuration.initial_selected_image)
self.builder.image_configuration_page.update_image_combo(self.builder.recipe_model, self.builder.configuration.initial_selected_image)
self.builder.image_configuration_page.update_image_desc()
self.builder.show_configuration()
def refresh_selection(self):
self.builder.configuration.selected_image = self.recipe_model.get_selected_image()
_, self.builder.configuration.selected_recipes = self.recipe_model.get_selected_recipes()
self.ins.show_indicator_icon("Included recipes", len(self.builder.configuration.selected_recipes))
def toggle_item_idle_cb(self, path, view_tree, cell, pagename):
if not self.recipe_model.path_included(path):
self.recipe_model.include_item(item_path=path, binb="User Selected", image_contents=False)
else:
if pagename == "Included recipes":
self.pre_fadeout_checkout_include(view_tree)
self.recipe_model.exclude_item(item_path=path)
self.render_fadeout(view_tree, cell)
else:
self.recipe_model.exclude_item(item_path=path)
self.refresh_selection()
if not self.builder.customized:
self.builder.customized = True
self.builder.configuration.selected_image = self.recipe_model.__custom_image__
self.builder.rcppkglist_populated()
self.builder.window_sensitive(True)
def table_toggled_cb(self, table, cell, view_path, toggled_columnid, view_tree, pagename):
# Click to include a recipe
self.builder.window_sensitive(False)
view_model = view_tree.get_model()
path = self.recipe_model.convert_vpath_to_path(view_model, view_path)
glib.idle_add(self.toggle_item_idle_cb, path, view_tree, cell, pagename)
def pre_fadeout_checkout_include(self, tree):
#resync the included items to a backup fade include column
it = self.recipe_model.get_iter_first()
while it:
active = self.recipe_model.get_value(it, self.recipe_model.COL_INC)
self.recipe_model.set(it, self.recipe_model.COL_FADE_INC, active)
it = self.recipe_model.iter_next(it)
        # Check out a model based on the column COL_FADE_INC, which saves the
        # previous state of column COL_INC before exclude_item is done.
filter = { RecipeListModel.COL_FADE_INC : [True],
RecipeListModel.COL_TYPE : ['recipe', 'packagegroup'] }
new_model = self.recipe_model.tree_model(filter, excluded_items_ahead=True)
tree.set_model(new_model)
def render_fadeout(self, tree, cell):
if (not cell) or (not tree):
return
to_render_cells = []
model = tree.get_model()
it = model.get_iter_first()
while it:
path = model.get_path(it)
prev_cell_is_active = model.get_value(it, RecipeListModel.COL_FADE_INC)
curr_cell_is_active = model.get_value(it, RecipeListModel.COL_INC)
if (prev_cell_is_active == True) and (curr_cell_is_active == False):
to_render_cells.append(path)
it = model.iter_next(it)
cell.fadeout(tree, 1000, to_render_cells)
def after_fadeout_checkin_include(self, table, ctrl, cell, tree):
tree.set_model(self.recipe_model.tree_model(self.pages[0]['filter']))
def set_recipe_curr_tab(self, curr_page):
self.ins.set_current_page(curr_page)
|
|
"""
Like described in the :mod:`parso.python.tree` module,
there's a need for an ast like module to represent the states of parsed
modules.
But now there are also structures in Python that need a little bit more than
that. An ``Instance`` for example is only a ``Class`` before it is
instantiated. This class represents these cases.
So, why is there also a ``Class`` class here? Well, there are decorators and
they change classes in Python 3.
Representation modules also define "magic methods". Those methods look like
``py__foo__`` and are typically mappable to the Python equivalents ``__call__``
and others. Here's a list:
====================================== ========================================
**Method** **Description**
-------------------------------------- ----------------------------------------
py__call__(params: Array) On callable objects, returns types.
py__bool__() Returns True/False/None; None means that
there's no certainty.
py__bases__() Returns a list of base classes.
py__mro__() Returns a list of classes (the mro).
py__iter__() Returns a generator of a set of types.
py__class__() Returns the class of an instance.
py__getitem__(index: int/str) Returns a set of types of the
Can raise an IndexError/KeyError.
py__file__() Only on modules. Returns None if does
not exist.
py__package__() Only on modules. For the import system.
py__path__() Only on modules. For the import system.
py__get__(call_object) Only on instances. Simulates
descriptors.
py__doc__(include_call_signature: Returns the docstring for a context.
bool)
====================================== ========================================
"""
import os
import pkgutil
import imp
import re
from itertools import chain
from parso.python import tree
from parso import python_bytes_to_unicode
from jedi._compatibility import use_metaclass
from jedi import debug
from jedi.evaluate.cache import evaluator_method_cache, CachedMetaClass
from jedi.evaluate import compiled
from jedi.evaluate import recursion
from jedi.evaluate import iterable
from jedi.evaluate import docstrings
from jedi.evaluate import pep0484
from jedi.evaluate import param
from jedi.evaluate import flow_analysis
from jedi.evaluate import imports
from jedi.evaluate import helpers
from jedi.evaluate.filters import ParserTreeFilter, FunctionExecutionFilter, \
GlobalNameFilter, DictFilter, ContextName, AbstractNameDefinition, \
ParamName, AnonymousInstanceParamName, TreeNameDefinition, \
ContextNameMixin
from jedi.evaluate import context
from jedi.evaluate.context import ContextualizedNode
from jedi import parser_utils
from jedi.evaluate.parser_cache import get_yield_exprs
def apply_py__get__(context, base_context):
try:
method = context.py__get__
except AttributeError:
yield context
else:
for descriptor_context in method(base_context):
yield descriptor_context
class ClassName(TreeNameDefinition):
def __init__(self, parent_context, tree_name, name_context):
super(ClassName, self).__init__(parent_context, tree_name)
self._name_context = name_context
def infer(self):
# TODO this _name_to_types might get refactored and be a part of the
# parent class. Once it is, we can probably just overwrite method to
# achieve this.
from jedi.evaluate.finder import _name_to_types
inferred = _name_to_types(
self.parent_context.evaluator, self._name_context, self.tree_name)
for result_context in inferred:
for c in apply_py__get__(result_context, self.parent_context):
yield c
class ClassFilter(ParserTreeFilter):
name_class = ClassName
def _convert_names(self, names):
return [self.name_class(self.context, name, self._node_context)
for name in names]
class ClassContext(use_metaclass(CachedMetaClass, context.TreeContext)):
"""
    This class is not only important to extend `tree.Class`, it is also
    important for descriptors (if the descriptor methods are evaluated or not).
"""
api_type = 'class'
def __init__(self, evaluator, classdef, parent_context):
super(ClassContext, self).__init__(evaluator, parent_context=parent_context)
self.tree_node = classdef
@evaluator_method_cache(default=())
def py__mro__(self):
def add(cls):
if cls not in mro:
mro.append(cls)
mro = [self]
# TODO Do a proper mro resolution. Currently we are just listing
# classes. However, it's a complicated algorithm.
for lazy_cls in self.py__bases__():
# TODO there's multiple different mro paths possible if this yields
# multiple possibilities. Could be changed to be more correct.
for cls in lazy_cls.infer():
# TODO detect for TypeError: duplicate base class str,
# e.g. `class X(str, str): pass`
try:
mro_method = cls.py__mro__
except AttributeError:
# TODO add a TypeError like:
"""
>>> class Y(lambda: test): pass
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: function() argument 1 must be code, not str
>>> class Y(1): pass
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: int() takes at most 2 arguments (3 given)
"""
pass
else:
add(cls)
for cls_new in mro_method():
add(cls_new)
return tuple(mro)
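    # Illustration of the simplified resolution above (not part of the jedi API):
    # for ``class B(A): pass`` and ``class C(B): pass``, py__mro__ on C yields
    # roughly (C, B, A, object) by appending each base and that base's own
    # py__mro__ in order, rather than computing a real C3 linearization.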
@evaluator_method_cache(default=())
def py__bases__(self):
arglist = self.tree_node.get_super_arglist()
if arglist:
args = param.TreeArguments(self.evaluator, self, arglist)
return [value for key, value in args.unpack() if key is None]
else:
return [context.LazyKnownContext(compiled.create(self.evaluator, object))]
def py__call__(self, params):
from jedi.evaluate.instance import TreeInstance
return set([TreeInstance(self.evaluator, self.parent_context, self, params)])
def py__class__(self):
return compiled.create(self.evaluator, type)
def get_params(self):
from jedi.evaluate.instance import AnonymousInstance
anon = AnonymousInstance(self.evaluator, self.parent_context, self)
return [AnonymousInstanceParamName(anon, param.name) for param in self.funcdef.get_params()]
def get_filters(self, search_global, until_position=None, origin_scope=None, is_instance=False):
if search_global:
yield ParserTreeFilter(
self.evaluator,
context=self,
until_position=until_position,
origin_scope=origin_scope
)
else:
for cls in self.py__mro__():
if isinstance(cls, compiled.CompiledObject):
for filter in cls.get_filters(is_instance=is_instance):
yield filter
else:
yield ClassFilter(
self.evaluator, self, node_context=cls,
origin_scope=origin_scope)
def is_class(self):
return True
def get_function_slot_names(self, name):
for filter in self.get_filters(search_global=False):
names = filter.get(name)
if names:
return names
return []
def get_param_names(self):
for name in self.get_function_slot_names('__init__'):
for context_ in name.infer():
try:
method = context_.get_param_names
except AttributeError:
pass
else:
return list(method())[1:]
return []
@property
def name(self):
return ContextName(self, self.tree_node.name)
class LambdaName(AbstractNameDefinition):
string_name = '<lambda>'
def __init__(self, lambda_context):
self._lambda_context = lambda_context
self.parent_context = lambda_context.parent_context
def start_pos(self):
return self._lambda_context.tree_node.start_pos
def infer(self):
return set([self._lambda_context])
class FunctionContext(use_metaclass(CachedMetaClass, context.TreeContext)):
"""
Needed because of decorators. Decorators are evaluated here.
"""
api_type = 'function'
def __init__(self, evaluator, parent_context, funcdef):
""" This should not be called directly """
super(FunctionContext, self).__init__(evaluator, parent_context)
self.tree_node = funcdef
def get_filters(self, search_global, until_position=None, origin_scope=None):
if search_global:
yield ParserTreeFilter(
self.evaluator,
context=self,
until_position=until_position,
origin_scope=origin_scope
)
else:
scope = self.py__class__()
for filter in scope.get_filters(search_global=False, origin_scope=origin_scope):
yield filter
def infer_function_execution(self, function_execution):
"""
Created to be used by inheritance.
"""
yield_exprs = get_yield_exprs(self.evaluator, self.tree_node)
if yield_exprs:
return set([iterable.Generator(self.evaluator, function_execution)])
else:
return function_execution.get_return_values()
def get_function_execution(self, arguments=None):
if arguments is None:
arguments = param.AnonymousArguments()
return FunctionExecutionContext(self.evaluator, self.parent_context, self, arguments)
def py__call__(self, arguments):
function_execution = self.get_function_execution(arguments)
return self.infer_function_execution(function_execution)
def py__class__(self):
# This differentiation is only necessary for Python2. Python3 does not
# use a different method class.
if isinstance(parser_utils.get_parent_scope(self.tree_node), tree.Class):
name = 'METHOD_CLASS'
else:
name = 'FUNCTION_CLASS'
return compiled.get_special_object(self.evaluator, name)
@property
def name(self):
if self.tree_node.type == 'lambdef':
return LambdaName(self)
return ContextName(self, self.tree_node.name)
def get_param_names(self):
function_execution = self.get_function_execution()
return [ParamName(function_execution, param.name)
for param in self.tree_node.get_params()]
class FunctionExecutionContext(context.TreeContext):
"""
This class is used to evaluate functions and their returns.
This is the most complicated class, because it contains the logic to
transfer parameters. It is even more complicated, because there may be
multiple calls to functions and recursion has to be avoided. But this is
responsibility of the decorators.
"""
function_execution_filter = FunctionExecutionFilter
def __init__(self, evaluator, parent_context, function_context, var_args):
super(FunctionExecutionContext, self).__init__(evaluator, parent_context)
self.function_context = function_context
self.tree_node = function_context.tree_node
self.var_args = var_args
@evaluator_method_cache(default=set())
@recursion.execution_recursion_decorator()
def get_return_values(self, check_yields=False):
funcdef = self.tree_node
if funcdef.type == 'lambdef':
return self.evaluator.eval_element(self, funcdef.children[-1])
if check_yields:
types = set()
returns = get_yield_exprs(self.evaluator, funcdef)
else:
returns = funcdef.iter_return_stmts()
types = set(docstrings.infer_return_types(self.function_context))
types |= set(pep0484.infer_return_types(self.function_context))
for r in returns:
check = flow_analysis.reachability_check(self, funcdef, r)
if check is flow_analysis.UNREACHABLE:
debug.dbg('Return unreachable: %s', r)
else:
if check_yields:
types |= set(self._eval_yield(r))
else:
try:
children = r.children
except AttributeError:
types.add(compiled.create(self.evaluator, None))
else:
types |= self.eval_node(children[1])
if check is flow_analysis.REACHABLE:
debug.dbg('Return reachable: %s', r)
break
return types
def _eval_yield(self, yield_expr):
if yield_expr.type == 'keyword':
# `yield` just yields None.
yield context.LazyKnownContext(compiled.create(self.evaluator, None))
return
node = yield_expr.children[1]
if node.type == 'yield_arg': # It must be a yield from.
cn = ContextualizedNode(self, node.children[1])
for lazy_context in iterable.py__iter__(self.evaluator, cn.infer(), cn):
yield lazy_context
else:
yield context.LazyTreeContext(self, node)
@recursion.execution_recursion_decorator(default=iter([]))
def get_yield_values(self):
for_parents = [(y, tree.search_ancestor(y, 'for_stmt', 'funcdef',
'while_stmt', 'if_stmt'))
for y in get_yield_exprs(self.evaluator, self.tree_node)]
# Calculate if the yields are placed within the same for loop.
yields_order = []
last_for_stmt = None
for yield_, for_stmt in for_parents:
# For really simple for loops we can predict the order. Otherwise
# we just ignore it.
parent = for_stmt.parent
if parent.type == 'suite':
parent = parent.parent
if for_stmt.type == 'for_stmt' and parent == self.tree_node \
and parser_utils.for_stmt_defines_one_name(for_stmt): # Simplicity for now.
if for_stmt == last_for_stmt:
yields_order[-1][1].append(yield_)
else:
yields_order.append((for_stmt, [yield_]))
elif for_stmt == self.tree_node:
yields_order.append((None, [yield_]))
else:
types = self.get_return_values(check_yields=True)
if types:
yield context.get_merged_lazy_context(list(types))
return
last_for_stmt = for_stmt
evaluator = self.evaluator
for for_stmt, yields in yields_order:
if for_stmt is None:
# No for_stmt, just normal yields.
for yield_ in yields:
for result in self._eval_yield(yield_):
yield result
else:
input_node = for_stmt.get_testlist()
cn = ContextualizedNode(self, input_node)
ordered = iterable.py__iter__(evaluator, cn.infer(), cn)
ordered = list(ordered)
for lazy_context in ordered:
dct = {str(for_stmt.children[1].value): lazy_context.infer()}
with helpers.predefine_names(self, for_stmt, dct):
for yield_in_same_for_stmt in yields:
for result in self._eval_yield(yield_in_same_for_stmt):
yield result
def get_filters(self, search_global, until_position=None, origin_scope=None):
yield self.function_execution_filter(self.evaluator, self,
until_position=until_position,
origin_scope=origin_scope)
@evaluator_method_cache()
def get_params(self):
return self.var_args.get_params(self)
class ModuleAttributeName(AbstractNameDefinition):
"""
For module attributes like __file__, __str__ and so on.
"""
api_type = 'instance'
def __init__(self, parent_module, string_name):
self.parent_context = parent_module
self.string_name = string_name
def infer(self):
return compiled.create(self.parent_context.evaluator, str).execute(
param.ValuesArguments([])
)
class ModuleName(ContextNameMixin, AbstractNameDefinition):
start_pos = 1, 0
def __init__(self, context, name):
self._context = context
self._name = name
@property
def string_name(self):
return self._name
class ModuleContext(use_metaclass(CachedMetaClass, context.TreeContext)):
api_type = 'module'
parent_context = None
def __init__(self, evaluator, module_node, path):
super(ModuleContext, self).__init__(evaluator, parent_context=None)
self.tree_node = module_node
self._path = path
def get_filters(self, search_global, until_position=None, origin_scope=None):
yield ParserTreeFilter(
self.evaluator,
context=self,
until_position=until_position,
origin_scope=origin_scope
)
yield GlobalNameFilter(self, self.tree_node)
yield DictFilter(self._sub_modules_dict())
yield DictFilter(self._module_attributes_dict())
for star_module in self.star_imports():
yield next(star_module.get_filters(search_global))
# I'm not sure if the star import cache is really that effective anymore
# with all the other really fast import caches. Recheck. Also we would need
# to push the star imports into Evaluator.modules, if we reenable this.
@evaluator_method_cache([])
def star_imports(self):
modules = []
for i in self.tree_node.iter_imports():
if i.is_star_import():
name = i.get_paths()[-1][-1]
new = imports.infer_import(self, name)
for module in new:
if isinstance(module, ModuleContext):
modules += module.star_imports()
modules += new
return modules
@evaluator_method_cache()
def _module_attributes_dict(self):
names = ['__file__', '__package__', '__doc__', '__name__']
# All the additional module attributes are strings.
return dict((n, ModuleAttributeName(self, n)) for n in names)
@property
def _string_name(self):
""" This is used for the goto functions. """
if self._path is None:
return '' # no path -> empty name
else:
sep = (re.escape(os.path.sep),) * 2
r = re.search(r'([^%s]*?)(%s__init__)?(\.py|\.so)?$' % sep, self._path)
# Remove PEP 3149 names
            return re.sub(r'\.[a-z]+-\d{2}[mud]{0,3}$', '', r.group(1))
@property
@evaluator_method_cache()
def name(self):
return ModuleName(self, self._string_name)
def _get_init_directory(self):
"""
:return: The path to the directory of a package. None in case it's not
a package.
"""
for suffix, _, _ in imp.get_suffixes():
ending = '__init__' + suffix
py__file__ = self.py__file__()
if py__file__ is not None and py__file__.endswith(ending):
# Remove the ending, including the separator.
return self.py__file__()[:-len(ending) - 1]
return None
def py__name__(self):
for name, module in self.evaluator.modules.items():
if module == self and name != '':
return name
return '__main__'
def py__file__(self):
"""
        In contrast to Python's __file__, this can be None.
"""
if self._path is None:
return None
return os.path.abspath(self._path)
def py__package__(self):
if self._get_init_directory() is None:
return re.sub(r'\.?[^\.]+$', '', self.py__name__())
else:
return self.py__name__()
def _py__path__(self):
search_path = self.evaluator.sys_path
init_path = self.py__file__()
if os.path.basename(init_path) == '__init__.py':
with open(init_path, 'rb') as f:
content = python_bytes_to_unicode(f.read(), errors='replace')
# these are strings that need to be used for namespace packages,
# the first one is ``pkgutil``, the second ``pkg_resources``.
options = ('declare_namespace(__name__)', 'extend_path(__path__')
if options[0] in content or options[1] in content:
# It is a namespace, now try to find the rest of the
# modules on sys_path or whatever the search_path is.
paths = set()
for s in search_path:
other = os.path.join(s, self.name.string_name)
if os.path.isdir(other):
paths.add(other)
if paths:
return list(paths)
# TODO I'm not sure if this is how nested namespace
# packages work. The tests are not really good enough to
# show that.
# Default to this.
return [self._get_init_directory()]
@property
def py__path__(self):
"""
Not seen here, since it's a property. The callback actually uses a
variable, so use it like::
foo.py__path__(sys_path)
In case of a package, this returns Python's __path__ attribute, which
is a list of paths (strings).
Raises an AttributeError if the module is not a package.
"""
path = self._get_init_directory()
if path is None:
raise AttributeError('Only packages have __path__ attributes.')
else:
return self._py__path__
@evaluator_method_cache()
def _sub_modules_dict(self):
"""
Lists modules in the directory of this module (if this module is a
package).
"""
path = self._path
names = {}
if path is not None and path.endswith(os.path.sep + '__init__.py'):
mods = pkgutil.iter_modules([os.path.dirname(path)])
for module_loader, name, is_pkg in mods:
# It's obviously a relative import to the current module.
names[name] = imports.SubModuleName(self, name)
# TODO add something like this in the future, its cleaner than the
# import hacks.
# ``os.path`` is a hardcoded exception, because it's a
# ``sys.modules`` modification.
# if str(self.name) == 'os':
# names.append(Name('path', parent_context=self))
return names
def py__class__(self):
return compiled.get_special_object(self.evaluator, 'MODULE_CLASS')
def __repr__(self):
return "<%s: %s@%s-%s>" % (
self.__class__.__name__, self._string_name,
self.tree_node.start_pos[0], self.tree_node.end_pos[0])
class ImplicitNSName(AbstractNameDefinition):
"""
Accessing names for implicit namespace packages should infer to nothing.
This object will prevent Jedi from raising exceptions
"""
def __init__(self, implicit_ns_context, string_name):
self.implicit_ns_context = implicit_ns_context
self.string_name = string_name
def infer(self):
return []
def get_root_context(self):
return self.implicit_ns_context
class ImplicitNamespaceContext(use_metaclass(CachedMetaClass, context.TreeContext)):
"""
Provides support for implicit namespace packages
"""
api_type = 'module'
parent_context = None
def __init__(self, evaluator, fullname):
super(ImplicitNamespaceContext, self).__init__(evaluator, parent_context=None)
self.evaluator = evaluator
self.fullname = fullname
def get_filters(self, search_global, until_position=None, origin_scope=None):
yield DictFilter(self._sub_modules_dict())
@property
@evaluator_method_cache()
def name(self):
string_name = self.py__package__().rpartition('.')[-1]
return ImplicitNSName(self, string_name)
def py__file__(self):
return None
def py__package__(self):
"""Return the fullname
"""
return self.fullname
@property
def py__path__(self):
return lambda: [self.paths]
@evaluator_method_cache()
def _sub_modules_dict(self):
names = {}
paths = self.paths
file_names = chain.from_iterable(os.listdir(path) for path in paths)
mods = [
file_name.rpartition('.')[0] if '.' in file_name else file_name
for file_name in file_names
if file_name != '__pycache__'
]
for name in mods:
names[name] = imports.SubModuleName(self, name)
return names
|
|
#===============================================================================
# Copyright 2009 Matt Chaput
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#===============================================================================
from threading import Lock
from whoosh.fields import FieldConfigurationError
from whoosh.filedb.filepostings import FilePostingReader
from whoosh.filedb.filetables import FileTableReader, FileRecordReader, FileListReader
from whoosh.filedb.filetables import encode_termkey, decode_termkey
from whoosh.filedb.filetables import encode_vectorkey, decode_vectorkey, decode_terminfo
from whoosh.filedb.filetables import depickle, unpackint
from whoosh.postings import Exclude
from whoosh.reading import IndexReader, TermNotFound
from whoosh.util import protected
# Convenience functions
def open_terms(storage, segment):
termfile = storage.open_file(segment.term_filename)
return FileTableReader(termfile,
keycoder = encode_termkey,
keydecoder = decode_termkey,
valuedecoder = decode_terminfo)
def open_doclengths(storage, segment, fieldcount):
from whoosh.filedb.filewriting import DOCLENGTH_TYPE
rformat = "!" + DOCLENGTH_TYPE * fieldcount
recordfile = storage.open_file(segment.doclen_filename)
return FileRecordReader(recordfile, rformat)
def open_storedfields(storage, segment, storedfieldnames):
def dictifier(value):
value = depickle(value)
return dict(zip(storedfieldnames, value))
listfile = storage.open_file(segment.docs_filename, mapped=False)
return FileListReader(listfile, segment.doc_count_all(),
valuedecoder = dictifier)
def open_vectors(storage, segment):
vectorfile = storage.open_file(segment.vector_filename)
return FileTableReader(vectorfile, keycoder = encode_vectorkey,
keydecoder = decode_vectorkey,
valuedecoder = unpackint)
# Reader class
class SegmentReader(IndexReader):
def __init__(self, storage, segment, schema):
self.storage = storage
self.segment = segment
self.schema = schema
self._scorable_fields = schema.scorable_fields()
self._fieldnum_to_scorable_pos = dict((fnum, i) for i, fnum
in enumerate(self._scorable_fields))
self.termtable = open_terms(storage, segment)
self.postfile = storage.open_file(segment.posts_filename, mapped=False)
self.docstable = open_storedfields(storage, segment, schema.stored_field_names())
self.doclengths = None
if self._scorable_fields:
self.doclengths = open_doclengths(storage, segment, len(self._scorable_fields))
self.has_deletions = segment.has_deletions
self.is_deleted = segment.is_deleted
self.doc_count = segment.doc_count
self.doc_count_all = segment.doc_count_all
self.vectortable = None
self.is_closed = False
self._sync_lock = Lock()
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.segment)
@protected
def __contains__(self, term):
return (self.schema.to_number(term[0]), term[1]) in self.termtable
def close(self):
self.docstable.close()
self.termtable.close()
if self.vectortable:
self.vectortable.close()
if self.doclengths:
self.doclengths.close()
self.is_closed = True
def _open_vectors(self):
if not self.vectortable:
storage, segment = self.storage, self.segment
self.vectortable = open_vectors(storage, segment)
self.vpostfile = storage.open_file(segment.vectorposts_filename, mapped=False)
def vector(self, docnum, fieldid):
self._open_vectors()
schema = self.schema
fieldnum = schema.to_number(fieldid)
vformat = schema[fieldnum].vector
offset = self.vectortable[(docnum, fieldnum)]
return FilePostingReader(self.vpostfile, offset, vformat, stringids=True)
@protected
def stored_fields(self, docnum):
return self.docstable[docnum]
@protected
def all_stored_fields(self):
is_deleted = self.segment.is_deleted
for docnum in xrange(0, self.segment.doc_count_all()):
if not is_deleted(docnum):
yield self.docstable[docnum]
def field_length(self, fieldid):
fieldid = self.schema.to_number(fieldid)
return self.segment.field_length(fieldid)
@protected
def doc_field_length(self, docnum, fieldid):
fieldid = self.schema.to_number(fieldid)
if fieldid not in self._scorable_fields:
raise FieldConfigurationError("Field %r does not store lengths" % fieldid)
pos = self._fieldnum_to_scorable_pos[fieldid]
return self.doclengths.at(docnum, pos)
@protected
def doc_field_lengths(self, docnum):
if not self.doclengths:
return []
return self.doclengths.record(docnum)
@protected
def has_vector(self, docnum, fieldnum):
self._open_vectors()
return (docnum, fieldnum) in self.vectortable
@protected
def __iter__(self):
for (fn, t), (totalfreq, offset, postcount) in self.termtable:
yield (fn, t, postcount, totalfreq)
@protected
def iter_from(self, fieldnum, text):
tt = self.termtable
for (fn, t), (totalfreq, offset, postcount) in tt.items_from((fieldnum, text)):
yield (fn, t, postcount, totalfreq)
@protected
def _term_info(self, fieldnum, text):
try:
return self.termtable[(fieldnum, text)]
except KeyError:
raise TermNotFound("%s:%r" % (fieldnum, text))
def doc_frequency(self, fieldid, text):
try:
fieldid = self.schema.to_number(fieldid)
return self._term_info(fieldid, text)[2]
except TermNotFound:
return 0
def frequency(self, fieldid, text):
try:
fieldid = self.schema.to_number(fieldid)
return self._term_info(fieldid, text)[0]
except TermNotFound:
return 0
@protected
def lexicon(self, fieldid):
# The base class has a lexicon() implementation that uses
# iter_from() and throws away the value, but overriding to
# use FileTableReader.keys_from() is much, much faster.
tt = self.termtable
fieldid = self.schema.to_number(fieldid)
for fn, t in tt.keys_from((fieldid, '')):
if fn != fieldid:
return
yield t
@protected
def expand_prefix(self, fieldid, prefix):
# The base class has an expand_prefix() implementation that uses
# iter_from() and throws away the value, but overriding to
# use FileTableReader.keys_from() is much, much faster.
tt = self.termtable
fieldid = self.schema.to_number(fieldid)
for fn, t in tt.keys_from((fieldid, prefix)):
if fn != fieldid or not t.startswith(prefix):
return
yield t
def postings(self, fieldid, text, exclude_docs = None):
schema = self.schema
fieldnum = schema.to_number(fieldid)
format = schema[fieldnum].format
try:
totalfreq, offset, postcount = self.termtable[(fieldnum, text)] #@UnusedVariable
except KeyError:
raise TermNotFound("%s:%r" % (fieldid, text))
if self.segment.deleted and exclude_docs:
exclude_docs = self.segment.deleted | exclude_docs
elif self.segment.deleted:
exclude_docs = self.segment.deleted
postreader = FilePostingReader(self.postfile, offset, format)
if exclude_docs:
postreader = Exclude(postreader, exclude_docs)
return postreader
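# A minimal usage sketch (building the storage/segment/schema objects is handled
# by the surrounding whoosh.filedb code and is omitted; names are illustrative):
#   reader = SegmentReader(storage, segment, schema)
#   for fieldnum, text, postcount, totalfreq in reader:
#       ...
#   reader.close()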
|
|
# -*- coding: utf-8 -*-
import heapq
def sorted_merge(a, b):
""" 10.1 Sorted Merge: You are given two sorted arrays, A and B, where A
has a large enough buffer at the end to hold B. Write a method to merge B
into A in sorted order.
"""
if len(a) == 0 and len(b) == 0:
return []
if len(a) == 0:
return b
if len(b) == 0:
return a
ha, ta = a[0], a[1:]
hb, tb = b[0], b[1:]
if ha < hb:
return [ha] + sorted_merge(ta, b)
else:
return [hb] + sorted_merge(a, tb)
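# Quick sanity check for sorted_merge (illustrative input):
#   sorted_merge([1, 3, 5], [2, 4]) -> [1, 2, 3, 4, 5]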
def group_anagrams(words):
""" 10.2 Group Anagrams: Write a method to sort an array of strings so that
all the anagrams are next to each other.
"""
words_with_sorted = [(w, str(sorted(list(w)))) for w in words]
words_sorted_by_anagram = sorted(words_with_sorted, key=lambda p: p[1])
return [p[0] for p in words_sorted_by_anagram]
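# Quick sanity check for group_anagrams (illustrative input; anagrams end up adjacent):
#   group_anagrams(["abc", "foo", "cab"]) -> ["abc", "cab", "foo"]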
def search_rotated(arr, el):
""" 10.3 Search in Rotated Array: Given a sorted array of n integers that
has been rotated an unknown number of times, write code to find an element
in the array. You may assume that the array was originally sorted in
increasing order.
EXAMPLE
Input: find 5 in {15, 16, 19, 20, 25, 1, 3, 4, 5, 7, 10, 14}
Output: 8 (the index of 5 in the array)
"""
def search(arr, el, left, right):
if left > right:
return None
if left == right:
if arr[left] == el:
return left
else:
return None
        middle = (left + right) / 2
        if arr[middle] == el:
            return middle
        if arr[left] <= arr[middle]:  # left half is sorted
            if arr[left] <= el < arr[middle]:
                return search(arr, el, left, middle - 1)
            return search(arr, el, middle + 1, right)
        else:  # right half is sorted
            if arr[middle] < el <= arr[right]:
                return search(arr, el, middle + 1, right)
            return search(arr, el, left, middle - 1)
return search(arr, el, 0, len(arr) - 1)
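# Example from the problem statement above:
#   search_rotated([15, 16, 19, 20, 25, 1, 3, 4, 5, 7, 10, 14], 5) -> 8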
def sorted_search_no_size(x, elementAt):
""" 10.4 Sorted Search, No Size: You are given an array-like data structure
Listy which lacks a size method. It does, however, have an elementAt(i)
method that returns the element at index i in 0(1) time. If i is beyond the
bounds of the data structure, it returns -1. (For this reason, the data
structure only supports positive integers.) Given a Listy which contains
sorted, positive integers, find the index at which an element x occurs.
If x occurs multiple times, you may return any index.
"""
def rec_search(el, elementAt, left_idx, right_idx):
left_val = elementAt(left_idx)
right_val = elementAt(right_idx)
if left_idx == right_idx:
if el == left_val:
return left_idx
else:
return None
if right_val == -1:
return rec_search(el, elementAt, left_idx, (left_idx + right_idx) / 2)
if el < right_val:
return rec_search(el, elementAt, left_idx, (left_idx + right_idx) / 2)
if right_val < el:
return rec_search(el, elementAt, right_idx, right_idx * 2)
if right_val == el:
return right_idx
return rec_search(x, elementAt, 0, 100)
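# Hedged usage sketch (hypothetical helper, not part of the original exercise code):
# a Listy can be emulated with a closure over a plain Python list.
def make_element_at(data):
    def element_at(i):
        return data[i] if 0 <= i < len(data) else -1
    return element_at
# sorted_search_no_size(7, make_element_at([1, 3, 5, 7, 9, 11]))  # -> 3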
def sparse_search(arr, s):
""" 10.5 Sparse Search: Given a sorted array of strings that is interspersed
with empty strings, write a method to find the location of a given string.
EXAMPLE:
Input: find "ball" in {"at", "", "", "" , "ball", "", "", "car", "" , "" , "dad", ""}
Output: 4
"""
def spread(arr, middle, left, right):
k = 1
while middle - k >= left and middle + k <= right:
if arr[middle - k] != "":
return middle - k
if arr[middle + k] != "":
return middle + k
k += 1
return middle
def rec_sparse_search(arr, s, left, right):
if left > right:
return None
middle = (left + right) / 2
if arr[middle] == "":
new_middle = spread(arr, middle, left, right)
if new_middle == middle:
return None
middle = new_middle
if arr[middle] == s:
return middle
if arr[middle] < s:
    return rec_sparse_search(arr, s, middle + 1, right)
return rec_sparse_search(arr, s, left, middle - 1)
return rec_sparse_search(arr, s, 0, len(arr) - 1)
def sort_big_file(lines):
""" 10.6 Sort Big File: Imagine you have a 20 GB file with one string per
line. Explain how you would sort the file.
Solution: bucket sort. We make a pass through the file and append each line
to a new file named after the line's first two letters.
At the end we have roughly 26*26 files, each small enough to be sorted in memory.
Finally, merge the files in alphabetic order on disk.
"""
buckets = {}
for line in lines:
prefix = line[:2]
if prefix not in buckets:
buckets[prefix] = []
buckets[prefix].append(line)
for key, bucket in buckets.items():
buckets[key] = sorted(bucket)
sorted_by_keys = sorted(buckets.items(), key=lambda p: p[0])
output = []
for _, bucket in sorted_by_keys:
output = output + bucket
return output
## PRACTICE BASE ALGORITHMS
def bubble_sort(arr):
""" Time complexity O(n^2) Space complexity O(1) """
for i in range(len(arr) - 1):
for j in range(i+1, len(arr)):
if arr[i] > arr[j]:
arr[i], arr[j] = arr[j], arr[i]
return arr
def selection_sort(arr):
for i in range(len(arr) - 1):
min_idx = i
for j in range(i+1, len(arr)):
if arr[min_idx] > arr[j]:
min_idx = j
arr[i], arr[min_idx] = arr[min_idx], arr[i]
return arr
def join_sorted(left, right):
out = []
i, j = 0, 0
while i < len(left) and j < len(right):
if left[i] <= right[j]:
out.append(left[i])
i += 1
else:
out.append(right[j])
j += 1
if i == len(left):
out.extend(right[j:])
if j == len(right):
out.extend(left[i:])
return out
def merge_sort(arr):
if len(arr) == 0 or len(arr) == 1:
return arr
middle = int(len(arr) / 2)
left = merge_sort(arr[:middle])
right = merge_sort(arr[middle:])
return join_sorted(left, right)
def pick_pivot(left, right):
return int((right + left) / 2) # better yet, pick a random number.
def partition(arr, left, right):
"""
*--------*----------*-----------*
left mid frontier right
left is the pivot, it has been moved to the beginning of the array.
frontier marks all the elements we've alreay looked at.
mid is the new index of the pivot, everything to the left is smaller than
pivot, everything to the right is larger.
"""
mid = left + 1
frontier = left + 1
while frontier <= right:  # right is an inclusive index
    if arr[frontier] < arr[left]:
        arr[frontier], arr[mid] = arr[mid], arr[frontier]
        mid += 1
    frontier += 1
arr[left], arr[mid - 1] = arr[mid - 1], arr[left]  # move the pivot into its final slot
return mid - 1
def quick_sort_rec(arr, left, right):
if right <= left:
    return
pivot = pick_pivot(left, right)
arr[left], arr[pivot] = arr[pivot], arr[left]
new_pivot = partition(arr, left, right)
quick_sort_rec(arr, left, new_pivot - 1)
quick_sort_rec(arr, new_pivot + 1, right)
def quick_sort(arr):
quick_sort_rec(arr, 0, len(arr) - 1)
return arr
def heap_sort(arr):
heapq.heapify(arr)
out = []
while len(arr) != 0:
out.append(heapq.heappop(arr))
return out
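# Hedged sanity check (not part of the original file): every sort above should
# agree with the built-in sorted() on a small sample.
if __name__ == '__main__':
    sample = [5, 2, 9, 1, 5, 6]
    for sort_fn in (bubble_sort, selection_sort, merge_sort, quick_sort, heap_sort):
        assert sort_fn(list(sample)) == sorted(sample), sort_fn.__name__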
|
|
# Copyright 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import httplib
import logging
import os
import socket
import StringIO
import urlparse
try:
import ssl
except ImportError:
#TODO(bcwaldon): Handle this failure more gracefully
pass
try:
import json
except ImportError:
import simplejson as json
# Python 2.5 compat fix
if not hasattr(urlparse, 'parse_qsl'):
import cgi
urlparse.parse_qsl = cgi.parse_qsl
from tuskarclient import exc
LOG = logging.getLogger(__name__)
USER_AGENT = 'python-tuskarclient'
CHUNKSIZE = 1024 * 64 # 64kB
class HTTPClient(object):
def __init__(self, endpoint, **kwargs):
self.endpoint = endpoint
self.auth_token = kwargs.get('token')
self.connection_params = self.get_connection_params(endpoint, **kwargs)
@staticmethod
def get_connection_params(endpoint, **kwargs):
parts = urlparse.urlparse(endpoint)
_args = (parts.hostname, parts.port, parts.path)
_kwargs = {'timeout': (float(kwargs.get('timeout'))
if kwargs.get('timeout') else 600)}
if parts.scheme == 'https':
_class = VerifiedHTTPSConnection
_kwargs['ca_file'] = kwargs.get('ca_file', None)
_kwargs['cert_file'] = kwargs.get('cert_file', None)
_kwargs['key_file'] = kwargs.get('key_file', None)
_kwargs['insecure'] = kwargs.get('insecure', False)
elif parts.scheme == 'http':
_class = httplib.HTTPConnection
else:
msg = 'Unsupported scheme: %s' % parts.scheme
raise exc.InvalidEndpoint(msg)
return (_class, _args, _kwargs)
def get_connection(self):
_class = self.connection_params[0]
try:
return _class(*self.connection_params[1][0:2],
**self.connection_params[2])
except httplib.InvalidURL:
raise exc.InvalidEndpoint()
def log_curl_request(self, method, url, kwargs):
curl = ['curl -i -X %s' % method]
for (key, value) in kwargs['headers'].items():
header = '-H \'%s: %s\'' % (key, value)
curl.append(header)
conn_params_fmt = [
('key_file', '--key %s'),
('cert_file', '--cert %s'),
('ca_file', '--cacert %s'),
]
for (key, fmt) in conn_params_fmt:
value = self.connection_params[2].get(key)
if value:
curl.append(fmt % value)
if self.connection_params[2].get('insecure'):
curl.append('-k')
if 'body' in kwargs:
curl.append('-d \'%s\'' % kwargs['body'])
curl.append('%s%s' % (self.endpoint, url))
LOG.debug(' '.join(curl))
@staticmethod
def log_http_response(resp, body=None):
status = (resp.version / 10.0, resp.status, resp.reason)
dump = ['\nHTTP/%.1f %s %s' % status]
dump.extend(['%s: %s' % (k, v) for k, v in resp.getheaders()])
dump.append('')
if body:
dump.extend([body, ''])
LOG.debug('\n'.join(dump))
def _make_connection_url(self, url):
(_class, _args, _kwargs) = self.connection_params
base_url = _args[2]
return '%s/%s' % (base_url.rstrip('/'), url.lstrip('/'))
def _http_request(self, url, method, **kwargs):
"""Send an http request with the specified characteristics.
Wrapper around httplib.HTTP(S)Connection.request to handle tasks such
as setting headers and error handling.
"""
# Copy the kwargs so we can reuse the original in case of redirects
kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
kwargs['headers'].setdefault('User-Agent', USER_AGENT)
if self.auth_token:
kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)
self.log_curl_request(method, url, kwargs)
conn = self.get_connection()
try:
conn_url = self._make_connection_url(url)
conn.request(method, conn_url, **kwargs)
resp = conn.getresponse()
except socket.gaierror as e:
message = "Error finding address for %(url)s: %(e)s" % locals()
raise exc.InvalidEndpoint(message=message)
except (socket.error, socket.timeout) as e:
endpoint = self.endpoint
message = "Error communicating with %(endpoint)s %(e)s" % locals()
raise exc.CommunicationError(message=message)
body_iter = ResponseBodyIterator(resp)
# Read body into string if it isn't obviously image data
if resp.getheader('content-type', None) != 'application/octet-stream':
body_str = ''.join([chunk for chunk in body_iter])
self.log_http_response(resp, body_str)
body_iter = StringIO.StringIO(body_str)
else:
self.log_http_response(resp)
if 400 <= resp.status < 600:
LOG.warn("Request returned failure status.")
raise exc.from_response(resp)
elif resp.status in (301, 302, 305):
# Redirected. Reissue the request to the new location.
return self._http_request(resp.getheader('location'), method, **kwargs)
elif resp.status == 300:
raise exc.from_response(resp)
return resp, body_iter
def json_request(self, method, url, **kwargs):
kwargs.setdefault('headers', {})
kwargs['headers'].setdefault('Content-Type', 'application/json')
kwargs['headers'].setdefault('Accept', 'application/json')
if 'body' in kwargs:
kwargs['body'] = json.dumps(kwargs['body'])
resp, body_iter = self._http_request(url, method, **kwargs)
content_type = resp.getheader('content-type', None)
if resp.status == 204 or resp.status == 205 or content_type is None:
return resp, list()
if 'application/json' in content_type:
body = ''.join([chunk for chunk in body_iter])
try:
body = json.loads(body)
except ValueError:
LOG.error('Could not decode response body as JSON')
else:
body = None
return resp, body
def raw_request(self, method, url, **kwargs):
kwargs.setdefault('headers', {})
kwargs['headers'].setdefault('Content-Type',
'application/octet-stream')
return self._http_request(url, method, **kwargs)
class VerifiedHTTPSConnection(httplib.HTTPSConnection):
"""httplib-compatibile connection using client-side SSL authentication
:see http://code.activestate.com/recipes/
577548-https-httplib-client-connection-with-certificate-v/
"""
def __init__(self, host, port, key_file=None, cert_file=None,
ca_file=None, timeout=None, insecure=False):
httplib.HTTPSConnection.__init__(self, host, port, key_file=key_file,
cert_file=cert_file)
self.key_file = key_file
self.cert_file = cert_file
if ca_file is not None:
self.ca_file = ca_file
else:
self.ca_file = self.get_system_ca_file()
self.timeout = timeout
self.insecure = insecure
def connect(self):
"""Connect to a host on a given (SSL) port.
If ca_file is pointing somewhere, use it to check Server Certificate.
Redefined/copied and extended from httplib.py:1105 (Python 2.6.x).
This is needed to pass cert_reqs=ssl.CERT_REQUIRED as parameter to
ssl.wrap_socket(), which forces SSL to check server certificate against
our client certificate.
"""
sock = socket.create_connection((self.host, self.port), self.timeout)
if self._tunnel_host:
self.sock = sock
self._tunnel()
if self.insecure is True:
kwargs = {'cert_reqs': ssl.CERT_NONE}
else:
kwargs = {'cert_reqs': ssl.CERT_REQUIRED, 'ca_certs': self.ca_file}
if self.cert_file:
kwargs['certfile'] = self.cert_file
if self.key_file:
kwargs['keyfile'] = self.key_file
self.sock = ssl.wrap_socket(sock, **kwargs)
@staticmethod
def get_system_ca_file():
"""Return path to system default CA file."""
# Standard CA file locations for Debian/Ubuntu, RedHat/Fedora,
# Suse, FreeBSD/OpenBSD
ca_path = ['/etc/ssl/certs/ca-certificates.crt',
'/etc/pki/tls/certs/ca-bundle.crt',
'/etc/ssl/ca-bundle.pem',
'/etc/ssl/cert.pem']
for ca in ca_path:
if os.path.exists(ca):
return ca
return None
class ResponseBodyIterator(object):
"""A class that acts as an iterator over an HTTP response."""
def __init__(self, resp):
self.resp = resp
def __iter__(self):
while True:
yield self.next()
def next(self):
chunk = self.resp.read(CHUNKSIZE)
if chunk:
return chunk
else:
raise StopIteration()
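# Hedged usage sketch (not part of the original module); the endpoint, token and
# path below are hypothetical:
# client = HTTPClient('http://tuskar.example.com:8585', token='secret-token')
# resp, body = client.json_request('GET', '/v1/racks')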
|
|
"""Loading unittests."""
import os
import re
import sys
import traceback
import types
import unittest
from fnmatch import fnmatch
from airy.utils.unittest import case, suite
try:
from os.path import relpath
except ImportError:
from airy.utils.unittest.compatibility import relpath
__unittest = True
def _CmpToKey(mycmp):
'Convert a cmp= function into a key= function'
class K(object):
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) == -1
return K
# what about .pyc or .pyo (etc)
# we would need to avoid loading the same tests multiple times
# from '.py', '.pyc' *and* '.pyo'
VALID_MODULE_NAME = re.compile(r'[_a-z]\w*\.py$', re.IGNORECASE)
def _make_failed_import_test(name, suiteClass):
message = 'Failed to import test module: %s' % name
if hasattr(traceback, 'format_exc'):
# Python 2.3 compatibility
# format_exc returns two frames of discover.py as well
message += '\n%s' % traceback.format_exc()
return _make_failed_test('ModuleImportFailure', name, ImportError(message),
suiteClass)
def _make_failed_load_tests(name, exception, suiteClass):
return _make_failed_test('LoadTestsFailure', name, exception, suiteClass)
def _make_failed_test(classname, methodname, exception, suiteClass):
def testFailure(self):
raise exception
attrs = {methodname: testFailure}
TestClass = type(classname, (case.TestCase,), attrs)
return suiteClass((TestClass(methodname),))
class TestLoader(unittest.TestLoader):
"""
This class is responsible for loading tests according to various criteria
and returning them wrapped in a TestSuite
"""
testMethodPrefix = 'test'
sortTestMethodsUsing = cmp
suiteClass = suite.TestSuite
_top_level_dir = None
def loadTestsFromTestCase(self, testCaseClass):
"""Return a suite of all tests cases contained in testCaseClass"""
if issubclass(testCaseClass, suite.TestSuite):
raise TypeError("Test cases should not be derived from TestSuite."
" Maybe you meant to derive from TestCase?")
testCaseNames = self.getTestCaseNames(testCaseClass)
if not testCaseNames and hasattr(testCaseClass, 'runTest'):
testCaseNames = ['runTest']
loaded_suite = self.suiteClass(map(testCaseClass, testCaseNames))
return loaded_suite
def loadTestsFromModule(self, module, use_load_tests=True):
"""Return a suite of all tests cases contained in the given module"""
tests = []
for name in dir(module):
obj = getattr(module, name)
if isinstance(obj, type) and issubclass(obj, unittest.TestCase):
tests.append(self.loadTestsFromTestCase(obj))
load_tests = getattr(module, 'load_tests', None)
tests = self.suiteClass(tests)
if use_load_tests and load_tests is not None:
try:
return load_tests(self, tests, None)
except Exception, e:
return _make_failed_load_tests(module.__name__, e,
self.suiteClass)
return tests
def loadTestsFromName(self, name, module=None):
"""Return a suite of all tests cases given a string specifier.
The name may resolve either to a module, a test case class, a
test method within a test case class, or a callable object which
returns a TestCase or TestSuite instance.
The method optionally resolves the names relative to a given module.
"""
parts = name.split('.')
if module is None:
parts_copy = parts[:]
while parts_copy:
try:
module = __import__('.'.join(parts_copy))
break
except ImportError:
del parts_copy[-1]
if not parts_copy:
raise
parts = parts[1:]
obj = module
for part in parts:
parent, obj = obj, getattr(obj, part)
if isinstance(obj, types.ModuleType):
return self.loadTestsFromModule(obj)
elif isinstance(obj, type) and issubclass(obj, unittest.TestCase):
return self.loadTestsFromTestCase(obj)
elif (isinstance(obj, types.UnboundMethodType) and
isinstance(parent, type) and
issubclass(parent, case.TestCase)):
return self.suiteClass([parent(obj.__name__)])
elif isinstance(obj, unittest.TestSuite):
return obj
elif hasattr(obj, '__call__'):
test = obj()
if isinstance(test, unittest.TestSuite):
return test
elif isinstance(test, unittest.TestCase):
return self.suiteClass([test])
else:
raise TypeError("calling %s returned %s, not a test" %
(obj, test))
else:
raise TypeError("don't know how to make test from: %s" % obj)
def loadTestsFromNames(self, names, module=None):
"""Return a suite of all tests cases found using the given sequence
of string specifiers. See 'loadTestsFromName()'.
"""
suites = [self.loadTestsFromName(name, module) for name in names]
return self.suiteClass(suites)
def getTestCaseNames(self, testCaseClass):
"""Return a sorted sequence of method names found within testCaseClass
"""
def isTestMethod(attrname, testCaseClass=testCaseClass,
prefix=self.testMethodPrefix):
return attrname.startswith(prefix) and \
hasattr(getattr(testCaseClass, attrname), '__call__')
testFnNames = filter(isTestMethod, dir(testCaseClass))
if self.sortTestMethodsUsing:
testFnNames.sort(key=_CmpToKey(self.sortTestMethodsUsing))
return testFnNames
def discover(self, start_dir, pattern='test*.py', top_level_dir=None):
"""Find and return all test modules from the specified start
directory, recursing into subdirectories to find them. Only test files
that match the pattern will be loaded. (Using shell style pattern
matching.)
All test modules must be importable from the top level of the project.
If the start directory is not the top level directory then the top
level directory must be specified separately.
If a test package name (directory with '__init__.py') matches the
pattern then the package will be checked for a 'load_tests' function. If
this exists then it will be called with loader, tests, pattern.
If load_tests exists then discovery does *not* recurse into the package,
load_tests is responsible for loading all tests in the package.
The pattern is deliberately not stored as a loader attribute so that
packages can continue discovery themselves. top_level_dir is stored so
load_tests does not need to pass this argument in to loader.discover().
"""
set_implicit_top = False
if top_level_dir is None and self._top_level_dir is not None:
# make top_level_dir optional if called from load_tests in a package
top_level_dir = self._top_level_dir
elif top_level_dir is None:
set_implicit_top = True
top_level_dir = start_dir
top_level_dir = os.path.abspath(top_level_dir)
if not top_level_dir in sys.path:
# all test modules must be importable from the top level directory
# should we *unconditionally* put the start directory in first
# in sys.path to minimise likelihood of conflicts between installed
# modules and development versions?
sys.path.insert(0, top_level_dir)
self._top_level_dir = top_level_dir
is_not_importable = False
if os.path.isdir(os.path.abspath(start_dir)):
start_dir = os.path.abspath(start_dir)
if start_dir != top_level_dir:
is_not_importable = not os.path.isfile(os.path.join(start_dir, '__init__.py'))
else:
# support for discovery from dotted module names
try:
__import__(start_dir)
except ImportError:
is_not_importable = True
else:
the_module = sys.modules[start_dir]
top_part = start_dir.split('.')[0]
start_dir = os.path.abspath(os.path.dirname((the_module.__file__)))
if set_implicit_top:
self._top_level_dir = os.path.abspath(os.path.dirname(os.path.dirname(sys.modules[top_part].__file__)))
sys.path.remove(top_level_dir)
if is_not_importable:
raise ImportError('Start directory is not importable: %r' % start_dir)
tests = list(self._find_tests(start_dir, pattern))
return self.suiteClass(tests)
def _get_name_from_path(self, path):
path = os.path.splitext(os.path.normpath(path))[0]
_relpath = relpath(path, self._top_level_dir)
assert not os.path.isabs(_relpath), "Path must be within the project"
assert not _relpath.startswith('..'), "Path must be within the project"
name = _relpath.replace(os.path.sep, '.')
return name
def _get_module_from_name(self, name):
__import__(name)
return sys.modules[name]
def _match_path(self, path, full_path, pattern):
# override this method to use alternative matching strategy
return fnmatch(path, pattern)
def _find_tests(self, start_dir, pattern):
"""Used by discovery. Yields test suites it loads."""
paths = os.listdir(start_dir)
for path in paths:
full_path = os.path.join(start_dir, path)
if os.path.isfile(full_path):
if not VALID_MODULE_NAME.match(path):
# valid Python identifiers only
continue
if not self._match_path(path, full_path, pattern):
continue
# if the test file matches, load it
name = self._get_name_from_path(full_path)
try:
module = self._get_module_from_name(name)
except:
yield _make_failed_import_test(name, self.suiteClass)
else:
mod_file = os.path.abspath(getattr(module, '__file__', full_path))
realpath = os.path.splitext(mod_file)[0]
fullpath_noext = os.path.splitext(full_path)[0]
if realpath.lower() != fullpath_noext.lower():
module_dir = os.path.dirname(realpath)
mod_name = os.path.splitext(os.path.basename(full_path))[0]
expected_dir = os.path.dirname(full_path)
msg = ("%r module incorrectly imported from %r. Expected %r. "
"Is this module globally installed?")
raise ImportError(msg % (mod_name, module_dir, expected_dir))
yield self.loadTestsFromModule(module)
elif os.path.isdir(full_path):
if not os.path.isfile(os.path.join(full_path, '__init__.py')):
continue
load_tests = None
tests = None
if fnmatch(path, pattern):
# only check load_tests if the package directory itself matches the filter
name = self._get_name_from_path(full_path)
package = self._get_module_from_name(name)
load_tests = getattr(package, 'load_tests', None)
tests = self.loadTestsFromModule(package, use_load_tests=False)
if load_tests is None:
if tests is not None:
# tests loaded from package file
yield tests
# recurse into the package
for test in self._find_tests(full_path, pattern):
yield test
else:
try:
yield load_tests(self, tests, pattern)
except Exception, e:
yield _make_failed_load_tests(package.__name__, e,
self.suiteClass)
defaultTestLoader = TestLoader()
def _makeLoader(prefix, sortUsing, suiteClass=None):
loader = TestLoader()
loader.sortTestMethodsUsing = sortUsing
loader.testMethodPrefix = prefix
if suiteClass:
loader.suiteClass = suiteClass
return loader
def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp):
return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass)
def makeSuite(testCaseClass, prefix='test', sortUsing=cmp,
suiteClass=suite.TestSuite):
return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass)
def findTestCases(module, prefix='test', sortUsing=cmp,
suiteClass=suite.TestSuite):
return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module)
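# Hedged usage sketch (not part of the original module); the 'tests' directory is
# hypothetical:
# loader = TestLoader()
# suite = loader.discover('tests', pattern='test*.py')
# unittest.TextTestRunner().run(suite)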
|
|
# Copyright (c) 2015-2016, the authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from GPy.util.linalg import jitchol, backsub_both_sides, tdot, dtrtrs, dtrtri,pdinv
from GPy.util import diag
from GPy.core.parameterization.variational import VariationalPosterior
import numpy as np
from GPy.inference.latent_function_inference import LatentFunctionInference
from GPy.inference.latent_function_inference.posterior import Posterior
log_2_pi = np.log(2*np.pi)
try:
from mpi4py import MPI
except:
pass
class VarDTC(LatentFunctionInference):
"""
An object for inference when the likelihood is Gaussian, but we want to do sparse inference.
The function self.inference returns a Posterior object, which summarizes
the posterior.
For efficiency, we sometimes work with the cholesky of Y*Y.T. To save repeatedly recomputing this, we cache it.
"""
const_jitter = 1e-6
def __init__(self, mpi_comm=None):
self.mpi_comm = mpi_comm
self.Y_speedup = False # Replace Y with the Cholesky factor of YY.T; the computation of the posterior object will then be skipped.
def get_trYYT(self, Y):
return np.sum(np.square(Y))
def get_YYTfactor(self, Y):
N, D = Y.shape
if (N>=D):
return Y.view(np.ndarray)
else:
return jitchol(tdot(Y))
def gatherPsiStat(self, kern, X, Z, Y, beta, uncertain_inputs):
assert beta.size == 1
if uncertain_inputs:
psi0 = kern.psi0(Z, X)
psi1 = kern.psi1(Z, X)*beta
psi2 = kern.psi2(Z, X)*beta
else:
psi0 = kern.Kdiag(X)
psi1 = kern.K(X, Z)
psi2 = np.dot(psi1.T,psi1)*beta
psi1 = psi1*beta
psi0 = psi0.sum()*beta
if isinstance(Y, VariationalPosterior):
m, s = Y.mean, Y.variance
psi1Y = np.dot(m.T,psi1) # DxM
Shalf = np.sqrt(s.sum(axis=1))
psi1S = Shalf[:,None]*psi1
YRY = (np.square(m).sum()+s.sum())*beta
else:
psi1Y = np.dot(Y.T,psi1) # DxM
trYYT = self.get_trYYT(Y)
YRY = trYYT*beta
psi1S = None
Shalf = None
return psi0, psi2, YRY, psi1, psi1Y, Shalf, psi1S
def inference(self, kern, X, Z, likelihood, Y, Y_metadata=None, Lm=None, dL_dKmm=None, Kuu_sigma=None):
"""
The first phase of inference:
Compute: log-likelihood, dL_dKmm
Cached intermediate results: Kmm, KmmInv,
"""
num_data, output_dim = Y.shape
input_dim = Z.shape[0]
uncertain_inputs = isinstance(X, VariationalPosterior)
uncertain_outputs = isinstance(Y, VariationalPosterior)
# from ..models.sslvm import Gaussian_Gamma
# if isinstance(likelihood, Gaussian_Gamma):
# beta = likelihood.expectation_beta()
# logL_R = -num_data*likelihood.expectation_logbeta()
# else:
beta = 1./np.fmax(likelihood.variance, 1e-6)
logL_R = -num_data*np.log(beta)
psi0, psi2, YRY, psi1, psi1Y, Shalf, psi1S = self.gatherPsiStat(kern, X, Z, Y, beta, uncertain_inputs)
#======================================================================
# Compute Common Components
#======================================================================
Kmm = kern.K(Z).copy()
if Kuu_sigma is not None:
diag.add(Kmm, Kuu_sigma)
else:
diag.add(Kmm, self.const_jitter)
Lm = jitchol(Kmm)
#LmInv = dtrtri(Lm)
if uncertain_inputs:
LmInvPsi2LmInvT = backsub_both_sides(Lm, psi2, 'right')
else:
LmInvPsi2LmInvT = tdot(dtrtrs(Lm, psi1.T)[0])/beta #tdot(psi1.dot(LmInv.T).T) /beta
Lambda = np.eye(Kmm.shape[0])+LmInvPsi2LmInvT
LL = jitchol(Lambda)
LmLL = Lm.dot(LL)
# LLInv = dtrtri(LL)
# LmLLInv = LLInv.dot(LmInv)
logdet_L = 2.*np.sum(np.log(np.diag(LL)))
b = dtrtrs(LmLL, psi1Y.T)[0].T #psi1Y.dot(LmLLInv.T)
bbt = np.square(b).sum()
v = dtrtrs(LmLL, b.T, trans=1)[0].T #b.dot(LmLLInv)
LLinvPsi1TYYTPsi1LLinvT = tdot(b.T)
if psi1S is not None:
psi1SLLinv = dtrtrs(LmLL, psi1S.T)[0].T #psi1S.dot(LmLLInv.T)
bbt += np.square(psi1SLLinv).sum()
LLinvPsi1TYYTPsi1LLinvT += tdot(psi1SLLinv.T)
psi1SP = dtrtrs(LmLL, psi1SLLinv.T, trans=1)[0].T #psi1SLLinv.dot(LmLLInv)
tmp = -backsub_both_sides(LL, LLinvPsi1TYYTPsi1LLinvT+output_dim*np.eye(input_dim))
dL_dpsi2R = backsub_both_sides(Lm, tmp+output_dim*np.eye(input_dim))/2
#tmp = -LLInv.T.dot(LLinvPsi1TYYTPsi1LLinvT+output_dim*np.eye(input_dim)).dot(LLInv)
#dL_dpsi2R = LmInv.T.dot(tmp+output_dim*np.eye(input_dim)).dot(LmInv)/2.
#======================================================================
# Compute log-likelihood
#======================================================================
logL = -(output_dim*(num_data*log_2_pi+logL_R+psi0-np.trace(LmInvPsi2LmInvT))+YRY- bbt)/2.-output_dim*logdet_L/2.
#======================================================================
# Compute dL_dKmm
#======================================================================
dL_dKmm = dL_dpsi2R - output_dim* backsub_both_sides(Lm, LmInvPsi2LmInvT)/2 #LmInv.T.dot(LmInvPsi2LmInvT).dot(LmInv)/2.
#======================================================================
# Compute the Posterior distribution of inducing points p(u|Y)
#======================================================================
LLInvLmT = dtrtrs(LL, Lm.T)[0]
cov = tdot(LLInvLmT.T)
wd_inv = backsub_both_sides(Lm, np.eye(input_dim)- backsub_both_sides(LL, np.identity(input_dim), transpose='left'), transpose='left')
post = Posterior(woodbury_inv=wd_inv, woodbury_vector=v.T, K=Kmm, mean=None, cov=cov, K_chol=Lm)
#======================================================================
# Compute dL_dthetaL for uncertain inputs and non-heteroscedastic noise
#======================================================================
# if isinstance(likelihood, Gaussian_Gamma):
# from scipy.special import polygamma
# dL_dthetaL = ((YRY + output_dim*psi0)/2. - (dL_dpsi2R*psi2).sum() - np.trace(LLinvPsi1TYYTPsi1LLinvT))/-beta
# likelihood.q_a.gradient = num_data*output_dim/2.*polygamma(1, likelihood.q_a) + dL_dthetaL/likelihood.q_b
# likelihood.q_b.gradient = num_data*output_dim/(-2.*likelihood.q_b) +dL_dthetaL*(-likelihood.q_a/(likelihood.q_b*likelihood.q_b))
# else:
dL_dthetaL = (YRY*beta + beta*output_dim*psi0 - num_data*output_dim*beta)/2. - beta*(dL_dpsi2R*psi2).sum() - beta*np.trace(LLinvPsi1TYYTPsi1LLinvT)
#======================================================================
# Compute dL_dpsi
#======================================================================
dL_dpsi0 = -output_dim * (beta * np.ones((num_data,)))/2.
if uncertain_outputs:
m,s = Y.mean, Y.variance
dL_dpsi1 = beta*(np.dot(m,v)+Shalf[:,None]*psi1SP)
else:
dL_dpsi1 = beta*np.dot(Y,v)
if uncertain_inputs:
dL_dpsi2 = beta* dL_dpsi2R
else:
dL_dpsi1 += np.dot(psi1,dL_dpsi2R)*2.
dL_dpsi2 = None
if uncertain_inputs:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dpsi0':dL_dpsi0,
'dL_dpsi1':dL_dpsi1,
'dL_dpsi2':dL_dpsi2,
'dL_dthetaL':dL_dthetaL}
else:
grad_dict = {'dL_dKmm': dL_dKmm,
'dL_dKdiag':dL_dpsi0,
'dL_dKnm':dL_dpsi1,
'dL_dthetaL':dL_dthetaL}
if uncertain_outputs:
m,s = Y.mean, Y.variance
psi1LmiLLi = dtrtrs(LmLL, psi1.T)[0].T
LLiLmipsi1Y = b.T
grad_dict['dL_dYmean'] = -m*beta+ psi1LmiLLi.dot(LLiLmipsi1Y)
grad_dict['dL_dYvar'] = beta/-2.+ np.square(psi1LmiLLi).sum(axis=1)/2
return post, logL, grad_dict
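# Hedged usage sketch (not part of the original module): kern, X, Z, likelihood
# and Y are assumed to come from a GPy sparse-GP model with M inducing points.
# post, logL, grad_dict = VarDTC().inference(kern, X, Z, likelihood, Y)
# grad_dict['dL_dKmm'] has shape (M, M), matching kern.K(Z).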
|
|
''' This module provides functions for embedding Bokeh plots in various
different ways.
There are a number of different combinations of options when embedding
Bokeh plots. The data for the plot can be contained in the document,
or on a Bokeh server, or in a sidecar JavaScript file. Likewise, BokehJS
may be inlined in the document, or loaded from CDN or a Bokeh server.
The functions in ``bokeh.embed`` provide functionality to embed in all
these different cases.
'''
import uuid
from .protocol import serialize_json
from .resources import Resources
from .templates import (
AUTOLOAD, AUTOLOAD_SERVER, AUTOLOAD_STATIC, FILE,
NOTEBOOK_DIV, PLOT_DIV, PLOT_JS, PLOT_SCRIPT, RESOURCES
)
from .utils import encode_utf8
def components(plot_object, resources):
''' Return HTML components to embed a Bokeh plot.
The data for the plot is stored directly in the returned HTML.
.. note:: The returned components assume that BokehJS resources
are **already loaded**.
Args:
plot_object (PlotObject) : Bokeh object to render
typically a Plot or PlotContext
resources (Resources, optional) : BokehJS resources config
Returns:
(script, div) : UTF-8 encoded
'''
ref = plot_object.ref
elementid = str(uuid.uuid4())
js = PLOT_JS.render(
elementid = elementid,
modelid = ref["id"],
modeltype = ref["type"],
all_models = serialize_json(plot_object.dump()),
)
script = PLOT_SCRIPT.render(
plot_js = resources.js_wrapper(js),
)
div = PLOT_DIV.render(elementid=elementid)
return encode_utf8(script), encode_utf8(div)
def notebook_div(plot_object):
''' Return HTML for a div that will display a Bokeh plot in an
IPython Notebook
The data for the plot is stored directly in the returned HTML.
Args:
plot_object (PlotObject) : Bokeh object to render
typically a Plot or PlotContext
Returns:
div : UTF-8 encoded HTML text
.. note:: Assumes ``bokeh.load_notebook()`` or the equivalent has
already been executed.
'''
ref = plot_object.ref
resources = Resources()
elementid = str(uuid.uuid4())
js = PLOT_JS.render(
elementid = elementid,
modelid = ref["id"],
modeltype = ref["type"],
all_models = serialize_json(plot_object.dump()),
)
script = PLOT_SCRIPT.render(
plot_js = resources.js_wrapper(js),
)
div = PLOT_DIV.render(elementid=elementid)
html = NOTEBOOK_DIV.render(
plot_script = script,
plot_div = div,
)
return encode_utf8(html)
def file_html(plot_object, resources, title, template=FILE):
''' Return an HTML document that embeds a Bokeh plot.
The data for the plot is stored directly in the returned HTML.
Args:
plot_object (PlotObject) : Bokeh object to render
typically a Plot or PlotContext
resources (Resources) : a resource configuration for BokehJS assets
title (str) : a title for the HTML document ``<title>`` tags
template (Template, optional) : HTML document template (default: FILE)
A Jinja2 Template, see bokeh.templates.FILE for the required
template parameters
Returns:
html : standalone HTML document with embedded plot
'''
plot_resources = RESOURCES.render(
js_raw = resources.js_raw,
css_raw = resources.css_raw,
js_files = resources.js_files,
css_files = resources.css_files,
)
script, div = components(plot_object, resources)
html = template.render(
title = title,
plot_resources = plot_resources,
plot_script = script,
plot_div = div,
)
return encode_utf8(html)
def autoload_static(plot_object, resources, script_path):
''' Return JavaScript code and a script tag that can be used to embed
Bokeh Plots.
The data for the plot is stored directly in the returned JavaScript code.
Args:
plot_object (PlotObject) :
resources (Resources) :
script_path (str) :
Returns:
(js, tag) :
JavaScript code to be saved at ``script_path`` and a ``<script>``
tag to load it
Raises:
ValueError
'''
if resources.mode == 'inline':
raise ValueError("autoload_static() requires non-inline resources")
if resources.dev:
raise ValueError("autoload_static() only works with non-dev resources")
elementid = str(uuid.uuid4())
js = AUTOLOAD.render(
all_models = serialize_json(plot_object.dump()),
js_url = resources.js_files[0],
css_files = resources.css_files,
elementid = elementid,
)
tag = AUTOLOAD_STATIC.render(
src_path = script_path,
elementid = elementid,
modelid = plot_object._id,
modeltype = plot_object.__view_model__,
loglevel = resources.log_level,
)
return encode_utf8(js), encode_utf8(tag)
def autoload_server(plot_object, session):
''' Return a script tag that can be used to embed Bokeh Plots from
a Bokeh Server.
The data for the plot is stored on the Bokeh Server.
Args:
plot_object (PlotObject) :
session (session) :
Returns:
tag :
a ``<script>`` tag that will execute an autoload script
loaded from the Bokeh Server
'''
elementid = str(uuid.uuid4())
resources = Resources(root_url=session.root_url, mode="server")
tag = AUTOLOAD_SERVER.render(
src_path = resources._autoload_path(elementid),
elementid = elementid,
modelid = plot_object._id,
root_url = resources.root_url,
docid = session.docid,
docapikey = session.apikey,
loglevel = resources.log_level,
)
return encode_utf8(tag)
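# Hedged usage sketch (not part of the original module); `plot` is a hypothetical
# PlotObject and CDN resources are assumed:
# from bokeh.resources import Resources
# script, div = components(plot, Resources(mode='cdn'))
# html_fragment = script + div  # embed both into a page that already loads BokehJS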
|
|
# -*- coding: utf-8 -*-
from __future__ import (print_function, unicode_literals, absolute_import,
division)
from pusher.http import GET, POST, Request, request_method
from pusher.signature import sign, verify
from pusher.util import ensure_text, validate_channel, validate_socket_id, app_id_re, pusher_url_re, channel_name_re
import collections
import hashlib
import json
import os
import re
import six
import time
def join_attributes(attributes):
return six.text_type(',').join(attributes)
class Pusher(object):
"""Client for the Pusher HTTP API.
This client supports various backend adapters to support various http
libraries available in the python ecosystem.
:param app_id: a pusher application identifier
:param key: a pusher application key
:param secret: a pusher application secret token
:param ssl: Whether to use SSL or plain HTTP
:param host: Used for custom host destination
:param port: Used for custom port destination
:param timeout: Request timeout (in seconds)
:param cluster: Convention for clusters other than the main Pusher one.
Eg: 'eu' will resolve to the api-eu.pusher.com host
:param backend: an http adapter class (AsyncIOBackend, RequestsBackend, SynchronousBackend, TornadoBackend)
:param backend_options: additional options passed through to the backend constructor
"""
def __init__(self, app_id, key, secret, ssl=True, host=None, port=None, timeout=5, cluster=None,
json_encoder=None, json_decoder=None, backend=None, **backend_options):
if backend is None:
from pusher.requests import RequestsBackend
backend = RequestsBackend
self._app_id = ensure_text(app_id, "app_id")
if not app_id_re.match(self._app_id):
raise ValueError("Invalid app id")
self._key = ensure_text(key, "key")
self._secret = ensure_text(secret, "secret")
if not isinstance(ssl, bool):
raise TypeError("SSL should be a boolean")
self._ssl = ssl
if host:
self._host = ensure_text(host, "host")
elif cluster:
self._host = six.text_type("api-%s.pusher.com") % ensure_text(cluster, "cluster")
else:
self._host = six.text_type("api.pusherapp.com")
if port and not isinstance(port, six.integer_types):
raise TypeError("port should be an integer")
self._port = port or (443 if ssl else 80)
if not isinstance(timeout, six.integer_types):
raise TypeError("timeout should be an integer")
self._timeout = timeout
self._json_encoder = json_encoder
self._json_decoder = json_decoder
self.http = backend(self, **backend_options)
@classmethod
def from_url(cls, url, **options):
"""Alternate constructor that extracts the information from a URL.
:param url: String containing a URL
Usage::
>> from pusher import Pusher
>> p = Pusher.from_url("http://mykey:[email protected]/apps/432")
"""
m = pusher_url_re.match(ensure_text(url, "url"))
if not m:
raise Exception("Unparsable url: %s" % url)
ssl = m.group(1) == 'https'
options_ = {
'key': m.group(2),
'secret': m.group(3),
'host': m.group(4),
'app_id': m.group(5),
'ssl': ssl,
}
options_.update(options)
return cls(**options_)
@classmethod
def from_env(cls, env='PUSHER_URL', **options):
"""Alternate constructor that extracts the information from an URL
stored in an environment variable. The pusher heroku addon will set
the PUSHER_URL automatically when installed for example.
:param env: Name of the environment variable
Usage::
>> from pusher import Pusher
>> c = Pusher.from_env("PUSHER_URL")
"""
val = os.environ.get(env)
if not val:
raise Exception("Environment variable %s not found" % env)
return cls.from_url(val, **options)
@request_method
def trigger(self, channels, event_name, data, socket_id=None):
'''
Trigger an event on one or more channels, see:
http://pusher.com/docs/rest_api#method-post-event
'''
if isinstance(channels, six.string_types):
channels = [channels]
if isinstance(channels, dict) or not isinstance(channels, (collections.Sized, collections.Iterable)):
raise TypeError("Expected a single or a list of channels")
if len(channels) > 10:
raise ValueError("Too many channels")
channels = list(map(validate_channel, channels))
event_name = ensure_text(event_name, "event_name")
if len(event_name) > 200:
raise ValueError("event_name too long")
if isinstance(data, six.string_types):
data = ensure_text(data, "data")
else:
data = json.dumps(data, cls=self._json_encoder)
if len(data) > 10240:
raise ValueError("Too much data")
params = {
'name': event_name,
'channels': channels,
'data': data
}
if socket_id:
params['socket_id'] = validate_socket_id(socket_id)
return Request(self, POST, "/apps/%s/events" % self.app_id, params)
@request_method
def channels_info(self, prefix_filter=None, attributes=[]):
'''
Get information on multiple channels, see:
http://pusher.com/docs/rest_api#method-get-channels
'''
params = {}
if attributes:
params['info'] = join_attributes(attributes)
if prefix_filter:
params['filter_by_prefix'] = ensure_text(prefix_filter, "prefix_filter")
return Request(self, GET, six.text_type("/apps/%s/channels") % self.app_id, params)
@request_method
def channel_info(self, channel, attributes=[]):
'''
Get information on a specific channel, see:
http://pusher.com/docs/rest_api#method-get-channel
'''
validate_channel(channel)
params = {}
if attributes:
params['info'] = join_attributes(attributes)
return Request(self, GET, "/apps/%s/channels/%s" % (self.app_id, channel), params)
@request_method
def users_info(self, channel):
'''
Fetch user ids currently subscribed to a presence channel
http://pusher.com/docs/rest_api#method-get-users
'''
validate_channel(channel)
return Request(self, GET, "/apps/%s/channels/%s/users" % (self.app_id, channel))
def authenticate(self, channel, socket_id, custom_data=None):
"""Used to generate delegated client subscription token.
:param channel: name of the channel to authorize subscription to
:param socket_id: id of the socket that requires authorization
:param custom_data: used on presence channels to provide user info
"""
channel = validate_channel(channel)
if not channel_name_re.match(channel):
raise ValueError('Channel should be a valid channel, got: %s' % channel)
socket_id = validate_socket_id(socket_id)
if custom_data:
custom_data = json.dumps(custom_data, cls=self._json_encoder)
string_to_sign = "%s:%s" % (socket_id, channel)
if custom_data:
string_to_sign += ":%s" % custom_data
signature = sign(self.secret, string_to_sign)
auth = "%s:%s" % (self.key, signature)
result = {'auth': auth}
if custom_data:
result['channel_data'] = custom_data
return result
def validate_webhook(self, key, signature, body):
"""Used to validate incoming webhook messages. When used it guarantees
that the sender is Pusher and not someone else impersonating it.
:param key: key used to sign the body
:param signature: signature that was given with the body
:param body: content that needs to be verified
"""
key = ensure_text(key, "key")
signature = ensure_text(signature, "signature")
body = ensure_text(body, "body")
if key != self.key:
return None
if not verify(self.secret, body, signature):
return None
try:
body_data = json.loads(body, cls=self._json_decoder)
except ValueError:
return None
time_ms = body_data.get('time_ms')
if not time_ms:
return None
if abs(time.time()*1000 - time_ms) > 300000:
return None
return body_data
@property
def app_id(self):
return self._app_id
@property
def key(self):
return self._key
@property
def secret(self):
return self._secret
@property
def host(self):
return self._host
@property
def port(self):
return self._port
@property
def timeout(self):
return self._timeout
@property
def ssl(self):
return self._ssl
@property
def scheme(self):
return 'https' if self.ssl else 'http'
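# Hedged usage sketch (not part of the original module); the credentials and
# channel names are hypothetical:
# pusher = Pusher(app_id='4', key='some-key', secret='some-secret')
# pusher.trigger('my-channel', 'my-event', {'message': 'hello world'})
# auth = pusher.authenticate('presence-room', '1234.5678', {'user_id': '1'})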
|
|
#!/usr/bin/env python
# -- coding: utf-8 --
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import re
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # Python 2.6
from django.http import Http404
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_POST
from desktop.lib.django_util import JsonResponse
from desktop.lib.i18n import force_unicode, smart_str
from metadata.conf import has_navigator, NAVIGATOR, has_navigator_file_search
from metadata.navigator_client import NavigatorApi, NavigatorApiException, EntityDoesNotExistException,\
NavigathorAuthException
LOG = logging.getLogger(__name__)
class MetadataApiException(Exception):
pass
def error_handler(view_fn):
def decorator(*args, **kwargs):
status = 200
response = {
'status': -1,
'message': ''
}
try:
if has_navigator(args[0].user):
return view_fn(*args, **kwargs)
else:
raise MetadataApiException('Navigator API is not configured.')
except Http404, e:
raise e
except EntityDoesNotExistException, e:
response['message'] = e.message
response['status'] = -3
status = 200
except NavigathorAuthException, e:
response['message'] = force_unicode(e.message)
response['status'] = -2
except NavigatorApiException, e:
try:
response['message'] = json.loads(e.message)
response['status'] = -2
except Exception:
response['message'] = force_unicode(e.message)
except Exception, e:
status = 500
message = force_unicode(e)
LOG.exception(message)
return JsonResponse(response, status=status)
return decorator
@error_handler
def search_entities(request):
"""
For displaying results.
"""
api = NavigatorApi(request.user)
query_s = json.loads(request.POST.get('query_s', ''))
query_s = smart_str(query_s)
offset = request.POST.get('offset', 0)
limit = int(request.POST.get('limit', 100))
sources = json.loads(request.POST.get('sources') or '[]')
if sources and not has_navigator_file_search(request.user):
sources = ['sql']
query_s = query_s.strip() or '*'
entities = api.search_entities(query_s, limit=limit, offset=offset, sources=sources)
_augment_highlighting(query_s, entities)
response = {
'entities': entities,
'count': len(entities),
'offset': offset,
'limit': limit,
'query_s': query_s,
'status': 0
}
return JsonResponse(response)
@error_handler
def search_entities_interactive(request):
"""
For search autocomplete.
"""
api = NavigatorApi(request.user)
query_s = json.loads(request.POST.get('query_s', ''))
prefix = request.POST.get('prefix')
offset = request.POST.get('offset', 0)
limit = int(request.POST.get('limit', 25))
field_facets = json.loads(request.POST.get('field_facets') or '[]')
sources = json.loads(request.POST.get('sources') or '[]')
if sources and not has_navigator_file_search(request.user):
sources = ['sql']
f = {
"outputFormat" : {
"type" : "dynamic"
},
"name" : {
"type" : "dynamic"
},
"lastModified" : {
"type" : "date"
},
"sourceType" : {
"type" : "dynamic"
},
"parentPath" : {
"type" : "dynamic"
},
"lastAccessed" : {
"type" : "date"
},
"type" : {
"type" : "dynamic"
},
"sourceId" : {
"type" : "dynamic"
},
"partitionColNames" : {
"type" : "dynamic"
},
"serDeName" : {
"type" : "dynamic"
},
"created" : {
"type" : "date"
},
"fileSystemPath" : {
"type" : "dynamic"
},
"compressed" : {
"type" : "bool"
},
"clusteredByColNames" : {
"type" : "dynamic"
},
"originalName" : {
"type" : "dynamic"
},
"owner" : {
"type" : "dynamic"
},
"extractorRunId" : {
"type" : "dynamic"
},
"userEntity" : {
"type" : "bool"
},
"sortByColNames" : {
"type" : "dynamic"
},
"inputFormat" : {
"type" : "dynamic"
},
"serDeLibName" : {
"type" : "dynamic"
},
"originalDescription" : {
"type" : "dynamic"
},
"lastModifiedBy" : {
"type" : "dynamic"
}
}
field_facets = ["tags", "type"] + f.keys()
query_s = query_s.strip() + '*'
last_query_term = [term for term in query_s.split()][-1]
if last_query_term and last_query_term != '*':
last_query_term = last_query_term.rstrip('*')
(fname, fval) = last_query_term.split(':') if ':' in last_query_term else (last_query_term, '')
field_facets = [f for f in field_facets if f.startswith(fname)]
field_facets = field_facets[:5]
response = api.search_entities_interactive(
query_s=query_s,
limit=limit,
offset=offset,
facetFields=field_facets,
facetPrefix=prefix,
facetRanges=None,
firstClassEntitiesOnly=None,
sources=sources
)
if response.get('facets'): # Remove empty facets
for fname, fvalues in response['facets'].items():
if NAVIGATOR.APPLY_SENTRY_PERMISSIONS.get():
fvalues = []
else:
fvalues = sorted([(k, v) for k, v in fvalues.items() if v > 0], key=lambda n: n[1], reverse=True)
response['facets'][fname] = OrderedDict(fvalues)
if ':' in query_s and not response['facets'][fname]:
del response['facets'][fname]
_augment_highlighting(query_s, response.get('results'))
response['status'] = 0
return JsonResponse(response)
def _augment_highlighting(query_s, records):
fs = {}
ts = []
for term in query_s.split():
if ':' in term:
fname, fval = term.split(':', 1)
if fval and fval.strip('*'):
fs[fname] = fval.strip('*')
else:
if term.strip('*'):
ts.append(term.strip('*'))
for record in records:
name = record.get('originalName', '') or ''
record['hue_description'] = ''
record['hue_name'] = record.get('parentPath', '') if record.get('parentPath') else ''
if record.get('parentPath') is None:
record['parentPath'] = ''
if record['hue_name'] and record.get('sourceType', '') != 'S3':
record['hue_name'] = (record['hue_name'].replace('/', '.') + '.').lstrip('.')
record['originalName'] = record['hue_name'] + name # Inserted when selected in autocomplete, full path
record['selectionName'] = name # Use when hovering / selecting a search result
for term in ts:
name = _highlight(term, name)
if record.get('tags'):
_highlight_tags(record, term)
for fname, fval in fs.iteritems(): # e.g. owner:<em>hu</em>e
if record.get(fname, ''):
if fname == 'tags':
_highlight_tags(record, fval)
else:
record['hue_description'] += ' %s:%s' % (fname, _highlight(fval, record[fname]))
originalDescription = record.get('originalDescription', '')
if not record['hue_description'] and originalDescription:
record['hue_description'] = _highlight(term, originalDescription)
record['hue_name'] += name
record['hue_name'] = escape(record['hue_name']).replace('&lt;em&gt;', '<em>').replace('&lt;/em&gt;', '</em>')
record['hue_description'] = escape(record['hue_description']).replace('&lt;em&gt;', '<em>').replace('&lt;/em&gt;', '</em>')
def _highlight(pattern, string):
pattern = re.escape(pattern)
return re.compile('(%s)' % pattern, re.IGNORECASE).sub('<em>\\1</em>', string, count=1)
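# Hedged example (not part of the original module): only the first, case-insensitive
# match is wrapped, e.g. _highlight('cust', 'Customers') -> '<em>Cust</em>omers'.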
def _highlight_tags(record, term):
for tag in record['tags']:
if re.match(term, tag):
record['hue_description'] += ' tags:%s' % _highlight(term, tag)
@error_handler
def list_tags(request):
api = NavigatorApi(request.user)
prefix = request.POST.get('prefix')
offset = request.POST.get('offset', 0)
limit = request.POST.get('limit', 25)
data = api.search_entities_interactive(facetFields=['tags'], facetPrefix=prefix, limit=limit, offset=offset)
response = {
'tags': data['facets']['tags'],
'status': 0
}
return JsonResponse(response)
@error_handler
def find_entity(request):
response = {'status': -1}
api = NavigatorApi(request.user)
entity_type = request.GET.get('type', '')
database = request.GET.get('database', '')
table = request.GET.get('table', '')
name = request.GET.get('name', '')
path = request.GET.get('path', '')
if not entity_type:
raise MetadataApiException("find_entity requires a type value, e.g. - 'database', 'table', 'file'")
if entity_type.lower() == 'database':
if not name:
raise MetadataApiException('get_database requires name param')
response['entity'] = api.get_database(name)
elif entity_type.lower() == 'table' or entity_type.lower() == 'view':
if not database or not name:
raise MetadataApiException('get_table requires database and name param')
is_view = entity_type.lower() == 'view'
response['entity'] = api.get_table(database, name, is_view=is_view)
elif entity_type.lower() == 'field':
if not database or not table or not name:
raise MetadataApiException('get_field requires database, table, and name params')
response['entity'] = api.get_field(database, table, name)
elif entity_type.lower() == 'directory':
if not path:
raise MetadataApiException('get_directory requires path param')
response['entity'] = api.get_directory(path)
elif entity_type.lower() == 'file':
if not path:
raise MetadataApiException('get_file requires path param')
response['entity'] = api.get_file(path)
else:
raise MetadataApiException("type %s is unrecognized" % entity_type)
# Prevent nulls later
if 'tags' in response['entity'] and not response['entity']['tags']:
response['entity']['tags'] = []
response['status'] = 0
return JsonResponse(response)
@error_handler
def suggest(request):
response = {'status': -1}
api = NavigatorApi(request.user)
prefix = request.POST.get('prefix')
suggest = api.suggest(prefix)
response['suggest'] = suggest
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_entity(request):
response = {'status': -1}
api = NavigatorApi(request.user)
entity_id = request.REQUEST.get('id')
if not entity_id:
raise MetadataApiException("get_entity requires an 'id' parameter")
entity = api.get_entity(entity_id)
response['entity'] = entity
response['status'] = 0
return JsonResponse(response)
@require_POST
@error_handler
def add_tags(request):
response = {'status': -1}
api = NavigatorApi(request.user)
entity_id = json.loads(request.POST.get('id', ''))
tags = json.loads(request.POST.get('tags', []))
is_allowed = request.user.has_hue_permission(action='write', app='metadata')
request.audit = {
'allowed': is_allowed,
'operation': 'NAVIGATOR_ADD_TAG',
'operationText': 'Adding tags %s to entity %s' % (tags, entity_id)
}
if not entity_id or not tags or not isinstance(tags, list) or not is_allowed:
response['error'] = _("add_tags requires an 'id' parameter and 'tags' parameter that is a non-empty list of tags")
else:
response['entity'] = api.add_tags(entity_id, tags)
response['status'] = 0
return JsonResponse(response)
@require_POST
@error_handler
def delete_tags(request):
response = {'status': -1}
api = NavigatorApi(request.user)
entity_id = json.loads(request.POST.get('id', ''))
tags = json.loads(request.POST.get('tags', []))
is_allowed = request.user.has_hue_permission(action='write', app='metadata')
request.audit = {
'allowed': is_allowed,
'operation': 'NAVIGATOR_DELETE_TAG',
'operationText': 'Removing tags %s to entity %s' % (tags, entity_id)
}
if not entity_id or not tags or not isinstance(tags, list) or not is_allowed:
response['error'] = _("delete_tags requires an 'id' parameter and 'tags' parameter that is a non-empty list of tags")
else:
response['entity'] = api.delete_tags(entity_id, tags)
response['status'] = 0
return JsonResponse(response)
@require_POST
@error_handler
def update_properties(request):
response = {'status': -1}
api = NavigatorApi(request.user)
entity_id = json.loads(request.POST.get('id', ''))
properties = json.loads(request.POST.get('properties', {}))
is_allowed = request.user.has_hue_permission(action='write', app='metadata')
request.audit = {
'allowed': is_allowed,
'operation': 'NAVIGATOR_UPDATE_PROPERTIES',
'operationText': 'Updating property %s of entity %s' % (properties, entity_id)
}
if not entity_id or not properties or not isinstance(properties, dict) or not is_allowed:
response['error'] = _("update_properties requires an 'id' parameter and 'properties' parameter that is a non-empty dict")
else:
response['entity'] = api.update_properties(entity_id, properties)
response['status'] = 0
return JsonResponse(response)
@require_POST
@error_handler
def delete_properties(request):
response = {'status': -1}
api = NavigatorApi(request.user)
entity_id = json.loads(request.POST.get('id', ''))
keys = json.loads(request.POST.get('keys', []))
is_allowed = request.user.has_hue_permission(action='write', app='metadata')
request.audit = {
'allowed': is_allowed,
'operation': 'NAVIGATOR_DELETE_PROPERTIES',
'operationText': 'Deleting property %s of entity %s' % (keys, entity_id)
}
if not entity_id or not keys or not isinstance(keys, list):
response['error'] = _("delete_properties requires an 'id' parameter and 'keys' parameter that is a non-empty list")
else:
response['entity'] = api.delete_properties(entity_id, keys)
response['status'] = 0
return JsonResponse(response)
@error_handler
def get_lineage(request):
response = {'status': -1, 'inputs': [], 'source_query': '', 'target_queries': [], 'targets': []}
api = NavigatorApi(request.user)
entity_id = request.REQUEST.get('id')
if not entity_id:
raise MetadataApiException("get_lineage requires an 'id' parameter")
lineage = api.get_lineage(entity_id)
entity_name = api.get_entity(entity_id)['originalName'].upper()
response['id'] = entity_id
# TODO: This is a cheat way to do this for the demo, using filtering, but we should really traverse relationships
parent_operation = next((entity for entity in lineage['entities'] if entity.get('outputs', []) == [entity_name]), None)
if parent_operation:
response['inputs'] = [input.lower() for input in parent_operation['inputs']]
response['source_query'] = parent_operation.get('queryText', '')
children = [entity for entity in lineage['entities'] if entity.get('inputs') is not None and entity_name in entity.get('inputs')]
if children is not None:
response['target_queries'] = [child['queryText'] for child in children if child.get('queryText') is not None]
outputs = [child['outputs'] for child in children if child.get('outputs') is not None]
response['targets'] = [target.lower() for output in outputs for target in output]
response['status'] = 0
return JsonResponse(response)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from Crypto.Cipher import AES
import base64
import time
import socket
import string
import hashlib
import shutil
import os
from log import LogFactory
BS = 16
log = LogFactory().get_log(__name__)
def unpad(s): return s[0:-ord(s[-1])]
def current_milli_time(): return int(round(time.time() * 1000))
def pad(s): return s + (BS - len(s) % BS) * chr(BS - len(s) % BS)
def decrypt_password(pass_str, secret):
"""
Decrypts the given password using the given secret. The encryption is assumed to have
been done with AES in ECB mode (no IV).
:param str pass_str: Encrypted password string in Base64 encoding
:param str secret: The secret string
:return: The decrypted password
:rtype: str
"""
if pass_str is None:
return pass_str
if pass_str.strip() == "":
return pass_str.strip()
dec_pass = ""
try:
log.debug("Decrypting cipher text: %s" % pass_str)
bdecoded_pass = base64.b64decode(pass_str.strip())
# secret length should be 16
cipher = AES.new(secret.strip(), AES.MODE_ECB)
# dec_pass = unpad(cipher.decrypt(bdecoded_pass))
dec_pass = cipher.decrypt(bdecoded_pass)
except:
log.exception("Exception occurred while decrypting password")
# remove the non-printable padding characters left in the decrypted password
dec_pass = filter(lambda x: x in string.printable, dec_pass)
# dec_pass_md5 = hashlib.md5(dec_pass.encode('utf-8')).hexdigest()
# log.debug("Decrypted password md5sum: [%r]" % dec_pass_md5)
return dec_pass
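# Illustrative sketch (not part of the original agent code): the encryption counterpart that
# decrypt_password() assumes -- pad() above, AES in ECB mode with a 16-character secret,
# then Base64 encoding.
def _example_encrypt_password(plain_str, secret):
    cipher = AES.new(secret.strip(), AES.MODE_ECB)
    return base64.b64encode(cipher.encrypt(pad(plain_str)))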
def wait_until_ports_active(ip_address, ports, ports_check_timeout=600000):
"""
Blocks until the given list of ports become active
:param str ip_address: Ip address of the member to be checked
:param list[str] ports: List of ports to be checked
:param int ports_check_timeout: The timeout in milliseconds, defaults to 1000*60*10
:return: True if all ports became active within the timeout, False otherwise
:rtype: bool
"""
if ports_check_timeout is None:
ports_check_timeout = 1000 * 60 * 10
log.debug("Port check timeout: %s" % ports_check_timeout)
ports_left = ports
start_time = current_milli_time()
# check ports until all are active or timeout exceeds
while True:
log.info("Waiting for ports to be active: [ip] %s [ports] %s" % (ip_address, ports))
# check each port for activity
for checking_port in list(ports_left):
port_active = check_port_active(ip_address, checking_port)
if port_active:
log.debug("Port %s on host %s active" % (checking_port, ip_address))
ports_left.remove(checking_port)
# if no ports are left to check for activity, return
if len(ports_left) == 0:
log.info("Ports activated: [ip] %r [ports] %r" % (ip_address, ports))
return True
# active = check_ports_active(ip_address, ports)
end_time = current_milli_time()
duration = end_time - start_time
if duration > ports_check_timeout:
log.info("Port check timeout reached: [ip] %s [ports] %s [timeout] %s"
% (ip_address, ports, ports_check_timeout))
return False
time.sleep(5)
def check_port_active(ip_address, port):
"""
Checks the given port on the given host for activity
:param str ip_address: Ip address of the member to be checked
:param str port: The port to be checked
:return: True if the port is active, False otherwise
:rtype: bool
"""
if port is None:
raise RuntimeError("Cannot check invalid port for activity")
try:
port_int = int(port)
except ValueError:
raise RuntimeError("Cannot check invalid port for activity %s" % port)
s = socket.socket()
s.settimeout(5)
try:
s.connect((ip_address, port_int))
log.debug("Port %s is active" % port)
s.close()
return True
except socket.error:
log.debug("Port %s is not active" % port)
return False
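# Illustrative usage sketch (hypothetical IP address and port numbers): waits up to 30 seconds
# for the given ports to accept connections. Note that wait_until_ports_active() removes entries
# from the list it is given, so pass a copy if the caller still needs the original list.
def _example_wait_for_local_ports():
    return wait_until_ports_active("127.0.0.1", ["22", "8080"], ports_check_timeout=30 * 1000)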
class Utils(object):
@staticmethod
def directory_exists(dir):
"""
Checks if the given directory exists
:param dir: directory to check
:return: True if the directory dir exists, else False
:rtype: bool
"""
try:
return os.path.isdir(dir)
except OSError as e:
log.error("Unable to check directory existance [%s]" % e)
return False
@staticmethod
def copy_directory(src, destination):
"""
Copies the directory 'src' to 'destination'
:param src: location of directory to copy
:param destination: new directory location
"""
try:
shutil.copytree(src, destination)
log.debug("Directory [%s] copied to [%s]" % (src, destination))
except OSError as e:
log.error('Directory not copied. Error: %s' % e)
@staticmethod
def strip_trailing_slash(string):
"""
If the string has a trailing '/', removes it
:param string: string to check
:return: string without a trailing '/'
:rtype: string
"""
if string.endswith('/'):
return string[:-1]
return string
@staticmethod
def move_directory(src, destination):
"""
Moves the directory 'src' to 'destination'
:param src: location of directory to move
:param destination: new directory location
"""
try:
shutil.move(src, destination)
log.debug("Directory [%s] moved to [%s]" % (src, destination))
except OSError as e:
log.error('Directory not moved. Error: %s' % e)
@staticmethod
def print_dictionary(dic):
for x in dic:
print (x)
for y in dic[x]:
print (y, ':', dic[x][y])
class IncrementalCeilingListIterator(object):
"""
Iterates through a given list of intervals and returns its elements. When the end of the list is
reached, the last element is returned repeatedly if terminate_at_end is False; if
terminate_at_end is True, an IndexError is raised instead.
"""
def __init__(self, intervals, terminate_at_end):
self.__intervals = intervals
self.__index = 0
self.__terminate_at_end = terminate_at_end
def get_next_retry_interval(self):
"""
Retrieves the next element in the list.
:return:
:rtype: int
"""
if self.__index < len(self.__intervals):
next_interval = self.__intervals[self.__index]
self.__index += 1
else:
if self.__terminate_at_end:
raise IndexError("Reached the end of the list")
else:
next_interval = self.__intervals[-1]
return next_interval
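# Illustrative usage sketch: a capped retry backoff. With terminate_at_end=False the last
# interval (30) is repeated once the list is exhausted.
def _example_retry_intervals():
    backoff = IncrementalCeilingListIterator([2, 5, 10, 30], False)
    return [backoff.get_next_retry_interval() for _ in range(6)]  # [2, 5, 10, 30, 30, 30]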
|
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .cluster_service import (
AcceleratorConfig,
AddonsConfig,
AdvancedMachineFeatures,
AuthenticatorGroupsConfig,
Autopilot,
AutoprovisioningNodePoolDefaults,
AutoUpgradeOptions,
BinaryAuthorization,
CancelOperationRequest,
ClientCertificateConfig,
CloudRunConfig,
Cluster,
ClusterAutoscaling,
ClusterTelemetry,
ClusterUpdate,
CompleteIPRotationRequest,
ConfidentialNodes,
ConfigConnectorConfig,
CreateClusterRequest,
CreateNodePoolRequest,
DailyMaintenanceWindow,
DatabaseEncryption,
DefaultSnatStatus,
DeleteClusterRequest,
DeleteNodePoolRequest,
DnsCacheConfig,
DNSConfig,
EphemeralStorageConfig,
GcePersistentDiskCsiDriverConfig,
GcfsConfig,
GcpFilestoreCsiDriverConfig,
GetClusterRequest,
GetJSONWebKeysRequest,
GetJSONWebKeysResponse,
GetNodePoolRequest,
GetOpenIDConfigRequest,
GetOpenIDConfigResponse,
GetOperationRequest,
GetServerConfigRequest,
GkeBackupAgentConfig,
HorizontalPodAutoscaling,
HttpLoadBalancing,
IdentityServiceConfig,
ILBSubsettingConfig,
IntraNodeVisibilityConfig,
IPAllocationPolicy,
IstioConfig,
Jwk,
KalmConfig,
KubernetesDashboard,
LegacyAbac,
LinuxNodeConfig,
ListClustersRequest,
ListClustersResponse,
ListLocationsRequest,
ListLocationsResponse,
ListNodePoolsRequest,
ListNodePoolsResponse,
ListOperationsRequest,
ListOperationsResponse,
ListUsableSubnetworksRequest,
ListUsableSubnetworksResponse,
Location,
LoggingComponentConfig,
LoggingConfig,
MaintenanceExclusionOptions,
MaintenancePolicy,
MaintenanceWindow,
ManagedPrometheusConfig,
Master,
MasterAuth,
MasterAuthorizedNetworksConfig,
MaxPodsConstraint,
MeshCertificates,
MonitoringComponentConfig,
MonitoringConfig,
NetworkConfig,
NetworkPolicy,
NetworkPolicyConfig,
NetworkTags,
NodeConfig,
NodeConfigDefaults,
NodeKubeletConfig,
NodeLabels,
NodeManagement,
NodeNetworkConfig,
NodePool,
NodePoolAutoscaling,
NodePoolDefaults,
NodeTaint,
NodeTaints,
NotificationConfig,
Operation,
OperationProgress,
PodSecurityPolicyConfig,
PrivateClusterConfig,
PrivateClusterMasterGlobalAccessConfig,
RecurringTimeWindow,
ReleaseChannel,
ReservationAffinity,
ResourceLimit,
ResourceUsageExportConfig,
RollbackNodePoolUpgradeRequest,
SandboxConfig,
SecurityBulletinEvent,
ServerConfig,
ServiceExternalIPsConfig,
SetAddonsConfigRequest,
SetLabelsRequest,
SetLegacyAbacRequest,
SetLocationsRequest,
SetLoggingServiceRequest,
SetMaintenancePolicyRequest,
SetMasterAuthRequest,
SetMonitoringServiceRequest,
SetNetworkPolicyRequest,
SetNodePoolAutoscalingRequest,
SetNodePoolManagementRequest,
SetNodePoolSizeRequest,
ShieldedInstanceConfig,
ShieldedNodes,
StartIPRotationRequest,
StatusCondition,
TimeWindow,
TpuConfig,
UpdateClusterRequest,
UpdateMasterRequest,
UpdateNodePoolRequest,
UpgradeAvailableEvent,
UpgradeEvent,
UsableSubnetwork,
UsableSubnetworkSecondaryRange,
VerticalPodAutoscaling,
VirtualNIC,
WindowsVersions,
WorkloadCertificates,
WorkloadIdentityConfig,
WorkloadMetadataConfig,
DatapathProvider,
PrivateIPv6GoogleAccess,
UpgradeResourceType,
)
__all__ = (
"AcceleratorConfig",
"AddonsConfig",
"AdvancedMachineFeatures",
"AuthenticatorGroupsConfig",
"Autopilot",
"AutoprovisioningNodePoolDefaults",
"AutoUpgradeOptions",
"BinaryAuthorization",
"CancelOperationRequest",
"ClientCertificateConfig",
"CloudRunConfig",
"Cluster",
"ClusterAutoscaling",
"ClusterTelemetry",
"ClusterUpdate",
"CompleteIPRotationRequest",
"ConfidentialNodes",
"ConfigConnectorConfig",
"CreateClusterRequest",
"CreateNodePoolRequest",
"DailyMaintenanceWindow",
"DatabaseEncryption",
"DefaultSnatStatus",
"DeleteClusterRequest",
"DeleteNodePoolRequest",
"DnsCacheConfig",
"DNSConfig",
"EphemeralStorageConfig",
"GcePersistentDiskCsiDriverConfig",
"GcfsConfig",
"GcpFilestoreCsiDriverConfig",
"GetClusterRequest",
"GetJSONWebKeysRequest",
"GetJSONWebKeysResponse",
"GetNodePoolRequest",
"GetOpenIDConfigRequest",
"GetOpenIDConfigResponse",
"GetOperationRequest",
"GetServerConfigRequest",
"GkeBackupAgentConfig",
"HorizontalPodAutoscaling",
"HttpLoadBalancing",
"IdentityServiceConfig",
"ILBSubsettingConfig",
"IntraNodeVisibilityConfig",
"IPAllocationPolicy",
"IstioConfig",
"Jwk",
"KalmConfig",
"KubernetesDashboard",
"LegacyAbac",
"LinuxNodeConfig",
"ListClustersRequest",
"ListClustersResponse",
"ListLocationsRequest",
"ListLocationsResponse",
"ListNodePoolsRequest",
"ListNodePoolsResponse",
"ListOperationsRequest",
"ListOperationsResponse",
"ListUsableSubnetworksRequest",
"ListUsableSubnetworksResponse",
"Location",
"LoggingComponentConfig",
"LoggingConfig",
"MaintenanceExclusionOptions",
"MaintenancePolicy",
"MaintenanceWindow",
"ManagedPrometheusConfig",
"Master",
"MasterAuth",
"MasterAuthorizedNetworksConfig",
"MaxPodsConstraint",
"MeshCertificates",
"MonitoringComponentConfig",
"MonitoringConfig",
"NetworkConfig",
"NetworkPolicy",
"NetworkPolicyConfig",
"NetworkTags",
"NodeConfig",
"NodeConfigDefaults",
"NodeKubeletConfig",
"NodeLabels",
"NodeManagement",
"NodeNetworkConfig",
"NodePool",
"NodePoolAutoscaling",
"NodePoolDefaults",
"NodeTaint",
"NodeTaints",
"NotificationConfig",
"Operation",
"OperationProgress",
"PodSecurityPolicyConfig",
"PrivateClusterConfig",
"PrivateClusterMasterGlobalAccessConfig",
"RecurringTimeWindow",
"ReleaseChannel",
"ReservationAffinity",
"ResourceLimit",
"ResourceUsageExportConfig",
"RollbackNodePoolUpgradeRequest",
"SandboxConfig",
"SecurityBulletinEvent",
"ServerConfig",
"ServiceExternalIPsConfig",
"SetAddonsConfigRequest",
"SetLabelsRequest",
"SetLegacyAbacRequest",
"SetLocationsRequest",
"SetLoggingServiceRequest",
"SetMaintenancePolicyRequest",
"SetMasterAuthRequest",
"SetMonitoringServiceRequest",
"SetNetworkPolicyRequest",
"SetNodePoolAutoscalingRequest",
"SetNodePoolManagementRequest",
"SetNodePoolSizeRequest",
"ShieldedInstanceConfig",
"ShieldedNodes",
"StartIPRotationRequest",
"StatusCondition",
"TimeWindow",
"TpuConfig",
"UpdateClusterRequest",
"UpdateMasterRequest",
"UpdateNodePoolRequest",
"UpgradeAvailableEvent",
"UpgradeEvent",
"UsableSubnetwork",
"UsableSubnetworkSecondaryRange",
"VerticalPodAutoscaling",
"VirtualNIC",
"WindowsVersions",
"WorkloadCertificates",
"WorkloadIdentityConfig",
"WorkloadMetadataConfig",
"DatapathProvider",
"PrivateIPv6GoogleAccess",
"UpgradeResourceType",
)
|
|
import logging
from numpy import nan
from collections import Counter
from protmapper import uniprot_client, ProtMapper
from indra.statements import *
from indra.databases import hgnc_client
logger = logging.getLogger(__name__)
# Map of HPRD PTM types to INDRA Statement types
_ptm_map = {
'ADP-Ribosylation': None,
'Acetylation': Acetylation,
'Alkylation': None,
'Amidation': None,
'Carboxylation': None,
'Deacetylation': Deacetylation,
'Deacylation': None,
'Deglycosylation': Deglycosylation,
'Demethylation': Demethylation,
'Dephosphorylation': Dephosphorylation,
'Disulfide Bridge': None,
'Farnesylation': Farnesylation,
'Glucuronosyl transfer': None,
'Glutathionylation': None,
'Glycation': None,
'Glycosyl phosphatidyl inositol GPI': None,
'Glycosylation': Glycosylation,
'Hydroxylation': Hydroxylation,
'Methylation': Methylation,
'Myristoylation': Myristoylation,
'Neddylation': None,
'Nitration': None,
'Palmitoylation': Palmitoylation,
'Phosphorylation': Phosphorylation,
'Prenylation': None,
'Proteolytic Cleavage': None,
'S-Nitrosylation': None,
'Sulfation': None,
'Sumoylation': Sumoylation,
'Transglutamination': None,
'Ubiquitination': Ubiquitination,
}
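# Entries mapped to None have no corresponding INDRA Modification statement type and are
# skipped by HprdProcessor.get_ptms() below.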
class HprdProcessor(object):
"""Get INDRA Statements from HPRD data.
See documentation for `indra.sources.hprd.api.process_flat_files`.
Parameters
----------
id_df : pandas.DataFrame
DataFrame loaded from the HPRD_ID_MAPPINGS.txt file.
cplx_df : pandas.DataFrame
DataFrame loaded from the PROTEIN_COMPLEXES.txt file.
ptm_df : pandas.DataFrame
DataFrame loaded from the POST_TRANSLATIONAL_MODIFICATIONS.txt file.
ppi_df : pandas.DataFrame
DataFrame loaded from the BINARY_PROTEIN_PROTEIN_INTERACTIONS.txt file.
seq_dict : dict
Dictionary mapping RefSeq IDs to protein sequences, loaded from the
PROTEIN_SEQUENCES.txt file.
motif_window : int
Number of flanking amino acids to include on each side of the
PTM target residue in the 'site_motif' annotations field of the
Evidence for Modification Statements. Default is 7.
Attributes
----------
statements : list of INDRA Statements
INDRA Statements (Modifications and Complexes) produced from the
HPRD content.
id_df : pandas.DataFrame
DataFrame loaded from HPRD_ID_MAPPINGS.txt file.
seq_dict : dict
Dictionary mapping RefSeq IDs to protein sequences, loaded from the
PROTEIN_SEQUENCES.txt file.
no_hgnc_for_egid: collections.Counter
Counter listing Entrez gene IDs referenced in the HPRD content that
could not be mapped to a current HGNC ID, along with their frequency.
no_up_for_hgnc : collections.Counter
Counter with tuples of form (entrez_id, hgnc_symbol, hgnc_id) where
the HGNC ID could not be mapped to a Uniprot ID, along with their
frequency.
no_up_for_refseq : collections.Counter
Counter of RefSeq protein IDs that could not be mapped to any Uniprot
ID, along with frequency.
many_ups_for_refseq : collections.Counter
Counter of RefSeq protein IDs that yielded more than one matching
Uniprot ID. Note that in these cases, the Uniprot ID obtained from
HGNC is used.
invalid_site_pos : list of tuples
List of tuples of form (refseq_id, residue, position) indicating sites
of post translational modifications where the protein sequences
provided by HPRD did not contain the given residue at the given
position.
off_by_one : list of tuples
The subset of sites contained in `invalid_site_pos` where the given
residue can be found at position+1 in the HPRD protein sequence,
suggesting an off-by-one error due to numbering based on the
protein with initial methionine cleaved. Note that no mapping is
performed by the processor.
motif_window : int
Number of flanking amino acids to include on each side of the
PTM target residue in the 'site_motif' annotations field of the
Evidence for Modification Statements. Default is 7.
"""
def __init__(self, id_df, cplx_df=None, ptm_df=None, ppi_df=None,
seq_dict=None, motif_window=7):
if cplx_df is None and ptm_df is None and ppi_df is None:
raise ValueError('At least one of cplx_df, ptm_df, or ppi_df must '
'be specified.')
if ptm_df is not None and not seq_dict:
raise ValueError('If ptm_df is given, seq_dict must also be given.')
self.statements = []
self.id_df = id_df
self.seq_dict = seq_dict
self.motif_window = motif_window
# Keep track of the ID mapping issues encountered
self.no_hgnc_for_egid = []
self.no_up_for_hgnc = []
self.no_up_for_refseq = []
self.many_ups_for_refseq = []
self.invalid_site_pos = []
self.off_by_one = []
# Do the actual processing
if cplx_df is not None:
self.get_complexes(cplx_df)
if ptm_df is not None:
self.get_ptms(ptm_df)
if ppi_df is not None:
self.get_ppis(ppi_df)
# Tabulate IDs causing issues
self.no_hgnc_for_egid = Counter(self.no_hgnc_for_egid)
self.no_up_for_hgnc = Counter(self.no_up_for_hgnc)
self.no_up_for_refseq = Counter(self.no_up_for_refseq)
self.many_ups_for_refseq = Counter(self.many_ups_for_refseq)
logger.info('For information on problematic entries encountered while '
'processing, see HprdProcessor attributes: '
'no_hgnc_for_egid, no_up_for_hgnc, no_up_for_refseq, '
'many_ups_for_refseq, invalid_site_pos, and off_by_one.')
def get_complexes(self, cplx_df):
"""Generate Complex Statements from the HPRD protein complexes data.
Parameters
----------
cplx_df : pandas.DataFrame
DataFrame loaded from the PROTEIN_COMPLEXES.txt file.
"""
# Group the agents for the complex
logger.info('Processing complexes...')
for cplx_id, this_cplx in cplx_df.groupby('CPLX_ID'):
agents = []
for hprd_id in this_cplx.HPRD_ID:
ag = self._make_agent(hprd_id)
if ag is not None:
agents.append(ag)
# Make sure we got some agents!
if not agents:
continue
# Get evidence info from first member of complex
row0 = this_cplx.iloc[0]
isoform_id = '%s_1' % row0.HPRD_ID
ev_list = self._get_evidence(row0.HPRD_ID, isoform_id, row0.PMIDS,
row0.EVIDENCE, 'interactions')
stmt = Complex(agents, evidence=ev_list)
self.statements.append(stmt)
def get_ptms(self, ptm_df):
"""Generate Modification statements from the HPRD PTM data.
Parameters
----------
ptm_df : pandas.DataFrame
DataFrame loaded from the POST_TRANSLATIONAL_MODIFICATIONS.txt file.
"""
logger.info('Processing PTMs...')
# Iterate over the rows of the dataframe
for ix, row in ptm_df.iterrows():
# Check the modification type; if we can't make an INDRA statement
# for it, then skip it
ptm_class = _ptm_map[row['MOD_TYPE']]
if ptm_class is None:
continue
# Use the Refseq protein ID for the substrate to make sure that
# we get the right Uniprot ID for the isoform
sub_ag = self._make_agent(row['HPRD_ID'],
refseq_id=row['REFSEQ_PROTEIN'])
# If we couldn't get the substrate, skip the statement
if sub_ag is None:
continue
enz_id = _nan_to_none(row['ENZ_HPRD_ID'])
enz_ag = self._make_agent(enz_id)
res = _nan_to_none(row['RESIDUE'])
pos = _nan_to_none(row['POSITION'])
if pos is not None and ';' in pos:
pos, dash = pos.split(';')
assert dash == '-'
# As a fallback for later site mapping, we also get the protein
# sequence information in case there was a problem with the
# RefSeq->Uniprot mapping
assert res
assert pos
motif_dict = self._get_seq_motif(row['REFSEQ_PROTEIN'], res, pos)
# Get evidence
ev_list = self._get_evidence(
row['HPRD_ID'], row['HPRD_ISOFORM'], row['PMIDS'],
row['EVIDENCE'], 'ptms', motif_dict)
stmt = ptm_class(enz_ag, sub_ag, res, pos, evidence=ev_list)
self.statements.append(stmt)
def get_ppis(self, ppi_df):
"""Generate Complex Statements from the HPRD PPI data.
Parameters
----------
ppi_df : pandas.DataFrame
DataFrame loaded from the BINARY_PROTEIN_PROTEIN_INTERACTIONS.txt
file.
"""
logger.info('Processing PPIs...')
for ix, row in ppi_df.iterrows():
agA = self._make_agent(row['HPRD_ID_A'])
agB = self._make_agent(row['HPRD_ID_B'])
# If we don't get valid agents for both, skip this PPI
if agA is None or agB is None:
continue
isoform_id = '%s_1' % row['HPRD_ID_A']
ev_list = self._get_evidence(
row['HPRD_ID_A'], isoform_id, row['PMIDS'],
row['EVIDENCE'], 'interactions')
stmt = Complex([agA, agB], evidence=ev_list)
self.statements.append(stmt)
def _make_agent(self, hprd_id, refseq_id=None):
if hprd_id is None or hprd_id is nan:
return None
# Get the basic info (HGNC name/symbol, Entrez ID) from the
# ID mappings dataframe
try:
egid = self.id_df.loc[hprd_id].EGID
except KeyError:
logger.info('HPRD ID %s not found in mappings table.' % hprd_id)
return None
if not egid:
logger.info('No Entrez ID for HPRD ID %s' % hprd_id)
return None
# Get the HGNC ID
hgnc_id = hgnc_client.get_hgnc_from_entrez(egid)
# If we couldn't get an HGNC ID for the Entrez ID, this means that
# the Entrez ID has been discontinued or replaced.
if not hgnc_id:
self.no_hgnc_for_egid.append(egid)
return None
# Get the (possibly updated) HGNC Symbol
hgnc_name = hgnc_client.get_hgnc_name(hgnc_id)
assert hgnc_name is not None
# See if we can get a Uniprot ID from the HGNC symbol--if there is
# a RefSeq ID we will also try to use it to get an isoform-specific
# UP ID, but we will have this one to fall back on. But if we can't
# get one here, then we skip the Statement
up_id_from_hgnc = hgnc_client.get_uniprot_id(hgnc_id)
if not up_id_from_hgnc:
self.no_up_for_hgnc.append((egid, hgnc_name, hgnc_id))
return None
# If we have provided the RefSeq ID, it's because we need to make
# sure that we are getting the right isoform-specific ID (for sequence
# positions of PTMs). Here we try to get the Uniprot ID from the
# Refseq->UP mappings in the protmapper.uniprot_client.
if refseq_id is not None:
# Get the Uniprot IDs from the uniprot client
up_ids = uniprot_client.get_ids_from_refseq(refseq_id,
reviewed_only=True)
# Nothing for this RefSeq ID (quite likely because the RefSeq ID
# is obsolete); take the UP ID from HGNC
if len(up_ids) == 0:
self.no_up_for_refseq.append(refseq_id)
up_id = up_id_from_hgnc
# More than one reviewed entry--no thanks, we'll take the one from
# HGNC instead
elif len(up_ids) > 1:
self.many_ups_for_refseq.append(refseq_id)
up_id = up_id_from_hgnc
# We got a unique, reviewed UP entry for the RefSeq ID
else:
up_id = up_ids[0]
# If it's the canonical isoform, strip off the '-1'
if up_id.endswith('-1'):
up_id = up_id.split('-')[0]
# For completeness, get the Refseq ID from the HPRD ID table
else:
refseq_id = self.id_df.loc[hprd_id].REFSEQ_PROTEIN
up_id = up_id_from_hgnc
# Make db_refs, return Agent
db_refs = {'HGNC': hgnc_id, 'UP': up_id, 'EGID': egid,
'REFSEQ_PROT': refseq_id}
return Agent(hgnc_name, db_refs=db_refs)
def _get_evidence(self, hprd_id, isoform_id, pmid_str, evidence_type,
info_type, motif_dict=None):
pmids = pmid_str.split(',')
ev_list = []
if motif_dict is None:
motif_dict = {}
for pmid in pmids:
ev_type_list = [] if evidence_type is nan \
else evidence_type.split(';')
# Add the annotations with the site motif info if relevant
annotations = {'evidence': ev_type_list}
annotations.update(motif_dict)
ev = Evidence(source_api='hprd',
source_id=_hprd_url(hprd_id, isoform_id, info_type),
pmid=pmid, annotations=annotations)
ev_list.append(ev)
return ev_list
def _get_seq_motif(self, refseq_id, residue, pos_str):
seq = self.seq_dict[refseq_id]
pos_1ix = int(pos_str)
pos_0ix = pos_1ix - 1
if seq[pos_0ix] != residue:
self.invalid_site_pos.append((refseq_id, residue, pos_str))
if seq[pos_0ix + 1] == residue:
self.off_by_one.append((refseq_id, residue, pos_str))
motif, respos = \
ProtMapper.motif_from_position_seq(seq, pos_1ix + 1,
self.motif_window)
return {'site_motif': {'motif': motif, 'respos': respos,
'off_by_one': True}}
else:
return {}
else:
# Residue matches the sequence; get the motif window around the site
motif, respos = ProtMapper.motif_from_position_seq(seq, pos_1ix,
self.motif_window)
return {'site_motif': {'motif': motif, 'respos': respos,
'off_by_one': False}}
def _hprd_url(hprd_id, isoform_id, info_type):
if info_type == 'interactions':
return ('http://hprd.org/interactions?hprd_id=%s&isoform_id=%s'
'&isoform_name=Isoform_1' % (hprd_id, isoform_id))
elif info_type == 'ptms':
isoform_num = isoform_id.split('_')[1]
return ('http://hprd.org/ptms?hprd_id=%s&isoform_id=%s'
'&isoform_name=Isoform_%s' % (hprd_id, isoform_id, isoform_num))
else:
raise ValueError('info_type must be either interactions or ptms.')
def _nan_to_none(val):
return None if val is nan else val
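# Illustrative usage sketch: assumes the HPRD flat files have already been loaded into pandas
# DataFrames with the column names used above (id_df indexed by HPRD ID) and that seq_dict maps
# RefSeq protein IDs to sequences; the normal entry point is
# indra.sources.hprd.api.process_flat_files.
def _example_process_hprd(id_df, cplx_df, ptm_df, ppi_df, seq_dict):
    hp = HprdProcessor(id_df, cplx_df=cplx_df, ptm_df=ptm_df, ppi_df=ppi_df,
                       seq_dict=seq_dict, motif_window=7)
    return hp.statements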
|
|
"""Support for Tesla cars."""
import asyncio
from collections import defaultdict
from datetime import timedelta
import logging
import async_timeout
import httpx
from teslajsonpy import Controller as TeslaAPI
from teslajsonpy.exceptions import IncompleteCredentials, TeslaException
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
ATTR_BATTERY_CHARGING,
ATTR_BATTERY_LEVEL,
CONF_ACCESS_TOKEN,
CONF_PASSWORD,
CONF_SCAN_INTERVAL,
CONF_TOKEN,
CONF_USERNAME,
EVENT_HOMEASSISTANT_CLOSE,
HTTP_UNAUTHORIZED,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.httpx_client import SERVER_SOFTWARE, USER_AGENT
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.util import slugify
from .config_flow import CannotConnect, InvalidAuth, validate_input
from .const import (
CONF_EXPIRATION,
CONF_WAKE_ON_START,
DATA_LISTENER,
DEFAULT_SCAN_INTERVAL,
DEFAULT_WAKE_ON_START,
DOMAIN,
ICONS,
MIN_SCAN_INTERVAL,
PLATFORMS,
)
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(
CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL
): vol.All(cv.positive_int, vol.Clamp(min=MIN_SCAN_INTERVAL)),
}
)
},
extra=vol.ALLOW_EXTRA,
)
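# Example of the YAML this schema accepts (illustrative values; assumes DOMAIN from .const
# resolves to "tesla"):
#
#   tesla:
#     username: [email protected]
#     password: !secret tesla_password
#     scan_interval: 660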
@callback
def _async_save_tokens(hass, config_entry, access_token, refresh_token):
hass.config_entries.async_update_entry(
config_entry,
data={
**config_entry.data,
CONF_ACCESS_TOKEN: access_token,
CONF_TOKEN: refresh_token,
},
)
@callback
def _async_configured_emails(hass):
"""Return a set of configured Tesla emails."""
return {
entry.data[CONF_USERNAME]
for entry in hass.config_entries.async_entries(DOMAIN)
if CONF_USERNAME in entry.data
}
async def async_setup(hass, base_config):
"""Set up of Tesla component."""
def _update_entry(email, data=None, options=None):
data = data or {}
options = options or {
CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
CONF_WAKE_ON_START: DEFAULT_WAKE_ON_START,
}
for entry in hass.config_entries.async_entries(DOMAIN):
if email != entry.title:
continue
hass.config_entries.async_update_entry(entry, data=data, options=options)
config = base_config.get(DOMAIN)
if not config:
return True
email = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
scan_interval = config[CONF_SCAN_INTERVAL]
if email in _async_configured_emails(hass):
try:
info = await validate_input(hass, config)
except (CannotConnect, InvalidAuth):
return False
_update_entry(
email,
data={
CONF_USERNAME: email,
CONF_PASSWORD: password,
CONF_ACCESS_TOKEN: info[CONF_ACCESS_TOKEN],
CONF_TOKEN: info[CONF_TOKEN],
CONF_EXPIRATION: info[CONF_EXPIRATION],
},
options={CONF_SCAN_INTERVAL: scan_interval},
)
else:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data={CONF_USERNAME: email, CONF_PASSWORD: password},
)
)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][email] = {CONF_SCAN_INTERVAL: scan_interval}
return True
async def async_setup_entry(hass, config_entry):
"""Set up Tesla as config entry."""
hass.data.setdefault(DOMAIN, {})
config = config_entry.data
# Because users can have multiple accounts, we always create a new session so they have separate cookies
async_client = httpx.AsyncClient(headers={USER_AGENT: SERVER_SOFTWARE}, timeout=60)
email = config_entry.title
if email in hass.data[DOMAIN] and CONF_SCAN_INTERVAL in hass.data[DOMAIN][email]:
scan_interval = hass.data[DOMAIN][email][CONF_SCAN_INTERVAL]
hass.config_entries.async_update_entry(
config_entry, options={CONF_SCAN_INTERVAL: scan_interval}
)
hass.data[DOMAIN].pop(email)
try:
controller = TeslaAPI(
async_client,
email=config.get(CONF_USERNAME),
password=config.get(CONF_PASSWORD),
refresh_token=config[CONF_TOKEN],
access_token=config[CONF_ACCESS_TOKEN],
expiration=config.get(CONF_EXPIRATION, 0),
update_interval=config_entry.options.get(
CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
),
)
result = await controller.connect(
wake_if_asleep=config_entry.options.get(
CONF_WAKE_ON_START, DEFAULT_WAKE_ON_START
)
)
refresh_token = result["refresh_token"]
access_token = result["access_token"]
except IncompleteCredentials as ex:
await async_client.aclose()
raise ConfigEntryAuthFailed from ex
except httpx.ConnectTimeout as ex:
await async_client.aclose()
raise ConfigEntryNotReady from ex
except TeslaException as ex:
await async_client.aclose()
if ex.code == HTTP_UNAUTHORIZED:
raise ConfigEntryAuthFailed from ex
if ex.message in [
"VEHICLE_UNAVAILABLE",
"TOO_MANY_REQUESTS",
"SERVICE_MAINTENANCE",
"UPSTREAM_TIMEOUT",
]:
raise ConfigEntryNotReady(
f"Temporarily unable to communicate with Tesla API: {ex.message}"
) from ex
_LOGGER.error("Unable to communicate with Tesla API: %s", ex.message)
return False
async def _async_close_client(*_):
await async_client.aclose()
@callback
def _async_create_close_task():
asyncio.create_task(_async_close_client())
config_entry.async_on_unload(
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_client)
)
config_entry.async_on_unload(_async_create_close_task)
_async_save_tokens(hass, config_entry, access_token, refresh_token)
coordinator = TeslaDataUpdateCoordinator(
hass, config_entry=config_entry, controller=controller
)
# Fetch initial data so we have data when entities subscribe
entry_data = hass.data[DOMAIN][config_entry.entry_id] = {
"coordinator": coordinator,
"devices": defaultdict(list),
DATA_LISTENER: [config_entry.add_update_listener(update_listener)],
}
_LOGGER.debug("Connected to the Tesla API")
await coordinator.async_config_entry_first_refresh()
all_devices = controller.get_homeassistant_components()
if not all_devices:
return False
for device in all_devices:
entry_data["devices"][device.hass_type].append(device)
hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)
return True
async def async_unload_entry(hass, config_entry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(
config_entry, PLATFORMS
)
for listener in hass.data[DOMAIN][config_entry.entry_id][DATA_LISTENER]:
listener()
username = config_entry.title
if unload_ok:
hass.data[DOMAIN].pop(config_entry.entry_id)
_LOGGER.debug("Unloaded entry for %s", username)
return True
return False
async def update_listener(hass, config_entry):
"""Update when config_entry options update."""
controller = hass.data[DOMAIN][config_entry.entry_id]["coordinator"].controller
old_update_interval = controller.update_interval
controller.update_interval = config_entry.options.get(CONF_SCAN_INTERVAL)
if old_update_interval != controller.update_interval:
_LOGGER.debug(
"Changing scan_interval from %s to %s",
old_update_interval,
controller.update_interval,
)
class TeslaDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Tesla data."""
def __init__(self, hass, *, config_entry, controller):
"""Initialize global Tesla data updater."""
self.controller = controller
self.config_entry = config_entry
update_interval = timedelta(seconds=MIN_SCAN_INTERVAL)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=update_interval,
)
async def _async_update_data(self):
"""Fetch data from API endpoint."""
if self.controller.is_token_refreshed():
result = self.controller.get_tokens()
refresh_token = result["refresh_token"]
access_token = result["access_token"]
_async_save_tokens(
self.hass, self.config_entry, access_token, refresh_token
)
_LOGGER.debug("Saving new tokens in config_entry")
try:
# Note: asyncio.TimeoutError and aiohttp.ClientError are already
# handled by the data update coordinator.
async with async_timeout.timeout(30):
return await self.controller.update()
except TeslaException as err:
raise UpdateFailed(f"Error communicating with API: {err}") from err
class TeslaDevice(CoordinatorEntity):
"""Representation of a Tesla device."""
def __init__(self, tesla_device, coordinator):
"""Initialise the Tesla device."""
super().__init__(coordinator)
self.tesla_device = tesla_device
self._name = self.tesla_device.name
self._unique_id = slugify(self.tesla_device.uniq_name)
self._attributes = self.tesla_device.attrs.copy()
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self) -> str:
"""Return a unique ID."""
return self._unique_id
@property
def icon(self):
"""Return the icon of the sensor."""
if self.device_class:
return None
return ICONS.get(self.tesla_device.type)
@property
def extra_state_attributes(self):
"""Return the state attributes of the device."""
attr = self._attributes
if self.tesla_device.has_battery():
attr[ATTR_BATTERY_LEVEL] = self.tesla_device.battery_level()
attr[ATTR_BATTERY_CHARGING] = self.tesla_device.battery_charging()
return attr
@property
def device_info(self):
"""Return the device_info of the device."""
return {
"identifiers": {(DOMAIN, self.tesla_device.id())},
"name": self.tesla_device.car_name(),
"manufacturer": "Tesla",
"model": self.tesla_device.car_type,
"sw_version": self.tesla_device.car_version,
}
async def async_added_to_hass(self):
"""Register state update callback."""
self.async_on_remove(self.coordinator.async_add_listener(self.refresh))
@callback
def refresh(self) -> None:
"""Refresh the state of the device.
This assumes the coordinator has updated the controller.
"""
self.tesla_device.refresh()
self._attributes = self.tesla_device.attrs.copy()
self.async_write_ha_state()
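# Minimal sketch of how a platform entity builds on TeslaDevice; the state property and the
# accessor used here are illustrative only, not part of the teslajsonpy API:
#
#   class ExampleTeslaSensor(TeslaDevice):
#       @property
#       def state(self):
#           return self.tesla_device.get_value()  # hypothetical accessor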
|
|
# Copyright 2012 Nicira, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
from lxml import etree
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
import six
from nova import exception
from nova.network import linux_net
from nova.network import model as network_model
from nova import objects
from nova.pci import utils as pci_utils
from nova import test
from nova import utils
from nova.virt.libvirt import config as vconfig
from nova.virt.libvirt import vif
CONF = cfg.CONF
class LibvirtVifTestCase(test.NoDBTestCase):
gateway_bridge_4 = network_model.IP(address='101.168.1.1', type='gateway')
dns_bridge_4 = network_model.IP(address='8.8.8.8', type=None)
ips_bridge_4 = [network_model.IP(address='101.168.1.9', type=None)]
subnet_bridge_4 = network_model.Subnet(cidr='101.168.1.0/24',
dns=[dns_bridge_4],
gateway=gateway_bridge_4,
routes=None,
dhcp_server='191.168.1.1')
gateway_bridge_6 = network_model.IP(address='101:1db9::1', type='gateway')
subnet_bridge_6 = network_model.Subnet(cidr='101:1db9::/64',
dns=None,
gateway=gateway_bridge_6,
ips=None,
routes=None)
network_bridge = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface='eth0',
vlan=99)
vif_bridge = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_BRIDGE,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
network_bridge_neutron = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface='eth0',
vlan=99)
vif_bridge_neutron = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge_neutron,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
network_ovs = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface=None,
vlan=99)
network_ivs = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge='br0',
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
bridge_interface=None,
vlan=99)
vif_ovs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
details={'ovs_hybrid_plug': True,
'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_filter_cap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=network_model.VIF_TYPE_OVS,
details={'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ovs_legacy = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=None,
devname=None,
ovs_interfaceid=None)
vif_ivs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ivs_legacy = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ovs,
type=None,
devname=None,
ovs_interfaceid='aaa')
vif_ivs_filter_direct = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
details={'port_filter': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_ivs_filter_hybrid = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_ivs,
type=network_model.VIF_TYPE_IVS,
details={
'port_filter': True,
'ovs_hybrid_plug': True},
devname='tap-xxx-yyy-zzz',
ovs_interfaceid='aaa-bbb-ccc')
vif_none = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=None,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
network_8021 = network_model.Network(id='network-id-xxx-yyy-zzz',
bridge=None,
label=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0',
vlan=99)
vif_8021qbh = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_802_QBH,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_PROFILEID:
'MyPortProfile'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hw_veb = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HW_VEB,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_hw_veb_macvtap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_HW_VEB,
vnic_type=network_model.VNIC_TYPE_MACVTAP,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_8021qbg = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_802_QBG,
ovs_interfaceid=None,
qbg_params=network_model.VIF8021QbgParams(
managerid="xxx-yyy-zzz",
typeid="aaa-bbb-ccc",
typeidversion="1",
instanceid="ddd-eee-fff"))
network_mlnx = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0')
network_midonet = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4],
interface='eth0')
network_vrouter = network_model.Network(id='network-id-xxx-yyy-zzz',
label=None,
bridge=None,
subnets=[subnet_bridge_4,
subnet_bridge_6],
interface='eth0')
vif_vrouter = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_vrouter,
type=network_model.VIF_TYPE_VROUTER,
devname='tap-xxx-yyy-zzz')
vif_mlnx = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_mlnx,
type=network_model.VIF_TYPE_MLNX_DIRECT,
devname='tap-xxx-yyy-zzz')
vif_ib_hostdev = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_IB_HOSTDEV,
vnic_type=network_model.VNIC_TYPE_DIRECT,
ovs_interfaceid=None,
details={
network_model.VIF_DETAILS_VLAN: '100'},
profile={'pci_vendor_info': '1137:0043',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'})
vif_mlnx_net = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_mlnx,
type=network_model.VIF_TYPE_MLNX_DIRECT,
details={'physical_network':
'fake_phy_network'},
devname='tap-xxx-yyy-zzz')
vif_midonet = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_midonet,
type=network_model.VIF_TYPE_MIDONET,
devname='tap-xxx-yyy-zzz')
vif_tap = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
type=network_model.VIF_TYPE_TAP,
devname='tap-xxx-yyy-zzz')
vif_iovisor = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_IOVISOR,
devname='tap-xxx-yyy-zzz',
ovs_interfaceid=None)
vif_vhostuser = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/vif-xxx-yyy-zzz'}
)
vif_vhostuser_ovs = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client',
network_model.VIF_DETAILS_VHOSTUSER_SOCKET:
'/tmp/usv-xxx-yyy-zzz',
network_model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True},
ovs_interfaceid='aaa-bbb-ccc'
)
vif_vhostuser_no_path = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_bridge,
type=network_model.VIF_TYPE_VHOSTUSER,
details = {network_model.VIF_DETAILS_VHOSTUSER_MODE: 'client'})
vif_macvtap_vlan = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP,
details={network_model.VIF_DETAILS_VLAN: '1',
network_model.VIF_DETAILS_PHYS_INTERFACE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_SOURCE: 'eth0.1',
network_model.VIF_DETAILS_MACVTAP_MODE: 'vepa'})
vif_macvtap_flat = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP,
details={network_model.VIF_DETAILS_PHYS_INTERFACE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_SOURCE: 'eth0',
network_model.VIF_DETAILS_MACVTAP_MODE: 'bridge'})
vif_macvtap_exception = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=network_8021,
type=network_model.VIF_TYPE_MACVTAP)
instance = objects.Instance(id=1, uuid='instance-uuid')
bandwidth = {
'quota:vif_inbound_peak': '200',
'quota:vif_outbound_peak': '20',
'quota:vif_inbound_average': '100',
'quota:vif_outbound_average': '10',
'quota:vif_inbound_burst': '300',
'quota:vif_outbound_burst': '30'
}
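# These flavor extra_specs are what _test_model_qemu() below expects to find reflected in the
# generated libvirt <bandwidth> element (inbound/outbound average, peak and burst).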
def setUp(self):
super(LibvirtVifTestCase, self).setUp()
self.flags(allow_same_net_traffic=True)
self.executes = []
def fake_execute(*cmd, **kwargs):
self.executes.append(cmd)
return None, None
self.stubs.Set(utils, 'execute', fake_execute)
def _get_node(self, xml):
doc = etree.fromstring(xml)
ret = doc.findall('./devices/interface')
self.assertEqual(len(ret), 1)
return ret[0]
def _assertMacEquals(self, node, vif):
mac = node.find("mac").get("address")
self.assertEqual(mac, vif['address'])
def _assertTypeEquals(self, node, type, attr, source, br_want,
prefix=None):
self.assertEqual(node.get("type"), type)
br_name = node.find(attr).get(source)
if prefix is None:
self.assertEqual(br_name, br_want)
else:
self.assertTrue(br_name.startswith(prefix))
def _assertTypeAndMacEquals(self, node, type, attr, source, vif,
br_want=None, size=0, prefix=None):
ret = node.findall("filterref")
self.assertEqual(len(ret), size)
self._assertTypeEquals(node, type, attr, source, br_want,
prefix)
self._assertMacEquals(node, vif)
def _assertModel(self, xml, model_want=None, driver_want=None):
node = self._get_node(xml)
if model_want is None:
ret = node.findall("model")
self.assertEqual(len(ret), 0)
else:
model = node.find("model").get("type")
self.assertEqual(model, model_want)
if driver_want is None:
ret = node.findall("driver")
self.assertEqual(len(ret), 0)
else:
driver = node.find("driver").get("name")
self.assertEqual(driver, driver_want)
def _assertTypeAndPciEquals(self, node, type, vif):
self.assertEqual(node.get("type"), type)
self._assertPciEqual(node, vif, type="pci")
def _assertPciEqual(self, node, vif, type=None):
address = node.find("source").find("address")
if type:
addr_type = address.get("type")
self.assertEqual(type, addr_type)
pci_slot = "%(domain)s:%(bus)s:%(slot)s.%(func)s" % {
'domain': address.get("domain")[2:],
'bus': address.get("bus")[2:],
'slot': address.get("slot")[2:],
'func': address.get("function")[2:]}
pci_slot_want = vif['profile']['pci_slot']
self.assertEqual(pci_slot, pci_slot_want)
def _get_conf(self):
conf = vconfig.LibvirtConfigGuest()
conf.virt_type = "qemu"
conf.name = "fake-name"
conf.uuid = "fake-uuid"
conf.memory = 100 * 1024
conf.vcpus = 4
return conf
def _get_instance_xml(self, driver, vif, image_meta=None, flavor=None):
if flavor is None:
flavor = objects.Flavor(name='m1.small',
memory_mb=128,
vcpus=1,
root_gb=0,
ephemeral_gb=0,
swap=0,
extra_specs=dict(self.bandwidth),
deleted_at=None,
deleted=0,
created_at=None, flavorid=1,
is_public=True, vcpu_weight=None,
id=2, disabled=False, rxtx_factor=1.0)
conf = self._get_conf()
nic = driver.get_config(self.instance, vif, image_meta,
flavor, CONF.libvirt.virt_type)
conf.add_device(nic)
return conf.to_xml()
def test_virtio_multiqueue(self):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
flavor = objects.Flavor(name='m1.small',
memory_mb=128,
vcpus=4,
root_gb=0,
ephemeral_gb=0,
swap=0,
deleted_at=None,
deleted=0,
created_at=None, flavorid=1,
is_public=True, vcpu_weight=None,
id=2, disabled=False, rxtx_factor=1.0)
d = vif.LibvirtGenericVIFDriver()
image_meta = {'properties': {'hw_vif_model': 'virtio',
'hw_vif_multiqueue_enabled': 'true'}}
xml = self._get_instance_xml(d, self.vif_bridge,
image_meta, flavor)
node = self._get_node(xml)
driver = node.find("driver").get("name")
self.assertEqual(driver, 'vhost')
queues = node.find("driver").get("queues")
self.assertEqual(queues, '4')
def test_multiple_nics(self):
conf = self._get_conf()
# Tests multiple nic configuration and that target_dev is
# set for each
nics = [{'net_type': 'bridge',
'mac_addr': '00:00:00:00:00:0b',
'source_dev': 'b_source_dev',
'target_dev': 'b_target_dev'},
{'net_type': 'ethernet',
'mac_addr': '00:00:00:00:00:0e',
'source_dev': 'e_source_dev',
'target_dev': 'e_target_dev'},
{'net_type': 'direct',
'mac_addr': '00:00:00:00:00:0d',
'source_dev': 'd_source_dev',
'target_dev': 'd_target_dev'}]
for nic in nics:
nic_conf = vconfig.LibvirtConfigGuestInterface()
nic_conf.net_type = nic['net_type']
nic_conf.target_dev = nic['target_dev']
nic_conf.mac_addr = nic['mac_addr']
nic_conf.source_dev = nic['source_dev']
conf.add_device(nic_conf)
xml = conf.to_xml()
doc = etree.fromstring(xml)
for nic in nics:
path = "./devices/interface/[@type='%s']" % nic['net_type']
node = doc.find(path)
self.assertEqual(nic['net_type'], node.get("type"))
self.assertEqual(nic['mac_addr'],
node.find("mac").get("address"))
self.assertEqual(nic['target_dev'],
node.find("target").get("dev"))
def test_model_novirtio(self):
self.flags(use_virtio_for_bridges=False,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml)
def test_model_kvm(self):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_model_kvm_qemu_custom(self):
for virt in ('kvm', 'qemu'):
self.flags(use_virtio_for_bridges=True,
virt_type=virt,
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
supported = (network_model.VIF_MODEL_NE2K_PCI,
network_model.VIF_MODEL_PCNET,
network_model.VIF_MODEL_RTL8139,
network_model.VIF_MODEL_E1000,
network_model.VIF_MODEL_SPAPR_VLAN)
for model in supported:
image_meta = {'properties': {'hw_vif_model': model}}
xml = self._get_instance_xml(d, self.vif_bridge,
image_meta)
self._assertModel(xml, model)
def test_model_kvm_bogus(self):
self.flags(use_virtio_for_bridges=True,
virt_type='kvm',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
image_meta = {'properties': {'hw_vif_model': 'acme'}}
self.assertRaises(exception.UnsupportedHardware,
self._get_instance_xml,
d,
self.vif_bridge,
image_meta)
def _test_model_qemu(self, *vif_objs, **kw):
libvirt_version = kw.get('libvirt_version')
self.flags(use_virtio_for_bridges=True,
virt_type='qemu',
group='libvirt')
for vif_obj in vif_objs:
d = vif.LibvirtGenericVIFDriver()
if libvirt_version is not None:
d.libvirt_version = libvirt_version
xml = self._get_instance_xml(d, vif_obj)
doc = etree.fromstring(xml)
bandwidth = doc.find('./devices/interface/bandwidth')
self.assertNotEqual(bandwidth, None)
inbound = bandwidth.find('inbound')
self.assertEqual(inbound.get("average"),
self.bandwidth['quota:vif_inbound_average'])
self.assertEqual(inbound.get("peak"),
self.bandwidth['quota:vif_inbound_peak'])
self.assertEqual(inbound.get("burst"),
self.bandwidth['quota:vif_inbound_burst'])
outbound = bandwidth.find('outbound')
self.assertEqual(outbound.get("average"),
self.bandwidth['quota:vif_outbound_average'])
self.assertEqual(outbound.get("peak"),
self.bandwidth['quota:vif_outbound_peak'])
self.assertEqual(outbound.get("burst"),
self.bandwidth['quota:vif_outbound_burst'])
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO, "qemu")
def test_model_qemu_no_firewall(self):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
self._test_model_qemu(
self.vif_bridge,
self.vif_8021qbg,
self.vif_iovisor,
self.vif_mlnx,
self.vif_ovs,
)
def test_model_qemu_iptables(self):
self.flags(firewall_driver="nova.virt.firewall.IptablesFirewallDriver")
self._test_model_qemu(
self.vif_bridge,
self.vif_ovs,
self.vif_ivs,
self.vif_8021qbg,
self.vif_iovisor,
self.vif_mlnx,
)
def test_model_xen(self):
self.flags(use_virtio_for_bridges=True,
virt_type='xen',
group='libvirt')
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_bridge)
self._assertModel(xml)
def test_generic_driver_none(self):
d = vif.LibvirtGenericVIFDriver()
self.assertRaises(exception.NovaException,
self._get_instance_xml,
d,
self.vif_none)
def _check_bridge_driver(self, d, vif, br_want):
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_bridge, br_want, 1)
def test_generic_driver_bridge(self):
d = vif.LibvirtGenericVIFDriver()
self._check_bridge_driver(d,
self.vif_bridge,
self.vif_bridge['network']['bridge'])
def _check_ivs_ethernet_driver(self, d, vif, dev_prefix):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs, prefix=dev_prefix)
script = node.find("script").get("path")
self.assertEqual(script, "")
def test_unplug_ivs_ethernet(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(linux_net, 'delete_ivs_vif_port') as delete:
delete.side_effect = processutils.ProcessExecutionError
d.unplug_ivs_ethernet(None, self.vif_ovs)
def test_plug_ovs_hybrid(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy'),
mock.call('qvovif-xxx-yyy')],
'_create_veth_pair': [mock.call('qvbvif-xxx-yyy',
'qvovif-xxx-yyy')],
'execute': [mock.call('brctl', 'addbr', 'qbrvif-xxx-yyy',
run_as_root=True),
mock.call('brctl', 'setfd', 'qbrvif-xxx-yyy', 0,
run_as_root=True),
mock.call('brctl', 'stp', 'qbrvif-xxx-yyy', 'off',
run_as_root=True),
mock.call('tee', ('/sys/class/net/qbrvif-xxx-yyy'
'/bridge/multicast_snooping'),
process_input='0', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('ip', 'link', 'set', 'qbrvif-xxx-yyy', 'up',
run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True)],
'create_ovs_vif_port': [mock.call('br0',
'qvovif-xxx-yyy', 'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'instance-uuid')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, '_create_veth_pair'),
mock.patch.object(linux_net, 'create_ovs_vif_port')
) as (device_exists, execute, _create_veth_pair, create_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.plug_ovs_hybrid(self.instance, self.vif_ovs)
device_exists.assert_has_calls(calls['device_exists'])
_create_veth_pair.assert_has_calls(calls['_create_veth_pair'])
execute.assert_has_calls(calls['execute'])
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
def test_unplug_ovs_hybrid(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy')],
'execute': [mock.call('brctl', 'delif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('ip', 'link', 'set',
'qbrvif-xxx-yyy', 'down', run_as_root=True),
mock.call('brctl', 'delbr',
'qbrvif-xxx-yyy', run_as_root=True)],
'delete_ovs_vif_port': [mock.call('br0', 'qvovif-xxx-yyy')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=True),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, 'delete_ovs_vif_port')
) as (device_exists, execute, delete_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug_ovs_hybrid(None, self.vif_ovs)
device_exists.assert_has_calls(calls['device_exists'])
execute.assert_has_calls(calls['execute'])
delete_ovs_vif_port.assert_has_calls(calls['delete_ovs_vif_port'])
@mock.patch.object(utils, 'execute')
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address')
@mock.patch.object(pci_utils, 'get_vf_num_by_pci_address', return_value=1)
def _test_hw_veb_op(self, op, vlan, mock_get_vf_num, mock_get_ifname,
mock_execute):
mock_get_ifname.side_effect = ['eth1', 'eth13']
exit_code = [0, 2, 254]
port_state = 'up' if vlan > 0 else 'down'
calls = {
'get_ifname':
[mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'],
pf_interface=True),
mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'])],
'get_vf_num':
[mock.call(self.vif_hw_veb_macvtap['profile']['pci_slot'])],
'execute': [mock.call('ip', 'link', 'set', 'eth1',
'vf', 1, 'mac',
self.vif_hw_veb_macvtap['address'],
'vlan', vlan,
run_as_root=True,
check_exit_code=exit_code),
mock.call('ip', 'link', 'set',
'eth13', port_state,
run_as_root=True,
check_exit_code=exit_code)]
}
op(None, self.vif_hw_veb_macvtap)
mock_get_ifname.assert_has_calls(calls['get_ifname'])
mock_get_vf_num.assert_has_calls(calls['get_vf_num'])
mock_execute.assert_has_calls(calls['execute'])
def test_plug_hw_veb(self):
d = vif.LibvirtGenericVIFDriver()
self._test_hw_veb_op(
d.plug_hw_veb,
self.vif_hw_veb_macvtap['details'][network_model.VIF_DETAILS_VLAN])
def test_unplug_hw_veb(self):
d = vif.LibvirtGenericVIFDriver()
self._test_hw_veb_op(d.unplug_hw_veb, 0)
def test_unplug_ovs_hybrid_bridge_does_not_exist(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy')],
'delete_ovs_vif_port': [mock.call('br0', 'qvovif-xxx-yyy')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(linux_net, 'delete_ovs_vif_port')
) as (device_exists, delete_ovs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug_ovs_hybrid(None, self.vif_ovs)
device_exists.assert_has_calls(calls['device_exists'])
delete_ovs_vif_port.assert_has_calls(calls['delete_ovs_vif_port'])
def test_plug_ivs_hybrid(self):
calls = {
'device_exists': [mock.call('qbrvif-xxx-yyy'),
mock.call('qvovif-xxx-yyy')],
'_create_veth_pair': [mock.call('qvbvif-xxx-yyy',
'qvovif-xxx-yyy')],
'execute': [mock.call('brctl', 'addbr', 'qbrvif-xxx-yyy',
run_as_root=True),
mock.call('brctl', 'setfd', 'qbrvif-xxx-yyy', 0,
run_as_root=True),
mock.call('brctl', 'stp', 'qbrvif-xxx-yyy', 'off',
run_as_root=True),
mock.call('tee', ('/sys/class/net/qbrvif-xxx-yyy'
'/bridge/multicast_snooping'),
process_input='0', run_as_root=True,
check_exit_code=[0, 1]),
mock.call('ip', 'link', 'set', 'qbrvif-xxx-yyy', 'up',
run_as_root=True),
mock.call('brctl', 'addif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True)],
'create_ivs_vif_port': [mock.call('qvovif-xxx-yyy', 'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'instance-uuid')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'device_exists',
return_value=False),
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, '_create_veth_pair'),
mock.patch.object(linux_net, 'create_ivs_vif_port')
) as (device_exists, execute, _create_veth_pair, create_ivs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.plug_ivs_hybrid(self.instance, self.vif_ivs)
device_exists.assert_has_calls(calls['device_exists'])
_create_veth_pair.assert_has_calls(calls['_create_veth_pair'])
execute.assert_has_calls(calls['execute'])
create_ivs_vif_port.assert_has_calls(calls['create_ivs_vif_port'])
def test_unplug_ivs_hybrid(self):
calls = {
'execute': [mock.call('brctl', 'delif', 'qbrvif-xxx-yyy',
'qvbvif-xxx-yyy', run_as_root=True),
mock.call('ip', 'link', 'set',
'qbrvif-xxx-yyy', 'down', run_as_root=True),
mock.call('brctl', 'delbr',
'qbrvif-xxx-yyy', run_as_root=True)],
'delete_ivs_vif_port': [mock.call('qvovif-xxx-yyy')]
}
with contextlib.nested(
mock.patch.object(utils, 'execute'),
mock.patch.object(linux_net, 'delete_ivs_vif_port')
) as (execute, delete_ivs_vif_port):
d = vif.LibvirtGenericVIFDriver()
d.unplug_ivs_hybrid(None, self.vif_ivs)
execute.assert_has_calls(calls['execute'])
delete_ivs_vif_port.assert_has_calls(calls['delete_ivs_vif_port'])
def test_unplug_ivs_hybrid_bridge_does_not_exist(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
d.unplug_ivs_hybrid(None, self.vif_ivs)
def test_unplug_iovisor(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
mynetwork = network_model.Network(id='network-id-xxx-yyy-zzz',
label='mylabel')
myvif = network_model.VIF(id='vif-xxx-yyy-zzz',
address='ca:fe:de:ad:be:ef',
network=mynetwork)
d.unplug_iovisor(None, myvif)
@mock.patch('nova.network.linux_net.device_exists')
def test_plug_iovisor(self, device_exists):
device_exists.return_value = True
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
instance = objects.Instance(id=1,
uuid='instance-uuid',
project_id='myproject')
d.plug_iovisor(instance, self.vif_ivs)
def test_unplug_mlnx_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
execute.side_effect = processutils.ProcessExecutionError
d.unplug_mlnx_direct(None, self.vif_mlnx_net)
execute.assert_called_once_with('ebrctl', 'del-port',
'fake_phy_network',
'ca:fe:de:ad:be:ef',
run_as_root=True)
def test_plug_mlnx_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
d.plug_mlnx_direct(self.instance, self.vif_mlnx_net)
execute.assert_called_once_with('ebrctl', 'add-port',
'ca:fe:de:ad:be:ef',
'instance-uuid',
'fake_phy_network',
'mlnx_direct',
'eth-xxx-yyy-zzz',
run_as_root=True)
def test_plug_mlnx_no_physical_network(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
self.assertRaises(exception.NovaException,
d.plug_mlnx_direct,
self.instance,
self.vif_mlnx)
self.assertEqual(0, execute.call_count)
def test_unplug_vrouter_with_details(self):
d = vif.LibvirtGenericVIFDriver()
with mock.patch.object(utils, 'execute') as execute:
d.unplug_vrouter(None, self.vif_vrouter)
execute.assert_called_once_with(
'vrouter-port-control',
'--oper=delete --uuid=vif-xxx-yyy-zzz',
run_as_root=True)
def test_plug_vrouter_with_details(self):
d = vif.LibvirtGenericVIFDriver()
instance = mock.Mock()
instance.name = 'instance-name'
instance.uuid = '46a4308b-e75a-4f90-a34a-650c86ca18b2'
instance.project_id = 'b168ea26fa0c49c1a84e1566d9565fa5'
instance.display_name = 'instance1'
with mock.patch.object(utils, 'execute') as execute:
d.plug_vrouter(instance, self.vif_vrouter)
execute.assert_has_calls([
mock.call('ip', 'tuntap', 'add', 'tap-xxx-yyy-zzz', 'mode',
'tap', run_as_root=True, check_exit_code=[0, 2, 254]),
mock.call('ip', 'link', 'set', 'tap-xxx-yyy-zzz', 'up',
run_as_root=True, check_exit_code=[0, 2, 254]),
mock.call('vrouter-port-control',
'--oper=add --uuid=vif-xxx-yyy-zzz '
'--instance_uuid=46a4308b-e75a-4f90-a34a-650c86ca18b2 '
'--vn_uuid=network-id-xxx-yyy-zzz '
'--vm_project_uuid=b168ea26fa0c49c1a84e1566d9565fa5 '
'--ip_address=0.0.0.0 '
'--ipv6_address=None '
'--vm_name=instance1 '
'--mac=ca:fe:de:ad:be:ef '
'--tap_name=tap-xxx-yyy-zzz '
'--port_type=NovaVMPort '
'--tx_vlan_id=-1 '
'--rx_vlan_id=-1', run_as_root=True)])
def test_ivs_ethernet_driver(self):
d = vif.LibvirtGenericVIFDriver()
self._check_ivs_ethernet_driver(d,
self.vif_ivs,
"tap")
def _check_ivs_virtualport_driver(self, d, vif, want_iface_id):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
vif, vif['devname'])
def _check_ovs_virtualport_driver(self, d, vif, want_iface_id):
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
vif, "br0")
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "openvswitch")
iface_id_found = False
for p_elem in vp.findall("parameters"):
iface_id = p_elem.get("interfaceid", None)
if iface_id:
self.assertEqual(iface_id, want_iface_id)
iface_id_found = True
self.assertTrue(iface_id_found)
def test_generic_ovs_virtualport_driver(self):
d = vif.LibvirtGenericVIFDriver()
want_iface_id = self.vif_ovs['ovs_interfaceid']
self._check_ovs_virtualport_driver(d,
self.vif_ovs,
want_iface_id)
def test_generic_ivs_virtualport_driver(self):
d = vif.LibvirtGenericVIFDriver()
want_iface_id = self.vif_ivs['ovs_interfaceid']
self._check_ivs_virtualport_driver(d,
self.vif_ivs,
want_iface_id)
def test_ivs_plug_with_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
xml = self._get_instance_xml(d, self.vif_ivs)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ivs, br_want, 1)
def test_ivs_plug_with_port_filter_hybrid_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs_filter_hybrid['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ivs_filter_hybrid)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ivs_filter_hybrid, br_want, 0)
def test_ivs_plug_with_port_filter_direct_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_ivs_filter_direct['devname']
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ivs_filter_direct)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs_filter_direct, br_want, 0)
def test_hybrid_plug_without_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ovs_hybrid['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
xml = self._get_instance_xml(d, self.vif_ovs_hybrid)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
self.vif_ovs_hybrid, br_want, 0)
def test_direct_plug_with_port_filter_cap_no_nova_firewall(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_midonet['devname']
xml = self._get_instance_xml(d, self.vif_ovs_filter_cap)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "target", "dev",
self.vif_ovs_filter_cap, br_want)
def _check_neutron_hybrid_driver(self, d, vif, br_want):
self.flags(firewall_driver="nova.virt.firewall.IptablesFirewallDriver")
xml = self._get_instance_xml(d, vif)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "bridge", "source", "bridge",
vif, br_want, 1)
def test_generic_hybrid_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ovs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self._check_neutron_hybrid_driver(d,
self.vif_ovs,
br_want)
def test_ivs_hybrid_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = "qbr" + self.vif_ivs['id']
br_want = br_want[:network_model.NIC_NAME_LEN]
self._check_neutron_hybrid_driver(d,
self.vif_ivs,
br_want)
def test_mlnx_direct_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d,
self.vif_mlnx)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth-xxx-yyy-zzz")
self._assertTypeEquals(node, "direct", "source",
"mode", "passthrough")
self._assertMacEquals(node, self.vif_mlnx)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_ib_hostdev_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_ib_hostdev)
doc = etree.fromstring(xml)
node = doc.findall('./devices/hostdev')[0]
self.assertEqual(1, len(node))
self._assertPciEqual(node, self.vif_ib_hostdev)
def test_midonet_ethernet_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
br_want = self.vif_midonet['devname']
xml = self._get_instance_xml(d, self.vif_midonet)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_midonet, br_want)
def test_tap_ethernet_vif_driver(self):
d = vif.LibvirtGenericVIFDriver()
br_want = self.vif_tap['devname']
xml = self._get_instance_xml(d, self.vif_tap)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_tap, br_want)
@mock.patch('nova.network.linux_net.device_exists')
def test_plug_tap(self, device_exists):
device_exists.return_value = True
d = vif.LibvirtGenericVIFDriver()
d.plug_tap(None, self.vif_tap)
def test_unplug_tap(self):
d = vif.LibvirtGenericVIFDriver()
d.unplug_tap(None, self.vif_tap)
def test_generic_8021qbh_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_8021qbh)
node = self._get_node(xml)
self._assertTypeAndPciEquals(node, "hostdev", self.vif_8021qbh)
self._assertMacEquals(node, self.vif_8021qbh)
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "802.1Qbh")
profile_id_found = False
for p_elem in vp.findall("parameters"):
details = self.vif_8021qbh["details"]
profile_id = p_elem.get("profileid", None)
if profile_id:
self.assertEqual(profile_id,
details[network_model.VIF_DETAILS_PROFILEID])
profile_id_found = True
self.assertTrue(profile_id_found)
def test_hw_veb_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hw_veb)
node = self._get_node(xml)
self._assertTypeAndPciEquals(node, "hostdev", self.vif_hw_veb)
self._assertMacEquals(node, self.vif_hw_veb)
vlan = node.find("vlan").find("tag").get("id")
vlan_want = self.vif_hw_veb["details"]["vlan"]
self.assertEqual(vlan, vlan_want)
@mock.patch.object(pci_utils, 'get_ifname_by_pci_address',
return_value='eth1')
def test_hw_veb_driver_macvtap(self, mock_get_ifname):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_hw_veb_macvtap)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth1")
self._assertTypeEquals(node, "direct", "source",
"mode", "passthrough")
self._assertMacEquals(node, self.vif_hw_veb_macvtap)
vlan = node.find("vlan")
self.assertIsNone(vlan)
def test_driver_macvtap_vlan(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_macvtap_vlan)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth0.1")
self._assertTypeEquals(node, "direct", "source",
"mode", "vepa")
self._assertMacEquals(node, self.vif_macvtap_vlan)
def test_driver_macvtap_flat(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_macvtap_flat)
node = self._get_node(xml)
self.assertEqual(node.get("type"), "direct")
self._assertTypeEquals(node, "direct", "source",
"dev", "eth0")
self._assertTypeEquals(node, "direct", "source",
"mode", "bridge")
self._assertMacEquals(node, self.vif_macvtap_flat)
def test_driver_macvtap_exception(self):
d = vif.LibvirtGenericVIFDriver()
e = self.assertRaises(exception.VifDetailsMissingMacvtapParameters,
self._get_instance_xml,
d,
self.vif_macvtap_exception)
self.assertIn('macvtap_source', six.text_type(e))
self.assertIn('macvtap_mode', six.text_type(e))
self.assertIn('physical_interface', six.text_type(e))
@mock.patch.object(linux_net.LinuxBridgeInterfaceDriver, 'ensure_vlan')
def test_macvtap_plug_vlan(self, ensure_vlan_mock):
d = vif.LibvirtGenericVIFDriver()
d.plug_macvtap(self.instance, self.vif_macvtap_vlan)
ensure_vlan_mock.assert_called_once_with('1', 'eth0',
interface='eth0.1')
@mock.patch.object(linux_net.LinuxBridgeInterfaceDriver, 'ensure_vlan')
def test_macvtap_plug_flat(self, ensure_vlan_mock):
d = vif.LibvirtGenericVIFDriver()
d.plug_macvtap(self.instance, self.vif_macvtap_flat)
self.assertFalse(ensure_vlan_mock.called)
def test_generic_iovisor_driver(self):
d = vif.LibvirtGenericVIFDriver()
self.flags(firewall_driver="nova.virt.firewall.NoopFirewallDriver")
br_want = self.vif_ivs['devname']
xml = self._get_instance_xml(d, self.vif_ivs)
node = self._get_node(xml)
self._assertTypeAndMacEquals(node, "ethernet", "target", "dev",
self.vif_ivs, br_want)
def test_generic_8021qbg_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_8021qbg)
node = self._get_node(xml)
self._assertTypeEquals(node, "direct", "source", "dev", "eth0")
self._assertMacEquals(node, self.vif_8021qbg)
vp = node.find("virtualport")
self.assertEqual(vp.get("type"), "802.1Qbg")
manager_id_found = False
type_id_found = False
typeversion_id_found = False
instance_id_found = False
for p_elem in vp.findall("parameters"):
wantparams = self.vif_8021qbg['qbg_params']
manager_id = p_elem.get("managerid", None)
type_id = p_elem.get("typeid", None)
typeversion_id = p_elem.get("typeidversion", None)
instance_id = p_elem.get("instanceid", None)
if manager_id:
self.assertEqual(manager_id,
wantparams['managerid'])
manager_id_found = True
if type_id:
self.assertEqual(type_id,
wantparams['typeid'])
type_id_found = True
if typeversion_id:
self.assertEqual(typeversion_id,
wantparams['typeidversion'])
typeversion_id_found = True
if instance_id:
self.assertEqual(instance_id,
wantparams['instanceid'])
instance_id_found = True
self.assertTrue(manager_id_found)
self.assertTrue(type_id_found)
self.assertTrue(typeversion_id_found)
self.assertTrue(instance_id_found)
def test_vhostuser_driver(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d, self.vif_vhostuser)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "mode", "client")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "path", "/tmp/vif-xxx-yyy-zzz")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "type", "unix")
self._assertMacEquals(node, self.vif_vhostuser)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_vhostuser_no_queues(self):
d = vif.LibvirtGenericVIFDriver()
image_meta = {'properties': {'hw_vif_model': 'virtio',
'hw_vif_multiqueue_enabled': 'true'}}
xml = self._get_instance_xml(d, self.vif_vhostuser, image_meta)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertMacEquals(node, self.vif_vhostuser)
driver = node.find("driver")
self.assertIsNone(driver)
def test_vhostuser_driver_no_path(self):
d = vif.LibvirtGenericVIFDriver()
self.assertRaises(exception.VifDetailsMissingVhostuserSockPath,
self._get_instance_xml,
d,
self.vif_vhostuser_no_path)
def test_vhostuser_driver_ovs(self):
d = vif.LibvirtGenericVIFDriver()
xml = self._get_instance_xml(d,
self.vif_vhostuser_ovs)
node = self._get_node(xml)
self.assertEqual(node.get("type"),
network_model.VIF_TYPE_VHOSTUSER)
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "mode", "client")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "path", "/tmp/usv-xxx-yyy-zzz")
self._assertTypeEquals(node, network_model.VIF_TYPE_VHOSTUSER,
"source", "type", "unix")
self._assertMacEquals(node, self.vif_vhostuser_ovs)
self._assertModel(xml, network_model.VIF_MODEL_VIRTIO)
def test_vhostuser_ovs_plug(self):
calls = {
'create_ovs_vif_port': [mock.call('br0',
'usv-xxx-yyy-zzz',
'aaa-bbb-ccc',
'ca:fe:de:ad:be:ef',
'instance-uuid')],
'ovs_set_vhostuser_port_type': [mock.call('usv-xxx-yyy-zzz')]
}
with contextlib.nested(
mock.patch.object(linux_net, 'create_ovs_vif_port'),
mock.patch.object(linux_net, 'ovs_set_vhostuser_port_type')
) as (create_ovs_vif_port, ovs_set_vhostuser_port_type):
d = vif.LibvirtGenericVIFDriver()
d.plug_vhostuser(self.instance, self.vif_vhostuser_ovs)
create_ovs_vif_port.assert_has_calls(calls['create_ovs_vif_port'])
ovs_set_vhostuser_port_type.assert_has_calls(
calls['ovs_set_vhostuser_port_type'])
def test_vhostuser_ovs_unplug(self):
calls = {
'delete_ovs_vif_port': [mock.call('br0', 'usv-xxx-yyy-zzz')]
}
with mock.patch.object(linux_net,
'delete_ovs_vif_port') as delete_port:
d = vif.LibvirtGenericVIFDriver()
d.unplug_vhostuser(None, self.vif_vhostuser_ovs)
delete_port.assert_has_calls(calls['delete_ovs_vif_port'])
# Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
ZFS Storage Appliance NFS Cinder Volume Driver
"""
import base64
import datetime as dt
import errno
import math
from oslo_config import cfg
from oslo_log import log
from oslo_utils import excutils
from oslo_utils import units
import six
from cinder import exception
from cinder import utils
from cinder.i18n import _, _LE, _LI
from cinder.volume.drivers import nfs
from cinder.volume.drivers.san import san
from cinder.volume.drivers.zfssa import zfssarest
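# Configuration options specific to the ZFSSA NFS driver; they are registered
# globally below and appended to the per-backend configuration in __init__.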
ZFSSA_OPTS = [
cfg.StrOpt('zfssa_data_ip',
help='Data path IP address'),
cfg.StrOpt('zfssa_https_port', default='443',
help='HTTPS port number'),
cfg.StrOpt('zfssa_nfs_mount_options', default='',
help='Options to be passed while mounting the share over NFS.'),
cfg.StrOpt('zfssa_nfs_pool', default='',
help='Storage pool name.'),
cfg.StrOpt('zfssa_nfs_project', default='NFSProject',
help='Project name.'),
cfg.StrOpt('zfssa_nfs_share', default='nfs_share',
help='Share name.'),
cfg.StrOpt('zfssa_nfs_share_compression', default='off',
choices=['off', 'lzjb', 'gzip-2', 'gzip', 'gzip-9'],
help='Data compression.'),
cfg.StrOpt('zfssa_nfs_share_logbias', default='latency',
choices=['latency', 'throughput'],
help='Synchronous write bias: latency or throughput.'),
cfg.IntOpt('zfssa_rest_timeout',
help='REST connection timeout. (seconds)'),
cfg.BoolOpt('zfssa_enable_local_cache', default=True,
help='Flag to enable local caching: True, False.'),
cfg.StrOpt('zfssa_cache_directory', default='os-cinder-cache',
help='Name of directory inside zfssa_nfs_share where cache '
'volumes are stored.')
]
LOG = log.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(ZFSSA_OPTS)
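# Module-level factory for the REST API client; unit tests can stub this out
# to avoid talking to a real appliance.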
def factory_zfssa():
return zfssarest.ZFSSANfsApi()
class ZFSSANFSDriver(nfs.NfsDriver):
"""ZFSSA Cinder NFS volume driver.
Version history:
1.0.1:
Backend enabled volume migration.
Local cache feature.
"""
VERSION = '1.0.1'
volume_backend_name = 'ZFSSA_NFS'
protocol = driver_prefix = driver_volume_type = 'nfs'
def __init__(self, *args, **kwargs):
super(ZFSSANFSDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(ZFSSA_OPTS)
self.configuration.append_config_values(san.san_opts)
self.zfssa = None
self._stats = None
def do_setup(self, context):
if not self.configuration.nfs_oversub_ratio > 0:
msg = _("NFS config 'nfs_oversub_ratio' invalid. Must be > 0: "
"%s") % self.configuration.nfs_oversub_ratio
LOG.error(msg)
raise exception.NfsException(msg)
if not ((self.configuration.nfs_used_ratio > 0) and
(self.configuration.nfs_used_ratio <= 1)):
msg = _("NFS config 'nfs_used_ratio' invalid. Must be > 0 "
"and <= 1.0: %s") % self.configuration.nfs_used_ratio
LOG.error(msg)
raise exception.NfsException(msg)
package = 'mount.nfs'
try:
self._execute(package, check_exit_code=False, run_as_root=True)
except OSError as exc:
if exc.errno == errno.ENOENT:
msg = _('%s is not installed') % package
raise exception.NfsException(msg)
else:
raise
lcfg = self.configuration
LOG.info(_LI('Connecting to host: %s.'), lcfg.san_ip)
host = lcfg.san_ip
user = lcfg.san_login
password = lcfg.san_password
https_port = lcfg.zfssa_https_port
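# None of these settings have usable defaults, so fail early if any of them
# is missing from cinder.conf.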
credentials = ['san_ip', 'san_login', 'san_password', 'zfssa_data_ip']
for cred in credentials:
if not getattr(lcfg, cred, None):
exception_msg = _('%s not set in cinder.conf') % cred
LOG.error(exception_msg)
raise exception.CinderException(exception_msg)
self.zfssa = factory_zfssa()
self.zfssa.set_host(host, timeout=lcfg.zfssa_rest_timeout)
auth_str = base64.encodestring('%s:%s' % (user, password))[:-1]
self.zfssa.login(auth_str)
self.zfssa.create_project(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
compression=lcfg.zfssa_nfs_share_compression,
logbias=lcfg.zfssa_nfs_share_logbias)
share_args = {
'sharedav': 'rw',
'sharenfs': 'rw',
'root_permissions': '777',
'compression': lcfg.zfssa_nfs_share_compression,
'logbias': lcfg.zfssa_nfs_share_logbias
}
self.zfssa.create_share(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share, share_args)
share_details = self.zfssa.get_share(lcfg.zfssa_nfs_pool,
lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share)
mountpoint = share_details['mountpoint']
self.mount_path = lcfg.zfssa_data_ip + ':' + mountpoint
https_path = 'https://' + lcfg.zfssa_data_ip + ':' + https_port + \
'/shares' + mountpoint
LOG.debug('NFS mount path: %s', self.mount_path)
LOG.debug('WebDAV path to the share: %s', https_path)
self.shares = {}
mnt_opts = self.configuration.zfssa_nfs_mount_options
self.shares[self.mount_path] = mnt_opts if len(mnt_opts) > 1 else None
# Initialize the WebDAV client
self.zfssa.set_webdav(https_path, auth_str)
# Edit http service so that WebDAV requests are always authenticated
args = {'https_port': https_port,
'require_login': True}
self.zfssa.modify_service('http', args)
self.zfssa.enable_service('http')
if lcfg.zfssa_enable_local_cache:
LOG.debug('Creating local cache directory %s.',
lcfg.zfssa_cache_directory)
self.zfssa.create_directory(lcfg.zfssa_cache_directory)
def _ensure_shares_mounted(self):
try:
self._ensure_share_mounted(self.mount_path)
except Exception as exc:
LOG.error(_LE('Exception during mounting %s.'), exc)
self._mounted_shares = [self.mount_path]
LOG.debug('Available shares %s', self._mounted_shares)
def check_for_setup_error(self):
"""Check that driver can login.
Check also for properly configured pool, project and share
Check that the http and nfs services are enabled
"""
lcfg = self.configuration
self.zfssa.verify_pool(lcfg.zfssa_nfs_pool)
self.zfssa.verify_project(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project)
self.zfssa.verify_share(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share)
self.zfssa.verify_service('http')
self.zfssa.verify_service('nfs')
def create_snapshot(self, snapshot):
"""Creates a snapshot of a volume."""
LOG.info(_LI('Creating snapshot: %s'), snapshot['name'])
lcfg = self.configuration
snap_name = self._create_snapshot_name()
self.zfssa.create_snapshot(lcfg.zfssa_nfs_pool, lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share, snap_name)
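# The share-level snapshot is only a temporary source: the volume's backing
# file is copied out of it below, and the snapshot itself is removed again in
# the finally block.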
src_file = snap_name + '/' + snapshot['volume_name']
try:
self.zfssa.create_snapshot_of_volume_file(src_file=src_file,
dst_file=snapshot['name'])
except Exception:
with excutils.save_and_reraise_exception():
LOG.debug('Error thrown during creation of snapshot %s.',
snapshot['name'])
finally:
self.zfssa.delete_snapshot(lcfg.zfssa_nfs_pool,
lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share, snap_name)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
LOG.info(_LI('Deleting snapshot: %s'), snapshot['name'])
self.zfssa.delete_snapshot_of_volume_file(src_file=snapshot['name'])
def create_volume_from_snapshot(self, volume, snapshot, method='COPY'):
LOG.info(_LI('Creating volume from snapshot. volume: %s'),
volume['name'])
LOG.info(_LI('Source Snapshot: %s'), snapshot['name'])
self._ensure_shares_mounted()
self.zfssa.create_volume_from_snapshot_file(src_file=snapshot['name'],
dst_file=volume['name'],
method=method)
volume['provider_location'] = self.mount_path
if volume['size'] != snapshot['volume_size']:
try:
self.extend_volume(volume, volume['size'])
except Exception:
vol_path = self.local_path(volume)
with excutils.save_and_reraise_exception():
LOG.error(_LE('Error in extending volume size: Volume: '
'%(volume)s Vol_Size: %(vol_size)d with '
'Snapshot: %(snapshot)s Snap_Size: '
'%(snap_size)d'),
{'volume': volume['name'],
'vol_size': volume['size'],
'snapshot': snapshot['name'],
'snap_size': snapshot['volume_size']})
self._execute('rm', '-f', vol_path, run_as_root=True)
volume_origin = {'origin': snapshot['volume_name']}
self.zfssa.set_file_props(volume['name'], volume_origin)
return {'provider_location': volume['provider_location']}
def create_cloned_volume(self, volume, src_vref):
"""Creates a snapshot and then clones the snapshot into a volume."""
LOG.info(_LI('new cloned volume: %s'), volume['name'])
LOG.info(_LI('source volume for cloning: %s'), src_vref['name'])
snapshot = {'volume_name': src_vref['name'],
'volume_id': src_vref['id'],
'volume_size': src_vref['size'],
'name': self._create_snapshot_name()}
self.create_snapshot(snapshot)
return self.create_volume_from_snapshot(volume, snapshot,
method='MOVE')
def delete_volume(self, volume):
LOG.debug('Deleting volume %s.', volume['name'])
lcfg = self.configuration
try:
vol_props = self.zfssa.get_volume(volume['name'])
except exception.VolumeNotFound:
return
super(ZFSSANFSDriver, self).delete_volume(volume)
if vol_props['origin'].startswith(lcfg.zfssa_cache_directory):
LOG.info(_LI('Checking origin %(origin)s of volume %(volume)s.'),
{'origin': vol_props['origin'],
'volume': volume['name']})
self._check_origin(vol_props['origin'])
def clone_image(self, context, volume,
image_location, image_meta,
image_service):
"""Create a volume efficiently from an existing image.
Verify the image ID being used:
(1) If there is no existing cache volume, create one and transfer
image data to it. Take a snapshot.
(2) If a cache volume already exists, verify whether it is stale (the
image has been altered or updated since the cache was created). If so,
try to remove it, raising an exception if removal fails, then create a
new cache volume as in (1).
Clone a volume from the cache volume and returns it to Cinder.
"""
LOG.debug('Cloning image %(image)s to volume %(volume)s',
{'image': image_meta['id'], 'volume': volume['name']})
lcfg = self.configuration
if not lcfg.zfssa_enable_local_cache:
return None, False
# virtual_size is the image's actual size when stored in a volume
# virtual_size is expected to be updated manually through glance
try:
virtual_size = int(image_meta['properties'].get('virtual_size'))
except Exception:
LOG.error(_LE('virtual_size property is not set for the image.'))
return None, False
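# Round the image's virtual size up to whole GiB, since the cache volume
# (like any volume) is sized in GiB.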
cachevol_size = int(math.ceil(float(virtual_size) / units.Gi))
if cachevol_size > volume['size']:
LOG.error(_LE('Image size %(img_size)dGB is larger '
'than volume size %(vol_size)dGB.'),
{'img_size': cachevol_size,
'vol_size': volume['size']})
return None, False
cache_dir = '%s/' % lcfg.zfssa_cache_directory
updated_at = six.text_type(image_meta['updated_at'].isoformat())
cachevol_props = {
'name': '%sos-cache-vol-%s' % (cache_dir,
image_meta['id']),
'size': cachevol_size,
'updated_at': updated_at,
'image_id': image_meta['id'],
}
try:
cachevol_name = self._verify_cache_volume(context,
image_meta,
image_service,
cachevol_props)
# A cache volume should be ready by now
# Create a clone from the cache volume
cache_vol = {
'name': cachevol_name,
'size': cachevol_size,
'id': image_meta['id'],
}
clone_vol = self.create_cloned_volume(volume, cache_vol)
self._update_origin(volume['name'], cachevol_name)
except exception.VolumeBackendAPIException as exc:
LOG.error(_LE('Cannot clone image %(image)s to '
'volume %(volume)s. Error: %(error)s.'),
{'volume': volume['name'],
'image': image_meta['id'],
'error': exc.message})
return None, False
return clone_vol, True
@utils.synchronized('zfssanfs', external=True)
def _verify_cache_volume(self, context, img_meta,
img_service, cachevol_props):
"""Verify if we have a cache volume that we want.
If we don't, create one.
If we do, check if it's been updated:
* If so, delete it and recreate a new volume
* If not, we are good.
If it's out of date, delete it and create a new one.
After the function returns, there should be a cache volume available,
ready for cloning.
"""
cachevol_name = cachevol_props['name']
cache_vol = None
LOG.debug('Verifying cache volume %s:', cachevol_name)
try:
cache_vol = self.zfssa.get_volume(cachevol_name)
except exception.VolumeNotFound:
# There is no existing cache volume, create one:
LOG.debug('Cache volume not found. Creating one...')
return self._create_cache_volume(context,
img_meta,
img_service,
cachevol_props)
# A cache volume does exist, check if it's updated:
if ((cache_vol['updated_at'] != cachevol_props['updated_at']) or
(cache_vol['image_id'] != cachevol_props['image_id'])):
if cache_vol['numclones'] > 0:
# The cache volume is updated, but has clones
exception_msg = (_('Cannot delete '
'cache volume: %(cachevol_name)s. '
'It was updated at %(updated_at)s '
'and currently has %(numclones)d '
'volume instances.') %
{'cachevol_name': cachevol_name,
'updated_at': cachevol_props['updated_at'],
'numclones': cache_vol['numclones']})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
# The cache volume is updated, but has no clone, so we delete it
# and re-create a new one:
cache_vol = {
'provider_location': self.mount_path,
'name': cachevol_name,
}
self.delete_volume(cache_vol)
return self._create_cache_volume(context,
img_meta,
img_service,
cachevol_props)
return cachevol_name
def _create_cache_volume(self, context, img_meta,
img_service, cachevol_props):
"""Create a cache volume from an image.
Returns name of the cache volume.
"""
cache_vol = {
'provider_location': self.mount_path,
'size': cachevol_props['size'],
'name': cachevol_props['name'],
}
LOG.debug('Creating cache volume %s', cache_vol['name'])
try:
super(ZFSSANFSDriver, self).create_volume(cache_vol)
LOG.debug('Copying image data:')
super(ZFSSANFSDriver, self).copy_image_to_volume(context,
cache_vol,
img_service,
img_meta['id'])
except Exception as exc:
exc_msg = (_('Failed to create cache volume %(volume)s. '
'Error: %(err)s') %
{'volume': cache_vol['name'],
'err': six.text_type(exc)})
LOG.error(exc_msg)
self.zfssa.delete_file(cache_vol['name'])
raise exception.VolumeBackendAPIException(data=exc_msg)
cachevol_meta = {
'updated_at': cachevol_props['updated_at'],
'image_id': cachevol_props['image_id'],
}
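# A freshly created cache volume starts with zero clones; the counter is kept
# as a file property and adjusted by _update_origin and _check_origin.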
cachevol_meta.update({'numclones': '0'})
self.zfssa.set_file_props(cache_vol['name'], cachevol_meta)
return cache_vol['name']
def _create_snapshot_name(self):
"""Creates a snapshot name from the date and time."""
return ('cinder-zfssa-nfs-snapshot-%s' %
dt.datetime.utcnow().isoformat())
def _get_share_capacity_info(self):
"""Get available and used capacity info for the NFS share."""
lcfg = self.configuration
share_details = self.zfssa.get_share(lcfg.zfssa_nfs_pool,
lcfg.zfssa_nfs_project,
lcfg.zfssa_nfs_share)
free = share_details['space_available']
used = share_details['space_total']
return free, used
@utils.synchronized('zfssanfs', external=True)
def _check_origin(self, origin):
"""Verify the cache volume of a bootable volume.
If the cache no longer has clone, it will be deleted.
"""
cachevol_props = self.zfssa.get_volume(origin)
numclones = cachevol_props['numclones']
LOG.debug('Number of clones: %d', numclones)
if numclones <= 1:
# This cache vol does not have any other clone
self.zfssa.delete_file(origin)
else:
cachevol_props = {'numclones': six.text_type(numclones - 1)}
self.zfssa.set_file_props(origin, cachevol_props)
@utils.synchronized('zfssanfs', external=True)
def _update_origin(self, vol_name, cachevol_name):
"""Update WebDAV property of a volume.
WebDAV properties are used to keep track of:
(1) The number of clones of a cache volume.
(2) The cache volume name (origin) of a bootable volume.
To avoid race conditions when multiple volumes are created and need
to be updated, a file lock is used to ensure that the properties are
updated properly.
"""
volume_origin = {'origin': cachevol_name}
self.zfssa.set_file_props(vol_name, volume_origin)
cache_props = self.zfssa.get_volume(cachevol_name)
cache_props.update({'numclones':
six.text_type(cache_props['numclones'] + 1)})
self.zfssa.set_file_props(cachevol_name, cache_props)
def _update_volume_stats(self):
"""Get volume stats from zfssa"""
self._ensure_shares_mounted()
data = {}
lcfg = self.configuration
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.__class__.__name__
data['vendor_name'] = 'Oracle'
data['driver_version'] = self.VERSION
data['storage_protocol'] = self.protocol
asn = self.zfssa.get_asn()
data['location_info'] = '%s:%s' % (asn, lcfg.zfssa_nfs_share)
free, used = self._get_share_capacity_info()
capacity = float(free) + float(used)
ratio_used = used / capacity
data['QoS_support'] = False
data['reserved_percentage'] = 0
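# Once the share exceeds the configured usage thresholds, report the backend
# as fully reserved so the scheduler stops placing new volumes on it.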
if ratio_used > self.configuration.nfs_used_ratio or \
ratio_used >= self.configuration.nfs_oversub_ratio:
data['reserved_percentage'] = 100
data['total_capacity_gb'] = float(capacity) / units.Gi
data['free_capacity_gb'] = float(free) / units.Gi
self._stats = data
def migrate_volume(self, ctxt, volume, host):
LOG.debug('Attempting ZFSSA enabled volume migration. volume: %(id)s, '
'host: %(host)s, status=%(status)s',
{'id': volume['id'],
'host': host,
'status': volume['status']})
lcfg = self.configuration
default_ret = (False, None)
if volume['status'] != "available":
LOG.debug('Only available volumes can be migrated using backend '
'assisted migration. Defaulting to generic migration.')
return default_ret
if (host['capabilities']['vendor_name'] != 'Oracle' or
host['capabilities']['storage_protocol'] != self.protocol):
LOG.debug('Source and destination drivers need to be Oracle ZFSSA '
'NFS drivers to use backend assisted migration. Defaulting '
'to generic migration.')
return default_ret
if 'location_info' not in host['capabilities']:
LOG.debug('Could not find location_info in capabilities reported '
'by the destination driver. Defaulting to generic '
'migration.')
return default_ret
loc_info = host['capabilities']['location_info']
try:
(tgt_asn, tgt_share) = loc_info.split(':')
except ValueError:
LOG.error(_LE("Location info needed for backend enabled volume "
"migration not in correct format: %s. Continuing "
"with generic volume migration."), loc_info)
return default_ret
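# Backend assisted migration is only a no-op when source and destination are
# the same appliance (same ASN) and share; anything else falls back to
# generic migration.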
src_asn = self.zfssa.get_asn()
if tgt_asn == src_asn and lcfg.zfssa_nfs_share == tgt_share:
LOG.info(_LI('Source and destination ZFSSA shares are the same. '
'Do nothing. volume: %s'), volume['name'])
return (True, None)
return (False, None)
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status):
"""Return model update for migrated volume.
:param volume: The original volume that was migrated to this backend
:param new_volume: The migration volume object that was created on
this backend as part of the migration process
:param original_volume_status: The status of the original volume
:return model_update to update DB with any needed changes
"""
original_name = CONF.volume_name_template % volume['id']
current_name = CONF.volume_name_template % new_volume['id']
LOG.debug('Renaming migrated volume: %(cur)s to %(org)s.',
{'cur': current_name,
'org': original_name})
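# Rename the file created by the migration back to the original
# template-based name; method='MOVE' asks the appliance to move the file
# rather than copy it.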
self.zfssa.create_volume_from_snapshot_file(src_file=current_name,
dst_file=original_name,
method='MOVE')
provider_location = new_volume['provider_location']
return {'_name_id': None, 'provider_location': provider_location}