repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses 15 values)
---|---|---|---|---|---|
issyrichards/spartify2 | requests-master/requests/api.py | 160 | 5280 | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How long to wait for the server to send data
before giving up, as a float, or a (`connect timeout, read timeout
<user/advanced.html#timeouts>`_) tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
>>> req
<Response [200]>
"""
session = sessions.Session()
response = session.request(method=method, url=url, **kwargs)
# By explicitly closing the session, we avoid leaving sockets open which
# can trigger a ResourceWarning in some cases, and look like a memory leak
# in others.
session.close()
return response
def get(url, **kwargs):
"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs)
def options(url, **kwargs):
"""Sends a OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', True)
return request('options', url, **kwargs)
def head(url, **kwargs):
"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
def post(url, data=None, json=None, **kwargs):
"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('delete', url, **kwargs)
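if __name__ == '__main__':
    # Minimal sketch exercising the helpers above; illustrative only, and it
    # needs network access to httpbin.org (the service used in the docstring).
    r = get('http://httpbin.org/get', params={'q': 'demo'})
    print(r.status_code)  # expected: 200
    r = post('http://httpbin.org/post', json={'key': 'value'})
    print(r.status_code)  # expected: 200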
| mit |
aaronorosen/horizon-congress | openstack_dashboard/dashboards/project/stacks/tabs.py | 14 | 5069 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.utils.translation import ugettext_lazy as _
from horizon import messages
from horizon import tabs
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.dashboards.project.stacks \
import api as project_api
from openstack_dashboard.dashboards.project.stacks import mappings
from openstack_dashboard.dashboards.project.stacks \
import tables as project_tables
LOG = logging.getLogger(__name__)
class StackTopologyTab(tabs.Tab):
name = _("Topology")
slug = "topology"
template_name = "project/stacks/_detail_topology.html"
preload = False
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStacks"),
("orchestration", "cloudformation:ListStackResources"),),
request)
def get_context_data(self, request):
context = {}
stack = self.tab_group.kwargs['stack']
context['stack_id'] = stack.id
context['d3_data'] = project_api.d3_data(request, stack_id=stack.id)
return context
class StackOverviewTab(tabs.Tab):
name = _("Overview")
slug = "overview"
template_name = "project/stacks/_detail_overview.html"
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStacks"),),
request)
def get_context_data(self, request):
return {"stack": self.tab_group.kwargs['stack']}
class ResourceOverviewTab(tabs.Tab):
name = _("Overview")
slug = "resource_overview"
template_name = "project/stacks/_resource_overview.html"
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStackResource"),),
request)
def get_context_data(self, request):
resource = self.tab_group.kwargs['resource']
resource_url = mappings.resource_to_url(resource)
return {
"resource": resource,
"resource_url": resource_url,
"metadata": self.tab_group.kwargs['metadata']}
class StackEventsTab(tabs.Tab):
name = _("Events")
slug = "events"
template_name = "project/stacks/_detail_events.html"
preload = False
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:DescribeStackEvents"),),
request)
def get_context_data(self, request):
stack = self.tab_group.kwargs['stack']
try:
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
events = api.heat.events_list(self.request, stack_identifier)
LOG.debug('got events %s', events)
# The stack id is needed to generate the resource URL.
for event in events:
event.stack_id = stack.id
except Exception:
events = []
messages.error(request, _(
'Unable to get events for stack "%s".') % stack.stack_name)
return {"stack": stack,
"table": project_tables.EventsTable(request, data=events), }
class StackResourcesTab(tabs.Tab):
name = _("Resources")
slug = "resources"
template_name = "project/stacks/_detail_resources.html"
preload = False
def allowed(self, request):
return policy.check(
(("orchestration", "cloudformation:ListStackResources"),),
request)
def get_context_data(self, request):
stack = self.tab_group.kwargs['stack']
try:
stack_identifier = '%s/%s' % (stack.stack_name, stack.id)
resources = api.heat.resources_list(self.request, stack_identifier)
LOG.debug('got resources %s', resources)
# The stack id is needed to generate the resource URL.
for r in resources:
r.stack_id = stack.id
except Exception:
resources = []
messages.error(request, _(
'Unable to get resources for stack "%s".') % stack.stack_name)
return {"stack": stack,
"table": project_tables.ResourcesTable(
request, data=resources, stack=stack), }
class StackDetailTabs(tabs.TabGroup):
slug = "stack_details"
tabs = (StackTopologyTab, StackOverviewTab, StackResourcesTab,
StackEventsTab)
sticky = True
class ResourceDetailTabs(tabs.TabGroup):
slug = "resource_details"
tabs = (ResourceOverviewTab,)
sticky = True
| apache-2.0 |
zhengzhihust/tablib | tablib/packages/openpyxl/workbook.py | 116 | 6200 | # file openpyxl/workbook.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Workbook is the top-level container for all document information."""
__docformat__ = "restructuredtext en"
# Python stdlib imports
import datetime
import os
# package imports
from .worksheet import Worksheet
from .writer.dump_worksheet import DumpWorksheet, save_dump
from .writer.strings import StringTableBuilder
from .namedrange import NamedRange
from .style import Style
from .writer.excel import save_workbook
from .shared.exc import ReadOnlyWorkbookException
class DocumentProperties(object):
"""High-level properties of the document."""
def __init__(self):
self.creator = 'Unknown'
self.last_modified_by = self.creator
self.created = datetime.datetime.now()
self.modified = datetime.datetime.now()
self.title = 'Untitled'
self.subject = ''
self.description = ''
self.keywords = ''
self.category = ''
self.company = 'Microsoft Corporation'
class DocumentSecurity(object):
"""Security information about the document."""
def __init__(self):
self.lock_revision = False
self.lock_structure = False
self.lock_windows = False
self.revision_password = ''
self.workbook_password = ''
class Workbook(object):
"""Workbook is the container for all other parts of the document."""
def __init__(self, optimized_write = False):
self.worksheets = []
self._active_sheet_index = 0
self._named_ranges = []
self.properties = DocumentProperties()
self.style = Style()
self.security = DocumentSecurity()
self.__optimized_write = optimized_write
self.__optimized_read = False
self.strings_table_builder = StringTableBuilder()
if not optimized_write:
self.worksheets.append(Worksheet(self))
def _set_optimized_read(self):
self.__optimized_read = True
def get_active_sheet(self):
"""Returns the current active sheet."""
return self.worksheets[self._active_sheet_index]
def create_sheet(self, index = None):
"""Create a worksheet (at an optional index).
:param index: optional position at which the sheet will be inserted
:type index: int
"""
if self.__optimized_read:
raise ReadOnlyWorkbookException('Cannot create new sheet in a read-only workbook')
if self.__optimized_write :
new_ws = DumpWorksheet(parent_workbook = self)
else:
new_ws = Worksheet(parent_workbook = self)
self.add_sheet(worksheet = new_ws, index = index)
return new_ws
def add_sheet(self, worksheet, index = None):
"""Add an existing worksheet (at an optional index)."""
if index is None:
index = len(self.worksheets)
self.worksheets.insert(index, worksheet)
def remove_sheet(self, worksheet):
"""Remove a worksheet from this workbook."""
self.worksheets.remove(worksheet)
def get_sheet_by_name(self, name):
"""Returns a worksheet by its name.
Returns None if no worksheet has the name specified.
:param name: the name of the worksheet to look for
:type name: string
"""
requested_sheet = None
for sheet in self.worksheets:
if sheet.title == name:
requested_sheet = sheet
break
return requested_sheet
def get_index(self, worksheet):
"""Return the index of the worksheet."""
return self.worksheets.index(worksheet)
def get_sheet_names(self):
"""Returns the list of the names of worksheets in the workbook.
Names are returned in the worksheets order.
:rtype: list of strings
"""
return [s.title for s in self.worksheets]
def create_named_range(self, name, worksheet, range):
"""Create a new named_range on a worksheet"""
assert isinstance(worksheet, Worksheet)
named_range = NamedRange(name, [(worksheet, range)])
self.add_named_range(named_range)
def get_named_ranges(self):
"""Return all named ranges"""
return self._named_ranges
def add_named_range(self, named_range):
"""Add an existing named_range to the list of named_ranges."""
self._named_ranges.append(named_range)
def get_named_range(self, name):
"""Return the range specified by name."""
requested_range = None
for named_range in self._named_ranges:
if named_range.name == name:
requested_range = named_range
break
return requested_range
def remove_named_range(self, named_range):
"""Remove a named_range from this workbook."""
self._named_ranges.remove(named_range)
def save(self, filename):
""" shortcut """
if self.__optimized_write:
save_dump(self, filename)
else:
save_workbook(self, filename)
| mit |
jpshort/odoo | addons/portal_gamification/__openerp__.py | 381 | 1571 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Portal Gamification',
'version': '1',
'complexity': 'easy',
'description': """
This module adds security rules for gamification to allow portal users to participate in challenges
===================================================================================================
""",
'author': 'OpenERP SA',
'depends': ['gamification','portal'],
'data': [
'security/ir.model.access.csv',
'security/portal_security.xml',
],
'installable': True,
'auto_install': True,
'category': 'Hidden',
}
| agpl-3.0 |
MickSandoz/compassion-modules | __unported__/child_update_picture_date/wizard/update_child_picture_date.py | 5 | 1907 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: David Coninckx <[email protected]>
#
# The licence is in the file __openerp__.py
#
##############################################################################
import traceback
from openerp.osv import orm, fields
class update_child_picture_date(orm.TransientModel):
_name = 'update.child.picture.date'
def update(self, cr, uid, context=None):
count = 1
print('LAUNCH CHILD PICTURE UPDATE')
child_obj = self.pool.get('compassion.child')
child_ids = child_obj.search(
cr, uid, [('state', 'not in', ['F', 'X']),
('update_done', '=', False)], context=context)
total = str(len(child_ids))
for child in child_obj.browse(cr, uid, child_ids, context):
try:
print('Updating child {0}/{1}'.format(str(count), total))
child_obj.get_infos(cr, uid, child.id, context)
child.write({'update_done': True})
except Exception:
if child.state != 'E':
child.write({
'state': 'E',
'previous_state': child.state})
self.pool.get('mail.thread').message_post(
cr, uid, child.id,
traceback.format_exc(), 'Child update',
context={'thread_model': 'compassion.child'})
finally:
count += 1
cr.commit()
return True
class child_compassion(orm.Model):
_inherit = 'compassion.child'
_columns = {
'update_done': fields.boolean('update done')
}
| agpl-3.0 |
niegenug/wesnoth | scons/python_devel.py | 49 | 1381 | # vi: syntax=python:et:ts=4
import sys, os
from config_check_utils import backup_env, restore_env
import distutils.sysconfig
def exists():
return True
def PythonExtension(env, target, source, **kv):
return env.SharedLibrary(target, source, SHLIBPREFIX='', SHLIBSUFFIX=distutils.sysconfig.get_config_var("SO"), **kv)
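# Illustrative SConscript usage once generate() has registered the method
# (hypothetical target/source names):
#   env.PythonExtension('_spam', ['spam.c'])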
def generate(env):
env.AddMethod(PythonExtension)
def CheckPython(context):
env = context.env
backup = backup_env(env, ["CPPPATH", "LIBPATH", "LIBS"])
context.Message("Checking for Python... ")
env.AppendUnique(CPPPATH = distutils.sysconfig.get_python_inc())
version = distutils.sysconfig.get_config_var("VERSION")
if not version:
version = sys.version[:3]
if env["PLATFORM"] == "win32":
version = version.replace('.', '')
env.AppendUnique(LIBPATH = distutils.sysconfig.get_config_var("LIBDIR") or \
os.path.join(distutils.sysconfig.get_config_var("prefix"), "libs") )
env.AppendUnique(LIBS = "python" + version)
test_program = """
#include <Python.h>
int main()
{
Py_Initialize();
}
\n"""
if context.TryLink(test_program, ".c"):
context.Result("yes")
return True
else:
context.Result("no")
restore_env(context.env, backup)
return False
config_checks = { "CheckPython" : CheckPython }
| gpl-2.0 |
adam111316/SickGear | lib/rtorrent/compat.py | 180 | 1258 | # Copyright (c) 2013 Chris Lucas, <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import sys
def is_py3():
return sys.version_info[0] == 3
if is_py3():
import xmlrpc.client as xmlrpclib
else:
import xmlrpclib
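# Illustrative use elsewhere in the package: `from rtorrent.compat import
# xmlrpclib` gives callers the same XML-RPC client name on Python 2 and 3.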
| gpl-3.0 |
zubair-arbi/edx-platform | lms/djangoapps/debug/views.py | 119 | 2136 | """Views for debugging and diagnostics"""
import pprint
import traceback
from django.http import Http404, HttpResponse, HttpResponseNotFound
from django.contrib.auth.decorators import login_required
from django.utils.html import escape
from django.views.decorators.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response
from codejail.safe_exec import safe_exec
from mako.exceptions import TopLevelLookupException
@login_required
@ensure_csrf_cookie
def run_python(request):
"""A page to allow testing the Python sandbox on a production server."""
if not request.user.is_staff:
raise Http404
c = {}
c['code'] = ''
c['results'] = None
if request.method == 'POST':
py_code = c['code'] = request.POST.get('code')
g = {}
try:
safe_exec(py_code, g)
except Exception:
c['results'] = traceback.format_exc()
else:
c['results'] = pprint.pformat(g)
return render_to_response("debug/run_python_form.html", c)
@login_required
def show_parameters(request):
"""A page that shows what parameters were on the URL and post."""
html = []
for name, value in sorted(request.GET.items()):
html.append(escape("GET {}: {!r}".format(name, value)))
for name, value in sorted(request.POST.items()):
html.append(escape("POST {}: {!r}".format(name, value)))
return HttpResponse("\n".join("<p>{}</p>".format(h) for h in html))
def show_reference_template(request, template):
"""
Shows the specified template as an HTML page. This is used only in debug mode to allow the UX team
to produce and work with static reference templates.
e.g. /template/ux/reference/container.html shows the template under ux/reference/container.html
Note: dynamic parameters can also be passed to the page.
e.g. /template/ux/reference/container.html?name=Foo
"""
try:
return render_to_response(template, request.GET.dict())
except TopLevelLookupException:
return HttpResponseNotFound("Couldn't find template {template}".format(template=template))
| agpl-3.0 |
rue89-tech/edx-analytics-pipeline | edx/analytics/tasks/tests/test_student_engagement.py | 3 | 15248 | """Test student engagement metrics"""
import json
import luigi
from ddt import ddt, data, unpack
from edx.analytics.tasks.student_engagement import StudentEngagementTask, SUBSECTION_VIEWED_MARKER
from edx.analytics.tasks.tests import unittest
from edx.analytics.tasks.tests.opaque_key_mixins import InitializeOpaqueKeysMixin, InitializeLegacyKeysMixin
from edx.analytics.tasks.tests.map_reduce_mixins import MapperTestMixin, ReducerTestMixin
class BaseStudentEngagementTaskMapTest(InitializeOpaqueKeysMixin, MapperTestMixin, unittest.TestCase):
"""Base class for test analysis of detailed student engagement"""
DEFAULT_USER_ID = 10
DEFAULT_TIMESTAMP = "2013-12-17T15:38:32.805444"
DEFAULT_DATE = "2013-12-17"
def setUp(self):
super(BaseStudentEngagementTaskMapTest, self).setUp()
self.initialize_ids()
self.video_id = 'i4x-foo-bar-baz'
self.event_templates = {
'play_video': {
"username": "test_user",
"host": "test_host",
"event_source": "browser",
"event_type": "play_video",
"context": {
"course_id": self.course_id,
"org_id": self.org_id,
"user_id": self.DEFAULT_USER_ID,
},
"time": "{0}+00:00".format(self.DEFAULT_TIMESTAMP),
"ip": "127.0.0.1",
"event": '{"id": "%s", "currentTime": "23.4398", "code": "87389iouhdfh"}' % self.video_id,
"agent": "blah, blah, blah",
"page": None
},
'problem_check': {
"username": "test_user",
"host": "test_host",
"event_source": "server",
"event_type": "problem_check",
"context": {
"course_id": self.course_id,
"org_id": self.org_id,
"user_id": self.DEFAULT_USER_ID,
},
"time": "{0}+00:00".format(self.DEFAULT_TIMESTAMP),
"ip": "127.0.0.1",
"event": {
"problem_id": self.problem_id,
"success": "incorrect",
},
"agent": "blah, blah, blah",
"page": None
}
}
self.default_event_template = 'problem_check'
self.default_key = (self.DEFAULT_DATE, self.course_id, 'test_user')
def create_task(self, interval=None, interval_type=None):
"""Allow arguments to be passed to the task constructor."""
if not interval:
interval = self.DEFAULT_DATE
self.task = StudentEngagementTask(
interval=luigi.DateIntervalParameter().parse(interval),
output_root='/fake/output',
interval_type=interval_type,
)
self.task.init_local()
def assert_date_mappings(self, expected_end_date, actual_event_date):
"""Asserts that an event_date is mapped to the expected date in the key."""
self.assert_single_map_output(
self.create_event_log_line(time="{}T15:38:32.805444".format(actual_event_date)),
(expected_end_date, self.course_id, 'test_user'),
(self.problem_id, 'problem_check', '{}', actual_event_date)
)
@ddt
class StudentEngagementTaskMapTest(BaseStudentEngagementTaskMapTest):
"""Test analysis of detailed student engagement"""
def setUp(self):
super(StudentEngagementTaskMapTest, self).setUp()
self.create_task()
@data(
{'time': "2013-12-01T15:38:32.805444"},
{'username': ''},
{'event_type': None},
{'context': {'course_id': 'lskdjfslkdj'}},
{'event': 'sdfasdf'}
)
def test_invalid_events(self, kwargs):
self.assert_no_map_output_for(self.create_event_log_line(**kwargs))
def test_browser_problem_check_event(self):
template = self.event_templates['problem_check']
self.assert_no_map_output_for(self.create_event_log_line(template=template, event_source='browser'))
def test_incorrect_problem_check(self):
self.assert_single_map_output(
json.dumps(self.event_templates['problem_check']),
self.default_key,
(self.problem_id, 'problem_check', '{}', self.DEFAULT_DATE)
)
def test_correct_problem_check(self):
template = self.event_templates['problem_check']
template['event']['success'] = 'correct'
self.assert_single_map_output(
json.dumps(template),
self.default_key,
(self.problem_id, 'problem_check', json.dumps({'correct': True}), self.DEFAULT_DATE)
)
def test_missing_problem_id(self):
template = self.event_templates['problem_check']
del template['event']['problem_id']
self.assert_no_map_output_for(self.create_event_log_line(template=template))
def test_missing_video_id(self):
template = self.event_templates['play_video']
template['event'] = '{"currentTime": "23.4398", "code": "87389iouhdfh"}'
self.assert_no_map_output_for(self.create_event_log_line(template=template))
def test_play_video(self):
self.assert_single_map_output(
json.dumps(self.event_templates['play_video']),
self.default_key,
(self.video_id, 'play_video', '{}', self.DEFAULT_DATE)
)
def test_implicit_event(self):
self.assert_single_map_output(
self.create_event_log_line(event_type='/jsi18n/', event_source='server'),
self.default_key,
('', '/jsi18n/', '{}', self.DEFAULT_DATE)
)
def test_course_event(self):
self.assert_single_map_output(
self.create_event_log_line(event_type='/courses/foo/bar/', event_source='server'),
self.default_key,
('', '/courses/foo/bar/', '{}', self.DEFAULT_DATE)
)
def test_section_view_event(self):
event_type = '/courses/{0}/courseware/foo/'.format(self.course_id)
self.assert_single_map_output(
self.create_event_log_line(event_type=event_type, event_source='server'),
self.default_key,
('', event_type, '{}', self.DEFAULT_DATE)
)
def test_subsection_event(self):
self.assert_last_subsection_viewed_recognized('foo/bar/')
def assert_last_subsection_viewed_recognized(self, end_of_path):
"""Assert that given a path ending the event is recognized as a subsection view"""
event_type = '/courses/{0}/courseware/{1}'.format(self.course_id, end_of_path)
self.assert_single_map_output(
self.create_event_log_line(event_type=event_type, event_source='server'),
self.default_key,
('', 'marker:last_subsection_viewed', json.dumps({
'path': event_type,
'timestamp': self.DEFAULT_TIMESTAMP,
}), self.DEFAULT_DATE)
)
def test_subsection_sequence_num_event(self):
self.assert_last_subsection_viewed_recognized('foo/bar/10')
def test_subsection_jquery_event(self):
self.assert_last_subsection_viewed_recognized('foo/bar/jquery.js')
@ddt
class WeeklyStudentEngagementTaskMapTest(BaseStudentEngagementTaskMapTest):
"""Test mapping of dates to weekly intervals in student engagement."""
INTERVAL_START = "2013-11-01"
INTERVAL_END = "2014-01-02"
def setUp(self):
super(WeeklyStudentEngagementTaskMapTest, self).setUp()
interval = "{}-{}".format(self.INTERVAL_START, self.INTERVAL_END)
self.create_task(interval=interval, interval_type="weekly")
@data(
("2014-01-01", "2014-01-01"),
("2013-12-25", "2013-12-25"),
("2014-01-01", "2013-12-27"),
("2013-12-25", "2013-12-23"),
)
@unpack
def test_date_mappings(self, expected_end_date, actual_event_date):
self.assert_date_mappings(expected_end_date, actual_event_date)
@ddt
class AllStudentEngagementTaskMapTest(BaseStudentEngagementTaskMapTest):
"""Test mapping of dates to overall interval in student engagement."""
INTERVAL_START = "2013-11-01"
INTERVAL_END = "2014-01-02"
def setUp(self):
super(AllStudentEngagementTaskMapTest, self).setUp()
interval = "{}-{}".format(self.INTERVAL_START, self.INTERVAL_END)
self.create_task(interval=interval, interval_type="all")
@data(
("2014-01-01", "2014-01-01"),
("2014-01-01", "2013-12-25"),
("2014-01-01", "2013-12-27"),
("2014-01-01", "2013-12-23"),
)
@unpack
def test_date_mappings(self, expected_end_date, actual_event_date):
self.assert_date_mappings(expected_end_date, actual_event_date)
class StudentEngagementTaskLegacyMapTest(InitializeLegacyKeysMixin, StudentEngagementTaskMapTest):
"""Test analysis of detailed student engagement using legacy ID formats"""
pass
@ddt
class StudentEngagementTaskReducerTest(ReducerTestMixin, unittest.TestCase):
"""
Tests to verify that engagement data is reduced properly
"""
task_class = StudentEngagementTask
WAS_ACTIVE_COLUMN = 3
PROBLEMS_ATTEMPTED_COLUMN = 4
PROBLEM_ATTEMPTS_COLUMN = 5
PROBLEMS_CORRECT_COLUMN = 6
VIDEOS_PLAYED_COLUMN = 7
FORUM_POSTS_COLUMN = 8
FORUM_REPLIES_COLUMN = 9
FORUM_COMMENTS_COLUMN = 10
TEXTBOOK_PAGES_COLUMN = 11
LAST_SUBSECTION_COLUMN = 12
def setUp(self):
super(StudentEngagementTaskReducerTest, self).setUp()
self.reduce_key = (self.DATE, self.COURSE_ID, self.USERNAME)
def test_any_activity(self):
inputs = [
('', '/foo', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 0,
self.PROBLEM_ATTEMPTS_COLUMN: 0,
self.PROBLEMS_CORRECT_COLUMN: 0,
self.VIDEOS_PLAYED_COLUMN: 0,
self.FORUM_POSTS_COLUMN: 0,
self.FORUM_REPLIES_COLUMN: 0,
self.FORUM_COMMENTS_COLUMN: 0,
self.TEXTBOOK_PAGES_COLUMN: 0,
self.LAST_SUBSECTION_COLUMN: '',
})
def test_single_problem_attempted(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 1,
self.PROBLEM_ATTEMPTS_COLUMN: 1,
self.PROBLEMS_CORRECT_COLUMN: 1,
})
def test_single_problem_attempted_incorrect(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 1,
self.PROBLEM_ATTEMPTS_COLUMN: 1,
self.PROBLEMS_CORRECT_COLUMN: 0,
})
def test_single_problem_attempted_multiple_events(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz', 'problem_check', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 1,
self.PROBLEM_ATTEMPTS_COLUMN: 3,
self.PROBLEMS_CORRECT_COLUMN: 1,
})
def test_multiple_problems_attempted(self):
inputs = [
('i4x://foo/bar/baz', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz2', 'problem_check', json.dumps({'correct': True}), self.DATE),
('i4x://foo/bar/baz', 'problem_check', '{}', self.DATE)
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.PROBLEMS_ATTEMPTED_COLUMN: 2,
self.PROBLEM_ATTEMPTS_COLUMN: 3,
self.PROBLEMS_CORRECT_COLUMN: 2,
})
def test_single_video_played(self):
inputs = [
('foobarbaz', 'play_video', '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.VIDEOS_PLAYED_COLUMN: 1,
})
def test_multiple_video_plays_same_video(self):
inputs = [
('foobarbaz', 'play_video', '{}', self.DATE),
('foobarbaz', 'play_video', '{}', self.DATE),
('foobarbaz', 'play_video', '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.VIDEOS_PLAYED_COLUMN: 1,
})
def test_other_video_events(self):
inputs = [
('foobarbaz', 'pause_video', '{}', self.DATE),
('foobarbaz2', 'seek_video', '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
self.VIDEOS_PLAYED_COLUMN: 0,
})
@data(
('edx.forum.thread.created', FORUM_POSTS_COLUMN),
('edx.forum.response.created', FORUM_REPLIES_COLUMN),
('edx.forum.comment.created', FORUM_COMMENTS_COLUMN),
('book', TEXTBOOK_PAGES_COLUMN),
)
@unpack
def test_count_events(self, event_type, column_num):
inputs = [
('', event_type, '{}', self.DATE),
]
self._check_output_by_key(inputs, {
self.WAS_ACTIVE_COLUMN: 1,
column_num: 1,
})
@data(
('edx.forum.thread.created', FORUM_POSTS_COLUMN),
('edx.forum.response.created', FORUM_REPLIES_COLUMN),
('edx.forum.comment.created', FORUM_COMMENTS_COLUMN),
('book', TEXTBOOK_PAGES_COLUMN),
)
@unpack
def test_multiple_counted_events(self, event_type, column_num):
inputs = [
('', event_type, '{}', self.DATE),
('', event_type, '{}', self.DATE),
]
self._check_output_by_key(inputs, {
column_num: 2,
})
def test_last_subsection(self):
inputs = [
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'foobar',
'timestamp': '2014-12-01T00:00:00.000000',
}), self.DATE),
]
self._check_output_by_key(inputs, {
self.LAST_SUBSECTION_COLUMN: 'foobar',
})
def test_multiple_subsection_views(self):
inputs = [
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'finalpath',
'timestamp': '2014-12-01T00:00:04.000000',
}), self.DATE),
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'foobar',
'timestamp': '2014-12-01T00:00:00.000000',
}), self.DATE),
('', SUBSECTION_VIEWED_MARKER, json.dumps({
'path': 'foobar1',
'timestamp': '2014-12-01T00:00:03.000000',
}), self.DATE),
]
self._check_output_by_key(inputs, {
self.LAST_SUBSECTION_COLUMN: 'finalpath',
})
| agpl-3.0 |
wscullin/spack | var/spack/repos/builtin/packages/fontcacheproto/package.py | 3 | 1563 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Fontcacheproto(AutotoolsPackage):
"""X.org FontcacheProto protocol headers."""
homepage = "http://cgit.freedesktop.org/xorg/proto/fontcacheproto"
url = "https://www.x.org/archive/individual/proto/fontcacheproto-0.1.3.tar.gz"
version('0.1.3', '5a91ab914ffbfbc856e6fcde52e6f3e3')
| lgpl-2.1 |
BT-fgarbely/odoo | addons/stock/report/stock_graph.py | 326 | 4514 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from pychart import *
import pychart.legend
import time
from openerp.report.misc import choice_colors
from openerp import tools
#
# Draw a graph for stocks
#
class stock_graph(object):
def __init__(self, io):
self._datas = {}
self._canvas = canvas.init(fname=io, format='pdf')
self._canvas.set_author("Odoo")
self._canvas.set_title("Stock Level Forecast")
self._names = {}
self.val_min = ''
self.val_max = ''
def add(self, product_id, product_name, datas):
if hasattr(product_name, 'replace'):
product_name=product_name.replace('/', '//')
if product_id not in self._datas:
self._datas[product_id] = {}
self._names[product_id] = tools.ustr(product_name)
for (dt,stock) in datas:
if not dt in self._datas[product_id]:
self._datas[product_id][dt]=0
self._datas[product_id][dt]+=stock
if self.val_min:
self.val_min = min(self.val_min,dt)
else:
self.val_min = dt
self.val_max = max(self.val_max,dt)
def draw(self):
colors = choice_colors(len(self._datas.keys()))
user_color = {}
for user in self._datas.keys():
user_color[user] = colors.pop()
val_min = int(time.mktime(time.strptime(self.val_min,'%Y-%m-%d')))
val_max = int(time.mktime(time.strptime(self.val_max,'%Y-%m-%d')))
plots = []
for product_id in self._datas:
f = fill_style.Plain()
f.bgcolor = user_color[product_id]
datas = self._datas[product_id].items()
datas = map(lambda x: (int(time.mktime(time.strptime(x[0],'%Y-%m-%d'))),x[1]), datas)
datas.sort()
datas2 = []
val = 0
for d in datas:
val+=d[1]
if len(datas2):
d2 = d[0]-60*61*24
if datas2[-1][0]<d2-1000:
datas2.append((d2,datas2[-1][1]))
datas2.append((d[0],val))
if len(datas2) and datas2[-1][0]<val_max-100:
datas2.append((val_max, datas2[-1][1]))
if len(datas2)==1:
datas2.append( (datas2[0][0]+100, datas2[0][1]) )
st = line_style.T()
st.color = user_color[product_id]
st.width = 1
st.cap_style=1
st.join_style=1
plot = line_plot.T(label=self._names[product_id], data=datas2, line_style=st)
plots.append(plot)
interval = max((val_max-val_min)/15, 86400)
x_axis = axis.X(format=lambda x:'/a60{}'+time.strftime('%Y-%m-%d',time.gmtime(x)), tic_interval=interval, label=None)
# For add the report header on the top of the report.
tb = text_box.T(loc=(300, 500), text="/hL/15/bStock Level Forecast", line_style=None)
tb.draw()
ar = area.T(size = (620,435), x_range=(val_min,val_max+1), y_axis = axis.Y(format="%d", label="Virtual Stock (Unit)"), x_axis=x_axis)
for plot in plots:
ar.add_plot(plot)
ar.draw(self._canvas)
def close(self):
self._canvas.close()
if __name__ == '__main__':
gt = stock_graph('test.pdf')
gt.add(1, 'Pomme', [('2005-07-29', 6), ('2005-07-30', -2), ('2005-07-31', 4)])
gt.add(2, 'Cailloux', [('2005-07-29', 9), ('2005-07-30', -4), ('2005-07-31', 2)])
gt.draw()
gt.close()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hdknr/paloma | example/app/workers.py | 1 | 1671 | import os
import sys
#
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "app.settings")
APP_DIR=os.path.dirname(__file__)
LOG_FILE="/tmp/paloma.log" #: celery worker logfile
PID_FILE="/tmp/paloma.pid" #: celery worker PID file
PID_CAM="/tmp/paloma.pid"
NODE="celery" #: celery = default node
LOG_LEVEL="DEBUG" #: celery log level
def configure(*args):
''' Return django-celery parameters for the specified args
- args[0] : paloma_worker.py
- args[1] : path this django project application
- args[2] : command
'''
if len(args) < 3 or args[2] == "start" :
#: start worker
#: TODO: Check some existing process
return [
"celery","worker",
"--loglevel=%s" % LOG_LEVEL,
"--pidfile=%s" % PID_FILE,
"--logfile=%s" % LOG_FILE ,
"-E", # event option for celerycam
"--beat" ,
"--scheduler=djcelery.schedulers.DatabaseScheduler",
]
if len(args) >2 and args[2] == "stop":
#: stop worker
return [
"celery","multi",
"stop",NODE,
"--pidfile=%s" % PID_FILE,
]
if len(args) >2 and args[2] == "cam":
#: TODO: Check some existing process
return [
"celerycam",
"--pidfile=%s" % PID_CAM,
]
if len(args) >2 and args[2] == "camstop":
#: TODO: Check some existing process
return [
"celery","multi",
"stop",NODE,
"--pidfile=%s" % PID_CAM,
]
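# Illustrative mapping (values follow the constants above):
#   configure('paloma_worker.py', 'app', 'stop')
#   -> ['celery', 'multi', 'stop', 'celery', '--pidfile=/tmp/paloma.pid']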
| bsd-2-clause |
WalterPaixaoCortes/Reusables | labio/labio/argParseWrapper.py | 1 | 5984 | """
Purpose
The purpose of the argParseWrapper module is to create an easy way to use the native argparse module from the Python standard library
in order to parse command-line arguments.
Description
It contains a simple wrapper class for the argparse.Action class, which adds the action attribute and a return_args method
that will return the command line arguments and options parsed and ready to be used.
Dependencies
argparse, labio.configWrapper.
"""
import argparse
#---------------------------------------------------------------------------------------
# [history]
# [15/03/2014 - walter.paixao-cortes] - First version
# [19/03/2014 - walter.paixao-cortes] - Adding comments to generate the documentation.
#---------------------------------------------------------------------------------------
class CustomAction(argparse.Action):
"""
Wrapper class for argparse.Action class.
Adds the action attribute to the object, which is missing from the class.
"""
action = None
"""The action attribute."""
#---------------------------------------------------------------------------------------
# [history]
# [15/03/2014 - walter.paixao-cortes] - First version
# [19/03/2014 - walter.paixao-cortes] - Adding comments to generate the documentation.
#---------------------------------------------------------------------------------------
def return_args(arguments):
"""
Purpose
Parse the arguments from command line, based on a json dictionary.
Description
The method receives and iterates through the arguments dictionary,
creating an instance of :class:`labio.argParseWrapper.CustomAction` for
each argument, that will be added to the parser collection.
Parameter
arguments - a dictionary of json objects describing the options.
Returns
Dynamic class with attributes as the keys of each json object in dictionary
and the values captured from the command line as values.
Json structure
The json structure that represents each argument is as follows:
::
{
short: string - Represents the short version of an optional parameter (e.g. -f).
The string "None" is used when it is an argument, not an optional parameter.
long: string - Represents the short version of an optional parameter (e.g. -file).
The string "None" is used when it is an argument, not an optional parameter.
dest: string - the attribute that will receive the value of the optional parameter.
help: string - The explanation that will be displayed for this optional parameter
when the command line is executed with the ``--help`` option.
metavar: string - The explanation that will be displayed for this argument
when the command line is executed with the ``--help`` option.
type: string - The type of data for this optional parameter or argument (str, int, ...).
action: string - The action that will be executed. See more detail in the argparse documentation.
nargs: string - The number of arguments that an optional parameter should have.
? means 0 or more
1..n means the number of arguments
default: string - The default value when the optional parameter does not have a value set.
const: string - The constant value when the optional parameter does not have a value set.
choices: list - The choices that are valid for an optional argument.
}
"""
#Initializing variables
optItem = None
isOptionCorrect = False
parser = argparse.ArgumentParser()
#iterate through the dictionary, filling an instance of CustomAction and adding to the parser
for item in arguments:
if arguments[item].has_key('short') and arguments[item].has_key('long') and arguments[item].has_key('dest'):
optItem = CustomAction([arguments[item]['short'],arguments[item]['long']],dest=arguments[item]['dest'])
isOptionCorrect = True
if arguments[item].has_key('dest') and isOptionCorrect:
optItem.dest = arguments[item]['dest']
if arguments[item].has_key('action') and isOptionCorrect:
optItem.action = arguments[item]['action']
if arguments[item].has_key('type') and isOptionCorrect:
optItem.type = eval(arguments[item]['type'])
if arguments[item].has_key('nargs') and isOptionCorrect:
optItem.nargs = eval(arguments[item]['nargs'])
else:
optItem.nargs='?'
if arguments[item].has_key('help') and isOptionCorrect:
optItem.help = arguments[item]['help']
if arguments[item].has_key('metavar') and isOptionCorrect:
optItem.metavar = arguments[item]['metavar']
if arguments[item].has_key('default') and isOptionCorrect:
optItem.default = eval(arguments[item]['default'])
if arguments[item].has_key('const') and isOptionCorrect:
optItem.const = eval(arguments[item]['const'])
if arguments[item].has_key('choices') and isOptionCorrect:
optItem.choices = eval(arguments[item]['choices'])
#Add to the parser with different parameters depending if it is an argument or optional parameter
if optItem.option_strings[0] == u'None':
parser.add_argument(optItem.metavar, action=optItem.action, type=optItem.type, nargs=optItem.nargs, help=optItem.help, metavar=optItem.metavar, default=optItem.default, choices=optItem.choices)
else:
if optItem.action is None:
parser.add_argument(optItem.option_strings[0],optItem.option_strings[1], dest=optItem.dest, action=optItem.action, type=optItem.type, nargs=optItem.nargs, help=optItem.help, metavar=optItem.metavar, default=optItem.default, choices=optItem.choices)
else:
parser.add_argument(optItem.option_strings[0],optItem.option_strings[1], dest=optItem.dest, action=optItem.action, help=optItem.help, default=optItem.default)
#Parse the arguments coming from command line and returns a dynamic class
#with the keys of the json objects as attributes.
options = parser.parse_args()
return options
| gpl-2.0 |
nabsboss/CouchPotatoServer | libs/elixir/collection.py | 27 | 4457 | '''
Default entity collection implementation
'''
import sys
import re
class BaseCollection(list):
def __init__(self, entities=None):
list.__init__(self)
if entities is not None:
self.extend(entities)
def extend(self, entities):
for e in entities:
self.append(e)
def clear(self):
del self[:]
def resolve_absolute(self, key, full_path, entity=None, root=None):
if root is None:
root = entity._descriptor.resolve_root
if root:
full_path = '%s.%s' % (root, full_path)
module_path, classname = full_path.rsplit('.', 1)
module = sys.modules[module_path]
res = getattr(module, classname, None)
if res is None:
if entity is not None:
raise Exception("Couldn't resolve target '%s' <%s> in '%s'!"
% (key, full_path, entity.__name__))
else:
raise Exception("Couldn't resolve target '%s' <%s>!"
% (key, full_path))
return res
def __getattr__(self, key):
return self.resolve(key)
# default entity collection
class GlobalEntityCollection(BaseCollection):
def __init__(self, entities=None):
# _entities is a dict of entities keyed on their name.
self._entities = {}
super(GlobalEntityCollection, self).__init__(entities)
def append(self, entity):
'''
Add an entity to the collection.
'''
super(GlobalEntityCollection, self).append(entity)
existing_entities = self._entities.setdefault(entity.__name__, [])
existing_entities.append(entity)
def resolve(self, key, entity=None):
'''
Resolve a key to an Entity. The optional `entity` argument is the
"source" entity when resolving relationship targets.
'''
# Do we have a fully qualified entity name?
if '.' in key:
return self.resolve_absolute(key, key, entity)
else:
# Otherwise we look in the entities of this collection
res = self._entities.get(key, None)
if res is None:
if entity:
raise Exception("Couldn't resolve target '%s' in '%s'"
% (key, entity.__name__))
else:
raise Exception("This collection does not contain any "
"entity corresponding to the key '%s'!"
% key)
elif len(res) > 1:
raise Exception("'%s' resolves to several entities, you should"
" use the full path (including the full module"
" name) to that entity." % key)
else:
return res[0]
def clear(self):
self._entities = {}
super(GlobalEntityCollection, self).clear()
# backward compatible name
EntityCollection = GlobalEntityCollection
_leading_dots = re.compile('^([.]*).*$')
class RelativeEntityCollection(BaseCollection):
# the entity=None does not make any sense with a relative entity collection
def resolve(self, key, entity):
'''
Resolve a key to an Entity. The optional `entity` argument is the
"source" entity when resolving relationship targets.
'''
full_path = key
if '.' not in key or key.startswith('.'):
# relative target
# any leading dot is stripped and with each dot removed,
# the entity_module is stripped of one more chunk (starting with
# the last one).
num_dots = _leading_dots.match(full_path).end(1)
full_path = full_path[num_dots:]
chunks = entity.__module__.split('.')
chunkstokeep = len(chunks) - num_dots
if chunkstokeep < 0:
raise Exception("Couldn't resolve relative target "
"'%s' relative to '%s'" % (key, entity.__module__))
entity_module = '.'.join(chunks[:chunkstokeep])
if entity_module and entity_module != '__main__':
full_path = '%s.%s' % (entity_module, full_path)
root = ''
else:
root = None
return self.resolve_absolute(key, full_path, entity, root=root)
def __getattr__(self, key):
raise NotImplementedError
| gpl-3.0 |
pitch-sands/i-MPI | flask/Lib/site-packages/pip-1.5.6-py2.7.egg/pip/log.py | 344 | 9455 | """Logging
"""
import sys
import os
import logging
from pip import backwardcompat
from pip._vendor import colorama, pkg_resources
def _color_wrap(*colors):
def wrapped(inp):
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
return wrapped
def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32)
else consumer.wrapped)
# If consumer isn't stdout or stderr we shouldn't colorize it
if real_consumer not in std:
return False
# If consumer is a tty we should color it
if hasattr(real_consumer, "isatty") and real_consumer.isatty():
return True
# If we have an ASNI term we should color it
if environ.get("TERM") == "ANSI":
return True
# If anything else we should not color it
return False
def should_warn(current_version, removal_version):
# Our Significant digits on versions is 2, so remove everything but the
# first two places.
current_version = ".".join(current_version.split(".")[:2])
removal_version = ".".join(removal_version.split(".")[:2])
# Our warning threshold is one minor version before removal, so we
# decrement the minor version by one
major, minor = removal_version.split(".")
minor = str(int(minor) - 1)
warn_version = ".".join([major, minor])
# Test if our current_version should be a warn
return (pkg_resources.parse_version(current_version)
< pkg_resources.parse_version(warn_version))
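# e.g. with removal_version "1.7" the warn threshold is "1.6", so
# should_warn("1.5.6", "1.7") is True and should_warn("1.6.0", "1.7") is False.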
class Logger(object):
"""
Logging object for use in command-line script. Allows ranges of
levels, to avoid some redundancy of displayed information.
"""
VERBOSE_DEBUG = logging.DEBUG - 1
DEBUG = logging.DEBUG
INFO = logging.INFO
NOTIFY = (logging.INFO + logging.WARN) / 2
WARN = WARNING = logging.WARN
ERROR = logging.ERROR
FATAL = logging.FATAL
LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
COLORS = {
WARN: _color_wrap(colorama.Fore.YELLOW),
ERROR: _color_wrap(colorama.Fore.RED),
FATAL: _color_wrap(colorama.Fore.RED),
}
def __init__(self):
self.consumers = []
self.indent = 0
self.explicit_levels = False
self.in_progress = None
self.in_progress_hanging = False
def add_consumers(self, *consumers):
if sys.platform.startswith("win"):
for level, consumer in consumers:
if hasattr(consumer, "write"):
self.consumers.append(
(level, colorama.AnsiToWin32(consumer)),
)
else:
self.consumers.append((level, consumer))
else:
self.consumers.extend(consumers)
def debug(self, msg, *args, **kw):
self.log(self.DEBUG, msg, *args, **kw)
def info(self, msg, *args, **kw):
self.log(self.INFO, msg, *args, **kw)
def notify(self, msg, *args, **kw):
self.log(self.NOTIFY, msg, *args, **kw)
def warn(self, msg, *args, **kw):
self.log(self.WARN, msg, *args, **kw)
def error(self, msg, *args, **kw):
self.log(self.ERROR, msg, *args, **kw)
def fatal(self, msg, *args, **kw):
self.log(self.FATAL, msg, *args, **kw)
def deprecated(self, removal_version, msg, *args, **kwargs):
"""
Logs deprecation message which is log level WARN if the
``removal_version`` is > 1 minor release away and log level ERROR
otherwise.
removal_version should be the version that the deprecated feature is
expected to be removed in, so something that will not exist in
version 1.7, but will in 1.6 would have a removal_version of 1.7.
"""
from pip import __version__
if should_warn(__version__, removal_version):
self.warn(msg, *args, **kwargs)
else:
self.error(msg, *args, **kwargs)
def log(self, level, msg, *args, **kw):
if args:
if kw:
raise TypeError(
"You may give positional or keyword arguments, not both")
args = args or kw
# render
if args:
rendered = msg % args
else:
rendered = msg
rendered = ' ' * self.indent + rendered
if self.explicit_levels:
## FIXME: should this be a name, not a level number?
rendered = '%02i %s' % (level, rendered)
for consumer_level, consumer in self.consumers:
if self.level_matches(level, consumer_level):
if (self.in_progress_hanging
and consumer in (sys.stdout, sys.stderr)):
self.in_progress_hanging = False
sys.stdout.write('\n')
sys.stdout.flush()
if hasattr(consumer, 'write'):
write_content = rendered + '\n'
if should_color(consumer, os.environ):
# We are printing to stdout or stderr and it supports
# colors so render our text colored
colorizer = self.COLORS.get(level, lambda x: x)
write_content = colorizer(write_content)
consumer.write(write_content)
if hasattr(consumer, 'flush'):
consumer.flush()
else:
consumer(rendered)
def _show_progress(self):
"""Should we display download progress?"""
return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
def start_progress(self, msg):
assert not self.in_progress, (
"Tried to start_progress(%r) while in_progress %r"
% (msg, self.in_progress))
if self._show_progress():
sys.stdout.write(' ' * self.indent + msg)
sys.stdout.flush()
self.in_progress_hanging = True
else:
self.in_progress_hanging = False
self.in_progress = msg
self.last_message = None
def end_progress(self, msg='done.'):
assert self.in_progress, (
"Tried to end_progress without start_progress")
if self._show_progress():
if not self.in_progress_hanging:
# Some message has been printed out since start_progress
sys.stdout.write('...' + self.in_progress + msg + '\n')
sys.stdout.flush()
else:
# These erase any messages shown with show_progress (besides .'s)
logger.show_progress('')
logger.show_progress('')
sys.stdout.write(msg + '\n')
sys.stdout.flush()
self.in_progress = None
self.in_progress_hanging = False
def show_progress(self, message=None):
"""If we are in a progress scope, and no log messages have been
shown, write out another '.'"""
if self.in_progress_hanging:
if message is None:
sys.stdout.write('.')
sys.stdout.flush()
else:
if self.last_message:
padding = ' ' * max(0, len(self.last_message) - len(message))
else:
padding = ''
sys.stdout.write('\r%s%s%s%s' %
(' ' * self.indent, self.in_progress, message, padding))
sys.stdout.flush()
self.last_message = message
def stdout_level_matches(self, level):
"""Returns true if a message at this level will go to stdout"""
return self.level_matches(level, self._stdout_level())
def _stdout_level(self):
"""Returns the level that stdout runs at"""
for level, consumer in self.consumers:
if consumer is sys.stdout:
return level
return self.FATAL
def level_matches(self, level, consumer_level):
"""
>>> l = Logger()
>>> l.level_matches(3, 4)
False
>>> l.level_matches(3, 2)
True
>>> l.level_matches(slice(None, 3), 3)
False
>>> l.level_matches(slice(None, 3), 2)
True
>>> l.level_matches(slice(1, 3), 1)
True
>>> l.level_matches(slice(2, 3), 1)
False
"""
if isinstance(level, slice):
start, stop = level.start, level.stop
if start is not None and start > consumer_level:
return False
if stop is not None and stop <= consumer_level:
return False
return True
else:
return level >= consumer_level
@classmethod
def level_for_integer(cls, level):
levels = cls.LEVELS
if level < 0:
return levels[0]
if level >= len(levels):
return levels[-1]
return levels[level]
def move_stdout_to_stderr(self):
to_remove = []
to_add = []
for consumer_level, consumer in self.consumers:
if consumer == sys.stdout:
to_remove.append((consumer_level, consumer))
to_add.append((consumer_level, sys.stderr))
for item in to_remove:
self.consumers.remove(item)
self.consumers.extend(to_add)
logger = Logger()
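# A minimal usage sketch, assuming the integer level constants (NOTIFY etc.)
# and the constructor defaults defined earlier in this module:
if __name__ == '__main__':
    demo = Logger()
    demo.consumers = [(Logger.NOTIFY, sys.stdout)]
    demo.log(Logger.NOTIFY, 'shown: NOTIFY matches the consumer level')
    demo.log(Logger.NOTIFY - 1, 'suppressed: below the consumer level')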
| bsd-3-clause |
yelizariev/account-financial-tools | account_credit_control/wizard/__init__.py | 40 | 1154 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi, Guewen Baconnier
# Copyright 2012-2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import credit_control_emailer
from . import credit_control_marker
from . import credit_control_printer
from . import credit_control_communication
from . import credit_control_policy_changer
| agpl-3.0 |
joopert/home-assistant | homeassistant/components/mobile_app/websocket_api.py | 2 | 3785 | """Websocket API for mobile_app."""
import voluptuous as vol
from homeassistant.components.cloud import async_delete_cloudhook
from homeassistant.components.websocket_api import (
ActiveConnection,
async_register_command,
async_response,
error_message,
result_message,
websocket_command,
ws_require_user,
)
from homeassistant.components.websocket_api.const import (
ERR_INVALID_FORMAT,
ERR_NOT_FOUND,
ERR_UNAUTHORIZED,
)
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
CONF_CLOUDHOOK_URL,
CONF_USER_ID,
DATA_CONFIG_ENTRIES,
DATA_DELETED_IDS,
DATA_STORE,
DOMAIN,
)
from .helpers import safe_registration, savable_state
def register_websocket_handlers(hass: HomeAssistantType) -> bool:
"""Register the websocket handlers."""
async_register_command(hass, websocket_get_user_registrations)
async_register_command(hass, websocket_delete_registration)
return True
@ws_require_user()
@async_response
@websocket_command(
{
vol.Required("type"): "mobile_app/get_user_registrations",
vol.Optional(CONF_USER_ID): cv.string,
}
)
async def websocket_get_user_registrations(
hass: HomeAssistantType, connection: ActiveConnection, msg: dict
) -> None:
"""Return all registrations or just registrations for given user ID."""
user_id = msg.get(CONF_USER_ID, connection.user.id)
if user_id != connection.user.id and not connection.user.is_admin:
# If user ID is provided and is not current user ID and current user
# isn't an admin user
connection.send_error(msg["id"], ERR_UNAUTHORIZED, "Unauthorized")
return
user_registrations = []
for config_entry in hass.config_entries.async_entries(domain=DOMAIN):
registration = config_entry.data
if connection.user.is_admin or registration[CONF_USER_ID] == user_id:
user_registrations.append(safe_registration(registration))
connection.send_message(result_message(msg["id"], user_registrations))
@ws_require_user()
@async_response
@websocket_command(
{
vol.Required("type"): "mobile_app/delete_registration",
vol.Required(CONF_WEBHOOK_ID): cv.string,
}
)
async def websocket_delete_registration(
hass: HomeAssistantType, connection: ActiveConnection, msg: dict
) -> None:
"""Delete the registration for the given webhook_id."""
user = connection.user
webhook_id = msg.get(CONF_WEBHOOK_ID)
if webhook_id is None:
connection.send_error(msg["id"], ERR_INVALID_FORMAT, "Webhook ID not provided")
return
config_entry = hass.data[DOMAIN][DATA_CONFIG_ENTRIES][webhook_id]
registration = config_entry.data
if registration is None:
connection.send_error(
msg["id"], ERR_NOT_FOUND, "Webhook ID not found in storage"
)
return
if registration[CONF_USER_ID] != user.id and not user.is_admin:
connection.send_error(
msg["id"], ERR_UNAUTHORIZED, "User is not registration owner"
)
return
await hass.config_entries.async_remove(config_entry.entry_id)
hass.data[DOMAIN][DATA_DELETED_IDS].append(webhook_id)
store = hass.data[DOMAIN][DATA_STORE]
try:
await store.async_save(savable_state(hass))
except HomeAssistantError:
connection.send_error(
msg["id"], "internal_error", "Error deleting registration"
)
return
if CONF_CLOUDHOOK_URL in registration and "cloud" in hass.config.components:
await async_delete_cloudhook(hass, webhook_id)
connection.send_message(result_message(msg["id"], "ok"))
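# A sketch of the websocket payloads a client might send to these commands
# (the envelope follows the standard Home Assistant websocket API; the "id"
# and webhook values below are hypothetical):
#
#   {"id": 5, "type": "mobile_app/get_user_registrations"}
#   {"id": 6, "type": "mobile_app/get_user_registrations",
#    "user_id": "abc123"}                     # other users: admin only
#   {"id": 7, "type": "mobile_app/delete_registration",
#    "webhook_id": "d34db33f"}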
| apache-2.0 |
onitu/onitu | docs/conf.py | 1 | 8416 | # -*- coding: utf-8 -*-
#
# Onitu documentation build configuration file, created by
# sphinx-quickstart on Fri Nov 8 21:18:03 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinxcontrib.httpdomain']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Onitu'
copyright = u'2013, Yannick Péroux, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1-prev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Onitudoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Onitu.tex', u'Onitu Documentation',
u'Yannick PÉROUX, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'onitu', u'Onitu Documentation',
[u'Yannick PÉROUX, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Onitu', u'Onitu Documentation',
u'Yannick PÉROUX, Alexandre Baron, Antoine Rozo, Wannes Rombouts, Louis Roché, Maxime Constantinian, Morgan Faget, Mathis Dupuy, Frank Lenormand, Timothee Maurin', 'Onitu', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit |
eneabio/nova | nova/rootwrap/wrapper.py | 8 | 1974 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
FILTERS_MODULES = ['nova.rootwrap.compute',
'nova.rootwrap.network',
'nova.rootwrap.volume',
]
def load_filters():
"""Load filters from modules present in nova.rootwrap."""
filters = []
for modulename in FILTERS_MODULES:
try:
__import__(modulename)
module = sys.modules[modulename]
filters = filters + module.filterlist
except ImportError:
# It's OK to have missing filters, since filter modules are
# shipped with specific nodes rather than with python-nova
pass
return filters
def match_filter(filters, userargs):
"""
Checks user command and arguments through command filters and
returns the first matching filter, or None if none matched.
"""
found_filter = None
for f in filters:
if f.match(userargs):
# Try other filters if executable is absent
if not os.access(f.exec_path, os.X_OK):
if not found_filter:
found_filter = f
continue
# Otherwise return matching filter for execution
return f
# No filter matched or first missing executable
return found_filter
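# A minimal sketch of match_filter() with a stand-in filter (hypothetical;
# real filters live in the nova.rootwrap.* modules listed above and expose
# the same match()/exec_path interface):
if __name__ == '__main__':
    class FakeFilter(object):
        exec_path = '/bin/true'  # checked with os.access() before running
        def match(self, userargs):
            return userargs and userargs[0] == 'true'
    print(match_filter([FakeFilter()], ['true']))  # -> the FakeFilter instance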
| apache-2.0 |
taedla01/MissionPlanner | Lib/site-packages/numpy/core/function_base.py | 82 | 5474 | __all__ = ['logspace', 'linspace']
import numeric as _nx
from numeric import array
def linspace(start, stop, num=50, endpoint=True, retstep=False):
"""
Return evenly spaced numbers over a specified interval.
Returns `num` evenly spaced samples, calculated over the
interval [`start`, `stop` ].
The endpoint of the interval can optionally be excluded.
Parameters
----------
start : scalar
The starting value of the sequence.
stop : scalar
The end value of the sequence, unless `endpoint` is set to False.
In that case, the sequence consists of all but the last of ``num + 1``
evenly spaced samples, so that `stop` is excluded. Note that the step
size changes when `endpoint` is False.
num : int, optional
Number of samples to generate. Default is 50.
endpoint : bool, optional
If True, `stop` is the last sample. Otherwise, it is not included.
Default is True.
retstep : bool, optional
If True, return (`samples`, `step`), where `step` is the spacing
between samples.
Returns
-------
samples : ndarray
There are `num` equally spaced samples in the closed interval
``[start, stop]`` or the half-open interval ``[start, stop)``
(depending on whether `endpoint` is True or False).
step : float (only if `retstep` is True)
Size of spacing between samples.
See Also
--------
arange : Similar to `linspace`, but uses a step size (instead of the
number of samples).
logspace : Samples uniformly distributed in log space.
Examples
--------
>>> np.linspace(2.0, 3.0, num=5)
array([ 2. , 2.25, 2.5 , 2.75, 3. ])
>>> np.linspace(2.0, 3.0, num=5, endpoint=False)
array([ 2. , 2.2, 2.4, 2.6, 2.8])
>>> np.linspace(2.0, 3.0, num=5, retstep=True)
(array([ 2. , 2.25, 2.5 , 2.75, 3. ]), 0.25)
Graphical illustration:
>>> import matplotlib.pyplot as plt
>>> N = 8
>>> y = np.zeros(N)
>>> x1 = np.linspace(0, 10, N, endpoint=True)
>>> x2 = np.linspace(0, 10, N, endpoint=False)
>>> plt.plot(x1, y, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.plot(x2, y + 0.5, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.ylim([-0.5, 1])
(-0.5, 1)
>>> plt.show()
"""
num = int(num)
if num <= 0:
return array([], float)
if endpoint:
if num == 1:
return array([float(start)])
step = (stop-start)/float((num-1))
y = _nx.arange(0, num) * step + start
y[-1] = stop
else:
step = (stop-start)/float(num)
y = _nx.arange(0, num) * step + start
if retstep:
return y, step
else:
return y
def logspace(start, stop, num=50, endpoint=True, base=10.0):
"""
Return numbers spaced evenly on a log scale.
In linear space, the sequence starts at ``base ** start``
(`base` to the power of `start`) and ends with ``base ** stop``
(see `endpoint` below).
Parameters
----------
start : float
``base ** start`` is the starting value of the sequence.
stop : float
``base ** stop`` is the final value of the sequence, unless `endpoint`
is False. In that case, ``num + 1`` values are spaced over the
interval in log-space, of which all but the last (a sequence of
length ``num``) are returned.
num : integer, optional
Number of samples to generate. Default is 50.
endpoint : boolean, optional
If true, `stop` is the last sample. Otherwise, it is not included.
Default is True.
base : float, optional
The base of the log space. The step size between the elements in
``ln(samples) / ln(base)`` (or ``log_base(samples)``) is uniform.
Default is 10.0.
Returns
-------
samples : ndarray
`num` samples, equally spaced on a log scale.
See Also
--------
arange : Similar to linspace, with the step size specified instead of the
number of samples. Note that, when used with a float endpoint, the
endpoint may or may not be included.
linspace : Similar to logspace, but with the samples uniformly distributed
in linear space, instead of log space.
Notes
-----
Logspace is equivalent to the code
>>> y = np.linspace(start, stop, num=num, endpoint=endpoint)
... # doctest: +SKIP
>>> power(base, y)
... # doctest: +SKIP
Examples
--------
>>> np.logspace(2.0, 3.0, num=4)
array([ 100. , 215.443469 , 464.15888336, 1000. ])
>>> np.logspace(2.0, 3.0, num=4, endpoint=False)
array([ 100. , 177.827941 , 316.22776602, 562.34132519])
>>> np.logspace(2.0, 3.0, num=4, base=2.0)
array([ 4. , 5.0396842 , 6.34960421, 8. ])
Graphical illustration:
>>> import matplotlib.pyplot as plt
>>> N = 10
>>> x1 = np.logspace(0.1, 1, N, endpoint=True)
>>> x2 = np.logspace(0.1, 1, N, endpoint=False)
>>> y = np.zeros(N)
>>> plt.plot(x1, y, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.plot(x2, y + 0.5, 'o')
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.ylim([-0.5, 1])
(-0.5, 1)
>>> plt.show()
"""
y = linspace(start, stop, num=num, endpoint=endpoint)
return _nx.power(base,y)
| gpl-3.0 |
chokribr/invenio | invenio/modules/upgrader/engine.py | 13 | 19568 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2012, 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Upgrader engine."""
from __future__ import absolute_import
from datetime import datetime
import logging
import re
import sys
import warnings
from flask import current_app
from flask_registry import RegistryProxy, ImportPathRegistry
from sqlalchemy import desc
from invenio.ext.sqlalchemy import db
from .models import Upgrade
from .logging import InvenioUpgraderLogFormatter
from .checks import post_check_bibsched
class InvenioUpgrader(object):
"""Class responsible for loading, sorting and executing upgrades.
A note on cross graph dependencies: An upgrade is uniquely identified
by its id (part of the filename). This means we do not get into
a situation where an upgrade id will exist in two repositories. One
repository will simply overwrite the other on install.
"""
FILE_LOG_FMT = '*%(prefix)s %(asctime)s %(levelname)-8s ' \
'%(plugin_id)s%(message)s'
CONSOLE_LOG_INFO_FMT = '>>> %(prefix)s%(message)s'
CONSOLE_LOG_FMT = '>>> %(prefix)s%(levelname)s: %(message)s'
def __init__(self, packages=None, global_pre_upgrade=None,
global_post_upgrade=None):
"""Init.
@param global_pre_upgrade: List of callables. Each check will be
executed once per upgrade-batch run. Useful e.g. to check if
bibsched is running.
@param global_post_upgrade: List of callables. Each check will be
executed once per upgrade-batch run. Useful e.g. to tell users
to start bibsched again.
"""
self.upgrades = None
self.history = {}
self.ordered_history = []
self.global_pre_upgrade = global_pre_upgrade or []
self.global_post_upgrade = global_post_upgrade or [
post_check_bibsched
]
if packages is None:
self.packages = current_app.extensions['registry']['packages']
else:
self.packages = RegistryProxy(
'upgrader.packages', ImportPathRegistry, initial=packages)
# Warning related
self.old_showwarning = None
self.warning_occured = 0
self._logger = None
self._logger_file_fmtter = InvenioUpgraderLogFormatter(
self.FILE_LOG_FMT)
self._logger_console_fmtter = InvenioUpgraderLogFormatter(
self.CONSOLE_LOG_FMT, info=self.CONSOLE_LOG_INFO_FMT,)
def estimate(self, upgrades):
"""Estimate the time needed to apply upgrades.
If an upgrade does not specify an estimate, it is assumed to be
on the order of 1 second.
@param upgrades: List of upgrades sorted in topological order.
"""
val = 0
for u in upgrades:
if 'estimate' in u:
val += u['estimate']()
else:
val += 1
return val
def human_estimate(self, upgrades):
"""Make a human readable estimated time to completion string.
@param upgrades: List of upgrades sorted in topological order.
"""
val = self.estimate(upgrades)
if val < 60:
return "less than 1 minute"
elif val < 300:
return "less than 5 minutes"
elif val < 600:
return "less than 10 minutes"
elif val < 1800:
return "less than 30 minutes"
elif val < 3600:
return "less than 1 hour"
elif val < 3 * 3600:
return "less than 3 hours"
elif val < 6 * 3600:
return "less than 6 hours"
elif val < 12 * 3600:
return "less than 12 hours"
elif val < 86400:
return "less than 1 day"
else:
return "more than 1 day"
def _setup_log_prefix(self, plugin_id=''):
"""Setup custom warning notification."""
self._logger_console_fmtter.prefix = '%s: ' % plugin_id
self._logger_console_fmtter.plugin_id = plugin_id
self._logger_file_fmtter.prefix = '*'
self._logger_file_fmtter.plugin_id = '%s: ' % plugin_id
def _teardown_log_prefix(self):
"""Tear down custom warning notification."""
self._logger_console_fmtter.prefix = ''
self._logger_console_fmtter.plugin_id = ''
self._logger_file_fmtter.prefix = ' '
self._logger_file_fmtter.plugin_id = ''
def get_logger(self, logfilename=None):
"""Setup logger.
Allow outputting to both a log file and console at the
same time.
"""
if self._logger is None:
self._logger = logging.getLogger('invenio_upgrader')
self._logger.setLevel(logging.INFO)
if logfilename:
fh = logging.FileHandler(logfilename)
fh.setLevel(logging.INFO)
fh.setFormatter(self._logger_file_fmtter)
self._logger.addHandler(fh)
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.INFO)
ch.setFormatter(self._logger_console_fmtter)
self._logger.addHandler(ch)
# Replace show warnings (documented in Python manual)
def showwarning(message, dummy_category, dummy_filename,
dummy_lineno, *dummy_args):
self.warning_occured += 1
logger = self.get_logger()
logger.warning(message)
warnings.showwarning = showwarning
self._teardown_log_prefix()
return self._logger
def has_warnings(self):
"""Determine if a warning has occurred in this upgrader instance."""
return self.warning_occured != 0
def get_warnings_count(self):
"""Get number of warnings issued."""
return self.warning_occured
def pre_upgrade_checks(self, upgrades):
"""Run upgrade pre-checks prior to applying upgrades.
Pre-checks should
in general be fast to execute. Pre-checks may the use the wait_for_user
function, to query the user for confirmation, but should respect the
--yes-i-know option to run unattended.
All pre-checks will be executed even if one fails, however if one pre-
check fails, the upgrade process will be stopped and the user warned.
@param upgrades: List of upgrades sorted in topological order.
"""
errors = []
for check in self.global_pre_upgrade:
self._setup_log_prefix(plugin_id=check.__name__)
try:
check()
except RuntimeError as e:
errors.append((check.__name__, e.args))
for u in upgrades:
if 'pre_upgrade' in u:
self._setup_log_prefix(plugin_id=u['id'])
try:
u['pre_upgrade']()
except RuntimeError as e:
errors.append((u['id'], e.args))
self._teardown_log_prefix()
self._check_errors(errors, "Pre-upgrade check for %s failed with the"
" following errors:")
def _check_errors(self, errors, prefix):
"""Check for errors and possible raise and format an error message.
@param errors: List of error messages.
@param prefix: str, Prefix message for error messages
"""
args = []
for uid, messages in errors:
error_msg = []
error_msg.append(prefix % uid)
for msg in messages:
error_msg.append(" (-) %s" % msg)
args.append("\n".join(error_msg))
if args:
raise RuntimeError(*args)
def post_upgrade_checks(self, upgrades):
"""Run post-upgrade checks after applying all pending upgrades.
Post checks may be used to emit warnings encountered when applying an
upgrade, but post-checks can also be used to advise the user to run
re-indexing or similar long running processes.
Post-checks may query for user-input, but should respect the
--yes-i-know option to run in an unattended mode.
All applied upgrades post-checks are executed.
@param upgrades: List of upgrades sorted in topological order.
"""
errors = []
for u in upgrades:
if 'post_upgrade' in u:
self._setup_log_prefix(plugin_id=u['id'])
try:
u['post_upgrade']()
except RuntimeError as e:
errors.append((u['id'], e.args))
for check in self.global_post_upgrade:
self._setup_log_prefix(plugin_id=check.__name__)
try:
check()
except RuntimeError as e:
errors.append((check.__name__, e.args))
self._teardown_log_prefix()
self._check_errors(errors, "Post-upgrade check for %s failed with the "
"following errors:")
def apply_upgrade(self, upgrade):
"""Apply a upgrade and register that it was successful.
A upgrade may throw a RuntimeError, if an unrecoverable error happens.
@param upgrade: A single upgrade
"""
self._setup_log_prefix(plugin_id=upgrade['id'])
try: # Nested due to Python 2.4
try:
upgrade['do_upgrade']()
self.register_success(upgrade)
except RuntimeError as e:
msg = ["Upgrade error(s):"]
for m in e.args:
msg.append(" (-) %s" % m)
logger = self.get_logger()
logger.error("\n".join(msg))
raise RuntimeError(
"Upgrade '%s' failed. Your installation is in an"
" inconsistent state. Please manually review the upgrade "
"and resolve inconsistencies." % upgrade['id']
)
finally:
self._teardown_log_prefix()
def load_history(self):
"""Load upgrade history from database table.
If upgrade table does not exists, the history is assumed to be empty.
"""
if not self.history:
query = Upgrade.query.order_by(desc(Upgrade.applied))
for u in query.all():
self.history[u.upgrade] = u.applied
self.ordered_history.append(u.upgrade)
def latest_applied_upgrade(self, repository='invenio'):
"""Get the latest applied upgrade for a repository."""
u = Upgrade.query.filter(
Upgrade.upgrade.like("%s_%%" % repository)
).order_by(desc(Upgrade.applied)).first()
return u.upgrade if u else None
def register_success(self, upgrade):
"""Register a successful upgrade."""
u = Upgrade(upgrade=upgrade['id'], applied=datetime.now())
db.session.add(u)
db.session.commit()
def get_history(self):
"""Get history of applied upgrades."""
self.load_history()
return map(lambda x: (x, self.history[x]), self.ordered_history)
def _load_upgrades(self, remove_applied=True):
"""Load upgrade modules.
Upgrade modules are loaded using pluginutils. The pluginutils module
is either loaded from site-packages via normal or via a user-loaded
module supplied in the __init__ method. This is useful when the engine
is running before actually being installed into site-packages.
@param remove_applied: if True, already applied upgrades will not
be included; if False, the entire upgrade graph will be
returned.
"""
from invenio.ext.registry import ModuleAutoDiscoverySubRegistry
from invenio.utils.autodiscovery import create_enhanced_plugin_builder
if remove_applied:
self.load_history()
plugin_builder = create_enhanced_plugin_builder(
compulsory_objects={
'do_upgrade': dummy_signature,
'info': dummy_signature,
},
optional_objects={
'estimate': dummy_signature,
'pre_upgrade': dummy_signature,
'post_upgrade': dummy_signature,
},
other_data={
'depends_on': (list, []),
},
)
def builder(plugin):
plugin_id = plugin.__name__.split('.')[-1]
data = plugin_builder(plugin)
data['id'] = plugin_id
data['repository'] = self._parse_plugin_id(plugin_id)
return plugin_id, data
# Load all upgrades from installed packages
plugins = dict(map(
builder,
ModuleAutoDiscoverySubRegistry(
'upgrades', registry_namespace=self.packages
)))
return plugins
def _parse_plugin_id(self, plugin_id):
"""Determine repository from plugin id."""
m = re.match("(.+)(_\d{4}_\d{2}_\d{2}_)(.+)", plugin_id)
if m:
return m.group(1)
m = re.match("(.+)(_release_)(.+)", plugin_id)
if m:
return m.group(1)
raise RuntimeError("Repository could not be determined from "
"the upgrade identifier: %s." % plugin_id)
def get_upgrades(self, remove_applied=True):
"""Get upgrades (ordered according to their dependencies).
@param remove_applied: Set to false to return all upgrades, otherwise
already applied upgrades are removed from their graph (incl. all
their dependencies).
"""
if self.upgrades is None:
plugins = self._load_upgrades(remove_applied=remove_applied)
# List of un-applied upgrades in topological order
self.upgrades = map(_upgrade_doc_mapper,
self.order_upgrades(plugins, self.history))
return self.upgrades
def _create_graph(self, upgrades, history={}):
"""Create dependency graph from upgrades.
@param upgrades: Dict of upgrades
@param history: Dict of applied upgrades
"""
graph_incoming = {} # nodes their incoming edges
graph_outgoing = {} # nodes their outgoing edges
# Create graph data structure
for mod in upgrades.values():
# Remove all incoming edges from already applied upgrades
graph_incoming[mod['id']] = filter(lambda x: x not in history,
mod['depends_on'])
# Build graph_outgoing
if mod['id'] not in graph_outgoing:
graph_outgoing[mod['id']] = []
for edge in graph_incoming[mod['id']]:
if edge not in graph_outgoing:
graph_outgoing[edge] = []
graph_outgoing[edge].append(mod['id'])
return (graph_incoming, graph_outgoing)
def find_endpoints(self):
"""Find upgrade end-points (i.e nodes without dependents)."""
plugins = self._load_upgrades(remove_applied=False)
dummy_graph_incoming, graph_outgoing = self._create_graph(plugins, {})
endpoints = {}
for node, outgoing in graph_outgoing.items():
if not outgoing:
repository = plugins[node]['repository']
if repository not in endpoints:
endpoints[repository] = []
endpoints[repository].append(node)
return endpoints
def order_upgrades(self, upgrades, history={}):
"""Order upgrades according to their dependencies.
(topological sort using
Kahn's algorithm - http://en.wikipedia.org/wiki/Topological_sorting).
@param upgrades: Dict of upgrades
@param history: Dict of applied upgrades
"""
graph_incoming, graph_outgoing = self._create_graph(upgrades, history)
# Remove already applied upgrades (assumes all dependencies prior to
# this upgrade have been applied).
for node_id in history.keys():
start_nodes = [node_id, ]
while start_nodes:
node = start_nodes.pop()
# Remove from direct dependents
try:
for d in graph_outgoing[node]:
graph_incoming[d] = filter(lambda x: x != node,
graph_incoming[d])
except KeyError:
warnings.warn("Ghost upgrade %s detected" % node)
# Remove all prior dependencies
if node in graph_incoming:
# Get dependencies, remove node, and recursively
# remove all dependencies.
depends_on = graph_incoming[node]
# Add dependencies to check
for d in depends_on:
graph_outgoing[d] = filter(lambda x: x != node,
graph_outgoing[d])
start_nodes.append(d)
del graph_incoming[node]
# Check for missing dependencies
for node_id, depends_on in graph_incoming.items():
for d in depends_on:
if d not in graph_incoming:
raise RuntimeError("Upgrade %s depends on an unknown"
" upgrade %s" % (node_id, d))
# Nodes with no incoming edges
start_nodes = filter(lambda x: len(graph_incoming[x]) == 0,
graph_incoming.keys())
topo_order = []
while start_nodes:
# Append node_n to list (it has no incoming edges)
node_n = start_nodes.pop()
topo_order.append(node_n)
# For each node m with an edge from n to m
for node_m in graph_outgoing[node_n]:
# Remove the edge n to m
graph_incoming[node_m] = filter(lambda x: x != node_n,
graph_incoming[node_m])
# If m has no incoming edges, add it to start_nodes.
if not graph_incoming[node_m]:
start_nodes.append(node_m)
for node, edges in graph_incoming.items():
if edges:
raise RuntimeError("The upgrades have at least one cyclic "
"dependency involving %s." % node)
return map(lambda x: upgrades[x], topo_order)
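# A worked sketch of the ordering above (hypothetical upgrade ids): with
# b depending on a and c depending on b, order_upgrades() yields [a, b, c];
# if a is already in history, its satisfied edges are pruned first and only
# [b, c] remain. A cycle such as a <-> b leaves incoming edges behind and
# raises the RuntimeError above.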
def dummy_signature():
"""Dummy function signature for pluginutils."""
pass
def _upgrade_doc_mapper(x):
"""Map function for ingesting documentation strings into plug-ins."""
try:
x["__doc__"] = x['info']().split("\n")[0].strip()
except Exception:
x["__doc__"] = ''
return x
| gpl-2.0 |
nox/skia | tools/tests/base_unittest.py | 68 | 2416 | #!/usr/bin/python
"""
Copyright 2014 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
A wrapper around the standard Python unittest library, adding features we need
for various unittests within this directory.
"""
import errno
import os
import shutil
import sys
import unittest
# Set the PYTHONPATH to include the tools directory.
sys.path.append(
os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir))
import find_run_binary
class TestCase(unittest.TestCase):
def shortDescription(self):
"""Tell unittest framework to not print docstrings for test cases."""
return None
def create_empty_dir(self, path):
"""Creates an empty directory at path and returns path.
Args:
path: path on local disk
"""
shutil.rmtree(path=path, ignore_errors=True)
try:
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
return path
def run_command(self, args):
"""Runs a program from the command line and returns stdout.
Args:
args: Command line to run, as a list of string parameters. args[0] is the
binary to run.
Returns:
stdout from the program, as a single string.
Raises:
Exception: the program exited with a nonzero return code.
"""
return find_run_binary.run_command(args)
def find_path_to_program(self, program):
"""Returns path to an existing program binary.
Args:
program: Basename of the program to find (e.g., 'render_pictures').
Returns:
Absolute path to the program binary, as a string.
Raises:
Exception: unable to find the program binary.
"""
return find_run_binary.find_path_to_program(program)
def main(test_case_class):
"""Run the unit tests within the given class.
Raises an Exception if any of those tests fail (in case we are running in the
context of run_all.py, which depends on that Exception to signal failures).
TODO(epoger): Make all of our unit tests use the Python unittest framework,
so we can leverage its ability to run *all* the tests and report failures at
the end.
"""
suite = unittest.TestLoader().loadTestsFromTestCase(test_case_class)
results = unittest.TextTestRunner(verbosity=2).run(suite)
if not results.wasSuccessful():
raise Exception('failed unittest %s' % test_case_class)
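# A minimal sketch of a test module built on this wrapper (the class and
# assertion below are hypothetical):
#
# class DemoTest(TestCase):
#   def test_create_empty_dir(self):
#     path = self.create_empty_dir('/tmp/base_unittest_demo')
#     self.assertTrue(os.path.isdir(path))
#
# if __name__ == '__main__':
#   main(DemoTest)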
| bsd-3-clause |
mpdehaan/ansible | lib/ansible/runner/connection_plugins/funcd.py | 62 | 3629 | # Based on local.py (c) 2012, Michael DeHaan <[email protected]>
# Based on chroot.py (c) 2013, Maykel Moya <[email protected]>
# (c) 2013, Michael Scherer <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# ---
# The func transport permits using ansible over func. For people who have
# already set up func and wish to experiment with ansible, this permits a
# gradual move to ansible without having to completely redo the network setup.
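# A minimal usage sketch (values are hypothetical): select this transport
# with '-c funcd' on the ansible command line, or with 'connection: funcd'
# in a play, using inventory hostnames that match the func minion names.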
HAVE_FUNC=False
try:
import func.overlord.client as fc
HAVE_FUNC=True
except ImportError:
pass
import os
from ansible.callbacks import vvv
from ansible import errors
import tempfile
import shutil
class Connection(object):
''' Func-based connections '''
def __init__(self, runner, host, port, *args, **kwargs):
self.runner = runner
self.host = host
self.has_pipelining = False
# port is unused; everything goes through func
self.port = port
def connect(self, port=None):
if not HAVE_FUNC:
raise errors.AnsibleError("func is not installed")
self.client = fc.Client(self.host)
return self
def exec_command(self, cmd, tmp_path, sudo_user=None, sudoable=False,
executable='/bin/sh', in_data=None, su=None, su_user=None):
''' run a command on the remote minion '''
if su or su_user:
raise errors.AnsibleError("Internal Error: this module does not support running commands via su")
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
vvv("EXEC %s" % (cmd), host=self.host)
p = self.client.command.run(cmd)[self.host]
return (p[0], '', p[1], p[2])
def _normalize_path(self, path, prefix):
if not path.startswith(os.path.sep):
path = os.path.join(os.path.sep, path)
normpath = os.path.normpath(path)
return os.path.join(prefix, normpath[1:])
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
out_path = self._normalize_path(out_path, '/')
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
self.client.local.copyfile.send(in_path, out_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
in_path = self._normalize_path(in_path, '/')
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
# need to use a tmp dir due to the difference in semantics between
# getfile (which takes a directory as destination) and fetch_file,
# which takes a file directly
tmpdir = tempfile.mkdtemp(prefix="func_ansible")
self.client.local.getfile.get(in_path, tmpdir)
shutil.move(os.path.join(tmpdir, self.host, os.path.basename(in_path)),
out_path)
shutil.rmtree(tmpdir)
def close(self):
''' terminate the connection; nothing to do here '''
pass
| gpl-3.0 |
raildo/nova | nova/objects/compute_node.py | 13 | 19773 | # Copyright 2013 IBM Corp
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_serialization import jsonutils
import six
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
from nova.objects import pci_device_pool
from nova import utils
CONF = cfg.CONF
CONF.import_opt('cpu_allocation_ratio', 'nova.compute.resource_tracker')
CONF.import_opt('ram_allocation_ratio', 'nova.compute.resource_tracker')
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class ComputeNode(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Added get_by_service_id()
# Version 1.2: String attributes updated to support unicode
# Version 1.3: Added stats field
# Version 1.4: Added host ip field
# Version 1.5: Added numa_topology field
# Version 1.6: Added supported_hv_specs
# Version 1.7: Added host field
# Version 1.8: Added get_by_host_and_nodename()
# Version 1.9: Added pci_device_pools
# Version 1.10: Added get_first_node_by_host_for_old_compat()
# Version 1.11: PciDevicePoolList version 1.1
# Version 1.12: HVSpec version 1.1
# Version 1.13: Changed service_id field to be nullable
# Version 1.14: Added cpu_allocation_ratio and ram_allocation_ratio
VERSION = '1.14'
fields = {
'id': fields.IntegerField(read_only=True),
'service_id': fields.IntegerField(nullable=True),
'host': fields.StringField(nullable=True),
'vcpus': fields.IntegerField(),
'memory_mb': fields.IntegerField(),
'local_gb': fields.IntegerField(),
'vcpus_used': fields.IntegerField(),
'memory_mb_used': fields.IntegerField(),
'local_gb_used': fields.IntegerField(),
'hypervisor_type': fields.StringField(),
'hypervisor_version': fields.IntegerField(),
'hypervisor_hostname': fields.StringField(nullable=True),
'free_ram_mb': fields.IntegerField(nullable=True),
'free_disk_gb': fields.IntegerField(nullable=True),
'current_workload': fields.IntegerField(nullable=True),
'running_vms': fields.IntegerField(nullable=True),
'cpu_info': fields.StringField(nullable=True),
'disk_available_least': fields.IntegerField(nullable=True),
'metrics': fields.StringField(nullable=True),
'stats': fields.DictOfNullableStringsField(nullable=True),
'host_ip': fields.IPAddressField(nullable=True),
'numa_topology': fields.StringField(nullable=True),
# NOTE(pmurray): the supported_hv_specs field maps to the
# supported_instances field in the database
'supported_hv_specs': fields.ListOfObjectsField('HVSpec'),
# NOTE(pmurray): the pci_device_pools field maps to the
# pci_stats field in the database
'pci_device_pools': fields.ObjectField('PciDevicePoolList',
nullable=True),
'cpu_allocation_ratio': fields.FloatField(),
'ram_allocation_ratio': fields.FloatField(),
}
obj_relationships = {
'pci_device_pools': [('1.9', '1.0'), ('1.11', '1.1')],
'supported_hv_specs': [('1.6', '1.0'), ('1.12', '1.1')],
}
def obj_make_compatible(self, primitive, target_version):
super(ComputeNode, self).obj_make_compatible(primitive, target_version)
target_version = utils.convert_version_to_tuple(target_version)
if target_version < (1, 14):
if 'ram_allocation_ratio' in primitive:
del primitive['ram_allocation_ratio']
if 'cpu_allocation_ratio' in primitive:
del primitive['cpu_allocation_ratio']
if target_version < (1, 13) and primitive.get('service_id') is None:
# service_id is non-nullable in versions before 1.13
try:
service = objects.Service.get_by_compute_host(
self._context, primitive['host'])
primitive['service_id'] = service.id
except (exception.ComputeHostNotFound, KeyError):
# NOTE(hanlind): In case anything goes wrong like service not
# found or host not being set, catch and set a fake value just
# to allow for older versions that demand a value to work.
# Setting to -1 will, if value is later used result in a
# ServiceNotFound, so should be safe.
primitive['service_id'] = -1
if target_version < (1, 7) and 'host' in primitive:
del primitive['host']
if target_version < (1, 5) and 'numa_topology' in primitive:
del primitive['numa_topology']
if target_version < (1, 4) and 'host_ip' in primitive:
del primitive['host_ip']
if target_version < (1, 3) and 'stats' in primitive:
# pre 1.3 version does not have a stats field
del primitive['stats']
@staticmethod
def _host_from_db_object(compute, db_compute):
if (('host' not in db_compute or db_compute['host'] is None)
and 'service_id' in db_compute
and db_compute['service_id'] is not None):
# FIXME(sbauza) : Unconverted compute record, provide compatibility
# This has to stay until we can be sure that any/all compute nodes
# in the database have been converted to use the host field
# Service field of ComputeNode could be deprecated in a next patch,
# so let's use directly the Service object
try:
service = objects.Service.get_by_id(
compute._context, db_compute['service_id'])
except exception.ServiceNotFound:
compute['host'] = None
return
try:
compute['host'] = service.host
except (AttributeError, exception.OrphanedObjectError):
# Host can be nullable in Service
compute['host'] = None
elif 'host' in db_compute and db_compute['host'] is not None:
# New-style DB having host as a field
compute['host'] = db_compute['host']
else:
# We assume it should not happen but in case, let's set it to None
compute['host'] = None
@staticmethod
def _from_db_object(context, compute, db_compute):
special_cases = set([
'stats',
'supported_hv_specs',
'host',
'pci_device_pools',
])
fields = set(compute.fields) - special_cases
for key in fields:
value = db_compute[key]
# NOTE(sbauza): Since all compute nodes don't possibly run the
# latest RT code updating allocation ratios, we need to provide
# a backwards compatible way of hydrating them.
# Because we care about our operators and don't want to
# ask them to change their configuration files before upgrading, we
# prefer to hardcode the default values for the ratios here until
# the next release (Mitaka) where the opt default values will be
# restored for both cpu (16.0) and ram (1.5) allocation ratios.
# TODO(sbauza): Remove that in the next major version bump where
# we break compatibility with old Kilo computes
if key == 'cpu_allocation_ratio' or key == 'ram_allocation_ratio':
if value == 0.0:
# Operator has not yet provided a new value for that ratio
# on the compute node
value = None
if value is None:
# ResourceTracker is not updating the value (old node)
# or the compute node is updated but the default value has
# not been changed
value = getattr(CONF, key)
if value == 0.0 and key == 'cpu_allocation_ratio':
# It's not specified either on the controller
value = 16.0
if value == 0.0 and key == 'ram_allocation_ratio':
# It's not specified either on the controller
value = 1.5
compute[key] = value
stats = db_compute['stats']
if stats:
compute['stats'] = jsonutils.loads(stats)
sup_insts = db_compute.get('supported_instances')
if sup_insts:
hv_specs = jsonutils.loads(sup_insts)
hv_specs = [objects.HVSpec.from_list(hv_spec)
for hv_spec in hv_specs]
compute['supported_hv_specs'] = hv_specs
pci_stats = db_compute.get('pci_stats')
compute.pci_device_pools = pci_device_pool.from_pci_stats(pci_stats)
compute._context = context
# Make sure that we correctly set the host field depending on either
# host column is present in the table or not
compute._host_from_db_object(compute, db_compute)
compute.obj_reset_changes()
return compute
@base.remotable_classmethod
def get_by_id(cls, context, compute_id):
db_compute = db.compute_node_get(context, compute_id)
return cls._from_db_object(context, cls(), db_compute)
# NOTE(hanlind): This is deprecated and should be removed on the next
# major version bump
@base.remotable_classmethod
def get_by_service_id(cls, context, service_id):
db_computes = db.compute_nodes_get_by_service_id(context, service_id)
# NOTE(sbauza): Old version was returning an item, we need to keep this
# behaviour for backwards compatibility
db_compute = db_computes[0]
return cls._from_db_object(context, cls(), db_compute)
@base.remotable_classmethod
def get_by_host_and_nodename(cls, context, host, nodename):
try:
db_compute = db.compute_node_get_by_host_and_nodename(
context, host, nodename)
except exception.ComputeHostNotFound:
# FIXME(sbauza): Some old computes can still have no host record
# We need to provide compatibility by using the old service_id
# record.
# We assume the compatibility as an extra penalty of one more DB
# call but that's necessary until all nodes are upgraded.
try:
service = objects.Service.get_by_compute_host(context, host)
db_computes = db.compute_nodes_get_by_service_id(
context, service.id)
except exception.ServiceNotFound:
# We need to provide the same exception upstream
raise exception.ComputeHostNotFound(host=host)
db_compute = None
for compute in db_computes:
if compute['hypervisor_hostname'] == nodename:
db_compute = compute
# We can avoid an extra call to Service object in
# _from_db_object
db_compute['host'] = service.host
break
if not db_compute:
raise exception.ComputeHostNotFound(host=host)
return cls._from_db_object(context, cls(), db_compute)
@base.remotable_classmethod
def get_first_node_by_host_for_old_compat(cls, context, host,
use_slave=False):
computes = ComputeNodeList.get_all_by_host(context, host, use_slave)
# FIXME(sbauza): Some hypervisors (VMware, Ironic) can return multiple
# nodes per host, we should return all the nodes and modify the callers
# instead.
# Arbitrarily returning the first node.
return computes[0]
@staticmethod
def _convert_stats_to_db_format(updates):
stats = updates.pop('stats', None)
if stats is not None:
updates['stats'] = jsonutils.dumps(stats)
@staticmethod
def _convert_host_ip_to_db_format(updates):
host_ip = updates.pop('host_ip', None)
if host_ip:
updates['host_ip'] = str(host_ip)
@staticmethod
def _convert_supported_instances_to_db_format(updates):
hv_specs = updates.pop('supported_hv_specs', None)
if hv_specs is not None:
hv_specs = [hv_spec.to_list() for hv_spec in hv_specs]
updates['supported_instances'] = jsonutils.dumps(hv_specs)
@staticmethod
def _convert_pci_stats_to_db_format(updates):
pools = updates.pop('pci_device_pools', None)
if pools:
updates['pci_stats'] = jsonutils.dumps(pools.obj_to_primitive())
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self.obj_get_changes()
self._convert_stats_to_db_format(updates)
self._convert_host_ip_to_db_format(updates)
self._convert_supported_instances_to_db_format(updates)
self._convert_pci_stats_to_db_format(updates)
db_compute = db.compute_node_create(self._context, updates)
self._from_db_object(self._context, self, db_compute)
@base.remotable
def save(self, prune_stats=False):
# NOTE(belliott) ignore prune_stats param, no longer relevant
updates = self.obj_get_changes()
updates.pop('id', None)
self._convert_stats_to_db_format(updates)
self._convert_host_ip_to_db_format(updates)
self._convert_supported_instances_to_db_format(updates)
self._convert_pci_stats_to_db_format(updates)
db_compute = db.compute_node_update(self._context, self.id, updates)
self._from_db_object(self._context, self, db_compute)
@base.remotable
def destroy(self):
db.compute_node_delete(self._context, self.id)
def update_from_virt_driver(self, resources):
# NOTE(pmurray): the virt driver provides a dict of values that
# can be copied into the compute node. The names and representation
# do not exactly match.
# TODO(pmurray): the resources dict should be formalized.
keys = ["vcpus", "memory_mb", "local_gb", "cpu_info",
"vcpus_used", "memory_mb_used", "local_gb_used",
"numa_topology", "hypervisor_type",
"hypervisor_version", "hypervisor_hostname",
"disk_available_least", "host_ip"]
for key in keys:
if key in resources:
self[key] = resources[key]
# supported_instances has a different name in compute_node
# TODO(pmurray): change virt drivers not to json encode
# values they add to the resources dict
if 'supported_instances' in resources:
si = resources['supported_instances']
if isinstance(si, six.string_types):
si = jsonutils.loads(si)
self.supported_hv_specs = [objects.HVSpec.from_list(s) for s in si]
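# A minimal sketch of hydrating a node from virt-driver resources (the
# values below are illustrative only, not from a real hypervisor):
#
#   node = objects.ComputeNode(context)
#   node.update_from_virt_driver({'vcpus': 8, 'memory_mb': 16384,
#                                 'local_gb': 100, 'cpu_info': '{}',
#                                 'hypervisor_type': 'fake',
#                                 'hypervisor_version': 1000,
#                                 'hypervisor_hostname': 'node1'})
#   node.create()  # persists via db.compute_node_create()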
@base.NovaObjectRegistry.register
class ComputeNodeList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# ComputeNode <= version 1.2
# Version 1.1 ComputeNode version 1.3
# Version 1.2 Add get_by_service()
# Version 1.3 ComputeNode version 1.4
# Version 1.4 ComputeNode version 1.5
# Version 1.5 Add use_slave to get_by_service
# Version 1.6 ComputeNode version 1.6
# Version 1.7 ComputeNode version 1.7
# Version 1.8 ComputeNode version 1.8 + add get_all_by_host()
# Version 1.9 ComputeNode version 1.9
# Version 1.10 ComputeNode version 1.10
# Version 1.11 ComputeNode version 1.11
# Version 1.12 ComputeNode version 1.12
# Version 1.13 ComputeNode version 1.13
# Version 1.14 ComputeNode version 1.14
VERSION = '1.14'
fields = {
'objects': fields.ListOfObjectsField('ComputeNode'),
}
# NOTE(danms): ComputeNode was at 1.2 before we added this
obj_relationships = {
'objects': [('1.0', '1.2'), ('1.1', '1.3'), ('1.2', '1.3'),
('1.3', '1.4'), ('1.4', '1.5'), ('1.5', '1.5'),
('1.6', '1.6'), ('1.7', '1.7'), ('1.8', '1.8'),
('1.9', '1.9'), ('1.10', '1.10'), ('1.11', '1.11'),
('1.12', '1.12'), ('1.13', '1.13'), ('1.14', '1.14')],
}
@base.remotable_classmethod
def get_all(cls, context):
db_computes = db.compute_node_get_all(context)
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
@base.remotable_classmethod
def get_by_hypervisor(cls, context, hypervisor_match):
db_computes = db.compute_node_search_by_hypervisor(context,
hypervisor_match)
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
# NOTE(hanlind): This is deprecated and should be removed on the next
# major version bump
@base.remotable_classmethod
def _get_by_service(cls, context, service_id, use_slave=False):
try:
db_computes = db.compute_nodes_get_by_service_id(
context, service_id)
except exception.ServiceNotFound:
# NOTE(sbauza): Previous behaviour was returning an empty list
# if the service was created with no computes, we need to keep it.
db_computes = []
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
@base.remotable_classmethod
def get_all_by_host(cls, context, host, use_slave=False):
try:
db_computes = db.compute_node_get_all_by_host(context, host,
use_slave)
except exception.ComputeHostNotFound:
# FIXME(sbauza): Some old computes can still have no host record
# We need to provide compatibility by using the old service_id
# record.
# We assume the compatibility as an extra penalty of one more DB
# call but that's necessary until all nodes are upgraded.
try:
service = objects.Service.get_by_compute_host(context, host,
use_slave)
db_computes = db.compute_nodes_get_by_service_id(
context, service.id)
except exception.ServiceNotFound:
# We need to provide the same exception upstream
raise exception.ComputeHostNotFound(host=host)
# We can avoid an extra call to Service object in _from_db_object
for db_compute in db_computes:
db_compute['host'] = service.host
return base.obj_make_list(context, cls(context), objects.ComputeNode,
db_computes)
| apache-2.0 |
spaceexperiment/forum-app | app/api/views/category.py | 1 | 1784 | from flask import request, session, g, redirect, url_for, abort
from . import api
from ..exceptions import ExistsError
from ..models import Category
from .main import BaseMethodView
class CategoryView(BaseMethodView):
def get(self, id=None):
if id:
instance = Category(id)
if not instance:
abort(404)
instance.category.subs = instance.subs()
return instance.category
categories_subs = []
for id in Category.all_ids():
instance = Category(id)
instance.category.subs = instance.subs()
categories_subs.append(instance.category)
return categories_subs
def post(self):
self.is_admin()
missing_data = self.missing_data(['title'])
if missing_data:
return missing_data
try:
category = Category.create(request.json['title'])
except ExistsError:
return self.error('Category exists', 409)
return category, 201
def put(self, id=None):
self.is_admin()
title = request.json.get('title')
if not title:
return self.bad_request('missing title')
if not Category.get(id):
return abort(404)
category = Category.edit(id, title=title)
return category, 200
def delete(self, id=None):
self.is_admin()
if id:
category = Category.get(id)
if not id or not category:
abort(404)
Category.delete(id)
return '', 200
view = CategoryView.as_view('category')
api.add_url_rule('/category/', view_func=view, methods=['GET', 'POST'])
api.add_url_rule('/category/<int:id>/', view_func=view,
methods=['GET', 'PUT', 'DELETE'])
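# Editor-added sketch of the HTTP surface wired up above (the URL prefix is a
# deployment assumption; the handlers are the CategoryView methods):
#
#   GET    /category/        -> CategoryView.get()      (list all categories)
#   POST   /category/        -> CategoryView.post()     (admin; JSON {"title": ...})
#   GET    /category/<id>/   -> CategoryView.get(id)
#   PUT    /category/<id>/   -> CategoryView.put(id)    (admin; JSON {"title": ...})
#   DELETE /category/<id>/   -> CategoryView.delete(id) (admin)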
| mit |
tesb/flask-crystal | venv/Lib/site-packages/requests/__init__.py | 327 | 1856 | # -*- coding: utf-8 -*-
# __
# /__) _ _ _ _ _/ _
# / ( (- (/ (/ (- _) / _)
# /
"""
requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:
>>> import requests
>>> r = requests.get('http://python.org')
>>> r.status_code
200
>>> 'Python is a programming language' in r.content
True
... or POST:
>>> payload = dict(key1='value1', key2='value2')
>>> r = requests.post("http://httpbin.org/post", data=payload)
>>> print(r.text)
{
...
"form": {
"key2": "value2",
"key1": "value1"
},
...
}
The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
:copyright: (c) 2014 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'requests'
__version__ = '2.3.0'
__build__ = 0x020300
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2014 Kenneth Reitz'
# Attempt to enable urllib3's SNI support, if possible
try:
from .packages.urllib3.contrib import pyopenssl
pyopenssl.inject_into_urllib3()
except ImportError:
pass
from . import utils
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError
)
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
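# Editor-added sketch (not part of the original file): because only a
# NullHandler is installed above, an application that wants to see this
# library's log output attaches its own handler; the level chosen below is
# an illustrative assumption.
#
#     import logging
#     logging.basicConfig(level=logging.DEBUG)
#     logging.getLogger('requests').setLevel(logging.DEBUG)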
| apache-2.0 |
homme/ansible | test/integration/cleanup_gce.py | 163 | 2589 | '''
Find and delete GCE resources matching the provided --match string. Unless
--yes|-y is provided, you will be prompted for confirmation prior to
deleting resources.
Please use caution, you can easily delete your *ENTIRE* GCE infrastructure.
'''
import os
import re
import sys
import optparse
import yaml
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
_ = Provider.GCE
except ImportError:
print("failed=True " + \
"msg='libcloud with GCE support (0.13.3+) required for this module'")
sys.exit(1)
import gce_credentials
def delete_gce_resources(get_func, attr, opts):
for item in get_func():
val = getattr(item, attr)
if re.search(opts.match_re, val, re.IGNORECASE):
prompt_and_delete(item, "Delete matching %s? [y/n]: " % (item,), opts.assumeyes)
def prompt_and_delete(item, prompt, assumeyes):
if not assumeyes:
assumeyes = raw_input(prompt).lower() == 'y'
assert hasattr(item, 'destroy'), "Class <%s> has no delete attribute" % item.__class__
if assumeyes:
item.destroy()
print ("Deleted %s" % item)
def parse_args():
parser = optparse.OptionParser(usage="%s [options]" % (sys.argv[0],),
description=__doc__)
gce_credentials.add_credentials_options(parser)
parser.add_option("--yes", "-y",
action="store_true", dest="assumeyes",
default=False,
help="Don't prompt for confirmation")
parser.add_option("--match",
action="store", dest="match_re",
default="^ansible-testing-",
help="Regular expression used to find GCE resources (default: %default)")
(opts, args) = parser.parse_args()
gce_credentials.check_required(opts, parser)
return (opts, args)
if __name__ == '__main__':
(opts, args) = parse_args()
# Connect to GCE
gce = gce_credentials.get_gce_driver(opts)
try:
# Delete matching instances
delete_gce_resources(gce.list_nodes, 'name', opts)
# Delete matching snapshots
def get_snapshots():
for volume in gce.list_volumes():
for snapshot in gce.list_volume_snapshots(volume):
yield snapshot
delete_gce_resources(get_snapshots, 'name', opts)
# Delete matching disks
delete_gce_resources(gce.list_volumes, 'name', opts)
except KeyboardInterrupt as e:
print("\nExiting on user command.")
| gpl-3.0 |
nizhikov/ignite | modules/platforms/python/pyignite/datatypes/__init__.py | 11 | 1078 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module contains classes, used internally by `pyignite` for parsing and
creating binary data.
"""
from .complex import *
from .internal import *
from .null_object import *
from .primitive import *
from .primitive_arrays import *
from .primitive_objects import *
from .standard import *
| apache-2.0 |
ArcticaProject/vcxsrv | mesalib/src/glsl/nir/nir_opcodes_c.py | 1 | 2036 | #! /usr/bin/env python
#
# Copyright (C) 2014 Connor Abbott
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Connor Abbott ([email protected])
from nir_opcodes import opcodes
from mako.template import Template
template = Template("""
#include "nir.h"
const nir_op_info nir_op_infos[nir_num_opcodes] = {
% for name, opcode in sorted(opcodes.iteritems()):
{
.name = "${name}",
.num_inputs = ${opcode.num_inputs},
.output_size = ${opcode.output_size},
.output_type = ${"nir_type_" + opcode.output_type},
.input_sizes = {
${ ", ".join(str(size) for size in opcode.input_sizes) }
},
.input_types = {
${ ", ".join("nir_type_" + type for type in opcode.input_types) }
},
.algebraic_properties =
${ "0" if opcode.algebraic_properties == "" else " | ".join(
"NIR_OP_IS_" + prop.upper() for prop in
opcode.algebraic_properties.strip().split(" ")) }
},
% endfor
};
""")
print template.render(opcodes=opcodes)
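# Editor-added sketch of one entry the template above emits; the field values
# are illustrative assumptions, not taken from a real opcode table:
#
#     {
#        .name = "fadd",
#        .num_inputs = 2,
#        .output_size = 0,
#        .output_type = nir_type_float,
#        ...
#     },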
| gpl-3.0 |
demarle/VTK | ThirdParty/Twisted/twisted/cred/strcred.py | 63 | 8301 | # -*- test-case-name: twisted.test.test_strcred -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""
Support for resolving command-line strings that represent different
checkers available to cred.
Examples:
- passwd:/etc/passwd
- memory:admin:asdf:user:lkj
- unix
"""
import sys
from zope.interface import Interface, Attribute
from twisted.plugin import getPlugins
from twisted.python import usage
class ICheckerFactory(Interface):
"""
A factory for objects which provide
L{twisted.cred.checkers.ICredentialsChecker}.
It's implemented by twistd plugins creating checkers.
"""
authType = Attribute(
'A tag that identifies the authentication method.')
authHelp = Attribute(
'A detailed (potentially multi-line) description of precisely '
'what functionality this CheckerFactory provides.')
argStringFormat = Attribute(
'A short (one-line) description of the argument string format.')
credentialInterfaces = Attribute(
'A list of credentials interfaces that this factory will support.')
def generateChecker(argstring):
"""
        Return an L{ICredentialsChecker} provider using the supplied
argument string.
"""
class StrcredException(Exception):
"""
Base exception class for strcred.
"""
class InvalidAuthType(StrcredException):
"""
Raised when a user provides an invalid identifier for the
authentication plugin (known as the authType).
"""
class InvalidAuthArgumentString(StrcredException):
"""
Raised by an authentication plugin when the argument string
provided is formatted incorrectly.
"""
class UnsupportedInterfaces(StrcredException):
"""
Raised when an application is given a checker to use that does not
provide any of the application's supported credentials interfaces.
"""
# This will be used to warn the users whenever they view help for an
# authType that is not supported by the application.
notSupportedWarning = ("WARNING: This authType is not supported by "
"this application.")
def findCheckerFactories():
"""
Find all objects that implement L{ICheckerFactory}.
"""
return getPlugins(ICheckerFactory)
def findCheckerFactory(authType):
"""
Find the first checker factory that supports the given authType.
"""
for factory in findCheckerFactories():
if factory.authType == authType:
return factory
raise InvalidAuthType(authType)
def makeChecker(description):
"""
Returns an L{twisted.cred.checkers.ICredentialsChecker} based on the
contents of a descriptive string. Similar to
L{twisted.application.strports}.
"""
if ':' in description:
authType, argstring = description.split(':', 1)
else:
authType = description
argstring = ''
return findCheckerFactory(authType).generateChecker(argstring)
class AuthOptionMixin:
"""
Defines helper methods that can be added on to any
L{usage.Options} subclass that needs authentication.
This mixin implements three new options methods:
The opt_auth method (--auth) will write two new values to the
'self' dictionary: C{credInterfaces} (a dict of lists) and
C{credCheckers} (a list).
The opt_help_auth method (--help-auth) will search for all
available checker plugins and list them for the user; it will exit
when finished.
The opt_help_auth_type method (--help-auth-type) will display
detailed help for a particular checker plugin.
@cvar supportedInterfaces: An iterable object that returns
credential interfaces which this application is able to support.
@cvar authOutput: A writeable object to which this options class
will send all help-related output. Default: L{sys.stdout}
"""
supportedInterfaces = None
authOutput = sys.stdout
def supportsInterface(self, interface):
"""
Returns whether a particular credentials interface is supported.
"""
return (self.supportedInterfaces is None
or interface in self.supportedInterfaces)
def supportsCheckerFactory(self, factory):
"""
Returns whether a checker factory will provide at least one of
the credentials interfaces that we care about.
"""
for interface in factory.credentialInterfaces:
if self.supportsInterface(interface):
return True
return False
def addChecker(self, checker):
"""
Supply a supplied credentials checker to the Options class.
"""
# First figure out which interfaces we're willing to support.
supported = []
if self.supportedInterfaces is None:
supported = checker.credentialInterfaces
else:
for interface in checker.credentialInterfaces:
if self.supportsInterface(interface):
supported.append(interface)
if not supported:
raise UnsupportedInterfaces(checker.credentialInterfaces)
# If we get this far, then we know we can use this checker.
if 'credInterfaces' not in self:
self['credInterfaces'] = {}
if 'credCheckers' not in self:
self['credCheckers'] = []
self['credCheckers'].append(checker)
for interface in supported:
self['credInterfaces'].setdefault(interface, []).append(checker)
def opt_auth(self, description):
"""
Specify an authentication method for the server.
"""
try:
self.addChecker(makeChecker(description))
except UnsupportedInterfaces, e:
raise usage.UsageError(
'Auth plugin not supported: %s' % e.args[0])
except InvalidAuthType, e:
raise usage.UsageError(
'Auth plugin not recognized: %s' % e.args[0])
except Exception, e:
raise usage.UsageError('Unexpected error: %s' % e)
def _checkerFactoriesForOptHelpAuth(self):
"""
Return a list of which authTypes will be displayed by --help-auth.
This makes it a lot easier to test this module.
"""
for factory in findCheckerFactories():
for interface in factory.credentialInterfaces:
if self.supportsInterface(interface):
yield factory
break
def opt_help_auth(self):
"""
Show all authentication methods available.
"""
self.authOutput.write("Usage: --auth AuthType[:ArgString]\n")
self.authOutput.write("For detailed help: --help-auth-type AuthType\n")
self.authOutput.write('\n')
# Figure out the right width for our columns
firstLength = 0
for factory in self._checkerFactoriesForOptHelpAuth():
if len(factory.authType) > firstLength:
firstLength = len(factory.authType)
formatString = ' %%-%is\t%%s\n' % firstLength
self.authOutput.write(formatString % ('AuthType', 'ArgString format'))
self.authOutput.write(formatString % ('========', '================'))
for factory in self._checkerFactoriesForOptHelpAuth():
self.authOutput.write(
formatString % (factory.authType, factory.argStringFormat))
self.authOutput.write('\n')
raise SystemExit(0)
def opt_help_auth_type(self, authType):
"""
Show help for a particular authentication type.
"""
try:
cf = findCheckerFactory(authType)
except InvalidAuthType:
raise usage.UsageError("Invalid auth type: %s" % authType)
self.authOutput.write("Usage: --auth %s[:ArgString]\n" % authType)
self.authOutput.write("ArgString format: %s\n" % cf.argStringFormat)
self.authOutput.write('\n')
for line in cf.authHelp.strip().splitlines():
self.authOutput.write(' %s\n' % line.rstrip())
self.authOutput.write('\n')
if not self.supportsCheckerFactory(cf):
self.authOutput.write(' %s\n' % notSupportedWarning)
self.authOutput.write('\n')
raise SystemExit(0)
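# Editor-added illustrative helper (not part of the original module). The
# 'memory' authType ships with Twisted's built-in plugins, but availability
# still depends on the installed plugin set, so treat the literal below as
# an assumption.
def _exampleMakeChecker():
    """
    Build a checker from a description string, as the module docstring shows.
    """
    return makeChecker('memory:admin:asdf:user:lkj')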
| bsd-3-clause |
vrieni/orange | Orange/OrangeWidgets/Data/OWSave.py | 6 | 5594 | from OWWidget import *
import OWGUI
import re, os.path
from exceptions import Exception
NAME = "Save"
DESCRIPTION = "Saves data to file."
LONG_DESCRIPTION = ""
ICON = "icons/Save.svg"
PRIORITY = 90
AUTHOR = "Janez Demsar"
AUTHOR_EMAIL = "janez.demsar(@at@)fri.uni-lj.si"
INPUTS = [("Data", Orange.data.Table, "dataset", Default)]
class OWSave(OWWidget):
settingsList = ["recentFiles", "selectedFileName"]
savers = {".txt": orange.saveTxt,
".tab": orange.saveTabDelimited,
".names": orange.saveC45,
".test": orange.saveC45,
".data": orange.saveC45,
".csv": orange.saveCsv
}
# exclude C50 since it has the same extension and we do not need saving to it anyway
registeredFileTypes = [ft for ft in orange.getRegisteredFileTypes() if len(ft)>3 and ft[3] and not ft[0]=="C50"]
dlgFormats = 'Tab-delimited files (*.tab)\nHeaderless tab-delimited (*.txt)\nComma separated (*.csv)\nC4.5 files (*.data)\nRetis files (*.rda *.rdo)\n' \
+ "\n".join("%s (%s)" % (ft[:2]) for ft in registeredFileTypes) \
+ "\nAll files(*.*)"
savers.update(dict((lower(ft[1][1:]), ft[3]) for ft in registeredFileTypes))
re_filterExtension = re.compile(r"\(\*(?P<ext>\.[^ )]+)")
def __init__(self,parent=None, signalManager = None):
OWWidget.__init__(self, parent, signalManager, "Save", wantMainArea = 0, resizingEnabled = 0)
self.inputs = [("Data", ExampleTable, self.dataset)]
self.outputs = []
self.recentFiles=[]
self.selectedFileName = "None"
self.data = None
self.filename = ""
self.loadSettings()
# vb = OWGUI.widgetBox(self.controlArea)
rfbox = OWGUI.widgetBox(self.controlArea, "Filename", orientation="horizontal", addSpace=True)
self.filecombo = OWGUI.comboBox(rfbox, self, "filename")
self.filecombo.setMinimumWidth(200)
# browse = OWGUI.button(rfbox, self, "...", callback = self.browseFile, width=25)
button = OWGUI.button(rfbox, self, '...', callback = self.browseFile, disabled=0)
button.setIcon(self.style().standardIcon(QStyle.SP_DirOpenIcon))
button.setSizePolicy(QSizePolicy.Maximum, QSizePolicy.Fixed)
fbox = OWGUI.widgetBox(self.controlArea, "Save")
self.save = OWGUI.button(fbox, self, "Save", callback = self.saveFile, default=True)
self.save.setDisabled(1)
OWGUI.rubber(self.controlArea)
#self.adjustSize()
self.setFilelist()
self.resize(260, 100)
self.filecombo.setCurrentIndex(0)
if self.selectedFileName != "":
if os.path.exists(self.selectedFileName):
self.openFile(self.selectedFileName)
else:
self.selectedFileName = ""
def dataset(self, data):
self.data = data
self.save.setDisabled(data == None)
def browseFile(self):
if self.recentFiles:
startfile = self.recentFiles[0]
else:
startfile = os.path.expanduser("~/")
# filename, selectedFilter = QFileDialog.getSaveFileNameAndFilter(self, 'Save Orange Data File', startfile,
# self.dlgFormats, self.dlgFormats.splitlines()[0])
# filename = str(filename)
# The preceding lines should work as per API, but do not; it's probably a PyQt bug as per March 2010.
# The following is a workaround.
# (As a consequence, filter selection is not taken into account when appending a default extension.)
filename, selectedFilter = QFileDialog.getSaveFileName(self, 'Save Orange Data File', startfile,
self.dlgFormats), self.dlgFormats.splitlines()[0]
filename = unicode(filename)
if not filename or not os.path.split(filename)[1]:
return
ext = lower(os.path.splitext(filename)[1])
if not ext in self.savers:
filt_ext = self.re_filterExtension.search(str(selectedFilter)).group("ext")
if filt_ext == ".*":
filt_ext = ".tab"
filename += filt_ext
self.addFileToList(filename)
self.saveFile()
def saveFile(self, *index):
self.error()
if self.data is not None:
combotext = unicode(self.filecombo.currentText())
if combotext == "(none)":
QMessageBox.information( None, "Error saving data", "Unable to save data. Select first a file name by clicking the '...' button.", QMessageBox.Ok + QMessageBox.Default)
return
filename = self.recentFiles[self.filecombo.currentIndex()]
fileExt = lower(os.path.splitext(filename)[1])
if fileExt == "":
fileExt = ".tab"
try:
self.savers[fileExt](filename, self.data)
except Exception, (errValue):
self.error(str(errValue))
return
self.error()
def addFileToList(self, fn):
if fn in self.recentFiles:
self.recentFiles.remove(fn)
self.recentFiles.insert(0,fn)
self.setFilelist()
def setFilelist(self):
"""Set the GUI filelist"""
self.filecombo.clear()
if self.recentFiles:
self.filecombo.addItems([os.path.split(file)[1] for file in self.recentFiles])
else:
self.filecombo.addItem("(none)")
if __name__ == "__main__":
a=QApplication(sys.argv)
owf=OWSave()
owf.show()
a.exec_()
owf.saveSettings()
| gpl-3.0 |
locustio/locust | locust/test/test_wait_time.py | 1 | 2342 | import random
import time
from locust import User, TaskSet, between, constant, constant_pacing
from locust.exception import MissingWaitTimeError
from .testcases import LocustTestCase
class TestWaitTime(LocustTestCase):
def test_between(self):
class MyUser(User):
wait_time = between(3, 9)
class TaskSet1(TaskSet):
pass
class TaskSet2(TaskSet):
wait_time = between(20.0, 21.0)
u = MyUser(self.environment)
ts1 = TaskSet1(u)
ts2 = TaskSet2(u)
for i in range(100):
w = u.wait_time()
self.assertGreaterEqual(w, 3)
self.assertLessEqual(w, 9)
w = ts1.wait_time()
self.assertGreaterEqual(w, 3)
self.assertLessEqual(w, 9)
for i in range(100):
w = ts2.wait_time()
self.assertGreaterEqual(w, 20)
self.assertLessEqual(w, 21)
def test_constant(self):
class MyUser(User):
wait_time = constant(13)
class TaskSet1(TaskSet):
pass
self.assertEqual(13, MyUser(self.environment).wait_time())
self.assertEqual(13, TaskSet1(MyUser(self.environment)).wait_time())
def test_default_wait_time(self):
class MyUser(User):
pass # default is wait_time = constant(0)
class TaskSet1(TaskSet):
pass
self.assertEqual(0, MyUser(self.environment).wait_time())
self.assertEqual(0, TaskSet1(MyUser(self.environment)).wait_time())
taskset = TaskSet1(MyUser(self.environment))
start_time = time.perf_counter()
taskset.wait()
self.assertLess(time.perf_counter() - start_time, 0.002)
def test_constant_pacing(self):
class MyUser(User):
wait_time = constant_pacing(0.1)
class TS(TaskSet):
pass
ts = TS(MyUser(self.environment))
ts2 = TS(MyUser(self.environment))
previous_time = time.perf_counter()
for i in range(7):
ts.wait()
since_last_run = time.perf_counter() - previous_time
self.assertLess(abs(0.1 - since_last_run), 0.02)
previous_time = time.perf_counter()
time.sleep(random.random() * 0.1)
_ = ts2.wait_time()
_ = ts2.wait_time()
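# Editor-added sketch (not referenced by the tests above): this is how the
# wait-time helpers exercised here are normally declared in a locustfile.
# The class name is an assumption used purely for illustration.
class _ExampleUser(User):
    # Pause a random 1-5 seconds between consecutive task executions.
    wait_time = between(1, 5)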
| mit |
icio/github3.py | tests/unit/test_null.py | 10 | 1832 | from .helper import UnitHelper
from github3.null import NullObject
import pytest
class TestNullObject(UnitHelper):
described_class = NullObject
def create_instance_of_described_class(self):
return self.described_class()
def test_returns_empty_list(self):
assert list(self.instance) == []
def test_contains_nothing(self):
assert 'foo' not in self.instance
def test_returns_itself_when_called(self):
assert self.instance('foo', 'bar', 'bogus') is self.instance
def test_returns_empty_string(self):
assert str(self.instance) == ''
def test_allows_arbitrary_attributes(self):
assert self.instance.attr is self.instance
def test_allows_arbitrary_attributes_to_be_set(self):
self.instance.attr = 'new'
assert self.instance.attr is self.instance
def test_provides_an_api_to_check_if_it_is_null(self):
assert self.instance.is_null()
def test_stops_iteration(self):
with pytest.raises(StopIteration):
next(self.instance)
def test_next_raises_stops_iteration(self):
with pytest.raises(StopIteration):
self.instance.next()
def test_getitem_returns_itself(self):
assert self.instance['attr'] is self.instance
def test_setitem_sets_nothing(self):
self.instance['attr'] = 'attr'
assert self.instance['attr'] is self.instance
def test_turns_into_unicode(self):
unicode_str = b''.decode('utf-8')
try:
assert unicode(self.instance) == unicode_str
except NameError:
assert str(self.instance) == unicode_str
def test_instances_are_falsey(self):
if self.instance:
pytest.fail()
def test_instances_can_be_coerced_to_zero(self):
assert int(self.instance) == 0
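# Editor-added standalone sketch of the null-object behaviour verified above;
# the attribute names are arbitrary assumptions.
def _example_null_chaining():
    null = NullObject()
    assert null.anything.chained('still') is null  # attributes/calls collapse
    assert list(null) == []                        # iteration yields nothing
    assert not null                                # instances are falsey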
| bsd-3-clause |
gmist/fix-5studio | main/auth/twitter.py | 6 | 1468 | # coding: utf-8
from __future__ import absolute_import
import flask
import auth
import config
import model
import util
from main import app
twitter_config = dict(
access_token_url='https://api.twitter.com/oauth/access_token',
authorize_url='https://api.twitter.com/oauth/authorize',
base_url='https://api.twitter.com/1.1/',
consumer_key=config.CONFIG_DB.twitter_consumer_key,
consumer_secret=config.CONFIG_DB.twitter_consumer_secret,
request_token_url='https://api.twitter.com/oauth/request_token',
)
twitter = auth.create_oauth_app(twitter_config, 'twitter')
@app.route('/api/auth/callback/twitter/')
def twitter_authorized():
response = twitter.authorized_response()
if response is None:
flask.flash('You denied the request to sign in.')
return flask.redirect(util.get_next_url())
flask.session['oauth_token'] = (
response['oauth_token'],
response['oauth_token_secret'],
)
user_db = retrieve_user_from_twitter(response)
return auth.signin_user_db(user_db)
@twitter.tokengetter
def get_twitter_token():
return flask.session.get('oauth_token')
@app.route('/signin/twitter/')
def signin_twitter():
return auth.signin_oauth(twitter)
def retrieve_user_from_twitter(response):
auth_id = 'twitter_%s' % response['user_id']
user_db = model.User.get_by('auth_ids', auth_id)
return user_db or auth.create_user_db(
auth_id=auth_id,
name=response['screen_name'],
username=response['screen_name'],
)
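# Editor-added flow sketch, grounded in the handlers above:
#
#   1. The browser opens /signin/twitter/ and signin_oauth() redirects the
#      user to Twitter's authorize URL.
#   2. Twitter redirects back to /api/auth/callback/twitter/.
#   3. twitter_authorized() stores the (token, secret) pair in the session
#      and retrieve_user_from_twitter() maps 'twitter_<user_id>' onto a
#      model.User, creating one on first sign-in.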
| mit |
js0701/chromium-crosswalk | tools/perf_expectations/make_expectations.py | 37 | 12595 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# For instructions see:
# http://www.chromium.org/developers/tree-sheriffs/perf-sheriffs
import hashlib
import math
import optparse
import os
import re
import subprocess
import sys
import time
import urllib2
try:
import json
except ImportError:
import simplejson as json
__version__ = '1.0'
EXPECTATIONS_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_CONFIG_FILE = os.path.join(EXPECTATIONS_DIR,
'chromium_perf_expectations.cfg')
DEFAULT_TOLERANCE = 0.05
USAGE = ''
def ReadFile(filename):
try:
file = open(filename, 'rb')
except IOError, e:
print >> sys.stderr, ('I/O Error reading file %s(%s): %s' %
(filename, e.errno, e.strerror))
raise e
contents = file.read()
file.close()
return contents
def ConvertJsonIntoDict(string):
"""Read a JSON string and convert its contents into a Python datatype."""
if len(string) == 0:
print >> sys.stderr, ('Error could not parse empty string')
raise Exception('JSON data missing')
try:
jsondata = json.loads(string)
except ValueError, e:
print >> sys.stderr, ('Error parsing string: "%s"' % string)
raise e
return jsondata
# Floating point representation of last time we fetched a URL.
last_fetched_at = None
def FetchUrlContents(url):
global last_fetched_at
if last_fetched_at and ((time.time() - last_fetched_at) <= 0.5):
# Sleep for half a second to avoid overloading the server.
time.sleep(0.5)
try:
last_fetched_at = time.time()
connection = urllib2.urlopen(url)
except urllib2.HTTPError, e:
if e.code == 404:
return None
raise e
text = connection.read().strip()
connection.close()
return text
def GetRowData(data, key):
rowdata = []
# reva and revb always come first.
for subkey in ['reva', 'revb']:
if subkey in data[key]:
rowdata.append('"%s": %s' % (subkey, data[key][subkey]))
# Strings, like type, come next.
for subkey in ['type', 'better']:
if subkey in data[key]:
rowdata.append('"%s": "%s"' % (subkey, data[key][subkey]))
# Finally the main numbers come last.
for subkey in ['improve', 'regress', 'tolerance']:
if subkey in data[key]:
rowdata.append('"%s": %s' % (subkey, data[key][subkey]))
return rowdata
def GetRowDigest(rowdata, key):
sha1 = hashlib.sha1()
rowdata = [str(possibly_unicode_string).encode('ascii')
for possibly_unicode_string in rowdata]
sha1.update(str(rowdata) + key)
return sha1.hexdigest()[0:8]
def WriteJson(filename, data, keys, calculate_sha1=True):
"""Write a list of |keys| in |data| to the file specified in |filename|."""
try:
file = open(filename, 'wb')
except IOError, e:
print >> sys.stderr, ('I/O Error writing file %s(%s): %s' %
(filename, e.errno, e.strerror))
return False
jsondata = []
for key in keys:
rowdata = GetRowData(data, key)
if calculate_sha1:
# Include an updated checksum.
rowdata.append('"sha1": "%s"' % GetRowDigest(rowdata, key))
else:
if 'sha1' in data[key]:
rowdata.append('"sha1": "%s"' % (data[key]['sha1']))
jsondata.append('"%s": {%s}' % (key, ', '.join(rowdata)))
jsondata.append('"load": true')
jsontext = '{%s\n}' % ',\n '.join(jsondata)
file.write(jsontext + '\n')
file.close()
return True
def FloatIsInt(f):
epsilon = 1.0e-10
return abs(f - int(f)) <= epsilon
last_key_printed = None
def Main(args):
def OutputMessage(message, verbose_message=True):
global last_key_printed
if not options.verbose and verbose_message:
return
if key != last_key_printed:
last_key_printed = key
print '\n' + key + ':'
print ' %s' % message
parser = optparse.OptionParser(usage=USAGE, version=__version__)
parser.add_option('-v', '--verbose', action='store_true', default=False,
help='enable verbose output')
parser.add_option('-s', '--checksum', action='store_true',
help='test if any changes are pending')
parser.add_option('-c', '--config', dest='config_file',
default=DEFAULT_CONFIG_FILE,
help='set the config file to FILE', metavar='FILE')
options, args = parser.parse_args(args)
if options.verbose:
print 'Verbose output enabled.'
config = ConvertJsonIntoDict(ReadFile(options.config_file))
# Get the list of summaries for a test.
base_url = config['base_url']
# Make the perf expectations file relative to the path of the config file.
perf_file = os.path.join(
os.path.dirname(options.config_file), config['perf_file'])
perf = ConvertJsonIntoDict(ReadFile(perf_file))
# Fetch graphs.dat for this combination.
perfkeys = perf.keys()
# In perf_expectations.json, ignore the 'load' key.
perfkeys.remove('load')
perfkeys.sort()
write_new_expectations = False
found_checksum_mismatch = False
for key in perfkeys:
value = perf[key]
tolerance = value.get('tolerance', DEFAULT_TOLERANCE)
better = value.get('better', None)
# Verify the checksum.
original_checksum = value.get('sha1', '')
if 'sha1' in value:
del value['sha1']
rowdata = GetRowData(perf, key)
computed_checksum = GetRowDigest(rowdata, key)
if original_checksum == computed_checksum:
OutputMessage('checksum matches, skipping')
continue
elif options.checksum:
found_checksum_mismatch = True
continue
# Skip expectations that are missing a reva or revb. We can't generate
# expectations for those.
if not(value.has_key('reva') and value.has_key('revb')):
OutputMessage('missing revision range, skipping')
continue
revb = int(value['revb'])
reva = int(value['reva'])
# Ensure that reva is less than revb.
if reva > revb:
temp = reva
reva = revb
revb = temp
# Get the system/test/graph/tracename and reftracename for the current key.
matchData = re.match(r'^([^/]+)\/([^/]+)\/([^/]+)\/([^/]+)$', key)
if not matchData:
OutputMessage('cannot parse key, skipping')
continue
system = matchData.group(1)
test = matchData.group(2)
graph = matchData.group(3)
tracename = matchData.group(4)
reftracename = tracename + '_ref'
# Create the summary_url and get the json data for that URL.
# FetchUrlContents() may sleep to avoid overloading the server with
# requests.
summary_url = '%s/%s/%s/%s-summary.dat' % (base_url, system, test, graph)
summaryjson = FetchUrlContents(summary_url)
if not summaryjson:
OutputMessage('ERROR: cannot find json data, please verify',
verbose_message=False)
return 0
# Set value's type to 'relative' by default.
value_type = value.get('type', 'relative')
summarylist = summaryjson.split('\n')
trace_values = {}
traces = [tracename]
if value_type == 'relative':
traces += [reftracename]
for trace in traces:
trace_values.setdefault(trace, {})
# Find the high and low values for each of the traces.
scanning = False
for line in summarylist:
jsondata = ConvertJsonIntoDict(line)
try:
rev = int(jsondata['rev'])
except ValueError:
        print ('Warning: skipping rev %r because it could not be parsed '
               'as an integer.' % jsondata['rev'])
continue
if rev <= revb:
scanning = True
if rev < reva:
break
# We found the upper revision in the range. Scan for trace data until we
# find the lower revision in the range.
if scanning:
for trace in traces:
if trace not in jsondata['traces']:
OutputMessage('trace %s missing' % trace)
continue
if type(jsondata['traces'][trace]) != type([]):
OutputMessage('trace %s format not recognized' % trace)
continue
try:
tracevalue = float(jsondata['traces'][trace][0])
except ValueError:
OutputMessage('trace %s value error: %s' % (
trace, str(jsondata['traces'][trace][0])))
continue
for bound in ['high', 'low']:
trace_values[trace].setdefault(bound, tracevalue)
trace_values[trace]['high'] = max(trace_values[trace]['high'],
tracevalue)
trace_values[trace]['low'] = min(trace_values[trace]['low'],
tracevalue)
if 'high' not in trace_values[tracename]:
OutputMessage('no suitable traces matched, skipping')
continue
if value_type == 'relative':
# Calculate assuming high deltas are regressions and low deltas are
# improvements.
regress = (float(trace_values[tracename]['high']) -
float(trace_values[reftracename]['low']))
improve = (float(trace_values[tracename]['low']) -
float(trace_values[reftracename]['high']))
elif value_type == 'absolute':
# Calculate assuming high absolutes are regressions and low absolutes are
# improvements.
regress = float(trace_values[tracename]['high'])
improve = float(trace_values[tracename]['low'])
# So far we've assumed better is lower (regress > improve). If the actual
# values for regress and improve are equal, though, and better was not
# specified, alert the user so we don't let them create a new file with
# ambiguous rules.
if better == None and regress == improve:
OutputMessage('regress (%s) is equal to improve (%s), and "better" is '
'unspecified, please fix by setting "better": "lower" or '
'"better": "higher" in this perf trace\'s expectation' % (
regress, improve), verbose_message=False)
return 1
# If the existing values assume regressions are low deltas relative to
# improvements, swap our regress and improve. This value must be a
# scores-like result.
if 'regress' in perf[key] and 'improve' in perf[key]:
if perf[key]['regress'] < perf[key]['improve']:
assert(better != 'lower')
better = 'higher'
temp = regress
regress = improve
improve = temp
else:
# Sometimes values are equal, e.g., when they are both 0,
# 'better' may still be set to 'higher'.
assert(better != 'higher' or
perf[key]['regress'] == perf[key]['improve'])
better = 'lower'
# If both were ints keep as int, otherwise use the float version.
originally_ints = False
if FloatIsInt(regress) and FloatIsInt(improve):
originally_ints = True
if better == 'higher':
if originally_ints:
regress = int(math.floor(regress - abs(regress*tolerance)))
improve = int(math.ceil(improve + abs(improve*tolerance)))
else:
regress = regress - abs(regress*tolerance)
improve = improve + abs(improve*tolerance)
else:
if originally_ints:
improve = int(math.floor(improve - abs(improve*tolerance)))
regress = int(math.ceil(regress + abs(regress*tolerance)))
else:
improve = improve - abs(improve*tolerance)
regress = regress + abs(regress*tolerance)
# Calculate the new checksum to test if this is the only thing that may have
# changed.
checksum_rowdata = GetRowData(perf, key)
new_checksum = GetRowDigest(checksum_rowdata, key)
if ('regress' in perf[key] and 'improve' in perf[key] and
perf[key]['regress'] == regress and perf[key]['improve'] == improve and
original_checksum == new_checksum):
OutputMessage('no change')
continue
write_new_expectations = True
OutputMessage('traces: %s' % trace_values, verbose_message=False)
OutputMessage('before: %s' % perf[key], verbose_message=False)
perf[key]['regress'] = regress
perf[key]['improve'] = improve
OutputMessage('after: %s' % perf[key], verbose_message=False)
if options.checksum:
if found_checksum_mismatch:
return 1
else:
return 0
if write_new_expectations:
print '\nWriting expectations... ',
WriteJson(perf_file, perf, perfkeys)
print 'done'
else:
if options.verbose:
print ''
print 'No changes.'
return 0
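def _example_row_digest():
  # Editor-added sketch of how one expectation row hashes; the key and the
  # field values are assumptions shaped like real perf_expectations rows.
  perf = {'linux/startup/warm/t': {'reva': 100, 'revb': 200,
                                   'improve': 5, 'regress': 7}}
  rowdata = GetRowData(perf, 'linux/startup/warm/t')
  return GetRowDigest(rowdata, 'linux/startup/warm/t')  # first 8 sha1 chars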
if __name__ == '__main__':
sys.exit(Main(sys.argv))
| bsd-3-clause |
MediaKraken/MediaKraken_Deployment | source/database_async/db_base_metadata_async.py | 1 | 8531 | import inspect
from common import common_logging_elasticsearch_httpx
async def db_metadata_guid_from_media_guid(self, guid, db_connection=None):
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
return await db_conn.fetchval('select mm_media_metadata_guid'
' from mm_media'
' where mm_media_guid = $1', guid)
async def db_meta_insert_tmdb(self, uuid_id, series_id, data_title, data_json,
data_image_json, db_connection=None):
"""
# insert metadata from themoviedb
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
await db_conn.execute('insert into mm_metadata_movie (mm_metadata_guid,'
' mm_metadata_media_id,'
' mm_metadata_name,'
' mm_metadata_json,'
' mm_metadata_localimage_json)'
' values ($1,$2,$3,$4,$5)',
uuid_id, series_id, data_title,
data_json, data_image_json)
await db_conn.execute('commit')
async def db_meta_guid_by_imdb(self, imdb_uuid, db_connection=None):
"""
# metadata guid by imdb id
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
return await db_conn.fetchval('select mm_metadata_guid'
' from mm_metadata_movie'
' where mm_metadata_media_id->\'imdb\' ? $1',
imdb_uuid)
async def db_meta_guid_by_tmdb(self, tmdb_uuid, db_connection=None):
"""
# see if metadata exists type and id
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
return await db_conn.fetchval('select mm_metadata_guid'
' from mm_metadata_movie'
' where mm_metadata_media_id = $1',
tmdb_uuid)
async def db_find_metadata_guid(self, media_name, media_release_year, db_connection=None):
"""
Lookup id by name/year
"""
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
'function':
inspect.stack()[0][
3],
'locals': locals(),
'caller':
inspect.stack()[1][
3]})
if db_connection is None:
db_conn = self.db_connection
else:
db_conn = db_connection
metadata_guid = None
if media_release_year is not None:
# for year and -3/+3 year as well
meta_results = await db_conn.fetch('select mm_metadata_guid from mm_metadata_movie'
' where (LOWER(mm_metadata_name) = $1'
' or lower(mm_metadata_json->>\'original_title\') = $2)'
' and substring(mm_metadata_json->>\'release_date\''
' from 0 for 5)'
' in ($3,$4,$5,$6,$7,$8,$9)',
media_name.lower(), media_name.lower(),
str(media_release_year),
str(int(media_release_year) + 1),
str(int(media_release_year) + 2),
str(int(media_release_year) + 3),
str(int(media_release_year) - 1),
str(int(media_release_year) - 2),
str(int(media_release_year) - 3))
else:
meta_results = await db_conn.fetch('select mm_metadata_guid from mm_metadata_movie'
' where (LOWER(mm_metadata_name) = $1'
' or lower(mm_metadata_json->>\'original_title\') = $2)',
media_name.lower(), media_name.lower())
for row_data in meta_results:
# TODO should probably handle multiple results better. Perhaps a notification?
metadata_guid = row_data['mm_metadata_guid']
await common_logging_elasticsearch_httpx.com_es_httpx_post_async(message_type='info',
message_text={
"db find metadata guid": metadata_guid})
break
return metadata_guid
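# Editor-added usage sketch: `self` is assumed to be the async DB wrapper
# these coroutines are mixed into, and the title/year values are illustrative.
#
#     guid = await self.db_find_metadata_guid('Blade Runner', 1982)
#     if guid is None:
#         # fall back to a TMDb fetch, then persist via db_meta_insert_tmdb()
#         ...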
| gpl-3.0 |
dwightgunning/django | tests/auth_tests/test_mixins.py | 274 | 8335 | from django.contrib.auth import models
from django.contrib.auth.mixins import (
LoginRequiredMixin, PermissionRequiredMixin, UserPassesTestMixin,
)
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.test import RequestFactory, TestCase
from django.views.generic import View
class AlwaysTrueMixin(UserPassesTestMixin):
def test_func(self):
return True
class AlwaysFalseMixin(UserPassesTestMixin):
def test_func(self):
return False
class EmptyResponseView(View):
def get(self, request, *args, **kwargs):
return HttpResponse()
class AlwaysTrueView(AlwaysTrueMixin, EmptyResponseView):
pass
class AlwaysFalseView(AlwaysFalseMixin, EmptyResponseView):
pass
class StackedMixinsView1(LoginRequiredMixin, PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser']
raise_exception = True
class StackedMixinsView2(PermissionRequiredMixin, LoginRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser']
raise_exception = True
class AccessMixinTests(TestCase):
factory = RequestFactory()
def test_stacked_mixins_success(self):
user = models.User.objects.create(username='joe', password='qwerty')
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
user.user_permissions.add(*perms)
request = self.factory.get('/rand')
request.user = user
view = StackedMixinsView1.as_view()
response = view(request)
self.assertEqual(response.status_code, 200)
view = StackedMixinsView2.as_view()
response = view(request)
self.assertEqual(response.status_code, 200)
def test_stacked_mixins_missing_permission(self):
user = models.User.objects.create(username='joe', password='qwerty')
perms = models.Permission.objects.filter(codename__in=('add_customuser',))
user.user_permissions.add(*perms)
request = self.factory.get('/rand')
request.user = user
view = StackedMixinsView1.as_view()
with self.assertRaises(PermissionDenied):
view(request)
view = StackedMixinsView2.as_view()
with self.assertRaises(PermissionDenied):
view(request)
def test_stacked_mixins_not_logged_in(self):
user = models.User.objects.create(username='joe', password='qwerty')
user.is_authenticated = lambda: False
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
user.user_permissions.add(*perms)
request = self.factory.get('/rand')
request.user = user
view = StackedMixinsView1.as_view()
with self.assertRaises(PermissionDenied):
view(request)
view = StackedMixinsView2.as_view()
with self.assertRaises(PermissionDenied):
view(request)
class UserPassesTestTests(TestCase):
factory = RequestFactory()
def _test_redirect(self, view=None, url='/accounts/login/?next=/rand'):
if not view:
view = AlwaysFalseView.as_view()
request = self.factory.get('/rand')
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual(response.url, url)
def test_default(self):
self._test_redirect()
def test_custom_redirect_url(self):
class AView(AlwaysFalseView):
login_url = '/login/'
self._test_redirect(AView.as_view(), '/login/?next=/rand')
def test_custom_redirect_parameter(self):
class AView(AlwaysFalseView):
redirect_field_name = 'goto'
self._test_redirect(AView.as_view(), '/accounts/login/?goto=/rand')
def test_no_redirect_parameter(self):
class AView(AlwaysFalseView):
redirect_field_name = None
self._test_redirect(AView.as_view(), '/accounts/login/')
def test_raise_exception(self):
class AView(AlwaysFalseView):
raise_exception = True
request = self.factory.get('/rand')
request.user = AnonymousUser()
self.assertRaises(PermissionDenied, AView.as_view(), request)
def test_raise_exception_custom_message(self):
msg = "You don't have access here"
class AView(AlwaysFalseView):
raise_exception = True
permission_denied_message = msg
request = self.factory.get('/rand')
request.user = AnonymousUser()
view = AView.as_view()
with self.assertRaises(PermissionDenied) as cm:
view(request)
self.assertEqual(cm.exception.args[0], msg)
def test_raise_exception_custom_message_function(self):
msg = "You don't have access here"
class AView(AlwaysFalseView):
raise_exception = True
def get_permission_denied_message(self):
return msg
request = self.factory.get('/rand')
request.user = AnonymousUser()
view = AView.as_view()
with self.assertRaises(PermissionDenied) as cm:
view(request)
self.assertEqual(cm.exception.args[0], msg)
def test_user_passes(self):
view = AlwaysTrueView.as_view()
request = self.factory.get('/rand')
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 200)
class LoginRequiredMixinTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username='joe', password='qwerty')
def test_login_required(self):
"""
        Check that login is required for a simple view using
        LoginRequiredMixin.
"""
class AView(LoginRequiredMixin, EmptyResponseView):
pass
view = AView.as_view()
request = self.factory.get('/rand')
request.user = AnonymousUser()
response = view(request)
self.assertEqual(response.status_code, 302)
self.assertEqual('/accounts/login/?next=/rand', response.url)
request = self.factory.get('/rand')
request.user = self.user
response = view(request)
self.assertEqual(response.status_code, 200)
class PermissionsRequiredMixinTests(TestCase):
factory = RequestFactory()
@classmethod
def setUpTestData(cls):
cls.user = models.User.objects.create(username='joe', password='qwerty')
perms = models.Permission.objects.filter(codename__in=('add_customuser', 'change_customuser'))
cls.user.user_permissions.add(*perms)
def test_many_permissions_pass(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser']
request = self.factory.get('/rand')
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 200)
def test_single_permission_pass(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = 'auth.add_customuser'
request = self.factory.get('/rand')
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 200)
def test_permissioned_denied_redirect(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission']
request = self.factory.get('/rand')
request.user = self.user
resp = AView.as_view()(request)
self.assertEqual(resp.status_code, 302)
def test_permissioned_denied_exception_raised(self):
class AView(PermissionRequiredMixin, EmptyResponseView):
permission_required = ['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission']
raise_exception = True
request = self.factory.get('/rand')
request.user = self.user
self.assertRaises(PermissionDenied, AView.as_view(), request)
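# Editor-added sketch (not exercised by the tests above): the stacking order
# verified in StackedMixinsView1/2 is how the mixins are declared in
# application code. The permission string is an assumption.
class ExampleProtectedView(LoginRequiredMixin, PermissionRequiredMixin,
                           EmptyResponseView):
    permission_required = 'auth.add_customuser'
    raise_exception = True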
| bsd-3-clause |
s20121035/rk3288_android5.1_repo | external/chromium_org/third_party/closure_compiler/compiler_customization_test.py | 40 | 6461 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from checker import Checker
from processor import FileCache, Processor
ASSERT_FILE = os.path.join("..", "..", "ui", "webui", "resources", "js",
"assert.js")
CR_FILE = os.path.join("..", "..", "ui", "webui", "resources", "js", "cr.js")
UI_FILE = os.path.join("..", "..", "ui", "webui", "resources", "js", "cr",
"ui.js")
def rel_to_abs(rel_path):
script_path = os.path.dirname(os.path.abspath(__file__))
return os.path.join(script_path, rel_path)
class CompilerCustomizationTest(unittest.TestCase):
_ASSERT_DEFINITION = Processor(rel_to_abs(ASSERT_FILE)).contents
_CR_DEFINE_DEFINITION = Processor(rel_to_abs(CR_FILE)).contents
_CR_UI_DECORATE_DEFINITION = Processor(rel_to_abs(UI_FILE)).contents
def setUp(self):
self._checker = Checker()
def _runChecker(self, source_code):
file_path = "/script.js"
FileCache._cache[file_path] = source_code
return self._checker.check(file_path)
def _runCheckerTestExpectError(self, source_code, expected_error):
_, output = self._runChecker(source_code)
self.assertTrue(expected_error in output,
msg="Expected chunk: \n%s\n\nOutput:\n%s\n" % (
expected_error, output))
def _runCheckerTestExpectSuccess(self, source_code):
return_code, output = self._runChecker(source_code)
self.assertTrue(return_code == 0,
msg="Expected success, got return code %d\n\nOutput:\n%s\n" % (
return_code, output))
def testGetInstance(self):
self._runCheckerTestExpectError("""
var cr = {
/** @param {!Function} ctor */
addSingletonGetter: function(ctor) {
ctor.getInstance = function() {
return ctor.instance_ || (ctor.instance_ = new ctor());
};
}
};
/** @constructor */
function Class() {
/** @param {number} num */
this.needsNumber = function(num) {};
}
cr.addSingletonGetter(Class);
Class.getInstance().needsNumber("wrong type");
""", "ERROR - actual parameter 1 of Class.needsNumber does not match formal "
"parameter")
def testCrDefineFunctionDefinition(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @param {number} num */
function internalName(num) {}
return {
needsNumber: internalName
};
});
a.b.c.needsNumber("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.needsNumber does not match formal "
"parameter")
def testCrDefineFunctionAssignment(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @param {number} num */
var internalName = function(num) {};
return {
needsNumber: internalName
};
});
a.b.c.needsNumber("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.needsNumber does not match formal "
"parameter")
def testCrDefineConstructorDefinitionPrototypeMethod(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @constructor */
function ClassInternalName() {}
ClassInternalName.prototype = {
/** @param {number} num */
method: function(num) {}
};
return {
ClassExternalName: ClassInternalName
};
});
new a.b.c.ClassExternalName().method("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.ClassExternalName.prototype.method "
"does not match formal parameter")
def testCrDefineConstructorAssignmentPrototypeMethod(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @constructor */
var ClassInternalName = function() {};
ClassInternalName.prototype = {
/** @param {number} num */
method: function(num) {}
};
return {
ClassExternalName: ClassInternalName
};
});
new a.b.c.ClassExternalName().method("wrong type");
""", "ERROR - actual parameter 1 of a.b.c.ClassExternalName.prototype.method "
"does not match formal parameter")
def testCrDefineEnum(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
cr.define('a.b.c', function() {
/** @enum {string} */
var internalNameForEnum = {key: 'wrong_type'};
return {
exportedEnum: internalNameForEnum
};
});
/** @param {number} num */
function needsNumber(num) {}
needsNumber(a.b.c.exportedEnum.key);
""", "ERROR - actual parameter 1 of needsNumber does not match formal "
"parameter")
def testObjectDefineProperty(self):
self._runCheckerTestExpectSuccess("""
/** @constructor */
function Class() {}
Object.defineProperty(Class.prototype, 'myProperty', {});
alert(new Class().myProperty);
""")
def testCrDefineProperty(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION + """
/** @constructor */
function Class() {}
cr.defineProperty(Class.prototype, 'myProperty', cr.PropertyKind.JS);
alert(new Class().myProperty);
""")
def testCrDefinePropertyTypeChecking(self):
self._runCheckerTestExpectError(self._CR_DEFINE_DEFINITION + """
/** @constructor */
function Class() {}
cr.defineProperty(Class.prototype, 'booleanProp', cr.PropertyKind.BOOL_ATTR);
/** @param {number} num */
function needsNumber(num) {}
needsNumber(new Class().booleanProp);
""", "ERROR - actual parameter 1 of needsNumber does not match formal "
"parameter")
def testCrDefineOnCrWorks(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION + """
cr.define('cr', function() {
return {};
});
""")
def testAssertWorks(self):
self._runCheckerTestExpectSuccess(self._ASSERT_DEFINITION + """
/** @return {?string} */
function f() {
return "string";
}
/** @type {!string} */
var a = assert(f());
""")
def testAssertInstanceofWorks(self):
self._runCheckerTestExpectSuccess(self._ASSERT_DEFINITION + """
/** @constructor */
function Class() {}
/** @return {Class} */
function f() {
var a = document.createElement('div');
return assertInstanceof(a, Class);
}
""")
def testCrUiDecorateWorks(self):
self._runCheckerTestExpectSuccess(self._CR_DEFINE_DEFINITION +
self._CR_UI_DECORATE_DEFINITION + """
/** @constructor */
function Class() {}
/** @return {Class} */
function f() {
var a = document.createElement('div');
cr.ui.decorate(a, Class);
return a;
}
""")
if __name__ == "__main__":
unittest.main()
| gpl-3.0 |
CapOM/ChromiumGStreamerBackend | tools/telemetry/third_party/gsutilz/third_party/boto/boto/sdb/item.py | 153 | 6885 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import base64
class Item(dict):
"""
A ``dict`` sub-class that serves as an object representation of a
SimpleDB item. An item in SDB is similar to a row in a relational
database. Items belong to a :py:class:`Domain <boto.sdb.domain.Domain>`,
which is similar to a table in a relational database.
The keys on instances of this object correspond to attributes that are
stored on the SDB item.
.. tip:: While it is possible to instantiate this class directly, you may
want to use the convenience methods on :py:class:`boto.sdb.domain.Domain`
for that purpose. For example, :py:meth:`boto.sdb.domain.Domain.get_item`.
"""
def __init__(self, domain, name='', active=False):
"""
:type domain: :py:class:`boto.sdb.domain.Domain`
:param domain: The domain that this item belongs to.
:param str name: The name of this item. This name will be used when
querying for items using methods like
:py:meth:`boto.sdb.domain.Domain.get_item`
"""
dict.__init__(self)
self.domain = domain
self.name = name
self.active = active
self.request_id = None
self.encoding = None
self.in_attribute = False
self.converter = self.domain.connection.converter
def startElement(self, name, attrs, connection):
if name == 'Attribute':
self.in_attribute = True
self.encoding = attrs.get('encoding', None)
return None
def decode_value(self, value):
if self.encoding == 'base64':
self.encoding = None
return base64.decodestring(value)
else:
return value
def endElement(self, name, value, connection):
if name == 'ItemName':
self.name = self.decode_value(value)
elif name == 'Name':
if self.in_attribute:
self.last_key = self.decode_value(value)
else:
self.name = self.decode_value(value)
elif name == 'Value':
if self.last_key in self:
if not isinstance(self[self.last_key], list):
self[self.last_key] = [self[self.last_key]]
value = self.decode_value(value)
if self.converter:
value = self.converter.decode(value)
self[self.last_key].append(value)
else:
value = self.decode_value(value)
if self.converter:
value = self.converter.decode(value)
self[self.last_key] = value
elif name == 'BoxUsage':
try:
connection.box_usage += float(value)
except:
pass
elif name == 'RequestId':
self.request_id = value
elif name == 'Attribute':
self.in_attribute = False
else:
setattr(self, name, value)
def load(self):
"""
Loads or re-loads this item's attributes from SDB.
.. warning::
            If you have changed attribute values on an Item instance,
            this method will overwrite those values with whatever is
            currently stored in SDB. Local attributes that do not yet
            exist in SDB are left untouched.
"""
self.domain.get_attributes(self.name, item=self)
def save(self, replace=True):
"""
Saves this item to SDB.
:param bool replace: If ``True``, delete any attributes on the remote
SDB item that have a ``None`` value on this object.
"""
self.domain.put_attributes(self.name, self, replace)
# Delete any attributes set to "None"
if replace:
del_attrs = []
for name in self:
if self[name] is None:
del_attrs.append(name)
if len(del_attrs) > 0:
self.domain.delete_attributes(self.name, del_attrs)
def add_value(self, key, value):
"""
Helps set or add to attributes on this item. If you are adding a new
attribute that has yet to be set, it will simply create an attribute
named ``key`` with your given ``value`` as its value. If you are
adding a value to an existing attribute, this method will convert the
attribute to a list (if it isn't already) and append your new value
to said list.
For clarification, consider the following interactive session:
.. code-block:: python
>>> item = some_domain.get_item('some_item')
>>> item.has_key('some_attr')
False
>>> item.add_value('some_attr', 1)
>>> item['some_attr']
1
>>> item.add_value('some_attr', 2)
>>> item['some_attr']
[1, 2]
:param str key: The attribute to add a value to.
:param object value: The value to set or append to the attribute.
"""
if key in self:
# We already have this key on the item.
if not isinstance(self[key], list):
# The key isn't already a list, take its current value and
# convert it to a list with the only member being the
# current value.
self[key] = [self[key]]
# Add the new value to the list.
self[key].append(value)
else:
# This is a new attribute, just set it.
self[key] = value
def delete(self):
"""
Deletes this item in SDB.
.. note:: This local Python object remains in its current state
after deletion, this only deletes the remote item in SDB.
"""
self.domain.delete_item(self)
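# --- Editor's sketch (not part of upstream boto): a typical Item round-trip,
# assuming valid AWS credentials plus a hypothetical SimpleDB domain 'mydomain'
# holding an item named 'some_item'. ---
def _example_item_usage():
    import boto
    sdb = boto.connect_sdb()
    domain = sdb.get_domain('mydomain')
    item = domain.get_item('some_item')
    item.add_value('color', 'red')    # first value: plain attribute
    item.add_value('color', 'blue')   # second value: attribute becomes a list
    item.save()                       # replace=True also prunes None attributes
    item.load()                       # re-read the stored attributes from SDB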
| bsd-3-clause |
dudepare/django | django/contrib/admindocs/urls.py | 574 | 1183 | from django.conf.urls import url
from django.contrib.admindocs import views
urlpatterns = [
    url(r'^$',
        views.BaseAdminDocsView.as_view(template_name='admin_doc/index.html'),
        name='django-admindocs-docroot'),
    url(r'^bookmarklets/$',
        views.BookmarkletsView.as_view(),
        name='django-admindocs-bookmarklets'),
    url(r'^tags/$',
        views.TemplateTagIndexView.as_view(),
        name='django-admindocs-tags'),
    url(r'^filters/$',
        views.TemplateFilterIndexView.as_view(),
        name='django-admindocs-filters'),
    url(r'^views/$',
        views.ViewIndexView.as_view(),
        name='django-admindocs-views-index'),
    url(r'^views/(?P<view>[^/]+)/$',
        views.ViewDetailView.as_view(),
        name='django-admindocs-views-detail'),
    url(r'^models/$',
        views.ModelIndexView.as_view(),
        name='django-admindocs-models-index'),
    url(r'^models/(?P<app_label>[^\.]+)\.(?P<model_name>[^/]+)/$',
        views.ModelDetailView.as_view(),
        name='django-admindocs-models-detail'),
    url(r'^templates/(?P<template>.*)/$',
        views.TemplateDetailView.as_view(),
        name='django-admindocs-templates'),
]
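# Editor's note (illustrative, not part of Django): the named patterns above
# can be resolved with reverse(), e.g.
#     reverse('django-admindocs-models-detail',
#             kwargs={'app_label': 'auth', 'model_name': 'user'})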
| bsd-3-clause |
patilsangram/erpnext | erpnext/patches/v10_0/set_auto_created_serial_no_in_stock_entry.py | 17 | 1708 | # Copyright (c) 2017, Frappe and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
serialised_items = [d.name for d in frappe.get_all("Item", filters={"has_serial_no": 1})]
if not serialised_items:
return
for dt in ["Stock Entry Detail", "Purchase Receipt Item", "Purchase Invoice Item"]:
cond = ""
if dt=="Purchase Invoice Item":
cond = """ and parent in (select name from `tabPurchase Invoice`
where `tabPurchase Invoice`.name = `tabPurchase Invoice Item`.parent and update_stock=1)"""
item_rows = frappe.db.sql("""
select name
from `tab{0}`
where conversion_factor != 1
and docstatus = 1
and ifnull(serial_no, '') = ''
and item_code in ({1})
{2}
""".format(dt, ', '.join(['%s']*len(serialised_items)), cond), tuple(serialised_items))
if item_rows:
sle_serial_nos = dict(frappe.db.sql("""
select voucher_detail_no, serial_no
from `tabStock Ledger Entry`
where ifnull(serial_no, '') != ''
and voucher_detail_no in (%s)
""".format(', '.join(['%s']*len(item_rows))),
tuple([d[0] for d in item_rows])))
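			# Editor's note: update in batches of 100 rows, folding each batch
			# into a single CASE .. WHEN .. THEN clause so one UPDATE statement
			# serves the whole batch instead of one UPDATE per row.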
batch_size = 100
for i in range(0, len(item_rows), batch_size):
batch_item_rows = item_rows[i:i + batch_size]
when_then = []
for item_row in batch_item_rows:
when_then.append('WHEN `name` = "{row_name}" THEN "{value}"'.format(
row_name=item_row[0],
value=sle_serial_nos.get(item_row[0])))
frappe.db.sql("""
update
`tab{doctype}`
set
serial_no = CASE {when_then_cond} ELSE `serial_no` END
""".format(
doctype = dt,
when_then_cond=" ".join(when_then)
)) | gpl-3.0 |
MartijnBraam/CouchPotatoServer | libs/suds/servicedefinition.py | 200 | 8478 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
The I{service definition} provides a textual representation of a service.
"""
from logging import getLogger
from suds import *
import suds.metrics as metrics
from suds.sax import Namespace
log = getLogger(__name__)
class ServiceDefinition:
"""
A service definition provides an object used to generate a textual description
of a service.
@ivar wsdl: A wsdl.
@type wsdl: L{wsdl.Definitions}
@ivar service: The service object.
@type service: L{suds.wsdl.Service}
@ivar ports: A list of port-tuple: (port, [(method-name, pdef)])
@type ports: [port-tuple,..]
@ivar prefixes: A list of remapped prefixes.
@type prefixes: [(prefix,uri),..]
@ivar types: A list of type definitions
@type types: [I{Type},..]
"""
def __init__(self, wsdl, service):
"""
@param wsdl: A wsdl object
@type wsdl: L{Definitions}
@param service: A service B{name}.
@type service: str
"""
self.wsdl = wsdl
self.service = service
self.ports = []
self.params = []
self.types = []
self.prefixes = []
self.addports()
self.paramtypes()
self.publictypes()
self.getprefixes()
self.pushprefixes()
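    # Editor's note: the constructor above builds the definition in order --
    # ports/methods (addports), parameter types (paramtypes), remaining schema
    # types (publictypes), prefix assignment (getprefixes) and finally writing
    # the prefixes back into the wsdl root (pushprefixes).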
def pushprefixes(self):
"""
Add our prefixes to the wsdl so that when users invoke methods
        and reference the prefixes, they will resolve properly.
"""
for ns in self.prefixes:
self.wsdl.root.addPrefix(ns[0], ns[1])
def addports(self):
"""
Look through the list of service ports and construct a list of tuples where
        each tuple is used to describe a port and its list of methods as:
(port, [method]). Each method is tuple: (name, [pdef,..] where each pdef is
a tuple: (param-name, type).
"""
timer = metrics.Timer()
timer.start()
for port in self.service.ports:
p = self.findport(port)
for op in port.binding.operations.values():
m = p[0].method(op.name)
binding = m.binding.input
method = (m.name, binding.param_defs(m))
p[1].append(method)
metrics.log.debug("method '%s' created: %s", m.name, timer)
p[1].sort()
timer.stop()
def findport(self, port):
"""
Find and return a port tuple for the specified port.
Created and added when not found.
@param port: A port.
@type port: I{service.Port}
@return: A port tuple.
@rtype: (port, [method])
"""
for p in self.ports:
            if p[0] == port: return p
p = (port, [])
self.ports.append(p)
return p
def getprefixes(self):
"""
Add prefixes foreach namespace referenced by parameter types.
"""
namespaces = []
for l in (self.params, self.types):
for t,r in l:
ns = r.namespace()
if ns[1] is None: continue
if ns[1] in namespaces: continue
if Namespace.xs(ns) or Namespace.xsd(ns):
continue
namespaces.append(ns[1])
if t == r: continue
ns = t.namespace()
if ns[1] is None: continue
if ns[1] in namespaces: continue
namespaces.append(ns[1])
namespaces.sort()
for u in namespaces:
p = self.nextprefix()
ns = (p, u)
self.prefixes.append(ns)
def paramtypes(self):
""" get all parameter types """
for m in [p[1] for p in self.ports]:
for p in [p[1] for p in m]:
for pd in p:
if pd[1] in self.params: continue
item = (pd[1], pd[1].resolve())
self.params.append(item)
def publictypes(self):
""" get all public types """
for t in self.wsdl.schema.types.values():
if t in self.params: continue
if t in self.types: continue
item = (t, t)
self.types.append(item)
tc = lambda x,y: cmp(x[0].name, y[0].name)
self.types.sort(cmp=tc)
def nextprefix(self):
"""
Get the next available prefix. This means a prefix starting with 'ns' with
a number appended as (ns0, ns1, ..) that is not already defined on the
wsdl document.
"""
used = [ns[0] for ns in self.prefixes]
used += [ns[0] for ns in self.wsdl.root.nsprefixes.items()]
for n in range(0,1024):
p = 'ns%d'%n
if p not in used:
return p
raise Exception('prefixes exhausted')
def getprefix(self, u):
"""
Get the prefix for the specified namespace (uri)
@param u: A namespace uri.
@type u: str
        @return: The mapped prefix.
        @rtype: str
"""
for ns in Namespace.all:
if u == ns[1]: return ns[0]
for ns in self.prefixes:
if u == ns[1]: return ns[0]
raise Exception('ns (%s) not mapped' % u)
def xlate(self, type):
"""
Get a (namespace) translated I{qualified} name for specified type.
@param type: A schema type.
@type type: I{suds.xsd.sxbasic.SchemaObject}
@return: A translated I{qualified} name.
@rtype: str
"""
resolved = type.resolve()
name = resolved.name
if type.unbounded():
name += '[]'
ns = resolved.namespace()
if ns[1] == self.wsdl.tns[1]:
return name
prefix = self.getprefix(ns[1])
return ':'.join((prefix, name))
def description(self):
"""
Get a textual description of the service for which this object represents.
@return: A textual description.
@rtype: str
"""
s = []
indent = (lambda n : '\n%*s'%(n*3,' '))
s.append('Service ( %s ) tns="%s"' % (self.service.name, self.wsdl.tns[1]))
s.append(indent(1))
s.append('Prefixes (%d)' % len(self.prefixes))
for p in self.prefixes:
s.append(indent(2))
s.append('%s = "%s"' % p)
s.append(indent(1))
s.append('Ports (%d):' % len(self.ports))
for p in self.ports:
s.append(indent(2))
s.append('(%s)' % p[0].name)
s.append(indent(3))
s.append('Methods (%d):' % len(p[1]))
for m in p[1]:
sig = []
s.append(indent(4))
sig.append(m[0])
sig.append('(')
for p in m[1]:
sig.append(self.xlate(p[1]))
sig.append(' ')
sig.append(p[0])
sig.append(', ')
sig.append(')')
try:
s.append(''.join(sig))
except:
pass
s.append(indent(3))
s.append('Types (%d):' % len(self.types))
for t in self.types:
s.append(indent(4))
s.append(self.xlate(t[0]))
s.append('\n\n')
return ''.join(s)
def __str__(self):
return unicode(self).encode('utf-8')
def __unicode__(self):
try:
return self.description()
except Exception, e:
log.exception(e)
return tostr(e) | gpl-3.0 |
agentfog/qiime | scripts/upgma_cluster.py | 15 | 2742 | #!/usr/bin/env python
# File created on 09 Feb 2010
from __future__ import division
__author__ = "Justin Kuczynski"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["Justin Kuczynski"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Justin Kuczynski"
__email__ = "[email protected]"
from qiime.util import parse_command_line_parameters
from qiime.util import make_option
from qiime.hierarchical_cluster import single_file_upgma, multiple_file_upgma
import os
script_info = {}
script_info['brief_description'] = """Build a UPGMA tree comparing samples"""
script_info[
'script_description'] = """In addition to using PCoA, it can be useful to cluster samples using UPGMA (Unweighted Pair Group Method with Arithmetic mean, also known as average linkage). As with PCoA, the input to this step is a distance matrix (i.e. resulting file from beta_diversity.py)."""
script_info['script_usage'] = []
script_info['script_usage'].append(
("""UPGMA Cluster (Single File):""",
"""To perform UPGMA clustering on a single distance matrix (e.g.: beta_div.txt, a result file from beta_diversity.py) use the following idiom:""",
"""%prog -i $PWD/beta_div.txt -o $PWD/beta_div_cluster.tre"""))
script_info['script_usage'].append(
("""UPGMA Cluster (Multiple Files):""",
"""The script also functions in batch mode if a folder is supplied as input. This script operates on every file in the input directory and creates a corresponding upgma tree file in the output directory, e.g.:""",
"""%prog -i $PWD/beta_div_folder -o $PWD/beta_div_folder_results/"""))
script_info[
'output_description'] = """The output is a newick formatted tree compatible with most standard tree viewing programs. Batch processing is also available, allowing the analysis of an entire directory of distance matrices."""
script_info['required_options'] = [
make_option('-i', '--input_path',
type='existing_path', help='input path. directory for batch processing, ' +
'filename for single file operation'),
make_option('-o', '--output_path',
type='new_path', help='output path. directory for batch processing, ' +
'filename for single file operation'),
]
script_info['optional_options'] = []
script_info['version'] = __version__
def main():
option_parser, opts, args = parse_command_line_parameters(**script_info)
if os.path.isdir(opts.input_path):
multiple_file_upgma(opts.input_path, opts.output_path)
elif os.path.isfile(opts.input_path):
single_file_upgma(opts.input_path, opts.output_path)
else:
print("io error, check input file path")
exit(1)
if __name__ == "__main__":
main()
| gpl-2.0 |
donce/django-cms | cms/models/fields.py | 11 | 4349 | # -*- coding: utf-8 -*-
from cms.forms.fields import PageSelectFormField
from cms.models.pagemodel import Page
from cms.models.placeholdermodel import Placeholder
from cms.utils.placeholder import PlaceholderNoAction, validate_placeholder_name
from django.db import models
class PlaceholderField(models.ForeignKey):
def __init__(self, slotname, default_width=None, actions=PlaceholderNoAction, **kwargs):
if kwargs.get('related_name', None) == '+':
raise ValueError("PlaceholderField does not support disabling of related names via '+'.")
if not callable(slotname):
validate_placeholder_name(slotname)
self.slotname = slotname
self.default_width = default_width
self.actions = actions()
if 'to' in kwargs:
del(kwargs['to'])
kwargs.update({'null': True}) # always allow Null
kwargs.update({'editable': False}) # never allow edits in admin
super(PlaceholderField, self).__init__(Placeholder, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(PlaceholderField, self).deconstruct()
kwargs['slotname'] = self.slotname
return name, path, args, kwargs
def _get_new_placeholder(self, instance):
return Placeholder.objects.create(slot=self._get_placeholder_slot(instance), default_width=self.default_width)
def _get_placeholder_slot(self, model_instance):
if callable(self.slotname):
slotname = self.slotname(model_instance)
validate_placeholder_name(slotname)
else:
slotname = self.slotname
return slotname
def pre_save(self, model_instance, add):
if not model_instance.pk:
setattr(model_instance, self.name, self._get_new_placeholder(model_instance))
else:
slot = self._get_placeholder_slot(model_instance)
placeholder = getattr(model_instance, self.name)
if not placeholder:
setattr(model_instance, self.name, self._get_new_placeholder(model_instance))
placeholder = getattr(model_instance, self.name)
if placeholder.slot != slot:
placeholder.slot = slot
placeholder.save()
return super(PlaceholderField, self).pre_save(model_instance, add)
def save_form_data(self, instance, data):
data = getattr(instance, self.name, '')
if not isinstance(data, Placeholder):
data = self._get_new_placeholder(instance)
super(PlaceholderField, self).save_form_data(instance, data)
def south_field_triple(self):
"Returns a suitable description of this field for South."
# We'll just introspect ourselves, since we inherit.
from south.modelsinspector import introspector
field_class = "django.db.models.fields.related.ForeignKey"
args, kwargs = introspector(self)
# That's our definition!
return (field_class, args, kwargs)
def contribute_to_class(self, cls, name):
super(PlaceholderField, self).contribute_to_class(cls, name)
if not hasattr(cls._meta, 'placeholder_field_names'):
cls._meta.placeholder_field_names = []
if not hasattr(cls._meta, 'placeholder_fields'):
cls._meta.placeholder_fields = {}
cls._meta.placeholder_field_names.append(name)
cls._meta.placeholder_fields[self] = name
self.model = cls
class PageField(models.ForeignKey):
default_form_class = PageSelectFormField
default_model_class = Page
def __init__(self, **kwargs):
# We hard-code the `to` argument for ForeignKey.__init__
# since a PageField can only be a ForeignKey to a Page
kwargs['to'] = self.default_model_class
super(PageField, self).__init__(**kwargs)
def formfield(self, **kwargs):
defaults = {
'form_class': self.default_form_class,
}
defaults.update(kwargs)
return super(PageField, self).formfield(**defaults)
def south_field_triple(self):
"Returns a suitable description of this field for South."
from south.modelsinspector import introspector
field_class = "django.db.models.fields.related.ForeignKey"
args, kwargs = introspector(self)
return (field_class, args, kwargs)
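# Editor's sketch (illustrative, not part of django-cms): typical model usage,
# assuming a configured Django project; the model and slot name below are
# hypothetical.
#     class Article(models.Model):
#         content = PlaceholderField('article_content')
#         page = PageField(null=True, blank=True)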
| bsd-3-clause |
Frodox/buildbot | master/buildbot/test/unit/test_changes_gerritchangesource.py | 3 | 14848 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.utils import iteritems
import datetime
import json
import types
from twisted.internet import defer
from twisted.internet import error
from twisted.internet import reactor
from twisted.python import failure
from twisted.trial import unittest
from buildbot.changes import gerritchangesource
from buildbot.test.fake import httpclientservice as fakehttpclientservice
from buildbot.test.fake import fakedb
from buildbot.test.fake.change import Change
from buildbot.test.util import changesource
class TestGerritHelpers(unittest.TestCase):
def test_proper_json(self):
self.assertEqual(u"Justin Case <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"username": "justincase",
"name": "Justin Case",
"email": "[email protected]"
}))
def test_missing_username(self):
self.assertEqual(u"Justin Case <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"name": "Justin Case",
"email": "[email protected]"
}))
def test_missing_name(self):
self.assertEqual(u"unknown <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"email": "[email protected]"
}))
self.assertEqual(u"gerrit <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"email": "[email protected]"
}, u"gerrit"))
self.assertEqual(u"justincase <[email protected]>",
gerritchangesource._gerrit_user_to_author({
"username": "justincase",
"email": "[email protected]"
}, u"gerrit"))
def test_missing_email(self):
self.assertEqual(u"Justin Case",
gerritchangesource._gerrit_user_to_author({
"username": "justincase",
"name": "Justin Case"
}))
self.assertEqual(u"Justin Case",
gerritchangesource._gerrit_user_to_author({
"name": "Justin Case"
}))
self.assertEqual(u"justincase",
gerritchangesource._gerrit_user_to_author({
"username": "justincase"
}))
self.assertEqual(u"unknown",
gerritchangesource._gerrit_user_to_author({
}))
self.assertEqual(u"gerrit",
gerritchangesource._gerrit_user_to_author({
}, u"gerrit"))
class TestGerritChangeSource(changesource.ChangeSourceMixin,
unittest.TestCase):
def setUp(self):
return self.setUpChangeSource()
def tearDown(self):
return self.tearDownChangeSource()
def newChangeSource(self, host, user, *args, **kwargs):
s = gerritchangesource.GerritChangeSource(
host, user, *args, **kwargs)
self.attachChangeSource(s)
s.configureService()
return s
# tests
def test_describe(self):
s = self.newChangeSource('somehost', 'someuser')
self.assertSubstring("GerritChangeSource", s.describe())
def test_name(self):
s = self.newChangeSource('somehost', 'someuser')
self.assertEqual("GerritChangeSource:someuser@somehost:29418", s.name)
s = self.newChangeSource('somehost', 'someuser', name="MyName")
self.assertEqual("MyName", s.name)
# TODO: test the backoff algorithm
# this variable is reused in test_steps_source_repo
# to ensure correct integration between change source and repo step
expected_change = {'category': u'patchset-created',
'files': ['unknown'],
'repository': u'ssh://someuser@somehost:29418/pr',
'author': u'Dustin <[email protected]>',
'comments': u'fix 1234',
'project': u'pr',
'branch': u'br/4321',
'revlink': u'http://buildbot.net',
'codebase': None,
'revision': u'abcdef',
'src': None,
'when_timestamp': None,
'properties': {u'event.change.owner.email': u'[email protected]',
u'event.change.subject': u'fix 1234',
u'event.change.project': u'pr',
u'event.change.owner.name': u'Dustin',
u'event.change.number': u'4321',
u'event.change.url': u'http://buildbot.net',
u'event.change.branch': u'br',
u'event.type': u'patchset-created',
u'event.patchSet.revision': u'abcdef',
u'event.patchSet.number': u'12'}}
def test_lineReceived_patchset_created(self):
s = self.newChangeSource('somehost', 'someuser')
d = s.lineReceived(json.dumps(dict(
type="patchset-created",
change=dict(
branch="br",
project="pr",
number="4321",
owner=dict(name="Dustin", email="[email protected]"),
url="http://buildbot.net",
subject="fix 1234"
),
patchSet=dict(revision="abcdef", number="12")
)))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
for k, v in iteritems(c):
self.assertEqual(self.expected_change[k], v)
return d
change_merged_event = {
"type": "change-merged",
"change": {
"branch": "br",
"project": "pr",
"number": "4321",
"owner": {"name": "Chuck", "email": "[email protected]"},
"url": "http://buildbot.net",
"subject": "fix 1234"},
"patchSet": {"revision": "abcdefj", "number": "13"}
}
def test_handled_events_filter_true(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user', handled_events=["change-merged"])
d = s.lineReceived(json.dumps(self.change_merged_event))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
self.assertEqual(c["category"], "change-merged")
self.assertEqual(c["branch"], "br")
return d
def test_handled_events_filter_false(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user')
d = s.lineReceived(json.dumps(self.change_merged_event))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 0)
return d
def test_custom_handler(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user',
handled_events=["change-merged"])
def custom_handler(self, properties, event):
event['change']['project'] = "world"
return self.addChangeFromEvent(properties, event)
# Patches class to not bother with the inheritance
s.eventReceived_change_merged = types.MethodType(custom_handler, s)
d = s.lineReceived(json.dumps(self.change_merged_event))
@d.addCallback
def check(_):
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
self.assertEqual(c['project'], "world")
return d
def test_startStreamProcess_bytes_output(self):
s = self.newChangeSource(
'somehost', 'some_choosy_user', debug=True)
exp_argv = ['ssh', 'some_choosy_user@somehost', '-p', '29418']
exp_argv += ['gerrit', 'stream-events']
def spawnProcess(pp, cmd, argv, env):
self.assertEqual([cmd, argv], [exp_argv[0], exp_argv])
pp.errReceived(b'test stderr\n')
pp.outReceived(b'{"type":"dropped-output"}\n')
so = error.ProcessDone(None)
pp.processEnded(failure.Failure(so))
self.patch(reactor, 'spawnProcess', spawnProcess)
s.startStreamProcess()
class TestGerritEventLogPoller(changesource.ChangeSourceMixin,
unittest.TestCase):
NOW_TIMESTAMP = 1479302598
EVENT_TIMESTAMP = 1479302599
    NOW_FORMATTED = '2016-11-16 13:23:18'
    EVENT_FORMATTED = '2016-11-16 13:23:19'
OBJECTID = 1234
@defer.inlineCallbacks
def setUp(self):
yield self.setUpChangeSource()
yield self.master.startService()
@defer.inlineCallbacks
def tearDown(self):
yield self.master.stopService()
yield self.tearDownChangeSource()
@defer.inlineCallbacks
def newChangeSource(self, **kwargs):
auth = kwargs.pop('auth', ('log', 'pass'))
self._http = yield fakehttpclientservice.HTTPClientService.getFakeService(
self.master, self, 'gerrit', auth=auth)
self.changesource = gerritchangesource.GerritEventLogPoller(
'gerrit', auth=auth, gitBaseURL="ssh://someuser@somehost:29418", pollAtLaunch=False, **kwargs)
@defer.inlineCallbacks
def startChangeSource(self):
yield self.changesource.setServiceParent(self.master)
yield self.attachChangeSource(self.changesource)
# tests
@defer.inlineCallbacks
def test_now(self):
yield self.newChangeSource()
self.changesource.now()
@defer.inlineCallbacks
def test_describe(self):
        # describe is not used yet in buildbot nine, but it can still be
        # useful in the future, so let's implement and test it
yield self.newChangeSource()
self.assertSubstring('GerritEventLogPoller', self.changesource.describe())
@defer.inlineCallbacks
def test_name(self):
yield self.newChangeSource()
self.assertEqual('GerritEventLogPoller:gerrit', self.changesource.name)
@defer.inlineCallbacks
def test_lineReceived_patchset_created(self):
self.master.db.insertTestData([
fakedb.Object(id=self.OBJECTID, name='GerritEventLogPoller:gerrit',
class_name='GerritEventLogPoller')])
yield self.newChangeSource()
self.changesource.now = lambda: datetime.datetime.utcfromtimestamp(self.NOW_TIMESTAMP)
self._http.expect(method='get', ep='/plugins/events-log/events/',
params={'t1': self.NOW_FORMATTED},
content_json=dict(
type="patchset-created",
change=dict(
branch="br",
project="pr",
number="4321",
owner=dict(name="Dustin", email="[email protected]"),
url="http://buildbot.net",
subject="fix 1234"
),
eventCreatedOn=self.EVENT_TIMESTAMP,
patchSet=dict(revision="abcdef", number="12")))
yield self.startChangeSource()
yield self.changesource.poll()
self.assertEqual(len(self.master.data.updates.changesAdded), 1)
c = self.master.data.updates.changesAdded[0]
for k, v in iteritems(c):
self.assertEqual(TestGerritChangeSource.expected_change[k], v)
self.master.db.state.assertState(self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP)
# do a second poll, it should ask for the next events
self._http.expect(method='get', ep='/plugins/events-log/events/',
params={'t1': self.EVENT_FORMATTED},
content_json=dict(
type="patchset-created",
change=dict(
branch="br",
project="pr",
number="4321",
owner=dict(name="Dustin", email="[email protected]"),
url="http://buildbot.net",
subject="fix 1234"
),
eventCreatedOn=self.EVENT_TIMESTAMP + 1,
patchSet=dict(revision="abcdef", number="12")))
yield self.changesource.poll()
self.master.db.state.assertState(self.OBJECTID, last_event_ts=self.EVENT_TIMESTAMP + 1)
class TestGerritChangeFilter(unittest.TestCase):
def test_basic(self):
ch = Change(**TestGerritChangeSource.expected_change)
f = gerritchangesource.GerritChangeFilter(
branch=["br"], eventtype=["patchset-created"])
self.assertTrue(f.filter_change(ch))
f = gerritchangesource.GerritChangeFilter(
branch="br2", eventtype=["patchset-created"])
self.assertFalse(f.filter_change(ch))
f = gerritchangesource.GerritChangeFilter(
branch="br", eventtype="ref-updated")
self.assertFalse(f.filter_change(ch))
self.assertEqual(
repr(f),
'<GerritChangeFilter on prop:event.change.branch == br and prop:event.type == ref-updated>')
| gpl-2.0 |
darrenabbey/ymap | scripts_genomes/genome_process_for_standard_bins.GC_bias_1.py | 2 | 7151 | # Input arguments: (Those with '[*]' at end are used here.)
#   1) userName    : [String]: name of the user account the project belongs to. [*]
#   2) genomeName  : [String]: defines genome in use for project. (ex. 'Ca_a') [*]
#   3) main_dir    : [String]: main installation directory. (ex. '/home/bermanj/shared/links/') [*]
#   4) logName     : [String]: path of the log file that status notes are appended to. [*]
#
# Process input files:
# 1) Restriction-digested genome file.
# *) Load usable fragment definitions into array : fragments[i][chr#,bpStart,bpEnd, data_count,data_max,data_ave]
# [0 ,1 ,2 , 3 ,4 ,5 ]
# 2) Read counts for strain of interest dataset.
# -) Find max read count on fragment.
# -) Find average read count along fragment.
#
# Generate output file:
# 3) Output values for each fragment in a tab-delimited text file.
# Each line contains information for one fragment = [chr_num,bp_start,bp_end, data_count,data_max,length]
# 0) chr_num : Numerical chromosome identifier, defined for each genome in "figure_details.txt".
# 1) bp_start : Start bp coordinate along chromosome.
# 2) bp_end : End bp coordinate along chromosome.
# 3) GC_ratio : Ratio of bases as 'G' or 'C' in fragment.
# 4) Comment lines in output begin with '###'.
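#      Example output line (tab-delimited; GC_ratio value illustrative):
#          1	9638	10115	0.4163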
#
import string, sys, re, time
userName = sys.argv[1];
genomeName = sys.argv[2];
main_dir = sys.argv[3];
logName = sys.argv[4];
t0 = time.clock()
with open(logName, "a") as myfile:
myfile.write("\t\t\t*================================================================*\n")
myfile.write("\t\t\t| Log of 'genome_process_for_standard_bins.GC_bias_1.py' |\n")
myfile.write("\t\t\t*----------------------------------------------------------------*\n")
#============================================================================================================
# Process restriction-digested genome file.
#------------------------------------------------------------------------------------------------------------
# Example FASTA header line.
# >Ca_a.chr1 (9638..10115) (478bp) [*]
# FASTA entries with header lines ending in '[*]' are usable.
with open(logName, "a") as myfile:
myfile.write("\n\t\t\tProcessing standard bin fragmented genome file.")
# Find name of genome FASTA file for species being examined.
# Read in and parse : "links_dir/main_script_dir/genome_specific/[genome]/reference.txt"
workingDir = main_dir + 'users/' + userName + '/genomes/' + genomeName + '/';
reference_file = workingDir + '/reference.txt'
refFile = open(reference_file,'r')
refFASTA = refFile.read().strip()
refFile.close()
# Open standard-fragmented genome FASTA file.
standardBins_FASTA_file = workingDir + string.replace(refFASTA, '.fasta','.standard_bins.fasta')
standardBins_FASTA_data = open(standardBins_FASTA_file,'r')
#............................................................................................................
# Setup array and counter for tracking fragment definition data.
fragments = []
fragment_counter = 0
## Process digested FASTA genome file, line by line.
while True:
# Line pairs have the following structure.
# >Ca_a.chr1 (9638..10115) (478bp) [*]
# ATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGCATGC
line1 = standardBins_FASTA_data.readline()
line2 = standardBins_FASTA_data.readline()
if not line2:
break # EOF
first_char = line1[:1];
if first_char == ">":
        # Line is a FASTA entry header.
line_parts = string.split(string.strip(line1))
chrGenomeAndNum_string = line_parts[0]
bp_coordinate_string = line_parts[1]
fragment_size_string = line_parts[2]
if len(line_parts) > 3:
fragment_usable_string = line_parts[3]
if fragment_usable_string[1] == "*":
# Fragment is usable, so the details should be placed into fragments structure.
# chr number.
# fragment start bp.
# fragment end bp.
# split the chr string by '.' character, then trim off the first three characters ('chr') from the second substring.
# string has format of : ">Ca_a.chr1"
genomeName_string,chrNum_string = chrGenomeAndNum_string.split(".")
chr_num = int(float(chrNum_string.replace("chr","")))
# string has format of : "(9638..10115)"
coordinates = bp_coordinate_string.replace('(','').replace(')','').replace('..',' ').split()
bp_start = int(float(coordinates[0]))
bp_end = int(float(coordinates[1]))
GC_ratio = 0 # placeholder value.
sequence = line2;
G_count = sequence.count('G') + sequence.count('g')
C_count = sequence.count('C') + sequence.count('c')
T_count = sequence.count('T') + sequence.count('t')
A_count = sequence.count('A') + sequence.count('a')
if (float(G_count+C_count+T_count+A_count) == 0):
GC_ratio = 0
else:
GC_ratio = (G_count+C_count)/float(G_count+C_count+T_count+A_count)
fragments.append([chr_num,bp_start,bp_end,GC_ratio])
fragment_counter += 1
standardBins_FASTA_data.close()
# Put fragment counter into a general use variable.
numFragments = fragment_counter
#------------------------------------------------------------------------------------------------------------
# End of code section to parse restriction fragments from genome.
#============================================================================================================
print "### ", time.clock() - t0, "seconds to parse restriction fragments from digested genome."
t1 = time.clock()
print '### numFragments = ' + str(numFragments);
print '### Data from each fragment: [chrNum, bpStart, bpEnd, GC_ratio]'
#============================================================================================================
# Code section to output information about genome restriction fragments.
#------------------------------------------------------------------------------------------------------------
with open(logName, "a") as myfile:
myfile.write("\n\t\t\tOutputting GC-ratios of standard-bin fragmented genome.")
for fragment in range(1,numFragments):
# Output a line for each fragment.
# fragments[fragment-1] = [chr_num,bp_start,bp_end, GC_ratio]
# 0) chr_num
# 1) bp_start
# 2) bp_end
# 3) GC_ratio
chr_num = fragments[fragment-1][0]
bp_start = fragments[fragment-1][1]
bp_end = fragments[fragment-1][2]
GC_ratio = fragments[fragment-1][3]
print str(chr_num) + '\t' + str(bp_start) + '\t' + str(bp_end) + '\t' + str(GC_ratio)
#------------------------------------------------------------------------------------------------------------
# End of code section to output information about fragments.
#============================================================================================================
print "### ", time.clock() - t1, "seconds to output basic stats of each restriction fragment."
print "### ", time.clock() - t0, "seconds to complete processing of fragment definitions."
with open(logName, "a") as myfile:
myfile.write("\n\t\t\tTime to process = " + str(time.clock()-t0) )
myfile.write("\n\t\t* 'py/genome_process_for_standard_bins.GC_bias_1.py' completed. *")
| mit |
flumotion-mirror/flumotion-flash | flumotion/component/encoders/flv/flv.py | 1 | 1114 | # -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
from flumotion.component import feedcomponent
class FLVEncoder(feedcomponent.EncoderComponent):
checkTimestamp = True
checkOffset = True
def get_pipeline_string(self, properties):
return "ffmpegcolorspace ! ffenc_flv name=encoder"
def configure_pipeline(self, pipeline, properties):
element = pipeline.get_by_name('encoder')
if 'bitrate' in properties:
element.set_property('bitrate', properties['bitrate'])
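# Editor's note (illustrative): with properties={'bitrate': 400000} the
# configured pipeline is equivalent to
#     ffmpegcolorspace ! ffenc_flv bitrate=400000 name=encoder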
| lgpl-2.1 |
oczkers/pyllegro | pyllegro/core.py | 1 | 13373 | # -*- coding: utf-8 -*-
"""
pyllegro.core
~~~~~~~~~~~~~
A lightweight wrapper around the Allegro webapi.
"""
import sys
import time
import logging
from zeep import Client
from zeep.exceptions import Fault
from requests.exceptions import ConnectionError
# from socket import error as socketError
from decimal import Decimal
from base64 import b64encode
from hashlib import sha256
if sys.version_info[0] == 3:
long = int
# logger - zeep
logger_zeep = logging.getLogger('zeep')
logger_zeep.setLevel(logging.ERROR)
# logger - allegro
logger_allegro = logging.getLogger(__name__)
def magicDecode(var):
"""Decode unicode to string."""
if var:
var = var.encode('utf8')
return var
def chunked(l, n):
"""Chunk one big list into few small lists."""
return [l[i:i + n] for i in range(0, len(l), n)]
class Allegro(object):
def __init__(self, username, passwd, webapi_key, debug=False):
self.debug = debug
self.webapi_url = 'https://webapi.allegro.pl/service.php?wsdl'
self.username = username
self.passwd_hash = b64encode(sha256(passwd.encode('utf-8')).digest()).decode('utf-8') # hash password
self.webapi_key = webapi_key
self.last_event_id = 0
# init soap client & login
# self.client = Client(self.webapi_url)
# self.ArrayOfLong = self.client.get_type('ns0:ArrayOfLong') # this should be done by zeep...
self.token = self.login(self.username, self.passwd_hash, self.webapi_key)
def __relogin__(self):
"""Forced logging. Returns token."""
while True:
try:
return self.login(self.username, self.passwd_hash, self.webapi_key)
# except socketError as e:
# logger_allegro.warning(e)
# time.sleep(5)
except:
print(sys.exc_info())
print('Unknown login error')
logger_allegro.warning('Unknown login error')
logger_allegro.exception(sys.exc_info())
time.sleep(5)
def __ask__(self, service, **kwargs):
"""Ask allegro (catch errors). Returns response."""
# TODO: send error code/message to mail
if self.debug:
print('ALLEGRO: %s %s' % (service, kwargs)) # DEBUG
while True:
if service not in ('doGetSiteJournalDeals', 'doGetSiteJournalDealsInfo'):
kwargs['sessionHandle'] = self.token
else:
kwargs['sessionId'] = self.token
# process only if token avaible
try:
return self.client.service[service](**kwargs)
except Fault as e:
if e.code in ('ERR_NO_SESSION', 'ERR_SESSION_EXPIRED'):
                    print('invalid session handle, logging in again')
time.sleep(5)
self.token = self.__relogin__()
elif e.code == 'ERR_INTERNAL_SYSTEM_ERROR':
print('internal server error')
time.sleep(5)
else:
print(sys.exc_info())
print(e)
print(e.code)
time.sleep(5)
self.token = self.__relogin__()
except ConnectionError as e:
print('connection error')
print(e)
time.sleep(5)
# except socketError as e:
# logger_allegro.warning(e)
# time.sleep(5)
# except SoapFault as e:
# if e[0] == 'ERR_SESSION_EXPIRED' or e[0] == 'ERR_NO_SESSION':
# # logger_allegro.debug('Session expired - relogging.')
# logger_allegro.debug(e)
# self.token = self.__relogin__()
# elif e[0] == 'ERR_INTERNAL_SYSTEM_ERROR':
# logger_allegro.debug(e)
# time.sleep(5)
# # elif e[0] == 'ERR_AUCTION_KILLED': # deleted by allegro admin
# # pass
# else:
# logger_allegro.warning(e)
# time.sleep(5)
# self.token = self.__relogin__()
except:
print(sys.exc_info())
print('Unknown soap error')
logger_allegro.warning('Unknown soap error')
logger_allegro.exception(sys.exc_info())
time.sleep(5)
self.token = self.__relogin__()
def login(self, username, passwd_hash, webapi_key, country_code=1):
"""Log in (sets self.token). Returns token (session_handle)."""
self.client = Client(self.webapi_url)
self.ArrayOfLong = self.client.get_type('ns0:ArrayOfLong') # this should be done by zeep...
ver_key = self.client.service.doQuerySysStatus(1, 1, webapi_key)['verKey']
return self.client.service.doLoginEnc(username, passwd_hash,
country_code, webapi_key,
ver_key)['sessionHandlePart']
def getAuctionDetails(self, auction_id):
"""Return basic auction details (doShowItemInfoExt)."""
return self.__ask__('doShowItemInfoExt',
itemId=auction_id,
# getDesc=0,
# getImageUrl=0,
# getAttribs=0,
# getPostageOptions=0,
# getCompanyInfo=0
) # ['itemListInfoExt']
def getBids(self, auction_id):
"""Retrieve all bids in given auction."""
bids = {}
rc = self.__ask__('doGetBidItem2', itemId=auction_id)
if rc:
for i in rc:
i = i['bidsArray']
bids[long(i['item'][1])] = {
'price': Decimal(i['item'][6]),
'quantity': int(i['item'][5]),
'date_buy': i['item'][7]
}
return bids
def getBuyerInfo(self, auction_id, buyer_id):
"""Return buyer info."""
# TODO: add price from getBids
rc = self.__ask__('doGetPostBuyData', itemsArray=self.ArrayOfLong([auction_id]), buyerFilterArray=self.ArrayOfLong([buyer_id]))
rc = rc[0]['usersPostBuyData']['item'][0]['userData']
return {'allegro_aid': auction_id,
'allegro_uid': rc['userId'],
'allegro_login': magicDecode(rc['userLogin']),
'name': magicDecode(rc['userFirstName']),
'surname': magicDecode(rc['userLastName']),
'company': magicDecode(rc['userCompany']),
'postcode': magicDecode(rc['userPostcode']),
'city': magicDecode(rc['userCity']),
'address': magicDecode(rc['userAddress']),
'email': magicDecode(rc['userEmail']),
'phone': rc['userPhone']}
def getOrders(self, auction_ids):
"""Return orders details."""
orders = {}
# chunk list (only 25 auction_ids per request)
for chunk in chunked(auction_ids, 25):
# auctions = [{'item': auction_id} for auction_id in chunk] # TODO?: is it needed?
auctions = self.ArrayOfLong(chunk)
rc = self.__ask__('doGetPostBuyData', itemsArray=auctions)
for auction in rc:
orders_auction = []
bids = self.getBids(auction['itemId'])
# get orders details
# for i in auction.get('usersPostBuyData', ()):
if not auction['usersPostBuyData']: # empty
continue
for i in auction['usersPostBuyData']['item']:
i = i['userData']
if i['userId'] not in bids: # temporary(?) webapi bug fix
continue
orders_auction.append({
'allegro_aid': auction['itemId'],
'allegro_uid': i['userId'],
'allegro_login': magicDecode(i['userLogin']),
'name': magicDecode(i['userFirstName']),
'surname': magicDecode(i['userLastName']),
'company': magicDecode(i['userCompany']),
'postcode': magicDecode(i['userPostcode']),
'city': magicDecode(i['userCity']),
'address': magicDecode(i['userAddress']),
'email': magicDecode(i['userEmail']),
'phone': i['userPhone'],
'price': bids[i['userId']]['price'],
'quantity': bids[i['userId']]['quantity'],
'date_buy': bids[i['userId']]['date_buy']
})
orders[auction['itemId']] = orders_auction
return orders
def getTotalPaid(self, auction_id, buyer_id):
"""Return total paid from buyer on single auction."""
# TODO: it has to be better way to check payments.
date_end = long(time.time())
date_start = date_end - 60 * 60 * 24 * 90
rc = self.__ask__('doGetMyIncomingPayments',
buyerId=buyer_id,
itemId=auction_id,
transRecvDateFrom=date_start,
transRecvDateTo=date_end,
                          transPageLimit=25,  # not needed | TODO: there can be more than 25 payments
transOffset=0)
paid = 0
for t in (rc or []):
# t = t['item']
if t['payTransStatus'] == u'Zakończona' and t['payTransIncomplete'] == 0:
                if t['payTransItId'] == 0:  # combined payment (covers several auctions)
for td in t['payTransDetails']['item']:
if td['payTransDetailsItId'] == auction_id:
paid += Decimal(str(td['payTransDetailsPrice']))
                else:  # single-auction payment
paid += Decimal(str(t['payTransAmount']))
return paid
def getJournal(self, start=0):
"""Get all journal events from start."""
# TODO: while len(journaldeals) < 100
pass
def getJournalDealsInfo(self, start=0):
"""Return all events ammount (from start)."""
rc = self.__ask__('doGetSiteJournalDealsInfo',
journalStart=start)
return rc['dealEventsCount']
def getJournalDeals(self, start=None):
"""Return all journal events from start."""
        # 1 - purchase deal created, 2 - post-purchase form (payment card) created, 3 - post-purchase form (payment card) cancelled, 4 - transaction completed (paid) via PzA
if start is not None:
self.last_event_id = start
events = []
while self.getJournalDealsInfo(self.last_event_id) > 0:
rc = self.__ask__('doGetSiteJournalDeals', journalStart=self.last_event_id)
for i in rc:
events.append({
'allegro_did': i['dealId'],
'deal_status': i['dealEventType'],
'transaction_id': i['dealTransactionId'],
'time': i['dealEventTime'],
'event_id': i['dealEventId'],
'allegro_aid': i['dealItemId'],
'allegro_uid': i['dealBuyerId'],
# 'seller_id': i['dealSellerId '],
'quantity': i['dealQuantity']
})
self.last_event_id = rc[-1]['dealEventId']
return events
# feedback
def getWaitingFeedbacks(self):
"""Return all waiting feedbacks from buyers."""
# TODO: return sorted dictionary (negative/positive/neutral)
feedbacks = []
offset = 0
amount = self.__ask__('doGetWaitingFeedbacksCount')
while amount > 0:
rc = self.__ask__('doGetWaitingFeedbacks',
offset=offset, packageSize=200)
feedbacks.extend(rc['feWaitList'])
amount -= 200
offset += 1
return feedbacks
def doFeedback(self, item_id, use_comment_template, buyer_id, comment, comment_type, op):
"""http://allegro.pl/webapi/documentation.php/show/id,42"""
return self.__ask__('doFeedback',
feItemId=item_id,
feUseCommentTemplate=use_comment_template,
feToUserId=buyer_id,
feComment=comment,
feCommentType=comment_type,
feOp=op)['feedbackId']
# refund
def doSendRefundForms(self, item_id, buyer_id, reason, quantity_sold):
"""http://allegro.pl/webapi/documentation.php/show/id,201"""
# TODO: deprecated
return self.__ask__('doSendRefundForms',
sendRefundFormsDataArr={
'item': {
'itemId': item_id, 'buyerId': buyer_id,
'refundReason': reason, 'itemQuantitySold': quantity_sold
}
})['sendRefundFormsResultsArr']
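# --- Editor's sketch (not part of the original module): a minimal polling
# loop; credentials are placeholders and deal_status == 1 follows the
# event-type comment in getJournalDeals(). ---
def _example_poll(login, password, webapi_key):
    allegro = Allegro(login, password, webapi_key)
    for event in allegro.getJournalDeals():
        if event['deal_status'] == 1:  # 1 = purchase deal created
            buyer = allegro.getBuyerInfo(event['allegro_aid'],
                                         event['allegro_uid'])
            print(buyer['email'])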
| lgpl-3.0 |
0jpq0/kbengine | kbe/res/scripts/common/Lib/gettext.py | 90 | 17661 | """Internationalization and localization support.
This module provides internationalization (I18N) and localization (L10N)
support for your Python programs by providing an interface to the GNU gettext
message catalog library.
I18N refers to the operation by which a program is made aware of multiple
languages. L10N refers to the adaptation of your program, once
internationalized, to the local language and cultural habits.
"""
# This module represents the integration of work, contributions, feedback, and
# suggestions from the following people:
#
# Martin von Loewis, who wrote the initial implementation of the underlying
# C-based libintlmodule (later renamed _gettext), along with a skeletal
# gettext.py implementation.
#
# Peter Funk, who wrote fintl.py, a fairly complete wrapper around intlmodule,
# which also included a pure-Python implementation to read .mo files if
# intlmodule wasn't available.
#
# James Henstridge, who also wrote a gettext.py module, which has some
# interesting, but currently unsupported experimental features: the notion of
# a Catalog class and instances, and the ability to add to a catalog file via
# a Python API.
#
# Barry Warsaw integrated these modules, wrote the .install() API and code,
# and conformed all C and Python code to Python's coding standards.
#
# Francois Pinard and Marc-Andre Lemburg also contributed valuably to this
# module.
#
# J. David Ibanez implemented plural forms. Bruno Haible fixed some bugs.
#
# TODO:
# - Lazy loading of .mo files. Currently the entire catalog is loaded into
# memory, but that's probably bad for large translated programs. Instead,
# the lexical sort of original strings in GNU .mo files should be exploited
# to do binary searches and lazy initializations. Or you might want to use
# the undocumented double-hash algorithm for .mo files with hash tables, but
# you'll need to study the GNU gettext code to do this.
#
# - Support Solaris .mo file formats. Unfortunately, we've been unable to
# find this format documented anywhere.
import locale, copy, io, os, re, struct, sys
from errno import ENOENT
__all__ = ['NullTranslations', 'GNUTranslations', 'Catalog',
'find', 'translation', 'install', 'textdomain', 'bindtextdomain',
'dgettext', 'dngettext', 'gettext', 'ngettext',
]
_default_localedir = os.path.join(sys.base_prefix, 'share', 'locale')
def c2py(plural):
"""Gets a C expression as used in PO files for plural forms and returns a
Python lambda function that implements an equivalent expression.
"""
# Security check, allow only the "n" identifier
import token, tokenize
tokens = tokenize.generate_tokens(io.StringIO(plural).readline)
try:
danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n']
except tokenize.TokenError:
raise ValueError('plural forms expression error, maybe unbalanced parenthesis')
else:
if danger:
raise ValueError('plural forms expression could be dangerous')
# Replace some C operators by their Python equivalents
plural = plural.replace('&&', ' and ')
plural = plural.replace('||', ' or ')
expr = re.compile(r'\!([^=])')
plural = expr.sub(' not \\1', plural)
# Regular expression and replacement function used to transform
# "a?b:c" to "b if a else c".
expr = re.compile(r'(.*?)\?(.*?):(.*)')
def repl(x):
return "(%s if %s else %s)" % (x.group(2), x.group(1),
expr.sub(repl, x.group(3)))
# Code to transform the plural expression, taking care of parentheses
stack = ['']
for c in plural:
if c == '(':
stack.append('')
elif c == ')':
if len(stack) == 1:
# Actually, we never reach this code, because unbalanced
# parentheses get caught in the security check at the
# beginning.
raise ValueError('unbalanced parenthesis in plural form')
s = expr.sub(repl, stack.pop())
stack[-1] += '(%s)' % s
else:
stack[-1] += c
plural = expr.sub(repl, stack.pop())
return eval('lambda n: int(%s)' % plural)
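# Illustrative examples (editor's note, not part of the original module):
#     c2py('n != 1')(1) -> 0, c2py('n != 1')(5) -> 1    # germanic rule
#     c2py('(n > 1)')(0) -> 0, c2py('(n > 1)')(2) -> 1  # e.g. French-style rule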
def _expand_lang(loc):
loc = locale.normalize(loc)
COMPONENT_CODESET = 1 << 0
COMPONENT_TERRITORY = 1 << 1
COMPONENT_MODIFIER = 1 << 2
# split up the locale into its base components
mask = 0
pos = loc.find('@')
if pos >= 0:
modifier = loc[pos:]
loc = loc[:pos]
mask |= COMPONENT_MODIFIER
else:
modifier = ''
pos = loc.find('.')
if pos >= 0:
codeset = loc[pos:]
loc = loc[:pos]
mask |= COMPONENT_CODESET
else:
codeset = ''
pos = loc.find('_')
if pos >= 0:
territory = loc[pos:]
loc = loc[:pos]
mask |= COMPONENT_TERRITORY
else:
territory = ''
language = loc
ret = []
for i in range(mask+1):
if not (i & ~mask): # if all components for this combo exist ...
val = language
if i & COMPONENT_TERRITORY: val += territory
if i & COMPONENT_CODESET: val += codeset
if i & COMPONENT_MODIFIER: val += modifier
ret.append(val)
ret.reverse()
return ret
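# Illustrative example (editor's note): for 'de_DE.ISO8859-15@euro' (the form
# locale.normalize() may produce for 'de_DE@euro'), the expansions come back
# most specific first:
#     ['de_DE.ISO8859-15@euro', 'de.ISO8859-15@euro', 'de_DE@euro', 'de@euro',
#      'de_DE.ISO8859-15', 'de.ISO8859-15', 'de_DE', 'de']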
class NullTranslations:
def __init__(self, fp=None):
self._info = {}
self._charset = None
self._output_charset = None
self._fallback = None
if fp is not None:
self._parse(fp)
def _parse(self, fp):
pass
def add_fallback(self, fallback):
if self._fallback:
self._fallback.add_fallback(fallback)
else:
self._fallback = fallback
def gettext(self, message):
if self._fallback:
return self._fallback.gettext(message)
return message
def lgettext(self, message):
if self._fallback:
return self._fallback.lgettext(message)
return message
def ngettext(self, msgid1, msgid2, n):
if self._fallback:
return self._fallback.ngettext(msgid1, msgid2, n)
if n == 1:
return msgid1
else:
return msgid2
def lngettext(self, msgid1, msgid2, n):
if self._fallback:
return self._fallback.lngettext(msgid1, msgid2, n)
if n == 1:
return msgid1
else:
return msgid2
def info(self):
return self._info
def charset(self):
return self._charset
def output_charset(self):
return self._output_charset
def set_output_charset(self, charset):
self._output_charset = charset
def install(self, names=None):
import builtins
builtins.__dict__['_'] = self.gettext
if hasattr(names, "__contains__"):
if "gettext" in names:
builtins.__dict__['gettext'] = builtins.__dict__['_']
if "ngettext" in names:
builtins.__dict__['ngettext'] = self.ngettext
if "lgettext" in names:
builtins.__dict__['lgettext'] = self.lgettext
if "lngettext" in names:
builtins.__dict__['lngettext'] = self.lngettext
class GNUTranslations(NullTranslations):
# Magic number of .mo files
LE_MAGIC = 0x950412de
BE_MAGIC = 0xde120495
def _parse(self, fp):
"""Override this method to support alternative .mo formats."""
unpack = struct.unpack
filename = getattr(fp, 'name', '')
# Parse the .mo file header, which consists of 5 little endian 32
# bit words.
self._catalog = catalog = {}
self.plural = lambda n: int(n != 1) # germanic plural by default
buf = fp.read()
buflen = len(buf)
# Are we big endian or little endian?
magic = unpack('<I', buf[:4])[0]
if magic == self.LE_MAGIC:
version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20])
ii = '<II'
elif magic == self.BE_MAGIC:
version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
ii = '>II'
else:
raise OSError(0, 'Bad magic number', filename)
# Now put all messages from the .mo file buffer into the catalog
# dictionary.
for i in range(0, msgcount):
mlen, moff = unpack(ii, buf[masteridx:masteridx+8])
mend = moff + mlen
tlen, toff = unpack(ii, buf[transidx:transidx+8])
tend = toff + tlen
if mend < buflen and tend < buflen:
msg = buf[moff:mend]
tmsg = buf[toff:tend]
else:
raise OSError(0, 'File is corrupt', filename)
# See if we're looking at GNU .mo conventions for metadata
if mlen == 0:
# Catalog description
lastk = k = None
for b_item in tmsg.split('\n'.encode("ascii")):
item = b_item.decode().strip()
if not item:
continue
if ':' in item:
k, v = item.split(':', 1)
k = k.strip().lower()
v = v.strip()
self._info[k] = v
lastk = k
elif lastk:
self._info[lastk] += '\n' + item
if k == 'content-type':
self._charset = v.split('charset=')[1]
elif k == 'plural-forms':
v = v.split(';')
plural = v[1].split('plural=')[1]
self.plural = c2py(plural)
# Note: we unconditionally convert both msgids and msgstrs to
# Unicode using the character encoding specified in the charset
# parameter of the Content-Type header. The gettext documentation
# strongly encourages msgids to be us-ascii, but some applications
# require alternative encodings (e.g. Zope's ZCML and ZPT). For
# traditional gettext applications, the msgid conversion will
# cause no problems since us-ascii should always be a subset of
# the charset encoding. We may want to fall back to 8-bit msgids
# if the Unicode conversion fails.
charset = self._charset or 'ascii'
if b'\x00' in msg:
# Plural forms
msgid1, msgid2 = msg.split(b'\x00')
tmsg = tmsg.split(b'\x00')
msgid1 = str(msgid1, charset)
for i, x in enumerate(tmsg):
catalog[(msgid1, i)] = str(x, charset)
else:
catalog[str(msg, charset)] = str(tmsg, charset)
# advance to next entry in the seek tables
masteridx += 8
transidx += 8
def lgettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.lgettext(message)
return message
if self._output_charset:
return tmsg.encode(self._output_charset)
return tmsg.encode(locale.getpreferredencoding())
def lngettext(self, msgid1, msgid2, n):
try:
tmsg = self._catalog[(msgid1, self.plural(n))]
if self._output_charset:
return tmsg.encode(self._output_charset)
return tmsg.encode(locale.getpreferredencoding())
except KeyError:
if self._fallback:
return self._fallback.lngettext(msgid1, msgid2, n)
if n == 1:
return msgid1
else:
return msgid2
def gettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.gettext(message)
return message
return tmsg
def ngettext(self, msgid1, msgid2, n):
try:
tmsg = self._catalog[(msgid1, self.plural(n))]
except KeyError:
if self._fallback:
return self._fallback.ngettext(msgid1, msgid2, n)
if n == 1:
tmsg = msgid1
else:
tmsg = msgid2
return tmsg
# Locate a .mo file using the gettext strategy
def find(domain, localedir=None, languages=None, all=False):
# Get some reasonable defaults for arguments that were not supplied
if localedir is None:
localedir = _default_localedir
if languages is None:
languages = []
for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
val = os.environ.get(envar)
if val:
languages = val.split(':')
break
if 'C' not in languages:
languages.append('C')
# now normalize and expand the languages
nelangs = []
for lang in languages:
for nelang in _expand_lang(lang):
if nelang not in nelangs:
nelangs.append(nelang)
# select a language
if all:
result = []
else:
result = None
for lang in nelangs:
if lang == 'C':
break
mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain)
if os.path.exists(mofile):
if all:
result.append(mofile)
else:
return mofile
return result
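# Illustrative example (editor's note): find('myapp', '/usr/share/locale',
# languages=['de_DE.UTF-8']) probes, in order, paths such as
#     /usr/share/locale/de_DE.UTF-8/LC_MESSAGES/myapp.mo
#     /usr/share/locale/de_DE/LC_MESSAGES/myapp.mo
#     /usr/share/locale/de/LC_MESSAGES/myapp.mo
# and returns the first existing file (or every match when all=True).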
# a mapping between absolute .mo file path and Translation object
_translations = {}
def translation(domain, localedir=None, languages=None,
class_=None, fallback=False, codeset=None):
if class_ is None:
class_ = GNUTranslations
mofiles = find(domain, localedir, languages, all=True)
if not mofiles:
if fallback:
return NullTranslations()
raise OSError(ENOENT, 'No translation file found for domain', domain)
# Avoid opening, reading, and parsing the .mo file after it's been done
# once.
result = None
for mofile in mofiles:
key = (class_, os.path.abspath(mofile))
t = _translations.get(key)
if t is None:
with open(mofile, 'rb') as fp:
t = _translations.setdefault(key, class_(fp))
# Copy the translation object to allow setting fallbacks and
# output charset. All other instance data is shared with the
# cached object.
t = copy.copy(t)
if codeset:
t.set_output_charset(codeset)
if result is None:
result = t
else:
result.add_fallback(t)
return result
def install(domain, localedir=None, codeset=None, names=None):
t = translation(domain, localedir, fallback=True, codeset=codeset)
t.install(names)
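# Usage sketch (the domain and path are assumed examples, not from the
# source): install() binds _() as a builtin, and fallback=True above means a
# missing .mo file degrades to the identity translation instead of raising.
#
#   import gettext
#   gettext.install('myapp', localedir='/usr/share/locale')
#   print(_('Hello World'))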
# a mapping b/w domains and locale directories
_localedirs = {}
# a mapping b/w domains and codesets
_localecodesets = {}
# current global domain, `messages' used for compatibility w/ GNU gettext
_current_domain = 'messages'
def textdomain(domain=None):
global _current_domain
if domain is not None:
_current_domain = domain
return _current_domain
def bindtextdomain(domain, localedir=None):
global _localedirs
if localedir is not None:
_localedirs[domain] = localedir
return _localedirs.get(domain, _default_localedir)
def bind_textdomain_codeset(domain, codeset=None):
global _localecodesets
if codeset is not None:
_localecodesets[domain] = codeset
return _localecodesets.get(domain)
def dgettext(domain, message):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
except OSError:
return message
return t.gettext(message)
def ldgettext(domain, message):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
except OSError:
return message
return t.lgettext(message)
def dngettext(domain, msgid1, msgid2, n):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
except OSError:
if n == 1:
return msgid1
else:
return msgid2
return t.ngettext(msgid1, msgid2, n)
def ldngettext(domain, msgid1, msgid2, n):
try:
t = translation(domain, _localedirs.get(domain, None),
codeset=_localecodesets.get(domain))
except OSError:
if n == 1:
return msgid1
else:
return msgid2
return t.lngettext(msgid1, msgid2, n)
def gettext(message):
return dgettext(_current_domain, message)
def lgettext(message):
return ldgettext(_current_domain, message)
def ngettext(msgid1, msgid2, n):
return dngettext(_current_domain, msgid1, msgid2, n)
def lngettext(msgid1, msgid2, n):
return ldngettext(_current_domain, msgid1, msgid2, n)
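# Hedged sketch of the domain-level plural API (names are assumed examples):
#
#   import gettext
#   gettext.bindtextdomain('myapp', '/usr/share/locale')
#   gettext.textdomain('myapp')
#   n = 3
#   print(gettext.ngettext('%d file removed', '%d files removed', n) % n)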
# dcgettext() has been deemed unnecessary and is not implemented.
# James Henstridge's Catalog constructor from GNOME gettext. Documented usage
# was:
#
# import gettext
# cat = gettext.Catalog(PACKAGE, localedir=LOCALEDIR)
# _ = cat.gettext
# print _('Hello World')
# The resulting catalog object currently doesn't support access through a
# dictionary API, which was supported (but apparently unused) in GNOME
# gettext.
Catalog = translation
| lgpl-3.0 |
dariomalchiodi/yaplf | test/test_models.py | 1 | 3023 |
#*****************************************************************************
# Copyright (C) 2010 Dario Malchiodi <[email protected]>
#
# This file is part of yaplf.
# yaplf is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
# yaplf is distributed in the hope that it will be useful, but without any
# warranty; without even the implied warranty of merchantability or fitness
# for a particular purpose. See the GNU Lesser General Public License for
# more details.
# You should have received a copy of the GNU Lesser General Public License
# along with yaplf; if not, see <http://www.gnu.org/licenses/>.
#
#*****************************************************************************
import unittest
from yaplf.models import *
from yaplf.models.neural import *
class Test(unittest.TestCase):
"""Unit tests for models module of yaplf."""
def test_Model(self):
"""Test yaplf Model."""
from yaplf.utility.error import MSE, MaxError
from yaplf.data import LabeledExample
sample = (LabeledExample( (-1,), (-1,) ), LabeledExample((0,), (0,)),
LabeledExample((1,), (1,)))
model = ConstantModel(0)
self.assertEqual(model.test(sample, MSE()), 2.0 / 3)
self.assertEqual(model.test(sample, MaxError()), 1)
    def test_ConstantModel(self):
        """Test yaplf ConstantModel."""
from numpy import random
model = ConstantModel(0)
self.assertEqual(model.compute(1), 0)
self.assertEqual(model.compute((1, 3)), 0)
self.assertEqual(model.compute("string"), 0)
self.assertEqual(model.compute(random.normal()), 0)
def test_Perceptron(self):
"""Test yalpf Perceptron."""
Perceptron(((1, 1),))
Perceptron(((1, 1), (8, -4)))
self.assertRaises(ValueError, Perceptron, ((1, 1), (8, -4, 9)))
Perceptron(((1, 1),), threshold = (-1,))
Perceptron(((1, 1), (8, -4)), threshold = (-1, 1))
self.assertRaises(ValueError, Perceptron, ((1, 1), (8, -4)),
threshold = (-1,))
from yaplf.utility.activation import SigmoidActivationFunction
from numpy import array
Perceptron(((1, 1),), threshold = (-1,),
activation = SigmoidActivationFunction())
self.assertEqual(Perceptron(((1, 1),)).compute((0, 2)), 1)
self.assertEqual(Perceptron(((1, 1),),
activation=SigmoidActivationFunction()).compute((0, 2)),
0.88079707797788231)
self.assertEqual(Perceptron(((1, 1),), threshold=(1,),
activation=SigmoidActivationFunction()).compute((0, 2)),
0.7310585786300049)
self.assertEqual(Perceptron(((1, -1), (-1, 1)),
threshold = (-1, 1)).compute((0, 1)).tolist(), [1, 1])
if __name__ == "__main__":
unittest.main() | lgpl-3.0 |
jcoady9/python-for-android | python3-alpha/python3-src/Lib/distutils/bcppcompiler.py | 179 | 14935 | """distutils.bcppcompiler
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
for the Borland C++ compiler.
"""
# This implementation by Lyle Johnson, based on the original msvccompiler.py
# module and using the directions originally published by Gordon Williams.
# XXX looks like there's a LOT of overlap between these two classes:
# someone should sit down and factor out the common code as
# WindowsCCompiler! --GPW
import os
from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError, UnknownFileError
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log
class BCPPCompiler(CCompiler) :
"""Concrete class that implements an interface to the Borland C/C++
compiler, as defined by the CCompiler abstract class.
"""
compiler_type = 'bcpp'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = _c_extensions + _cpp_extensions
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__ (self,
verbose=0,
dry_run=0,
force=0):
CCompiler.__init__ (self, verbose, dry_run, force)
# These executables are assumed to all be in the path.
# Borland doesn't seem to use any special registry settings to
# indicate their installation locations.
self.cc = "bcc32.exe"
self.linker = "ilink32.exe"
self.lib = "tlib.exe"
self.preprocess_options = None
self.compile_options = ['/tWM', '/O2', '/q', '/g0']
self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
self.ldflags_static = []
self.ldflags_exe = ['/Gn', '/q', '/x']
self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
# -- Worker methods ------------------------------------------------
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
macros, objects, extra_postargs, pp_opts, build = \
self._setup_compile(output_dir, macros, include_dirs, sources,
depends, extra_postargs)
compile_opts = extra_preargs or []
compile_opts.append ('-c')
if debug:
compile_opts.extend (self.compile_options_debug)
else:
compile_opts.extend (self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
# XXX why do the normpath here?
src = os.path.normpath(src)
obj = os.path.normpath(obj)
# XXX _setup_compile() did a mkpath() too but before the normpath.
# Is it possible to skip the normpath?
self.mkpath(os.path.dirname(obj))
if ext == '.res':
# This is already a binary file -- skip it.
continue # the 'for' loop
if ext == '.rc':
# This needs to be compiled to a .res file -- do it now.
try:
self.spawn (["brcc32", "-fo", obj, src])
except DistutilsExecError as msg:
raise CompileError(msg)
continue # the 'for' loop
# The next two are both for the real compiler.
if ext in self._c_extensions:
input_opt = ""
elif ext in self._cpp_extensions:
input_opt = "-P"
else:
# Unknown file type -- no extra options. The compiler
# will probably fail, but let it just in case this is a
# file the compiler recognizes even if we don't.
input_opt = ""
output_opt = "-o" + obj
# Compiler command line syntax is: "bcc32 [options] file(s)".
# Note that the source file names must appear at the end of
# the command line.
try:
self.spawn ([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs + [src])
except DistutilsExecError as msg:
raise CompileError(msg)
return objects
# compile ()
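    # Illustrative spawn for a single C source (an assumption, not from the
    # source): with the default options, compiling 'foo.c' runs roughly
    #     bcc32 -c /tWM /O2 /q /g0 <pp_opts> -ofoo.obj foo.c
    # (input_opt is empty for .c files and '-P' for C++ sources).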
def create_static_lib (self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
(objects, output_dir) = self._fix_object_args (objects, output_dir)
output_filename = \
self.library_filename (output_libname, output_dir=output_dir)
if self._need_link (objects, output_filename):
lib_args = [output_filename, '/u'] + objects
if debug:
pass # XXX what goes here?
try:
self.spawn ([self.lib] + lib_args)
except DistutilsExecError as msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
# create_static_lib ()
def link (self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
# XXX this ignores 'build_temp'! should follow the lead of
# msvccompiler.py
(objects, output_dir) = self._fix_object_args (objects, output_dir)
(libraries, library_dirs, runtime_library_dirs) = \
self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
if runtime_library_dirs:
log.warn("I don't know what to do with 'runtime_library_dirs': %s",
str(runtime_library_dirs))
if output_dir is not None:
output_filename = os.path.join (output_dir, output_filename)
if self._need_link (objects, output_filename):
# Figure out linker args based on type of target.
if target_desc == CCompiler.EXECUTABLE:
startup_obj = 'c0w32'
if debug:
ld_args = self.ldflags_exe_debug[:]
else:
ld_args = self.ldflags_exe[:]
else:
startup_obj = 'c0d32'
if debug:
ld_args = self.ldflags_shared_debug[:]
else:
ld_args = self.ldflags_shared[:]
# Create a temporary exports file for use by the linker
if export_symbols is None:
def_file = ''
else:
head, tail = os.path.split (output_filename)
modname, ext = os.path.splitext (tail)
temp_dir = os.path.dirname(objects[0]) # preserve tree structure
def_file = os.path.join (temp_dir, '%s.def' % modname)
contents = ['EXPORTS']
for sym in (export_symbols or []):
contents.append(' %s=_%s' % (sym, sym))
self.execute(write_file, (def_file, contents),
"writing %s" % def_file)
# Borland C++ has problems with '/' in paths
objects2 = map(os.path.normpath, objects)
# split objects in .obj and .res files
# Borland C++ needs them at different positions in the command line
objects = [startup_obj]
resources = []
for file in objects2:
(base, ext) = os.path.splitext(os.path.normcase(file))
if ext == '.res':
resources.append(file)
else:
objects.append(file)
for l in library_dirs:
ld_args.append("/L%s" % os.path.normpath(l))
ld_args.append("/L.") # we sometimes use relative paths
# list of object files
ld_args.extend(objects)
# XXX the command-line syntax for Borland C++ is a bit wonky;
# certain filenames are jammed together in one big string, but
# comma-delimited. This doesn't mesh too well with the
# Unix-centric attitude (with a DOS/Windows quoting hack) of
# 'spawn()', so constructing the argument list is a bit
# awkward. Note that doing the obvious thing and jamming all
# the filenames and commas into one argument would be wrong,
# because 'spawn()' would quote any filenames with spaces in
            # them. Arghghh! Apparently it works fine as coded...
# name of dll/exe file
ld_args.extend([',',output_filename])
# no map file and start libraries
ld_args.append(',,')
for lib in libraries:
# see if we find it and if there is a bcpp specific lib
# (xxx_bcpp.lib)
libfile = self.find_library_file(library_dirs, lib, debug)
if libfile is None:
ld_args.append(lib)
# probably a BCPP internal library -- don't warn
else:
# full name which prefers bcpp_xxx.lib over xxx.lib
ld_args.append(libfile)
# some default libraries
ld_args.append ('import32')
ld_args.append ('cw32mt')
# def file for export symbols
ld_args.extend([',',def_file])
# add resource files
ld_args.append(',')
ld_args.extend(resources)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath (os.path.dirname (output_filename))
try:
self.spawn ([self.linker] + ld_args)
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
# link ()
# -- Miscellaneous methods -----------------------------------------
def find_library_file (self, dirs, lib, debug=0):
# List of effective library names to try, in order of preference:
# xxx_bcpp.lib is better than xxx.lib
# and xxx_d.lib is better than xxx.lib if debug is set
#
# The "_bcpp" suffix is to handle a Python installation for people
# with multiple compilers (primarily Distutils hackers, I suspect
# ;-). The idea is they'd have one static library for each
# compiler they care about, since (almost?) every Windows compiler
# seems to have a different format for static libraries.
if debug:
dlib = (lib + "_d")
try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
else:
try_names = (lib + "_bcpp", lib)
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename(name))
if os.path.exists(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
# overwrite the one from CCompiler to support rc and res-files
def object_filenames (self,
source_filenames,
strip_dir=0,
output_dir=''):
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
# use normcase to make sure '.rc' is really '.rc' and not '.RC'
(base, ext) = os.path.splitext (os.path.normcase(src_name))
if ext not in (self.src_extensions + ['.rc','.res']):
raise UnknownFileError("unknown file type '%s' (from '%s')" % \
(ext, src_name))
if strip_dir:
base = os.path.basename (base)
if ext == '.res':
# these can go unchanged
obj_names.append (os.path.join (output_dir, base + ext))
elif ext == '.rc':
# these need to be compiled to .res-files
obj_names.append (os.path.join (output_dir, base + '.res'))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
# object_filenames ()
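    # Illustrative expectations (an assumption, not from the source):
    #     object_filenames(['foo.c', 'bar.rc', 'baz.res'])
    #       -> ['foo.obj', 'bar.res', 'baz.res']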
def preprocess (self,
source,
output_file=None,
macros=None,
include_dirs=None,
extra_preargs=None,
extra_postargs=None):
(_, macros, include_dirs) = \
self._fix_compile_args(None, macros, include_dirs)
pp_opts = gen_preprocess_options(macros, include_dirs)
pp_args = ['cpp32.exe'] + pp_opts
if output_file is not None:
pp_args.append('-o' + output_file)
if extra_preargs:
pp_args[:0] = extra_preargs
if extra_postargs:
pp_args.extend(extra_postargs)
pp_args.append(source)
# We need to preprocess: either we're being forced to, or the
# source file is newer than the target (or the target doesn't
# exist).
if self.force or output_file is None or newer(source, output_file):
if output_file:
self.mkpath(os.path.dirname(output_file))
try:
self.spawn(pp_args)
except DistutilsExecError as msg:
print(msg)
raise CompileError(msg)
# preprocess()
| apache-2.0 |
ahb0327/intellij-community | python/helpers/coverage/bytecode.py | 209 | 2036 | """Bytecode manipulation for coverage.py"""
import opcode, types
from coverage.backward import byte_to_int
class ByteCode(object):
"""A single bytecode."""
def __init__(self):
# The offset of this bytecode in the code object.
self.offset = -1
# The opcode, defined in the `opcode` module.
self.op = -1
# The argument, a small integer, whose meaning depends on the opcode.
self.arg = -1
# The offset in the code object of the next bytecode.
self.next_offset = -1
# The offset to jump to.
self.jump_to = -1
class ByteCodes(object):
"""Iterator over byte codes in `code`.
Returns `ByteCode` objects.
"""
# pylint: disable=R0924
def __init__(self, code):
self.code = code
def __getitem__(self, i):
return byte_to_int(self.code[i])
def __iter__(self):
offset = 0
while offset < len(self.code):
bc = ByteCode()
bc.op = self[offset]
bc.offset = offset
next_offset = offset+1
if bc.op >= opcode.HAVE_ARGUMENT:
bc.arg = self[offset+1] + 256*self[offset+2]
next_offset += 2
label = -1
if bc.op in opcode.hasjrel:
label = next_offset + bc.arg
elif bc.op in opcode.hasjabs:
label = bc.arg
bc.jump_to = label
bc.next_offset = offset = next_offset
yield bc
class CodeObjects(object):
"""Iterate over all the code objects in `code`."""
def __init__(self, code):
self.stack = [code]
def __iter__(self):
while self.stack:
# We're going to return the code object on the stack, but first
# push its children for later returning.
code = self.stack.pop()
for c in code.co_consts:
if isinstance(c, types.CodeType):
self.stack.append(c)
yield code
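# A hedged usage sketch (not part of the module; assumes pre-3.6 CPython
# bytecode, where an opcode's argument occupies the next two bytes):
#
#   code = compile("x = 1\nif x:\n    y = 2\n", "<example>", "exec")
#   for c in CodeObjects(code):
#       for bc in ByteCodes(c.co_code):
#           print(bc.offset, opcode.opname[bc.op], bc.arg, bc.jump_to)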
| apache-2.0 |
jarvys/django-1.7-jdb | tests/db_backends/tests.py | 32 | 1074 | from django.test import TestCase
from django.db.backends import BaseDatabaseWrapper
class DummyDatabaseWrapper(BaseDatabaseWrapper):
pass
class DummyObject(object):
alias = None
class DbBackendTests(TestCase):
def test_compare_db_wrapper_with_another_object(self):
wrapper = BaseDatabaseWrapper({})
self.assertFalse(wrapper == 'not-a-db-wrapper')
def test_compare_db_wrapper_with_another_object_with_alias(self):
wrapper = BaseDatabaseWrapper({})
obj = DummyObject()
obj.alias = wrapper.alias = 'foobar'
self.assertFalse(wrapper == obj)
def test_negate_compare_db_wrapper_with_another_object(self):
wrapper = BaseDatabaseWrapper({})
self.assertTrue(wrapper != 'not-a-db-wrapper')
def test_compare_db_wrappers(self):
wrapper1 = DummyDatabaseWrapper({})
wrapper2 = BaseDatabaseWrapper({})
wrapper1.alias = wrapper2.alias = 'foo'
self.assertTrue(wrapper1 == wrapper2)
wrapper1.alias = 'bar'
self.assertFalse(wrapper1 == wrapper2)
| bsd-3-clause |
stonegithubs/odoo | addons/l10n_ch/account_wizard.py | 424 | 2192 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi. Copyright Camptocamp SA
# Financial contributors: Hasa SA, Open Net SA,
# Prisme Solutions Informatique SA, Quod SA
#
# Translation contributors: brain-tec AG, Agile Business Group
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv.orm import TransientModel
class WizardMultiChartsAccounts(TransientModel):
_inherit ='wizard.multi.charts.accounts'
def onchange_chart_template_id(self, cursor, uid, ids, chart_template_id=False, context=None):
if context is None: context = {}
res = super(WizardMultiChartsAccounts, self).onchange_chart_template_id(cursor, uid, ids,
chart_template_id=chart_template_id,
context=context)
# 0 is evaluated as False in python so we have to do this
# because original wizard test code_digits value on a float widget
if chart_template_id:
sterchi_template = self.pool.get('ir.model.data').get_object(cursor, uid, 'l10n_ch', 'l10nch_chart_template')
if sterchi_template.id == chart_template_id:
res['value']['code_digits'] = 0
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mhvk/astropy | astropy/coordinates/builtin_frames/altaz.py | 3 | 5451 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
from astropy import units as u
from astropy.utils.decorators import format_doc
from astropy.coordinates import representation as r
from astropy.coordinates.baseframe import BaseCoordinateFrame, RepresentationMapping, base_doc
from astropy.coordinates.attributes import (TimeAttribute,
QuantityAttribute,
EarthLocationAttribute)
__all__ = ['AltAz']
_90DEG = 90*u.deg
doc_components = """
az : `~astropy.coordinates.Angle`, optional, keyword-only
The Azimuth for this object (``alt`` must also be given and
``representation`` must be None).
alt : `~astropy.coordinates.Angle`, optional, keyword-only
The Altitude for this object (``az`` must also be given and
``representation`` must be None).
distance : `~astropy.units.Quantity` ['length'], optional, keyword-only
The Distance for this object along the line-of-sight.
pm_az_cosalt : `~astropy.units.Quantity` ['angular speed'], optional, keyword-only
The proper motion in azimuth (including the ``cos(alt)`` factor) for
this object (``pm_alt`` must also be given).
pm_alt : `~astropy.units.Quantity` ['angular speed'], optional, keyword-only
The proper motion in altitude for this object (``pm_az_cosalt`` must
also be given).
radial_velocity : `~astropy.units.Quantity` ['speed'], optional, keyword-only
The radial velocity of this object."""
doc_footer = """
Other parameters
----------------
obstime : `~astropy.time.Time`
The time at which the observation is taken. Used for determining the
position and orientation of the Earth.
location : `~astropy.coordinates.EarthLocation`
The location on the Earth. This can be specified either as an
`~astropy.coordinates.EarthLocation` object or as anything that can be
transformed to an `~astropy.coordinates.ITRS` frame.
pressure : `~astropy.units.Quantity` ['pressure']
The atmospheric pressure as an `~astropy.units.Quantity` with pressure
units. This is necessary for performing refraction corrections.
Setting this to 0 (the default) will disable refraction calculations
when transforming to/from this frame.
temperature : `~astropy.units.Quantity` ['temperature']
The ground-level temperature as an `~astropy.units.Quantity` in
deg C. This is necessary for performing refraction corrections.
relative_humidity : `~astropy.units.Quantity` ['dimensionless'] or number
The relative humidity as a dimensionless quantity between 0 to 1.
This is necessary for performing refraction corrections.
obswl : `~astropy.units.Quantity` ['length']
The average wavelength of observations as an `~astropy.units.Quantity`
with length units. This is necessary for performing refraction
corrections.
Notes
-----
The refraction model is based on that implemented in ERFA, which is fast
but becomes inaccurate for altitudes below about 5 degrees. Near and below
altitudes of 0, it can even give meaningless answers, and in this case
transforming to AltAz and back to another frame can give highly discrepant
results. For much better numerical stability, leave the ``pressure`` at
``0`` (the default), thereby disabling the refraction correction and
yielding "topocentric" horizontal coordinates.
"""
@format_doc(base_doc, components=doc_components, footer=doc_footer)
class AltAz(BaseCoordinateFrame):
"""
A coordinate or frame in the Altitude-Azimuth system (Horizontal
coordinates) with respect to the WGS84 ellipsoid. Azimuth is oriented
East of North (i.e., N=0, E=90 degrees). Altitude is also known as
elevation angle, so this frame is also in the Azimuth-Elevation system.
This frame is assumed to *include* refraction effects if the ``pressure``
frame attribute is non-zero.
The frame attributes are listed under **Other Parameters**, which are
necessary for transforming from AltAz to some other system.
"""
frame_specific_representation_info = {
r.SphericalRepresentation: [
RepresentationMapping('lon', 'az'),
RepresentationMapping('lat', 'alt')
]
}
default_representation = r.SphericalRepresentation
default_differential = r.SphericalCosLatDifferential
obstime = TimeAttribute(default=None)
location = EarthLocationAttribute(default=None)
pressure = QuantityAttribute(default=0, unit=u.hPa)
temperature = QuantityAttribute(default=0, unit=u.deg_C)
relative_humidity = QuantityAttribute(default=0, unit=u.dimensionless_unscaled)
obswl = QuantityAttribute(default=1*u.micron, unit=u.micron)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
@property
def secz(self):
"""
Secant of the zenith angle for this coordinate, a common estimate of
the airmass.
"""
return 1/np.sin(self.alt)
@property
def zen(self):
"""
The zenith angle (or zenith distance / co-altitude) for this coordinate.
"""
return _90DEG.to(self.alt.unit) - self.alt
# self-transform defined in cirs_observed_transforms.py
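# A minimal usage sketch (an assumption, not part of this module):
#
#   import astropy.units as u
#   from astropy.coordinates import AltAz
#
#   coord = AltAz(az=120*u.deg, alt=30*u.deg)
#   coord.secz   # 1/sin(30 deg) = 2.0, the airmass estimate
#   coord.zen    # 90 deg - 30 deg = 60 deg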
| bsd-3-clause |
kayhayen/Nuitka | nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Options/PackageOption.py | 7 | 1965 | #
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PackageOption.py 2014/07/05 09:42:21 garyo"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
warned = False
def PackageOption(*args, **kw):
global warned
if not warned:
msg = "The PackageOption() function is deprecated; use the PackageVariable() function instead."
SCons.Warnings.warn(SCons.Warnings.DeprecatedOptionsWarning, msg)
warned = True
return SCons.Variables.PackageVariable(*args, **kw)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
Sephyros/URI-Python | 01 - Iniciante/1006.py | 1 | 1180 | # URI Online Judge | 1006
# Average 2
#
# Adapted by Neilor Tonin, URI Brasil
# Timelimit: 1
#
# Read 3 values, in this case the variables A, B and C, which are the three
# grades of a student. Then compute the student's average, given that grade A
# has weight 2, grade B has weight 3 and grade C has weight 5. Assume each
# grade can go from 0 up to 10.0, always with one decimal place.
#
# Input
# The input file contains 3 values with one decimal place, of double precision.
#
# Output
# Print the variable MEDIA as in the example below, with 1 digit after the
# decimal point and with a blank space before and after the equals sign. As in
# all problems, don't forget to print the end of line after the result,
# otherwise you will receive "Presentation Error".
# Input Samples        Output Samples
#
# 5.0                  MEDIA = 6.3
# 6.0
# 7.0
#
# 5.0                  MEDIA = 9.0
# 10.0
# 10.0
#
# 10.0                 MEDIA = 7.5
# 10.0
# 5.0
#
# Solved by Leonardo Vinicius Maciel (aka Sephyros)
A = float(input())
B = float(input())
C = float(input())
MEDIA = ((A * 2.0) + (B * 3.0) + (C * 5.0))/10
print("MEDIA = %.1f" % MEDIA)
| gpl-3.0 |
pydlv/rlauncher | requests/packages/chardet/big5freq.py | 3133 | 82594 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ratio = 512/(5401-512)=0.105
#
# Typical Distribution Ratio is about 25% of the ideal one, still much higher than RDR
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
#Char to FreqOrder table
BIG5_TABLE_SIZE = 5376
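# Hedged note (an assumption, not stated in this file): the distribution
# analyser maps each decoded Big5 character to its index in this table;
# indices below 512 mark the most frequent characters, and their share of the
# input is compared against the typical ratio above.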
Big5CharToFreqOrder = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376 #last 512
# Everything below is of no interest for detection purposes (see the sketch after this table).
2522,1613,4812,5799,3345,3945,2523,5800,4162,5801,1637,4163,2471,4813,3946,5802, # 5392
2500,3034,3800,5803,5804,2195,4814,5805,2163,5806,5807,5808,5809,5810,5811,5812, # 5408
5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828, # 5424
5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844, # 5440
5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856,5857,5858,5859,5860, # 5456
5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872,5873,5874,5875,5876, # 5472
5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888,5889,5890,5891,5892, # 5488
5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,5906,5907,5908, # 5504
5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,5921,5922,5923,5924, # 5520
5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936,5937,5938,5939,5940, # 5536
5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952,5953,5954,5955,5956, # 5552
5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968,5969,5970,5971,5972, # 5568
5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984,5985,5986,5987,5988, # 5584
5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004, # 5600
6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020, # 5616
6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032,6033,6034,6035,6036, # 5632
6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052, # 5648
6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068, # 5664
6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084, # 5680
6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100, # 5696
6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116, # 5712
6117,6118,6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,6132, # 5728
6133,6134,6135,6136,6137,6138,6139,6140,6141,6142,6143,6144,6145,6146,6147,6148, # 5744
6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,6164, # 5760
6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,6180, # 5776
6181,6182,6183,6184,6185,6186,6187,6188,6189,6190,6191,6192,6193,6194,6195,6196, # 5792
6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,6211,6212, # 5808
6213,6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,3670,6224,6225,6226,6227, # 5824
6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,6242,6243, # 5840
6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,6254,6255,6256,6257,6258,6259, # 5856
6260,6261,6262,6263,6264,6265,6266,6267,6268,6269,6270,6271,6272,6273,6274,6275, # 5872
6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,4815,6286,6287,6288,6289,6290, # 5888
6291,6292,4816,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,6303,6304,6305, # 5904
6306,6307,6308,6309,6310,6311,4817,4818,6312,6313,6314,6315,6316,6317,6318,4819, # 5920
6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,6334, # 5936
6335,6336,6337,4820,6338,6339,6340,6341,6342,6343,6344,6345,6346,6347,6348,6349, # 5952
6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,6364,6365, # 5968
6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,6380,6381, # 5984
6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,6396,6397, # 6000
6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,3441,6411,6412, # 6016
6413,6414,6415,6416,6417,6418,6419,6420,6421,6422,6423,6424,6425,4440,6426,6427, # 6032
6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,6439,6440,6441,6442,6443, # 6048
6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,4821,6455,6456,6457,6458, # 6064
6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,6473,6474, # 6080
6475,6476,6477,3947,3948,6478,6479,6480,6481,3272,4441,6482,6483,6484,6485,4442, # 6096
6486,6487,6488,6489,6490,6491,6492,6493,6494,6495,6496,4822,6497,6498,6499,6500, # 6112
6501,6502,6503,6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516, # 6128
6517,6518,6519,6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532, # 6144
6533,6534,6535,6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548, # 6160
6549,6550,6551,6552,6553,6554,6555,6556,2784,6557,4823,6558,6559,6560,6561,6562, # 6176
6563,6564,6565,6566,6567,6568,6569,3949,6570,6571,6572,4824,6573,6574,6575,6576, # 6192
6577,6578,6579,6580,6581,6582,6583,4825,6584,6585,6586,3950,2785,6587,6588,6589, # 6208
6590,6591,6592,6593,6594,6595,6596,6597,6598,6599,6600,6601,6602,6603,6604,6605, # 6224
6606,6607,6608,6609,6610,6611,6612,4826,6613,6614,6615,4827,6616,6617,6618,6619, # 6240
6620,6621,6622,6623,6624,6625,4164,6626,6627,6628,6629,6630,6631,6632,6633,6634, # 6256
3547,6635,4828,6636,6637,6638,6639,6640,6641,6642,3951,2984,6643,6644,6645,6646, # 6272
6647,6648,6649,4165,6650,4829,6651,6652,4830,6653,6654,6655,6656,6657,6658,6659, # 6288
6660,6661,6662,4831,6663,6664,6665,6666,6667,6668,6669,6670,6671,4166,6672,4832, # 6304
3952,6673,6674,6675,6676,4833,6677,6678,6679,4167,6680,6681,6682,3198,6683,6684, # 6320
6685,6686,6687,6688,6689,6690,6691,6692,6693,6694,6695,6696,6697,4834,6698,6699, # 6336
6700,6701,6702,6703,6704,6705,6706,6707,6708,6709,6710,6711,6712,6713,6714,6715, # 6352
6716,6717,6718,6719,6720,6721,6722,6723,6724,6725,6726,6727,6728,6729,6730,6731, # 6368
6732,6733,6734,4443,6735,6736,6737,6738,6739,6740,6741,6742,6743,6744,6745,4444, # 6384
6746,6747,6748,6749,6750,6751,6752,6753,6754,6755,6756,6757,6758,6759,6760,6761, # 6400
6762,6763,6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777, # 6416
6778,6779,6780,6781,4168,6782,6783,3442,6784,6785,6786,6787,6788,6789,6790,6791, # 6432
4169,6792,6793,6794,6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806, # 6448
6807,6808,6809,6810,6811,4835,6812,6813,6814,4445,6815,6816,4446,6817,6818,6819, # 6464
6820,6821,6822,6823,6824,6825,6826,6827,6828,6829,6830,6831,6832,6833,6834,6835, # 6480
3548,6836,6837,6838,6839,6840,6841,6842,6843,6844,6845,6846,4836,6847,6848,6849, # 6496
6850,6851,6852,6853,6854,3953,6855,6856,6857,6858,6859,6860,6861,6862,6863,6864, # 6512
6865,6866,6867,6868,6869,6870,6871,6872,6873,6874,6875,6876,6877,3199,6878,6879, # 6528
6880,6881,6882,4447,6883,6884,6885,6886,6887,6888,6889,6890,6891,6892,6893,6894, # 6544
6895,6896,6897,6898,6899,6900,6901,6902,6903,6904,4170,6905,6906,6907,6908,6909, # 6560
6910,6911,6912,6913,6914,6915,6916,6917,6918,6919,6920,6921,6922,6923,6924,6925, # 6576
6926,6927,4837,6928,6929,6930,6931,6932,6933,6934,6935,6936,3346,6937,6938,4838, # 6592
6939,6940,6941,4448,6942,6943,6944,6945,6946,4449,6947,6948,6949,6950,6951,6952, # 6608
6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,6967,6968, # 6624
6969,6970,6971,6972,6973,6974,6975,6976,6977,6978,6979,6980,6981,6982,6983,6984, # 6640
6985,6986,6987,6988,6989,6990,6991,6992,6993,6994,3671,6995,6996,6997,6998,4839, # 6656
6999,7000,7001,7002,3549,7003,7004,7005,7006,7007,7008,7009,7010,7011,7012,7013, # 6672
7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,7028,7029, # 6688
7030,4840,7031,7032,7033,7034,7035,7036,7037,7038,4841,7039,7040,7041,7042,7043, # 6704
7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,7059, # 6720
7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,2985,7071,7072,7073,7074, # 6736
7075,7076,7077,7078,7079,7080,4842,7081,7082,7083,7084,7085,7086,7087,7088,7089, # 6752
7090,7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105, # 6768
7106,7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,4450,7119,7120, # 6784
7121,7122,7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136, # 6800
7137,7138,7139,7140,7141,7142,7143,4843,7144,7145,7146,7147,7148,7149,7150,7151, # 6816
7152,7153,7154,7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167, # 6832
7168,7169,7170,7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183, # 6848
7184,7185,7186,7187,7188,4171,4172,7189,7190,7191,7192,7193,7194,7195,7196,7197, # 6864
7198,7199,7200,7201,7202,7203,7204,7205,7206,7207,7208,7209,7210,7211,7212,7213, # 6880
7214,7215,7216,7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229, # 6896
7230,7231,7232,7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245, # 6912
7246,7247,7248,7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261, # 6928
7262,7263,7264,7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277, # 6944
7278,7279,7280,7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293, # 6960
7294,7295,7296,4844,7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308, # 6976
7309,7310,7311,7312,7313,7314,7315,7316,4451,7317,7318,7319,7320,7321,7322,7323, # 6992
7324,7325,7326,7327,7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339, # 7008
7340,7341,7342,7343,7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,4173,7354, # 7024
7355,4845,7356,7357,7358,7359,7360,7361,7362,7363,7364,7365,7366,7367,7368,7369, # 7040
7370,7371,7372,7373,7374,7375,7376,7377,7378,7379,7380,7381,7382,7383,7384,7385, # 7056
7386,7387,7388,4846,7389,7390,7391,7392,7393,7394,7395,7396,7397,7398,7399,7400, # 7072
7401,7402,7403,7404,7405,3672,7406,7407,7408,7409,7410,7411,7412,7413,7414,7415, # 7088
7416,7417,7418,7419,7420,7421,7422,7423,7424,7425,7426,7427,7428,7429,7430,7431, # 7104
7432,7433,7434,7435,7436,7437,7438,7439,7440,7441,7442,7443,7444,7445,7446,7447, # 7120
7448,7449,7450,7451,7452,7453,4452,7454,3200,7455,7456,7457,7458,7459,7460,7461, # 7136
7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,7472,7473,7474,4847,7475,7476, # 7152
7477,3133,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,7488,7489,7490,7491, # 7168
7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,3347,7503,7504,7505,7506, # 7184
7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,7520,7521,4848, # 7200
7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,7536,7537, # 7216
7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,3801,4849,7550,7551, # 7232
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567, # 7248
7568,7569,3035,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582, # 7264
7583,7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598, # 7280
7599,7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614, # 7296
7615,7616,4850,7617,7618,3802,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628, # 7312
7629,7630,7631,7632,4851,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643, # 7328
7644,7645,7646,7647,7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659, # 7344
7660,7661,7662,7663,7664,7665,7666,7667,7668,7669,7670,4453,7671,7672,7673,7674, # 7360
7675,7676,7677,7678,7679,7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690, # 7376
7691,7692,7693,7694,7695,7696,7697,3443,7698,7699,7700,7701,7702,4454,7703,7704, # 7392
7705,7706,7707,7708,7709,7710,7711,7712,7713,2472,7714,7715,7716,7717,7718,7719, # 7408
7720,7721,7722,7723,7724,7725,7726,7727,7728,7729,7730,7731,3954,7732,7733,7734, # 7424
7735,7736,7737,7738,7739,7740,7741,7742,7743,7744,7745,7746,7747,7748,7749,7750, # 7440
3134,7751,7752,4852,7753,7754,7755,4853,7756,7757,7758,7759,7760,4174,7761,7762, # 7456
7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,7777,7778, # 7472
7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,7792,7793,7794, # 7488
7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,4854,7806,7807,7808,7809, # 7504
7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824,7825, # 7520
4855,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7536
7841,7842,7843,7844,7845,7846,7847,3955,7848,7849,7850,7851,7852,7853,7854,7855, # 7552
7856,7857,7858,7859,7860,3444,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870, # 7568
7871,7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886, # 7584
7887,7888,7889,7890,7891,4175,7892,7893,7894,7895,7896,4856,4857,7897,7898,7899, # 7600
7900,2598,7901,7902,7903,7904,7905,7906,7907,7908,4455,7909,7910,7911,7912,7913, # 7616
7914,3201,7915,7916,7917,7918,7919,7920,7921,4858,7922,7923,7924,7925,7926,7927, # 7632
7928,7929,7930,7931,7932,7933,7934,7935,7936,7937,7938,7939,7940,7941,7942,7943, # 7648
7944,7945,7946,7947,7948,7949,7950,7951,7952,7953,7954,7955,7956,7957,7958,7959, # 7664
7960,7961,7962,7963,7964,7965,7966,7967,7968,7969,7970,7971,7972,7973,7974,7975, # 7680
7976,7977,7978,7979,7980,7981,4859,7982,7983,7984,7985,7986,7987,7988,7989,7990, # 7696
7991,7992,7993,7994,7995,7996,4860,7997,7998,7999,8000,8001,8002,8003,8004,8005, # 7712
8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,8016,4176,8017,8018,8019,8020, # 7728
8021,8022,8023,4861,8024,8025,8026,8027,8028,8029,8030,8031,8032,8033,8034,8035, # 7744
8036,4862,4456,8037,8038,8039,8040,4863,8041,8042,8043,8044,8045,8046,8047,8048, # 7760
8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,8064, # 7776
8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,8080, # 7792
8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,8096, # 7808
8097,8098,8099,4864,4177,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110, # 7824
8111,8112,8113,8114,8115,8116,8117,8118,8119,8120,4178,8121,8122,8123,8124,8125, # 7840
8126,8127,8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141, # 7856
8142,8143,8144,8145,4865,4866,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155, # 7872
8156,8157,8158,8159,8160,8161,8162,8163,8164,8165,4179,8166,8167,8168,8169,8170, # 7888
8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181,4457,8182,8183,8184,8185, # 7904
8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197,8198,8199,8200,8201, # 7920
8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213,8214,8215,8216,8217, # 7936
8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229,8230,8231,8232,8233, # 7952
8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245,8246,8247,8248,8249, # 7968
8250,8251,8252,8253,8254,8255,8256,3445,8257,8258,8259,8260,8261,8262,4458,8263, # 7984
8264,8265,8266,8267,8268,8269,8270,8271,8272,4459,8273,8274,8275,8276,3550,8277, # 8000
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,4460,8290,8291,8292, # 8016
8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,4867, # 8032
8308,8309,8310,8311,8312,3551,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322, # 8048
8323,8324,8325,8326,4868,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337, # 8064
8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353, # 8080
8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,4869,4461,8364,8365,8366,8367, # 8096
8368,8369,8370,4870,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382, # 8112
8383,8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398, # 8128
8399,8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,4871,8411,8412,8413, # 8144
8414,8415,8416,8417,8418,8419,8420,8421,8422,4462,8423,8424,8425,8426,8427,8428, # 8160
8429,8430,8431,8432,8433,2986,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443, # 8176
8444,8445,8446,8447,8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459, # 8192
8460,8461,8462,8463,8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475, # 8208
8476,8477,8478,4180,8479,8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490, # 8224
8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506, # 8240
8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522, # 8256
8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538, # 8272
8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554, # 8288
8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,4872,8565,8566,8567,8568,8569, # 8304
8570,8571,8572,8573,4873,8574,8575,8576,8577,8578,8579,8580,8581,8582,8583,8584, # 8320
8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597,8598,8599,8600, # 8336
8601,8602,8603,8604,8605,3803,8606,8607,8608,8609,8610,8611,8612,8613,4874,3804, # 8352
8614,8615,8616,8617,8618,8619,8620,8621,3956,8622,8623,8624,8625,8626,8627,8628, # 8368
8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,2865,8639,8640,8641,8642,8643, # 8384
8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,4463,8657,8658, # 8400
8659,4875,4876,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672, # 8416
8673,8674,8675,8676,8677,8678,8679,8680,8681,4464,8682,8683,8684,8685,8686,8687, # 8432
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703, # 8448
8704,8705,8706,8707,8708,8709,2261,8710,8711,8712,8713,8714,8715,8716,8717,8718, # 8464
8719,8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,4181, # 8480
8734,8735,8736,8737,8738,8739,8740,8741,8742,8743,8744,8745,8746,8747,8748,8749, # 8496
8750,8751,8752,8753,8754,8755,8756,8757,8758,8759,8760,8761,8762,8763,4877,8764, # 8512
8765,8766,8767,8768,8769,8770,8771,8772,8773,8774,8775,8776,8777,8778,8779,8780, # 8528
8781,8782,8783,8784,8785,8786,8787,8788,4878,8789,4879,8790,8791,8792,4880,8793, # 8544
8794,8795,8796,8797,8798,8799,8800,8801,4881,8802,8803,8804,8805,8806,8807,8808, # 8560
8809,8810,8811,8812,8813,8814,8815,3957,8816,8817,8818,8819,8820,8821,8822,8823, # 8576
8824,8825,8826,8827,8828,8829,8830,8831,8832,8833,8834,8835,8836,8837,8838,8839, # 8592
8840,8841,8842,8843,8844,8845,8846,8847,4882,8848,8849,8850,8851,8852,8853,8854, # 8608
8855,8856,8857,8858,8859,8860,8861,8862,8863,8864,8865,8866,8867,8868,8869,8870, # 8624
8871,8872,8873,8874,8875,8876,8877,8878,8879,8880,8881,8882,8883,8884,3202,8885, # 8640
8886,8887,8888,8889,8890,8891,8892,8893,8894,8895,8896,8897,8898,8899,8900,8901, # 8656
8902,8903,8904,8905,8906,8907,8908,8909,8910,8911,8912,8913,8914,8915,8916,8917, # 8672
8918,8919,8920,8921,8922,8923,8924,4465,8925,8926,8927,8928,8929,8930,8931,8932, # 8688
4883,8933,8934,8935,8936,8937,8938,8939,8940,8941,8942,8943,2214,8944,8945,8946, # 8704
8947,8948,8949,8950,8951,8952,8953,8954,8955,8956,8957,8958,8959,8960,8961,8962, # 8720
8963,8964,8965,4884,8966,8967,8968,8969,8970,8971,8972,8973,8974,8975,8976,8977, # 8736
8978,8979,8980,8981,8982,8983,8984,8985,8986,8987,8988,8989,8990,8991,8992,4885, # 8752
8993,8994,8995,8996,8997,8998,8999,9000,9001,9002,9003,9004,9005,9006,9007,9008, # 8768
9009,9010,9011,9012,9013,9014,9015,9016,9017,9018,9019,9020,9021,4182,9022,9023, # 8784
9024,9025,9026,9027,9028,9029,9030,9031,9032,9033,9034,9035,9036,9037,9038,9039, # 8800
9040,9041,9042,9043,9044,9045,9046,9047,9048,9049,9050,9051,9052,9053,9054,9055, # 8816
9056,9057,9058,9059,9060,9061,9062,9063,4886,9064,9065,9066,9067,9068,9069,4887, # 8832
9070,9071,9072,9073,9074,9075,9076,9077,9078,9079,9080,9081,9082,9083,9084,9085, # 8848
9086,9087,9088,9089,9090,9091,9092,9093,9094,9095,9096,9097,9098,9099,9100,9101, # 8864
9102,9103,9104,9105,9106,9107,9108,9109,9110,9111,9112,9113,9114,9115,9116,9117, # 8880
9118,9119,9120,9121,9122,9123,9124,9125,9126,9127,9128,9129,9130,9131,9132,9133, # 8896
9134,9135,9136,9137,9138,9139,9140,9141,3958,9142,9143,9144,9145,9146,9147,9148, # 8912
9149,9150,9151,4888,9152,9153,9154,9155,9156,9157,9158,9159,9160,9161,9162,9163, # 8928
9164,9165,9166,9167,9168,9169,9170,9171,9172,9173,9174,9175,4889,9176,9177,9178, # 8944
9179,9180,9181,9182,9183,9184,9185,9186,9187,9188,9189,9190,9191,9192,9193,9194, # 8960
9195,9196,9197,9198,9199,9200,9201,9202,9203,4890,9204,9205,9206,9207,9208,9209, # 8976
9210,9211,9212,9213,9214,9215,9216,9217,9218,9219,9220,9221,9222,4466,9223,9224, # 8992
9225,9226,9227,9228,9229,9230,9231,9232,9233,9234,9235,9236,9237,9238,9239,9240, # 9008
9241,9242,9243,9244,9245,4891,9246,9247,9248,9249,9250,9251,9252,9253,9254,9255, # 9024
9256,9257,4892,9258,9259,9260,9261,4893,4894,9262,9263,9264,9265,9266,9267,9268, # 9040
9269,9270,9271,9272,9273,4467,9274,9275,9276,9277,9278,9279,9280,9281,9282,9283, # 9056
9284,9285,3673,9286,9287,9288,9289,9290,9291,9292,9293,9294,9295,9296,9297,9298, # 9072
9299,9300,9301,9302,9303,9304,9305,9306,9307,9308,9309,9310,9311,9312,9313,9314, # 9088
9315,9316,9317,9318,9319,9320,9321,9322,4895,9323,9324,9325,9326,9327,9328,9329, # 9104
9330,9331,9332,9333,9334,9335,9336,9337,9338,9339,9340,9341,9342,9343,9344,9345, # 9120
9346,9347,4468,9348,9349,9350,9351,9352,9353,9354,9355,9356,9357,9358,9359,9360, # 9136
9361,9362,9363,9364,9365,9366,9367,9368,9369,9370,9371,9372,9373,4896,9374,4469, # 9152
9375,9376,9377,9378,9379,4897,9380,9381,9382,9383,9384,9385,9386,9387,9388,9389, # 9168
9390,9391,9392,9393,9394,9395,9396,9397,9398,9399,9400,9401,9402,9403,9404,9405, # 9184
9406,4470,9407,2751,9408,9409,3674,3552,9410,9411,9412,9413,9414,9415,9416,9417, # 9200
9418,9419,9420,9421,4898,9422,9423,9424,9425,9426,9427,9428,9429,3959,9430,9431, # 9216
9432,9433,9434,9435,9436,4471,9437,9438,9439,9440,9441,9442,9443,9444,9445,9446, # 9232
9447,9448,9449,9450,3348,9451,9452,9453,9454,9455,9456,9457,9458,9459,9460,9461, # 9248
9462,9463,9464,9465,9466,9467,9468,9469,9470,9471,9472,4899,9473,9474,9475,9476, # 9264
9477,4900,9478,9479,9480,9481,9482,9483,9484,9485,9486,9487,9488,3349,9489,9490, # 9280
9491,9492,9493,9494,9495,9496,9497,9498,9499,9500,9501,9502,9503,9504,9505,9506, # 9296
9507,9508,9509,9510,9511,9512,9513,9514,9515,9516,9517,9518,9519,9520,4901,9521, # 9312
9522,9523,9524,9525,9526,4902,9527,9528,9529,9530,9531,9532,9533,9534,9535,9536, # 9328
9537,9538,9539,9540,9541,9542,9543,9544,9545,9546,9547,9548,9549,9550,9551,9552, # 9344
9553,9554,9555,9556,9557,9558,9559,9560,9561,9562,9563,9564,9565,9566,9567,9568, # 9360
9569,9570,9571,9572,9573,9574,9575,9576,9577,9578,9579,9580,9581,9582,9583,9584, # 9376
3805,9585,9586,9587,9588,9589,9590,9591,9592,9593,9594,9595,9596,9597,9598,9599, # 9392
9600,9601,9602,4903,9603,9604,9605,9606,9607,4904,9608,9609,9610,9611,9612,9613, # 9408
9614,4905,9615,9616,9617,9618,9619,9620,9621,9622,9623,9624,9625,9626,9627,9628, # 9424
9629,9630,9631,9632,4906,9633,9634,9635,9636,9637,9638,9639,9640,9641,9642,9643, # 9440
4907,9644,9645,9646,9647,9648,9649,9650,9651,9652,9653,9654,9655,9656,9657,9658, # 9456
9659,9660,9661,9662,9663,9664,9665,9666,9667,9668,9669,9670,9671,9672,4183,9673, # 9472
9674,9675,9676,9677,4908,9678,9679,9680,9681,4909,9682,9683,9684,9685,9686,9687, # 9488
9688,9689,9690,4910,9691,9692,9693,3675,9694,9695,9696,2945,9697,9698,9699,9700, # 9504
9701,9702,9703,9704,9705,4911,9706,9707,9708,9709,9710,9711,9712,9713,9714,9715, # 9520
9716,9717,9718,9719,9720,9721,9722,9723,9724,9725,9726,9727,9728,9729,9730,9731, # 9536
9732,9733,9734,9735,4912,9736,9737,9738,9739,9740,4913,9741,9742,9743,9744,9745, # 9552
9746,9747,9748,9749,9750,9751,9752,9753,9754,9755,9756,9757,9758,4914,9759,9760, # 9568
9761,9762,9763,9764,9765,9766,9767,9768,9769,9770,9771,9772,9773,9774,9775,9776, # 9584
9777,9778,9779,9780,9781,9782,4915,9783,9784,9785,9786,9787,9788,9789,9790,9791, # 9600
9792,9793,4916,9794,9795,9796,9797,9798,9799,9800,9801,9802,9803,9804,9805,9806, # 9616
9807,9808,9809,9810,9811,9812,9813,9814,9815,9816,9817,9818,9819,9820,9821,9822, # 9632
9823,9824,9825,9826,9827,9828,9829,9830,9831,9832,9833,9834,9835,9836,9837,9838, # 9648
9839,9840,9841,9842,9843,9844,9845,9846,9847,9848,9849,9850,9851,9852,9853,9854, # 9664
9855,9856,9857,9858,9859,9860,9861,9862,9863,9864,9865,9866,9867,9868,4917,9869, # 9680
9870,9871,9872,9873,9874,9875,9876,9877,9878,9879,9880,9881,9882,9883,9884,9885, # 9696
9886,9887,9888,9889,9890,9891,9892,4472,9893,9894,9895,9896,9897,3806,9898,9899, # 9712
9900,9901,9902,9903,9904,9905,9906,9907,9908,9909,9910,9911,9912,9913,9914,4918, # 9728
9915,9916,9917,4919,9918,9919,9920,9921,4184,9922,9923,9924,9925,9926,9927,9928, # 9744
9929,9930,9931,9932,9933,9934,9935,9936,9937,9938,9939,9940,9941,9942,9943,9944, # 9760
9945,9946,4920,9947,9948,9949,9950,9951,9952,9953,9954,9955,4185,9956,9957,9958, # 9776
9959,9960,9961,9962,9963,9964,9965,4921,9966,9967,9968,4473,9969,9970,9971,9972, # 9792
9973,9974,9975,9976,9977,4474,9978,9979,9980,9981,9982,9983,9984,9985,9986,9987, # 9808
9988,9989,9990,9991,9992,9993,9994,9995,9996,9997,9998,9999,10000,10001,10002,10003, # 9824
10004,10005,10006,10007,10008,10009,10010,10011,10012,10013,10014,10015,10016,10017,10018,10019, # 9840
10020,10021,4922,10022,4923,10023,10024,10025,10026,10027,10028,10029,10030,10031,10032,10033, # 9856
10034,10035,10036,10037,10038,10039,10040,10041,10042,10043,10044,10045,10046,10047,10048,4924, # 9872
10049,10050,10051,10052,10053,10054,10055,10056,10057,10058,10059,10060,10061,10062,10063,10064, # 9888
10065,10066,10067,10068,10069,10070,10071,10072,10073,10074,10075,10076,10077,10078,10079,10080, # 9904
10081,10082,10083,10084,10085,10086,10087,4475,10088,10089,10090,10091,10092,10093,10094,10095, # 9920
10096,10097,4476,10098,10099,10100,10101,10102,10103,10104,10105,10106,10107,10108,10109,10110, # 9936
10111,2174,10112,10113,10114,10115,10116,10117,10118,10119,10120,10121,10122,10123,10124,10125, # 9952
10126,10127,10128,10129,10130,10131,10132,10133,10134,10135,10136,10137,10138,10139,10140,3807, # 9968
4186,4925,10141,10142,10143,10144,10145,10146,10147,4477,4187,10148,10149,10150,10151,10152, # 9984
10153,4188,10154,10155,10156,10157,10158,10159,10160,10161,4926,10162,10163,10164,10165,10166, #10000
10167,10168,10169,10170,10171,10172,10173,10174,10175,10176,10177,10178,10179,10180,10181,10182, #10016
10183,10184,10185,10186,10187,10188,10189,10190,10191,10192,3203,10193,10194,10195,10196,10197, #10032
10198,10199,10200,4478,10201,10202,10203,10204,4479,10205,10206,10207,10208,10209,10210,10211, #10048
10212,10213,10214,10215,10216,10217,10218,10219,10220,10221,10222,10223,10224,10225,10226,10227, #10064
10228,10229,10230,10231,10232,10233,10234,4927,10235,10236,10237,10238,10239,10240,10241,10242, #10080
10243,10244,10245,10246,10247,10248,10249,10250,10251,10252,10253,10254,10255,10256,10257,10258, #10096
10259,10260,10261,10262,10263,10264,10265,10266,10267,10268,10269,10270,10271,10272,10273,4480, #10112
4928,4929,10274,10275,10276,10277,10278,10279,10280,10281,10282,10283,10284,10285,10286,10287, #10128
10288,10289,10290,10291,10292,10293,10294,10295,10296,10297,10298,10299,10300,10301,10302,10303, #10144
10304,10305,10306,10307,10308,10309,10310,10311,10312,10313,10314,10315,10316,10317,10318,10319, #10160
10320,10321,10322,10323,10324,10325,10326,10327,10328,10329,10330,10331,10332,10333,10334,4930, #10176
10335,10336,10337,10338,10339,10340,10341,10342,4931,10343,10344,10345,10346,10347,10348,10349, #10192
10350,10351,10352,10353,10354,10355,3088,10356,2786,10357,10358,10359,10360,4189,10361,10362, #10208
10363,10364,10365,10366,10367,10368,10369,10370,10371,10372,10373,10374,10375,4932,10376,10377, #10224
10378,10379,10380,10381,10382,10383,10384,10385,10386,10387,10388,10389,10390,10391,10392,4933, #10240
10393,10394,10395,4934,10396,10397,10398,10399,10400,10401,10402,10403,10404,10405,10406,10407, #10256
10408,10409,10410,10411,10412,3446,10413,10414,10415,10416,10417,10418,10419,10420,10421,10422, #10272
10423,4935,10424,10425,10426,10427,10428,10429,10430,4936,10431,10432,10433,10434,10435,10436, #10288
10437,10438,10439,10440,10441,10442,10443,4937,10444,10445,10446,10447,4481,10448,10449,10450, #10304
10451,10452,10453,10454,10455,10456,10457,10458,10459,10460,10461,10462,10463,10464,10465,10466, #10320
10467,10468,10469,10470,10471,10472,10473,10474,10475,10476,10477,10478,10479,10480,10481,10482, #10336
10483,10484,10485,10486,10487,10488,10489,10490,10491,10492,10493,10494,10495,10496,10497,10498, #10352
10499,10500,10501,10502,10503,10504,10505,4938,10506,10507,10508,10509,10510,2552,10511,10512, #10368
10513,10514,10515,10516,3447,10517,10518,10519,10520,10521,10522,10523,10524,10525,10526,10527, #10384
10528,10529,10530,10531,10532,10533,10534,10535,10536,10537,10538,10539,10540,10541,10542,10543, #10400
4482,10544,4939,10545,10546,10547,10548,10549,10550,10551,10552,10553,10554,10555,10556,10557, #10416
10558,10559,10560,10561,10562,10563,10564,10565,10566,10567,3676,4483,10568,10569,10570,10571, #10432
10572,3448,10573,10574,10575,10576,10577,10578,10579,10580,10581,10582,10583,10584,10585,10586, #10448
10587,10588,10589,10590,10591,10592,10593,10594,10595,10596,10597,10598,10599,10600,10601,10602, #10464
10603,10604,10605,10606,10607,10608,10609,10610,10611,10612,10613,10614,10615,10616,10617,10618, #10480
10619,10620,10621,10622,10623,10624,10625,10626,10627,4484,10628,10629,10630,10631,10632,4940, #10496
10633,10634,10635,10636,10637,10638,10639,10640,10641,10642,10643,10644,10645,10646,10647,10648, #10512
10649,10650,10651,10652,10653,10654,10655,10656,4941,10657,10658,10659,2599,10660,10661,10662, #10528
10663,10664,10665,10666,3089,10667,10668,10669,10670,10671,10672,10673,10674,10675,10676,10677, #10544
10678,10679,10680,4942,10681,10682,10683,10684,10685,10686,10687,10688,10689,10690,10691,10692, #10560
10693,10694,10695,10696,10697,4485,10698,10699,10700,10701,10702,10703,10704,4943,10705,3677, #10576
10706,10707,10708,10709,10710,10711,10712,4944,10713,10714,10715,10716,10717,10718,10719,10720, #10592
10721,10722,10723,10724,10725,10726,10727,10728,4945,10729,10730,10731,10732,10733,10734,10735, #10608
10736,10737,10738,10739,10740,10741,10742,10743,10744,10745,10746,10747,10748,10749,10750,10751, #10624
10752,10753,10754,10755,10756,10757,10758,10759,10760,10761,4946,10762,10763,10764,10765,10766, #10640
10767,4947,4948,10768,10769,10770,10771,10772,10773,10774,10775,10776,10777,10778,10779,10780, #10656
10781,10782,10783,10784,10785,10786,10787,10788,10789,10790,10791,10792,10793,10794,10795,10796, #10672
10797,10798,10799,10800,10801,10802,10803,10804,10805,10806,10807,10808,10809,10810,10811,10812, #10688
10813,10814,10815,10816,10817,10818,10819,10820,10821,10822,10823,10824,10825,10826,10827,10828, #10704
10829,10830,10831,10832,10833,10834,10835,10836,10837,10838,10839,10840,10841,10842,10843,10844, #10720
10845,10846,10847,10848,10849,10850,10851,10852,10853,10854,10855,10856,10857,10858,10859,10860, #10736
10861,10862,10863,10864,10865,10866,10867,10868,10869,10870,10871,10872,10873,10874,10875,10876, #10752
10877,10878,4486,10879,10880,10881,10882,10883,10884,10885,4949,10886,10887,10888,10889,10890, #10768
10891,10892,10893,10894,10895,10896,10897,10898,10899,10900,10901,10902,10903,10904,10905,10906, #10784
10907,10908,10909,10910,10911,10912,10913,10914,10915,10916,10917,10918,10919,4487,10920,10921, #10800
10922,10923,10924,10925,10926,10927,10928,10929,10930,10931,10932,4950,10933,10934,10935,10936, #10816
10937,10938,10939,10940,10941,10942,10943,10944,10945,10946,10947,10948,10949,4488,10950,10951, #10832
10952,10953,10954,10955,10956,10957,10958,10959,4190,10960,10961,10962,10963,10964,10965,10966, #10848
10967,10968,10969,10970,10971,10972,10973,10974,10975,10976,10977,10978,10979,10980,10981,10982, #10864
10983,10984,10985,10986,10987,10988,10989,10990,10991,10992,10993,10994,10995,10996,10997,10998, #10880
10999,11000,11001,11002,11003,11004,11005,11006,3960,11007,11008,11009,11010,11011,11012,11013, #10896
11014,11015,11016,11017,11018,11019,11020,11021,11022,11023,11024,11025,11026,11027,11028,11029, #10912
11030,11031,11032,4951,11033,11034,11035,11036,11037,11038,11039,11040,11041,11042,11043,11044, #10928
11045,11046,11047,4489,11048,11049,11050,11051,4952,11052,11053,11054,11055,11056,11057,11058, #10944
4953,11059,11060,11061,11062,11063,11064,11065,11066,11067,11068,11069,11070,11071,4954,11072, #10960
11073,11074,11075,11076,11077,11078,11079,11080,11081,11082,11083,11084,11085,11086,11087,11088, #10976
11089,11090,11091,11092,11093,11094,11095,11096,11097,11098,11099,11100,11101,11102,11103,11104, #10992
11105,11106,11107,11108,11109,11110,11111,11112,11113,11114,11115,3808,11116,11117,11118,11119, #11008
11120,11121,11122,11123,11124,11125,11126,11127,11128,11129,11130,11131,11132,11133,11134,4955, #11024
11135,11136,11137,11138,11139,11140,11141,11142,11143,11144,11145,11146,11147,11148,11149,11150, #11040
11151,11152,11153,11154,11155,11156,11157,11158,11159,11160,11161,4956,11162,11163,11164,11165, #11056
11166,11167,11168,11169,11170,11171,11172,11173,11174,11175,11176,11177,11178,11179,11180,4957, #11072
11181,11182,11183,11184,11185,11186,4958,11187,11188,11189,11190,11191,11192,11193,11194,11195, #11088
11196,11197,11198,11199,11200,3678,11201,11202,11203,11204,11205,11206,4191,11207,11208,11209, #11104
11210,11211,11212,11213,11214,11215,11216,11217,11218,11219,11220,11221,11222,11223,11224,11225, #11120
11226,11227,11228,11229,11230,11231,11232,11233,11234,11235,11236,11237,11238,11239,11240,11241, #11136
11242,11243,11244,11245,11246,11247,11248,11249,11250,11251,4959,11252,11253,11254,11255,11256, #11152
11257,11258,11259,11260,11261,11262,11263,11264,11265,11266,11267,11268,11269,11270,11271,11272, #11168
11273,11274,11275,11276,11277,11278,11279,11280,11281,11282,11283,11284,11285,11286,11287,11288, #11184
11289,11290,11291,11292,11293,11294,11295,11296,11297,11298,11299,11300,11301,11302,11303,11304, #11200
11305,11306,11307,11308,11309,11310,11311,11312,11313,11314,3679,11315,11316,11317,11318,4490, #11216
11319,11320,11321,11322,11323,11324,11325,11326,11327,11328,11329,11330,11331,11332,11333,11334, #11232
11335,11336,11337,11338,11339,11340,11341,11342,11343,11344,11345,11346,11347,4960,11348,11349, #11248
11350,11351,11352,11353,11354,11355,11356,11357,11358,11359,11360,11361,11362,11363,11364,11365, #11264
11366,11367,11368,11369,11370,11371,11372,11373,11374,11375,11376,11377,3961,4961,11378,11379, #11280
11380,11381,11382,11383,11384,11385,11386,11387,11388,11389,11390,11391,11392,11393,11394,11395, #11296
11396,11397,4192,11398,11399,11400,11401,11402,11403,11404,11405,11406,11407,11408,11409,11410, #11312
11411,4962,11412,11413,11414,11415,11416,11417,11418,11419,11420,11421,11422,11423,11424,11425, #11328
11426,11427,11428,11429,11430,11431,11432,11433,11434,11435,11436,11437,11438,11439,11440,11441, #11344
11442,11443,11444,11445,11446,11447,11448,11449,11450,11451,11452,11453,11454,11455,11456,11457, #11360
11458,11459,11460,11461,11462,11463,11464,11465,11466,11467,11468,11469,4963,11470,11471,4491, #11376
11472,11473,11474,11475,4964,11476,11477,11478,11479,11480,11481,11482,11483,11484,11485,11486, #11392
11487,11488,11489,11490,11491,11492,4965,11493,11494,11495,11496,11497,11498,11499,11500,11501, #11408
11502,11503,11504,11505,11506,11507,11508,11509,11510,11511,11512,11513,11514,11515,11516,11517, #11424
11518,11519,11520,11521,11522,11523,11524,11525,11526,11527,11528,11529,3962,11530,11531,11532, #11440
11533,11534,11535,11536,11537,11538,11539,11540,11541,11542,11543,11544,11545,11546,11547,11548, #11456
11549,11550,11551,11552,11553,11554,11555,11556,11557,11558,11559,11560,11561,11562,11563,11564, #11472
4193,4194,11565,11566,11567,11568,11569,11570,11571,11572,11573,11574,11575,11576,11577,11578, #11488
11579,11580,11581,11582,11583,11584,11585,11586,11587,11588,11589,11590,11591,4966,4195,11592, #11504
11593,11594,11595,11596,11597,11598,11599,11600,11601,11602,11603,11604,3090,11605,11606,11607, #11520
11608,11609,11610,4967,11611,11612,11613,11614,11615,11616,11617,11618,11619,11620,11621,11622, #11536
11623,11624,11625,11626,11627,11628,11629,11630,11631,11632,11633,11634,11635,11636,11637,11638, #11552
11639,11640,11641,11642,11643,11644,11645,11646,11647,11648,11649,11650,11651,11652,11653,11654, #11568
11655,11656,11657,11658,11659,11660,11661,11662,11663,11664,11665,11666,11667,11668,11669,11670, #11584
11671,11672,11673,11674,4968,11675,11676,11677,11678,11679,11680,11681,11682,11683,11684,11685, #11600
11686,11687,11688,11689,11690,11691,11692,11693,3809,11694,11695,11696,11697,11698,11699,11700, #11616
11701,11702,11703,11704,11705,11706,11707,11708,11709,11710,11711,11712,11713,11714,11715,11716, #11632
11717,11718,3553,11719,11720,11721,11722,11723,11724,11725,11726,11727,11728,11729,11730,4969, #11648
11731,11732,11733,11734,11735,11736,11737,11738,11739,11740,4492,11741,11742,11743,11744,11745, #11664
11746,11747,11748,11749,11750,11751,11752,4970,11753,11754,11755,11756,11757,11758,11759,11760, #11680
11761,11762,11763,11764,11765,11766,11767,11768,11769,11770,11771,11772,11773,11774,11775,11776, #11696
11777,11778,11779,11780,11781,11782,11783,11784,11785,11786,11787,11788,11789,11790,4971,11791, #11712
11792,11793,11794,11795,11796,11797,4972,11798,11799,11800,11801,11802,11803,11804,11805,11806, #11728
11807,11808,11809,11810,4973,11811,11812,11813,11814,11815,11816,11817,11818,11819,11820,11821, #11744
11822,11823,11824,11825,11826,11827,11828,11829,11830,11831,11832,11833,11834,3680,3810,11835, #11760
11836,4974,11837,11838,11839,11840,11841,11842,11843,11844,11845,11846,11847,11848,11849,11850, #11776
11851,11852,11853,11854,11855,11856,11857,11858,11859,11860,11861,11862,11863,11864,11865,11866, #11792
11867,11868,11869,11870,11871,11872,11873,11874,11875,11876,11877,11878,11879,11880,11881,11882, #11808
11883,11884,4493,11885,11886,11887,11888,11889,11890,11891,11892,11893,11894,11895,11896,11897, #11824
11898,11899,11900,11901,11902,11903,11904,11905,11906,11907,11908,11909,11910,11911,11912,11913, #11840
11914,11915,4975,11916,11917,11918,11919,11920,11921,11922,11923,11924,11925,11926,11927,11928, #11856
11929,11930,11931,11932,11933,11934,11935,11936,11937,11938,11939,11940,11941,11942,11943,11944, #11872
11945,11946,11947,11948,11949,4976,11950,11951,11952,11953,11954,11955,11956,11957,11958,11959, #11888
11960,11961,11962,11963,11964,11965,11966,11967,11968,11969,11970,11971,11972,11973,11974,11975, #11904
11976,11977,11978,11979,11980,11981,11982,11983,11984,11985,11986,11987,4196,11988,11989,11990, #11920
11991,11992,4977,11993,11994,11995,11996,11997,11998,11999,12000,12001,12002,12003,12004,12005, #11936
12006,12007,12008,12009,12010,12011,12012,12013,12014,12015,12016,12017,12018,12019,12020,12021, #11952
12022,12023,12024,12025,12026,12027,12028,12029,12030,12031,12032,12033,12034,12035,12036,12037, #11968
12038,12039,12040,12041,12042,12043,12044,12045,12046,12047,12048,12049,12050,12051,12052,12053, #11984
12054,12055,12056,12057,12058,12059,12060,12061,4978,12062,12063,12064,12065,12066,12067,12068, #12000
12069,12070,12071,12072,12073,12074,12075,12076,12077,12078,12079,12080,12081,12082,12083,12084, #12016
12085,12086,12087,12088,12089,12090,12091,12092,12093,12094,12095,12096,12097,12098,12099,12100, #12032
12101,12102,12103,12104,12105,12106,12107,12108,12109,12110,12111,12112,12113,12114,12115,12116, #12048
12117,12118,12119,12120,12121,12122,12123,4979,12124,12125,12126,12127,12128,4197,12129,12130, #12064
12131,12132,12133,12134,12135,12136,12137,12138,12139,12140,12141,12142,12143,12144,12145,12146, #12080
12147,12148,12149,12150,12151,12152,12153,12154,4980,12155,12156,12157,12158,12159,12160,4494, #12096
12161,12162,12163,12164,3811,12165,12166,12167,12168,12169,4495,12170,12171,4496,12172,12173, #12112
12174,12175,12176,3812,12177,12178,12179,12180,12181,12182,12183,12184,12185,12186,12187,12188, #12128
12189,12190,12191,12192,12193,12194,12195,12196,12197,12198,12199,12200,12201,12202,12203,12204, #12144
12205,12206,12207,12208,12209,12210,12211,12212,12213,12214,12215,12216,12217,12218,12219,12220, #12160
12221,4981,12222,12223,12224,12225,12226,12227,12228,12229,12230,12231,12232,12233,12234,12235, #12176
4982,12236,12237,12238,12239,12240,12241,12242,12243,12244,12245,4983,12246,12247,12248,12249, #12192
4984,12250,12251,12252,12253,12254,12255,12256,12257,12258,12259,12260,12261,12262,12263,12264, #12208
4985,12265,4497,12266,12267,12268,12269,12270,12271,12272,12273,12274,12275,12276,12277,12278, #12224
12279,12280,12281,12282,12283,12284,12285,12286,12287,4986,12288,12289,12290,12291,12292,12293, #12240
12294,12295,12296,2473,12297,12298,12299,12300,12301,12302,12303,12304,12305,12306,12307,12308, #12256
12309,12310,12311,12312,12313,12314,12315,12316,12317,12318,12319,3963,12320,12321,12322,12323, #12272
12324,12325,12326,12327,12328,12329,12330,12331,12332,4987,12333,12334,12335,12336,12337,12338, #12288
12339,12340,12341,12342,12343,12344,12345,12346,12347,12348,12349,12350,12351,12352,12353,12354, #12304
12355,12356,12357,12358,12359,3964,12360,12361,12362,12363,12364,12365,12366,12367,12368,12369, #12320
12370,3965,12371,12372,12373,12374,12375,12376,12377,12378,12379,12380,12381,12382,12383,12384, #12336
12385,12386,12387,12388,12389,12390,12391,12392,12393,12394,12395,12396,12397,12398,12399,12400, #12352
12401,12402,12403,12404,12405,12406,12407,12408,4988,12409,12410,12411,12412,12413,12414,12415, #12368
12416,12417,12418,12419,12420,12421,12422,12423,12424,12425,12426,12427,12428,12429,12430,12431, #12384
12432,12433,12434,12435,12436,12437,12438,3554,12439,12440,12441,12442,12443,12444,12445,12446, #12400
12447,12448,12449,12450,12451,12452,12453,12454,12455,12456,12457,12458,12459,12460,12461,12462, #12416
12463,12464,4989,12465,12466,12467,12468,12469,12470,12471,12472,12473,12474,12475,12476,12477, #12432
12478,12479,12480,4990,12481,12482,12483,12484,12485,12486,12487,12488,12489,4498,12490,12491, #12448
12492,12493,12494,12495,12496,12497,12498,12499,12500,12501,12502,12503,12504,12505,12506,12507, #12464
12508,12509,12510,12511,12512,12513,12514,12515,12516,12517,12518,12519,12520,12521,12522,12523, #12480
12524,12525,12526,12527,12528,12529,12530,12531,12532,12533,12534,12535,12536,12537,12538,12539, #12496
12540,12541,12542,12543,12544,12545,12546,12547,12548,12549,12550,12551,4991,12552,12553,12554, #12512
12555,12556,12557,12558,12559,12560,12561,12562,12563,12564,12565,12566,12567,12568,12569,12570, #12528
12571,12572,12573,12574,12575,12576,12577,12578,3036,12579,12580,12581,12582,12583,3966,12584, #12544
12585,12586,12587,12588,12589,12590,12591,12592,12593,12594,12595,12596,12597,12598,12599,12600, #12560
12601,12602,12603,12604,12605,12606,12607,12608,12609,12610,12611,12612,12613,12614,12615,12616, #12576
12617,12618,12619,12620,12621,12622,12623,12624,12625,12626,12627,12628,12629,12630,12631,12632, #12592
12633,12634,12635,12636,12637,12638,12639,12640,12641,12642,12643,12644,12645,12646,4499,12647, #12608
12648,12649,12650,12651,12652,12653,12654,12655,12656,12657,12658,12659,12660,12661,12662,12663, #12624
12664,12665,12666,12667,12668,12669,12670,12671,12672,12673,12674,12675,12676,12677,12678,12679, #12640
12680,12681,12682,12683,12684,12685,12686,12687,12688,12689,12690,12691,12692,12693,12694,12695, #12656
12696,12697,12698,4992,12699,12700,12701,12702,12703,12704,12705,12706,12707,12708,12709,12710, #12672
12711,12712,12713,12714,12715,12716,12717,12718,12719,12720,12721,12722,12723,12724,12725,12726, #12688
12727,12728,12729,12730,12731,12732,12733,12734,12735,12736,12737,12738,12739,12740,12741,12742, #12704
12743,12744,12745,12746,12747,12748,12749,12750,12751,12752,12753,12754,12755,12756,12757,12758, #12720
12759,12760,12761,12762,12763,12764,12765,12766,12767,12768,12769,12770,12771,12772,12773,12774, #12736
12775,12776,12777,12778,4993,2175,12779,12780,12781,12782,12783,12784,12785,12786,4500,12787, #12752
12788,12789,12790,12791,12792,12793,12794,12795,12796,12797,12798,12799,12800,12801,12802,12803, #12768
12804,12805,12806,12807,12808,12809,12810,12811,12812,12813,12814,12815,12816,12817,12818,12819, #12784
12820,12821,12822,12823,12824,12825,12826,4198,3967,12827,12828,12829,12830,12831,12832,12833, #12800
12834,12835,12836,12837,12838,12839,12840,12841,12842,12843,12844,12845,12846,12847,12848,12849, #12816
12850,12851,12852,12853,12854,12855,12856,12857,12858,12859,12860,12861,4199,12862,12863,12864, #12832
12865,12866,12867,12868,12869,12870,12871,12872,12873,12874,12875,12876,12877,12878,12879,12880, #12848
12881,12882,12883,12884,12885,12886,12887,4501,12888,12889,12890,12891,12892,12893,12894,12895, #12864
12896,12897,12898,12899,12900,12901,12902,12903,12904,12905,12906,12907,12908,12909,12910,12911, #12880
12912,4994,12913,12914,12915,12916,12917,12918,12919,12920,12921,12922,12923,12924,12925,12926, #12896
12927,12928,12929,12930,12931,12932,12933,12934,12935,12936,12937,12938,12939,12940,12941,12942, #12912
12943,12944,12945,12946,12947,12948,12949,12950,12951,12952,12953,12954,12955,12956,1772,12957, #12928
12958,12959,12960,12961,12962,12963,12964,12965,12966,12967,12968,12969,12970,12971,12972,12973, #12944
12974,12975,12976,12977,12978,12979,12980,12981,12982,12983,12984,12985,12986,12987,12988,12989, #12960
12990,12991,12992,12993,12994,12995,12996,12997,4502,12998,4503,12999,13000,13001,13002,13003, #12976
4504,13004,13005,13006,13007,13008,13009,13010,13011,13012,13013,13014,13015,13016,13017,13018, #12992
13019,13020,13021,13022,13023,13024,13025,13026,13027,13028,13029,3449,13030,13031,13032,13033, #13008
13034,13035,13036,13037,13038,13039,13040,13041,13042,13043,13044,13045,13046,13047,13048,13049, #13024
13050,13051,13052,13053,13054,13055,13056,13057,13058,13059,13060,13061,13062,13063,13064,13065, #13040
13066,13067,13068,13069,13070,13071,13072,13073,13074,13075,13076,13077,13078,13079,13080,13081, #13056
13082,13083,13084,13085,13086,13087,13088,13089,13090,13091,13092,13093,13094,13095,13096,13097, #13072
13098,13099,13100,13101,13102,13103,13104,13105,13106,13107,13108,13109,13110,13111,13112,13113, #13088
13114,13115,13116,13117,13118,3968,13119,4995,13120,13121,13122,13123,13124,13125,13126,13127, #13104
4505,13128,13129,13130,13131,13132,13133,13134,4996,4506,13135,13136,13137,13138,13139,4997, #13120
13140,13141,13142,13143,13144,13145,13146,13147,13148,13149,13150,13151,13152,13153,13154,13155, #13136
13156,13157,13158,13159,4998,13160,13161,13162,13163,13164,13165,13166,13167,13168,13169,13170, #13152
13171,13172,13173,13174,13175,13176,4999,13177,13178,13179,13180,13181,13182,13183,13184,13185, #13168
13186,13187,13188,13189,13190,13191,13192,13193,13194,13195,13196,13197,13198,13199,13200,13201, #13184
13202,13203,13204,13205,13206,5000,13207,13208,13209,13210,13211,13212,13213,13214,13215,13216, #13200
13217,13218,13219,13220,13221,13222,13223,13224,13225,13226,13227,4200,5001,13228,13229,13230, #13216
13231,13232,13233,13234,13235,13236,13237,13238,13239,13240,3969,13241,13242,13243,13244,3970, #13232
13245,13246,13247,13248,13249,13250,13251,13252,13253,13254,13255,13256,13257,13258,13259,13260, #13248
13261,13262,13263,13264,13265,13266,13267,13268,3450,13269,13270,13271,13272,13273,13274,13275, #13264
13276,5002,13277,13278,13279,13280,13281,13282,13283,13284,13285,13286,13287,13288,13289,13290, #13280
13291,13292,13293,13294,13295,13296,13297,13298,13299,13300,13301,13302,3813,13303,13304,13305, #13296
13306,13307,13308,13309,13310,13311,13312,13313,13314,13315,13316,13317,13318,13319,13320,13321, #13312
13322,13323,13324,13325,13326,13327,13328,4507,13329,13330,13331,13332,13333,13334,13335,13336, #13328
13337,13338,13339,13340,13341,5003,13342,13343,13344,13345,13346,13347,13348,13349,13350,13351, #13344
13352,13353,13354,13355,13356,13357,13358,13359,13360,13361,13362,13363,13364,13365,13366,13367, #13360
5004,13368,13369,13370,13371,13372,13373,13374,13375,13376,13377,13378,13379,13380,13381,13382, #13376
13383,13384,13385,13386,13387,13388,13389,13390,13391,13392,13393,13394,13395,13396,13397,13398, #13392
13399,13400,13401,13402,13403,13404,13405,13406,13407,13408,13409,13410,13411,13412,13413,13414, #13408
13415,13416,13417,13418,13419,13420,13421,13422,13423,13424,13425,13426,13427,13428,13429,13430, #13424
13431,13432,4508,13433,13434,13435,4201,13436,13437,13438,13439,13440,13441,13442,13443,13444, #13440
13445,13446,13447,13448,13449,13450,13451,13452,13453,13454,13455,13456,13457,5005,13458,13459, #13456
13460,13461,13462,13463,13464,13465,13466,13467,13468,13469,13470,4509,13471,13472,13473,13474, #13472
13475,13476,13477,13478,13479,13480,13481,13482,13483,13484,13485,13486,13487,13488,13489,13490, #13488
13491,13492,13493,13494,13495,13496,13497,13498,13499,13500,13501,13502,13503,13504,13505,13506, #13504
13507,13508,13509,13510,13511,13512,13513,13514,13515,13516,13517,13518,13519,13520,13521,13522, #13520
13523,13524,13525,13526,13527,13528,13529,13530,13531,13532,13533,13534,13535,13536,13537,13538, #13536
13539,13540,13541,13542,13543,13544,13545,13546,13547,13548,13549,13550,13551,13552,13553,13554, #13552
13555,13556,13557,13558,13559,13560,13561,13562,13563,13564,13565,13566,13567,13568,13569,13570, #13568
13571,13572,13573,13574,13575,13576,13577,13578,13579,13580,13581,13582,13583,13584,13585,13586, #13584
13587,13588,13589,13590,13591,13592,13593,13594,13595,13596,13597,13598,13599,13600,13601,13602, #13600
13603,13604,13605,13606,13607,13608,13609,13610,13611,13612,13613,13614,13615,13616,13617,13618, #13616
13619,13620,13621,13622,13623,13624,13625,13626,13627,13628,13629,13630,13631,13632,13633,13634, #13632
13635,13636,13637,13638,13639,13640,13641,13642,5006,13643,13644,13645,13646,13647,13648,13649, #13648
13650,13651,5007,13652,13653,13654,13655,13656,13657,13658,13659,13660,13661,13662,13663,13664, #13664
13665,13666,13667,13668,13669,13670,13671,13672,13673,13674,13675,13676,13677,13678,13679,13680, #13680
13681,13682,13683,13684,13685,13686,13687,13688,13689,13690,13691,13692,13693,13694,13695,13696, #13696
13697,13698,13699,13700,13701,13702,13703,13704,13705,13706,13707,13708,13709,13710,13711,13712, #13712
13713,13714,13715,13716,13717,13718,13719,13720,13721,13722,13723,13724,13725,13726,13727,13728, #13728
13729,13730,13731,13732,13733,13734,13735,13736,13737,13738,13739,13740,13741,13742,13743,13744, #13744
13745,13746,13747,13748,13749,13750,13751,13752,13753,13754,13755,13756,13757,13758,13759,13760, #13760
13761,13762,13763,13764,13765,13766,13767,13768,13769,13770,13771,13772,13773,13774,3273,13775, #13776
13776,13777,13778,13779,13780,13781,13782,13783,13784,13785,13786,13787,13788,13789,13790,13791, #13792
13792,13793,13794,13795,13796,13797,13798,13799,13800,13801,13802,13803,13804,13805,13806,13807, #13808
13808,13809,13810,13811,13812,13813,13814,13815,13816,13817,13818,13819,13820,13821,13822,13823, #13824
13824,13825,13826,13827,13828,13829,13830,13831,13832,13833,13834,13835,13836,13837,13838,13839, #13840
13840,13841,13842,13843,13844,13845,13846,13847,13848,13849,13850,13851,13852,13853,13854,13855, #13856
13856,13857,13858,13859,13860,13861,13862,13863,13864,13865,13866,13867,13868,13869,13870,13871, #13872
13872,13873,13874,13875,13876,13877,13878,13879,13880,13881,13882,13883,13884,13885,13886,13887, #13888
13888,13889,13890,13891,13892,13893,13894,13895,13896,13897,13898,13899,13900,13901,13902,13903, #13904
13904,13905,13906,13907,13908,13909,13910,13911,13912,13913,13914,13915,13916,13917,13918,13919, #13920
13920,13921,13922,13923,13924,13925,13926,13927,13928,13929,13930,13931,13932,13933,13934,13935, #13936
13936,13937,13938,13939,13940,13941,13942,13943,13944,13945,13946,13947,13948,13949,13950,13951, #13952
13952,13953,13954,13955,13956,13957,13958,13959,13960,13961,13962,13963,13964,13965,13966,13967, #13968
13968,13969,13970,13971,13972) #13973
# flake8: noqa
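
# Hedged illustration (added; not part of the original file): frequency
# tables like the one above are typically consulted by a chardet-style
# character distribution analyser, which counts a character as "frequent"
# when its table value falls below a cutoff (512 in classic chardet). This
# is a minimal sketch; the function and parameter names are assumptions,
# not the library's API.
def _is_frequent_char(order, char_to_freq_order, cutoff=512):
    # order: index of the decoded character in the encoding's ordering.
    return 0 <= order < len(char_to_freq_order) and char_to_freq_order[order] < cutoff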
| mit |
asascience-open/ooi-ui-services | ooiservices/app/uframe/events_create_update.py | 1 | 12767 | """
Asset Management - Events: Create and update functions.
"""
__author__ = 'Edna Donoughe'
from ooiservices.app.uframe.uframe_tools import (uframe_get_asset_by_uid, get_uframe_event, uframe_put_event,
uframe_postto, uframe_create_cruise, uframe_create_calibration)
from ooiservices.app.uframe.common_tools import (get_event_types, get_supported_event_types, get_event_class)
from ooiservices.app.uframe.common_tools import convert_status_value_for_display
from ooiservices.app.uframe.events_validate_fields import (events_validate_all_required_fields_are_provided,
events_validate_user_required_fields_are_provided)


# Create event.
def create_event_type(request_data):
""" Create a new event. Return new event on success, or raise exception on error.
Response on success:
{
"message" : "Element created successfully.",
"id" : 14501,
"statusCode" : "CREATED"
}
"""
action = 'create'
try:
# Verify minimum required fields to proceed with create (event_type and uid)
# Required field: event_type
if 'eventType' not in request_data:
message = 'No eventType in request data to create event.'
raise Exception(message)
event_type = request_data['eventType']
if event_type not in get_event_types():
message = 'The event type provided %s is invalid.' % event_type
raise Exception(message)
# If event type create/update not yet supported, raise exception.
if event_type not in get_supported_event_types():
message = 'Event type %s \'%s\' is not supported.' % (event_type, action)
raise Exception(message)
# Required field: assetUid
uid = None
if event_type != 'CRUISE_INFO':
if 'assetUid' not in request_data:
message = 'No assetUid in request data to create event %s.' % event_type
raise Exception(message)
uid = request_data['assetUid']
if not uid:
message = 'The assetUid is empty or null, unable to create a %s event.' % event_type
raise Exception(message)
# Event name not really provided by UI, fill with event type unless CALIBRATION event.
if event_type != 'CALIBRATION_DATA':
request_data['eventName'] = event_type
# Validate data fields to ensure required fields are provided for create.
data = events_validate_all_required_fields_are_provided(event_type, request_data, action=action)
events_validate_user_required_fields_are_provided(event_type, data, action=action)
# Add '@class' field to data; remove 'lastModifiedTimestamp' field; ensure eventId is set to -1.
# Get event class
event_class = get_event_class(event_type)
data['@class'] = event_class
if 'lastModifiedTimestamp' in data:
del data['lastModifiedTimestamp']
# Set eventId for create
data['eventId'] = -1
# Create event.
        id = perform_uframe_create_event(event_type, uid, data)
if id < 1:
message = 'Failed to create %s event for asset with uid %s' % (event_type, uid)
raise Exception(message)
# Get newly created event and return.
event = get_uframe_event(id)
# Post process event content for display.
event = post_process_event(event)
return event
except Exception as err:
message = str(err)
raise Exception(message)
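# Hypothetical usage sketch for create_event_type (payload fields beyond the
# ones validated above are assumptions, not a documented uframe contract):
#
#   request_data = {'eventType': 'CRUISE_INFO',
#                   'eventStartTime': 1443614400000}   # placeholder fields
#   event = create_event_type(request_data)            # created event dict
#
# CRUISE_INFO is the one supported type that does not require an assetUid;
# every other type must also carry a non-empty 'assetUid'.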
# Prepare event for display.
def post_process_event(event):
""" Process event from uframe before returning for display (in UI).
"""
try:
if not event:
message = 'The event provided for post processing is empty.'
raise Exception(message)
if '@class' in event:
del event['@class']
if 'eventType' in event:
if event['eventType'] == 'ASSET_STATUS':
event['status'] = convert_status_value_for_display(event['status'])
return event
except Exception as err:
message = 'Error post-processing event for display. %s' % str(err)
raise Exception(message)
# Update event.
def update_event_type(id, data):
""" Update an existing event, no success return event, on error raise exception.
"""
debug = False
action = 'update'
try:
# Verify minimum required fields to proceed with update (event_type and uid)
if 'eventId' not in data:
message = 'An event id must be provided in the request data.'
raise Exception(message)
# Required field: event_type
if 'eventType' not in data:
message = 'An event type must be provided in the request data.'
raise Exception(message)
# Get event type, verify if valid event type.
event_type = data['eventType']
if event_type not in get_event_types():
message = 'The event type provided %s is invalid.' % event_type
raise Exception(message)
# If event type create/update not yet supported, raise exception.
if event_type not in get_supported_event_types():
message = 'Event type %s \'%s\' is not supported.' % (event_type, action)
raise Exception(message)
# Event name not really provided by UI, fill with event type unless CALIBRATION event.
if event_type != 'CALIBRATION_DATA':
data['eventName'] = event_type
# Validate data fields to ensure required fields are provided for update.
data = events_validate_all_required_fields_are_provided(event_type, data, action=action)
events_validate_user_required_fields_are_provided(event_type, data, action=action)
# Verify uid provided in data for all event types except CRUISE_INFO.
uid = None
if event_type != 'CRUISE_INFO' and event_type != 'DEPLOYMENT':
# Required field: assetUid
if 'assetUid' not in data:
message = 'No assetUid in request data to update event %s.' % event_type
raise Exception(message)
uid = data['assetUid']
            if not uid:
message = 'The assetUid provided is empty or null, unable to update event %s.' % event_type
raise Exception(message)
# Verify eventId provided and of type int.
# Required field: eventId
if 'eventId' not in data:
message = 'No eventId in request data to update event %s.' % event_type
raise Exception(message)
if not isinstance(data['eventId'], int):
message = 'The event id value (%r) must be an integer, it is type: %s' % \
(data['eventId'], str(type(data['eventId'])))
raise Exception(message)
if data['eventId'] != id:
message = 'The event id (\'%r\') provided in data is not equal to id (%d) in url.' % (data['eventId'], id)
raise Exception(message)
# Get event class and add @class field to data
event_class = get_event_class(event_type)
data['@class'] = event_class
# Update event in uframe
updated_id = uframe_put_event(event_type, id, data)
if updated_id <= 0:
message = 'Failed to update %s event in uframe for id %d.' % (event_type, id)
raise Exception(message)
        if updated_id != id:
            message = 'The event id returned from event update (%d) is not equal to original id (%d).' % (updated_id, id)
            raise Exception(message)
# Get updated event, return event
event = get_uframe_event(id)
if debug: print '\n event: ', event
if event['eventType'] == 'ASSET_STATUS':
event['status'] = convert_status_value_for_display(event['status'])
if debug: print '\n event[status]: ', event['status']
return event
except Exception as err:
message = str(err)
raise Exception(message)
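# Illustrative call shape for update_event_type (values are placeholders,
# not taken from a real deployment):
#
#   data = {'eventId': 14501, 'eventType': 'ASSET_STATUS',
#           'assetUid': 'A00001', 'status': 1}   # hypothetical payload
#   event = update_event_type(14501, data)
#
# data['eventId'] must be an int equal to the id passed in the URL, and the
# returned event has its status value converted for display.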
def perform_uframe_create_event(event_type, uid, data):
""" Create event using uframe interface determined by event type.
"""
try:
if event_type != 'CRUISE_INFO':
if uid is None or not uid:
message = 'Unable to create %s event for asset with uid: \'%s\'.' % (event_type, uid)
raise Exception(message)
        # Create cruise_info event using /events/cruise POST
if event_type == 'CRUISE_INFO':
id = uframe_create_cruise(event_type, data)
# Create calibration_data event
elif event_type == 'CALIBRATION_DATA':
if not isinstance(data['eventId'], int):
message = 'The event id value (%r) must be an integer, it is type: %s' % \
(data['eventId'], str(type(data['eventId'])))
raise Exception(message)
id = create_calibration_data_event(event_type, uid, data)
# Create event using /events/postto/uid POST
else:
if event_type == 'DEPLOYMENT':
message = 'Create event type DEPLOYMENT is not supported through the events create/update interface.'
raise Exception(message)
id = uframe_postto(event_type, uid, data)
if id is None or id <= 0:
message = 'Failed to create and retrieve event from uframe for asset uid: \'%s\'. ' % uid
raise Exception(message)
return id
except Exception as err:
message = str(err)
raise Exception(message)
def create_calibration_data_event(event_type, uid, data):
success_codes = [201, 204]
try:
# create calibration data using /assets/cal POST
event_name = None
if 'eventName' in data:
event_name = data['eventName']
if calibration_data_exists(uid, event_name):
message = 'Calibration data event name \'%s\' exists for asset with uid \'%s\'.' % (event_name, uid)
raise Exception(message)
status_code = uframe_create_calibration(event_type, uid, data)
if status_code not in success_codes:
message = 'Failed to create calibration data for asset uid \'%s\', event name \'%s\'.' % (uid, event_name)
raise Exception(message)
# Get eventId for calibration data event where eventName is event_name and asset uid is uid.
id, _ = get_calibration_event_id(uid, event_name)
return id
except Exception as err:
message = str(err)
raise Exception(message)
def get_calibration_event_id(uid, event_name):
"""
"calibration" : [ {
"@class" : ".XCalibration",
"name" : "CC_a1",
"calData" : [ {
"@class" : ".XCalibrationData",
"values" : [ -1.493703E-4 ],
"dimensions" : [ 1 ],
"cardinality" : 0,
"comments" : "Test entry",
"eventId" : 31534,
"assetUid" : "A01682",
"eventType" : "CALIBRATION_DATA",
"eventName" : "CC_a1",
"eventStartTime" : 1443614400000,
"eventStopTime" : null,
"notes" : null,
"dataSource" : "API:createCalibration:2016-08-31T22:37:22.096Z",
"lastModifiedTimestamp" : 1472683042096
} ]
} ],
"""
id = None
last_modified = None
try:
asset = uframe_get_asset_by_uid(uid)
calibrations = asset['calibration']
for cal in calibrations:
if 'name' in cal:
if cal['name'] == event_name:
# Get eventId
if 'calData' in cal:
for item in cal['calData']:
if 'eventId' in item:
id = item['eventId']
last_modified = item['lastModifiedTimestamp']
break
if id is None:
message = 'Failed to locate calibration name \'%s\' in asset with uid %s.' % (event_name, uid)
raise Exception(message)
return id, last_modified
except Exception as err:
message = str(err)
raise Exception(message)
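# Worked example based on the sample asset snippet in the docstring above:
# for uid 'A01682' and event_name 'CC_a1' the loop matches the calibration
# named 'CC_a1' and returns the first calData entry's id and timestamp:
#
#   get_calibration_event_id('A01682', 'CC_a1')  ->  (31534, 1472683042096)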
def calibration_data_exists(uid, event_name):
""" Determine if calibration data contains event name. Return True or False.
"""
result = False
try:
try:
event_id, _ = get_calibration_event_id(uid, event_name)
except:
event_id = 0
if event_id > 0:
result = True
return result
except Exception as err:
message = str(err)
raise Exception(message) | apache-2.0 |
miing/mci_migo | webui/views/devices.py | 1 | 11714 | # Copyright 2012 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import re
from base64 import b16encode
from collections import namedtuple
from django.contrib import messages
from django.contrib.auth.views import redirect_to_login
from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django.template.response import TemplateResponse
from django.views.generic import View
from django.utils.translation import ugettext as _
from gargoyle.decorators import switch_is_active
from gargoyle import gargoyle
from M2Crypto.Rand import rand_bytes
from oath.hotp import accept_hotp, hotp
from identityprovider.forms import HOTPDeviceForm, DeviceRenameForm
from identityprovider.models import AuthenticationDevice
from identityprovider.models import twofactor
from identityprovider.models.twofactor import get_otp_type
from webui.decorators import require_twofactor_enabled, sso_login_required
from webui.views.const import (
DEVICE_ADDED,
DEVICE_DELETED,
DEVICE_GENERATION_WARNING,
DEVICE_RENAMED,
OTP_MATCH_ERROR,
)
from webui.views.utils import HttpResponseSeeOther, allow_only
DEVICE_ADDITION = 'device-addition'
DEVICE_LIST = 'device-list'
CodePageDetails = namedtuple('CodePageDetails', 'codes page start position')
def get_context(request, **kwargs):
kwargs['current_section'] = 'devices'
return RequestContext(request, kwargs)
device_types = {
'yubi': _('Yubikey'),
'google': _('Google Authenticator'),
'generic': _('Authentication device'),
'paper': _('Printable Backup Codes'),
}
def generate_key(n):
"""Returns an OATH/HOTP key as a string of n raw bytes."""
# An OATH/HOTP key is just bunch of random (in the "unpredictable"
# sense) bits, of certain quantities (e.g. 160 bits or 20 bytes)
# that are compatible with the AES algorithms.
# From openssl's documentation:
#
# RAND_bytes() puts num cryptographically strong pseudo-random
# bytes into buf. An error occurs if the PRNG has not been
# seeded with enough randomness to ensure an unpredictable byte
# sequence.
#
# openssl's RAND_bytes(num) function is available in Python as
# M2Crypto.Rand.rand_bytes(num).
return b16encode(rand_bytes(n))
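# Example (sketch): generate_key(20) draws 20 random bytes and returns their
# base-16 encoding, i.e. a 40-character uppercase hex string. The value is
# random by design, but its length is deterministic:
#
#   len(generate_key(20)) == 40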
def get_unique_device_name_for_user(name, user):
"""Returns the name with an extra number to make it unique if it exists in
existing_names
"""
original_name = name
counter = 1
existing_names = [device.name for device in user.devices.all()]
while name in existing_names:
name = '%s (%d)' % (original_name, counter)
counter += 1
return name
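# Example (sketch): if the user already owns devices named 'Yubikey' and
# 'Yubikey (1)', then
#
#   get_unique_device_name_for_user('Yubikey', user)  ->  'Yubikey (2)'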
@sso_login_required
@require_twofactor_enabled
@allow_only('GET')
def device_list(request):
paper_renewals = list(request.user.paper_devices_needing_renewal)
context = get_context(
request, device_addition_path=reverse(DEVICE_ADDITION),
devices=request.user.devices.all(),
need_backup_device_warning=request.user.need_backup_device_warning,
paper_devices_needing_renewal=paper_renewals
)
return render_to_response('device/list.html', context)
@sso_login_required
@require_twofactor_enabled
@allow_only('GET', 'POST')
def device_addition(request):
if request.user.has_twofactor_devices():
if not (twofactor.is_upgraded(request) and
twofactor.is_fresh(request)):
return redirect_to_login(
request.get_full_path(),
reverse('twofactor')
)
if request.method == 'GET':
context = get_context(request, device_list_path=reverse(DEVICE_LIST))
return render_to_response('device/types.html', context)
device_type = request.POST.get('type')
if device_type not in device_types.keys():
return render_to_response('device/types.html', get_context(request))
if device_type == 'paper':
return _device_addition_paper(request)
return _device_addition_standard(request, device_type)
def _device_addition_paper(request):
hex_key = generate_key(20)
device_name = get_unique_device_name_for_user(device_types['paper'],
request.user)
device = _create_device(request, device_name, hex_key, 0, 'paper')
return HttpResponseSeeOther(reverse('device-print', args=(device.id,)))
def _device_addition_standard(request, device_type):
error = None
if 'hex_key' in request.POST:
hex_key = request.POST.get('hex_key')
else:
# TODO: 20 bytes = 160 bits; this will change based on
# device-type.
hex_key = generate_key(20)
if 'name' not in request.POST:
initial_name = get_unique_device_name_for_user(
device_types.get(device_type), request.user)
form = HOTPDeviceForm(initial={'name': initial_name})
else:
form = HOTPDeviceForm(request.POST)
if form.is_valid():
device_name = get_unique_device_name_for_user(
form.cleaned_data['name'], request.user)
otp = form.cleaned_data['otp']
otp_type = get_otp_type(otp)
accepted, new_counter = accept_hotp(
hex_key, otp, 0, otp_type, drift=settings.HOTP_DRIFT,
backward_drift=settings.HOTP_BACKWARDS_DRIFT)
if accepted:
_create_device(request, device_name, hex_key,
new_counter, device_type)
return HttpResponseSeeOther(reverse(DEVICE_LIST))
# Otherwise, set the error flag and fall through...
error = OTP_MATCH_ERROR
# Google would base32-encode, yubi would hex-encode, etc. There
# might even be multiple formats displayed simultaneously.
formatted_key = re.sub('(.{4})', r'\1 ', hex_key).strip()
ctx = get_context(
request,
device_list_path=reverse(DEVICE_LIST),
type=device_type,
ident="/".join([settings.TWOFACTOR_SERVICE_IDENT,
request.user.preferredemail.email]),
hex_key=hex_key,
form=form,
formatted_key=formatted_key,
error=error,
)
return render_to_response('device/addition-%s.html' % device_type, ctx)
def _create_device(request, device_name, hex_key, counter, device_type):
device = AuthenticationDevice.objects.create(
account=request.user,
name=device_name,
key=hex_key,
counter=counter,
device_type=device_type
)
twofactor.login(request)
messages.success(request,
DEVICE_ADDED.format(name=device_name), 'temporary')
return device
@switch_is_active('PAPER_DEVICE')
@sso_login_required(require_twofactor=True, require_twofactor_freshness=True)
@require_twofactor_enabled
@allow_only('GET')
def device_print(request, device_id):
device = _get_device_or_404(device_id, request.user)
if device.device_type != 'paper':
raise Http404
details = _codes_for_position(device)
remaining_codes = settings.TWOFACTOR_PAPER_CODES - details.position
generation_enabled = (
remaining_codes <= settings.TWOFACTOR_PAPER_CODES_ALLOW_GENERATION)
if generation_enabled:
messages.warning(request, DEVICE_GENERATION_WARNING)
context = get_context(
request,
codes=details.codes,
counter=details.position,
device_id=device.id,
generation_enabled=generation_enabled,
)
return TemplateResponse(request, 'device/print-codes.html', context)
def _codes_for_position(device, next_page=False):
# use integer division to round the "window" boundaries
page_size = settings.TWOFACTOR_PAPER_CODES
page, page_position = divmod(device.counter, page_size)
if next_page:
page += 1
page_start = page * page_size
codes = [hotp(device.key, i, 'dec6')
for i in range(page_start, page_start + page_size)]
return CodePageDetails(codes, page, page_start, page_position)
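# Worked example of the paging arithmetic above, assuming a hypothetical
# settings.TWOFACTOR_PAPER_CODES of 25: a device counter of 27 gives
#
#   divmod(27, 25)  ->  (1, 2)
#
# i.e. page 1, position 2 within that page, and page_start == 25, so codes
# 25..49 are generated.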
@switch_is_active('PAPER_DEVICE')
@sso_login_required(require_twofactor=True, require_twofactor_freshness=True)
@require_twofactor_enabled
@allow_only('GET', 'POST')
def device_generate(request, device_id):
device = _get_device_or_404(device_id, request.user)
if device.device_type != 'paper':
raise Http404
# find the next page of codes
details = _codes_for_position(device, next_page=True)
if request.method == 'GET':
context = get_context(
request,
codes=details.codes,
device_id=device.id,
)
return TemplateResponse(request, 'device/generate-codes.html', context)
device.counter = details.start
device.save()
return HttpResponseRedirect(reverse('device-print', args=(device.id,)))
def _get_device_or_404(device_id, user):
"""Explicit helper function to ensure we don't forget to limit by user."""
return get_object_or_404(AuthenticationDevice, id=device_id, account=user)
@sso_login_required(require_twofactor=True, require_twofactor_freshness=True)
@require_twofactor_enabled
@allow_only('GET', 'POST')
def device_removal(request, device_id):
device = _get_device_or_404(device_id, request.user)
if request.method != 'POST':
context = get_context(request, device_list_path=reverse(DEVICE_LIST),
name=device.name)
return render_to_response('device/removal.html', context)
device.delete()
# We should probably send an e-mail to the user stating which
# device was removed. As a security measure, this would be much
# stronger if bugs #784813, #784817, and #784818 were done.
if not request.user.has_twofactor_devices():
request.user.twofactor_required = False
request.user.save()
twofactor.logout(request)
messages.success(request, DEVICE_DELETED.format(name=device.name))
return HttpResponseSeeOther('/device-list')
class DeviceRenameView(View):
def get(self, request, device_id):
device = _get_device_or_404(device_id, request.user)
form = DeviceRenameForm({'name': device.name})
context = get_context(
request, device_list_path=reverse(DEVICE_LIST), form=form)
return render_to_response('device/rename.html', context)
def post(self, request, device_id):
device = _get_device_or_404(device_id, request.user)
form = DeviceRenameForm(request.POST)
if form.is_valid():
original_name = device.name
device.name = form.cleaned_data['name']
device.save()
messages.success(request,
DEVICE_RENAMED.format(original=original_name,
renamed=device.name))
return HttpResponseRedirect(reverse(DEVICE_LIST))
context = get_context(
request, device_list_path=reverse(DEVICE_LIST), form=form)
return render_to_response('device/rename.html', context)
device_rename = sso_login_required(
require_twofactor=True,
require_twofactor_freshness=True)(DeviceRenameView.as_view())
@allow_only('GET')
def device_help(request):
if gargoyle.is_active('CAN_VIEW_SUPPORT_PHONE', request.user):
support_phone = settings.SUPPORT_PHONE
else:
support_phone = ''
context = RequestContext(request, {'support_phone': support_phone})
return render_to_response('device/device-help.html', context)
| agpl-3.0 |
GabrieleAndrea/MEGAnnotator | bin/SPAdes/share/spades/pyyaml3/reader.py | 272 | 6854 | # This module contains abstractions for the input stream. You don't have to
# look further, there is no pretty code.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position forward by `length` characters.
# reader.index - the number of the current character.
# reader.line, reader.column - the line and the column of the current character.
__all__ = ['Reader', 'ReaderError']
from .error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, bytes):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to a unicode string,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `bytes` object,
# - a `str` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
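    # Usage sketch (not part of the documented API): any accepted input can
    # be passed straight to the constructor, e.g.
    #
    #   reader = Reader(open('doc.yaml', 'rb').read())  # bytes: BOM-sniffed
    #   reader = Reader('key: value\n')                 # str: used verbatim
    #
    # after which peek()/forward() walk the decoded character stream.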
def __init__(self, stream):
self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = ''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, str):
self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+'\0'
elif isinstance(stream, bytes):
self.name = "<byte string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = None
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in '\n\x85\u2028\u2029' \
or (ch == '\r' and self.buffer[self.pointer] != '\n'):
self.line += 1
self.column = 0
elif ch != '\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
self.update_raw()
if isinstance(self.raw_buffer, bytes):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError as exc:
character = self.raw_buffer[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += '\0'
self.raw_buffer = None
break
def update_raw(self, size=4096):
data = self.stream.read(size)
if self.raw_buffer is None:
self.raw_buffer = data
else:
self.raw_buffer += data
self.stream_pointer += len(data)
if not data:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass
| gpl-3.0 |
Godiyos/python-for-android | python-modules/twisted/twisted/python/dxprofile.py | 61 | 1528 | # Copyright (c) 2001-2007 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
DEPRECATED since Twisted 8.0.
Utility functions for reporting bytecode frequencies to Skip Montanaro's
stat collector.
This module requires a version of Python built with DYNAMIC_EXECUTION_PROFILE,
and optionally DXPAIRS, defined to be useful.
"""
import sys, types, xmlrpclib, warnings
warnings.warn("twisted.python.dxprofile is deprecated since Twisted 8.0.",
category=DeprecationWarning)
def rle(iterable):
"""
Run length encode a list.
"""
iterable = iter(iterable)
runlen = 1
result = []
try:
previous = iterable.next()
except StopIteration:
return []
for element in iterable:
if element == previous:
runlen = runlen + 1
continue
else:
if isinstance(previous, (types.ListType, types.TupleType)):
previous = rle(previous)
result.append([previous, runlen])
previous = element
runlen = 1
if isinstance(previous, (types.ListType, types.TupleType)):
previous = rle(previous)
result.append([previous, runlen])
return result
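# Example (sketch): rle([1, 1, 2, 3, 3, 3]) -> [[1, 2], [2, 1], [3, 3]];
# nested lists or tuples (the DXPAIRS case) are themselves run-length
# encoded before being counted.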
def report(email, appname):
"""
Send an RLE encoded version of sys.getdxp() off to our Top Men (tm)
for analysis.
"""
if hasattr(sys, 'getdxp') and appname:
dxp = xmlrpclib.ServerProxy("http://manatee.mojam.com:7304")
dxp.add_dx_info(appname, email, sys.version_info[:3], rle(sys.getdxp()))
| apache-2.0 |
kingvuplus/boom2 | lib/python/Plugins/SystemPlugins/AnimationSetup/plugin.py | 15 | 8964 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Components.ActionMap import ActionMap
from Components.ConfigList import ConfigListScreen
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigNumber, ConfigSelectionNumber, getConfigListEntry
from Plugins.Plugin import PluginDescriptor
from enigma import setAnimation_current, setAnimation_speed
# default = slide to left
g_default = {
"current": 0,
"speed" : 20,
}
g_max_speed = 30
g_animation_paused = False
g_orig_show = None
g_orig_doClose = None
config.misc.window_animation_default = ConfigNumber(default=g_default["current"])
config.misc.window_animation_speed = ConfigSelectionNumber(1, g_max_speed, 1, default=g_default["speed"])
class AnimationSetupConfig(ConfigListScreen, Screen):
skin= """
<screen position="center,center" size="600,140" title="Animation Settings">
<widget name="config" position="0,0" size="600,100" scrollbarMode="showOnDemand" />
<ePixmap pixmap="skin_default/buttons/red.png" position="0,100" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,100" size="140,40" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,100" size="140,40" alphatest="on" />
<widget source="key_red" render="Label" position="0,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,100" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
</screen>
"""
def __init__(self, session):
self.session = session
self.entrylist = []
Screen.__init__(self, session)
ConfigListScreen.__init__(self, self.entrylist)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions",], {
"ok" : self.keyGreen,
"green" : self.keyGreen,
"yellow" : self.keyYellow,
"red" : self.keyRed,
"cancel" : self.keyRed,
}, -2)
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText(_("Default"))
self.makeConfigList()
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
self.setTitle(_('Animation Setup'))
def keyGreen(self):
config.misc.window_animation_speed.save()
setAnimation_speed(int(config.misc.window_animation_speed.value))
self.close()
def keyRed(self):
config.misc.window_animation_speed.cancel()
self.close()
def keyYellow(self):
global g_default
config.misc.window_animation_speed.value = g_default["speed"]
self.makeConfigList()
def keyLeft(self):
ConfigListScreen.keyLeft(self)
def keyRight(self):
ConfigListScreen.keyRight(self)
def makeConfigList(self):
self.entrylist = []
entrySpeed = getConfigListEntry(_("Animation Speed"), config.misc.window_animation_speed)
self.entrylist.append(entrySpeed)
self["config"].list = self.entrylist
self["config"].l.setList(self.entrylist)
class AnimationSetupScreen(Screen):
animationSetupItems = [
{"idx":0, "name":_("Disable Animations")},
{"idx":1, "name":_("Simple fade")},
{"idx":2, "name":_("Grow drop")},
{"idx":3, "name":_("Grow from left")},
{"idx":4, "name":_("Popup")},
{"idx":5, "name":_("Slide drop")},
{"idx":6, "name":_("Slide left to right")},
{"idx":7, "name":_("Slide top to bottom")},
{"idx":8, "name":_("Stripes")},
]
skin = """
<screen name="AnimationSetup" position="center,center" size="580,400" title="Animation Setup">
<ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" zPosition="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" zPosition="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" zPosition="1" alphatest="on" />
<ePixmap pixmap="skin_default/buttons/blue.png" position="420,0" size="140,40" zPosition="1" alphatest="on" />
<widget source="key_red" render="Label" position="0,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#9f1313" transparent="1" />
<widget source="key_green" render="Label" position="140,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#a08500" transparent="1" />
<widget source="key_blue" render="Label" position="420,0" zPosition="2" size="140,40" font="Regular;20" halign="center" valign="center" foregroundColor="#ffffff" backgroundColor="#18188b" transparent="1" />
<widget name="list" position="10,60" size="560,364" scrollbarMode="showOnDemand" />
<widget source="introduction" render="Label" position="0,370" size="560,40" zPosition="10" font="Regular;20" valign="center" backgroundColor="#25062748" transparent="1" />
</screen>"""
def __init__(self, session):
self.skin = AnimationSetupScreen.skin
Screen.__init__(self, session)
self.animationList = []
self["introduction"] = StaticText(_("* current animation"))
self["key_red"] = StaticText(_("Cancel"))
self["key_green"] = StaticText(_("Save"))
self["key_yellow"] = StaticText(_("Setting"))
self["key_blue"] = StaticText(_("Preview"))
self["actions"] = ActionMap(["SetupActions", "ColorActions"],
{
"cancel": self.keyclose,
"save": self.ok,
"ok" : self.ok,
"yellow": self.config,
"blue": self.preview
}, -3)
self["list"] = MenuList(self.animationList)
self.onLayoutFinish.append(self.layoutFinished)
def layoutFinished(self):
l = []
for x in self.animationSetupItems:
key = x.get("idx", 0)
name = x.get("name", "??")
if key == config.misc.window_animation_default.value:
name = "* %s" % (name)
l.append( (name, key) )
self["list"].setList(l)
def ok(self):
current = self["list"].getCurrent()
if current:
key = current[1]
config.misc.window_animation_default.value = key
config.misc.window_animation_default.save()
setAnimation_current(key)
self.close()
def keyclose(self):
setAnimation_current(config.misc.window_animation_default.value)
setAnimation_speed(int(config.misc.window_animation_speed.value))
self.close()
def config(self):
self.session.open(AnimationSetupConfig)
def preview(self):
current = self["list"].getCurrent()
if current:
global g_animation_paused
tmp = g_animation_paused
g_animation_paused = False
setAnimation_current(current[1])
self.session.open(MessageBox, current[0], MessageBox.TYPE_INFO, timeout=3)
g_animation_paused = tmp
def checkAttrib(self, paused):
global g_animation_paused
if g_animation_paused is paused and self.skinAttributes is not None:
for (attr, value) in self.skinAttributes:
if attr == "animationPaused" and value in ("1", "on"):
return True
return False
def screen_show(self):
global g_animation_paused
if g_animation_paused:
setAnimation_current(0)
g_orig_show(self)
if checkAttrib(self, False):
g_animation_paused = True
def screen_doClose(self):
global g_animation_paused
if checkAttrib(self, True):
g_animation_paused = False
setAnimation_current(config.misc.window_animation_default.value)
g_orig_doClose(self)
def animationSetupMain(session, **kwargs):
session.open(AnimationSetupScreen)
def startAnimationSetup(menuid):
if menuid != "osd_menu":
return []
return [( _("Animations"), animationSetupMain, "animation_setup", None)]
def sessionAnimationSetup(session, reason, **kwargs):
setAnimation_current(config.misc.window_animation_default.value)
setAnimation_speed(int(config.misc.window_animation_speed.value))
global g_orig_show, g_orig_doClose
if g_orig_show is None:
g_orig_show = Screen.show
if g_orig_doClose is None:
g_orig_doClose = Screen.doClose
Screen.show = screen_show
Screen.doClose = screen_doClose
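# Note on the wrap above: this is a deliberate monkey patch, so every
# Screen.show/doClose in the session now routes through screen_show and
# screen_doClose. A skin can then pause animations for a screen by setting
# the attribute read in checkAttrib, e.g. (illustrative skin snippet):
#
#   <screen name="SomeScreen" animationPaused="1" ... >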
def Plugins(**kwargs):
plugin_list = [
PluginDescriptor(
name = "Animations",
description = "Setup UI animations",
where = PluginDescriptor.WHERE_MENU,
needsRestart = False,
fnc = startAnimationSetup),
PluginDescriptor(
where = PluginDescriptor.WHERE_SESSIONSTART,
needsRestart = False,
fnc = sessionAnimationSetup),
]
    return plugin_list
| gpl-2.0 |
menardorama/ReadyNAS-Add-ons | headphones-1.0.0/files/etc/apps/headphones/lib/unidecode/x07a.py | 252 | 4669 | data = (
'Xi ', # 0x00
'Kao ', # 0x01
'Lang ', # 0x02
'Fu ', # 0x03
'Ze ', # 0x04
'Shui ', # 0x05
'Lu ', # 0x06
'Kun ', # 0x07
'Gan ', # 0x08
'Geng ', # 0x09
'Ti ', # 0x0a
'Cheng ', # 0x0b
'Tu ', # 0x0c
'Shao ', # 0x0d
'Shui ', # 0x0e
'Ya ', # 0x0f
'Lun ', # 0x10
'Lu ', # 0x11
'Gu ', # 0x12
'Zuo ', # 0x13
'Ren ', # 0x14
'Zhun ', # 0x15
'Bang ', # 0x16
'Bai ', # 0x17
'Ji ', # 0x18
'Zhi ', # 0x19
'Zhi ', # 0x1a
'Kun ', # 0x1b
'Leng ', # 0x1c
'Peng ', # 0x1d
'Ke ', # 0x1e
'Bing ', # 0x1f
'Chou ', # 0x20
'Zu ', # 0x21
'Yu ', # 0x22
'Su ', # 0x23
'Lue ', # 0x24
'[?] ', # 0x25
'Yi ', # 0x26
'Xi ', # 0x27
'Bian ', # 0x28
'Ji ', # 0x29
'Fu ', # 0x2a
'Bi ', # 0x2b
'Nuo ', # 0x2c
'Jie ', # 0x2d
'Zhong ', # 0x2e
'Zong ', # 0x2f
'Xu ', # 0x30
'Cheng ', # 0x31
'Dao ', # 0x32
'Wen ', # 0x33
'Lian ', # 0x34
'Zi ', # 0x35
'Yu ', # 0x36
'Ji ', # 0x37
'Xu ', # 0x38
'Zhen ', # 0x39
'Zhi ', # 0x3a
'Dao ', # 0x3b
'Jia ', # 0x3c
'Ji ', # 0x3d
'Gao ', # 0x3e
'Gao ', # 0x3f
'Gu ', # 0x40
'Rong ', # 0x41
'Sui ', # 0x42
'You ', # 0x43
'Ji ', # 0x44
'Kang ', # 0x45
'Mu ', # 0x46
'Shan ', # 0x47
'Men ', # 0x48
'Zhi ', # 0x49
'Ji ', # 0x4a
'Lu ', # 0x4b
'Su ', # 0x4c
'Ji ', # 0x4d
'Ying ', # 0x4e
'Wen ', # 0x4f
'Qiu ', # 0x50
'Se ', # 0x51
'[?] ', # 0x52
'Yi ', # 0x53
'Huang ', # 0x54
'Qie ', # 0x55
'Ji ', # 0x56
'Sui ', # 0x57
'Xiao ', # 0x58
'Pu ', # 0x59
'Jiao ', # 0x5a
'Zhuo ', # 0x5b
'Tong ', # 0x5c
'Sai ', # 0x5d
'Lu ', # 0x5e
'Sui ', # 0x5f
'Nong ', # 0x60
'Se ', # 0x61
'Hui ', # 0x62
'Rang ', # 0x63
'Nuo ', # 0x64
'Yu ', # 0x65
'Bin ', # 0x66
'Ji ', # 0x67
'Tui ', # 0x68
'Wen ', # 0x69
'Cheng ', # 0x6a
'Huo ', # 0x6b
'Gong ', # 0x6c
'Lu ', # 0x6d
'Biao ', # 0x6e
'[?] ', # 0x6f
'Rang ', # 0x70
'Zhuo ', # 0x71
'Li ', # 0x72
'Zan ', # 0x73
'Xue ', # 0x74
'Wa ', # 0x75
'Jiu ', # 0x76
'Qiong ', # 0x77
'Xi ', # 0x78
'Qiong ', # 0x79
'Kong ', # 0x7a
'Yu ', # 0x7b
'Sen ', # 0x7c
'Jing ', # 0x7d
'Yao ', # 0x7e
'Chuan ', # 0x7f
'Zhun ', # 0x80
'Tu ', # 0x81
'Lao ', # 0x82
'Qie ', # 0x83
'Zhai ', # 0x84
'Yao ', # 0x85
'Bian ', # 0x86
'Bao ', # 0x87
'Yao ', # 0x88
'Bing ', # 0x89
'Wa ', # 0x8a
'Zhu ', # 0x8b
'Jiao ', # 0x8c
'Qiao ', # 0x8d
'Diao ', # 0x8e
'Wu ', # 0x8f
'Gui ', # 0x90
'Yao ', # 0x91
'Zhi ', # 0x92
'Chuang ', # 0x93
'Yao ', # 0x94
'Tiao ', # 0x95
'Jiao ', # 0x96
'Chuang ', # 0x97
'Jiong ', # 0x98
'Xiao ', # 0x99
'Cheng ', # 0x9a
'Kou ', # 0x9b
'Cuan ', # 0x9c
'Wo ', # 0x9d
'Dan ', # 0x9e
'Ku ', # 0x9f
'Ke ', # 0xa0
'Zhui ', # 0xa1
'Xu ', # 0xa2
'Su ', # 0xa3
'Guan ', # 0xa4
'Kui ', # 0xa5
'Dou ', # 0xa6
'[?] ', # 0xa7
'Yin ', # 0xa8
'Wo ', # 0xa9
'Wa ', # 0xaa
'Ya ', # 0xab
'Yu ', # 0xac
'Ju ', # 0xad
'Qiong ', # 0xae
'Yao ', # 0xaf
'Yao ', # 0xb0
'Tiao ', # 0xb1
'Chao ', # 0xb2
'Yu ', # 0xb3
'Tian ', # 0xb4
'Diao ', # 0xb5
'Ju ', # 0xb6
'Liao ', # 0xb7
'Xi ', # 0xb8
'Wu ', # 0xb9
'Kui ', # 0xba
'Chuang ', # 0xbb
'Zhao ', # 0xbc
'[?] ', # 0xbd
'Kuan ', # 0xbe
'Long ', # 0xbf
'Cheng ', # 0xc0
'Cui ', # 0xc1
'Piao ', # 0xc2
'Zao ', # 0xc3
'Cuan ', # 0xc4
'Qiao ', # 0xc5
'Qiong ', # 0xc6
'Dou ', # 0xc7
'Zao ', # 0xc8
'Long ', # 0xc9
'Qie ', # 0xca
'Li ', # 0xcb
'Chu ', # 0xcc
'Shi ', # 0xcd
'Fou ', # 0xce
'Qian ', # 0xcf
'Chu ', # 0xd0
'Hong ', # 0xd1
'Qi ', # 0xd2
'Qian ', # 0xd3
'Gong ', # 0xd4
'Shi ', # 0xd5
'Shu ', # 0xd6
'Miao ', # 0xd7
'Ju ', # 0xd8
'Zhan ', # 0xd9
'Zhu ', # 0xda
'Ling ', # 0xdb
'Long ', # 0xdc
'Bing ', # 0xdd
'Jing ', # 0xde
'Jing ', # 0xdf
'Zhang ', # 0xe0
'Yi ', # 0xe1
'Si ', # 0xe2
'Jun ', # 0xe3
'Hong ', # 0xe4
'Tong ', # 0xe5
'Song ', # 0xe6
'Jing ', # 0xe7
'Diao ', # 0xe8
'Yi ', # 0xe9
'Shu ', # 0xea
'Jing ', # 0xeb
'Qu ', # 0xec
'Jie ', # 0xed
'Ping ', # 0xee
'Duan ', # 0xef
'Shao ', # 0xf0
'Zhuan ', # 0xf1
'Ceng ', # 0xf2
'Deng ', # 0xf3
'Cui ', # 0xf4
'Huai ', # 0xf5
'Jing ', # 0xf6
'Kan ', # 0xf7
'Jing ', # 0xf8
'Zhu ', # 0xf9
'Zhu ', # 0xfa
'Le ', # 0xfb
'Peng ', # 0xfc
'Yu ', # 0xfd
'Chi ', # 0xfe
'Gan ', # 0xff
)
| gpl-2.0 |
primepix/django-sentry | sentry/filters.py | 2 | 5486 | """
sentry.filters
~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# Widget api is pretty ugly
from __future__ import absolute_import
from django.conf import settings as django_settings
from django.utils.datastructures import SortedDict
from django.utils.safestring import mark_safe
from django.utils.html import escape
from sentry.conf import settings
class Widget(object):
def __init__(self, filter, request):
self.filter = filter
self.request = request
def get_query_string(self):
return self.filter.get_query_string()
class TextWidget(Widget):
def render(self, value, placeholder='', **kwargs):
return mark_safe('<div class="filter-text"><p class="textfield"><input type="text" name="%(name)s" value="%(value)s" placeholder="%(placeholder)s"/></p><p class="submit"><input type="submit" class="search-submit"/></p></div>' % dict(
name=self.filter.get_query_param(),
value=escape(value),
placeholder=escape(placeholder or 'enter %s' % self.filter.label.lower()),
))
class ChoiceWidget(Widget):
def render(self, value, **kwargs):
choices = self.filter.get_choices()
query_string = self.get_query_string()
column = self.filter.get_query_param()
output = ['<ul class="%s-list filter-list" rel="%s">' % (self.filter.column, column)]
output.append('<li%(active)s><a href="%(query_string)s&%(column)s=">Any %(label)s</a></li>' % dict(
active=not value and ' class="active"' or '',
query_string=query_string,
label=self.filter.label,
column=column,
))
for key, val in choices.iteritems():
key = unicode(key)
output.append('<li%(active)s rel="%(key)s"><a href="%(query_string)s&%(column)s=%(key)s">%(value)s</a></li>' % dict(
active=value == key and ' class="active"' or '',
column=column,
key=key,
value=val,
query_string=query_string,
))
output.append('</ul>')
return mark_safe('\n'.join(output))
class SentryFilter(object):
label = ''
column = ''
widget = ChoiceWidget
# This must be a string
default = ''
show_label = True
def __init__(self, request):
self.request = request
def is_set(self):
return bool(self.get_value())
def get_value(self):
return self.request.GET.get(self.get_query_param(), self.default) or ''
def get_query_param(self):
return getattr(self, 'query_param', self.column)
def get_widget(self):
return self.widget(self, self.request)
def get_query_string(self):
column = self.column
query_dict = self.request.GET.copy()
if 'p' in query_dict:
del query_dict['p']
if column in query_dict:
del query_dict[self.column]
return '?' + query_dict.urlencode()
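    # Worked example (hypothetical request): for a GET of
    # "?logger=root&level=30&p=2" on a filter whose column is 'logger',
    # get_query_string() drops 'p' and 'logger' and returns "?level=30".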
def get_choices(self):
from sentry.models import FilterValue
return SortedDict((l, l) for l in FilterValue.objects.filter(key=self.column)\
.values_list('value', flat=True)\
.order_by('value'))
def get_query_set(self, queryset):
from sentry.models import MessageIndex
kwargs = {self.column: self.get_value()}
if self.column.startswith('data__'):
return MessageIndex.objects.get_for_queryset(queryset, **kwargs)
return queryset.filter(**kwargs)
def process(self, data):
"""``self.request`` is not available within this method"""
return data
def render(self):
widget = self.get_widget()
return widget.render(self.get_value())
class StatusFilter(SentryFilter):
label = 'Status'
column = 'status'
default = '0'
def get_choices(self):
return SortedDict([
(0, 'Unresolved'),
(1, 'Resolved'),
])
class LoggerFilter(SentryFilter):
label = 'Logger'
column = 'logger'
class ServerNameFilter(SentryFilter):
label = 'Server Name'
column = 'server_name'
def get_query_set(self, queryset):
return queryset.filter(message_set__server_name=self.get_value()).distinct()
class SiteFilter(SentryFilter):
label = 'Site'
column = 'site'
def process(self, data):
if 'site' in data:
return data
if settings.SITE is None:
if 'django.contrib.sites' in django_settings.INSTALLED_APPS:
from django.contrib.sites.models import Site
try:
settings.SITE = Site.objects.get_current().name
except Site.DoesNotExist:
settings.SITE = ''
else:
settings.SITE = ''
if settings.SITE:
data['site'] = settings.SITE
return data
def get_query_set(self, queryset):
return queryset.filter(message_set__site=self.get_value()).distinct()
class LevelFilter(SentryFilter):
label = 'Level'
column = 'level'
def get_choices(self):
return SortedDict((str(k), v) for k, v in settings.LOG_LEVELS)
def get_query_set(self, queryset):
return queryset.filter(level__gte=self.get_value())
| bsd-3-clause |
kyleabeauchamp/FAHMunge | FAHMunge/fah.py | 1 | 9735 | ##############################################################################
# MDTraj: A Python Library for Loading, Saving, and Manipulating
# Molecular Dynamics Trajectories.
# Copyright 2012-2013 Stanford University and the Authors
#
# Authors: Kyle A. Beauchamp
# Contributors:
#
# MDTraj is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
"""
Code for merging and munging trajectories from FAH datasets.
"""
##############################################################################
# imports
##############################################################################
from __future__ import print_function, division
import os
import glob
import tarfile
from mdtraj.formats.hdf5 import HDF5TrajectoryFile
import mdtraj as md
import tables
from mdtraj.utils.contextmanagers import enter_temp_directory
from mdtraj.utils import six
def keynat(string):
'''A natural sort helper function for sort() and sorted()
    without using regular expressions.
>>> items = ('Z', 'a', '10', '1', '9')
>>> sorted(items)
['1', '10', '9', 'Z', 'a']
>>> sorted(items, key=keynat)
['1', '9', '10', 'Z', 'a']
'''
r = []
for c in string:
try:
c = int(c)
try:
r[-1] = r[-1] * 10 + c
except:
r.append(c)
except:
r.append(c)
return r
##############################################################################
# globals
##############################################################################
def strip_water(allatom_filename, protein_filename, protein_atom_indices, min_num_frames=1):
"""Strip water (or other) atoms from a Core17, Core18, or OCore FAH HDF5 trajectory.
Parameters
----------
allatom_filename : str
Path to HDF5 trajectory with all atoms. This trajectory must have been generated by
        concatenate_core17 or concatenate_ocore--i.e. it must include
extra metadata that lists the XTC files (bzipped or in OCore directories) that
have already been processed. This file will not be modified.
protein_filename : str
        Path to HDF5 trajectory with just the protein atoms. This trajectory must have been generated by
        concatenate_core17 or concatenate_ocore--i.e. it must include
extra metadata that lists the XTC files (bzipped or in OCore directories) that
have already been processed. This file will be appended to.
protein_atom_indices : np.ndarray, dtype='int'
List of atom indices to extract from allatom HDF5 file.
min_num_frames : int, optional, default=1
        Skip processing if the all-atom trajectory has fewer than this many frames.
"""
if not os.path.exists(allatom_filename):
print("Skipping, %s not found" % allatom_filename)
return
trj_allatom = HDF5TrajectoryFile(allatom_filename, mode='r')
if len(trj_allatom) < min_num_frames:
print("Must have at least %d frames in %s to proceed!" % (min_num_frames, allatom_filename))
return
if hasattr(trj_allatom.root, "processed_filenames"):
key = "processed_filenames" # Core17, Core18 style data
elif hasattr(trj_allatom.root, "processed_directories"):
key = "processed_directories" # Siegetank style data
else:
raise(ValueError("Can't find processed files in %s" % allatom_filename))
trj_protein = HDF5TrajectoryFile(protein_filename, mode='a')
try:
trj_protein._create_earray(where='/', name=key, atom=tables.StringAtom(1024), shape=(0,))
trj_protein.topology = trj_allatom.topology.subset(protein_atom_indices)
except tables.NodeError:
pass
n_frames_allatom = len(trj_allatom)
try:
n_frames_protein = len(trj_protein)
except tables.NoSuchNodeError:
n_frames_protein = 0
filenames_allatom = getattr(trj_allatom.root, key)
filenames_protein = getattr(trj_protein._handle.root, key) # Hacky workaround of MDTraj bug #588
n_files_allatom = len(filenames_allatom)
n_files_protein = len(filenames_protein)
print("Found %d,%d filenames and %d,%d frames in %s and %s, respectively." % (n_files_allatom, n_files_protein, n_frames_allatom, n_frames_protein, allatom_filename, protein_filename))
if n_frames_protein > n_frames_allatom:
raise(ValueError("Found more frames in protein trajectory (%d) than allatom trajectory (%d)" % (n_frames_protein, n_frames_allatom)))
if n_files_protein > n_files_allatom:
raise(ValueError("Found more filenames in protein trajectory (%d) than allatom trajectory (%d)" % (n_files_protein, n_files_allatom)))
if n_frames_protein == n_frames_allatom or n_files_allatom == n_files_protein:
if not (n_frames_protein == n_frames_allatom and n_files_allatom == n_files_protein):
raise(ValueError("The trajectories must match in BOTH n_frames and n_filenames or NEITHER."))
else:
print("Same number of frames and filenames found, skipping.")
return
trj_allatom.seek(n_frames_protein) # Jump forward past what we've already stripped.
coordinates, time, cell_lengths, cell_angles, velocities, kineticEnergy, potentialEnergy, temperature, alchemicalLambda = trj_allatom.read()
trj_protein.write(coordinates=coordinates[:, protein_atom_indices], time=time, cell_lengths=cell_lengths, cell_angles=cell_angles) # Ignoring the other fields for now, TODO.
filenames_protein.append(filenames_allatom[n_files_protein:])
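# Usage sketch (paths and selection are placeholders):
#
#   top = md.load('system.pdb')                   # hypothetical topology
#   protein_idx = top.topology.select('protein')
#   strip_water('clone13_allatom.h5', 'clone13_protein.h5', protein_idx)
#
# Because processed filenames are tracked, repeated calls append only the
# frames added since the previous run.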
def concatenate_core17(path, top, output_filename):
"""Concatenate tar bzipped XTC files created by Folding@Home Core17.
Parameters
----------
path : str
Path to directory containing "results-*.tar.bz2". E.g. a single CLONE directory.
top : mdtraj.Topology
Topology for system
output_filename : str
Filename of output HDF5 file to generate.
Notes
-----
We use HDF5 because it provides an easy way to store the metadata associated
with which files have already been processed.
"""
glob_input = os.path.join(path, "results-*.tar.bz2")
filenames = glob.glob(glob_input)
filenames = sorted(filenames, key=keynat)
if len(filenames) <= 0:
return
trj_file = HDF5TrajectoryFile(output_filename, mode='a')
try:
trj_file._create_earray(where='/', name='processed_filenames',atom=trj_file.tables.StringAtom(1024), shape=(0,))
trj_file.topology = top.topology
except trj_file.tables.NodeError:
pass
for filename in filenames:
if six.b(filename) in trj_file._handle.root.processed_filenames: # On Py3, the pytables list of filenames has type byte (e.g. b"hey"), so we need to deal with this via six.
print("Already processed %s" % filename)
continue
with enter_temp_directory():
print("Processing %s" % filename)
archive = tarfile.open(filename, mode='r:bz2')
archive.extract("positions.xtc")
trj = md.load("positions.xtc", top=top)
for frame in trj:
trj_file.write(coordinates=frame.xyz, cell_lengths=frame.unitcell_lengths, cell_angles=frame.unitcell_angles)
trj_file._handle.root.processed_filenames.append([filename])
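# Usage sketch (paths are placeholders for a PROJ*/RUN*/CLONE* layout):
#
#   top = md.load('system.pdb')                   # hypothetical topology
#   concatenate_core17('PROJ9704/RUN0/CLONE13/', top, 'clone13_allatom.h5')
#
# Archives already listed in processed_filenames are skipped, so the
# function is safe to re-run as new results arrive.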
def concatenate_ocore(path, top, output_filename):
"""Concatenate XTC files created by Siegetank OCore.
Parameters
----------
path : str
Path to stream directory containing frame directories /0, /1, /2
etc.
top : mdtraj.Topology
Topology for system
output_filename : str
Filename of output HDF5 file to generate.
Notes
-----
We use HDF5 because it provides an easy way to store the metadata associated
with which files have already been processed.
"""
sorted_folders = sorted(os.listdir(path), key=lambda value: int(value))
sorted_folders = [os.path.join(path, folder) for folder in sorted_folders]
if len(sorted_folders) <= 0:
return
trj_file = HDF5TrajectoryFile(output_filename, mode='a')
try:
trj_file._create_earray(where='/', name='processed_folders',atom=trj_file.tables.StringAtom(1024), shape=(0,))
trj_file.topology = top.topology
except trj_file.tables.NodeError:
pass
for folder in sorted_folders:
if six.b(folder) in trj_file._handle.root.processed_folders: # On Py3, the pytables list of filenames has type byte (e.g. b"hey"), so we need to deal with this via six.
print("Already processed %s" % folder)
continue
print("Processing %s" % folder)
xtc_filename = os.path.join(folder, "frames.xtc")
trj = md.load(xtc_filename, top=top)
for frame in trj:
trj_file.write(coordinates=frame.xyz, cell_lengths=frame.unitcell_lengths, cell_angles=frame.unitcell_angles)
trj_file._handle.root.processed_folders.append([folder])
| lgpl-2.1 |
gusai-francelabs/datafari | windows/python/Lib/site-packages/pip/_vendor/requests/exceptions.py | 895 | 2517 | # -*- coding: utf-8 -*-
"""
requests.exceptions
~~~~~~~~~~~~~~~~~~~
This module contains the set of Requests' exceptions.
"""
from .packages.urllib3.exceptions import HTTPError as BaseHTTPError
class RequestException(IOError):
"""There was an ambiguous exception that occurred while handling your
request."""
def __init__(self, *args, **kwargs):
"""
Initialize RequestException with `request` and `response` objects.
"""
response = kwargs.pop('response', None)
self.response = response
self.request = kwargs.pop('request', None)
if (response is not None and not self.request and
hasattr(response, 'request')):
self.request = self.response.request
super(RequestException, self).__init__(*args, **kwargs)
class HTTPError(RequestException):
"""An HTTP error occurred."""
class ConnectionError(RequestException):
"""A Connection error occurred."""
class ProxyError(ConnectionError):
"""A proxy error occurred."""
class SSLError(ConnectionError):
"""An SSL error occurred."""
class Timeout(RequestException):
"""The request timed out.
Catching this error will catch both
:exc:`~requests.exceptions.ConnectTimeout` and
:exc:`~requests.exceptions.ReadTimeout` errors.
"""
class ConnectTimeout(ConnectionError, Timeout):
"""The request timed out while trying to connect to the remote server.
Requests that produced this error are safe to retry.
"""
class ReadTimeout(Timeout):
"""The server did not send any data in the allotted amount of time."""
class URLRequired(RequestException):
"""A valid URL is required to make a request."""
class TooManyRedirects(RequestException):
"""Too many redirects."""
class MissingSchema(RequestException, ValueError):
"""The URL schema (e.g. http or https) is missing."""
class InvalidSchema(RequestException, ValueError):
"""See defaults.py for valid schemas."""
class InvalidURL(RequestException, ValueError):
""" The URL provided was somehow invalid. """
class ChunkedEncodingError(RequestException):
"""The server declared chunked encoding but sent an invalid chunk."""
class ContentDecodingError(RequestException, BaseHTTPError):
"""Failed to decode response content"""
class StreamConsumedError(RequestException, TypeError):
"""The content for this response was already consumed"""
class RetryError(RequestException):
"""Custom retries logic failed"""
| apache-2.0 |
ToonTownInfiniteRepo/ToontownInfinite | Panda3D-1.9.0/python/Lib/JOD/NewJamoDrum.py | 3 | 10076 | """
A generic input interface for the Jam-o-Drum that uses the OptiPAC
for both spinners and pads.
@author: U{Ben Buchwald <[email protected]>}
Last Updated: 2/27/2006
"""
from direct.showbase.DirectObject import DirectObject
import string, sys, md5
from pandac.PandaModules import Filename
from pandac.PandaModules import WindowProperties
from pandac.PandaModules import ConfigVariableList
class JamoDrum(DirectObject):
"""
Class representing input from a Jam-o-Drum. To handle Jam-o-Drum input
accept the Panda messages JOD_SPIN_x and JOD_HIT_x where x is a number between
0 and 3 for the 4 stations. Spin messages also pass a parameter which is the
angle spun in degrees. Hit messages also pass a parameter which is the force
the pad was hit with in the range 0.0-1.0 (will probably be fairly low). With
or without actual Jam-o-Drum hardware this class will automatically respond
to the keys (j,k,l),(s,d,f),(w,e,r), and (u,i,o) corresponding to spin left 10
degrees, hit with full force, and spin right 10 degrees respectively for each
of the stations. You must call L{poll} periodically to receive input from the
real Jam-o-Drum hardware.
"""
def __init__(self, useJOD=None):
"""
        @keyword useJOD: whether to connect to actual drumpads and spinners to read from (default: read from config.prc)
@type useJOD: bool
"""
self.configPath = Filename("/c/jamoconfig.txt")
self.logPath = Filename("/c/jamoconfig.log")
self.clearConfig()
self.simulate()
self.log = sys.stdout
self.configMissing = 0
self.hardwareChanged = 0
if (useJOD==None):
useJOD = base.config.GetBool("want-jamodrum", True)
self.useJOD = useJOD
if (useJOD):
self.setLog(self.logPath)
self.devindices = range(1,base.win.getNumInputDevices())
self.readConfigFile(self.configPath)
self.prepareDevices()
props = WindowProperties()
props.setCursorHidden(1)
if (sys.platform == "win32"):
props.setZOrder(WindowProperties.ZTop)
base.win.requestProperties(props)
self.setLog(None)
def setLog(self, fn):
if (self.log != sys.stdout):
self.log.close()
self.log = sys.stdout
if (fn):
try:
self.log = open(fn.toOsSpecific(), "w")
except:
self.log = sys.stdout
def generateMouseDigest(self):
m = md5.md5()
for i in range(base.win.getNumInputDevices()):
m.update(base.win.getInputDeviceName(i))
m.update("\n")
return m.hexdigest()
def reportDevices(self):
for devindex in self.devindices:
self.log.write("Encoder Detected: "+base.win.getInputDeviceName(devindex)+"\n")
def clearConfig(self):
self.ratio = 8.71
self.wheelConfigs = [[0,0],[0,0],[0,0],[0,0]]
self.padConfigs = [[0,0],[0,0],[0,0],[0,0]]
def getIntVal(self, spec):
try:
return int(spec)
except:
return -1
def setWheelConfig(self, station, axis, device):
if (axis=="x") or (axis=="X"): axis=0
if (axis=="y") or (axis=="Y"): axis=1
istation = self.getIntVal(station)
iaxis = self.getIntVal(axis)
if (istation < 0) or (istation > 3):
self.log.write("Wheel Config: Invalid station index "+str(station)+"\n")
return
if (iaxis < 0) or (iaxis > 1):
self.log.write("Wheel Config: Invalid axis index "+str(axis)+"\n")
return
self.wheelConfigs[istation] = [iaxis, str(device)]
def setPadConfig(self, station, button, device):
istation = self.getIntVal(station)
ibutton = self.getIntVal(button)
if (istation < 0) or (istation > 3):
self.log.write("Pad Config: Invalid station index "+str(station)+"\n")
return
if (ibutton < 0) or (ibutton > 2):
self.log.write("Pad Config: Invalid button index "+str(button)+"\n")
return
self.padConfigs[istation] = [ibutton, device]
def readConfigFile(self, fn):
digest = self.generateMouseDigest()
self.clearConfig()
try:
file = open(fn.toOsSpecific(),"r")
lines = file.readlines()
file.close()
except:
self.configMissing = 1
self.log.write("Could not read "+fn.toOsSpecific()+"\n")
return
for line in lines:
line = line.strip(" \t\r\n")
if (line=="") or (line[0]=="#"):
continue
words = line.split(" ")
if (words[0]=="wheel"):
if (len(words)==4):
self.setWheelConfig(words[1],words[2],words[3])
else:
self.log.write("Wheel Config: invalid syntax\n")
elif (words[0]=="pad"):
if (len(words)==4):
self.setPadConfig(words[1],words[2],words[3])
else:
self.log.write("Pad Config: invalid syntax\n")
elif (words[0]=="ratio"):
try:
self.ratio = float(words[1])
except:
self.log.write("Ratio Config: invalid syntax\n")
elif (words[0]=="digest"):
if (len(words)==2):
if (digest != words[1]):
self.hardwareChanged = 1
else:
self.log.write("Digest: invalid syntax")
else:
self.log.write("Unrecognized config directive "+line+"\n")
def writeConfigFile(self, fn):
try:
file = open(fn.toOsSpecific(),"w")
file.write("ratio "+str(self.ratio)+"\n")
for i in range(4):
wheelinfo = self.wheelConfigs[i]
file.write("wheel "+str(i)+" "+str(wheelinfo[0])+" "+wheelinfo[1]+"\n")
padinfo = self.padConfigs[i]
file.write("pad "+str(i)+" "+str(padinfo[0])+" "+padinfo[1]+"\n")
file.close()
except:
self.log.write("Could not write "+fn.toOsSpecific()+"\n")
def findWheel(self, devaxis, devname):
for wheelindex in range(4):
wheelinfo = self.wheelConfigs[wheelindex]
wheelaxis = wheelinfo[0]
wheeldevice = wheelinfo[1]
if (devname == wheeldevice) and (devaxis == wheelaxis):
return wheelindex
return -1
def findPad(self, devbutton, devname):
for padindex in range(4):
padinfo = self.padConfigs[padindex]
padbutton = padinfo[0]
paddevice = padinfo[1]
if (devname == paddevice) and (devbutton == padbutton):
return padindex
return -1
def prepareDevices(self):
"""
Each axis or button will be associated with a wheel or pad.
Any axis or button not in the config list will be associated
with wheel -1 or pad -1.
"""
self.polls = []
for devindex in range(1, base.win.getNumInputDevices()):
devname = base.win.getInputDeviceName(devindex)
for devaxis in range(2):
target = self.findWheel(devaxis, devname)
self.log.write("Axis "+str(devaxis)+" of "+devname+" controls wheel "+str(target)+"\n")
self.polls.append([devaxis, devindex, target, 0])
for devbutton in range(3):
target = self.findPad(devbutton, devname)
sig = "mousedev"+str(devindex)+"-mouse"+str(devbutton+1)
self.log.write("Button "+str(devbutton)+" of "+devname+" controls pad "+str(target)+"\n")
self.ignore(sig)
self.accept(sig, self.hit, [target, 1.0])
def simulate(self,spin=10.0,hit=1.0):
"""
Accept keyboard keys to simulate Jam-o-Drum input.
@keyword spin: degrees to spin for each keystroke (default: 10.0)
@type spin: float
@keyword hit: force to hit for each keystroke (default: 1.0)
@type hit: float
"""
self.accept('k',self.hit,[0,hit])
self.accept('d',self.hit,[1,hit])
self.accept('e',self.hit,[2,hit])
self.accept('i',self.hit,[3,hit])
self.accept('j',self.spin,[0,spin])
self.accept('l',self.spin,[0,-spin])
self.accept('s',self.spin,[1,spin])
self.accept('f',self.spin,[1,-spin])
self.accept('w',self.spin,[2,-spin])
self.accept('r',self.spin,[2,spin])
self.accept('u',self.spin,[3,-spin])
self.accept('o',self.spin,[3,spin])
# end simulate
def poll(self):
"""
Call this each frame to poll actual drumpads and spinners for input.
If input occurs messages will be sent.
"""
if (not self.useJOD):
return
offsets = [0.0,0.0,0.0,0.0]
for info in self.polls:
axis = info[0]
devindex = info[1]
wheel = info[2]
last = info[3]
if (axis == 0):
pos = base.win.getPointer(devindex).getX()
else:
pos = base.win.getPointer(devindex).getY()
if (pos != last):
diff = (pos-last)/self.ratio
if (wheel < 0):
offsets[0] += diff
offsets[1] += diff
offsets[2] += diff
offsets[3] += diff
else:
offsets[wheel] += diff
info[3] = pos
for i in range(4):
if (offsets[i] != 0.0):
self.spin(i,offsets[i])
def spin(self,station,angle):
"""
Sends a JOD_SPIN_<station> message
"""
sig = "JOD_SPIN_"+str(station)
messenger.send(sig,[angle])
def hit(self,station,force):
"""
Sends a JOD_HIT_<station> message
"""
if (station < 0):
for station in range(4):
sig = "JOD_HIT_"+str(station)
messenger.send(sig,[force])
else:
sig = "JOD_HIT_"+str(station)
messenger.send(sig,[force])
# end class JamoDrum
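# Illustrative sketch (not part of the original module): a minimal consumer
# of the JOD_SPIN_x / JOD_HIT_x messages described in the class docstring.
# The handler object and task setup below are hypothetical.
#
#   jod = JamoDrum()
#
#   def on_spin(angle):   # angle spun, in degrees
#       print "station 0 spun by", angle
#
#   def on_hit(force):    # force in the range 0.0-1.0
#       print "station 0 hit with force", force
#
#   someDirectObject.accept("JOD_SPIN_0", on_spin)
#   someDirectObject.accept("JOD_HIT_0", on_hit)
#   # poll the real hardware periodically, e.g. from a Panda3D task
#   taskMgr.add(lambda task: (jod.poll(), task.cont)[1], "pollJOD")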
| mit |
brennie/reviewboard | reviewboard/ssh/utils.py | 3 | 3339 | from __future__ import unicode_literals
import os
import paramiko
from django.utils import six
from reviewboard.ssh.client import SSHClient
from reviewboard.ssh.errors import (BadHostKeyError, SSHAuthenticationError,
SSHError, SSHInvalidPortError)
from reviewboard.ssh.policy import RaiseUnknownHostKeyPolicy
SSH_PORT = 22
try:
import urlparse
uses_netloc = urlparse.uses_netloc
urllib_parse = urlparse.urlparse
except ImportError:
import urllib.parse
uses_netloc = urllib.parse.uses_netloc
urllib_parse = urllib.parse.urlparse
# A list of known SSH URL schemes.
ssh_uri_schemes = ["ssh", "sftp"]
uses_netloc.extend(ssh_uri_schemes)
def humanize_key(key):
"""Returns a human-readable key as a series of hex characters."""
return ':'.join(["%02x" % ord(c) for c in key.get_fingerprint()])
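# Illustrative sketch (not part of the original module): for a paramiko key
# whose fingerprint bytes were '\x12\x34', humanize_key would return '12:34';
# a real RSA key fingerprint yields sixteen pairs, e.g. 'a1:b2:...:ff'.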
def is_ssh_uri(url):
"""Returns whether or not a URL represents an SSH connection."""
return urllib_parse(url)[0] in ssh_uri_schemes
def check_host(netloc, username=None, password=None, namespace=None):
"""
Checks if we can connect to a host with a known key.
This will raise an exception if we cannot connect to the host. The
    exception will be one of BadHostKeyError, SSHAuthenticationError,
    SSHInvalidPortError, or SSHError.
"""
from django.conf import settings
client = SSHClient(namespace=namespace)
client.set_missing_host_key_policy(RaiseUnknownHostKeyPolicy())
kwargs = {}
if ':' in netloc:
hostname, port = netloc.split(':')
try:
port = int(port)
except ValueError:
raise SSHInvalidPortError(port)
else:
hostname = netloc
port = SSH_PORT
# We normally want to notify on unknown host keys, but not when running
# unit tests.
if getattr(settings, 'RUNNING_TEST', False):
client.set_missing_host_key_policy(paramiko.WarningPolicy())
kwargs['allow_agent'] = False
try:
client.connect(hostname, port, username=username, password=password,
pkey=client.get_user_key(), **kwargs)
except paramiko.BadHostKeyException as e:
raise BadHostKeyError(e.hostname, e.key, e.expected_key)
except paramiko.AuthenticationException as e:
# Some AuthenticationException instances have allowed_types set,
# and some don't.
allowed_types = getattr(e, 'allowed_types', [])
if 'publickey' in allowed_types:
key = client.get_user_key()
else:
key = None
raise SSHAuthenticationError(allowed_types=allowed_types, user_key=key)
except paramiko.SSHException as e:
msg = six.text_type(e)
if msg == 'No authentication methods available':
raise SSHAuthenticationError
else:
raise SSHError(msg)
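# Illustrative sketch (not part of the original module): typical use of
# check_host(); the host, port and credentials are hypothetical.
#
#   try:
#       check_host('example.com:2222', username='reviewbot',
#                  password='secret')
#   except BadHostKeyError:
#       pass  # the key on record does not match what the host presented
#   except SSHAuthenticationError as e:
#       pass  # e.allowed_types lists the auth methods the server accepts
#   except SSHError as e:
#       pass  # any other SSH-level failure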
def register_rbssh(envvar):
"""Registers rbssh in an environment variable.
This is a convenience method for making sure that rbssh is set properly
in the environment for different tools. In some cases, we need to
specifically place it in the system environment using ``os.putenv``,
while in others (Mercurial, Bazaar), we need to place it in ``os.environ``.
"""
envvar = envvar.encode('utf-8')
os.putenv(envvar, b'rbssh')
os.environ[envvar] = b'rbssh'
| mit |
hbenniou/trunk | examples/gts-horse/gts-random-pack.py | 10 | 3271 |
""" CAUTION:
Running this script can take very long!
"""
from numpy import arange
from yade import pack
import pylab
# define the section shape as polygon in 2d; repeat first point at the end to close the polygon
poly=((1e-2,5e-2),(5e-2,2e-2),(7e-2,-2e-2),(1e-2,-5e-2),(1e-2,5e-2))
# show us the meridian shape
#pylab.plot(*zip(*poly)); pylab.xlim(xmin=0); pylab.grid(); pylab.title('Meridian of the revolution surface\n(close to continue)'); pylab.gca().set_aspect(aspect='equal',adjustable='box'); pylab.show()
# angles at which we want this polygon to appear
thetas=arange(0,pi/2,pi/24)
# create 3d points from the 2d ones, turning the 2d meridian around the +y axis
# for each angle, put the poly a little bit higher (+2e-3*theta);
# this is just to demonstrate that you can do whatever here as long as the resulting
# meridian has the same number of points
#
# There are origin (translation) and orientation arguments, allowing all the 3d points to be transformed once computed.
#
# Without these transformations, it would look a little simpler:
# pts=pack.revolutionSurfaceMeridians([[(pt[0],pt[1]+2e-3*theta) for pt in poly] for theta in thetas],thetas
#
pts=pack.revolutionSurfaceMeridians([[(pt[0],pt[1]+1e-2*theta) for pt in poly] for theta in thetas],thetas,origin=Vector3(0,-.05,.1),orientation=Quaternion((1,1,0),pi/4))
# connect meridians to make surfaces
# caps will close it at the beginning and the end
# threshold will merge points closer than 1e-4; this is important: we want it to be closed for filling
surf=pack.sweptPolylines2gtsSurface(pts,capStart=True,capEnd=True,threshold=1e-4)
# add the surface as facets to the simulation, to make it visible
O.bodies.append(pack.gtsSurface2Facets(surf,color=(1,0,1)))
# now fill the inGtsSurface predicate constructed form the same surface with sphere packing generated by TriaxialTest
# with given radius and standard deviation (see documentation of pack.randomDensePack)
#
# The memoizeDb will save resulting packing into given file and next time, if you run with the same
# parameters (or parameters that can be scaled to the same one),
# it will load the packing instead of running the triaxial compaction again.
# Try running for the second time to see the speed difference!
memoizeDb='/tmp/gts-triax-packings.sqlite'
sp=SpherePack()
sp=pack.randomDensePack(pack.inGtsSurface(surf),radius=5e-3,rRelFuzz=1e-4,memoizeDb=memoizeDb,returnSpherePack=True)
sp.toSimulation()
# We could also fill the horse with a triaxial packing but, to get a nice
# approximation, the triaxial compaction would run terribly long, since the
# horse fills only a small fraction of its bounding box.
# Here we use a very crude approximation instead.
if 1:
import gts
horse=gts.read(open('horse.coarse.gts')) #; horse.scale(.25,.25,.25)
O.bodies.append(pack.gtsSurface2Facets(horse))
sp=pack.randomDensePack(pack.inGtsSurface(horse),radius=5e-3,memoizeDb=memoizeDb,returnSpherePack=True)
sp.toSimulation()
horse.translate(.07,0,0)
O.bodies.append(pack.gtsSurface2Facets(horse))
# specifying spheresInCell makes the packing periodic, with the given number of spheres, proportions being equal to that of the predicate
sp=pack.randomDensePack(pack.inGtsSurface(horse),radius=1e-3,spheresInCell=2000,memoizeDb=memoizeDb,returnSpherePack=True)
sp.toSimulation()
| gpl-2.0 |
stscieisenhamer/pyqtgraph | pyqtgraph/units.py | 55 | 1402 | # -*- coding: utf-8 -*-
## Very simple unit support:
## - creates variable names like 'mV' and 'kHz'
## - the value assigned to the variable corresponds to the scale prefix
## (mV = 0.001)
## - the actual units are purely cosmetic for making code clearer:
##
## x = 20*pA is identical to x = 20*1e-12
## No unicode variable names (μ,Ω) allowed until python 3
SI_PREFIXES = 'yzafpnum kMGTPEZY'
UNITS = 'm,s,g,W,J,V,A,F,T,Hz,Ohm,S,N,C,px,b,B'.split(',')
allUnits = {}
def addUnit(p, n):
g = globals()
v = 1000**n
for u in UNITS:
g[p+u] = v
allUnits[p+u] = v
for p in SI_PREFIXES:
if p == ' ':
p = ''
n = 0
elif p == 'u':
n = -2
else:
n = SI_PREFIXES.index(p) - 8
addUnit(p, n)
cm = 0.01
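## Illustrative sketch (not part of the original module): the generated names
## are plain floats, so the units are purely cosmetic multipliers.
##
##   assert mV == 1e-3 and kHz == 1e3 and pA == 1e-12
##   x = 20*pA   # identical to x = 20*1e-12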
def evalUnits(unitStr):
"""
Evaluate a unit string into ([numerators,...], [denominators,...])
Examples:
N m/s^2 => ([N, m], [s, s])
A*s / V => ([A, s], [V,])
"""
pass
def formatUnits(units):
"""
Format a unit specification ([numerators,...], [denominators,...])
into a string (this is the inverse of evalUnits)
"""
pass
def simplify(units):
"""
Cancel units that appear in both numerator and denominator, then attempt to replace
groups of units with single units where possible (ie, J/s => W)
"""
pass
| mit |
Nebucatnetzer/tamagotchi | pygame/bin/activate_this.py | 669 | 1129 | """By using execfile(this_file, dict(__file__=this_file)) you will
activate this virtualenv environment.
This can be used when you must use an existing Python interpreter, not
the virtualenv bin/python
"""
try:
__file__
except NameError:
raise AssertionError(
"You must run this like execfile('path/to/activate_this.py', dict(__file__='path/to/activate_this.py'))")
import sys
import os
old_os_path = os.environ['PATH']
os.environ['PATH'] = os.path.dirname(os.path.abspath(__file__)) + os.pathsep + old_os_path
base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if sys.platform == 'win32':
site_packages = os.path.join(base, 'Lib', 'site-packages')
else:
site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
prev_sys_path = list(sys.path)
import site
site.addsitedir(site_packages)
sys.real_prefix = sys.prefix
sys.prefix = base
# Move the added items to the front of the path:
new_sys_path = []
for item in list(sys.path):
if item not in prev_sys_path:
new_sys_path.append(item)
sys.path.remove(item)
sys.path[:0] = new_sys_path
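# Illustrative sketch (not part of the original script), with a hypothetical
# path. Under Python 3, where execfile() no longer exists, the equivalent is:
#
#   activate_this = '/path/to/venv/bin/activate_this.py'
#   exec(open(activate_this).read(), dict(__file__=activate_this))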
| gpl-2.0 |
chjw8016/GreenOdoo7-haibao | openerp/addons/point_of_sale/controllers/main.py | 56 | 5627 | # -*- coding: utf-8 -*-
import logging
import simplejson
import os
import openerp
from openerp.addons.web.controllers.main import manifest_list, module_boot, html_template
class PointOfSaleController(openerp.addons.web.http.Controller):
_cp_path = '/pos'
@openerp.addons.web.http.httprequest
def app(self, req, s_action=None, **kw):
js = "\n ".join('<script type="text/javascript" src="%s"></script>' % i for i in manifest_list(req, None, 'js'))
css = "\n ".join('<link rel="stylesheet" href="%s">' % i for i in manifest_list(req, None, 'css'))
cookie = req.httprequest.cookies.get("instance0|session_id")
session_id = cookie.replace("%22","")
template = html_template.replace('<html','<html manifest="/pos/manifest?session_id=%s"'%session_id)
r = template % {
'js': js,
'css': css,
'modules': simplejson.dumps(module_boot(req)),
'init': 'var wc = new s.web.WebClient();wc.appendTo($(document.body));'
}
return r
@openerp.addons.web.http.httprequest
def manifest(self, req, **kwargs):
""" This generates a HTML5 cache manifest files that preloads the categories and products thumbnails
and other ressources necessary for the point of sale to work offline """
ml = ["CACHE MANIFEST"]
# loading all the images in the static/src/img/* directories
def load_css_img(srcdir,dstdir):
for f in os.listdir(srcdir):
path = os.path.join(srcdir,f)
dstpath = os.path.join(dstdir,f)
if os.path.isdir(path) :
load_css_img(path,dstpath)
elif f.endswith(('.png','.PNG','.jpg','.JPG','.jpeg','.JPEG','.gif','.GIF')):
ml.append(dstpath)
imgdir = openerp.modules.get_module_resource('point_of_sale','static/src/img');
load_css_img(imgdir,'/point_of_sale/static/src/img')
products = req.session.model('product.product')
for p in products.search_read([('pos_categ_id','!=',False)], ['name']):
product_id = p['id']
url = "/web/binary/image?session_id=%s&model=product.product&field=image&id=%s" % (req.session_id, product_id)
ml.append(url)
categories = req.session.model('pos.category')
for c in categories.search_read([],['name']):
category_id = c['id']
url = "/web/binary/image?session_id=%s&model=pos.category&field=image&id=%s" % (req.session_id, category_id)
ml.append(url)
ml += ["NETWORK:","*"]
m = "\n".join(ml)
return m
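    # Illustrative sketch (not part of the original module): the method above
    # returns plain text in the HTML5 cache-manifest format, e.g.
    #
    #   CACHE MANIFEST
    #   /point_of_sale/static/src/img/logo.png
    #   /web/binary/image?session_id=...&model=product.product&field=image&id=42
    #   NETWORK:
    #   *
    #
    # (the image path and id above are hypothetical)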
@openerp.addons.web.http.jsonrequest
def dispatch(self, request, iface, **kwargs):
method = 'iface_%s' % iface
return getattr(self, method)(request, **kwargs)
@openerp.addons.web.http.jsonrequest
def scan_item_success(self, request, ean):
"""
        A product has been scanned successfully
"""
print 'scan_item_success: ' + str(ean)
return
@openerp.addons.web.http.jsonrequest
def scan_item_error_unrecognized(self, request, ean):
"""
        A product was scanned but not recognized
"""
print 'scan_item_error_unrecognized: ' + str(ean)
return
@openerp.addons.web.http.jsonrequest
def help_needed(self, request):
"""
        The user requests help (e.g. the help light is on)
"""
print "help_needed"
return
@openerp.addons.web.http.jsonrequest
def help_canceled(self, request):
"""
The user stops the help request
"""
print "help_canceled"
return
@openerp.addons.web.http.jsonrequest
def weighting_start(self, request):
print "weighting_start"
return
@openerp.addons.web.http.jsonrequest
def weighting_read_kg(self, request):
print "weighting_read_kg"
return 0.0
@openerp.addons.web.http.jsonrequest
def weighting_end(self, request):
print "weighting_end"
return
@openerp.addons.web.http.jsonrequest
def payment_request(self, request, price):
"""
The PoS will activate the method payment
"""
print "payment_request: price:"+str(price)
return 'ok'
@openerp.addons.web.http.jsonrequest
def payment_status(self, request):
print "payment_status"
return { 'status':'waiting' }
@openerp.addons.web.http.jsonrequest
def payment_cancel(self, request):
print "payment_cancel"
return
@openerp.addons.web.http.jsonrequest
def transaction_start(self, request):
print 'transaction_start'
return
@openerp.addons.web.http.jsonrequest
def transaction_end(self, request):
print 'transaction_end'
return
@openerp.addons.web.http.jsonrequest
def cashier_mode_activated(self, request):
print 'cashier_mode_activated'
return
@openerp.addons.web.http.jsonrequest
def cashier_mode_deactivated(self, request):
print 'cashier_mode_deactivated'
return
@openerp.addons.web.http.jsonrequest
def open_cashbox(self, request):
print 'open_cashbox'
return
@openerp.addons.web.http.jsonrequest
def print_receipt(self, request, receipt):
print 'print_receipt' + str(receipt)
return
@openerp.addons.web.http.jsonrequest
def print_pdf_invoice(self, request, pdfinvoice):
print 'print_pdf_invoice' + str(pdfinvoice)
return
| mit |
jejimenez/django | django/contrib/postgres/lookups.py | 199 | 1175 | from django.db.models import Lookup, Transform
class PostgresSimpleLookup(Lookup):
def as_sql(self, qn, connection):
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return '%s %s %s' % (lhs, self.operator, rhs), params
class FunctionTransform(Transform):
def as_sql(self, qn, connection):
lhs, params = qn.compile(self.lhs)
return "%s(%s)" % (self.function, lhs), params
class DataContains(PostgresSimpleLookup):
lookup_name = 'contains'
operator = '@>'
class ContainedBy(PostgresSimpleLookup):
lookup_name = 'contained_by'
operator = '<@'
class Overlap(PostgresSimpleLookup):
lookup_name = 'overlap'
operator = '&&'
class HasKey(PostgresSimpleLookup):
lookup_name = 'has_key'
operator = '?'
class HasKeys(PostgresSimpleLookup):
lookup_name = 'has_keys'
operator = '?&'
class HasAnyKeys(PostgresSimpleLookup):
lookup_name = 'has_any_keys'
operator = '?|'
class Unaccent(FunctionTransform):
bilateral = True
lookup_name = 'unaccent'
function = 'UNACCENT'
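# Illustrative sketch (not part of the original module): once these lookups
# are registered on a field class, ORM filters map onto the PostgreSQL
# operators above. The model and field names are hypothetical.
#
#   Post.objects.filter(tags__contains=['django'])
#   # ... WHERE "tags" @> ['django']
#   Post.objects.filter(meta__has_key='author')
#   # ... WHERE "meta" ? 'author'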
| bsd-3-clause |
bholley/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/test/test_extensions.py | 413 | 16128 | #!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for extensions module."""
import unittest
import zlib
import set_sys_path # Update sys.path to locate mod_pywebsocket module.
from mod_pywebsocket import common
from mod_pywebsocket import extensions
class ExtensionsTest(unittest.TestCase):
"""A unittest for non-class methods in extensions.py"""
def test_parse_window_bits(self):
self.assertRaises(ValueError, extensions._parse_window_bits, None)
self.assertRaises(ValueError, extensions._parse_window_bits, 'foobar')
self.assertRaises(ValueError, extensions._parse_window_bits, ' 8 ')
self.assertRaises(ValueError, extensions._parse_window_bits, 'a8a')
self.assertRaises(ValueError, extensions._parse_window_bits, '00000')
self.assertRaises(ValueError, extensions._parse_window_bits, '00008')
self.assertRaises(ValueError, extensions._parse_window_bits, '0x8')
self.assertRaises(ValueError, extensions._parse_window_bits, '9.5')
self.assertRaises(ValueError, extensions._parse_window_bits, '8.0')
self.assertTrue(extensions._parse_window_bits, '8')
self.assertTrue(extensions._parse_window_bits, '15')
self.assertRaises(ValueError, extensions._parse_window_bits, '-8')
self.assertRaises(ValueError, extensions._parse_window_bits, '0')
self.assertRaises(ValueError, extensions._parse_window_bits, '7')
self.assertRaises(ValueError, extensions._parse_window_bits, '16')
self.assertRaises(
ValueError, extensions._parse_window_bits, '10000000')
class CompressionMethodParameterParserTest(unittest.TestCase):
"""A unittest for _parse_compression_method which parses the compression
method description used by perframe-compression and permessage-compression
extension in their "method" extension parameter.
"""
def test_parse_method_simple(self):
method_list = extensions._parse_compression_method('foo')
self.assertEqual(1, len(method_list))
method = method_list[0]
self.assertEqual('foo', method.name())
self.assertEqual(0, len(method.get_parameters()))
def test_parse_method_with_parameter(self):
method_list = extensions._parse_compression_method('foo; x; y=10')
self.assertEqual(1, len(method_list))
method = method_list[0]
self.assertEqual('foo', method.name())
self.assertEqual(2, len(method.get_parameters()))
self.assertTrue(method.has_parameter('x'))
self.assertEqual(None, method.get_parameter_value('x'))
self.assertTrue(method.has_parameter('y'))
self.assertEqual('10', method.get_parameter_value('y'))
def test_parse_method_with_quoted_parameter(self):
method_list = extensions._parse_compression_method(
'foo; x="Hello World"; y=10')
self.assertEqual(1, len(method_list))
method = method_list[0]
self.assertEqual('foo', method.name())
self.assertEqual(2, len(method.get_parameters()))
self.assertTrue(method.has_parameter('x'))
self.assertEqual('Hello World', method.get_parameter_value('x'))
self.assertTrue(method.has_parameter('y'))
self.assertEqual('10', method.get_parameter_value('y'))
def test_parse_method_multiple(self):
method_list = extensions._parse_compression_method('foo, bar')
self.assertEqual(2, len(method_list))
self.assertEqual('foo', method_list[0].name())
self.assertEqual(0, len(method_list[0].get_parameters()))
self.assertEqual('bar', method_list[1].name())
self.assertEqual(0, len(method_list[1].get_parameters()))
def test_parse_method_multiple_methods_with_quoted_parameter(self):
method_list = extensions._parse_compression_method(
'foo; x="Hello World", bar; y=10')
self.assertEqual(2, len(method_list))
self.assertEqual('foo', method_list[0].name())
self.assertEqual(1, len(method_list[0].get_parameters()))
self.assertTrue(method_list[0].has_parameter('x'))
self.assertEqual('Hello World',
method_list[0].get_parameter_value('x'))
self.assertEqual('bar', method_list[1].name())
self.assertEqual(1, len(method_list[1].get_parameters()))
self.assertTrue(method_list[1].has_parameter('y'))
self.assertEqual('10', method_list[1].get_parameter_value('y'))
def test_create_method_desc_simple(self):
params = common.ExtensionParameter('foo')
desc = extensions._create_accepted_method_desc('foo',
params.get_parameters())
self.assertEqual('foo', desc)
def test_create_method_desc_with_parameters(self):
params = common.ExtensionParameter('foo')
params.add_parameter('x', 'Hello, World')
params.add_parameter('y', '10')
desc = extensions._create_accepted_method_desc('foo',
params.get_parameters())
self.assertEqual('foo; x="Hello, World"; y=10', desc)
class DeflateFrameExtensionProcessorParsingTest(unittest.TestCase):
"""A unittest for checking that DeflateFrameExtensionProcessor parses given
extension parameter correctly.
"""
def test_registry(self):
processor = extensions.get_extension_processor(
common.ExtensionParameter('deflate-frame'))
self.assertIsInstance(processor,
extensions.DeflateFrameExtensionProcessor)
processor = extensions.get_extension_processor(
common.ExtensionParameter('x-webkit-deflate-frame'))
self.assertIsInstance(processor,
extensions.DeflateFrameExtensionProcessor)
def test_minimal_offer(self):
processor = extensions.DeflateFrameExtensionProcessor(
common.ExtensionParameter('perframe-deflate'))
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertEqual(zlib.MAX_WBITS,
processor._rfc1979_deflater._window_bits)
self.assertFalse(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_max_window_bits(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('max_window_bits', '10')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertEqual(10, processor._rfc1979_deflater._window_bits)
def test_offer_with_out_of_range_max_window_bits(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('max_window_bits', '0')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_max_window_bits_without_value(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('max_window_bits', None)
processor = extensions.DeflateFrameExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_no_context_takeover(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('no_context_takeover', None)
processor = extensions.DeflateFrameExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertTrue(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_no_context_takeover_with_value(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('no_context_takeover', 'foobar')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_unknown_parameter(self):
parameter = common.ExtensionParameter('perframe-deflate')
parameter.add_parameter('foo', 'bar')
processor = extensions.DeflateFrameExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('perframe-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
class PerMessageDeflateExtensionProcessorParsingTest(unittest.TestCase):
"""A unittest for checking that PerMessageDeflateExtensionProcessor parses
given extension parameter correctly.
"""
def test_registry(self):
processor = extensions.get_extension_processor(
common.ExtensionParameter('permessage-deflate'))
self.assertIsInstance(processor,
extensions.PerMessageDeflateExtensionProcessor)
def test_minimal_offer(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
self.assertEqual(zlib.MAX_WBITS,
processor._rfc1979_deflater._window_bits)
self.assertFalse(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_max_window_bits(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_max_window_bits', '10')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('server_max_window_bits', '10')],
response.get_parameters())
self.assertEqual(10, processor._rfc1979_deflater._window_bits)
def test_offer_with_out_of_range_max_window_bits(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_max_window_bits', '0')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_max_window_bits_without_value(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_max_window_bits', None)
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_no_context_takeover(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_no_context_takeover', None)
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('server_no_context_takeover', None)],
response.get_parameters())
self.assertTrue(processor._rfc1979_deflater._no_context_takeover)
def test_offer_with_no_context_takeover_with_value(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('server_no_context_takeover', 'foobar')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
def test_offer_with_unknown_parameter(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('foo', 'bar')
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
self.assertIsNone(processor.get_extension_response())
class PerMessageDeflateExtensionProcessorBuildingTest(unittest.TestCase):
"""A unittest for checking that PerMessageDeflateExtensionProcessor builds
a response based on specified options correctly.
"""
def test_response_with_max_window_bits(self):
parameter = common.ExtensionParameter('permessage-deflate')
parameter.add_parameter('client_max_window_bits', None)
processor = extensions.PerMessageDeflateExtensionProcessor(parameter)
processor.set_client_max_window_bits(10)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('client_max_window_bits', '10')],
response.get_parameters())
def test_response_with_max_window_bits_without_client_permission(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
processor.set_client_max_window_bits(10)
response = processor.get_extension_response()
self.assertIsNone(response)
def test_response_with_true_for_no_context_takeover(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
processor.set_client_no_context_takeover(True)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual([('client_no_context_takeover', None)],
response.get_parameters())
def test_response_with_false_for_no_context_takeover(self):
processor = extensions.PerMessageDeflateExtensionProcessor(
common.ExtensionParameter('permessage-deflate'))
processor.set_client_no_context_takeover(False)
response = processor.get_extension_response()
self.assertEqual('permessage-deflate', response.name())
self.assertEqual(0, len(response.get_parameters()))
class PerMessageCompressExtensionProcessorTest(unittest.TestCase):
def test_registry(self):
processor = extensions.get_extension_processor(
common.ExtensionParameter('permessage-compress'))
self.assertIsInstance(processor,
extensions.PerMessageCompressExtensionProcessor)
if __name__ == '__main__':
unittest.main()
# vi:sts=4 sw=4 et
| mpl-2.0 |
Jgarcia-IAS/localizacion | openerp/addons/l10n_be_invoice_bba/partner.py | 379 | 2268 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Created by Luc De Meyer
# Copyright (c) 2010 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import time
from openerp.tools.translate import _
class res_partner(osv.osv):
""" add field to indicate default 'Communication Type' on customer invoices """
_inherit = 'res.partner'
def _get_comm_type(self, cr, uid, context=None):
res = self.pool.get('account.invoice')._get_reference_type(cr, uid,context=context)
return res
_columns = {
'out_inv_comm_type': fields.selection(_get_comm_type, 'Communication Type', change_default=True,
help='Select Default Communication Type for Outgoing Invoices.' ),
'out_inv_comm_algorithm': fields.selection([
('random','Random'),
('date','Date'),
('partner_ref','Customer Reference'),
], 'Communication Algorithm',
help='Select Algorithm to generate the Structured Communication on Outgoing Invoices.' ),
}
def _commercial_fields(self, cr, uid, context=None):
return super(res_partner, self)._commercial_fields(cr, uid, context=context) + \
['out_inv_comm_type', 'out_inv_comm_algorithm']
    _defaults = {
'out_inv_comm_type': 'none',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
PeterLauris/aifh | vol1/python-examples/examples/example_ocr.py | 4 | 8445 | #!/usr/bin/env python
"""
Artificial Intelligence for Humans
Volume 1: Fundamental Algorithms
Python Version
http://www.aifh.org
http://www.jeffheaton.com
Code repository:
https://github.com/jeffheaton/aifh
Copyright 2013 by Jeff Heaton
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
For more information on Heaton Research copyrights, licenses
and trademarks visit:
http://www.heatonresearch.com/copyright
============================================================================================================
This example shows how to do very basic OCR using distance metrics. To use this program draw
a character under "Draw Here". Then input the letter that you drew in the box next to "Learn:".
Click the "Learn:" button and this character is added to the trained characters. Repeat this for
a few characters. Finally, draw a character and click recognize. Your previous characters will
be scanned and the character with the shortest distance is shown.
"""
__author__ = 'jheaton'
import Tkinter as tk
import tkMessageBox
import sys
from scipy.spatial import distance
class Application(tk.Frame):
DRAW_AREA = 256
DOWN_SAMPLE_WIDTH = 5
DOWN_SAMPLE_HEIGHT = 7
def __init__(self, master=None):
tk.Frame.__init__(self, master)
self.grid()
self.b1 = None
self.canvas_draw = None
self.x_old = None
self.y_old = None
self.button_quit = None
self.button_recognize = None
self.button_learn = None
self.entry_learn_char = None
self.button_clear = None
self.list_learned = None
self.learned_patterns = {}
self.create_widgets()
self.clear()
def create_widgets(self):
l1 = tk.Label(self, text="Draw Here")
l1.grid(row=0, column=0)
l1 = tk.Label(self, text="Trained Characters")
l1.grid(row=0, column=1, columnspan=2)
self.canvas_draw = tk.Canvas(self, width=Application.DRAW_AREA, height=Application.DRAW_AREA)
self.canvas_draw.grid(row=1, column=0)
self.list_learned = tk.Listbox(self, height=10, )
self.list_learned.grid(row=1, column=1, sticky=tk.N + tk.E + tk.S + tk.W, columnspan=2)
self.button_learn = tk.Button(self, text='Learn:', command=self.learn)
self.button_learn.grid(row=2, column=0, sticky=tk.N + tk.E + tk.S + tk.W)
self.entry_learn_char = tk.Entry(self)
self.entry_learn_char.grid(row=2, column=1, sticky=tk.N + tk.E + tk.S + tk.W, columnspan=2)
self.button_recognize = tk.Button(self, text='Recognize', command=self.recognize)
self.button_recognize.grid(row=3, column=0, sticky=tk.N + tk.E + tk.S + tk.W)
self.button_quit = tk.Button(self, text='Quit', command=self.quit)
self.button_quit.grid(row=3, column=1, sticky=tk.N + tk.E + tk.S + tk.W)
self.button_clear = tk.Button(self, text='Clear', command=self.clear)
self.button_clear.grid(row=3, column=2, sticky=tk.N + tk.E + tk.S + tk.W)
self.canvas_draw.bind("<Motion>", self.motion)
self.canvas_draw.bind("<ButtonPress-1>", self.b1down)
self.canvas_draw.bind("<ButtonRelease-1>", self.b1up)
def b1down(self, event):
self.b1 = "down" # you only want to draw when the button is down
# because "Motion" events happen -all the time-
def b1up(self, event):
self.b1 = "up"
self.x_old = None # reset the line when you let go of the button
self.y_old = None
def motion(self, event):
if self.b1 == "down":
if self.x_old is not None and self.y_old is not None:
event.widget.create_line(self.x_old, self.y_old, event.x, event.y, smooth=tk.TRUE)
#self.plot_line(self.xold,self.yold,event.x,event.y)
self.draw_data[event.y][event.x] = True
self.x_old = event.x
self.y_old = event.y
def vertical_line_clear(self, col):
for row in range(0, Application.DRAW_AREA):
if self.draw_data[row][col]:
return False
return True
def horizontal_line_clear(self, row):
for col in range(0, Application.DRAW_AREA):
if self.draw_data[row][col]:
return False
return True
def down_sample_region(self, x, y):
start_x = int(self.clip_left + (x * self.ratioX))
        start_y = int(self.clip_top + (y * self.ratioY))
        end_x = int(start_x + self.ratioX)
        end_y = int(start_y + self.ratioY)
        for yy in range(start_y, end_y + 1):
for xx in range(start_x, end_x + 1):
if self.draw_data[yy][xx]:
return True
return False
def down_sample(self):
# Find bounding rectangle.
# Find left side of bounding rectangle
self.clip_left = 0
for col in range(0, Application.DRAW_AREA):
if not self.vertical_line_clear(col):
self.clip_left = col
break
# Find right side of bounding rectangle
self.clip_right = 0
for col in range(Application.DRAW_AREA - 1, -1, -1):
if not self.vertical_line_clear(col):
self.clip_right = col
break
# Find top side of bounding rectangle
self.clip_top = 0
for row in range(0, Application.DRAW_AREA):
if not self.horizontal_line_clear(row):
self.clip_top = row
break
# Find bottom side of bounding rectangle
self.clip_bottom = 0
for row in range(Application.DRAW_AREA - 1, -1, -1):
if not self.horizontal_line_clear(row):
self.clip_bottom = row
break
self.canvas_draw.create_rectangle(
self.clip_left,
self.clip_top,
self.clip_right,
self.clip_bottom)
# Now down sample to 5x7.
result = []
self.ratioX = float(self.clip_right - self.clip_left) / Application.DOWN_SAMPLE_WIDTH
self.ratioY = float(self.clip_bottom - self.clip_top) / Application.DOWN_SAMPLE_HEIGHT
for y in range(0, Application.DOWN_SAMPLE_HEIGHT):
for x in range(0, Application.DOWN_SAMPLE_WIDTH):
if self.down_sample_region(x, y):
result.append(1)
else:
result.append(0)
return result
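    # Illustrative sketch (not part of the original program): down_sample()
    # flattens the clipped drawing into a 5x7 grid of 0/1 values (35 entries,
    # e.g. [1]*35 for an all-dark glyph); recognize() then compares such
    # vectors with scipy.spatial.distance.euclidean().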
def clear(self):
self.entry_learn_char.delete(0, tk.END)
self.canvas_draw.delete("all")
self.draw_data = [[False] * Application.DRAW_AREA for _ in range(Application.DRAW_AREA)]
def recognize(self):
best = "?"
best_distance = sys.float_info.max
sample = self.down_sample()
for key in self.learned_patterns.keys():
other_sample = self.learned_patterns[key]
dist = distance.euclidean(sample, other_sample)
if dist < best_distance:
best_distance = dist
best = key
tkMessageBox.showinfo("Learn", "I believe you drew a: " + best)
def learn(self):
learned_char = self.entry_learn_char.get()
if len(learned_char) > 1 or len(learned_char) == 0:
tkMessageBox.showinfo("Learn", "Please enter a single character to learn")
return
if learned_char in self.learned_patterns:
tkMessageBox.showinfo("Learn", "Already learned that character, please choose another")
return
self.list_learned.insert(tk.END, learned_char)
self.learned_patterns[learned_char] = self.down_sample()
# Clear and notify user.
self.clear()
tkMessageBox.showinfo("Learn", "Learned the pattern for: " + learned_char)
app = Application()
app.master.title('Python OCR')
app.mainloop() | apache-2.0 |
wagtail/wagtail | wagtail/tests/testapp/migrations/0058_blockcountsstreammodel_minmaxcountstreammodel.py | 6 | 1214 | # Generated by Django 2.1.7 on 2019-03-28 02:30
from django.db import migrations, models
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
dependencies = [
('tests', '0057_customdocumentwithauthor'),
]
operations = [
migrations.CreateModel(
name='BlockCountsStreamModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', wagtail.core.fields.StreamField([('text', wagtail.core.blocks.CharBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])),
],
),
migrations.CreateModel(
name='MinMaxCountStreamModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', wagtail.core.fields.StreamField([('text', wagtail.core.blocks.CharBlock()), ('rich_text', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())])),
],
),
]
| bsd-3-clause |
nhejazi/scikit-learn | sklearn/decomposition/tests/test_online_lda.py | 38 | 16445 | import sys
import numpy as np
from scipy.linalg import block_diag
from scipy.sparse import csr_matrix
from scipy.special import psi
from sklearn.decomposition import LatentDirichletAllocation
from sklearn.decomposition._online_lda import (_dirichlet_expectation_1d,
_dirichlet_expectation_2d)
from sklearn.utils.testing import assert_allclose
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_greater_equal
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import if_safe_multiprocessing_with_blas
from sklearn.utils.testing import assert_warns
from sklearn.exceptions import NotFittedError
from sklearn.externals.six.moves import xrange
from sklearn.externals.six import StringIO
def _build_sparse_mtx():
# Create 3 topics and each topic has 3 distinct words.
# (Each word only belongs to a single topic.)
n_components = 3
block = n_components * np.ones((3, 3))
blocks = [block] * n_components
X = block_diag(*blocks)
X = csr_matrix(X)
return (n_components, X)
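# Illustrative sketch (not part of the original tests): with n_components=3
# the matrix built above is a 9x9 block-diagonal matrix, e.g.
#
#   [[3, 3, 3, 0, 0, 0, 0, 0, 0],
#    [3, 3, 3, 0, 0, 0, 0, 0, 0],
#    [3, 3, 3, 0, 0, 0, 0, 0, 0],
#    [0, 0, 0, 3, 3, 3, 0, 0, 0],
#    ...
#    [0, 0, 0, 0, 0, 0, 3, 3, 3]]
#
# so words 0-2 co-occur only within topic 0, words 3-5 within topic 1, etc.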
def test_lda_default_prior_params():
# default prior parameter should be `1 / topics`
# and verbose params should not affect result
n_components, X = _build_sparse_mtx()
prior = 1. / n_components
lda_1 = LatentDirichletAllocation(n_components=n_components,
doc_topic_prior=prior,
topic_word_prior=prior, random_state=0)
lda_2 = LatentDirichletAllocation(n_components=n_components,
random_state=0)
topic_distr_1 = lda_1.fit_transform(X)
topic_distr_2 = lda_2.fit_transform(X)
assert_almost_equal(topic_distr_1, topic_distr_2)
def test_lda_fit_batch():
# Test LDA batch learning_offset (`fit` method with 'batch' learning)
rng = np.random.RandomState(0)
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components,
evaluate_every=1, learning_method='batch',
random_state=rng)
lda.fit(X)
correct_idx_grps = [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
for component in lda.components_:
# Find top 3 words in each LDA component
top_idx = set(component.argsort()[-3:][::-1])
assert_true(tuple(sorted(top_idx)) in correct_idx_grps)
def test_lda_fit_online():
# Test LDA online learning (`fit` method with 'online' learning)
rng = np.random.RandomState(0)
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components,
learning_offset=10., evaluate_every=1,
learning_method='online', random_state=rng)
lda.fit(X)
correct_idx_grps = [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
for component in lda.components_:
# Find top 3 words in each LDA component
top_idx = set(component.argsort()[-3:][::-1])
assert_true(tuple(sorted(top_idx)) in correct_idx_grps)
def test_lda_partial_fit():
# Test LDA online learning (`partial_fit` method)
# (same as test_lda_batch)
rng = np.random.RandomState(0)
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components,
learning_offset=10., total_samples=100,
random_state=rng)
for i in xrange(3):
lda.partial_fit(X)
correct_idx_grps = [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
for c in lda.components_:
top_idx = set(c.argsort()[-3:][::-1])
assert_true(tuple(sorted(top_idx)) in correct_idx_grps)
def test_lda_dense_input():
# Test LDA with dense input.
rng = np.random.RandomState(0)
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components,
learning_method='batch', random_state=rng)
lda.fit(X.toarray())
correct_idx_grps = [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
for component in lda.components_:
# Find top 3 words in each LDA component
top_idx = set(component.argsort()[-3:][::-1])
assert_true(tuple(sorted(top_idx)) in correct_idx_grps)
def test_lda_transform():
# Test LDA transform.
# Transform result cannot be negative and should be normalized
rng = np.random.RandomState(0)
X = rng.randint(5, size=(20, 10))
n_components = 3
lda = LatentDirichletAllocation(n_components=n_components,
random_state=rng)
X_trans = lda.fit_transform(X)
assert_true((X_trans > 0.0).any())
assert_array_almost_equal(np.sum(X_trans, axis=1),
np.ones(X_trans.shape[0]))
def test_lda_fit_transform():
# Test LDA fit_transform & transform
# fit_transform and transform result should be the same
for method in ('online', 'batch'):
rng = np.random.RandomState(0)
X = rng.randint(10, size=(50, 20))
lda = LatentDirichletAllocation(n_components=5, learning_method=method,
random_state=rng)
X_fit = lda.fit_transform(X)
X_trans = lda.transform(X)
assert_array_almost_equal(X_fit, X_trans, 4)
def test_lda_partial_fit_dim_mismatch():
# test `n_features` mismatch in `partial_fit`
rng = np.random.RandomState(0)
n_components = rng.randint(3, 6)
n_col = rng.randint(6, 10)
X_1 = np.random.randint(4, size=(10, n_col))
X_2 = np.random.randint(4, size=(10, n_col + 1))
lda = LatentDirichletAllocation(n_components=n_components,
learning_offset=5., total_samples=20,
random_state=rng)
lda.partial_fit(X_1)
assert_raises_regexp(ValueError, r"^The provided data has",
lda.partial_fit, X_2)
def test_invalid_params():
# test `_check_params` method
X = np.ones((5, 10))
invalid_models = (
('n_components', LatentDirichletAllocation(n_components=0)),
('learning_method',
LatentDirichletAllocation(learning_method='unknown')),
('total_samples', LatentDirichletAllocation(total_samples=0)),
('learning_offset', LatentDirichletAllocation(learning_offset=-1)),
)
for param, model in invalid_models:
regex = r"^Invalid %r parameter" % param
assert_raises_regexp(ValueError, regex, model.fit, X)
def test_lda_negative_input():
# test pass dense matrix with sparse negative input.
X = -np.ones((5, 10))
lda = LatentDirichletAllocation()
regex = r"^Negative values in data passed"
assert_raises_regexp(ValueError, regex, lda.fit, X)
def test_lda_no_component_error():
# test `transform` and `perplexity` before `fit`
rng = np.random.RandomState(0)
X = rng.randint(4, size=(20, 10))
lda = LatentDirichletAllocation()
regex = r"^no 'components_' attribute"
assert_raises_regexp(NotFittedError, regex, lda.transform, X)
assert_raises_regexp(NotFittedError, regex, lda.perplexity, X)
def test_lda_transform_mismatch():
# test `n_features` mismatch in partial_fit and transform
rng = np.random.RandomState(0)
X = rng.randint(4, size=(20, 10))
X_2 = rng.randint(4, size=(10, 8))
n_components = rng.randint(3, 6)
lda = LatentDirichletAllocation(n_components=n_components,
random_state=rng)
lda.partial_fit(X)
assert_raises_regexp(ValueError, r"^The provided data has",
lda.partial_fit, X_2)
@if_safe_multiprocessing_with_blas
def test_lda_multi_jobs():
n_components, X = _build_sparse_mtx()
# Test LDA batch training with multi CPU
for method in ('online', 'batch'):
rng = np.random.RandomState(0)
lda = LatentDirichletAllocation(n_components=n_components, n_jobs=2,
learning_method=method,
evaluate_every=1, random_state=rng)
lda.fit(X)
correct_idx_grps = [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
for c in lda.components_:
top_idx = set(c.argsort()[-3:][::-1])
assert_true(tuple(sorted(top_idx)) in correct_idx_grps)
@if_safe_multiprocessing_with_blas
def test_lda_partial_fit_multi_jobs():
# Test LDA online training with multi CPU
rng = np.random.RandomState(0)
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components, n_jobs=2,
learning_offset=5., total_samples=30,
random_state=rng)
for i in range(2):
lda.partial_fit(X)
correct_idx_grps = [(0, 1, 2), (3, 4, 5), (6, 7, 8)]
for c in lda.components_:
top_idx = set(c.argsort()[-3:][::-1])
assert_true(tuple(sorted(top_idx)) in correct_idx_grps)
def test_lda_perplexity_mismatch():
# test dimension mismatch in `perplexity` method
rng = np.random.RandomState(0)
n_components = rng.randint(3, 6)
n_samples = rng.randint(6, 10)
X = np.random.randint(4, size=(n_samples, 10))
lda = LatentDirichletAllocation(n_components=n_components,
learning_offset=5., total_samples=20,
random_state=rng)
lda.fit(X)
# invalid samples
invalid_n_samples = rng.randint(4, size=(n_samples + 1, n_components))
assert_raises_regexp(ValueError, r'Number of samples',
lda._perplexity_precomp_distr, X, invalid_n_samples)
# invalid topic number
invalid_n_components = rng.randint(4, size=(n_samples, n_components + 1))
assert_raises_regexp(ValueError, r'Number of topics',
lda._perplexity_precomp_distr, X,
invalid_n_components)
def test_lda_perplexity():
    # Test LDA perplexity for both batch and online training
# perplexity should be lower after each iteration
n_components, X = _build_sparse_mtx()
for method in ('online', 'batch'):
lda_1 = LatentDirichletAllocation(n_components=n_components,
max_iter=1, learning_method=method,
total_samples=100, random_state=0)
lda_2 = LatentDirichletAllocation(n_components=n_components,
max_iter=10, learning_method=method,
total_samples=100, random_state=0)
lda_1.fit(X)
perp_1 = lda_1.perplexity(X, sub_sampling=False)
lda_2.fit(X)
perp_2 = lda_2.perplexity(X, sub_sampling=False)
assert_greater_equal(perp_1, perp_2)
perp_1_subsampling = lda_1.perplexity(X, sub_sampling=True)
perp_2_subsampling = lda_2.perplexity(X, sub_sampling=True)
assert_greater_equal(perp_1_subsampling, perp_2_subsampling)
def test_lda_score():
# Test LDA score for batch training
# score should be higher after each iteration
n_components, X = _build_sparse_mtx()
for method in ('online', 'batch'):
lda_1 = LatentDirichletAllocation(n_components=n_components,
max_iter=1, learning_method=method,
total_samples=100, random_state=0)
lda_2 = LatentDirichletAllocation(n_components=n_components,
max_iter=10, learning_method=method,
total_samples=100, random_state=0)
lda_1.fit_transform(X)
score_1 = lda_1.score(X)
lda_2.fit_transform(X)
score_2 = lda_2.score(X)
assert_greater_equal(score_2, score_1)
def test_perplexity_input_format():
# Test LDA perplexity for sparse and dense input
# score should be the same for both dense and sparse input
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components, max_iter=1,
learning_method='batch',
total_samples=100, random_state=0)
lda.fit(X)
perp_1 = lda.perplexity(X)
perp_2 = lda.perplexity(X.toarray())
assert_almost_equal(perp_1, perp_2)
def test_lda_score_perplexity():
# Test the relationship between LDA score and perplexity
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components, max_iter=10,
random_state=0)
lda.fit(X)
perplexity_1 = lda.perplexity(X, sub_sampling=False)
score = lda.score(X)
perplexity_2 = np.exp(-1. * (score / np.sum(X.data)))
assert_almost_equal(perplexity_1, perplexity_2)
def test_lda_fit_perplexity():
# Test that the perplexity computed during fit is consistent with what is
# returned by the perplexity method
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components, max_iter=1,
learning_method='batch', random_state=0,
evaluate_every=1)
lda.fit(X)
# Perplexity computed at end of fit method
perplexity1 = lda.bound_
# Result of perplexity method on the train set
perplexity2 = lda.perplexity(X)
assert_almost_equal(perplexity1, perplexity2)
def test_doc_topic_distr_deprecation():
# Test that the appropriate warning message is displayed when a user
# attempts to pass the doc_topic_distr argument to the perplexity method
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components, max_iter=1,
learning_method='batch',
total_samples=100, random_state=0)
distr1 = lda.fit_transform(X)
distr2 = None
assert_warns(DeprecationWarning, lda.perplexity, X, distr1)
assert_warns(DeprecationWarning, lda.perplexity, X, distr2)
def test_lda_empty_docs():
"""Test LDA on empty document (all-zero rows)."""
Z = np.zeros((5, 4))
for X in [Z, csr_matrix(Z)]:
lda = LatentDirichletAllocation(max_iter=750).fit(X)
assert_almost_equal(lda.components_.sum(axis=0),
np.ones(lda.components_.shape[1]))
def test_dirichlet_expectation():
"""Test Cython version of Dirichlet expectation calculation."""
x = np.logspace(-100, 10, 10000)
expectation = np.empty_like(x)
_dirichlet_expectation_1d(x, 0, expectation)
assert_allclose(expectation, np.exp(psi(x) - psi(np.sum(x))),
atol=1e-19)
x = x.reshape(100, 100)
assert_allclose(_dirichlet_expectation_2d(x),
psi(x) - psi(np.sum(x, axis=1)[:, np.newaxis]),
rtol=1e-11, atol=3e-9)
def check_verbosity(verbose, evaluate_every, expected_lines,
expected_perplexities):
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_components=n_components, max_iter=3,
learning_method='batch',
verbose=verbose,
evaluate_every=evaluate_every,
random_state=0)
out = StringIO()
old_out, sys.stdout = sys.stdout, out
try:
lda.fit(X)
finally:
sys.stdout = old_out
n_lines = out.getvalue().count('\n')
n_perplexity = out.getvalue().count('perplexity')
assert_equal(expected_lines, n_lines)
assert_equal(expected_perplexities, n_perplexity)
def test_verbosity():
for verbose, evaluate_every, expected_lines, expected_perplexities in [
(False, 1, 0, 0),
(False, 0, 0, 0),
(True, 0, 3, 0),
(True, 1, 3, 3),
(True, 2, 3, 1),
]:
yield (check_verbosity, verbose, evaluate_every, expected_lines,
expected_perplexities)
def test_lda_n_topics_deprecation():
n_components, X = _build_sparse_mtx()
lda = LatentDirichletAllocation(n_topics=10, learning_method='batch')
assert_warns(DeprecationWarning, lda.fit, X)
| bsd-3-clause |
BigDataehealthTools/GNOME_Viewer | GNOME_Viewer/urls.py | 1 | 1584 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Authors of this page : Beatriz Kanzki & Victor Dupuy
from django.conf.urls import url
from django.contrib import admin
from django.conf.urls.static import static
from Web import views as web
from GenomeViewer import views as GenomeViewer
# Config variables
import settings
urlpatterns = [
url(r'^$', web.index),
url(r'^adamGenomeViewer/(?P<chromosome>[0-9]{0,2})/(?P<position>[0-9]{0,50})/(?P<rsid>[0-9a-zA-Z_ ()]{0,200})/(?P<userWidth>[0-9]{0,6})/(?P<userHeight>[0-9]{0,6})', GenomeViewer.adamGenomeViewer),
url(r'^uploadFile/', GenomeViewer.uploadFile),
url(r'^extractHeader/', GenomeViewer.extractHeader),
url(r'^fileGenomeViewer/', GenomeViewer.fileGenomeViewer),
url(r'^admin/', admin.site.urls),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| apache-2.0 |
heavyengineer/p2pool | p2pool/test/bitcoin/test_getwork.py | 275 | 4273 | import unittest
from p2pool.bitcoin import getwork, data as bitcoin_data
class Test(unittest.TestCase):
def test_all(self):
cases = [
{
'target': '0000000000000000000000000000000000000000000000f2b944000000000000',
'midstate': '5982f893102dec03e374b472647c4f19b1b6d21ae4b2ac624f3d2f41b9719404',
'hash1': '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'data': '0000000163930d52a5ffca79b29b95a659a302cd4e1654194780499000002274000000002e133d9e51f45bc0886d05252038e421e82bff18b67dc14b90d9c3c2f422cd5c4dd4598e1a44b9f200000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000'
},
{
'midstate' : 'f4a9b048c0cb9791bc94b13ee0eec21e713963d524fd140b58bb754dd7b0955f',
'data' : '000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000',
'hash1' : '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target' : '0000000000000000000000000000000000000000000000cfbb0a000000000000',
'extrathing': 'hi!',
},
{
'data' : '000000019a1d7342fb62090bda686b22d90f9f73d0f5c418b9c980cd0000011a00000000680b07c8a2f97ecd831f951806857e09f98a3b81cdef1fa71982934fef8dc3444e18585d1a0abbcf00000000000000800000000000000000000000000000000000000000000000000000000000000000000000000000000080020000',
'hash1' : '00000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000010000',
'target' : '0000000000000000000000000000000000000000000000cfbb0a000000000000',
'extrathing': 'hi!',
},
]
for case in cases:
ba = getwork.BlockAttempt.from_getwork(case)
extra = dict(case)
del extra['data'], extra['hash1'], extra['target']
extra.pop('midstate', None)
getwork_check = ba.getwork(**extra)
assert getwork_check == case or dict((k, v) for k, v in getwork_check.iteritems() if k != 'midstate') == case
case2s = [
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
0x44b9f20000000000000000000000000000000000000000000000,
),
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
432*2**230,
),
getwork.BlockAttempt(
1,
0x148135e10208db85abb62754341a392eab1f186aab077a831cf7,
0x534ea08be1ab529f484369344b6d5423ef5a0767db9b3ebb4e182bbb67962520,
1305759879,
bitcoin_data.FloatingInteger.from_target_upper_bound(0x44b9f20000000000000000000000000000000000000000000000),
7*2**240,
)
]
for case2 in case2s:
assert getwork.BlockAttempt.from_getwork(case2.getwork()) == case2
assert getwork.BlockAttempt.from_getwork(case2.getwork(ident='hi')) == case2
case2 = case2.update(previous_block=case2.previous_block - 10)
assert getwork.BlockAttempt.from_getwork(case2.getwork()) == case2
assert getwork.BlockAttempt.from_getwork(case2.getwork(ident='hi')) == case2
| gpl-3.0 |
theonewolf/siegvswolf | lib/requests/status_codes.py | 695 | 3136 | # -*- coding: utf-8 -*-
from .structures import LookupDict
_codes = {
# Informational.
100: ('continue',),
101: ('switching_protocols',),
102: ('processing',),
103: ('checkpoint',),
122: ('uri_too_long', 'request_uri_too_long'),
200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
201: ('created',),
202: ('accepted',),
203: ('non_authoritative_info', 'non_authoritative_information'),
204: ('no_content',),
205: ('reset_content', 'reset'),
206: ('partial_content', 'partial'),
207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
208: ('already_reported',),
226: ('im_used',),
# Redirection.
300: ('multiple_choices',),
301: ('moved_permanently', 'moved', '\\o-'),
302: ('found',),
303: ('see_other', 'other'),
304: ('not_modified',),
305: ('use_proxy',),
306: ('switch_proxy',),
307: ('temporary_redirect', 'temporary_moved', 'temporary'),
308: ('resume_incomplete', 'resume'),
# Client Error.
400: ('bad_request', 'bad'),
401: ('unauthorized',),
402: ('payment_required', 'payment'),
403: ('forbidden',),
404: ('not_found', '-o-'),
405: ('method_not_allowed', 'not_allowed'),
406: ('not_acceptable',),
407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
408: ('request_timeout', 'timeout'),
409: ('conflict',),
410: ('gone',),
411: ('length_required',),
412: ('precondition_failed', 'precondition'),
413: ('request_entity_too_large',),
414: ('request_uri_too_large',),
415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
417: ('expectation_failed',),
418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
422: ('unprocessable_entity', 'unprocessable'),
423: ('locked',),
424: ('failed_dependency', 'dependency'),
425: ('unordered_collection', 'unordered'),
426: ('upgrade_required', 'upgrade'),
428: ('precondition_required', 'precondition'),
429: ('too_many_requests', 'too_many'),
431: ('header_fields_too_large', 'fields_too_large'),
444: ('no_response', 'none'),
449: ('retry_with', 'retry'),
450: ('blocked_by_windows_parental_controls', 'parental_controls'),
451: ('unavailable_for_legal_reasons', 'legal_reasons'),
499: ('client_closed_request',),
# Server Error.
500: ('internal_server_error', 'server_error', '/o\\', '✗'),
501: ('not_implemented',),
502: ('bad_gateway',),
503: ('service_unavailable', 'unavailable'),
504: ('gateway_timeout',),
505: ('http_version_not_supported', 'http_version'),
506: ('variant_also_negotiates',),
507: ('insufficient_storage',),
509: ('bandwidth_limit_exceeded', 'bandwidth'),
510: ('not_extended',),
}
codes = LookupDict(name='status_codes')
for (code, titles) in list(_codes.items()):
for title in titles:
setattr(codes, title, code)
if not title.startswith('\\'):
setattr(codes, title.upper(), code)
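def _demo_status_code_lookup():
    # Usage sketch (not part of the original module): every title maps to the
    # same integer, and titles not starting with a backslash also get an
    # UPPERCASE alias, as the loop above shows.
    assert codes.ok == codes.OK == 200
    assert codes.not_found == 404
    assert codes.teapot == 418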
| mit |
southpawtech/TACTIC-DEV | src/tactic/ui/container/menu_wdg.py | 1 | 18674 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['FingerMenuWdg', 'MenuWdg', 'GearMenuWdg','Menu','MenuItem']
from pyasm.common import Common, TacticException
from pyasm.web import HtmlElement, SpanWdg, DivWdg, FloatDivWdg, WebContainer, Widget, Table
from tactic.ui.common import BaseRefreshWdg
from smart_menu_wdg import SmartMenu
class FingerMenuWdg(BaseRefreshWdg):
'''Container widget contains a menu. Each child widget is a selection
item. Best used for a small menu for individual table element widgets
@usage
menu = FingerMenuWdg(mode='horizontal', width=40, height =18, top_class='note_edit_panel')
menu_item = MenuItem('action', label='edit')
menu_item.add_behavior({'type': 'click', 'cbjs_action': 'spt.alert(123)'})
menu.add(menu_item)
'''
def __init__(my, **kwargs):
super(FingerMenuWdg, my).__init__(**kwargs)
my.items = []
# this css class identifies a container for the MenuWdg in which one can store hidden input and other info
my.menu_top_class = kwargs.get('top_class')
assert my.menu_top_class
def get_args_keys(my):
return {
'id': 'The id of the top widget',
'width': 'The width of the popup',
'background': 'style of background',
'font_size': 'font size of menu item',
        'mode': 'horizontal|vertical',
'top_class': "a css class that uniquely identifies this menu's container",
'force': 'left|right'
}
def add(my, menu_item):
my.items.append(menu_item)
def _add_spacer_row(my, menu_table, height, label_width):
tbody = menu_table.add_tbody()
tbody.add_style("display","table-row-group")
tr = menu_table.add_row()
tr.add_looks( "smenu" )
# label
td = menu_table.add_cell()
td.add_style("width", ("%spx" % label_width))
td.add_style("height", ("%spx" % height))
def init(my):
my.mode = my.kwargs.get('mode')
if not my.mode:
my.mode = 'vertical'
def set_activator_over(my, item, activator_match_class, activator_parent_class='', js_action='', top_class='', offset={'x':0, 'y':0}):
        '''Usually called in handle_layout_behaviours() for best relay performance.
        @item: the layout widget (i.e. TableLayoutWdg) to add this behavior to
        @activator_match_class: class of the element the menu appears beside when the mouse fires mouseover
        @activator_parent_class: (optional) class of the element assigned to menu.activator_el
        (a close parent of the element with activator_match_class); using it gives a more precise position reference
        @top_class: a common top class for all; this defaults to spt_layout_top
        @js_action: extra js action to run when the mouse is over the activator'''
main_action = '''
var parent = bvr.src_el.getParent('.' + bvr.top_class);
if (!parent) {
bvr.top_class = 'spt_tab_content';
parent = bvr.src_el.getParent('.' + bvr.top_class);
}
var menu_top = parent.getElement('.' + bvr.menu_top_class);
var menu = menu_top.getElement('.spt_menu_top');
// don't use getSize()
var menu_width = 100;
var finger_div = menu.getElement('.spt_finger_menu');
if (finger_div)
menu_width = parseInt(finger_div.getStyle('width'), 10);
var activator_parent = bvr.activator_parent_class ? bvr.src_el.getParent('.' + bvr.activator_parent_class) : bvr.src_el;
var panel = bvr.src_el.getParent(".spt_popup");
var pos = bvr.src_el.getPosition();
var body = $(document.body);
var scroll_top = body.scrollTop;
var scroll_left = body.scrollLeft;
pos.x = pos.x - scroll_left;
pos.y = pos.y - scroll_top;
var size = activator_parent.getSize();
var x_offset = size ? size.x : 400;
var client_width = document.body.clientWidth;
/*
console.log("offset_X " + x_offset)
console.log("pos X " + pos.x)
console.log("menu width" + menu_width)
*/
var is_left;
var force = finger_div.getAttribute("spt_finger_force");
if (force) {
is_left = force == "left";
}
else if ((x_offset+ pos.x + menu_width) > client_width ) {
is_left = true;
}
// store the variable for activator out calculation
menu_top.is_left = is_left;
if (is_left) {
pos.x = pos.x - menu_width + 3;
if (finger_div) {
finger_div.setStyle("border-width", "1px 0px 1px 1px");
finger_div.setStyle("border-radius", "12px 0px 0px 12px");
finger_div.setStyle("padding-left", "10px");
finger_div.setStyle("padding-right", "0px");
}
}
else {
pos.x = pos.x + x_offset;
if (finger_div) {
finger_div.setStyle("border-width", "1px 1px 1px 0px");
finger_div.setStyle("border-radius", "0px 12px 12px 0px");
finger_div.setStyle("padding-left", "0px");
finger_div.setStyle("padding-right", "10px");
}
}
if (menu_top) {
//for refresh purpose called by the menu_item's cbjs_action
menu.activator_el = activator_parent;
menu_top.position({position: 'upperLeft', relativeTo: body, offset: pos});
//menu_top.setStyle("left", left_pos);
//menu_top.setStyle("top", pos.y );
menu_top.setStyle("z-index", 1000);
spt.show(menu_top);
spt.show(menu);
spt.body.add_focus_element(menu_top);
}
'''
if not top_class:
top_class = "spt_layout_top"
if js_action:
main_action = '''%s
%s'''%(main_action, js_action)
item.add_relay_behavior({
'type': 'mouseover',
'bvr_match_class': activator_match_class,
'activator_parent_class': activator_parent_class,
'top_class': top_class,
'menu_top_class': my.menu_top_class,
'cbjs_action': main_action,
'offset': offset
})
def set_activator_out(my, item, activator_match_class, top_class='', js_action=''):
        '''Usually called in handle_layout_behaviours() for best relay performance.
        @item: the layout widget (i.e. TableLayoutWdg) to add this behavior to
        @activator_match_class: class of the element from which the mouse fires mouseleave to hide the menu
        @top_class: a common top class for all; this defaults to spt_layout_top
        @js_action: extra js action to run when the mouse leaves the activator'''
main_action = '''
var target = spt.get_event_target( evt );
var edit_menu = bvr.src_el.getParent('.'+bvr.top_class).getElement('.' + bvr.menu_top_class);
if (!edit_menu) {
log.critical('edit_menu not found!')
//return;
}
else {
var menu_pos = edit_menu.getPosition();
// when is_left, evt.x tends to be 80 pixels bigger, so increase the tolerance
var tolerance = edit_menu.is_left ? 5000 : 1500;
var diff = (menu_pos.x - evt.page.x) * (menu_pos.y - evt.page.y);
if (Math.abs(diff) > tolerance) {
spt.hide(edit_menu);
}
else {
spt.finger_menu.timeout_id = setTimeout( function() {
spt.hide(edit_menu);
}, 500 )
}
}
'''
if not top_class:
#top_class = "spt_table"
top_class = "spt_layout_top"
if js_action:
main_action = '''%s
%s'''%(main_action, js_action)
item.add_relay_behavior({
'type': 'mouseleave',
'bvr_match_class': activator_match_class,
'menu_top_class': my.menu_top_class,
'top_class': top_class,
'cbjs_action': main_action
} )
def get_display(my):
#content.add_event("oncontextmenu", "spt.side_bar.manage_context_menu_action_cbk(); return false")
context_menu = DivWdg()
context_menu.add_class('spt_menu_top')
context_menu.add_behavior( {
'type': 'load',
'cbjs_action': '''
spt.finger_menu = {};
spt.finger_menu.timeout_id = -1;
'''
} )
context_menu.add_behavior( {
'type': 'mouseover',
'cbjs_action': '''
if (spt.finger_menu.timeout_id != -1) {
clearTimeout(spt.finger_menu.timeout_id);
spt.finger_menu.timeout_id = -1;
}
'''
} )
#context_menu.set_box_shadow(color='#fff')
# this may not be needed as it is set in JS
context_menu.add_style("z-index: 200")
# set up what happens when the mouse leaves the actual menu
my._set_menu_out(context_menu)
width = my.kwargs.get('width')
height = my.kwargs.get('height')
if not height:
height = 20
if not width:
width = 35
font_size = my.kwargs.get('font_size')
if not font_size:
font_size = 'smaller'
force = my.kwargs.get("force")
if my.mode == 'horizontal':
div = DivWdg(css='spt_finger_menu')
if force:
div.add_attr("spt_finger_force", force)
div.add_style("border-color: #aaa")
div.add_style("border-style: solid")
if force == "left":
div.add_style("border-width: 1px 0px 1px 1px")
div.add_style("border-radius: 12px 0px 0px 12px")
else:
div.add_style("border-width: 1px 1px 1px 0px")
div.add_style("border-radius: 0px 12px 12px 0px")
div.set_box_shadow(value="0px 0px 2px 1px")
#div.add_style("z-index: 1000")
total_width = width * len(my.items) + 15
div.add_style('width', total_width)
div.add_styles('height: %spx; padding: 2px;' %height)
context_menu.add(div)
div.add_color('background','background', -10)
palette = div.get_palette()
sb_title_bg = palette.color('side_bar_title')
bg_color = div.get_color('background', -10)
color = div.get_color('color')
for item in my.items:
mouse_enter_bvr = {'type':'mouseenter', 'cbjs_action': '''
bvr.src_el.setStyles({'background': '%s', 'color': 'white'})''' %sb_title_bg}
mouse_leave_bvr = {'type':'mouseleave', 'cbjs_action': '''
bvr.src_el.setStyles({'background': '%s', 'color': '%s'})''' %(bg_color, color)}
menu_item = FloatDivWdg(css='unselectable hand')
menu_item.add_color('background','background', -10)
menu_item.add(item.get_option('label'))
menu_item.add_behavior( mouse_enter_bvr )
menu_item.add_behavior( mouse_leave_bvr )
# add the passed-in bvr
bvr = item.get_option('bvr_cb')
menu_item.add_behavior(bvr )
menu_item.add_styles('margin: 0px 0 0 0; padding: 2px 0 2px 0; text-align: center; font-size: %s; width: %s; height: %spx'%(font_size, width, height-4))
menu_item.add_behavior({'type': 'click_up',
'cbjs_action': '''var menu = bvr.src_el.getParent('.spt_menu_top'); spt.hide(menu);'''})
div.add(menu_item)
else:
# this width only matters in vertical mode
context_menu.add_style("width: %s" %width)
menu_table = Table()
menu_table.add_styles( "text-align: left; text-indent: 4px; border-collapse: collapse; cell-padding: 8px; border-radius: 32px;" )
context_menu.add(menu_table)
my._add_spacer_row(menu_table, 3, width)
for widget in my.widgets:
tbody = menu_table.add_tbody()
tbody.add_style("display","table-row-group")
tr = menu_table.add_row()
tr.add_looks( "smenu" )
#tr.add_class( "SPT_SMENU_ENTRY" )
hover_bvr = {'type':'hover', 'add_looks': 'smenu_hilite'}
#'cbjs_action_over': 'spt.smenu.entry_over( evt, bvr );',
#'cbjs_action_out': 'spt.smenu.entry_out( evt, bvr );' }
tr.add_behavior( hover_bvr )
menu_item = menu_table.add_cell()
font_size = '4px'
menu_item.add_styles('padding: 0px 0 0 6px; font-size: %s; width: %s; height: 16px'%(font_size, width))
menu_item.add_behavior({'type': 'click_up',
'cbjs_action': '''var menu = bvr.src_el.getParent('.spt_menu_top'); spt.hide(menu);'''})
menu_item.add(widget)
my._add_spacer_row(menu_table, 3, width)
return context_menu
def _set_menu_out(my, item):
        '''Set up what happens when the mouse leaves the actual menu: it stays visible for 4 seconds.'''
item.add_behavior({
'type': 'mouseleave',
'cbjs_action': '''
var edit_menus = document.getElements('.spt_menu_top');
setTimeout(function(){
for (var i = 0; i < edit_menus.length; i++) {
var edit_menu = edit_menus[i];
var menu_pos = edit_menu.getPosition();
var diff = (menu_pos.x - evt.page.x) * (menu_pos.y - evt.page.y);
// smaller tolerance here, but with 4 seconds delay
if (Math.abs(diff) > 500) {
spt.hide(edit_menu);
}
}
}, 4000);
'''
})
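def _example_finger_menu(layout):
    '''Hypothetical sketch (not part of the original module): wiring a
    FingerMenuWdg to a layout widget, typically from the layout's
    handle_layout_behaviours(). `layout` is assumed to be any widget that
    supports add_relay_behavior; the css class names are assumptions.'''
    menu = FingerMenuWdg(mode='horizontal', width=40, height=18,
                         top_class='note_edit_panel')
    menu_item = MenuItem('action', label='edit')
    menu_item.add_behavior({'type': 'click', 'cbjs_action': 'spt.alert(123)'})
    menu.add(menu_item)
    menu.set_activator_over(layout, 'spt_note_cell')
    menu.set_activator_out(layout, 'spt_note_cell')
    return menu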
# DEPRECATED: use FingerMenuWdg
class MenuWdg(FingerMenuWdg):
pass
class GearMenuWdg(BaseRefreshWdg):
def init(my):
my.btn_dd = DivWdg()
my.menus = []
def add_style(my, name, value=None):
my.btn_dd.add_style(name, value)
def add(my, menu):
my.menus.append(menu.get_data())
def get_display(my):
# create the gear menu
btn_dd = my.btn_dd
btn_dd.add_styles("width: 36px; height: 18px; padding: none; padding-top: 1px;")
btn_dd.add( "<img src='/context/icons/common/transparent_pixel.gif' alt='' " \
"title='TACTIC Actions Menu' class='tactic_tip' " \
"style='text-decoration: none; padding: none; margin: none; width: 4px;' />" )
btn_dd.add( "<img src='/context/icons/silk/cog.png' alt='' " \
"title='TACTIC Actions Menu' class='tactic_tip' " \
"style='text-decoration: none; padding: none; margin: none;' />" )
btn_dd.add( "<img src='/context/icons/silk/bullet_arrow_down.png' alt='' " \
"title='TACTIC Actions Menu' class='tactic_tip' " \
"style='text-decoration: none; padding: none; margin: none;' />" )
btn_dd.add_behavior( { 'type': 'hover',
'mod_styles': 'background-image: url(/context/icons/common/gear_menu_btn_bkg_hilite.png); ' \
'background-repeat: no-repeat;' } )
smenu_set = SmartMenu.add_smart_menu_set( btn_dd, { 'DG_TABLE_GEAR_MENU': my.menus } )
SmartMenu.assign_as_local_activator( btn_dd, "DG_TABLE_GEAR_MENU", True )
return btn_dd
class Menu(object):
def __init__(my, menu_tag_suffix='MAIN', width=110, allow_icons=False):
my.opt_spec_list = []
my.data = { 'menu_tag_suffix': menu_tag_suffix, 'width': width, 'opt_spec_list': my.opt_spec_list}
def add(my, menu_item):
options = menu_item.get_options()
my.opt_spec_list.append(options)
def set_menu_tag_suffix(my, suffix):
my.data['menu_tag_suffix'] = suffix
def get_data(my):
return my.data
    def add_option(my, name, value):
        my.data[name] = value
def set_allow_icons(my, flag=True):
my.data['allow_icons'] = flag
def set_setup_cbfn(my, func):
my.data['setup_cbfn'] = func
class MenuItem(object):
def __init__(my, type, label="Label", icon=None):
assert type in ['title', 'action', 'submenu', 'separator']
if type == 'separator':
my.options = { "type": type }
else:
if icon:
my.options = { "type": type, "label": label, "icon": icon }
else:
my.options = { "type": type, "label": label }
def get_options(my):
return my.options
def get_option(my, key):
return my.options.get(key)
def set_option(my, key, value):
my.options[key] = value
def set_type(my, type):
my.options['type'] = type
def set_label(my, label):
my.options['label'] = label
def set_icon(my, icon):
my.options['icon'] = icon
def set_behavior(my, behavior):
my.options['bvr_cb'] = behavior
def add_behavior(my, behavior):
my.options['bvr_cb'] = behavior
def set_submenu_tag_suffix(my, suffix):
my.options['submenu_tag_suffix'] = suffix
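def _example_gear_menu():
    '''Hypothetical sketch (not part of the original module): assembling a
    gear menu from Menu and MenuItem, mirroring how table layout widgets use
    this API. The labels and the click behavior are illustrative only.'''
    menu = Menu(width=120)
    menu.add(MenuItem('title', label='Actions'))
    edit = MenuItem('action', label='Edit')
    edit.add_behavior({'type': 'click_up', 'cbjs_action': 'spt.alert("edit");'})
    menu.add(edit)
    menu.add(MenuItem('separator'))
    gear = GearMenuWdg()
    gear.add(menu)
    return gear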
| epl-1.0 |
wimac/home | Dropbox/skel/bin/sick-beard/cherrypy/_cpconfig.py | 35 | 10328 | """Configuration system for CherryPy.
Configuration in CherryPy is implemented via dictionaries. Keys are strings
which name the mapped value, which may be of any type.
Architecture
------------
CherryPy Requests are part of an Application, which runs in a global context,
and configuration data may apply to any of those three scopes:
Global: configuration entries which apply everywhere are stored in
cherrypy.config.
Application: entries which apply to each mounted application are stored
on the Application object itself, as 'app.config'. This is a two-level
dict where each key is a path, or "relative URL" (for example, "/" or
"/path/to/my/page"), and each value is a config dict. Usually, this
data is provided in the call to tree.mount(root(), config=conf),
although you may also use app.merge(conf).
Request: each Request object possesses a single 'Request.config' dict.
Early in the request process, this dict is populated by merging global
config entries, Application entries (whose path equals or is a parent
of Request.path_info), and any config acquired while looking up the
page handler (see next).
Declaration
-----------
Configuration data may be supplied as a Python dictionary, as a filename,
or as an open file object. When you supply a filename or file, CherryPy
uses Python's builtin ConfigParser; you declare Application config by
writing each path as a section header:
[/path/to/my/page]
request.stream = True
To declare global configuration entries, place them in a [global] section.
You may also declare config entries directly on the classes and methods
(page handlers) that make up your CherryPy application via the '_cp_config'
attribute. For example:
class Demo:
_cp_config = {'tools.gzip.on': True}
def index(self):
return "Hello world"
index.exposed = True
index._cp_config = {'request.show_tracebacks': False}
Note, however, that this behavior is only guaranteed for the default
dispatcher. Other dispatchers may have different restrictions on where
you can attach _cp_config attributes.
Namespaces
----------
Configuration keys are separated into namespaces by the first "." in the key.
Current namespaces:
engine: Controls the 'application engine', including autoreload.
These can only be declared in the global config.
tree: Grafts cherrypy.Application objects onto cherrypy.tree.
These can only be declared in the global config.
hooks: Declares additional request-processing functions.
log: Configures the logging for each application.
These can only be declared in the global or / config.
request: Adds attributes to each Request.
response: Adds attributes to each Response.
server: Controls the default HTTP server via cherrypy.server.
These can only be declared in the global config.
tools: Runs and configures additional request-processing packages.
wsgi: Adds WSGI middleware to an Application's "pipeline".
These can only be declared in the app's root config ("/").
checker: Controls the 'checker', which looks for common errors in
app state (including config) when the engine starts.
Global config only.
The only key that does not exist in a namespace is the "environment" entry.
This special entry 'imports' other config entries from a template stored in
cherrypy._cpconfig.environments[environment]. It only applies to the global
config, and only when you use cherrypy.config.update.
You can define your own namespaces to be called at the Global, Application,
or Request level, by adding a named handler to cherrypy.config.namespaces,
app.namespaces, or app.request_class.namespaces. The name can
be any string, and the handler must be either a callable or a (Python 2.5
style) context manager.
"""
try:
set
except NameError:
from sets import Set as set
import cherrypy
from cherrypy.lib import reprconf
# Deprecated in CherryPy 3.2--remove in 3.3
NamespaceSet = reprconf.NamespaceSet
def merge(base, other):
"""Merge one app config (from a dict, file, or filename) into another.
If the given config is a filename, it will be appended to
the list of files to monitor for "autoreload" changes.
"""
if isinstance(other, basestring):
cherrypy.engine.autoreload.files.add(other)
# Load other into base
for section, value_map in reprconf.as_dict(other).items():
if not isinstance(value_map, dict):
raise ValueError(
"Application config must include section headers, but the "
"config you tried to merge doesn't have any sections. "
"Wrap your config in another dict with paths as section "
"headers, for example: {'/': config}.")
base.setdefault(section, {}).update(value_map)
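def _demo_merge():
    # Illustrative sketch (not part of the original module): app config must
    # be wrapped in path-section headers, exactly as the ValueError above
    # enforces.
    base = {'/': {'tools.gzip.on': True}}
    merge(base, {'/': {'request.show_tracebacks': False},
                 '/static': {'tools.staticdir.on': True}})
    return base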
class Config(reprconf.Config):
"""The 'global' configuration data for the entire CherryPy process."""
def update(self, config):
"""Update self from a dict, file or filename."""
if isinstance(config, basestring):
# Filename
cherrypy.engine.autoreload.files.add(config)
reprconf.Config.update(self, config)
def _apply(self, config):
"""Update self from a dict."""
if isinstance(config.get("global", None), dict):
if len(config) > 1:
cherrypy.checker.global_config_contained_paths = True
config = config["global"]
if 'tools.staticdir.dir' in config:
config['tools.staticdir.section'] = "global"
reprconf.Config._apply(self, config)
def __call__(self, *args, **kwargs):
"""Decorator for page handlers to set _cp_config."""
if args:
raise TypeError(
"The cherrypy.config decorator does not accept positional "
"arguments; you must use keyword arguments.")
def tool_decorator(f):
if not hasattr(f, "_cp_config"):
f._cp_config = {}
for k, v in kwargs.items():
f._cp_config[k] = v
return f
return tool_decorator
Config.environments = environments = {
"staging": {
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'request.show_mismatched_params': False,
},
"production": {
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'request.show_mismatched_params': False,
'log.screen': False,
},
"embedded": {
# For use with CherryPy embedded in another deployment stack.
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': False,
'request.show_mismatched_params': False,
'log.screen': False,
'engine.SIGHUP': None,
'engine.SIGTERM': None,
},
"test_suite": {
'engine.autoreload_on': False,
'checker.on': False,
'tools.log_headers.on': False,
'request.show_tracebacks': True,
'request.show_mismatched_params': True,
'log.screen': False,
},
}
def _server_namespace_handler(k, v):
"""Config handler for the "server" namespace."""
atoms = k.split(".", 1)
if len(atoms) > 1:
# Special-case config keys of the form 'server.servername.socket_port'
# to configure additional HTTP servers.
if not hasattr(cherrypy, "servers"):
cherrypy.servers = {}
servername, k = atoms
if servername not in cherrypy.servers:
from cherrypy import _cpserver
cherrypy.servers[servername] = _cpserver.Server()
# On by default, but 'on = False' can unsubscribe it (see below).
cherrypy.servers[servername].subscribe()
if k == 'on':
if v:
cherrypy.servers[servername].subscribe()
else:
cherrypy.servers[servername].unsubscribe()
else:
setattr(cherrypy.servers[servername], k, v)
else:
setattr(cherrypy.server, k, v)
Config.namespaces["server"] = _server_namespace_handler
def _engine_namespace_handler(k, v):
"""Backward compatibility handler for the "engine" namespace."""
engine = cherrypy.engine
if k == 'autoreload_on':
if v:
engine.autoreload.subscribe()
else:
engine.autoreload.unsubscribe()
elif k == 'autoreload_frequency':
engine.autoreload.frequency = v
elif k == 'autoreload_match':
engine.autoreload.match = v
elif k == 'reload_files':
engine.autoreload.files = set(v)
elif k == 'deadlock_poll_freq':
engine.timeout_monitor.frequency = v
elif k == 'SIGHUP':
engine.listeners['SIGHUP'] = set([v])
elif k == 'SIGTERM':
engine.listeners['SIGTERM'] = set([v])
elif "." in k:
plugin, attrname = k.split(".", 1)
plugin = getattr(engine, plugin)
if attrname == 'on':
if v and hasattr(getattr(plugin, 'subscribe', None), '__call__'):
plugin.subscribe()
return
elif (not v) and hasattr(getattr(plugin, 'unsubscribe', None), '__call__'):
plugin.unsubscribe()
return
setattr(plugin, attrname, v)
else:
setattr(engine, k, v)
Config.namespaces["engine"] = _engine_namespace_handler
def _tree_namespace_handler(k, v):
"""Namespace handler for the 'tree' config namespace."""
cherrypy.tree.graft(v, v.script_name)
cherrypy.engine.log("Mounted: %s on %s" % (v, v.script_name or "/"))
Config.namespaces["tree"] = _tree_namespace_handler
| gpl-2.0 |
cul-it/Invenio | modules/bibencode/lib/bibencode_tester.py | 3 | 26130 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for BibEncode.
* Please run conversion_for_unit_tests.py
before you run the tests for the first time!
"""
__revision__ = "$Id$"
import unittest
from invenio import bibencode_utils
from invenio import bibencode_encode
from invenio import bibencode_metadata
import invenio.config
from invenio.bibencode_encode import encode_video
from invenio.bibencode_extract import extract_frames
from invenio.textutils import wait_for_user
from os.path import basename
import os
from urlparse import urlsplit
import shutil
import urllib2
from invenio.testutils import make_test_suite, run_test_suite
## original URL
video_url = "http://media.xiph.org/video/derf/y4m/blue_sky_1080p25.y4m"
video01 = invenio.config.CFG_TMPDIR + "/blue_sky_1080p25.y4m"
video01_out01 = invenio.config.CFG_TMPDIR + "/blue_sky_1080p.mp4"
video01_out02 = invenio.config.CFG_TMPDIR + "/blue_sky_1080p.ogg"
video01_out03 = invenio.config.CFG_TMPDIR + "/blue_sky_1080p.webm"
video01_out04 = invenio.config.CFG_TMPDIR + "/blue_sky_720p.mp4"
video01_out05 = invenio.config.CFG_TMPDIR + "/blue_sky_720p.ogg"
video01_out06 = invenio.config.CFG_TMPDIR + "/blue_sky_720p.webm"
video01_out07 = invenio.config.CFG_TMPDIR + "/blue_sky_480p.mp4"
video01_out08 = invenio.config.CFG_TMPDIR + "/blue_sky_480p.ogg"
video01_out09 = invenio.config.CFG_TMPDIR + "/blue_sky_480p.webm"
movie_no_aspect = invenio.config.CFG_TMPDIR + "/blue_sky_1080p_anamorphic.webm"
metadata = {
"title": "Super Duper Difficult Test Metadata Video File",
"author": "Invenio Author",
"album_artist": "Invenio Album Artist",
"album": "Invenio Album",
"grouping": "Invenio Grouping",
"composter": "Invenio Composer",
"year": "2011",
"track": "42",
"comment": "Invenio Comment",
"genre": "Invenio Genre",
"copyright": "Invenio Copyright",
"description": "Invenio Description",
"synopsis": "Invenio Synopsis",
"show": "Invenio Show",
"episode_id": "S04x42",
"network": "Invenio Network",
"lyrics": "Invenio Lyrics",
}
def url2name(url):
return basename(urlsplit(url)[2])
def download(url, localFileName = None):
""" Downloads a file from a remote url
"""
localName = url2name(url)
req = urllib2.Request(url)
r = urllib2.urlopen(req)
if r.info().has_key('Content-Disposition'):
        # If the response has Content-Disposition, take the file name from it
localName = r.info()['Content-Disposition'].split('filename=')[1]
if localName[0] == '"' or localName[0] == "'":
localName = localName[1:-1]
elif r.url != url:
        # if we were redirected, take the real file name from the final URL
localName = url2name(r.url)
if localFileName:
# we can force to save the file as specified name
localName = localFileName
f = open(localName, 'wb')
shutil.copyfileobj(r, f)
f.close()
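def _example_download():
    """Hypothetical usage sketch (not part of the original module); this hits
    the network if actually called."""
    # derive the local name from the URL (or the Content-Disposition header)
    download(video_url)
    # or force a specific local file name
    download(video_url, video01)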
def printr(message):
""" Print with carriage return
"""
print("\r" + message)
class SetupTester(unittest.TestCase):
"""Prepares the necessary files for the tests"""
def test_setUp(self):
if not os.path.exists(video01):
print("Downloading sample video ... ")
download(video_url, video01)
print("Starting encoding ... ")
self.assertEqual(encode_video(video01, video01_out01, "libfaac", "libx264", 128000, 8000000, "1920x1080", 1, "-vpre medium", metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out02, "libvorbis", "libtheora", 128000, 8000000, "1920x1080", 1, metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out03, "libvorbis", "libvpx", 128000, 8000000, "1920x1080", 1, "-g 320 -qmax 63", metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out04, "libfaac", "libx264", 128000, 4000000, "1280x720", 1, "-vpre medium", metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out05, "libvorbis", "libtheora", 128000, 4000000, "1280x720", 1, metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out06, "libvorbis", "libvpx", 128000, 4000000, "1280x720", 1, "-g 320 -qmax 63", metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out07, "libfaac", "libx264", 128000, 2000000, "852x480", 1, "-vpre medium", metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out08, "libvorbis", "libtheora", 128000, 2000000, "854x480", 1, metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, video01_out09, "libvorbis", "libvpx", 128000, 2000000, "852x480", 1, "-g 320 -qmax 63", metadata=metadata, update_fnc=printr), 1)
self.assertEqual(encode_video(video01, movie_no_aspect, "libvorbis", "libvpx", 128000, 8000000, "1440x1080", 1, "-g 320 -qmax 63", metadata=metadata, update_fnc=printr), 1)
print("Starting frame extraction ...")
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes1_", size=None, positions=None, numberof=10, extension='jpg', width=None, height=None, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes2_", size="640x360", positions=None, numberof=10, extension='jpg', width=None, height=None, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes3_", size=None, positions=None, numberof=10, extension='jpg', width=640, height=None, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes4_", size=None, positions=None, numberof=10, extension='jpg', width=None, height=360, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes5_", size=None, positions=None, numberof=10, extension='jpg', width=640, height=360, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes6_", size=None, positions=[1, 5, 10, 15, 20], numberof=None, extension='jpg', width=None, height=None, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes7_", size=None, positions=["00:00:01.00", "00:00:02.00","00:00:03.00", "00:00:04.00", "00:00:05.00"], numberof=None, extension='jpg', width=None, height=None, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
self.assertEqual(extract_frames(video01_out01, output_file=invenio.config.CFG_TMPDIR + "/testframes8_", size=None, positions=["00:00:01.00", 5,"00:00:03.00", 10, "00:00:05.00"], numberof=None, extension='jpg', width=None, height=None, aspect=None, profile=None, update_fnc=printr, message_fnc=printr), 1)
print("All done")
class TestFFmpegMinInstallation(unittest.TestCase):
"""Tests if the minimum FFmpeg installation is available"""
def test_ffmpeg(self):
self.assertEqual(bibencode_utils.check_ffmpeg_configuration(), None)
class TestUtilsFunctions(unittest.TestCase):
"""Tests the utility functions in bibencode_utils"""
def test_timcode_to_seconds(self):
"""Convert timecode to seconds"""
self.assertEqual(bibencode_utils.timecode_to_seconds("00:00:00"),0.0)
self.assertEqual(bibencode_utils.timecode_to_seconds("00:00:00.00"),0.0)
self.assertEqual(bibencode_utils.timecode_to_seconds("00:00:00.10"),0.1)
self.assertEqual(bibencode_utils.timecode_to_seconds("00:00:01.00"),1.0)
self.assertEqual(bibencode_utils.timecode_to_seconds("00:00:00.01"),0.01)
self.assertEqual(bibencode_utils.timecode_to_seconds("00:00:10"),10.0)
self.assertEqual(bibencode_utils.timecode_to_seconds("00:10:10"),610.0)
self.assertEqual(bibencode_utils.timecode_to_seconds("10:10:10"),36610.0)
self.assertEqual(bibencode_utils.timecode_to_seconds("10:10:10.10"),36610.10)
def test_seconds_to_timecode(self):
"""Convert seconds to timecode"""
self.assertEqual(bibencode_utils.seconds_to_timecode(0.0),"00:00:00.00")
self.assertEqual(bibencode_utils.seconds_to_timecode(0.1),"00:00:00.10")
self.assertEqual(bibencode_utils.seconds_to_timecode(1.0),"00:00:01.00")
self.assertEqual(bibencode_utils.seconds_to_timecode(1.1),"00:00:01.10")
self.assertEqual(bibencode_utils.seconds_to_timecode(10.0),"00:00:10.00")
self.assertEqual(bibencode_utils.seconds_to_timecode(610.0),"00:10:10.00")
self.assertEqual(bibencode_utils.seconds_to_timecode(36610.0),"10:10:10.00")
self.assertEqual(bibencode_utils.seconds_to_timecode(36610.10),"10:10:10.10")
self.assertEqual(bibencode_utils.seconds_to_timecode(36601.10),"10:10:01.10")
self.assertEqual(bibencode_utils.seconds_to_timecode(36600.10),"10:10:00.10")
self.assertEqual(bibencode_utils.seconds_to_timecode("36600.10"),"10:10:00.10")
def test_is_seconds(self):
"""Tests if given value is seconds like"""
self.assertEqual(bibencode_utils.is_seconds(1), True)
self.assertEqual(bibencode_utils.is_seconds(1.1), True)
self.assertEqual(bibencode_utils.is_seconds("1"), True)
self.assertEqual(bibencode_utils.is_seconds("1.1"), True)
self.assertEqual(bibencode_utils.is_seconds("11.11"), True)
self.assertEqual(bibencode_utils.is_seconds("1s"), False)
self.assertEqual(bibencode_utils.is_seconds("1.1s"), False)
self.assertEqual(bibencode_utils.is_seconds(""), False)
def test_is_timecode(self):
"""Test if given value is a timecode"""
self.assertEqual(bibencode_utils.is_timecode("00:00:00"), True)
self.assertEqual(bibencode_utils.is_timecode("00:00:00.00"), True)
self.assertEqual(bibencode_utils.is_timecode("00:00:00.0"), True)
self.assertEqual(bibencode_utils.is_timecode("00:00:00.000"), True)
self.assertEqual(bibencode_utils.is_timecode("00:00:0.0"), False)
self.assertEqual(bibencode_utils.is_timecode("00:00"), False)
self.assertEqual(bibencode_utils.is_timecode("00:00.00"), False)
self.assertEqual(bibencode_utils.is_timecode("00"), False)
self.assertEqual(bibencode_utils.is_timecode("0"), False)
self.assertEqual(bibencode_utils.is_timecode("00.00"), False)
self.assertEqual(bibencode_utils.is_timecode("0.0"), False)
def test_aspect_string_to_float(self):
"""Tests if string contains an aspect ratio"""
self.assertAlmostEqual(bibencode_utils.aspect_string_to_float("4:3"), 1.333, places=2)
self.assertAlmostEqual(bibencode_utils.aspect_string_to_float("16:9"), 1.777, places=2)
class TestEncodeFunctions(unittest.TestCase):
"""Tests the functions of bibencode_encode"""
def test_determine_aspect(self):
"""Tests if the aspect is correctly detected"""
self.assertEqual(bibencode_encode.determine_aspect(video01_out02), ("16:9", 1920, 1080))
self.assertEqual(bibencode_encode.determine_aspect(video01_out05), ("16:9", 1280, 720))
self.assertEqual(bibencode_encode.determine_aspect(video01_out08), ("427:240", 854, 480))
def test_determine_resolution(self):
"""Tests if the resolution is correctly calculated"""
# The aspect is fully detectable in the video
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1920, 1080, None), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1280, 720, None), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 854, 480, None), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1920, None, None), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1280, None, None), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 854, None, None), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, None, 1080, None), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, None, 720, None), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, None, 480, None), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1920, 1080, 1.777), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1280, 720, 1.777), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 854, 480, 1.78), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1920, None, 1.777), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 1280, None, 1.777), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, 854, None, 1.78), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, None, 1080, 1.777), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, None, 720, 1.777), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(video01_out03, None, 480, 1.78), "854x480")
# The aspect is not detectable in the video
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1920, 1080, None), "1440x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1280, 720, None), "960x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 854, 480, None), "640x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1920, None, None), "1920x1440")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1280, None, None), "1280x960")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 854, None, None), "854x640")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, None, 1080, None), "1440x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, None, 720, None), "960x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, None, 480, None), "640x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1920, 1080, 1.777), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1280, 720, 1.777), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 854, 480, 1.78), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1920, None, 1.777), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1280, None, 1.777), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 854, None, 1.78), "854x480")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, None, 1080, 1.777), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, None, 720, 1.777), "1280x720")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, None, 480, 1.78), "854x480")
# Alternative aspect notation
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1920, 1080, "16:9"), "1920x1080")
self.assertEqual(bibencode_encode.determine_resolution_preserving_aspect(movie_no_aspect, 1920, 1080, "4:3"), "1440x1080")
def test_assure_quality(self):
""" Test if the quality is detected correctly"""
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1920, 1080, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1280, 720, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 4443, 2500, 6000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1280, 720, 10000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1920, 1080, 10000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1920, 1080, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, None, 720, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, None, 2500, 6000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, None, 720, 10000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, None, 1080, 10000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1920, None, None, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1280, None, None, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 4443, None, None, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, None, None, 10000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, None, None, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 800, 600, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, "4:3", 800, 600, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, "4:3", 1440, 1080, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, 1.333333333333333333, 800, 600, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, 1.333333333333333333, 1440, 1080, 6000000, True, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, 1.333, 800, 600, 6000000, True, 0.95), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, 1.333, 1440, 1080, 6000000, True, 0.95), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 800, 600, 6000000, True, 0.95), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, None, 1440, 1080, 6000000, True, 0.95), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, 1.333, 800, 600, 6000000, False, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(video01_out03, 1.333, 1440, 1080, 6000000, False, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(movie_no_aspect, None, 800, 600, 6000000, False, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(movie_no_aspect, None, 1440, 1080, 6000000, False, 1.0), True)
self.assertEqual(bibencode_encode.assure_quality(movie_no_aspect, None, 1920, 1080, 6000000, False, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(movie_no_aspect, None, 1920, 1080, 6000000, True, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(movie_no_aspect, "16:9", 1920, 1080, 6000000, False, 1.0), False)
self.assertEqual(bibencode_encode.assure_quality(movie_no_aspect, "16:9", 1920, 1080, 6000000, True, 1.0), True)
class TestExtractFunctions(unittest.TestCase):
"""Tests the functions of bibencode_extract"""
pass
class TestMetadataFunctions(unittest.TestCase):
"""Tests the functions of bibencode_metadata"""
    def test_ffprobe_metadata(self):
"""Test if ffprobe metadata outputs correctly"""
metadata_check = {
'format': {'TAG:album': '"Invenio Album"',
'TAG:album_artist': '"Invenio Album Artist"',
'TAG:comment': '"Invenio Comment"',
'TAG:compatible_brands': 'isomiso2avc1mp41',
'TAG:copyright': '"Invenio Copyright"',
'TAG:creation_time': '1970-01-01 00:00:00',
'TAG:description': '"Invenio Description"',
'TAG:encoder': 'Lavf53.1.0',
'TAG:episode_id': '"S04x42"',
'TAG:genre': '"Invenio Genre"',
'TAG:grouping': '"Invenio Grouping"',
'TAG:lyrics': '"Invenio Lyrics"',
'TAG:major_brand': 'isom',
'TAG:minor_version': '512',
'TAG:network': '"Invenio Network"',
'TAG:show': '"Invenio Show"',
'TAG:synopsis': '"Invenio Synopsis"',
'TAG:title': '"Super Duper Difficult Test Metadata Video File"',
'bit_rate': '7606651.000000 ',
'duration': '10.000000 ',
'filename': '/home/oldi/videos/park_joy_1080p.mp4',
'format_long_name': 'QuickTime/MPEG-4/Motion JPEG 2000 format',
'format_name': 'mov,mp4,m4a,3gp,3g2,mj2',
'nb_streams': '1',
'size': '9508314.000000 ',
'start_time': '0.000000 '},
'streams': [{'TAG:creation_time': '1970-01-01 00:00:00',
'TAG:language': 'und',
'avg_frame_rate': '50/1',
'codec_long_name': 'H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10',
'codec_name': 'h264',
'codec_tag': '0x31637661',
'codec_tag_string': 'avc1',
'codec_time_base': '1/100',
'codec_type': 'video',
'display_aspect_ratio': '30:17',
'duration': '10.000000 ',
'has_b_frames': '2',
'height': '1088',
'index': '0',
'nb_frames': '500',
'pix_fmt': 'yuv420p',
'r_frame_rate': '50/1',
'sample_aspect_ratio': '1:1',
'start_time': '0.000000 ',
'time_base': '1/50',
'width': '1920'}]}
self.assertEqual(bibencode_metadata.ffprobe_metadata(video01_out01), metadata_check)
class TestBatchEngineFunctions(unittest.TestCase):
"""Tests the functions of bibencode_batch_engine"""
pass
class TestDaemonFunctions(unittest.TestCase):
"""Tests the functions of bibencode_daemon"""
pass
TEST_SUITE = make_test_suite(SetupTester,
TestUtilsFunctions,
TestEncodeFunctions,
TestExtractFunctions,
## TestMetadataFunctions,
TestBatchEngineFunctions,
TestDaemonFunctions)
if __name__ == "__main__":
wait_for_user("""
#######################################################
# This is the test suite for the BibEncode module #
# #
# You need to have installed ffmpeg with H.264, WebM #
# and Theora support! Please see the manual! #
# #
# Please be aware that not every aspect can be tested #
# due to the nature of video encoding and wrapping #
# external libraries like ffmpeg. The results should #
# only be seen as an indicator and do not necessarily #
# mean that there is something wrong. #
# #
# You should evaluate the output manually in the tmp #
# folder of your Invenio installation #
# #
# The test suite will download and create several #
# gigabytes of video material to perform the test! #
# The whole test might take up half an hour #
# #
    # Do you wish to continue? Then enter "Yes, I know!". #
# Else press 'ctrl + c' to leave this tool. #
#######################################################
""")
run_test_suite(TEST_SUITE)
| gpl-2.0 |
BayanGroup/ansible | lib/ansible/module_utils/netcfg.py | 11 | 2329 | #
# (c) 2015 Peter Sprygada, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import re
import collections
class ConfigLine(object):
def __init__(self, text):
self.text = text
self.children = list()
self.parents = list()
self.raw = None
def __str__(self):
return self.raw
def __eq__(self, other):
if self.text == other.text:
return self.parents == other.parents
def __ne__(self, other):
return not self.__eq__(other)
def parse(lines, indent):
toplevel = re.compile(r'\S')
childline = re.compile(r'^\s*(.+)$')
repl = r'([{|}|;])'
ancestors = list()
config = list()
for line in str(lines).split('\n'):
text = str(re.sub(repl, '', line)).strip()
cfg = ConfigLine(text)
cfg.raw = line
if not text or text[0] in ['!', '#']:
continue
# handle top level commands
if toplevel.match(line):
ancestors = [cfg]
# handle sub level commands
else:
match = childline.match(line)
line_indent = match.start(1)
level = int(line_indent / indent)
parent_level = level - 1
cfg.parents = ancestors[:level]
if level > len(ancestors):
config.append(cfg)
continue
for i in range(level, len(ancestors)):
ancestors.pop()
ancestors.append(cfg)
ancestors[parent_level].children.append(cfg)
config.append(cfg)
return config
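def _example_parse():
    # Illustrative sketch (not part of the original module): parsing a tiny
    # IOS-style config with one-space indentation. Top-level lines end up
    # with empty parents; children carry their ancestors in order.
    sample = '\n'.join([
        'interface GigabitEthernet0/1',
        ' description uplink',
        ' ip address 192.0.2.1 255.255.255.0',
        'hostname router1',
    ])
    config = parse(sample, indent=1)
    toplevel = [c.text for c in config if not c.parents]
    # toplevel -> ['interface GigabitEthernet0/1', 'hostname router1']
    return config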
| gpl-3.0 |
cebrusfs/217gdb | pwndbg/color/enhance.py | 5 | 1128 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import pwndbg.color.theme as theme
import pwndbg.config as config
from pwndbg.color import generateColorFunction
config_integer_color = theme.ColoredParameter('enhance-integer-value-color', 'none', 'color of value enhance (integer)')
config_string_color = theme.ColoredParameter('enhance-string-value-color', 'none', 'color of value enhance (string)')
config_comment_color = theme.ColoredParameter('enhance-comment-color', 'none', 'color of value enhance (comment)')
config_unknown_color = theme.ColoredParameter('enhance-unknown-color', 'none', 'color of value enhance (unknown value)')
def integer(x):
return generateColorFunction(config.enhance_integer_value_color)(x)
def string(x):
return generateColorFunction(config.enhance_string_value_color)(x)
def comment(x):
return generateColorFunction(config.enhance_comment_color)(x)
def unknown(x):
return generateColorFunction(config.enhance_unknown_color)(x)
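def _example_colorize():
    # Illustrative sketch (not part of the original module): each helper
    # wraps its argument in the escape sequences configured by the theme
    # parameters above (or returns it unchanged when the color is 'none').
    return integer('0x41') + ' ' + string('"/bin/sh"') + ' ' + comment('# hit')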
| mit |
rajalokan/nova | nova/api/openstack/compute/config_drive.py | 4 | 2374 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Config Drive extension."""
from nova.api.openstack.compute.schemas import config_drive as \
schema_config_drive
from nova.api.openstack import wsgi
from nova.policies import config_drive as cd_policies
ATTRIBUTE_NAME = "config_drive"
class ConfigDriveController(wsgi.Controller):
def _add_config_drive(self, req, servers):
for server in servers:
db_server = req.get_db_instance(server['id'])
# server['id'] is guaranteed to be in the cache due to
# the core API adding it in its 'show'/'detail' methods.
server[ATTRIBUTE_NAME] = db_server['config_drive']
def _show(self, req, resp_obj):
if 'server' in resp_obj.obj:
server = resp_obj.obj['server']
self._add_config_drive(req, [server])
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if context.can(cd_policies.BASE_POLICY_NAME, fatal=False):
self._show(req, resp_obj)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if 'servers' in resp_obj.obj and context.can(
cd_policies.BASE_POLICY_NAME, fatal=False):
servers = resp_obj.obj['servers']
self._add_config_drive(req, servers)
# NOTE(gmann): This function is not supposed to use 'body_deprecated_param'
# parameter as this is placed to handle scheduler_hint extension for V2.1.
def server_create(server_dict, create_kwargs, body_deprecated_param):
create_kwargs['config_drive'] = server_dict.get(ATTRIBUTE_NAME)
def get_server_create_schema(version):
return schema_config_drive.server_create
| apache-2.0 |
rednach/krill | test/test_regenerator.py | 14 | 7568 | #!/usr/bin/env python
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
import time
from shinken_test import ShinkenTest, unittest
from shinken.objects import Service
from shinken.misc.regenerator import Regenerator
class TestRegenerator(ShinkenTest):
def setUp(self):
self.setup_with_file('etc/shinken_regenerator.cfg')
def look_for_same_values(self):
# Look at Regenerator values
print "Hosts:", self.rg.hosts.__dict__
for h in self.rg.hosts:
orig_h = self.sched.hosts.find_by_name(h.host_name)
print h.state, orig_h.state
# Look for same states
self.assertEqual(orig_h.state, h.state)
self.assertEqual(orig_h.state_type, h.state_type)
# Look for same impacts
for i in h.impacts:
print "Got impact", i.get_name()
same_impacts = i.get_name() in [j.get_name() for j in orig_h.impacts]
self.assertTrue(same_impacts)
# And look for same source problems
for i in h.source_problems:
print "Got source pb", i.get_name()
same_pbs = i.get_name() in [j.get_name() for j in orig_h.source_problems]
self.assertTrue(same_pbs)
print "Services:", self.rg.services.__dict__
for s in self.rg.services:
orig_s = self.sched.services.find_srv_by_name_and_hostname(s.host.host_name, s.service_description)
print s.state, orig_s.state
self.assertEqual(orig_s.state, s.state)
self.assertEqual(orig_s.state_type, s.state_type)
# Look for same impacts too
for i in s.impacts:
print "Got impact", i.get_name()
same_impacts = i.get_name() in [j.get_name() for j in orig_s.impacts]
self.assertTrue(same_impacts)
# And look for same source problems
for i in s.source_problems:
print "Got source pb", i.get_name()
same_pbs = i.get_name() in [j.get_name() for j in orig_s.source_problems]
self.assertTrue(same_pbs)
# Look for same host
self.assertEqual(orig_s.host.get_name(), s.host.get_name())
def test_regenerator(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
# for h in self.sched.hosts:
# h.realm = h.realm.get_name()
self.sched.conf.skip_initial_broks = False
self.sched.brokers['Default-Broker'] = {'broks' : {}, 'has_full_broks' : False}
self.sched.fill_initial_broks('Default-Broker')
self.rg = Regenerator()
# Got the initial creation ones
ids = self.sched.broks.keys()
ids.sort()
t0 = time.time()
for i in ids:
b = self.sched.broks[i]
print "Manage b", b.type
b.prepare()
self.rg.manage_brok(b)
t1 = time.time()
print 'First inc', t1 - t0, len(self.sched.broks)
self.sched.broks.clear()
self.look_for_same_values()
print "Get the hosts and services"
host = self.sched.hosts.find_by_name("test_host_0")
host.checks_in_progress = []
host.act_depend_of = [] # ignore the router
router = self.sched.hosts.find_by_name("test_router_0")
router.checks_in_progress = []
router.act_depend_of = [] # ignore the router
svc = self.sched.services.find_srv_by_name_and_hostname("test_host_0", "test_ok_0")
svc.checks_in_progress = []
svc.act_depend_of = [] # no hostchecks on critical checkresults
self.scheduler_loop(3, [[host, 2, 'DOWN | value1=1 value2=2'], [router, 0, 'UP | rtt=10'], [svc, 2, 'BAD | value1=0 value2=0']])
self.assertEqual('DOWN', host.state)
self.assertEqual('HARD', host.state_type)
ids = self.sched.broks.keys()
ids.sort()
t0 = time.time()
for i in ids:
b = self.sched.broks[i]
print "Manage b", b.type
b.prepare()
self.rg.manage_brok(b)
t1 = time.time()
print 'Time', t1 - t0
self.sched.broks.clear()
self.look_for_same_values()
print 'Time', t1 - t0
b = svc.get_initial_status_brok()
b.prepare()
print "GO BENCH!"
t0 = time.time()
for i in xrange(1, 1000):
b = svc.get_initial_status_brok()
b.prepare()
s = Service({})
for (prop, value) in b.data.iteritems():
setattr(s, prop, value)
t1 = time.time()
print "Bench end:", t1 - t0
times = {}
sizes = {}
import cPickle
data = {}
cls = svc.__class__
start = time.time()
for i in xrange(1, 10000):
for prop, entry in svc.__class__.properties.items():
# Is this property intended for brokking?
if 'full_status' in entry.fill_brok:
data[prop] = svc.get_property_value_for_brok(prop, cls.properties)
if not prop in times:
times[prop] = 0
sizes[prop] = 0
t0 = time.time()
tmp = cPickle.dumps(data[prop], 0)
sizes[prop] += len(tmp)
times[prop] += time.time() - t0
print "Times"
for (k, v) in times.iteritems():
print "\t%s: %s" % (k, v)
print "\n\n"
print "Sizes"
for (k, v) in sizes.iteritems():
print "\t%s: %s" % (k, v)
print "\n"
print "total time", time.time() - start
def test_regenerator_load_from_scheduler(self):
#
# Config is not correct because of a wrong relative path
# in the main config file
#
# for h in self.sched.hosts:
# h.realm = h.realm.get_name()
self.rg = Regenerator()
self.rg.load_from_scheduler(self.sched)
self.sched.conf.skip_initial_broks = False
self.sched.brokers['Default-Broker'] = {'broks' : {}, 'has_full_broks' : False}
self.sched.fill_initial_broks('Default-Broker')
# Got the initial creation ones
ids = self.sched.broks.keys()
ids.sort()
t0 = time.time()
for i in ids:
b = self.sched.broks[i]
print "Manage b", b.type
b.prepare()
self.rg.manage_brok(b)
t1 = time.time()
print 'First inc', t1 - t0, len(self.sched.broks)
self.sched.broks.clear()
self.look_for_same_values()
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
Jorge-Rodriguez/ansible | test/units/modules/network/f5/test_bigip_vcmp_guest.py | 21 | 5777 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_vcmp_guest import ModuleParameters
from library.modules.bigip_vcmp_guest import ApiParameters
from library.modules.bigip_vcmp_guest import ModuleManager
from library.modules.bigip_vcmp_guest import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_vcmp_guest import ModuleParameters
from ansible.modules.network.f5.bigip_vcmp_guest import ApiParameters
from ansible.modules.network.f5.bigip_vcmp_guest import ModuleManager
from ansible.modules.network.f5.bigip_vcmp_guest import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
initial_image='BIGIP-12.1.0.1.0.1447-HF1.iso',
mgmt_network='bridged',
mgmt_address='1.2.3.4/24',
vlans=[
'vlan1',
'vlan2'
]
)
p = ModuleParameters(params=args)
assert p.initial_image == 'BIGIP-12.1.0.1.0.1447-HF1.iso'
assert p.mgmt_network == 'bridged'
def test_module_parameters_mgmt_bridged_without_subnet(self):
args = dict(
mgmt_network='bridged',
mgmt_address='1.2.3.4'
)
p = ModuleParameters(params=args)
assert p.mgmt_network == 'bridged'
assert p.mgmt_address == '1.2.3.4/32'
def test_module_parameters_mgmt_address_cidr(self):
args = dict(
mgmt_network='bridged',
mgmt_address='1.2.3.4/24'
)
p = ModuleParameters(params=args)
assert p.mgmt_network == 'bridged'
assert p.mgmt_address == '1.2.3.4/24'
def test_module_parameters_mgmt_address_subnet(self):
args = dict(
mgmt_network='bridged',
mgmt_address='1.2.3.4/255.255.255.0'
)
p = ModuleParameters(params=args)
assert p.mgmt_network == 'bridged'
assert p.mgmt_address == '1.2.3.4/24'
def test_module_parameters_mgmt_route(self):
args = dict(
mgmt_route='1.2.3.4'
)
p = ModuleParameters(params=args)
assert p.mgmt_route == '1.2.3.4'
def test_module_parameters_vcmp_software_image_facts(self):
# vCMP images may include a forward slash in their names. This is probably
# related to the slots on the system, but it is not a valid value to specify
# that slot when providing an initial image
args = dict(
initial_image='BIGIP-12.1.0.1.0.1447-HF1.iso/1',
)
p = ModuleParameters(params=args)
assert p.initial_image == 'BIGIP-12.1.0.1.0.1447-HF1.iso/1'
def test_api_parameters(self):
args = dict(
initialImage="BIGIP-tmos-tier2-13.1.0.0.0.931.iso",
managementGw="2.2.2.2",
managementIp="1.1.1.1/24",
managementNetwork="bridged",
state="deployed",
vlans=[
"/Common/vlan1",
"/Common/vlan2"
]
)
p = ApiParameters(params=args)
assert p.initial_image == 'BIGIP-tmos-tier2-13.1.0.0.0.931.iso'
assert p.mgmt_route == '2.2.2.2'
assert p.mgmt_address == '1.1.1.1/24'
assert '/Common/vlan1' in p.vlans
assert '/Common/vlan2' in p.vlans
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
self.patcher1 = patch('time.sleep')
self.patcher1.start()
def tearDown(self):
self.patcher1.stop()
def test_create_vlan(self, *args):
set_module_args(dict(
name="guest1",
mgmt_network="bridged",
mgmt_address="10.10.10.10/24",
initial_image="BIGIP-13.1.0.0.0.931.iso",
server='localhost',
password='password',
user='admin'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.create_on_device = Mock(return_value=True)
mm.exists = Mock(return_value=False)
mm.is_deployed = Mock(side_effect=[False, True, True, True, True])
mm.deploy_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['name'] == 'guest1'
| gpl-3.0 |
aidanhs/blockade | blockade/state.py | 1 | 3590 | #
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import uuid
import os
import errno
from copy import deepcopy
import yaml
from .errors import AlreadyInitializedError, NotInitializedError, \
InconsistentStateError
BLOCKADE_STATE_DIR = ".blockade"
BLOCKADE_STATE_FILE = ".blockade/state.yml"
BLOCKADE_ID_PREFIX = "blockade-"
BLOCKADE_STATE_VERSION = 1
def _assure_dir():
try:
os.mkdir(BLOCKADE_STATE_DIR)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def _state_delete():
try:
os.remove(BLOCKADE_STATE_FILE)
except OSError as e:
if e.errno not in (errno.EPERM, errno.ENOENT):
raise
try:
os.rmdir(BLOCKADE_STATE_DIR)
except OSError as e:
if e.errno not in (errno.ENOTEMPTY, errno.ENOENT):
raise
def _base_state(blockade_id, containers):
return dict(blockade_id=blockade_id, containers=containers,
version=BLOCKADE_STATE_VERSION)
class BlockadeState(object):
def __init__(self, blockade_id, containers):
self._blockade_id = blockade_id
self._containers = containers
@property
def blockade_id(self):
return self._blockade_id
@property
def containers(self):
return deepcopy(self._containers)
class BlockadeStateFactory(object):
# annoyed with how this ended up structured, and that I called it
# a factory, but fuckit..
@staticmethod
def initialize(containers, blockade_id=None):
if blockade_id is None:
blockade_id = BLOCKADE_ID_PREFIX + uuid.uuid4().hex[:10]
containers = deepcopy(containers)
f = None
path = BLOCKADE_STATE_FILE
_assure_dir()
try:
flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
with os.fdopen(os.open(path, flags), "w") as f:
yaml.dump(_base_state(blockade_id, containers), f)
except OSError as e:
if e.errno == errno.EEXIST:
raise AlreadyInitializedError(
"Path %s exists. "
"You may need to destroy a previous blockade." % path)
raise
except Exception:
# clean up our created file
_state_delete()
raise
return BlockadeState(blockade_id, containers)
@staticmethod
def load():
try:
with open(BLOCKADE_STATE_FILE) as f:
state = yaml.safe_load(f)
return BlockadeState(state['blockade_id'], state['containers'])
except (IOError, OSError) as e:
if e.errno == errno.ENOENT:
raise NotInitializedError("No blockade exists in this context")
raise InconsistentStateError("Failed to load Blockade state: "
+ str(e))
except Exception as e:
raise InconsistentStateError("Failed to load Blockade state: "
+ str(e))
@staticmethod
def destroy():
_state_delete()
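# Lifecycle sketch (illustrative only; the container dict is made up, and the
# relative imports above mean this runs inside the blockade package, with a
# writable working directory for the .blockade state dir):
#
#   state = BlockadeStateFactory.initialize({'c1': {'image': 'ubuntu'}})
#   same = BlockadeStateFactory.load()
#   assert same.blockade_id == state.blockade_id
#   BlockadeStateFactory.destroy()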
| apache-2.0 |
mashrin/processing.py | mode/examples/Topics/Simulate/SmokeParticleSystem/particle.py | 6 | 1187 |
# A simple Particle class that renders the particle as an image.
class Particle(object):
def __init__(self, l, img):
self.acc = PVector(0, 0)
self.vx = randomGaussian() * 0.3
self.vy = randomGaussian() * 0.3 - 1.0
self.vel = PVector(self.vx, self.vy)
self.loc = l.get()
self.lifespan = 100.0
self.img = img
def run(self):
self.update()
self.render()
# Method to apply a force vector to the Particle object
# Note we are ignoring "mass" here.
def applyForce(self, f):
self.acc.add(f)
# Method to update location
def update(self):
self.vel.add(self.acc)
self.loc.add(self.vel)
self.lifespan -= 2.5
self.acc.mult(0) # clear Acceleration.
# Method to display
def render(self):
imageMode(CENTER)
tint(255, self.lifespan)
image(self.img, self.loc.x, self.loc.y)
# Drawing a circle instead.
# fill(255,lifespan)
# noStroke()
# ellipse(self.loc.x,self.loc.y,self.img.width,self.img.height)
# Is the particle still useful?
def isDead(self):
return self.lifespan <= 0.0
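# Driver sketch (illustrative, not part of the original example; `img` is a
# hypothetical smoke texture loaded elsewhere with loadImage()). A sketch
# would typically spawn a particle per frame and prune dead ones:
#
#   particles = []
#   def draw():
#       background(0)
#       particles.append(Particle(PVector(width / 2, height / 2), img))
#       for i in reversed(range(len(particles))):
#           p = particles[i]
#           p.run()
#           if p.isDead():
#               del particles[i]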
| apache-2.0 |
pombredanne/SourceForge-Allura | ForgeBlog/forgeblog/main.py | 2 | 15894 | #-*- python -*-
import logging
from datetime import datetime
import urllib2
# Non-stdlib imports
import pkg_resources
import pymongo
from tg import expose, validate, redirect, flash
from tg.decorators import with_trailing_slash, without_trailing_slash
from pylons import g, c, request, response
import formencode
from formencode import validators
from webob import exc
from ming.orm import session
# Pyforge-specific imports
from allura.app import Application, ConfigOption, SitemapEntry
from allura.app import DefaultAdminController
from allura.lib import helpers as h
from allura.lib.search import search
from allura.lib.decorators import require_post, Property
from allura.lib.security import has_access, require_access
from allura.lib import widgets as w
from allura.lib.widgets.subscriptions import SubscribeForm
from allura.lib.widgets import form_fields as ffw
from allura import model as M
from allura.controllers import BaseController, AppDiscussionController
# Local imports
from forgeblog import model as BM
from forgeblog import version
from forgeblog import widgets
log = logging.getLogger(__name__)
class W:
thread=w.Thread(
page=None, limit=None, page_size=None, count=None,
style='linear')
pager = widgets.BlogPager()
new_post_form = widgets.NewPostForm()
edit_post_form = widgets.EditPostForm()
view_post_form = widgets.ViewPostForm()
label_edit = ffw.LabelEdit()
attachment_add = ffw.AttachmentAdd()
attachment_list = ffw.AttachmentList()
preview_post_form = widgets.PreviewPostForm()
subscribe_form = SubscribeForm()
class ForgeBlogApp(Application):
__version__ = version.__version__
tool_label='Blog'
default_mount_label='Blog'
default_mount_point='blog'
permissions = ['configure', 'read', 'write',
'unmoderated_post', 'post', 'moderate', 'admin']
ordinal=14
installable=True
config_options = Application.config_options
default_external_feeds = []
icons={
24:'images/blog_24.png',
32:'images/blog_32.png',
48:'images/blog_48.png'
}
def __init__(self, project, config):
Application.__init__(self, project, config)
self.root = RootController()
self.admin = BlogAdminController(self)
@Property
def external_feeds_list():
def fget(self):
globals = BM.Globals.query.get(app_config_id=self.config._id)
if globals is not None:
external_feeds = globals.external_feeds
else:
external_feeds = self.default_external_feeds
return external_feeds
def fset(self, new_external_feeds):
globals = BM.Globals.query.get(app_config_id=self.config._id)
if globals is not None:
globals.external_feeds = new_external_feeds
elif len(new_external_feeds) > 0:
globals = BM.Globals(app_config_id=self.config._id, external_feeds=new_external_feeds)
if globals is not None:
session(globals).flush()
def main_menu(self):
return [SitemapEntry(self.config.options.mount_label.title(), '.')]
@property
@h.exceptionless([], log)
def sitemap(self):
menu_id = self.config.options.mount_label.title()
with h.push_config(c, app=self):
return [
SitemapEntry(menu_id, '.')[self.sidebar_menu()] ]
@property
def show_discussion(self):
if 'show_discussion' in self.config.options:
return self.config.options['show_discussion']
else:
return True
@h.exceptionless([], log)
def sidebar_menu(self):
base = c.app.url
links = [
SitemapEntry('Home', base),
SitemapEntry('Search', base + 'search'),
]
if has_access(self, 'write')():
links += [ SitemapEntry('New Post', base + 'new') ]
return links
def admin_menu(self):
admin_url = c.project.url() + 'admin/' + self.config.options.mount_point + '/'
# temporarily disabled until some bugs are fixed
links = [SitemapEntry('External feeds', admin_url + 'exfeed', className='admin_modal')]
links += super(ForgeBlogApp, self).admin_menu(force_options=True)
return links
#return super(ForgeBlogApp, self).admin_menu(force_options=True)
def install(self, project):
'Set up any default permissions and roles here'
super(ForgeBlogApp, self).install(project)
# Setup permissions
role_admin = M.ProjectRole.by_name('Admin')._id
role_developer = M.ProjectRole.by_name('Developer')._id
role_auth = M.ProjectRole.by_name('*authenticated')._id
role_anon = M.ProjectRole.by_name('*anonymous')._id
self.config.acl = [
M.ACE.allow(role_anon, 'read'),
M.ACE.allow(role_auth, 'post'),
M.ACE.allow(role_auth, 'unmoderated_post'),
M.ACE.allow(role_developer, 'write'),
M.ACE.allow(role_developer, 'moderate'),
M.ACE.allow(role_admin, 'configure'),
M.ACE.allow(role_admin, 'admin'),
]
def uninstall(self, project):
"Remove all the tool's artifacts from the database"
BM.Attachment.query.remove(dict(app_config_id=c.app.config._id))
BM.BlogPost.query.remove(dict(app_config_id=c.app.config._id))
BM.BlogPostSnapshot.query.remove(dict(app_config_id=c.app.config._id))
super(ForgeBlogApp, self).uninstall(project)
class RootController(BaseController):
def __init__(self):
setattr(self, 'feed.atom', self.feed)
setattr(self, 'feed.rss', self.feed)
self._discuss = AppDiscussionController()
def _check_security(self):
require_access(c.app, 'read')
@expose('jinja:forgeblog:templates/blog/index.html')
@with_trailing_slash
def index(self, page=0, limit=10, **kw):
query_filter = dict(app_config_id=c.app.config._id)
if not has_access(c.app, 'write')():
query_filter['state'] = 'published'
q = BM.BlogPost.query.find(query_filter)
post_count = q.count()
limit, page = h.paging_sanitizer(limit, page, post_count)
posts = q.sort('timestamp', pymongo.DESCENDING) \
.skip(page * limit).limit(limit)
c.form = W.preview_post_form
c.pager = W.pager
return dict(posts=posts, page=page, limit=limit, count=post_count)
@expose('jinja:forgeblog:templates/blog/search.html')
@validate(dict(q=validators.UnicodeString(if_empty=None),
history=validators.StringBool(if_empty=False)))
def search(self, q=None, history=None, **kw):
'local tool search'
results = []
count=0
if not q:
q = ''
else:
results = search(
q,
fq=[
'state_s:published',
'is_history_b:%s' % history,
'project_id_s:%s' % c.project._id,
'mount_point_s:%s'% c.app.config.options.mount_point ])
if results: count=results.hits
return dict(q=q, history=history, results=results or [], count=count)
@expose('jinja:forgeblog:templates/blog/edit_post.html')
@without_trailing_slash
def new(self, **kw):
require_access(c.app, 'write')
now = datetime.utcnow()
post = dict(
state='published')
c.form = W.new_post_form
return dict(post=post)
@expose()
@require_post()
@validate(form=W.edit_post_form, error_handler=new)
@without_trailing_slash
def save(self, **kw):
require_access(c.app, 'write')
post = BM.BlogPost()
for k,v in kw.iteritems():
setattr(post, k, v)
post.neighborhood_id=c.project.neighborhood_id
post.make_slug()
post.commit()
M.Thread.new(discussion_id=post.app_config.discussion_id,
ref_id=post.index_id(),
subject='%s discussion' % post.title)
redirect(h.really_unicode(post.url()).encode('utf-8'))
@without_trailing_slash
@expose()
@validate(dict(
since=h.DateTimeConverter(if_empty=None, if_invalid=None),
until=h.DateTimeConverter(if_empty=None, if_invalid=None),
offset=validators.Int(if_empty=None),
limit=validators.Int(if_empty=None)))
def feed(self, since=None, until=None, offset=None, limit=None):
if request.environ['PATH_INFO'].endswith('.atom'):
feed_type = 'atom'
else:
feed_type = 'rss'
title = '%s - %s' % (c.project.name, c.app.config.options.mount_label)
feed = M.Feed.feed(
dict(project_id=c.project._id, app_config_id=c.app.config._id),
feed_type,
title,
c.app.url,
title,
since, until, offset, limit)
response.headers['Content-Type'] = ''
response.content_type = 'application/xml'
return feed.writeString('utf-8')
@with_trailing_slash
@expose('jinja:allura:templates/markdown_syntax_dialog.html')
def markdown_syntax_dialog(self):
'Static dialog page about how to use markdown.'
return dict()
@expose()
def _lookup(self, year, month, name, *rest):
slug = '/'.join((year, month, urllib2.unquote(name).decode('utf-8')))
post = BM.BlogPost.query.get(slug=slug, app_config_id=c.app.config._id)
if post is None:
raise exc.HTTPNotFound()
return PostController(post), rest
class PostController(BaseController):
def __init__(self, post):
self.post = post
setattr(self, 'feed.atom', self.feed)
setattr(self, 'feed.rss', self.feed)
def _check_security(self):
require_access(self.post, 'read')
@expose('jinja:forgeblog:templates/blog/post.html')
@with_trailing_slash
@validate(dict(page=validators.Int(if_empty=0),
limit=validators.Int(if_empty=25)))
def index(self, page=0, limit=25, **kw):
if self.post.state == 'draft':
require_access(self.post, 'write')
c.form = W.view_post_form
c.subscribe_form = W.subscribe_form
c.thread = W.thread
post_count = self.post.discussion_thread.post_count
limit, page = h.paging_sanitizer(limit, page, post_count)
version = kw.pop('version', None)
post = self._get_version(version)
base_post = self.post
return dict(post=post, base_post=base_post,
page=page, limit=limit, count=post_count)
@expose('jinja:forgeblog:templates/blog/edit_post.html')
@without_trailing_slash
def edit(self, **kw):
require_access(self.post, 'write')
c.form = W.edit_post_form
c.attachment_add = W.attachment_add
c.attachment_list = W.attachment_list
c.label_edit = W.label_edit
return dict(post=self.post)
@without_trailing_slash
@expose('jinja:forgeblog:templates/blog/post_history.html')
def history(self):
posts = self.post.history()
return dict(title=self.post.title, posts=posts)
@without_trailing_slash
@expose('jinja:forgeblog:templates/blog/post_diff.html')
def diff(self, v1, v2):
p1 = self._get_version(int(v1))
p2 = self._get_version(int(v2))
result = h.diff_text(p1.text, p2.text)
return dict(p1=p1, p2=p2, edits=result)
@expose()
@require_post()
@validate(form=W.edit_post_form, error_handler=edit)
@without_trailing_slash
def save(self, delete=None, **kw):
require_access(self.post, 'write')
if delete:
self.post.delete()
flash('Post deleted', 'info')
redirect(h.really_unicode(c.app.url).encode('utf-8'))
for k,v in kw.iteritems():
setattr(self.post, k, v)
self.post.commit()
redirect('.')
@without_trailing_slash
@require_post()
@expose()
def revert(self, version):
require_access(self.post, 'write')
orig = self._get_version(version)
if orig:
self.post.text = orig.text
self.post.commit()
redirect('.')
@expose()
@validate(W.subscribe_form)
def subscribe(self, subscribe=None, unsubscribe=None):
if subscribe:
self.post.subscribe(type='direct')
elif unsubscribe:
self.post.unsubscribe()
redirect(h.really_unicode(request.referer).encode('utf-8'))
@without_trailing_slash
@expose()
@validate(dict(
since=h.DateTimeConverter(if_empty=None, if_invalid=None),
until=h.DateTimeConverter(if_empty=None, if_invalid=None),
offset=validators.Int(if_empty=None),
limit=validators.Int(if_empty=None)))
def feed(self, since=None, until=None, offset=None, limit=None):
if request.environ['PATH_INFO'].endswith('.atom'):
feed_type = 'atom'
else:
feed_type = 'rss'
feed = M.Feed.feed(
dict(ref_id=self.post.index_id()),
feed_type,
'Recent changes to %s' % self.post.title,
self.post.url(),
'Recent changes to %s' % self.post.title,
since, until, offset, limit)
response.headers['Content-Type'] = ''
response.content_type = 'application/xml'
return feed.writeString('utf-8')
def _get_version(self, version):
if not version: return self.post
try:
return self.post.get_version(version)
except ValueError:
raise exc.HTTPNotFound()
class BlogAdminController(DefaultAdminController):
def __init__(self, app):
self.app = app
@without_trailing_slash
@expose('jinja:forgeblog:templates/blog/admin_options.html')
def options(self):
return dict(app=self.app,
allow_config=has_access(self.app, 'configure')())
@without_trailing_slash
@expose()
@require_post()
def set_options(self, show_discussion=False):
self.app.config.options['show_discussion'] = show_discussion and True or False
flash('Blog options updated')
redirect(h.really_unicode(c.project.url()+'admin/tools').encode('utf-8'))
@without_trailing_slash
@expose('jinja:forgeblog:templates/blog/admin_exfeed.html')
def exfeed(self):
#self.app.external_feeds_list = ['feed1', 'feed2']
#log.info("EXFEED: %s" % self.app.external_feeds_list)
feeds_list = []
for feed in self.app.external_feeds_list:
feeds_list.append(feed)
return dict(app=self.app,
feeds_list=feeds_list,
allow_config=has_access(self.app, 'configure')())
@without_trailing_slash
@expose()
@require_post()
def set_exfeed(self, new_exfeed=None, **kw):
exfeed_val = kw.get('exfeed', [])
if type(exfeed_val) == unicode:
tmp_exfeed_list = []
tmp_exfeed_list.append(exfeed_val)
else:
tmp_exfeed_list = exfeed_val
if new_exfeed is not None and new_exfeed != '':
tmp_exfeed_list.append(new_exfeed)
exfeed_list = []
invalid_list = []
v = validators.URL()
for link in tmp_exfeed_list:
try:
v.to_python(link)
exfeed_list.append(link)
except formencode.api.Invalid:
invalid_list.append(link)
self.app.external_feeds_list = exfeed_list
flash('External feeds updated')
if len(invalid_list) > 0:
flash('Invalid link(s): %s' % ','.join(link for link in invalid_list), 'error')
redirect(c.project.url()+'admin/tools')
| apache-2.0 |
surajssd/kuma | vendor/packages/pygments/formatters/__init__.py | 44 | 3597 | # -*- coding: utf-8 -*-
"""
pygments.formatters
~~~~~~~~~~~~~~~~~~~
Pygments formatters.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
import sys
import types
import fnmatch
from os.path import basename
from pygments.formatters._mapping import FORMATTERS
from pygments.plugin import find_plugin_formatters
from pygments.util import ClassNotFound, itervalues
__all__ = ['get_formatter_by_name', 'get_formatter_for_filename',
'get_all_formatters'] + list(FORMATTERS)
_formatter_cache = {} # classes by name
_pattern_cache = {}
def _fn_matches(fn, glob):
"""Return whether the supplied file name fn matches pattern filename."""
if glob not in _pattern_cache:
pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
return pattern.match(fn)
return _pattern_cache[glob].match(fn)
def _load_formatters(module_name):
"""Load a formatter (and all others in the module too)."""
mod = __import__(module_name, None, None, ['__all__'])
for formatter_name in mod.__all__:
cls = getattr(mod, formatter_name)
_formatter_cache[cls.name] = cls
def get_all_formatters():
"""Return a generator for all formatter classes."""
# NB: this returns formatter classes, not info like get_all_lexers().
for info in itervalues(FORMATTERS):
if info[1] not in _formatter_cache:
_load_formatters(info[0])
yield _formatter_cache[info[1]]
for _, formatter in find_plugin_formatters():
yield formatter
def find_formatter_class(alias):
"""Lookup a formatter by alias.
Returns None if not found.
"""
for module_name, name, aliases, _, _ in itervalues(FORMATTERS):
if alias in aliases:
if name not in _formatter_cache:
_load_formatters(module_name)
return _formatter_cache[name]
for _, cls in find_plugin_formatters():
if alias in cls.aliases:
return cls
def get_formatter_by_name(_alias, **options):
"""Lookup and instantiate a formatter by alias.
Raises ClassNotFound if not found.
"""
cls = find_formatter_class(_alias)
if cls is None:
raise ClassNotFound("no formatter found for name %r" % _alias)
return cls(**options)
def get_formatter_for_filename(fn, **options):
"""Lookup and instantiate a formatter by filename pattern.
Raises ClassNotFound if not found.
"""
fn = basename(fn)
for modname, name, _, filenames, _ in itervalues(FORMATTERS):
for filename in filenames:
if _fn_matches(fn, filename):
if name not in _formatter_cache:
_load_formatters(modname)
return _formatter_cache[name](**options)
for cls in find_plugin_formatters():
for filename in cls.filenames:
if _fn_matches(fn, filename):
return cls(**options)
raise ClassNotFound("no formatter found for file name %r" % fn)
class _automodule(types.ModuleType):
"""Automatically import formatters."""
def __getattr__(self, name):
info = FORMATTERS.get(name)
if info:
_load_formatters(info[0])
cls = _formatter_cache[info[1]]
setattr(self, name, cls)
return cls
raise AttributeError(name)
oldmod = sys.modules[__name__]
newmod = _automodule(__name__)
newmod.__dict__.update(oldmod.__dict__)
sys.modules[__name__] = newmod
del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types
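# Usage sketch (illustrative): the lookup helpers above resolve and
# instantiate formatter classes lazily, e.g.
#   >>> get_formatter_by_name('html', linenos=True)    # doctest: +SKIP
#   <pygments.formatters.html.HtmlFormatter object at ...>
#   >>> get_formatter_for_filename('out.rtf')          # doctest: +SKIP
#   <pygments.formatters.rtf.RtfFormatter object at ...>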
| mpl-2.0 |
twhyntie/image-heatmap | make_image_heatmap.py | 1 | 3834 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#...for the plotting.
import matplotlib.pyplot as plt
#...for the image manipulation.
import matplotlib.image as mpimg
#...for the MATH.
import numpy as np
# For scaling images.
import scipy.ndimage.interpolation as inter
#...for the colours.
from matplotlib import colorbar, colors
# For playing with the tick marks on the colour map axis.
from matplotlib import ticker
# Load the LaTeX text plot libraries.
from matplotlib import rc
# Use LaTeX for the plot text.
rc('font',**{'family':'serif','serif':['Computer Modern']})
rc('text', usetex=True)
# Load in the image.
## The scan image as a NumPy array.
scan_img = mpimg.imread("scan.png")
print(" *")
print(" * Image dimensions: %s" % (str(scan_img.shape)))
## The figure upon which to display the scan image.
plot = plt.figure(101, figsize=(5.0, 5.0), dpi=150, facecolor='w', edgecolor='w')
# Adjust the position of the axes.
#plot.subplots_adjust(bottom=0.17, left=0.15)
plot.subplots_adjust(bottom=0.05, left=0.15, right=0.99, top=0.95)
## The plot axes.
plotax = plot.add_subplot(111)
# Set the x axis label.
plt.xlabel("$x$")
# Set the y axis label.
plt.ylabel("$y$")
# Add the original scan image to the plot.
plt.imshow(scan_img)
## The blob centre x values [pixels].
blob_xs = []
## The blob centre x values [pixels].
blob_ys = []
## The blob radii [pixels].
blob_rs = []
# Open the blob data file and retrieve the x, y, and r values.
with open("blobs.csv", "r") as f:
for l in f.readlines():
blob_xs.append(float(l.split(",")[0]))
blob_ys.append(float(l.split(",")[1]))
blob_rs.append(float(l.split(",")[2]))
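# For reference (an assumption inferred from the parsing above): blobs.csv
# holds one blob per line as "x,y,r" in pixel units of the scaled scan
# image, e.g.
#
#   120.5,88.0,4.2
#   300.0,41.5,3.8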
## The image scale factor.
scale = 6.0
## The width of the image scaled up by the scale factor [pixels].
w = scan_img.shape[0]
## The original width of the image [pixels].
w_o = w / scale
## The height of the image scaled up by the scale factor [pixels].
h = scan_img.shape[1]
## The original height of the image [pixels].
h_o = h / scale
print(" * Image dimensions (w,h) = (%d,%d) -> (w_o,h_o) = (%d,%d)" % (w,h,w_o,h_o))
## The number of bins in each dimension of the heatmap.
#
# We are using the original image dimensions so that our heat map
# maps to the pixels in the original image. This is mainly for
# aesthetic reasons - there would be nothing to stop us using more
# (or fewer) bins.
bins = [w_o, h_o]
## The dimensions of the heat map, taken from the scaled-up image.
map_range = [[0, w], [0, h]]
# Create the heat map using NumPy's 2D histogram functionality.
centre_heatmap, x_edges, y_edges = np.histogram2d(blob_ys, blob_xs, bins=bins, range=map_range)
## The scaled heat map image.
#
# We need to scale the heat map array because although the bin widths
# are > 1, the resultant histogram (when made into an image) creates
# an image with one pixel per bin.
zoom_img = inter.zoom(centre_heatmap, (scale, scale), order=0, prefilter=False)
## The colo(u)r map for the heat map.
cmap = plt.cm.gnuplot
## The maximum number of blob centres in the heat map.
bc_max = np.amax(centre_heatmap)
#
print(" * Maximum value in the heat map is %d." % (bc_max))
## The maximum value to use in the colo(u)r map axis.
color_map_max = bc_max
# Add the (scaled) heat map (2D histogram) to the plot.
zoomed_heat_map = plt.imshow(zoom_img, alpha=0.8, cmap=cmap,norm=colors.Normalize(vmin=0,vmax=color_map_max))
## The heat map colo(u)r bar.
cb = plt.colorbar(alpha=1.0, mappable=zoomed_heat_map)
## An object to neaten up the colour map axis tick marks.
tick_locator = ticker.MaxNLocator(nbins=7)
#
cb.locator = tick_locator
#
cb.update_ticks()
# Add a grid.
plt.grid(1)
# Crop the plot limits to the limits of the scan iteself.
plotax.set_xlim([0, h])
plotax.set_ylim([w, 0])
# Save the figure.
plot.savefig("heatmap.png")
print(" *")
| mit |
yury-s/v8-inspector | Source/chrome/tools/checkbins/checkbins.py | 77 | 4325 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that all EXE and DLL files in the provided directory were built
correctly.
In essence it runs a subset of BinScope tests ensuring that binaries have
/NXCOMPAT, /DYNAMICBASE and /SAFESEH.
"""
import os
import optparse
import sys
# Find /third_party/pefile based on current directory and script path.
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
'third_party', 'pefile'))
import pefile
PE_FILE_EXTENSIONS = ['.exe', '.dll']
DYNAMICBASE_FLAG = 0x0040
NXCOMPAT_FLAG = 0x0100
NO_SEH_FLAG = 0x0400
MACHINE_TYPE_AMD64 = 0x8664
# Please do not add your file here without confirming that it indeed doesn't
# require /NXCOMPAT and /DYNAMICBASE. Contact [email protected] or your local
# Windows guru for advice.
EXCLUDED_FILES = ['chrome_frame_mini_installer.exe',
'mini_installer.exe',
'wow_helper.exe',
'xinput1_3.dll' # Microsoft DirectX redistributable.
]
def IsPEFile(path):
return (os.path.isfile(path) and
os.path.splitext(path)[1].lower() in PE_FILE_EXTENSIONS and
os.path.basename(path) not in EXCLUDED_FILES)
def main(options, args):
directory = args[0]
pe_total = 0
pe_passed = 0
for file in os.listdir(directory):
path = os.path.abspath(os.path.join(directory, file))
if not IsPEFile(path):
continue
pe = pefile.PE(path, fast_load=True)
pe.parse_data_directories(directories=[
pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG']])
pe_total = pe_total + 1
success = True
# Check for /DYNAMICBASE.
if pe.OPTIONAL_HEADER.DllCharacteristics & DYNAMICBASE_FLAG:
if options.verbose:
print "Checking %s for /DYNAMICBASE... PASS" % path
else:
success = False
print "Checking %s for /DYNAMICBASE... FAIL" % path
# Check for /NXCOMPAT.
if pe.OPTIONAL_HEADER.DllCharacteristics & NXCOMPAT_FLAG:
if options.verbose:
print "Checking %s for /NXCOMPAT... PASS" % path
else:
success = False
print "Checking %s for /NXCOMPAT... FAIL" % path
# Check for /SAFESEH. Binaries should meet one of the following
# criteria:
# 1) Have no SEH table as indicated by the DLL characteristics
# 2) Have a LOAD_CONFIG section containing a valid SEH table
# 3) Be a 64-bit binary, in which case /SAFESEH isn't required
#
# Refer to the following MSDN article for more information:
# http://msdn.microsoft.com/en-us/library/9a89h429.aspx
if (pe.OPTIONAL_HEADER.DllCharacteristics & NO_SEH_FLAG or
(hasattr(pe, "DIRECTORY_ENTRY_LOAD_CONFIG") and
pe.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SEHandlerCount > 0 and
pe.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SEHandlerTable != 0) or
pe.FILE_HEADER.Machine == MACHINE_TYPE_AMD64):
if options.verbose:
print "Checking %s for /SAFESEH... PASS" % path
else:
success = False
print "Checking %s for /SAFESEH... FAIL" % path
# ASLR is weakened on Windows 64-bit when the ImageBase is below 4GB
# (because the loader will never rebase the image above 4GB).
if pe.FILE_HEADER.Machine == MACHINE_TYPE_AMD64:
if pe.OPTIONAL_HEADER.ImageBase <= 0xFFFFFFFF:
print("Checking %s ImageBase (0x%X < 4GB)... FAIL" %
(path, pe.OPTIONAL_HEADER.ImageBase))
success = False
elif options.verbose:
print("Checking %s ImageBase (0x%X > 4GB)... PASS" %
(path, pe.OPTIONAL_HEADER.ImageBase))
# Update tally.
if success:
pe_passed = pe_passed + 1
print "Result: %d files found, %d files passed" % (pe_total, pe_passed)
if pe_passed != pe_total:
sys.exit(1)
if __name__ == '__main__':
usage = "Usage: %prog [options] DIRECTORY"
option_parser = optparse.OptionParser(usage=usage)
option_parser.add_option("-v", "--verbose", action="store_true",
default=False, help="Print debug logging")
options, args = option_parser.parse_args()
if not args:
option_parser.print_help()
sys.exit(0)
main(options, args)
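# Example invocation (illustrative):
#   python checkbins.py -v out\Release
# prints a PASS/FAIL line per check and exits non-zero if any binary fails.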
| bsd-3-clause |
zygh0st/terminator | terminatorlib/debugserver.py | 10 | 5015 | #!/usr/bin/python
#
# Copyright (c) 2008, Thomas Hurst <[email protected]>
#
# Use of this file is unrestricted provided this notice is retained.
# If you use it, it'd be nice if you dropped me a note. Also beer.
from terminatorlib.util import dbg, err
from terminatorlib.version import APP_NAME, APP_VERSION
import socket
import threading
import SocketServer
import code
import sys
import readline
import rlcompleter
import re
def ddbg(msg):
# uncomment this to get lots of spam from debugserver
return
dbg(msg)
class PythonConsoleServer(SocketServer.BaseRequestHandler):
env = None
def setup(self):
dbg('debugserver: connect from %s' % str(self.client_address))
ddbg('debugserver: env=%r' % PythonConsoleServer.env)
self.console = TerminatorConsole(PythonConsoleServer.env)
def handle(self):
ddbg("debugserver: handling")
try:
self.socketio = self.request.makefile()
sys.stdout = self.socketio
sys.stdin = self.socketio
sys.stderr = self.socketio
self.console.run(self)
finally:
sys.stdout = sys.__stdout__
sys.stdin = sys.__stdin__
sys.stderr = sys.__stderr__
self.socketio.close()
ddbg("debugserver: done handling")
def verify_request(self, request, client_address):
return True
def finish(self):
ddbg('debugserver: disconnect from %s' % str(self.client_address))
# rfc1116/rfc1184
LINEMODE = chr(34) # Linemode negotiation
NULL = chr(0)
ECHO = chr(1)
CR = chr(13)
LF = chr(10)
SE = chr(240) # End subnegotiation
NOP = chr(241)
DM = chr(242) # Data Mark
BRK = chr(243) # Break
IP = chr(244) # Interrupt Process
AO = chr(245) # Abort Output
AYT = chr(246) # Are You There
EC = chr(247) # Erase Character
EL = chr(248) # Erase Line
GA = chr(249) # Go Ahead
SB = chr(250) # Subnegotiation follows
WILL = chr(251) # Subnegotiation commands
WONT = chr(252)
DO = chr(253)
DONT = chr(254)
IAC = chr(255) # Interpret As Command
UIAC = '(^|[^' + IAC + '])' + IAC # Unescaped IAC
BareLF = re.compile('([^' + CR + '])' + CR)
DoDont = re.compile(UIAC +'[' + DO + DONT + '](.)')
WillWont = re.compile(UIAC + '[' + WILL + WONT + '](.)')
AreYouThere = re.compile(UIAC + AYT)
IpTelnet = re.compile(UIAC + IP)
OtherTelnet = re.compile(UIAC + '[^' + IAC + ']')
# See http://blade.nagaokaut.ac.jp/cgi-bin/scat.rb/ruby/ruby-talk/205335 for telnet bits
# Python doesn't make this an especially neat conversion :(
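# Worked example (illustrative): if the client sends IAC DO ECHO
# ('\xff\xfd\x01'), DoDont captures the option byte and parse_telnet() below
# replies with IAC WONT ECHO ('\xff\xfc\x01'); every negotiation request is
# politely refused.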
class TerminatorConsole(code.InteractiveConsole):
def parse_telnet(self, data):
odata = data
data = re.sub(BareLF, '\\1', data)
data = data.replace(CR + NULL, '')
data = data.replace(NULL, '')
bits = re.findall(DoDont, data)
ddbg("bits = %r" % bits)
if bits:
data = re.sub(DoDont, '\\1', data)
ddbg("telnet: DO/DON'T answer")
# answer DO and DON'T with WON'T
for bit in bits:
self.write(IAC + WONT + bit[1])
bits = re.findall(WillWont, data)
if bits:
data = re.sub(WillWont, '\\1', data)
ddbg("telnet: WILL/WON'T answer")
for bit in bits:
# answer WILLs and WON'T with DON'Ts
self.write(IAC + DONT + bit[1])
bits = re.findall(AreYouThere, data)
if bits:
ddbg("telnet: am I there answer")
data = re.sub(AreYouThere, '\\1', data)
for bit in bits:
self.write("Yes, I'm still here, I think.\n")
(data, interrupts) = re.subn(IpTelnet, '\\1', data)
if interrupts:
ddbg("debugserver: Ctrl-C detected")
raise KeyboardInterrupt
data = re.sub(OtherTelnet, '\\1', data) # and any other Telnet codes
data = data.replace(IAC + IAC, IAC) # and handle escapes
if data != odata:
ddbg("debugserver: Replaced %r with %r" % (odata, data))
return data
def raw_input(self, prompt = None):
ddbg("debugserver: raw_input prompt = %r" % prompt)
if prompt:
self.write(prompt)
buf = ''
compstate = 0
while True:
data = self.server.socketio.read(1)
ddbg('raw_input: char=%r' % data)
if data == LF or data == '\006':
buf = self.parse_telnet(buf + data)
if buf != '':
return buf
elif data == '\004' or data == '': # ^D
raise EOFError
else:
buf += data
def write(self, data):
ddbg("debugserver: write %r" % data)
self.server.socketio.write(data)
self.server.socketio.flush()
def run(self, server):
self.server = server
self.write("Welcome to the %s-%s debug server, have a nice stay\n" % (APP_NAME, APP_VERSION))
self.interact()
try:
self.write("Time to go. Bye!\n")
except:
pass
def spawn(env):
PythonConsoleServer.env = env
tcpserver = SocketServer.TCPServer(('127.0.0.1', 0), PythonConsoleServer)
dbg("debugserver: listening on %s" % str(tcpserver.server_address))
debugserver = threading.Thread(target=tcpserver.serve_forever, name="DebugServer")
debugserver.setDaemon(True)
debugserver.start()
return(debugserver, tcpserver)
| gpl-2.0 |
MiLk/ansible | lib/ansible/modules/monitoring/zabbix_group.py | 35 | 7601 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2013-2014, Epic Games, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: zabbix_group
short_description: Zabbix host groups creates/deletes
description:
- Create host groups if they do not exist.
- Delete existing host groups if they exist.
version_added: "1.8"
author:
- "(@cove)"
- "Tony Minfei Ding"
- "Harrison Gu (@harrisongu)"
requirements:
- "python >= 2.6"
- zabbix-api
options:
server_url:
description:
- Url of Zabbix server, with protocol (http or https).
C(url) is an alias for C(server_url).
required: true
aliases: [ "url" ]
login_user:
description:
- Zabbix user name.
required: true
login_password:
description:
- Zabbix user password.
required: true
http_login_user:
description:
- Basic Auth login
required: false
default: None
version_added: "2.1"
http_login_password:
description:
- Basic Auth password
required: false
default: None
version_added: "2.1"
state:
description:
- Create or delete host group.
required: false
default: "present"
choices: [ "present", "absent" ]
timeout:
description:
- The timeout of API request(seconds).
default: 10
host_groups:
description:
- List of host groups to create or delete.
required: true
aliases: [ "host_group" ]
notes:
- Too many concurrent updates to the same group may cause Zabbix to return errors, see examples for a workaround if needed.
'''
EXAMPLES = '''
# Base create host groups example
- name: Create host groups
local_action:
module: zabbix_group
server_url: http://monitor.example.com
login_user: username
login_password: password
state: present
host_groups:
- Example group1
- Example group2
# Limit the Zabbix group creations to one host since Zabbix can return an error when doing concurrent updates
- name: Create host groups
local_action:
module: zabbix_group
server_url: http://monitor.example.com
login_user: username
login_password: password
state: present
host_groups:
- Example group1
- Example group2
when: inventory_hostname==groups['group_name'][0]
'''
try:
from zabbix_api import ZabbixAPI, ZabbixAPISubClass
from zabbix_api import Already_Exists
HAS_ZABBIX_API = True
except ImportError:
HAS_ZABBIX_API = False
class HostGroup(object):
def __init__(self, module, zbx):
self._module = module
self._zapi = zbx
# create host group(s) if not exists
def create_host_group(self, group_names):
try:
group_add_list = []
for group_name in group_names:
result = self._zapi.hostgroup.get({'filter': {'name': group_name}})
if not result:
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
self._zapi.hostgroup.create({'name': group_name})
group_add_list.append(group_name)
except Already_Exists:
return group_add_list
return group_add_list
except Exception as e:
self._module.fail_json(msg="Failed to create host group(s): %s" % e)
# delete host group(s)
def delete_host_group(self, group_ids):
try:
if self._module.check_mode:
self._module.exit_json(changed=True)
self._zapi.hostgroup.delete(group_ids)
except Exception as e:
self._module.fail_json(msg="Failed to delete host group(s), Exception: %s" % e)
# get group ids by name
def get_group_ids(self, host_groups):
group_ids = []
group_list = self._zapi.hostgroup.get({'output': 'extend', 'filter': {'name': host_groups}})
for group in group_list:
group_id = group['groupid']
group_ids.append(group_id)
return group_ids, group_list
def main():
module = AnsibleModule(
argument_spec=dict(
server_url=dict(type='str', required=True, aliases=['url']),
login_user=dict(type='str', required=True),
login_password=dict(type='str', required=True, no_log=True),
http_login_user=dict(type='str',required=False, default=None),
http_login_password=dict(type='str',required=False, default=None, no_log=True),
host_groups=dict(type='list', required=True, aliases=['host_group']),
state=dict(default="present", choices=['present','absent']),
timeout=dict(type='int', default=10)
),
supports_check_mode=True
)
if not HAS_ZABBIX_API:
module.fail_json(msg="Missing required zabbix-api module (check docs or install with: pip install zabbix-api)")
server_url = module.params['server_url']
login_user = module.params['login_user']
login_password = module.params['login_password']
http_login_user = module.params['http_login_user']
http_login_password = module.params['http_login_password']
host_groups = module.params['host_groups']
state = module.params['state']
timeout = module.params['timeout']
zbx = None
# login to zabbix
try:
zbx = ZabbixAPI(server_url, timeout=timeout, user=http_login_user, passwd=http_login_password)
zbx.login(login_user, login_password)
except Exception as e:
module.fail_json(msg="Failed to connect to Zabbix server: %s" % e)
hostGroup = HostGroup(module, zbx)
group_ids = []
group_list = []
if host_groups:
group_ids, group_list = hostGroup.get_group_ids(host_groups)
if state == "absent":
# delete host groups
if group_ids:
delete_group_names = []
hostGroup.delete_host_group(group_ids)
for group in group_list:
delete_group_names.append(group['name'])
module.exit_json(changed=True,
result="Successfully deleted host group(s): %s." % ",".join(delete_group_names))
else:
module.exit_json(changed=False, result="No host group(s) to delete.")
else:
# create host groups
group_add_list = hostGroup.create_host_group(host_groups)
if len(group_add_list) > 0:
module.exit_json(changed=True, result="Successfully created host group(s): %s" % group_add_list)
else:
module.exit_json(changed=False)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
wuga214/Django-Wuga | env/lib/python2.7/site-packages/django/conf/locale/zh_Hant/formats.py | 1008 | 1810 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
TIME_FORMAT = 'H:i' # 20:45
DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
YEAR_MONTH_FORMAT = 'Y年n月' # 2016年9月
MONTH_DAY_FORMAT = 'm月j日' # 9月5日
SHORT_DATE_FORMAT = 'Y年n月j日' # 2016年9月5日
SHORT_DATETIME_FORMAT = 'Y年n月j日 H:i' # 2016年9月5日 20:45
FIRST_DAY_OF_WEEK = 1 # 星期一 (Monday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%Y/%m/%d', # '2016/09/05'
'%Y-%m-%d', # '2016-09-05'
'%Y年%m月%d日', # '2016年9月5日'
]
TIME_INPUT_FORMATS = [
'%H:%M', # '20:45'
'%H:%M:%S', # '20:45:29'
'%H:%M:%S.%f', # '20:45:29.000200'
]
DATETIME_INPUT_FORMATS = [
'%Y/%m/%d %H:%M', # '2016/09/05 20:45'
'%Y-%m-%d %H:%M', # '2016-09-05 20:45'
'%Y年%m月%d日 %H:%M', # '2016年9月5日 20:45'
'%Y/%m/%d %H:%M:%S', # '2016/09/05 20:45:29'
'%Y-%m-%d %H:%M:%S', # '2016-09-05 20:45:29'
'%Y年%m月%d日 %H:%M:%S', # '2016年9月5日 20:45:29'
'%Y/%m/%d %H:%M:%S.%f', # '2016/09/05 20:45:29.000200'
'%Y-%m-%d %H:%M:%S.%f', # '2016-09-05 20:45:29.000200'
'%Y年%m月%d日 %H:%M:%S.%f', # '2016年9月5日 20:45:29.000200'
]
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ''
NUMBER_GROUPING = 4
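# Illustrative note (an assumption, not part of this file): with
# USE_L10N = True and LANGUAGE_CODE = 'zh-hant', e.g.
#   from django.utils import formats
#   formats.date_format(datetime.date(2016, 9, 5))  # -> '2016年9月5日'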
| apache-2.0 |
patrickstocklin/chattR | lib/python2.7/site-packages/django/contrib/auth/management/__init__.py | 91 | 6477 | """
Creates permissions for all installed apps that need permissions.
"""
from __future__ import unicode_literals
import getpass
import unicodedata
from django.apps import apps
from django.contrib.auth import get_permission_codename
from django.core import exceptions
from django.core.management.base import CommandError
from django.db import DEFAULT_DB_ALIAS, router
from django.utils.encoding import DEFAULT_LOCALE_ENCODING
from django.utils import six
def _get_all_permissions(opts, ctype):
"""
Returns (codename, name) for all permissions in the given opts.
"""
builtin = _get_builtin_permissions(opts)
custom = list(opts.permissions)
_check_permission_clashing(custom, builtin, ctype)
return builtin + custom
def _get_builtin_permissions(opts):
"""
Returns (codename, name) for all autogenerated permissions.
By default, this is ('add', 'change', 'delete')
"""
perms = []
for action in opts.default_permissions:
perms.append((get_permission_codename(action, opts),
'Can %s %s' % (action, opts.verbose_name_raw)))
return perms
def _check_permission_clashing(custom, builtin, ctype):
"""
Check that permissions for a model do not clash. Raises CommandError if
there are duplicate permissions.
"""
pool = set()
builtin_codenames = set(p[0] for p in builtin)
for codename, _name in custom:
if codename in pool:
raise CommandError(
"The permission codename '%s' is duplicated for model '%s.%s'." %
(codename, ctype.app_label, ctype.model_class().__name__))
elif codename in builtin_codenames:
raise CommandError(
"The permission codename '%s' clashes with a builtin permission "
"for model '%s.%s'." %
(codename, ctype.app_label, ctype.model_class().__name__))
pool.add(codename)
def create_permissions(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, **kwargs):
if not app_config.models_module:
return
try:
Permission = apps.get_model('auth', 'Permission')
except LookupError:
return
if not router.allow_migrate_model(using, Permission):
return
from django.contrib.contenttypes.models import ContentType
# This will hold the permissions we're looking for as
# (content_type, (codename, name))
searched_perms = list()
# The codenames and ctypes that should exist.
ctypes = set()
for klass in app_config.get_models():
# Force looking up the content types in the current database
# before creating foreign keys to them.
ctype = ContentType.objects.db_manager(using).get_for_model(klass)
ctypes.add(ctype)
for perm in _get_all_permissions(klass._meta, ctype):
searched_perms.append((ctype, perm))
# Find all the Permissions that have a content_type for a model we're
# looking for. We don't need to check for codenames since we already have
# a list of the ones we're going to create.
all_perms = set(Permission.objects.using(using).filter(
content_type__in=ctypes,
).values_list(
"content_type", "codename"
))
perms = [
Permission(codename=codename, name=name, content_type=ct)
for ct, (codename, name) in searched_perms
if (ct.pk, codename) not in all_perms
]
# Validate the permissions before bulk_creation to avoid cryptic
# database error when the verbose_name is longer than 50 characters
permission_name_max_length = Permission._meta.get_field('name').max_length
verbose_name_max_length = permission_name_max_length - 11 # len('Can change ') prefix
for perm in perms:
if len(perm.name) > permission_name_max_length:
raise exceptions.ValidationError(
"The verbose_name of %s.%s is longer than %s characters" % (
perm.content_type.app_label,
perm.content_type.model,
verbose_name_max_length,
)
)
Permission.objects.using(using).bulk_create(perms)
if verbosity >= 2:
for perm in perms:
print("Adding permission '%s'" % perm)
def get_system_username():
"""
Try to determine the current system user's username.
:returns: The username as a unicode string, or an empty string if the
username could not be determined.
"""
try:
result = getpass.getuser()
except (ImportError, KeyError):
# KeyError will be raised by os.getpwuid() (called by getuser())
# if there is no corresponding entry in the /etc/passwd file
# (a very restricted chroot environment, for example).
return ''
if six.PY2:
try:
result = result.decode(DEFAULT_LOCALE_ENCODING)
except UnicodeDecodeError:
# UnicodeDecodeError - preventive treatment for non-latin Windows.
return ''
return result
def get_default_username(check_db=True):
"""
Try to determine the current system user's username to use as a default.
:param check_db: If ``True``, requires that the username does not match an
existing ``auth.User`` (otherwise returns an empty string).
:returns: The username, or an empty string if no username can be
determined.
"""
# This file is used in apps.py, it should not trigger models import.
from django.contrib.auth import models as auth_app
# If the User model has been swapped out, we can't make any assumptions
# about the default user name.
if auth_app.User._meta.swapped:
return ''
default_username = get_system_username()
try:
default_username = (unicodedata.normalize('NFKD', default_username)
.encode('ascii', 'ignore').decode('ascii')
.replace(' ', '').lower())
except UnicodeDecodeError:
return ''
# Run the username validator
try:
auth_app.User._meta.get_field('username').run_validators(default_username)
except exceptions.ValidationError:
return ''
# Don't return the default username if it is already taken.
if check_db and default_username:
try:
auth_app.User._default_manager.get(username=default_username)
except auth_app.User.DoesNotExist:
pass
else:
return ''
return default_username
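# Usage sketch (illustrative): the createsuperuser management command uses
# get_default_username() to pre-fill its interactive prompt, e.g.
#   >>> get_default_username(check_db=False)  # doctest: +SKIP
#   'alice'  # hypothetical system account name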
| gpl-2.0 |