repo_name | path | copies | size | content | license
---|---|---|---|---|---
adoosii/edx-platform | pavelib/acceptance_test.py | 12 | 1983 | """
Acceptance test tasks
"""
from paver.easy import task, cmdopts, needs
from pavelib.utils.test.suites import AcceptanceTestSuite
from optparse import make_option
try:
from pygments.console import colorize
except ImportError:
colorize = lambda color, text: text # pylint: disable=invalid-name
__test__ = False # do not collect
@task
@needs(
'pavelib.prereqs.install_prereqs',
'pavelib.utils.test.utils.clean_reports_dir',
)
@cmdopts([
("system=", "s", "System to act on"),
("default_store=", "m", "Default modulestore to use for course creation"),
("fasttest", "a", "Run without collectstatic"),
("extra_args=", "e", "adds as extra args to the test command"),
make_option("--verbose", action="store_const", const=2, dest="verbosity"),
make_option("-q", "--quiet", action="store_const", const=0, dest="verbosity"),
make_option("-v", "--verbosity", action="count", dest="verbosity"),
make_option("--pdb", action="store_true", help="Launches an interactive debugger upon error"),
])
def test_acceptance(options):
"""
Run the acceptance tests for either the lms or the cms
"""
opts = {
'fasttest': getattr(options, 'fasttest', False),
'system': getattr(options, 'system', None),
'default_store': getattr(options, 'default_store', None),
'verbosity': getattr(options, 'verbosity', 3),
'extra_args': getattr(options, 'extra_args', ''),
'pdb': getattr(options, 'pdb', False),
}
if opts['system'] not in ['cms', 'lms']:
msg = colorize(
'red',
'No system specified, running tests for both cms and lms.'
)
print msg
if opts['default_store'] not in ['draft', 'split']:
msg = colorize(
'red',
'No modulestore specified, running tests for both draft and split.'
)
print msg
suite = AcceptanceTestSuite('{} acceptance'.format(opts['system']), **opts)
suite.run()
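# Illustrative invocation (not part of the original file; assumes paver is run from
# the repository root and that the option names defined in @cmdopts above are unchanged):
#   paver test_acceptance -s lms
#   paver test_acceptance -s cms --extra_args="cms/djangoapps/contentstore/features/"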
| agpl-3.0 |
khchine5/xl | lino_xl/lib/ledger/mixins.py | 1 | 10913 | # -*- coding: UTF-8 -*-
# Copyright 2008-2017 Luc Saffre
# License: BSD (see file COPYING for details)
"""Model mixins for `lino_xl.lib.ledger`.
.. autosummary::
"""
from __future__ import unicode_literals
from builtins import str
from django.db import models
from lino.api import dd, rt, _
from lino.mixins import Sequenced
from etgen.html import E
from .choicelists import VoucherTypes
from .roles import LedgerUser
# from etgen.html import E
# from lino.modlib.notify.utils import rich_text_to_elems
# FKMATCH = False
if dd.is_installed('ledger'):
project_model = dd.plugins.ledger.project_model
else:
project_model = None
class ProjectRelated(dd.Model):
class Meta:
abstract = True
project = dd.ForeignKey(
project_model,
blank=True, null=True,
related_name="%(app_label)s_%(class)s_set_by_project")
@classmethod
def get_registrable_fields(cls, site):
for f in super(ProjectRelated, cls).get_registrable_fields(site):
yield f
if project_model:
yield 'project'
class PartnerRelated(dd.Model):
class Meta:
abstract = True
partner = dd.ForeignKey(
'contacts.Partner',
related_name="%(app_label)s_%(class)s_set_by_partner",
blank=True, null=True)
payment_term = dd.ForeignKey(
'ledger.PaymentTerm',
related_name="%(app_label)s_%(class)s_set_by_payment_term",
blank=True, null=True)
def get_partner(self):
"""Overrides Voucher.get_partner"""
return self.partner
def get_print_language(self):
p = self.get_partner()
if p is not None:
return p.language
def get_recipient(self):
return self.partner
recipient = property(get_recipient)
def partner_changed(self, ar=None):
# does nothing but we need it so that subclasses like
# BankAccount can call super().partner_changed()
pass
@classmethod
def get_registrable_fields(cls, site):
for f in super(PartnerRelated, cls).get_registrable_fields(site):
yield f
yield 'partner'
yield 'payment_term'
def full_clean(self, *args, **kw):
self.fill_defaults()
super(PartnerRelated, self).full_clean(*args, **kw)
def fill_defaults(self):
if not self.payment_term and self.partner_id:
self.payment_term = self.partner.payment_term
if self.payment_term:
self.payment_term_changed()
def payment_term_changed(self, ar=None):
if self.payment_term:
self.due_date = self.payment_term.get_due_date(self.entry_date)
class Matching(dd.Model):
class Meta:
abstract = True
match = dd.CharField(
_("Match"), max_length=20, blank=True,
help_text=_("The movement to be matched."))
@classmethod
def get_match_choices(cls, journal, partner):
"""This is the general algorithm.
"""
matchable_accounts = rt.models.accounts.Account.objects.filter(
matchrule__journal=journal)
fkw = dict(account__in=matchable_accounts)
fkw.update(cleared=False)
if partner:
fkw.update(partner=partner)
qs = rt.models.ledger.Movement.objects.filter(**fkw)
qs = qs.order_by('value_date')
# qs = qs.distinct('match')
return qs.values_list('match', flat=True)
@dd.chooser(simple_values=True)
def match_choices(cls, journal, partner):
# todo: move this to implementing classes?
return cls.get_match_choices(journal, partner)
def get_match(self):
return self.match or self.get_default_match()
class VoucherItem(dd.Model):
allow_cascaded_delete = ['voucher']
class Meta:
abstract = True
# title = models.CharField(_("Description"), max_length=200, blank=True)
def get_row_permission(self, ar, state, ba):
"""Items of registered invoices may not be edited
"""
if not self.voucher.state.editable:
if not ba.action.readonly:
return False
return super(VoucherItem, self).get_row_permission(ar, state, ba)
def get_ana_account(self):
return None
class SequencedVoucherItem(Sequenced):
class Meta:
abstract = True
def get_siblings(self):
return self.voucher.items.all()
class AccountVoucherItem(VoucherItem, SequencedVoucherItem):
class Meta:
abstract = True
account = dd.ForeignKey(
'accounts.Account',
related_name="%(app_label)s_%(class)s_set_by_account")
def get_base_account(self, tt):
return self.account
@dd.chooser()
def account_choices(self, voucher):
if voucher and voucher.journal:
return voucher.journal.get_allowed_accounts()
return rt.models.accounts.Account.objects.none()
# def set_partner_invoice_account(sender, instance=None, **kwargs):
# if instance.account:
# return
# if not instance.voucher:
# return
# p = instance.voucher.partner
# if not p:
# return
# tt = instance.voucher.get_trade_type()
# instance.account = tt.get_partner_invoice_account(p)
# @dd.receiver(dd.post_analyze)
# def on_post_analyze(sender, **kw):
# for m in rt.models_by_base(AccountVoucherItem):
# dd.post_init.connect(set_partner_invoice_account, sender=m)
def JournalRef(**kw):
# ~ kw.update(blank=True,null=True) # Django Ticket #12708
kw.update(related_name="%(app_label)s_%(class)s_set_by_journal")
return dd.ForeignKey('ledger.Journal', **kw)
def VoucherNumber(*args, **kwargs):
return models.IntegerField(*args, **kwargs)
class PeriodRange(dd.Model):
class Meta:
abstract = True
start_period = dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, verbose_name=_("Start period"),
related_name="%(app_label)s_%(class)s_set_by_start_period")
end_period = dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, null=True,
verbose_name=_("End period"),
related_name="%(app_label)s_%(class)s_set_by_end_period")
def get_period_filter(self, fieldname, **kwargs):
return rt.models.ledger.AccountingPeriod.get_period_filter(
fieldname, self.start_period, self.end_period, **kwargs)
class PeriodRangeObservable(dd.Model):
class Meta:
abstract = True
observable_period_field = 'accounting_period'
@classmethod
def setup_parameters(cls, fields):
fields.update(
start_period=dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, null=True,
help_text=_("Start of observed period range"),
verbose_name=_("Period from")))
fields.update(
end_period=dd.ForeignKey(
'ledger.AccountingPeriod',
blank=True, null=True,
help_text=_(
"Optional end of observed period range. "
"Leave empty to consider only the Start period."),
verbose_name=_("Period until")))
super(PeriodRangeObservable, cls).setup_parameters(fields)
@classmethod
def get_request_queryset(cls, ar, **kwargs):
pv = ar.param_values
qs = super(PeriodRangeObservable, cls).get_request_queryset(ar, **kwargs)
flt = rt.models.ledger.AccountingPeriod.get_period_filter(
cls.observable_period_field, pv.start_period, pv.end_period)
return qs.filter(**flt)
@classmethod
def get_title_tags(cls, ar):
for t in super(PeriodRangeObservable, cls).get_title_tags(ar):
yield t
pv = ar.param_values
if pv.start_period is not None:
if pv.end_period is None:
yield str(pv.start_period)
else:
yield "{}..{}".format(pv.start_period, pv.end_period)
class ItemsByVoucher(dd.Table):
label = _("Content")
required_roles = dd.login_required(LedgerUser)
master_key = 'voucher'
order_by = ["seqno"]
auto_fit_column_widths = True
display_mode = 'html'
preview_limit = 0
class VouchersByPartnerBase(dd.VirtualTable):
"""Shows all ledger vouchers of a given partner.
This is a :class:`lino.core.tables.VirtualTable` with a customized
slave summary.
"""
label = _("Partner vouchers")
required_roles = dd.login_required(LedgerUser)
order_by = ["-entry_date", '-id']
master = 'contacts.Partner'
display_mode = 'summary'
_master_field_name = 'partner'
_voucher_base = PartnerRelated
@classmethod
def get_data_rows(self, ar):
obj = ar.master_instance
rows = []
if obj is not None:
flt = {self._master_field_name: obj}
for M in rt.models_by_base(self._voucher_base):
rows += list(M.objects.filter(**flt))
# def by_date(a, b):
# return cmp(b.entry_date, a.entry_date)
rows.sort(key= lambda i: i.entry_date)
return rows
@dd.displayfield(_("Voucher"))
def voucher(self, row, ar):
return ar.obj2html(row)
@dd.virtualfield('ledger.Movement.partner')
def partner(self, row, ar):
return row.partner
@dd.virtualfield('ledger.Voucher.entry_date')
def entry_date(self, row, ar):
return row.entry_date
@dd.virtualfield('ledger.Voucher.state')
def state(self, row, ar):
return row.state
@classmethod
def get_table_summary(self, obj, ar):
elems = []
sar = self.request(master_instance=obj)
# elems += ["Partner:", unicode(ar.master_instance)]
for voucher in sar:
vc = voucher.get_mti_leaf()
if vc and vc.state.name == "draft":
elems += [ar.obj2html(vc), " "]
vtypes = []
for vt in VoucherTypes.items():
if issubclass(vt.model, self._voucher_base):
vtypes.append(vt)
actions = []
def add_action(btn):
if btn is None:
return False
actions.append(btn)
return True
if not ar.get_user().user_type.readonly:
flt = {self._master_field_name: obj}
for vt in vtypes:
for jnl in vt.get_journals():
sar = vt.table_class.insert_action.request_from(
ar, master_instance=jnl,
known_values=flt)
btn = sar.ar2button(label=str(jnl), icon_name=None)
if len(actions):
actions.append(', ')
actions.append(btn)
elems += [E.br(), str(_("Create voucher in journal")), " "] + actions
return E.div(*elems)
| bsd-2-clause |
maartenq/ansible | lib/ansible/modules/network/nxos/nxos_vtp_domain.py | 70 | 5923 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vtp_domain
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages VTP domain configuration.
description:
- Manages VTP domain configuration.
author:
- Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- VTP feature must be active on the device to use this module.
- This module is used to manage only VTP domain names.
- VTP domain names are case-sensitive.
- If it's never been configured before, VTP version is set to 1 by default.
Otherwise, it leaves the previous configured version untouched.
Use M(nxos_vtp_version) to change it.
- Use this in combination with M(nxos_vtp_password) and M(nxos_vtp_version)
to fully manage VTP operations.
options:
domain:
description:
- VTP domain name.
required: true
'''
EXAMPLES = '''
# ENSURE VTP DOMAIN IS CONFIGURED
- nxos_vtp_domain:
domain: ntc
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"domain": "ntc"}
existing:
description:
- k/v pairs of existing vtp domain
returned: always
type: dict
sample: {"domain": "testing", "version": "2", "vtp_password": "\"}
end_state:
description: k/v pairs of vtp domain after module execution
returned: always
type: dict
sample: {"domain": "ntc", "version": "2", "vtp_password": "\"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["vtp domain ntc"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
from ansible.module_utils.network.nxos.nxos import load_config, run_commands
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
import re
def execute_show_command(command, module, command_type='cli_show'):
if 'status' not in command:
output = 'json'
else:
output = 'text'
cmds = [{
'command': command,
'output': output,
}]
body = run_commands(module, cmds)
return body
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_vtp_config(module):
command = 'show vtp status'
body = execute_show_command(
command, module)[0]
vtp_parsed = {}
if body:
version_regex = r'.*VTP version running\s+:\s+(?P<version>\d).*'
domain_regex = r'.*VTP Domain Name\s+:\s+(?P<domain>\S+).*'
try:
match_version = re.match(version_regex, body, re.DOTALL)
version = match_version.groupdict()['version']
except AttributeError:
version = ''
try:
match_domain = re.match(domain_regex, body, re.DOTALL)
domain = match_domain.groupdict()['domain']
except AttributeError:
domain = ''
if domain and version:
vtp_parsed['domain'] = domain
vtp_parsed['version'] = version
vtp_parsed['vtp_password'] = get_vtp_password(module)
return vtp_parsed
def get_vtp_password(module):
command = 'show vtp password'
body = execute_show_command(command, module)[0]
try:
password = body['passwd']
if password:
return str(password)
else:
return ""
except TypeError:
return ""
def main():
argument_spec = dict(
domain=dict(type='str', required=True),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
domain = module.params['domain']
existing = get_vtp_config(module)
end_state = existing
args = dict(domain=domain)
changed = False
proposed = dict((k, v) for k, v in args.items() if v is not None)
delta = dict(set(proposed.items()).difference(existing.items()))
commands = []
if delta:
commands.append(['vtp domain {0}'.format(domain)])
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
changed = True
load_config(module, cmds)
end_state = get_vtp_config(module)
if 'configure' in cmds:
cmds.pop(0)
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['end_state'] = end_state
results['updates'] = cmds
results['changed'] = changed
results['warnings'] = warnings
module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
anandpdoshi/frappe | frappe/core/page/data_import_tool/importer.py | 1 | 8937 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, json
import frappe.permissions
import frappe.async
from frappe import _
from frappe.utils.csvutils import getlink
from frappe.utils.dateutils import parse_date
from frappe.utils import cint, cstr, flt, getdate, get_datetime
from frappe.core.page.data_import_tool.data_import_tool import get_data_keys
#@frappe.async.handler
@frappe.whitelist()
def upload(rows = None, submit_after_import=None, ignore_encoding_errors=False, overwrite=None,
ignore_links=False, pre_process=None, via_console=False):
"""upload data"""
frappe.flags.mute_emails = True
frappe.flags.in_import = True
# extra input params
params = json.loads(frappe.form_dict.get("params") or '{}')
if params.get("submit_after_import"):
submit_after_import = True
if params.get("ignore_encoding_errors"):
ignore_encoding_errors = True
from frappe.utils.csvutils import read_csv_content_from_uploaded_file
def get_data_keys_definition():
return get_data_keys()
def bad_template():
frappe.throw(_("Please do not change the rows above {0}").format(get_data_keys_definition().data_separator))
def check_data_length():
max_rows = 5000
if not data:
frappe.throw(_("No data found"))
elif not via_console and len(data) > max_rows:
frappe.throw(_("Only allowed {0} rows in one import").format(max_rows))
def get_start_row():
for i, row in enumerate(rows):
if row and row[0]==get_data_keys_definition().data_separator:
return i+1
bad_template()
def get_header_row(key):
return get_header_row_and_idx(key)[0]
def get_header_row_and_idx(key):
for i, row in enumerate(header):
if row and row[0]==key:
return row, i
return [], -1
def filter_empty_columns(columns):
empty_cols = filter(lambda x: x in ("", None), columns)
if empty_cols:
if columns[-1*len(empty_cols):] == empty_cols:
# filter empty columns if they exist at the end
columns = columns[:-1*len(empty_cols)]
else:
frappe.msgprint(_("Please make sure that there are no empty columns in the file."),
raise_exception=1)
return columns
def make_column_map():
doctype_row, row_idx = get_header_row_and_idx(get_data_keys_definition().doctype)
if row_idx == -1: # old style
return
dt = None
for i, d in enumerate(doctype_row[1:]):
if d not in ("~", "-"):
if d: # value in doctype_row
if doctype_row[i]==dt:
# prev column is doctype (in case of parentfield)
doctype_parentfield[dt] = doctype_row[i+1]
else:
dt = d
doctypes.append(d)
column_idx_to_fieldname[dt] = {}
column_idx_to_fieldtype[dt] = {}
if dt:
column_idx_to_fieldname[dt][i+1] = rows[row_idx + 2][i+1]
column_idx_to_fieldtype[dt][i+1] = rows[row_idx + 4][i+1]
def get_doc(start_idx):
if doctypes:
doc = {}
for idx in xrange(start_idx, len(rows)):
if (not doc) or main_doc_empty(rows[idx]):
for dt in doctypes:
d = {}
for column_idx in column_idx_to_fieldname[dt]:
try:
fieldname = column_idx_to_fieldname[dt][column_idx]
fieldtype = column_idx_to_fieldtype[dt][column_idx]
d[fieldname] = rows[idx][column_idx]
if fieldtype in ("Int", "Check"):
d[fieldname] = cint(d[fieldname])
elif fieldtype in ("Float", "Currency", "Percent"):
d[fieldname] = flt(d[fieldname])
elif fieldtype == "Date":
d[fieldname] = getdate(parse_date(d[fieldname])) if d[fieldname] else None
elif fieldtype == "Datetime":
if d[fieldname]:
_date, _time = d[fieldname].split()
_date = parse_date(d[fieldname])
d[fieldname] = get_datetime(_date + " " + _time)
else:
d[fieldname] = None
except IndexError:
pass
# scrub quotes from name and modified
if d.get("name") and d["name"].startswith('"'):
d["name"] = d["name"][1:-1]
if sum([0 if not val else 1 for val in d.values()]):
d['doctype'] = dt
if dt == doctype:
doc.update(d)
else:
if not overwrite:
d['parent'] = doc["name"]
d['parenttype'] = doctype
d['parentfield'] = doctype_parentfield[dt]
doc.setdefault(d['parentfield'], []).append(d)
else:
break
return doc
else:
doc = frappe._dict(zip(columns, rows[start_idx][1:]))
doc['doctype'] = doctype
return doc
def main_doc_empty(row):
return not (row and ((len(row) > 1 and row[1]) or (len(row) > 2 and row[2])))
users = frappe.db.sql_list("select name from tabUser")
def prepare_for_insert(doc):
# don't block data import if user is not set
# migrating from another system
if not doc.owner in users:
doc.owner = frappe.session.user
if not doc.modified_by in users:
doc.modified_by = frappe.session.user
# header
if not rows:
rows = read_csv_content_from_uploaded_file(ignore_encoding_errors)
start_row = get_start_row()
header = rows[:start_row]
data = rows[start_row:]
doctype = get_header_row(get_data_keys_definition().main_table)[1]
columns = filter_empty_columns(get_header_row(get_data_keys_definition().columns)[1:])
doctypes = []
doctype_parentfield = {}
column_idx_to_fieldname = {}
column_idx_to_fieldtype = {}
if submit_after_import and not cint(frappe.db.get_value("DocType",
doctype, "is_submittable")):
submit_after_import = False
parenttype = get_header_row(get_data_keys_definition().parent_table)
if len(parenttype) > 1:
parenttype = parenttype[1]
# check permissions
if not frappe.permissions.can_import(parenttype or doctype):
frappe.flags.mute_emails = False
return {"messages": [_("Not allowed to Import") + ": " + _(doctype)], "error": True}
# allow limit rows to be uploaded
check_data_length()
make_column_map()
if overwrite==None:
overwrite = params.get('overwrite')
# delete child rows (if parenttype)
parentfield = None
if parenttype:
parentfield = get_parent_field(doctype, parenttype)
if overwrite:
delete_child_rows(data, doctype)
ret = []
def log(msg):
if via_console:
print msg.encode('utf-8')
else:
ret.append(msg)
def as_link(doctype, name):
if via_console:
return "{0}: {1}".format(doctype, name)
else:
return getlink(doctype, name)
error = False
total = len(data)
for i, row in enumerate(data):
# bypass empty rows
if main_doc_empty(row):
continue
row_idx = i + start_row
doc = None
# publish task_update
frappe.publish_realtime("data_import_progress", {"progress": [i, total]},
user=frappe.session.user)
try:
doc = get_doc(row_idx)
if pre_process:
pre_process(doc)
if parentfield:
parent = frappe.get_doc(parenttype, doc["parent"])
doc = parent.append(parentfield, doc)
parent.save()
log('Inserted row for %s at #%s' % (as_link(parenttype,
doc.parent), unicode(doc.idx)))
else:
if overwrite and doc["name"] and frappe.db.exists(doctype, doc["name"]):
original = frappe.get_doc(doctype, doc["name"])
original_name = original.name
original.update(doc)
# preserve original name for case sensitivity
original.name = original_name
original.flags.ignore_links = ignore_links
original.save()
log('Updated row (#%d) %s' % (row_idx + 1, as_link(original.doctype, original.name)))
doc = original
else:
doc = frappe.get_doc(doc)
prepare_for_insert(doc)
doc.flags.ignore_links = ignore_links
doc.insert()
log('Inserted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
if submit_after_import:
doc.submit()
log('Submitted row (#%d) %s' % (row_idx + 1, as_link(doc.doctype, doc.name)))
except Exception, e:
error = True
if doc:
frappe.errprint(doc if isinstance(doc, dict) else doc.as_dict())
err_msg = frappe.local.message_log and "\n\n".join(frappe.local.message_log) or cstr(e)
log('Error for row (#%d) %s : %s' % (row_idx + 1,
len(row)>1 and row[1] or "", err_msg))
frappe.errprint(frappe.get_traceback())
finally:
frappe.local.message_log = []
if error:
frappe.db.rollback()
else:
frappe.db.commit()
frappe.flags.mute_emails = False
frappe.flags.in_import = False
return {"messages": ret, "error": error}
def get_parent_field(doctype, parenttype):
parentfield = None
# get parentfield
if parenttype:
for d in frappe.get_meta(parenttype).get_table_fields():
if d.options==doctype:
parentfield = d.fieldname
break
if not parentfield:
frappe.msgprint(_("Did not find {0} for {0} ({1})").format("parentfield", parenttype, doctype))
raise Exception
return parentfield
def delete_child_rows(rows, doctype):
"""delete child rows for all parents"""
for p in list(set([r[1] for r in rows])):
if p:
frappe.db.sql("""delete from `tab{0}` where parent=%s""".format(doctype), p)
| mit |
tedelhourani/ansible | lib/ansible/modules/network/netscaler/netscaler_gslb_vserver.py | 22 | 33857 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: netscaler_gslb_vserver
short_description: Configure gslb vserver entities in Netscaler.
description:
- Configure gslb vserver entities in Netscaler.
version_added: "2.4.0"
author: George Nikolopoulos (@giorgos-nikolopoulos)
options:
name:
description:
- >-
Name for the GSLB virtual server. Must begin with an ASCII alphanumeric or underscore C(_) character,
and must contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space, colon C(:), at C(@),
equals C(=), and hyphen C(-) characters. Can be changed after the virtual server is created.
- "Minimum length = 1"
servicetype:
choices:
- 'HTTP'
- 'FTP'
- 'TCP'
- 'UDP'
- 'SSL'
- 'SSL_BRIDGE'
- 'SSL_TCP'
- 'NNTP'
- 'ANY'
- 'SIP_UDP'
- 'SIP_TCP'
- 'SIP_SSL'
- 'RADIUS'
- 'RDP'
- 'RTSP'
- 'MYSQL'
- 'MSSQL'
- 'ORACLE'
description:
- "Protocol used by services bound to the virtual server."
dnsrecordtype:
choices:
- 'A'
- 'AAAA'
- 'CNAME'
- 'NAPTR'
description:
- "DNS record type to associate with the GSLB virtual server's domain name."
- "Default value: A"
- "Possible values = A, AAAA, CNAME, NAPTR"
lbmethod:
choices:
- 'ROUNDROBIN'
- 'LEASTCONNECTION'
- 'LEASTRESPONSETIME'
- 'SOURCEIPHASH'
- 'LEASTBANDWIDTH'
- 'LEASTPACKETS'
- 'STATICPROXIMITY'
- 'RTT'
- 'CUSTOMLOAD'
description:
- "Load balancing method for the GSLB virtual server."
- "Default value: LEASTCONNECTION"
- >-
Possible values = ROUNDROBIN, LEASTCONNECTION, LEASTRESPONSETIME, SOURCEIPHASH, LEASTBANDWIDTH,
LEASTPACKETS, STATICPROXIMITY, RTT, CUSTOMLOAD
backuplbmethod:
choices:
- 'ROUNDROBIN'
- 'LEASTCONNECTION'
- 'LEASTRESPONSETIME'
- 'SOURCEIPHASH'
- 'LEASTBANDWIDTH'
- 'LEASTPACKETS'
- 'STATICPROXIMITY'
- 'RTT'
- 'CUSTOMLOAD'
description:
- >-
Backup load balancing method. Becomes operational if the primary load balancing method fails or
cannot be used. Valid only if the primary method is based on either round-trip time (RTT) or static
proximity.
netmask:
description:
- "IPv4 network mask for use in the SOURCEIPHASH load balancing method."
- "Minimum length = 1"
v6netmasklen:
description:
- >-
Number of bits to consider, in an IPv6 source IP address, for creating the hash that is required by
the C(SOURCEIPHASH) load balancing method.
- "Default value: C(128)"
- "Minimum value = C(1)"
- "Maximum value = C(128)"
tolerance:
description:
- >-
Site selection tolerance, in milliseconds, for implementing the RTT load balancing method. If a
site's RTT deviates from the lowest RTT by more than the specified tolerance, the site is not
considered when the NetScaler appliance makes a GSLB decision. The appliance implements the round
robin method of global server load balancing between sites whose RTT values are within the specified
tolerance. If the tolerance is 0 (zero), the appliance always sends clients the IP address of the
site with the lowest RTT.
- "Minimum value = C(0)"
- "Maximum value = C(100)"
persistencetype:
choices:
- 'SOURCEIP'
- 'NONE'
description:
- "Use source IP address based persistence for the virtual server."
- >-
After the load balancing method selects a service for the first packet, the IP address received in
response to the DNS query is used for subsequent requests from the same client.
persistenceid:
description:
- >-
The persistence ID for the GSLB virtual server. The ID is a positive integer that enables GSLB sites
to identify the GSLB virtual server, and is required if source IP address based or spill over based
persistence is enabled on the virtual server.
- "Minimum value = C(0)"
- "Maximum value = C(65535)"
persistmask:
description:
- >-
The optional IPv4 network mask applied to IPv4 addresses to establish source IP address based
persistence.
- "Minimum length = 1"
v6persistmasklen:
description:
- >-
Number of bits to consider in an IPv6 source IP address when creating source IP address based
persistence sessions.
- "Default value: C(128)"
- "Minimum value = C(1)"
- "Maximum value = C(128)"
timeout:
description:
- "Idle time, in minutes, after which a persistence entry is cleared."
- "Default value: C(2)"
- "Minimum value = C(2)"
- "Maximum value = C(1440)"
mir:
choices:
- 'enabled'
- 'disabled'
description:
- "Include multiple IP addresses in the DNS responses sent to clients."
disableprimaryondown:
choices:
- 'enabled'
- 'disabled'
description:
- >-
Continue to direct traffic to the backup chain even after the primary GSLB virtual server returns to
the UP state. Used when spillover is configured for the virtual server.
dynamicweight:
choices:
- 'SERVICECOUNT'
- 'SERVICEWEIGHT'
- 'DISABLED'
description:
- >-
Specify if the appliance should consider the service count, service weights, or ignore both when
using weight-based load balancing methods. The state of the number of services bound to the virtual
server help the appliance to select the service.
considereffectivestate:
choices:
- 'NONE'
- 'STATE_ONLY'
description:
- >-
If the primary state of all bound GSLB services is DOWN, consider the effective states of all the
GSLB services, obtained through the Metrics Exchange Protocol (MEP), when determining the state of
the GSLB virtual server. To consider the effective state, set the parameter to STATE_ONLY. To
disregard the effective state, set the parameter to NONE.
- >-
The effective state of a GSLB service is the ability of the corresponding virtual server to serve
traffic. The effective state of the load balancing virtual server, which is transferred to the GSLB
service, is UP even if only one virtual server in the backup chain of virtual servers is in the UP
state.
comment:
description:
- "Any comments that you might want to associate with the GSLB virtual server."
somethod:
choices:
- 'CONNECTION'
- 'DYNAMICCONNECTION'
- 'BANDWIDTH'
- 'HEALTH'
- 'NONE'
description:
- "Type of threshold that, when exceeded, triggers spillover. Available settings function as follows:"
- "* C(CONNECTION) - Spillover occurs when the number of client connections exceeds the threshold."
- >-
* C(DYNAMICCONNECTION) - Spillover occurs when the number of client connections at the GSLB virtual
server exceeds the sum of the maximum client (Max Clients) settings for bound GSLB services. Do not
specify a spillover threshold for this setting, because the threshold is implied by the Max Clients
settings of the bound GSLB services.
- >-
* C(BANDWIDTH) - Spillover occurs when the bandwidth consumed by the GSLB virtual server's incoming and
outgoing traffic exceeds the threshold.
- >-
* C(HEALTH) - Spillover occurs when the percentage of weights of the GSLB services that are UP drops
below the threshold. For example, if services gslbSvc1, gslbSvc2, and gslbSvc3 are bound to a virtual
server, with weights 1, 2, and 3, and the spillover threshold is 50%, spillover occurs if gslbSvc1
and gslbSvc3 or gslbSvc2 and gslbSvc3 transition to DOWN.
- "* C(NONE) - Spillover does not occur."
sopersistence:
choices:
- 'enabled'
- 'disabled'
description:
- >-
If spillover occurs, maintain source IP address based persistence for both primary and backup GSLB
virtual servers.
sopersistencetimeout:
description:
- "Timeout for spillover persistence, in minutes."
- "Default value: C(2)"
- "Minimum value = C(2)"
- "Maximum value = C(1440)"
sothreshold:
description:
- >-
Threshold at which spillover occurs. Specify an integer for the CONNECTION spillover method, a
bandwidth value in kilobits per second for the BANDWIDTH method (do not enter the units), or a
percentage for the HEALTH method (do not enter the percentage symbol).
- "Minimum value = C(1)"
- "Maximum value = C(4294967287)"
sobackupaction:
choices:
- 'DROP'
- 'ACCEPT'
- 'REDIRECT'
description:
- >-
Action to be performed if spillover is to take effect, but no backup chain to spillover is usable or
exists.
appflowlog:
choices:
- 'enabled'
- 'disabled'
description:
- "Enable logging appflow flow information."
domain_bindings:
description:
- >-
List of bindings for domains for this glsb vserver.
suboptions:
cookietimeout:
description:
- Timeout, in minutes, for the GSLB site cookie.
domainname:
description:
- Domain name for which to change the time to live (TTL) and/or backup service IP address.
ttl:
description:
- Time to live (TTL) for the domain.
sitedomainttl:
description:
- >-
TTL, in seconds, for all internally created site domains (created when a site prefix is
configured on a GSLB service) that are associated with this virtual server.
- Minimum value = C(1)
service_bindings:
description:
- List of bindings for gslb services bound to this gslb virtual server.
suboptions:
servicename:
description:
- Name of the GSLB service for which to change the weight.
weight:
description:
- Weight to assign to the GSLB service.
disabled:
description:
- When set to C(yes) the GSLB Vserver state will be set to C(disabled).
- When set to C(no) the GSLB Vserver state will be set to C(enabled).
- >-
Note that due to limitations of the underlying NITRO API a C(disabled) state change alone
does not cause the module result to report a changed status.
type: bool
default: false
extends_documentation_fragment: netscaler
requirements:
- nitro python sdk
'''
EXAMPLES = '''
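# Illustrative example only (the original module ships with an empty EXAMPLES block).
# Connection options (nsip, nitro_user, nitro_pass, state) follow the common netscaler
# documentation fragment and are assumed here; the remaining options are documented above.
- name: Configure a GSLB virtual server
  delegate_to: localhost
  netscaler_gslb_vserver:
    nsip: 172.18.0.2
    nitro_user: nsroot
    nitro_pass: nsroot
    state: present
    name: gslb_vserver_1
    servicetype: HTTP
    lbmethod: ROUNDROBIN
    dnsrecordtype: A
    domain_bindings:
      - domainname: www.example.com
        ttl: 5
    service_bindings:
      - servicename: gslb_service_1
        weight: 50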
'''
RETURN = '''
'''
import copy
try:
from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbvserver import gslbvserver
from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbvserver_gslbservice_binding import gslbvserver_gslbservice_binding
from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbvserver_domain_binding import gslbvserver_domain_binding
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
PYTHON_SDK_IMPORTED = True
except ImportError as e:
PYTHON_SDK_IMPORTED = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netscaler import (
ConfigProxy,
get_nitro_client,
netscaler_common_arguments,
log,
loglines,
ensure_feature_is_enabled,
get_immutables_intersection,
complete_missing_attributes
)
gslbvserver_domain_binding_rw_attrs = [
'name',
'domainname',
'backupipflag',
'cookietimeout',
'backupip',
'ttl',
'sitedomainttl',
'cookie_domainflag',
]
gslbvserver_gslbservice_binding_rw_attrs = [
'name',
'servicename',
'weight',
]
def get_actual_domain_bindings(client, module):
log('get_actual_domain_bindings')
# Get actual domain bindings and index them by domainname
actual_domain_bindings = {}
if gslbvserver_domain_binding.count(client, name=module.params['name']) != 0:
# Get all domain bindings associated with the named gslb vserver
fetched_domain_bindings = gslbvserver_domain_binding.get(client, name=module.params['name'])
# index by domainname
for binding in fetched_domain_bindings:
complete_missing_attributes(binding, gslbvserver_domain_binding_rw_attrs, fill_value=None)
actual_domain_bindings[binding.domainname] = binding
return actual_domain_bindings
def get_configured_domain_bindings_proxys(client, module):
log('get_configured_domain_bindings_proxys')
configured_domain_proxys = {}
# Get configured domain bindings and index them by domainname
if module.params['domain_bindings'] is not None:
for configured_domain_binding in module.params['domain_bindings']:
binding_values = copy.deepcopy(configured_domain_binding)
binding_values['name'] = module.params['name']
gslbvserver_domain_binding_proxy = ConfigProxy(
actual=gslbvserver_domain_binding(),
client=client,
attribute_values_dict=binding_values,
readwrite_attrs=gslbvserver_domain_binding_rw_attrs,
readonly_attrs=[],
)
configured_domain_proxys[configured_domain_binding['domainname']] = gslbvserver_domain_binding_proxy
return configured_domain_proxys
def sync_domain_bindings(client, module):
log('sync_domain_bindings')
actual_domain_bindings = get_actual_domain_bindings(client, module)
configured_domain_proxys = get_configured_domain_bindings_proxys(client, module)
# Delete actual bindings not in configured bindings
for domainname, actual_domain_binding in actual_domain_bindings.items():
if domainname not in configured_domain_proxys.keys():
log('Deleting absent binding for domain %s' % domainname)
gslbvserver_domain_binding.delete(client, actual_domain_binding)
# Delete actual bindings that differ from configured
for proxy_key, binding_proxy in configured_domain_proxys.items():
if proxy_key in actual_domain_bindings:
actual_binding = actual_domain_bindings[proxy_key]
if not binding_proxy.has_equal_attributes(actual_binding):
log('Deleting differing binding for domain %s' % binding_proxy.domainname)
gslbvserver_domain_binding.delete(client, actual_binding)
log('Adding new binding for domain %s' % binding_proxy.domainname)
binding_proxy.add()
# Add configured domains that are missing from actual
for proxy_key, binding_proxy in configured_domain_proxys.items():
if proxy_key not in actual_domain_bindings.keys():
log('Adding domain binding for domain %s' % binding_proxy.domainname)
binding_proxy.add()
def domain_bindings_identical(client, module):
log('domain_bindings_identical')
actual_domain_bindings = get_actual_domain_bindings(client, module)
configured_domain_proxys = get_configured_domain_bindings_proxys(client, module)
actual_keyset = set(actual_domain_bindings.keys())
configured_keyset = set(configured_domain_proxys.keys())
symmetric_difference = actual_keyset ^ configured_keyset
log('symmetric difference %s' % symmetric_difference)
if len(symmetric_difference) != 0:
return False
# Item for item equality test
for key, proxy in configured_domain_proxys.items():
diff = proxy.diff_object(actual_domain_bindings[key])
if 'backupipflag' in diff:
del diff['backupipflag']
if not len(diff) == 0:
return False
# Fallthrough to True result
return True
def get_actual_service_bindings(client, module):
log('get_actual_service_bindings')
# Get actual domain bindings and index them by domainname
actual_bindings = {}
if gslbvserver_gslbservice_binding.count(client, name=module.params['name']) != 0:
# Get all service bindings associated with the named gslb vserver
fetched_bindings = gslbvserver_gslbservice_binding.get(client, name=module.params['name'])
# index by servicename
for binding in fetched_bindings:
complete_missing_attributes(binding, gslbvserver_gslbservice_binding_rw_attrs, fill_value=None)
actual_bindings[binding.servicename] = binding
return actual_bindings
def get_configured_service_bindings(client, module):
log('get_configured_service_bindings_proxys')
configured_proxys = {}
# Get configured domain bindings and index them by domainname
if module.params['service_bindings'] is not None:
for configured_binding in module.params['service_bindings']:
binding_values = copy.deepcopy(configured_binding)
binding_values['name'] = module.params['name']
gslbvserver_service_binding_proxy = ConfigProxy(
actual=gslbvserver_gslbservice_binding(),
client=client,
attribute_values_dict=binding_values,
readwrite_attrs=gslbvserver_gslbservice_binding_rw_attrs,
readonly_attrs=[],
)
configured_proxys[configured_binding['servicename']] = gslbvserver_service_binding_proxy
return configured_proxys
def sync_service_bindings(client, module):
actual = get_actual_service_bindings(client, module)
configured = get_configured_service_bindings(client, module)
# Delete extraneous
extraneous_service_bindings = list(set(actual.keys()) - set(configured.keys()))
for servicename in extraneous_service_bindings:
log('Deleting missing binding from service %s' % servicename)
binding = actual[servicename]
binding.name = module.params['name']
gslbvserver_gslbservice_binding.delete(client, binding)
# Recreate different
common_service_bindings = list(set(actual.keys()) & set(configured.keys()))
for servicename in common_service_bindings:
proxy = configured[servicename]
binding = actual[servicename]
if not proxy.has_equal_attributes(binding):
log('Recreating differing service binding %s' % servicename)
gslbvserver_gslbservice_binding.delete(client, binding)
proxy.add()
# Add missing
missing_service_bindings = list(set(configured.keys()) - set(actual.keys()))
for servicename in missing_service_bindings:
proxy = configured[servicename]
log('Adding missing service binding %s' % servicename)
proxy.add()
def service_bindings_identical(client, module):
actual_bindings = get_actual_service_bindings(client, module)
configured_proxys = get_configured_service_bindings(client, module)
actual_keyset = set(actual_bindings.keys())
configured_keyset = set(configured_proxys.keys())
symmetric_difference = actual_keyset ^ configured_keyset
if len(symmetric_difference) != 0:
return False
# Item for item equality test
for key, proxy in configured_proxys.items():
if key in actual_bindings.keys():
if not proxy.has_equal_attributes(actual_bindings[key]):
return False
# Fallthrough to True result
return True
def gslb_vserver_exists(client, module):
if gslbvserver.count_filtered(client, 'name:%s' % module.params['name']) > 0:
return True
else:
return False
def gslb_vserver_identical(client, module, gslb_vserver_proxy):
gslb_vserver_list = gslbvserver.get_filtered(client, 'name:%s' % module.params['name'])
diff_dict = gslb_vserver_proxy.diff_object(gslb_vserver_list[0])
if len(diff_dict) != 0:
return False
else:
return True
def all_identical(client, module, gslb_vserver_proxy):
return (
gslb_vserver_identical(client, module, gslb_vserver_proxy) and
domain_bindings_identical(client, module) and
service_bindings_identical(client, module)
)
def diff_list(client, module, gslb_vserver_proxy):
gslb_vserver_list = gslbvserver.get_filtered(client, 'name:%s' % module.params['name'])
return gslb_vserver_proxy.diff_object(gslb_vserver_list[0])
def do_state_change(client, module, gslb_vserver_proxy):
if module.params['disabled']:
log('Disabling glsb_vserver')
result = gslbvserver.disable(client, gslb_vserver_proxy.actual)
else:
log('Enabling gslbvserver')
result = gslbvserver.enable(client, gslb_vserver_proxy.actual)
return result
def main():
module_specific_arguments = dict(
name=dict(type='str'),
servicetype=dict(
type='str',
choices=[
'HTTP',
'FTP',
'TCP',
'UDP',
'SSL',
'SSL_BRIDGE',
'SSL_TCP',
'NNTP',
'ANY',
'SIP_UDP',
'SIP_TCP',
'SIP_SSL',
'RADIUS',
'RDP',
'RTSP',
'MYSQL',
'MSSQL',
'ORACLE',
]
),
dnsrecordtype=dict(
type='str',
choices=[
'A',
'AAAA',
'CNAME',
'NAPTR',
]
),
lbmethod=dict(
type='str',
choices=[
'ROUNDROBIN',
'LEASTCONNECTION',
'LEASTRESPONSETIME',
'SOURCEIPHASH',
'LEASTBANDWIDTH',
'LEASTPACKETS',
'STATICPROXIMITY',
'RTT',
'CUSTOMLOAD',
]
),
backuplbmethod=dict(
type='str',
choices=[
'ROUNDROBIN',
'LEASTCONNECTION',
'LEASTRESPONSETIME',
'SOURCEIPHASH',
'LEASTBANDWIDTH',
'LEASTPACKETS',
'STATICPROXIMITY',
'RTT',
'CUSTOMLOAD',
]
),
netmask=dict(type='str'),
v6netmasklen=dict(type='float'),
tolerance=dict(type='float'),
persistencetype=dict(
type='str',
choices=[
'SOURCEIP',
'NONE',
]
),
persistenceid=dict(type='float'),
persistmask=dict(type='str'),
v6persistmasklen=dict(type='float'),
timeout=dict(type='float'),
mir=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
disableprimaryondown=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
dynamicweight=dict(
type='str',
choices=[
'SERVICECOUNT',
'SERVICEWEIGHT',
'DISABLED',
]
),
considereffectivestate=dict(
type='str',
choices=[
'NONE',
'STATE_ONLY',
]
),
comment=dict(type='str'),
somethod=dict(
type='str',
choices=[
'CONNECTION',
'DYNAMICCONNECTION',
'BANDWIDTH',
'HEALTH',
'NONE',
]
),
sopersistence=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
sopersistencetimeout=dict(type='float'),
sothreshold=dict(type='float'),
sobackupaction=dict(
type='str',
choices=[
'DROP',
'ACCEPT',
'REDIRECT',
]
),
appflowlog=dict(
type='str',
choices=[
'enabled',
'disabled',
]
),
domainname=dict(type='str'),
cookie_domain=dict(type='str'),
)
hand_inserted_arguments = dict(
domain_bindings=dict(type='list'),
service_bindings=dict(type='list'),
disabled=dict(
type='bool',
default=False,
),
)
argument_spec = dict()
argument_spec.update(netscaler_common_arguments)
argument_spec.update(module_specific_arguments)
argument_spec.update(hand_inserted_arguments)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module_result = dict(
changed=False,
failed=False,
loglines=loglines,
)
# Fail the module if imports failed
if not PYTHON_SDK_IMPORTED:
module.fail_json(msg='Could not load nitro python sdk')
# Fallthrough to rest of execution
client = get_nitro_client(module)
try:
client.login()
except nitro_exception as e:
msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg)
except Exception as e:
if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
module.fail_json(msg='Connection error %s' % str(e))
elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
module.fail_json(msg='SSL Error %s' % str(e))
else:
module.fail_json(msg='Unexpected error during login %s' % str(e))
readwrite_attrs = [
'name',
'servicetype',
'dnsrecordtype',
'lbmethod',
'backuplbmethod',
'netmask',
'v6netmasklen',
'tolerance',
'persistencetype',
'persistenceid',
'persistmask',
'v6persistmasklen',
'timeout',
'mir',
'disableprimaryondown',
'dynamicweight',
'considereffectivestate',
'comment',
'somethod',
'sopersistence',
'sopersistencetimeout',
'sothreshold',
'sobackupaction',
'appflowlog',
'cookie_domain',
]
readonly_attrs = [
'curstate',
'status',
'lbrrreason',
'iscname',
'sitepersistence',
'totalservices',
'activeservices',
'statechangetimesec',
'statechangetimemsec',
'tickssincelaststatechange',
'health',
'policyname',
'priority',
'gotopriorityexpression',
'type',
'vsvrbindsvcip',
'vsvrbindsvcport',
'__count',
]
immutable_attrs = [
'name',
'servicetype',
]
transforms = {
'mir': [lambda v: v.upper()],
'disableprimaryondown': [lambda v: v.upper()],
'sopersistence': [lambda v: v.upper()],
'appflowlog': [lambda v: v.upper()],
}
# Instantiate config proxy
gslb_vserver_proxy = ConfigProxy(
actual=gslbvserver(),
client=client,
attribute_values_dict=module.params,
readwrite_attrs=readwrite_attrs,
readonly_attrs=readonly_attrs,
immutable_attrs=immutable_attrs,
transforms=transforms,
)
try:
ensure_feature_is_enabled(client, 'GSLB')
# Apply appropriate state
if module.params['state'] == 'present':
log('Applying state present')
if not gslb_vserver_exists(client, module):
log('Creating object')
if not module.check_mode:
gslb_vserver_proxy.add()
sync_domain_bindings(client, module)
sync_service_bindings(client, module)
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
elif not all_identical(client, module, gslb_vserver_proxy):
log('Entering update actions')
# Check if we try to change value of immutable attributes
if not gslb_vserver_identical(client, module, gslb_vserver_proxy):
log('Updating gslb vserver')
immutables_changed = get_immutables_intersection(gslb_vserver_proxy, diff_list(client, module, gslb_vserver_proxy).keys())
if immutables_changed != []:
module.fail_json(
msg='Cannot update immutable attributes %s' % (immutables_changed,),
diff=diff_list(client, module, gslb_vserver_proxy),
**module_result
)
if not module.check_mode:
gslb_vserver_proxy.update()
# Update domain bindings
if not domain_bindings_identical(client, module):
if not module.check_mode:
sync_domain_bindings(client, module)
# Update service bindings
if not service_bindings_identical(client, module):
if not module.check_mode:
sync_service_bindings(client, module)
module_result['changed'] = True
if not module.check_mode:
if module.params['save_config']:
client.save_config()
else:
module_result['changed'] = False
if not module.check_mode:
res = do_state_change(client, module, gslb_vserver_proxy)
if res.errorcode != 0:
msg = 'Error when setting disabled state. errorcode: %s message: %s' % (res.errorcode, res.message)
module.fail_json(msg=msg, **module_result)
# Sanity check for state
if not module.check_mode:
if not gslb_vserver_exists(client, module):
module.fail_json(msg='GSLB Vserver does not exist', **module_result)
if not gslb_vserver_identical(client, module, gslb_vserver_proxy):
module.fail_json(msg='GSLB Vserver differs from configured', diff=diff_list(client, module, gslb_vserver_proxy), **module_result)
if not domain_bindings_identical(client, module):
module.fail_json(msg='Domain bindings differ from configured', diff=diff_list(client, module, gslb_vserver_proxy), **module_result)
if not service_bindings_identical(client, module):
module.fail_json(msg='Service bindings differ from configured', diff=diff_list(client, module, gslb_vserver_proxy), **module_result)
elif module.params['state'] == 'absent':
if gslb_vserver_exists(client, module):
if not module.check_mode:
gslb_vserver_proxy.delete()
if module.params['save_config']:
client.save_config()
module_result['changed'] = True
else:
module_result['changed'] = False
# Sanity check for state
if not module.check_mode:
if gslb_vserver_exists(client, module):
module.fail_json(msg='GSLB Vserver still exists', **module_result)
except nitro_exception as e:
msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
module.fail_json(msg=msg, **module_result)
client.logout()
module.exit_json(**module_result)
if __name__ == "__main__":
main()
| gpl-3.0 |
ehashman/oh-mainline | vendor/packages/Django/django/utils/importlib.py | 124 | 1228 | # Taken from Python 2.7 with permission from/by the original author.
import sys
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in range(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]
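# Illustrative usage (not part of the original file):
#   json_mod = import_module('json')                    # absolute import
#   models = import_module('.models', package='myapp')  # relative import, resolves to 'myapp.models'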
| agpl-3.0 |
philetus/geosolver | workbench/tree.py | 1 | 7545 | from includes import *
from parameters import Settings
class Tree:
def __init__(self, root):
self.settings = Settings()
self.orientation = self.settings.dvData.treeAlignment
self.maxDepth = 100
self.siblingSeperation = 5
self.subtreeSeperation = 5
self.levelSeperation = 40
self.maxLevelHeight = []
self.maxLevelWidth = []
self.previousLevelNode = []
self.root = None
self.topXAdjustment = 0
self.topYAdjustment = 0
self.rootOffset = QtCore.QPoint()
def firstWalk(self, tree, node, level):
leftSibbling = None
node.position.setX(0.0)
node.position.setY(0.0)
node.leftNeighbour = None
node.rightNeighbour = None
tree.setLevelHeight(node, level)
tree.setLevelWidth(node, level)
tree.setNeighbours(node, level)
if (node.getChildrenCount() == 0) or (level == tree.maxDepth):
leftSibling = node.getLeftSibling()
if leftSibling != None:
node.prelim = leftSibling.prelim + tree.getNodeSize(leftSibling) + tree.siblingSeperation
else:
node.prelim = 0.0
else:
for chldNode in node.children:
self.firstWalk(tree, chldNode, level+1)
midPoint = node.getChildrenCenter(tree)
midPoint -= tree.getNodeSize(node)/2.0
leftSibling = node.getLeftSibling()
if leftSibling != None:
node.prelim = leftSibling.prelim + tree.getNodeSize(leftSibling) + tree.siblingSeperation
node.modifier = node.prelim - midPoint
self.apportion(tree, node, level)
else:
node.prelim = midPoint
def apportion(self, tree, node, level):
k = tree.maxDepth - level
j = 1
if node.getChildrenCount() != 0:
firstChild = node.children[0]
firstChildLeftNeighbour = node.children[0].leftNeighbour
else:
firstChild = None
firstChildLeftNeighbour = None
while firstChild != None and firstChildLeftNeighbour != None and j <= k:
modifierSumRight = 0.0
modifierSumLeft = 0.0
rightAncestor = firstChild
leftAncestor = firstChildLeftNeighbour
for i in range(j):
rightAncestor = rightAncestor.parentNode
leftAncestor = leftAncestor.parentNode
modifierSumRight += rightAncestor.modifier
modifierSumLeft += leftAncestor.modifier
totalGap = (firstChildLeftNeighbour.prelim + modifierSumLeft + tree.getNodeSize(firstChildLeftNeighbour) + tree.subtreeSeperation) - (firstChild.prelim + modifierSumRight)
if totalGap > 0:
subtreeAux = node
numSubtrees = 0
while subtreeAux != None and subtreeAux != leftAncestor:
numSubtrees +=1
subtreeAux = subtreeAux.getLeftSibling()
if subtreeAux != None:
subtreeMoveAux = node
singleGap = totalGap / numSubtrees
while subtreeMoveAux != None and subtreeMoveAux != leftAncestor:
subtreeMoveAux.prelim += totalGap
subtreeMoveAux.modifier += totalGap
totalGap -= singleGap
subtreeMoveAux = subtreeMoveAux.getLeftSibling()
j += 1
if firstChild.getChildrenCount() == 0:
firstChild = tree.getLeftMost(node, 0, j)
else:
firstChild = firstChild.children[0]
if firstChild != None:
firstChildLeftNeighbour = firstChild.leftNeighbour
def secondWalk(self, tree, node, level, posX, posY):
if level <= tree.maxDepth:
xTmp = tree.rootOffset.x() + node.prelim + posX
yTmp = tree.rootOffset.y() + posY
maxSizeTmp = 0
nodeSizeTmp = 0
flag = False
if self.orientation == TreeOrientation.TOP or self.orientation == TreeOrientation.BOTTOM:
maxSizeTmp = tree.maxLevelHeight[level]
nodeSizeTmp = node.height
elif self.orientation == TreeOrientation.LEFT or self.orientation == TreeOrientation.RIGHT:
maxSizeTmp = tree.maxLevelWidth[level]
nodeSizeTmp = node.width
flag = True
node.position.setX(xTmp)
node.position.setY(yTmp)
if flag:
swapTmp = node.position.x()
node.position.setX(node.position.y())
node.position.setY(swapTmp)
if self.orientation == TreeOrientation.BOTTOM:
node.position.setY(-node.position.y() - nodeSizeTmp)
elif self.orientation == TreeOrientation.RIGHT:
node.position.setX(-node.position.x() - nodeSizeTmp)
if node.getChildrenCount() != 0:
self.secondWalk(tree, node.children[0], level+1, posX + node.modifier, posY + maxSizeTmp + tree.levelSeperation)
rightSibling = node.getRightSibling()
if rightSibling != None:
self.secondWalk(tree, rightSibling, level, posX, posY)
def positionTree(self):
self.maxLevelWidth = []
self.maxLevelHeight = []
self.previousLevelNode = []
self.firstWalk(self, self.root, 0)
self.rootOffset.setX( self.topXAdjustment + self.root.position.x())
self.rootOffset.setY( self.topYAdjustment + self.root.position.y())
self.secondWalk(self, self.root, 0, 0, 0)
def updateTree(self):
self.positionTree()
def setLevelHeight(self, node, level):
if len(self.maxLevelHeight) <= level:
for i in range(level-len(self.maxLevelHeight)+1):
self.maxLevelHeight += [None]
if self.maxLevelHeight[level]< node.height:
self.maxLevelHeight[level] = node.height
def setLevelWidth(self, node, level):
if len(self.maxLevelWidth) <= level:
for i in range(level-len(self.maxLevelWidth)+1):
self.maxLevelWidth += [None]
if self.maxLevelWidth[level]< node.width:
self.maxLevelWidth[level] = node.width
def setNeighbours(self, node, level):
if len(self.previousLevelNode) > level:
node.leftNeighbour = self.previousLevelNode[level]
else:
for i in range(level - len(self.previousLevelNode)+1):
self.previousLevelNode += [None]
if node.leftNeighbour != None:
node.leftNeighbour.rightNeighbour = node
self.previousLevelNode[level] = node
def getLeftMost(self, node, level, maxLevel):
if level >= maxLevel:
return node
if node.getChildrenCount() == 0:
return None
for chldNode in node.children:
leftMostDescendant = self.getLeftMost(chldNode, level+1, maxLevel)
if leftMostDescendant != None:
return leftMostDescendant
def getNodeSize(self, node):
if self.orientation == TreeOrientation.TOP or self.orientation == TreeOrientation.BOTTOM:
return node.width
elif self.orientation == TreeOrientation.LEFT or self.orientation == TreeOrientation.RIGHT:
return node.height
def clear(self, node):
node.clear()
for childNode in node.children:
self.clear(childNode)
def __str__(self):
pass
def __str_recursive_(self):
pass
class Node:
def __init__(self, parentNode):
self.prelim = 0
self.position = QtCore.QPointF()
self.modifier = 0.0
self.width = 50.0
self.height = 40.0
self.isCollapsed = False
self.canCollapse = True
self.parentNode = parentNode
self.leftNeighbour = None
self.rightNeighbour = None
self.children = []
self.variables = []
def collapse(self):
pass
def expand(self):
pass
def getLeftSibling(self):
if self.leftNeighbour != None and self.leftNeighbour.parentNode == self.parentNode:
return self.leftNeighbour
else:
return None
def getRightSibling(self):
if self.rightNeighbour != None and self.rightNeighbour.parentNode == self.parentNode:
return self.rightNeighbour
else:
return None
def getChildrenCenter(self, tree):
if len(self.children) > 0:
return self.children[0].prelim + ((self.children[-1].prelim - self.children[0].prelim) + tree.getNodeSize(self.children[-1]))/2.0
else:
return 0.0
def getChildrenCount(self):
if self.isCollapsed:
return 0
else:
return len(self.children)
def clear(self):
self.position.setX(0.0)
self.position.setY(0.0)
self.prelim = 0.0
self.modifier = 0.0
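# Illustrative usage sketch (added commentary, not part of the original file).
# The enclosing tree-layout class defined earlier in this module is assumed to
# be instantiable as `tree`, and QPointF (used by Node.position) is assumed to
# come from whichever Qt binding the rest of the file imports.
#
#     root = Node(None)
#     left, right = Node(root), Node(root)
#     root.children = [left, right]
#     tree.root = root
#     tree.positionTree()                     # runs firstWalk() then secondWalk()
#     left.position.x(), left.position.y()    # final layout coordinates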
| gpl-3.0 |
myang321/django | django/contrib/gis/gdal/prototypes/geom.py | 450 | 4735 | from ctypes import POINTER, c_char_p, c_double, c_int, c_void_p
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.errcheck import check_envelope
from django.contrib.gis.gdal.prototypes.generation import (
const_string_output, double_output, geom_output, int_output, srs_output,
string_output, void_output,
)
# ### Generation routines specific to this module ###
def env_func(f, argtypes):
"For getting OGREnvelopes."
f.argtypes = argtypes
f.restype = None
f.errcheck = check_envelope
return f
def pnt_func(f):
"For accessing point information."
return double_output(f, [c_void_p, c_int])
def topology_func(f):
f.argtypes = [c_void_p, c_void_p]
f.restype = c_int
    # Coerce the OGR boolean (int) result to a Python bool.
    f.errcheck = lambda result, func, cargs: bool(result)
return f
# ### OGR_G ctypes function prototypes ###
# GeoJSON routines.
from_json = geom_output(lgdal.OGR_G_CreateGeometryFromJson, [c_char_p])
to_json = string_output(lgdal.OGR_G_ExportToJson, [c_void_p], str_result=True, decoding='ascii')
to_kml = string_output(lgdal.OGR_G_ExportToKML, [c_void_p, c_char_p], str_result=True, decoding='ascii')
# GetX, GetY, GetZ all return doubles.
getx = pnt_func(lgdal.OGR_G_GetX)
gety = pnt_func(lgdal.OGR_G_GetY)
getz = pnt_func(lgdal.OGR_G_GetZ)
# Geometry creation routines.
from_wkb = geom_output(lgdal.OGR_G_CreateFromWkb, [c_char_p, c_void_p, POINTER(c_void_p), c_int], offset=-2)
from_wkt = geom_output(lgdal.OGR_G_CreateFromWkt, [POINTER(c_char_p), c_void_p, POINTER(c_void_p)], offset=-1)
create_geom = geom_output(lgdal.OGR_G_CreateGeometry, [c_int])
clone_geom = geom_output(lgdal.OGR_G_Clone, [c_void_p])
get_geom_ref = geom_output(lgdal.OGR_G_GetGeometryRef, [c_void_p, c_int])
get_boundary = geom_output(lgdal.OGR_G_GetBoundary, [c_void_p])
geom_convex_hull = geom_output(lgdal.OGR_G_ConvexHull, [c_void_p])
geom_diff = geom_output(lgdal.OGR_G_Difference, [c_void_p, c_void_p])
geom_intersection = geom_output(lgdal.OGR_G_Intersection, [c_void_p, c_void_p])
geom_sym_diff = geom_output(lgdal.OGR_G_SymmetricDifference, [c_void_p, c_void_p])
geom_union = geom_output(lgdal.OGR_G_Union, [c_void_p, c_void_p])
# Geometry modification routines.
add_geom = void_output(lgdal.OGR_G_AddGeometry, [c_void_p, c_void_p])
import_wkt = void_output(lgdal.OGR_G_ImportFromWkt, [c_void_p, POINTER(c_char_p)])
# Destroys a geometry
destroy_geom = void_output(lgdal.OGR_G_DestroyGeometry, [c_void_p], errcheck=False)
# Geometry export routines.
to_wkb = void_output(lgdal.OGR_G_ExportToWkb, None, errcheck=True) # special handling for WKB.
to_wkt = string_output(lgdal.OGR_G_ExportToWkt, [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_gml = string_output(lgdal.OGR_G_ExportToGML, [c_void_p], str_result=True, decoding='ascii')
get_wkbsize = int_output(lgdal.OGR_G_WkbSize, [c_void_p])
# Geometry spatial-reference related routines.
assign_srs = void_output(lgdal.OGR_G_AssignSpatialReference, [c_void_p, c_void_p], errcheck=False)
get_geom_srs = srs_output(lgdal.OGR_G_GetSpatialReference, [c_void_p])
# Geometry properties
get_area = double_output(lgdal.OGR_G_GetArea, [c_void_p])
get_centroid = void_output(lgdal.OGR_G_Centroid, [c_void_p, c_void_p])
get_dims = int_output(lgdal.OGR_G_GetDimension, [c_void_p])
get_coord_dim = int_output(lgdal.OGR_G_GetCoordinateDimension, [c_void_p])
set_coord_dim = void_output(lgdal.OGR_G_SetCoordinateDimension, [c_void_p, c_int], errcheck=False)
get_geom_count = int_output(lgdal.OGR_G_GetGeometryCount, [c_void_p])
get_geom_name = const_string_output(lgdal.OGR_G_GetGeometryName, [c_void_p], decoding='ascii')
get_geom_type = int_output(lgdal.OGR_G_GetGeometryType, [c_void_p])
get_point_count = int_output(lgdal.OGR_G_GetPointCount, [c_void_p])
get_point = void_output(lgdal.OGR_G_GetPoint,
[c_void_p, c_int, POINTER(c_double), POINTER(c_double), POINTER(c_double)], errcheck=False
)
geom_close_rings = void_output(lgdal.OGR_G_CloseRings, [c_void_p], errcheck=False)
# Topology routines.
ogr_contains = topology_func(lgdal.OGR_G_Contains)
ogr_crosses = topology_func(lgdal.OGR_G_Crosses)
ogr_disjoint = topology_func(lgdal.OGR_G_Disjoint)
ogr_equals = topology_func(lgdal.OGR_G_Equals)
ogr_intersects = topology_func(lgdal.OGR_G_Intersects)
ogr_overlaps = topology_func(lgdal.OGR_G_Overlaps)
ogr_touches = topology_func(lgdal.OGR_G_Touches)
ogr_within = topology_func(lgdal.OGR_G_Within)
# Transformation routines.
geom_transform = void_output(lgdal.OGR_G_Transform, [c_void_p, c_void_p])
geom_transform_to = void_output(lgdal.OGR_G_TransformTo, [c_void_p, c_void_p])
# For retrieving the envelope of the geometry.
get_envelope = env_func(lgdal.OGR_G_GetEnvelope, [c_void_p, POINTER(OGREnvelope)])
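# Minimal usage sketch (added for illustration; not part of the original
# module). These prototypes are normally consumed by OGRGeometry, but they can
# be exercised directly with raw OGR geometry pointers. `byref` comes from
# ctypes, and a working GDAL installation is assumed.
def _example_point_from_wkt():
    from ctypes import byref
    wkt = c_char_p(b'POINT (1 2)')
    geom = from_wkt(byref(wkt), None, byref(c_void_p()))
    x, y = getx(geom, 0), gety(geom, 0)  # -> 1.0, 2.0
    destroy_geom(geom)  # OGR geometries must be freed explicitly
    return x, y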
| bsd-3-clause |
ronfung/incubator-airflow | airflow/utils/dates.py | 28 | 8416 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime, date, timedelta
from dateutil.relativedelta import relativedelta # for doctest
import six
from croniter import croniter
cron_presets = {
'@hourly': '0 * * * *',
'@daily': '0 0 * * *',
'@weekly': '0 0 * * 0',
'@monthly': '0 0 1 * *',
'@yearly': '0 0 1 1 *',
}
def date_range(
start_date,
end_date=None,
num=None,
delta=None):
"""
Get a set of dates as a list based on a start, end and delta, delta
can be something that can be added to ``datetime.datetime``
or a cron expression as a ``str``
:param start_date: anchor date to start the series from
:type start_date: datetime.datetime
:param end_date: right boundary for the date range
:type end_date: datetime.datetime
    :param num: as an alternative to end_date, you can specify the number of
        entries you want in the range. This number can be negative; the
        output will always be sorted regardless.
:type num: int
>>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta=timedelta(1))
[datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 1, 2, 0, 0), datetime.datetime(2016, 1, 3, 0, 0)]
>>> date_range(datetime(2016, 1, 1), datetime(2016, 1, 3), delta='0 0 * * *')
[datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 1, 2, 0, 0), datetime.datetime(2016, 1, 3, 0, 0)]
>>> date_range(datetime(2016, 1, 1), datetime(2016, 3, 3), delta="0 0 0 * *")
[datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2016, 2, 1, 0, 0), datetime.datetime(2016, 3, 1, 0, 0)]
"""
if not delta:
return []
if end_date and start_date > end_date:
raise Exception("Wait. start_date needs to be before end_date")
if end_date and num:
raise Exception("Wait. Either specify end_date OR num")
if not end_date and not num:
end_date = datetime.now()
delta_iscron = False
if isinstance(delta, six.string_types):
delta_iscron = True
cron = croniter(delta, start_date)
elif isinstance(delta, timedelta):
delta = abs(delta)
l = []
if end_date:
while start_date <= end_date:
l.append(start_date)
if delta_iscron:
start_date = cron.get_next(datetime)
else:
start_date += delta
else:
for i in range(abs(num)):
l.append(start_date)
if delta_iscron:
if num > 0:
start_date = cron.get_next(datetime)
else:
start_date = cron.get_prev(datetime)
else:
if num > 0:
start_date += delta
else:
start_date -= delta
return sorted(l)
def round_time(dt, delta, start_date=datetime.min):
"""
Returns the datetime of the form start_date + i * delta
which is closest to dt for any non-negative integer i.
Note that delta may be a datetime.timedelta or a dateutil.relativedelta
>>> round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
datetime.datetime(2015, 1, 1, 0, 0)
>>> round_time(datetime(2015, 1, 2), relativedelta(months=1))
datetime.datetime(2015, 1, 1, 0, 0)
>>> round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 16, 0, 0)
>>> round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 15, 0, 0)
>>> round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 14, 0, 0)
>>> round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(2015, 9, 14, 0, 0))
datetime.datetime(2015, 9, 14, 0, 0)
"""
if isinstance(delta, six.string_types):
# It's cron based, so it's easy
cron = croniter(delta, start_date)
prev = cron.get_prev(datetime)
if prev == start_date:
return start_date
else:
return prev
# Ignore the microseconds of dt
dt -= timedelta(microseconds=dt.microsecond)
# We are looking for a datetime in the form start_date + i * delta
# which is as close as possible to dt. Since delta could be a relative
    # delta we don't know its exact length in seconds so we cannot rely on
    # division to find i. Instead we employ a binary search algorithm, first
    # finding an upper and lower limit and then dissecting the interval until
# we have found the closest match.
# We first search an upper limit for i for which start_date + upper * delta
# exceeds dt.
upper = 1
while start_date + upper*delta < dt:
# To speed up finding an upper limit we grow this exponentially by a
# factor of 2
upper *= 2
# Since upper is the first value for which start_date + upper * delta
    # exceeds dt, upper // 2 is below dt and therefore forms a lower limit
    # for the i we are looking for.
lower = upper // 2
# We now continue to intersect the interval between
# start_date + lower * delta and start_date + upper * delta
# until we find the closest value
while True:
# Invariant: start + lower * delta < dt <= start + upper * delta
# If start_date + (lower + 1)*delta exceeds dt, then either lower or
# lower+1 has to be the solution we are searching for
if start_date + (lower + 1)*delta >= dt:
# Check if start_date + (lower + 1)*delta or
# start_date + lower*delta is closer to dt and return the solution
if (
(start_date + (lower + 1) * delta) - dt <=
dt - (start_date + lower * delta)):
return start_date + (lower + 1)*delta
else:
return start_date + lower * delta
# We intersect the interval and either replace the lower or upper
# limit with the candidate
candidate = lower + (upper - lower) // 2
if start_date + candidate*delta >= dt:
upper = candidate
else:
lower = candidate
# in the special case when start_date > dt the search for upper will
# immediately stop for upper == 1 which results in lower = upper // 2 = 0
# and this function returns start_date.
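    # Illustrative trace (added commentary): with start_date=2015-09-01,
    # delta=timedelta(days=1) and dt=2015-09-06 08:00, `upper` doubles
    # 1 -> 2 -> 4 -> 8 (the first i with start_date + i*delta >= dt), so
    # lower = 8 // 2 = 4. Bisection then narrows to lower=5, upper=6;
    # start_date + 6*delta (Sep 7) is 16h away from dt while
    # start_date + 5*delta (Sep 6) is only 8h away, so Sep 6 is returned.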
def infer_time_unit(time_seconds_arr):
"""
Determine the most appropriate time unit for an array of time durations
specified in seconds.
e.g. 5400 seconds => 'minutes', 36000 seconds => 'hours'
"""
if len(time_seconds_arr) == 0:
return 'hours'
max_time_seconds = max(time_seconds_arr)
if max_time_seconds <= 60*2:
return 'seconds'
elif max_time_seconds <= 60*60*2:
return 'minutes'
elif max_time_seconds <= 24*60*60*2:
return 'hours'
else:
return 'days'
def scale_time_units(time_seconds_arr, unit):
"""
Convert an array of time durations in seconds to the specified time unit.
"""
if unit == 'minutes':
return list(map(lambda x: x*1.0/60, time_seconds_arr))
elif unit == 'hours':
return list(map(lambda x: x*1.0/(60*60), time_seconds_arr))
elif unit == 'days':
return list(map(lambda x: x*1.0/(24*60*60), time_seconds_arr))
return time_seconds_arr
def days_ago(n, hour=0, minute=0, second=0, microsecond=0):
"""
Get a datetime object representing `n` days ago. By default the time is
set to midnight.
"""
today = datetime.today().replace(
hour=hour,
minute=minute,
second=second,
microsecond=microsecond)
return today - timedelta(days=n)
| apache-2.0 |
ReganBell/QReview | networkx/algorithms/connectivity/cuts.py | 41 | 22905 | # -*- coding: utf-8 -*-
"""
Flow based cut algorithms
"""
import itertools
import networkx as nx
# Define the default maximum flow function to use in all flow based
# cut algorithms.
from networkx.algorithms.flow import edmonds_karp, shortest_augmenting_path
from networkx.algorithms.flow import build_residual_network
default_flow_func = edmonds_karp
from .utils import (build_auxiliary_node_connectivity,
build_auxiliary_edge_connectivity)
__author__ = '\n'.join(['Jordi Torrents <[email protected]>'])
__all__ = ['minimum_st_node_cut',
'minimum_node_cut',
'minimum_st_edge_cut',
'minimum_edge_cut']
def minimum_st_edge_cut(G, s, t, flow_func=None, auxiliary=None,
residual=None):
"""Returns the edges of the cut-set of a minimum (s, t)-cut.
This function returns the set of edges of minimum cardinality that,
    if removed, would destroy all paths between source and target in G.
    Edge weights are not considered.
Parameters
----------
G : NetworkX graph
Edges of the graph are expected to have an attribute called
'capacity'. If this attribute is not present, the edge is
considered to have infinite capacity.
s : node
Source node for the flow.
t : node
Sink node for the flow.
auxiliary : NetworkX DiGraph
Auxiliary digraph to compute flow based node connectivity. It has
to have a graph attribute called mapping with a dictionary mapping
node names in G and in the auxiliary digraph. If provided
it will be reused instead of recreated. Default value: None.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See :meth:`node_connectivity` for
details. The choice of the default function may change from version
to version and should not be relied on. Default value: None.
residual : NetworkX DiGraph
Residual network to compute maximum flow. If provided it will be
reused instead of recreated. Default value: None.
Returns
-------
cutset : set
Set of edges that, if removed from the graph, will disconnect it.
See also
--------
:meth:`minimum_cut`
:meth:`minimum_node_cut`
:meth:`minimum_edge_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
Examples
--------
This function is not imported in the base NetworkX namespace, so you
have to explicitly import it from the connectivity package:
>>> from networkx.algorithms.connectivity import minimum_st_edge_cut
We use in this example the platonic icosahedral graph, which has edge
connectivity 5.
>>> G = nx.icosahedral_graph()
>>> len(minimum_st_edge_cut(G, 0, 6))
5
If you need to compute local edge cuts on several pairs of
nodes in the same graph, it is recommended that you reuse the
data structures that NetworkX uses in the computation: the
auxiliary digraph for edge connectivity, and the residual
network for the underlying maximum flow computation.
Example of how to compute local edge cuts among all pairs of
nodes of the platonic icosahedral graph reusing the data
structures.
>>> import itertools
>>> # You also have to explicitly import the function for
>>> # building the auxiliary digraph from the connectivity package
>>> from networkx.algorithms.connectivity import (
... build_auxiliary_edge_connectivity)
>>> H = build_auxiliary_edge_connectivity(G)
>>> # And the function for building the residual network from the
>>> # flow package
>>> from networkx.algorithms.flow import build_residual_network
>>> # Note that the auxiliary digraph has an edge attribute named capacity
>>> R = build_residual_network(H, 'capacity')
>>> result = dict.fromkeys(G, dict())
>>> # Reuse the auxiliary digraph and the residual network by passing them
>>> # as parameters
>>> for u, v in itertools.combinations(G, 2):
... k = len(minimum_st_edge_cut(G, u, v, auxiliary=H, residual=R))
... result[u][v] = k
>>> all(result[u][v] == 5 for u, v in itertools.combinations(G, 2))
True
You can also use alternative flow algorithms for computing edge
cuts. For instance, in dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better than
the default :meth:`edmonds_karp` which is faster for sparse
networks with highly skewed degree distributions. Alternative flow
functions have to be explicitly imported from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> len(minimum_st_edge_cut(G, 0, 6, flow_func=shortest_augmenting_path))
5
"""
if flow_func is None:
flow_func = default_flow_func
if auxiliary is None:
H = build_auxiliary_edge_connectivity(G)
else:
H = auxiliary
kwargs = dict(capacity='capacity', flow_func=flow_func, residual=residual)
cut_value, partition = nx.minimum_cut(H, s, t, **kwargs)
reachable, non_reachable = partition
# Any edge in the original graph linking the two sets in the
# partition is part of the edge cutset
cutset = set()
for u, nbrs in ((n, G[n]) for n in reachable):
cutset.update((u, v) for v in nbrs if v in non_reachable)
return cutset
def minimum_st_node_cut(G, s, t, flow_func=None, auxiliary=None, residual=None):
r"""Returns a set of nodes of minimum cardinality that disconnect source
from target in G.
This function returns the set of nodes of minimum cardinality that,
    if removed, would destroy all paths between source and target in G.
Parameters
----------
G : NetworkX graph
s : node
Source node.
t : node
Target node.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See below for details. The choice
of the default function may change from version to version and
should not be relied on. Default value: None.
auxiliary : NetworkX DiGraph
Auxiliary digraph to compute flow based node connectivity. It has
to have a graph attribute called mapping with a dictionary mapping
node names in G and in the auxiliary digraph. If provided
it will be reused instead of recreated. Default value: None.
residual : NetworkX DiGraph
Residual network to compute maximum flow. If provided it will be
reused instead of recreated. Default value: None.
Returns
-------
cutset : set
Set of nodes that, if removed, would destroy all paths between
source and target in G.
Examples
--------
This function is not imported in the base NetworkX namespace, so you
have to explicitly import it from the connectivity package:
>>> from networkx.algorithms.connectivity import minimum_st_node_cut
We use in this example the platonic icosahedral graph, which has node
connectivity 5.
>>> G = nx.icosahedral_graph()
>>> len(minimum_st_node_cut(G, 0, 6))
5
If you need to compute local st cuts between several pairs of
nodes in the same graph, it is recommended that you reuse the
data structures that NetworkX uses in the computation: the
auxiliary digraph for node connectivity and node cuts, and the
residual network for the underlying maximum flow computation.
Example of how to compute local st node cuts reusing the data
structures:
>>> # You also have to explicitly import the function for
>>> # building the auxiliary digraph from the connectivity package
>>> from networkx.algorithms.connectivity import (
... build_auxiliary_node_connectivity)
>>> H = build_auxiliary_node_connectivity(G)
>>> # And the function for building the residual network from the
>>> # flow package
>>> from networkx.algorithms.flow import build_residual_network
>>> # Note that the auxiliary digraph has an edge attribute named capacity
>>> R = build_residual_network(H, 'capacity')
>>> # Reuse the auxiliary digraph and the residual network by passing them
>>> # as parameters
>>> len(minimum_st_node_cut(G, 0, 6, auxiliary=H, residual=R))
5
You can also use alternative flow algorithms for computing minimum st
node cuts. For instance, in dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better than
the default :meth:`edmonds_karp` which is faster for sparse
networks with highly skewed degree distributions. Alternative flow
functions have to be explicitly imported from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> len(minimum_st_node_cut(G, 0, 6, flow_func=shortest_augmenting_path))
5
Notes
-----
This is a flow based implementation of minimum node cut. The algorithm
is based in solving a number of maximum flow computations to determine
the capacity of the minimum cut on an auxiliary directed network that
corresponds to the minimum node cut of G. It handles both directed
and undirected graphs. This implementation is based on algorithm 11
in [1]_.
See also
--------
:meth:`minimum_node_cut`
:meth:`minimum_edge_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if auxiliary is None:
H = build_auxiliary_node_connectivity(G)
else:
H = auxiliary
mapping = H.graph.get('mapping', None)
if mapping is None:
raise nx.NetworkXError('Invalid auxiliary digraph.')
kwargs = dict(flow_func=flow_func, residual=residual, auxiliary=H)
# The edge cut in the auxiliary digraph corresponds to the node cut in the
# original graph.
edge_cut = minimum_st_edge_cut(H, '%sB' % mapping[s], '%sA' % mapping[t],
**kwargs)
# Each node in the original graph maps to two nodes of the auxiliary graph
node_cut = set(H.node[node]['id'] for edge in edge_cut for node in edge)
return node_cut - set([s, t])
def minimum_node_cut(G, s=None, t=None, flow_func=None):
r"""Returns a set of nodes of minimum cardinality that disconnects G.
If source and target nodes are provided, this function returns the
set of nodes of minimum cardinality that, if removed, would destroy
    all paths between source and target in G. If not, it returns a set
of nodes of minimum cardinality that disconnects G.
Parameters
----------
G : NetworkX graph
s : node
Source node. Optional. Default value: None.
t : node
Target node. Optional. Default value: None.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See below for details. The
choice of the default function may change from version
to version and should not be relied on. Default value: None.
Returns
-------
cutset : set
Set of nodes that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the nodes that,
        if removed, would destroy all paths between source and target.
Examples
--------
>>> # Platonic icosahedral graph has node connectivity 5
>>> G = nx.icosahedral_graph()
>>> node_cut = nx.minimum_node_cut(G)
>>> len(node_cut)
5
You can use alternative flow algorithms for the underlying maximum
flow computation. In dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better
than the default :meth:`edmonds_karp`, which is faster for
sparse networks with highly skewed degree distributions. Alternative
flow functions have to be explicitly imported from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> node_cut == nx.minimum_node_cut(G, flow_func=shortest_augmenting_path)
True
If you specify a pair of nodes (source and target) as parameters,
this function returns a local st node cut.
>>> len(nx.minimum_node_cut(G, 3, 7))
5
If you need to perform several local st cuts among different
pairs of nodes on the same graph, it is recommended that you reuse
the data structures used in the maximum flow computations. See
:meth:`minimum_st_node_cut` for details.
Notes
-----
This is a flow based implementation of minimum node cut. The algorithm
is based in solving a number of maximum flow computations to determine
the capacity of the minimum cut on an auxiliary directed network that
corresponds to the minimum node cut of G. It handles both directed
and undirected graphs. This implementation is based on algorithm 11
in [1]_.
See also
--------
:meth:`minimum_st_node_cut`
:meth:`minimum_cut`
:meth:`minimum_edge_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if (s is not None and t is None) or (s is None and t is not None):
raise nx.NetworkXError('Both source and target must be specified.')
# Local minimum node cut.
if s is not None and t is not None:
if s not in G:
raise nx.NetworkXError('node %s not in graph' % s)
if t not in G:
raise nx.NetworkXError('node %s not in graph' % t)
return minimum_st_node_cut(G, s, t, flow_func=flow_func)
# Global minimum node cut.
    # Analogous to algorithm 11 for global node connectivity in [1].
if G.is_directed():
if not nx.is_weakly_connected(G):
raise nx.NetworkXError('Input graph is not connected')
iter_func = itertools.permutations
def neighbors(v):
return itertools.chain.from_iterable([G.predecessors_iter(v),
G.successors_iter(v)])
else:
if not nx.is_connected(G):
raise nx.NetworkXError('Input graph is not connected')
iter_func = itertools.combinations
neighbors = G.neighbors_iter
# Reuse the auxiliary digraph and the residual network.
H = build_auxiliary_node_connectivity(G)
R = build_residual_network(H, 'capacity')
kwargs = dict(flow_func=flow_func, auxiliary=H, residual=R)
# Choose a node with minimum degree.
v = min(G, key=G.degree)
# Initial node cutset is all neighbors of the node with minimum degree.
min_cut = set(G[v])
# Compute st node cuts between v and all its non-neighbors nodes in G.
for w in set(G) - set(neighbors(v)) - set([v]):
this_cut = minimum_st_node_cut(G, v, w, **kwargs)
if len(min_cut) >= len(this_cut):
min_cut = this_cut
# Also for non adjacent pairs of neighbors of v.
for x, y in iter_func(neighbors(v), 2):
if y in G[x]:
continue
this_cut = minimum_st_node_cut(G, x, y, **kwargs)
if len(min_cut) >= len(this_cut):
min_cut = this_cut
return min_cut
def minimum_edge_cut(G, s=None, t=None, flow_func=None):
r"""Returns a set of edges of minimum cardinality that disconnects G.
If source and target nodes are provided, this function returns the
set of edges of minimum cardinality that, if removed, would break
    all paths between source and target in G. If not, it returns a set of
edges of minimum cardinality that disconnects G.
Parameters
----------
G : NetworkX graph
s : node
Source node. Optional. Default value: None.
t : node
Target node. Optional. Default value: None.
flow_func : function
A function for computing the maximum flow among a pair of nodes.
The function has to accept at least three parameters: a Digraph,
a source node, and a target node. And return a residual network
that follows NetworkX conventions (see :meth:`maximum_flow` for
details). If flow_func is None, the default maximum flow function
(:meth:`edmonds_karp`) is used. See below for details. The
choice of the default function may change from version
to version and should not be relied on. Default value: None.
Returns
-------
cutset : set
Set of edges that, if removed, would disconnect G. If source
        and target nodes are provided, the set contains the edges that,
        if removed, would destroy all paths between source and target.
Examples
--------
>>> # Platonic icosahedral graph has edge connectivity 5
>>> G = nx.icosahedral_graph()
>>> len(nx.minimum_edge_cut(G))
5
You can use alternative flow algorithms for the underlying
maximum flow computation. In dense networks the algorithm
:meth:`shortest_augmenting_path` will usually perform better
than the default :meth:`edmonds_karp`, which is faster for
sparse networks with highly skewed degree distributions.
Alternative flow functions have to be explicitly imported
from the flow package.
>>> from networkx.algorithms.flow import shortest_augmenting_path
>>> len(nx.minimum_edge_cut(G, flow_func=shortest_augmenting_path))
5
If you specify a pair of nodes (source and target) as parameters,
this function returns the value of local edge connectivity.
>>> nx.edge_connectivity(G, 3, 7)
5
If you need to perform several local computations among different
pairs of nodes on the same graph, it is recommended that you reuse
the data structures used in the maximum flow computations. See
:meth:`local_edge_connectivity` for details.
Notes
-----
This is a flow based implementation of minimum edge cut. For
undirected graphs the algorithm works by finding a 'small' dominating
set of nodes of G (see algorithm 7 in [1]_) and computing the maximum
flow between an arbitrary node in the dominating set and the rest of
nodes in it. This is an implementation of algorithm 6 in [1]_. For
directed graphs, the algorithm does n calls to the max flow function.
It is an implementation of algorithm 8 in [1]_.
See also
--------
:meth:`minimum_st_edge_cut`
:meth:`minimum_node_cut`
:meth:`stoer_wagner`
:meth:`node_connectivity`
:meth:`edge_connectivity`
:meth:`maximum_flow`
:meth:`edmonds_karp`
:meth:`preflow_push`
:meth:`shortest_augmenting_path`
References
----------
.. [1] Abdol-Hossein Esfahanian. Connectivity Algorithms.
http://www.cse.msu.edu/~cse835/Papers/Graph_connectivity_revised.pdf
"""
if (s is not None and t is None) or (s is None and t is not None):
raise nx.NetworkXError('Both source and target must be specified.')
# reuse auxiliary digraph and residual network
H = build_auxiliary_edge_connectivity(G)
R = build_residual_network(H, 'capacity')
kwargs = dict(flow_func=flow_func, residual=R, auxiliary=H)
# Local minimum edge cut if s and t are not None
if s is not None and t is not None:
if s not in G:
raise nx.NetworkXError('node %s not in graph' % s)
if t not in G:
raise nx.NetworkXError('node %s not in graph' % t)
return minimum_st_edge_cut(H, s, t, **kwargs)
# Global minimum edge cut
    # Analogous to the algorithm for global edge connectivity
if G.is_directed():
# Based on algorithm 8 in [1]
if not nx.is_weakly_connected(G):
raise nx.NetworkXError('Input graph is not connected')
# Initial cutset is all edges of a node with minimum degree
node = min(G, key=G.degree)
min_cut = G.edges(node)
nodes = G.nodes()
n = len(nodes)
for i in range(n):
try:
this_cut = minimum_st_edge_cut(H, nodes[i], nodes[i+1], **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
except IndexError: # Last node!
this_cut = minimum_st_edge_cut(H, nodes[i], nodes[0], **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
return min_cut
else: # undirected
# Based on algorithm 6 in [1]
if not nx.is_connected(G):
raise nx.NetworkXError('Input graph is not connected')
# Initial cutset is all edges of a node with minimum degree
node = min(G, key=G.degree)
min_cut = G.edges(node)
# A dominating set is \lambda-covering
# We need a dominating set with at least two nodes
for node in G:
D = nx.dominating_set(G, start_with=node)
v = D.pop()
if D:
break
else:
# in complete graphs the dominating set will always be of one node
# thus we return min_cut, which now contains the edges of a node
# with minimum degree
return min_cut
for w in D:
this_cut = minimum_st_edge_cut(H, v, w, **kwargs)
if len(this_cut) <= len(min_cut):
min_cut = this_cut
return min_cut
| bsd-3-clause |
Srisai85/scipy | scipy/linalg/lapack.py | 46 | 5636 | """
Low-level LAPACK functions
==========================
This module contains low-level functions from the LAPACK library.
.. versionadded:: 0.12.0
.. warning::
These functions do little to no error checking.
It is possible to cause crashes by mis-using them,
so prefer using the higher-level routines in `scipy.linalg`.
Finding functions
=================
.. autosummary::
get_lapack_funcs
All functions
=============
.. autosummary::
:toctree: generated/
sgbsv
dgbsv
cgbsv
zgbsv
sgbtrf
dgbtrf
cgbtrf
zgbtrf
sgbtrs
dgbtrs
cgbtrs
zgbtrs
sgebal
dgebal
cgebal
zgebal
sgees
dgees
cgees
zgees
sgeev
dgeev
cgeev
zgeev
sgeev_lwork
dgeev_lwork
cgeev_lwork
zgeev_lwork
sgegv
dgegv
cgegv
zgegv
sgehrd
dgehrd
cgehrd
zgehrd
sgehrd_lwork
dgehrd_lwork
cgehrd_lwork
zgehrd_lwork
sgelss
dgelss
cgelss
zgelss
sgelss_lwork
dgelss_lwork
cgelss_lwork
zgelss_lwork
sgelsd
dgelsd
cgelsd
zgelsd
sgelsd_lwork
dgelsd_lwork
cgelsd_lwork
zgelsd_lwork
sgelsy
dgelsy
cgelsy
zgelsy
sgelsy_lwork
dgelsy_lwork
cgelsy_lwork
zgelsy_lwork
sgeqp3
dgeqp3
cgeqp3
zgeqp3
sgeqrf
dgeqrf
cgeqrf
zgeqrf
sgerqf
dgerqf
cgerqf
zgerqf
sgesdd
dgesdd
cgesdd
zgesdd
sgesdd_lwork
dgesdd_lwork
cgesdd_lwork
zgesdd_lwork
sgesv
dgesv
cgesv
zgesv
sgetrf
dgetrf
cgetrf
zgetrf
sgetri
dgetri
cgetri
zgetri
sgetri_lwork
dgetri_lwork
cgetri_lwork
zgetri_lwork
sgetrs
dgetrs
cgetrs
zgetrs
sgges
dgges
cgges
zgges
sggev
dggev
cggev
zggev
chbevd
zhbevd
chbevx
zhbevx
cheev
zheev
cheevd
zheevd
cheevr
zheevr
chegv
zhegv
chegvd
zhegvd
chegvx
zhegvx
slarf
dlarf
clarf
zlarf
slarfg
dlarfg
clarfg
zlarfg
slartg
dlartg
clartg
zlartg
dlasd4
slasd4
slaswp
dlaswp
claswp
zlaswp
slauum
dlauum
clauum
zlauum
spbsv
dpbsv
cpbsv
zpbsv
spbtrf
dpbtrf
cpbtrf
zpbtrf
spbtrs
dpbtrs
cpbtrs
zpbtrs
sposv
dposv
cposv
zposv
spotrf
dpotrf
cpotrf
zpotrf
spotri
dpotri
cpotri
zpotri
spotrs
dpotrs
cpotrs
zpotrs
crot
zrot
strsyl
dtrsyl
ctrsyl
ztrsyl
strtri
dtrtri
ctrtri
ztrtri
strtrs
dtrtrs
ctrtrs
ztrtrs
cunghr
zunghr
cungqr
zungqr
cungrq
zungrq
cunmqr
zunmqr
sgtsv
dgtsv
cgtsv
zgtsv
sptsv
dptsv
cptsv
zptsv
slamch
dlamch
sorghr
dorghr
sorgqr
dorgqr
sorgrq
dorgrq
sormqr
dormqr
ssbev
dsbev
ssbevd
dsbevd
ssbevx
dsbevx
ssyev
dsyev
ssyevd
dsyevd
ssyevr
dsyevr
ssygv
dsygv
ssygvd
dsygvd
ssygvx
dsygvx
slange
dlange
clange
zlange
"""
#
# Author: Pearu Peterson, March 2002
#
from __future__ import division, print_function, absolute_import
__all__ = ['get_lapack_funcs']
from .blas import _get_funcs
# Backward compatibility:
from .blas import find_best_blas_type as find_best_lapack_type
from scipy.linalg import _flapack
try:
from scipy.linalg import _clapack
except ImportError:
_clapack = None
# Backward compatibility
from scipy._lib._util import DeprecatedImport as _DeprecatedImport
clapack = _DeprecatedImport("scipy.linalg.blas.clapack", "scipy.linalg.lapack")
flapack = _DeprecatedImport("scipy.linalg.blas.flapack", "scipy.linalg.lapack")
# Expose all functions (only flapack --- clapack is an implementation detail)
empty_module = None
from scipy.linalg._flapack import *
del empty_module
# some convenience alias for complex functions
_lapack_alias = {
'corghr': 'cunghr', 'zorghr': 'zunghr',
'corghr_lwork': 'cunghr_lwork', 'zorghr_lwork': 'zunghr_lwork',
'corgqr': 'cungqr', 'zorgqr': 'zungqr',
'cormqr': 'cunmqr', 'zormqr': 'zunmqr',
'corgrq': 'cungrq', 'zorgrq': 'zungrq',
}
def get_lapack_funcs(names, arrays=(), dtype=None):
"""Return available LAPACK function objects from names.
Arrays are used to determine the optimal prefix of LAPACK routines.
Parameters
----------
names : str or sequence of str
Name(s) of LAPACK functions without type prefix.
arrays : sequence of ndarrays, optional
Arrays can be given to determine optimal prefix of LAPACK
routines. If not given, double-precision routines will be
used, otherwise the most generic type in arrays will be used.
dtype : str or dtype, optional
Data-type specifier. Not used if `arrays` is non-empty.
Returns
-------
funcs : list
List containing the found function(s).
Notes
-----
This routine automatically chooses between Fortran/C
interfaces. Fortran code is used whenever possible for arrays with
column major order. In all other cases, C code is preferred.
In LAPACK, the naming convention is that all functions start with a
type prefix, which depends on the type of the principal
matrix. These can be one of {'s', 'd', 'c', 'z'} for the numpy
    types {float32, float64, complex64, complex128} respectively, and
are stored in attribute `typecode` of the returned functions.
"""
return _get_funcs(names, arrays, dtype,
"LAPACK", _flapack, _clapack,
"flapack", "clapack", _lapack_alias)
| bsd-3-clause |
maohongyuan/kbengine | kbe/src/lib/python/Lib/encodings/iso8859_4.py | 272 | 13376 | """ Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-4',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\x80' # 0x80 -> <control>
'\x81' # 0x81 -> <control>
'\x82' # 0x82 -> <control>
'\x83' # 0x83 -> <control>
'\x84' # 0x84 -> <control>
'\x85' # 0x85 -> <control>
'\x86' # 0x86 -> <control>
'\x87' # 0x87 -> <control>
'\x88' # 0x88 -> <control>
'\x89' # 0x89 -> <control>
'\x8a' # 0x8A -> <control>
'\x8b' # 0x8B -> <control>
'\x8c' # 0x8C -> <control>
'\x8d' # 0x8D -> <control>
'\x8e' # 0x8E -> <control>
'\x8f' # 0x8F -> <control>
'\x90' # 0x90 -> <control>
'\x91' # 0x91 -> <control>
'\x92' # 0x92 -> <control>
'\x93' # 0x93 -> <control>
'\x94' # 0x94 -> <control>
'\x95' # 0x95 -> <control>
'\x96' # 0x96 -> <control>
'\x97' # 0x97 -> <control>
'\x98' # 0x98 -> <control>
'\x99' # 0x99 -> <control>
'\x9a' # 0x9A -> <control>
'\x9b' # 0x9B -> <control>
'\x9c' # 0x9C -> <control>
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
'\x9f' # 0x9F -> <control>
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
'\u0138' # 0xA2 -> LATIN SMALL LETTER KRA
'\u0156' # 0xA3 -> LATIN CAPITAL LETTER R WITH CEDILLA
'\xa4' # 0xA4 -> CURRENCY SIGN
'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
'\u013b' # 0xA6 -> LATIN CAPITAL LETTER L WITH CEDILLA
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
'\u0112' # 0xAA -> LATIN CAPITAL LETTER E WITH MACRON
'\u0122' # 0xAB -> LATIN CAPITAL LETTER G WITH CEDILLA
'\u0166' # 0xAC -> LATIN CAPITAL LETTER T WITH STROKE
'\xad' # 0xAD -> SOFT HYPHEN
'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
'\u02db' # 0xB2 -> OGONEK
'\u0157' # 0xB3 -> LATIN SMALL LETTER R WITH CEDILLA
'\xb4' # 0xB4 -> ACUTE ACCENT
'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
'\u013c' # 0xB6 -> LATIN SMALL LETTER L WITH CEDILLA
'\u02c7' # 0xB7 -> CARON
'\xb8' # 0xB8 -> CEDILLA
'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
'\u0113' # 0xBA -> LATIN SMALL LETTER E WITH MACRON
'\u0123' # 0xBB -> LATIN SMALL LETTER G WITH CEDILLA
'\u0167' # 0xBC -> LATIN SMALL LETTER T WITH STROKE
'\u014a' # 0xBD -> LATIN CAPITAL LETTER ENG
'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\u012a' # 0xCF -> LATIN CAPITAL LETTER I WITH MACRON
'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
'\u0136' # 0xD3 -> LATIN CAPITAL LETTER K WITH CEDILLA
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0168' # 0xDD -> LATIN CAPITAL LETTER U WITH TILDE
'\u016a' # 0xDE -> LATIN CAPITAL LETTER U WITH MACRON
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\u012b' # 0xEF -> LATIN SMALL LETTER I WITH MACRON
'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
'\u0137' # 0xF3 -> LATIN SMALL LETTER K WITH CEDILLA
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u0169' # 0xFD -> LATIN SMALL LETTER U WITH TILDE
'\u016b' # 0xFE -> LATIN SMALL LETTER U WITH MACRON
'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
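# Round-trip sanity check (added commentary; the values follow directly from
# the tables above):
#     b'\xa1\xe6'.decode('iso8859-4')   # -> '\u0104\xe6' ('Ąæ')
#     '\u0104\xe6'.encode('iso8859-4')  # -> b'\xa1\xe6'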
| lgpl-3.0 |
Belxjander/Kirito | Python-2.3.3-Amiga/Tools/pynche/ChipViewer.py | 1 | 5028 | """Chip viewer and widget.
In the lower left corner of the main Pynche window, you will see two
ChipWidgets, one for the selected color and one for the nearest color. The
selected color is the actual RGB value expressed as an X11 #COLOR name. The
nearest color is the named color from the X11 database that is closest to the
selected color in 3D space. There may be other colors equally close, but the
nearest one is the first one found.
Clicking on the nearest color chip selects that named color.
The ChipViewer class includes the entire lower left quadrant; i.e. both the
selected and nearest ChipWidgets.
"""
from types import StringType
from Tkinter import *
import ColorDB
class ChipWidget:
_WIDTH = 150
_HEIGHT = 80
def __init__(self,
master = None,
width = _WIDTH,
height = _HEIGHT,
text = 'Color',
initialcolor = 'blue',
presscmd = None,
releasecmd = None):
# create the text label
self.__label = Label(master, text=text)
self.__label.grid(row=0, column=0)
# create the color chip, implemented as a frame
self.__chip = Frame(master, relief=RAISED, borderwidth=2,
width=width,
height=height,
background=initialcolor)
self.__chip.grid(row=1, column=0)
# create the color name
self.__namevar = StringVar()
self.__namevar.set(initialcolor)
self.__name = Entry(master, textvariable=self.__namevar,
relief=FLAT, justify=CENTER, state=DISABLED,
font=self.__label['font'])
self.__name.grid(row=2, column=0)
# create the message area
self.__msgvar = StringVar()
self.__name = Entry(master, textvariable=self.__msgvar,
relief=FLAT, justify=CENTER, state=DISABLED,
font=self.__label['font'])
self.__name.grid(row=3, column=0)
# set bindings
if presscmd:
self.__chip.bind('<ButtonPress-1>', presscmd)
if releasecmd:
self.__chip.bind('<ButtonRelease-1>', releasecmd)
def set_color(self, color):
self.__chip.config(background=color)
def get_color(self):
return self.__chip['background']
def set_name(self, colorname):
self.__namevar.set(colorname)
def set_message(self, message):
self.__msgvar.set(message)
def press(self):
self.__chip.configure(relief=SUNKEN)
def release(self):
self.__chip.configure(relief=RAISED)
class ChipViewer:
def __init__(self, switchboard, master=None):
self.__sb = switchboard
self.__frame = Frame(master, relief=RAISED, borderwidth=1)
self.__frame.grid(row=3, column=0, ipadx=5, sticky='NSEW')
# create the chip that will display the currently selected color
# exactly
self.__sframe = Frame(self.__frame)
self.__sframe.grid(row=0, column=0)
self.__selected = ChipWidget(self.__sframe, text='Selected')
# create the chip that will display the nearest real X11 color
# database color name
self.__nframe = Frame(self.__frame)
self.__nframe.grid(row=0, column=1)
self.__nearest = ChipWidget(self.__nframe, text='Nearest',
presscmd = self.__buttonpress,
releasecmd = self.__buttonrelease)
def update_yourself(self, red, green, blue):
# Selected always shows the #rrggbb name of the color, nearest always
# shows the name of the nearest color in the database. BAW: should
# an exact match be indicated in some way?
#
# Always use the #rrggbb style to actually set the color, since we may
# not be using X color names (e.g. "web-safe" names)
colordb = self.__sb.colordb()
rgbtuple = (red, green, blue)
rrggbb = ColorDB.triplet_to_rrggbb(rgbtuple)
# find the nearest
nearest = colordb.nearest(red, green, blue)
nearest_tuple = colordb.find_byname(nearest)
nearest_rrggbb = ColorDB.triplet_to_rrggbb(nearest_tuple)
self.__selected.set_color(rrggbb)
self.__nearest.set_color(nearest_rrggbb)
# set the name and messages areas
self.__selected.set_name(rrggbb)
if rrggbb == nearest_rrggbb:
self.__selected.set_message(nearest)
else:
self.__selected.set_message('')
self.__nearest.set_name(nearest_rrggbb)
self.__nearest.set_message(nearest)
def __buttonpress(self, event=None):
self.__nearest.press()
def __buttonrelease(self, event=None):
self.__nearest.release()
rrggbb = self.__nearest.get_color()
red, green, blue = ColorDB.rrggbb_to_triplet(rrggbb)
self.__sb.update_views(red, green, blue)
| gpl-3.0 |
wkoathp/glance | glance/search/plugins/metadefs.py | 6 | 9241 | # Copyright 2015 Intel Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import six
import glance.db
from glance.db.sqlalchemy import models_metadef as models
from glance.search.plugins import base
from glance.search.plugins import metadefs_notification_handler
class MetadefIndex(base.IndexBase):
def __init__(self):
super(MetadefIndex, self).__init__()
self.db_api = glance.db.get_api()
def get_index_name(self):
return 'glance'
def get_document_type(self):
return 'metadef'
def get_mapping(self):
property_mapping = {
'dynamic': True,
'type': 'nested',
'properties': {
'property': {'type': 'string', 'index': 'not_analyzed'},
'type': {'type': 'string'},
'title': {'type': 'string'},
'description': {'type': 'string'},
}
}
mapping = {
'_id': {
'path': 'namespace',
},
'properties': {
'display_name': {'type': 'string'},
'description': {'type': 'string'},
'namespace': {'type': 'string', 'index': 'not_analyzed'},
'owner': {'type': 'string', 'index': 'not_analyzed'},
'visibility': {'type': 'string', 'index': 'not_analyzed'},
'resource_types': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
'prefix': {'type': 'string'},
'properties_target': {'type': 'string'},
},
},
'objects': {
'type': 'nested',
'properties': {
'id': {'type': 'string', 'index': 'not_analyzed'},
'name': {'type': 'string'},
'description': {'type': 'string'},
'properties': property_mapping,
}
},
'properties': property_mapping,
'tags': {
'type': 'nested',
'properties': {
'name': {'type': 'string'},
}
}
},
}
return mapping
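    # Example of a document shape this mapping is meant to index (illustrative
    # values only; see serialize() below for how real documents are built):
    #     {"namespace": "OS::Compute::Quota", "display_name": "Quota",
    #      "description": "...", "visibility": "public", "owner": "admin",
    #      "resource_types": [{"name": "OS::Nova::Flavor", "prefix": "quota:",
    #                          "properties_target": None}],
    #      "objects": [], "properties": [], "tags": [{"name": "example"}]}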
def get_rbac_filter(self, request_context):
# TODO(krykowski): Define base get_rbac_filter in IndexBase class
# which will provide some common subset of query pieces.
# Something like:
# def get_common_context_pieces(self, request_context):
# return [{'term': {'owner': request_context.owner,
# 'type': {'value': self.get_document_type()}}]
return [
{
"and": [
{
'or': [
{
'term': {
'owner': request_context.owner
}
},
{
'term': {
'visibility': 'public'
}
}
]
},
{
'type': {
'value': self.get_document_type()
}
}
]
}
]
def get_objects(self):
session = self.db_api.get_session()
namespaces = session.query(models.MetadefNamespace).all()
resource_types = session.query(models.MetadefResourceType).all()
resource_types_map = {r.id: r.name for r in resource_types}
for namespace in namespaces:
namespace.resource_types = self.get_namespace_resource_types(
namespace.id, resource_types_map)
namespace.objects = self.get_namespace_objects(namespace.id)
namespace.properties = self.get_namespace_properties(namespace.id)
namespace.tags = self.get_namespace_tags(namespace.id)
return namespaces
def get_namespace_resource_types(self, namespace_id, resource_types):
session = self.db_api.get_session()
namespace_resource_types = session.query(
models.MetadefNamespaceResourceType
).filter_by(namespace_id=namespace_id)
resource_associations = [{
'prefix': r.prefix,
'properties_target': r.properties_target,
'name': resource_types[r.resource_type_id],
} for r in namespace_resource_types]
return resource_associations
def get_namespace_properties(self, namespace_id):
session = self.db_api.get_session()
properties = session.query(
models.MetadefProperty
).filter_by(namespace_id=namespace_id)
return list(properties)
def get_namespace_objects(self, namespace_id):
session = self.db_api.get_session()
namespace_objects = session.query(
models.MetadefObject
).filter_by(namespace_id=namespace_id)
return list(namespace_objects)
def get_namespace_tags(self, namespace_id):
session = self.db_api.get_session()
namespace_tags = session.query(
models.MetadefTag
).filter_by(namespace_id=namespace_id)
return list(namespace_tags)
def serialize(self, obj):
object_docs = [self.serialize_object(ns_obj) for ns_obj in obj.objects]
property_docs = [self.serialize_property(prop.name, prop.json_schema)
for prop in obj.properties]
resource_type_docs = [self.serialize_namespace_resource_type(rt)
for rt in obj.resource_types]
tag_docs = [self.serialize_tag(tag) for tag in obj.tags]
namespace_doc = self.serialize_namespace(obj)
namespace_doc.update({
'objects': object_docs,
'properties': property_docs,
'resource_types': resource_type_docs,
'tags': tag_docs,
})
return namespace_doc
def serialize_namespace(self, namespace):
return {
'namespace': namespace.namespace,
'display_name': namespace.display_name,
'description': namespace.description,
'visibility': namespace.visibility,
'protected': namespace.protected,
'owner': namespace.owner,
}
def serialize_object(self, obj):
obj_properties = obj.json_schema
property_docs = []
for name, schema in six.iteritems(obj_properties):
property_doc = self.serialize_property(name, schema)
property_docs.append(property_doc)
document = {
'name': obj.name,
'description': obj.description,
'properties': property_docs,
}
return document
def serialize_property(self, name, schema):
document = copy.deepcopy(schema)
document['property'] = name
if 'default' in document:
document['default'] = str(document['default'])
if 'enum' in document:
            document['enum'] = [str(value) for value in document['enum']]  # keep a list so it stays JSON-serializable on Python 3
return document
def serialize_namespace_resource_type(self, ns_resource_type):
return {
'name': ns_resource_type['name'],
'prefix': ns_resource_type['prefix'],
'properties_target': ns_resource_type['properties_target']
}
def serialize_tag(self, tag):
return {
'name': tag.name
}
def get_notification_handler(self):
return metadefs_notification_handler.MetadefHandler(
self.engine,
self.get_index_name(),
self.get_document_type()
)
def get_notification_supported_events(self):
return [
"metadef_namespace.create",
"metadef_namespace.update",
"metadef_namespace.delete",
"metadef_object.create",
"metadef_object.update",
"metadef_object.delete",
"metadef_property.create",
"metadef_property.update",
"metadef_property.delete",
"metadef_tag.create",
"metadef_tag.update",
"metadef_tag.delete",
"metadef_resource_type.create",
"metadef_resource_type.delete",
"metadef_namespace.delete_properties",
"metadef_namespace.delete_objects",
"metadef_namespace.delete_tags"
]
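# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of how the pieces above fit together: the index and
# document type names, the mapping, and serialize() producing one document per
# namespace. The `elasticsearch.Elasticsearch` client calls are assumptions
# about the deployment environment, not something this plugin guarantees.
#
#   from elasticsearch import Elasticsearch
#
#   plugin = MetadefIndex()
#   es = Elasticsearch()
#   es.indices.put_mapping(index=plugin.get_index_name(),
#                          doc_type=plugin.get_document_type(),
#                          body={plugin.get_document_type(): plugin.get_mapping()})
#   for namespace in plugin.get_objects():
#       doc = plugin.serialize(namespace)
#       es.index(index=plugin.get_index_name(),
#                doc_type=plugin.get_document_type(),
#                id=doc['namespace'],
#                body=doc)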
| apache-2.0 |
dropbox/dropbox-sdk-dotnet | generator/csproj.py | 1 | 15683 | from __future__ import unicode_literals
from StringIO import StringIO
COMPILE_INCLUDES = [
"Stone\\Decoder.cs",
"Stone\\Empty.cs",
"Stone\\Encoder.cs",
"Stone\\IEncoder.cs",
"Stone\\IDecoder.cs",
"Stone\\IJsonReader.cs",
"Stone\\IJsonWriter.cs",
"Stone\\ITransport.cs",
"Stone\\JsonReader.cs",
"Stone\\JsonWriter.cs",
"ApiException.cs",
"StructuredException.cs",
"Stone\\Util.cs",
"DropboxCertHelper.cs",
"DropboxClient.cs",
"DropboxClientBase.cs",
"DropboxAppClient.cs",
"DropboxTeamClient.cs",
"DropboxClientConfig.cs",
"DropboxException.cs",
"DropboxOauth2Helper.cs",
"DropboxRequestHandler.cs",
"AppProperties\\AssemblyInfo.cs",
]
NONE_INCLUDES = [
"packages.config",
]
PORTABLE40_NONE_INCLUDES = [
"app.config",
"packages.Dropbox.Api.Portable40.config",
]
PORTABLE_NONE_INCLUDES = [
"packages.Dropbox.Api.Portable.config",
]
DOC_NONE_INCLUDES = [
"packages.config",
]
CSPROJ_START_BLOCK = r"""<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworks>net45;netstandard2.0</TargetFrameworks>
<BaseIntermediateOutputPath>obj\</BaseIntermediateOutputPath>
<EnableDefaultCompileItems>false</EnableDefaultCompileItems>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
</PropertyGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net45' ">
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="System.Net.Http" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'net45' ">
<None Include="packages.config" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<None Remove="app.config" />
<None Remove="Dropbox.Api.nuspec" />
<None Remove="dropbox_api_key.snk" />
<None Remove="packages.config" />
<None Remove="packages.Dropbox.Api.Portable.config" />
<None Remove="packages.Dropbox.Api.Portable40.config" />
<None Remove="Settings.StyleCop" />
<None Remove="stone_summaries.xml" />
</ItemGroup>
<ItemGroup Condition=" '$(TargetFramework)' == 'netstandard2.0' ">
<PackageReference Include="Newtonsoft.Json" Version="10.0.3" />
</ItemGroup>
"""
CSPROJ_END_BLOCK = r"""
</Project>
"""
PORTABLE40_CSPROJ_START_BLOCK = r"""<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{D7B167CE-3AF8-478E-82F2-684D38F1DF98}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworkVersion>v4.0</TargetFrameworkVersion>
<TargetFrameworkProfile>Profile344</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<MinimumVisualStudioVersion>10.0</MinimumVisualStudioVersion>
<BaseIntermediateOutputPath>portable40obj\</BaseIntermediateOutputPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\portable40</OutputPath>
<DefineConstants>DEBUG;TRACE;PORTABLE40</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Debug\portable40\Dropbox.Api.XML</DocumentationFile>
<RunCodeAnalysis>true</RunCodeAnalysis>
<NoWarn>419</NoWarn>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\portable40</OutputPath>
<DefineConstants>TRACE;PORTABLE40</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Release\portable40\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<ItemGroup>
<Reference Include="Microsoft.Threading.Tasks">
<HintPath>..\packages\Microsoft.Bcl.Async.1.0.168\lib\portable-net40+sl4+win8+wp71+wpa81\Microsoft.Threading.Tasks.dll</HintPath>
</Reference>
<Reference Include="Microsoft.Threading.Tasks.Extensions">
<HintPath>..\packages\Microsoft.Bcl.Async.1.0.168\lib\portable-net40+sl4+win8+wp71+wpa81\Microsoft.Threading.Tasks.Extensions.dll</HintPath>
</Reference>
<Reference Include="System.IO">
<HintPath>..\packages\Microsoft.Bcl.1.1.10\lib\portable-net40+sl5+win8+wp8+wpa81\System.IO.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http">
<HintPath>..\packages\Microsoft.Net.Http.2.2.29\lib\portable-net40+sl4+win8+wp71+wpa81\System.Net.Http.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http.Extensions">
<HintPath>..\packages\Microsoft.Net.Http.2.2.29\lib\portable-net40+sl4+win8+wp71+wpa81\System.Net.Http.Extensions.dll</HintPath>
</Reference>
<Reference Include="System.Net.Http.Primitives">
<HintPath>..\packages\Microsoft.Net.Http.2.2.29\lib\portable-net40+sl4+win8+wp71+wpa81\System.Net.Http.Primitives.dll</HintPath>
</Reference>
<Reference Include="System.Runtime">
<HintPath>..\packages\Microsoft.Bcl.1.1.10\lib\portable-net40+sl5+win8+wp8+wpa81\System.Runtime.dll</HintPath>
</Reference>
<Reference Include="System.Threading.Tasks">
<HintPath>..\packages\Microsoft.Bcl.1.1.10\lib\portable-net40+sl5+win8+wp8+wpa81\System.Threading.Tasks.dll</HintPath>
</Reference>
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\portable-net40+sl5+wp80+win8+wpa81\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
"""
PORTABLE40_CSPROJ_END_BLOCK = r""" <Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" />
<Import Project="..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets" Condition="Exists('..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets')" />
<Target Name="EnsureBclBuildImported" BeforeTargets="BeforeBuild" Condition="'$(BclBuildImported)' == ''">
<Error Condition="!Exists('..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets')" Text="This project references NuGet package(s) that are missing on this computer. Enable NuGet Package Restore to download them. For more information, see http://go.microsoft.com/fwlink/?LinkID=317567." HelpKeyword="BCLBUILD2001" />
<Error Condition="Exists('..\packages\Microsoft.Bcl.Build.1.0.14\tools\Microsoft.Bcl.Build.targets')" Text="The build restored NuGet packages. Build the project again to include these packages in the build. For more information, see http://go.microsoft.com/fwlink/?LinkID=317568." HelpKeyword="BCLBUILD2002" />
</Target>
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
"""
PORTABLE_CSPROJ_START_BLOCK = r"""<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{786C830F-07A1-408B-BD7F-6EE04809D6DB}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<TargetFrameworkProfile>Profile111</TargetFrameworkProfile>
<FileAlignment>512</FileAlignment>
<ProjectTypeGuids>{786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
<MinimumVisualStudioVersion>11.0</MinimumVisualStudioVersion>
<BaseIntermediateOutputPath>portableobj\</BaseIntermediateOutputPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\portable</OutputPath>
<DefineConstants>DEBUG;TRACE;PORTABLE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Debug\portable\Dropbox.Api.XML</DocumentationFile>
<RunCodeAnalysis>true</RunCodeAnalysis>
<NoWarn>419</NoWarn>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>bin\Release\portable</OutputPath>
<DefineConstants>TRACE;PORTABLE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<DocumentationFile>bin\Release\portable\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<ItemGroup>
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\portable-net45+wp80+win8+wpa81+dnxcore50\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
"""
PORTABLE_CSPROJ_END_BLOCK = r""" <Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
"""
DOC_CSPROJ_START_BLOCK = r"""<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{0E57A534-F4CA-402B-88F4-0B43E55264BA}</ProjectGuid>
<OutputType>Library</OutputType>
<AppDesignerFolder>AppProperties</AppDesignerFolder>
<RootNamespace>Dropbox.Api</RootNamespace>
<AssemblyName>Dropbox.Api</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">..\</SolutionDir>
<RestorePackages>true</RestorePackages>
<BaseIntermediateOutputPath>docobj\</BaseIntermediateOutputPath>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>docbin\Debug\</OutputPath>
<DefineConstants>TRACE;DEBUG;DOC</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<DocumentationFile>docbin\Debug\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>docbin\Release\</OutputPath>
<DefineConstants>TRACE;DOC</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<DocumentationFile>docbin\Release\Dropbox.Api.XML</DocumentationFile>
<NoWarn>419</NoWarn>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Net.Http" />
<Reference Include="System.Xml.Linq" />
<Reference Include="System.Data.DataSetExtensions" />
<Reference Include="Microsoft.CSharp" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
<Reference Include="Newtonsoft.Json">
<HintPath>..\packages\Newtonsoft.Json.7.0.1\lib\net45\Newtonsoft.Json.dll</HintPath>
</Reference>
</ItemGroup>
"""
DOC_CSPROJ_END_BLOCK = r""" <ItemGroup>
<None Include="stone_summaries.xml" />
<None Include="Generated\namespace_summaries.xml" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
"""
def _include_items(buf, item_type, paths):
buf.write(' <ItemGroup>\n')
for path in paths:
file_path = path.replace('/', '\\')
buf.write(' <{0} Include="{1}" />\n'.format(item_type, file_path))
buf.write(' </ItemGroup>\n')
def make_csproj_file(files, mode):
mode = mode.lower()
if mode == 'doc':
start = DOC_CSPROJ_START_BLOCK
end = DOC_CSPROJ_END_BLOCK
none_includes = DOC_NONE_INCLUDES
elif mode == 'portable40':
start = PORTABLE40_CSPROJ_START_BLOCK
end = PORTABLE40_CSPROJ_END_BLOCK
none_includes = PORTABLE40_NONE_INCLUDES
elif mode == 'portable':
start = PORTABLE_CSPROJ_START_BLOCK
end = PORTABLE_CSPROJ_END_BLOCK
none_includes = PORTABLE_NONE_INCLUDES
else:
start = CSPROJ_START_BLOCK
end = CSPROJ_END_BLOCK
none_includes = NONE_INCLUDES
buf = StringIO()
buf.write(start)
_include_items(buf, 'Compile', COMPILE_INCLUDES)
_include_items(buf, 'Compile', sorted(files, key=lambda x: x.replace('\\', '/')))
_include_items(buf, 'None', none_includes)
buf.write(end)
return buf.getvalue()
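# Hedged usage sketch (not part of the original generator): write the generated
# project XML to disk for each supported build mode. The source path and output
# filenames below are made-up placeholders, not real generator output.
if __name__ == '__main__':
    generated_files = ['Generated\\Files\\FilesRoutes.cs']  # placeholder path
    for build_mode in ('default', 'portable', 'portable40', 'doc'):
        csproj_xml = make_csproj_file(generated_files, build_mode)
        with open('Dropbox.Api.{0}.csproj.example'.format(build_mode), 'w') as out:
            out.write(csproj_xml)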
| mit |
npuichigo/ttsflow | third_party/tensorflow/tensorflow/contrib/slim/python/slim/data/tfexample_decoder_test.py | 34 | 30604 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for slim.data.tfexample_decoder."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.slim.python.slim.data import tfexample_decoder
from tensorflow.core.example import example_pb2
from tensorflow.core.example import feature_pb2
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import image_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import test
class TFExampleDecoderTest(test.TestCase):
def _EncodedFloatFeature(self, ndarray):
return feature_pb2.Feature(float_list=feature_pb2.FloatList(
value=ndarray.flatten().tolist()))
def _EncodedInt64Feature(self, ndarray):
return feature_pb2.Feature(int64_list=feature_pb2.Int64List(
value=ndarray.flatten().tolist()))
def _EncodedBytesFeature(self, tf_encoded):
with self.test_session():
encoded = tf_encoded.eval()
def BytesList(value):
return feature_pb2.BytesList(value=[value])
return feature_pb2.Feature(bytes_list=BytesList(encoded))
def _BytesFeature(self, ndarray):
values = ndarray.flatten().tolist()
for i in range(len(values)):
values[i] = values[i].encode('utf-8')
return feature_pb2.Feature(bytes_list=feature_pb2.BytesList(value=values))
def _StringFeature(self, value):
value = value.encode('utf-8')
return feature_pb2.Feature(bytes_list=feature_pb2.BytesList(value=[value]))
def _Encoder(self, image, image_format):
assert image_format in ['jpeg', 'JPEG', 'png', 'PNG', 'raw', 'RAW']
if image_format in ['jpeg', 'JPEG']:
tf_image = constant_op.constant(image, dtype=dtypes.uint8)
return image_ops.encode_jpeg(tf_image)
if image_format in ['png', 'PNG']:
tf_image = constant_op.constant(image, dtype=dtypes.uint8)
return image_ops.encode_png(tf_image)
if image_format in ['raw', 'RAW']:
return constant_op.constant(image.tostring(), dtype=dtypes.string)
def GenerateImage(self, image_format, image_shape):
"""Generates an image and an example containing the encoded image.
Args:
image_format: the encoding format of the image.
image_shape: the shape of the image to generate.
Returns:
image: the generated image.
example: a TF-example with a feature key 'image/encoded' set to the
serialized image and a feature key 'image/format' set to the image
encoding format ['jpeg', 'JPEG', 'png', 'PNG', 'raw'].
"""
num_pixels = image_shape[0] * image_shape[1] * image_shape[2]
image = np.linspace(
0, num_pixels - 1, num=num_pixels).reshape(image_shape).astype(np.uint8)
tf_encoded = self._Encoder(image, image_format)
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/encoded': self._EncodedBytesFeature(tf_encoded),
'image/format': self._StringFeature(image_format)
}))
return image, example.SerializeToString()
def DecodeExample(self, serialized_example, item_handler, image_format):
"""Decodes the given serialized example with the specified item handler.
Args:
serialized_example: a serialized TF example string.
item_handler: the item handler used to decode the image.
image_format: the image format being decoded.
Returns:
the decoded image found in the serialized Example.
"""
serialized_example = array_ops.reshape(serialized_example, shape=[])
decoder = tfexample_decoder.TFExampleDecoder(
keys_to_features={
'image/encoded':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=''),
'image/format':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=image_format),
},
items_to_handlers={'image': item_handler})
[tf_image] = decoder.decode(serialized_example, ['image'])
return tf_image
def RunDecodeExample(self, serialized_example, item_handler, image_format):
tf_image = self.DecodeExample(serialized_example, item_handler,
image_format)
with self.test_session():
decoded_image = tf_image.eval()
# We need to recast them here to avoid some issues with uint8.
return decoded_image.astype(np.float32)
def testDecodeExampleWithJpegEncoding(self):
image_shape = (2, 3, 3)
image, serialized_example = self.GenerateImage(
image_format='jpeg', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example, tfexample_decoder.Image(), image_format='jpeg')
# Need to use a tolerance of 1 because of noise in the jpeg encode/decode
self.assertAllClose(image, decoded_image, atol=1.001)
def testDecodeExampleWithJPEGEncoding(self):
test_image_channels = [1, 3]
for channels in test_image_channels:
image_shape = (2, 3, channels)
image, serialized_example = self.GenerateImage(
image_format='JPEG', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(channels=channels),
image_format='JPEG')
# Need to use a tolerance of 1 because of noise in the jpeg encode/decode
self.assertAllClose(image, decoded_image, atol=1.001)
def testDecodeExampleWithNoShapeInfo(self):
test_image_channels = [1, 3]
for channels in test_image_channels:
image_shape = (2, 3, channels)
_, serialized_example = self.GenerateImage(
image_format='jpeg', image_shape=image_shape)
tf_decoded_image = self.DecodeExample(
serialized_example,
tfexample_decoder.Image(
shape=None, channels=channels),
image_format='jpeg')
self.assertEqual(tf_decoded_image.get_shape().ndims, 3)
def testDecodeExampleWithPngEncoding(self):
test_image_channels = [1, 3, 4]
for channels in test_image_channels:
image_shape = (2, 3, channels)
image, serialized_example = self.GenerateImage(
image_format='png', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(channels=channels),
image_format='png')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithPNGEncoding(self):
test_image_channels = [1, 3, 4]
for channels in test_image_channels:
image_shape = (2, 3, channels)
image, serialized_example = self.GenerateImage(
image_format='PNG', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(channels=channels),
image_format='PNG')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithRawEncoding(self):
image_shape = (2, 3, 3)
image, serialized_example = self.GenerateImage(
image_format='raw', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(shape=image_shape),
image_format='raw')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithRAWEncoding(self):
image_shape = (2, 3, 3)
image, serialized_example = self.GenerateImage(
image_format='RAW', image_shape=image_shape)
decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(shape=image_shape),
image_format='RAW')
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithJpegEncodingAt16BitCausesError(self):
image_shape = (2, 3, 3)
unused_image, serialized_example = self.GenerateImage(
image_format='jpeg', image_shape=image_shape)
with self.assertRaises(TypeError):
unused_decoded_image = self.RunDecodeExample(
serialized_example,
tfexample_decoder.Image(dtype=dtypes.uint16),
image_format='jpeg')
def testDecodeExampleWithStringTensor(self):
tensor_shape = (2, 3, 1)
np_array = np.array([[['ab'], ['cd'], ['ef']],
[['ghi'], ['jkl'], ['mnop']]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._BytesFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels':
parsing_ops.FixedLenFeature(
tensor_shape,
dtypes.string,
default_value=constant_op.constant(
'', shape=tensor_shape, dtype=dtypes.string))
}
items_to_handlers = {'labels': tfexample_decoder.Tensor('labels'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
labels = labels.astype(np_array.dtype)
self.assertTrue(np.array_equal(np_array, labels))
def testDecodeExampleWithFloatTensor(self):
np_array = np.random.rand(2, 3, 1).astype('f')
example = example_pb2.Example(features=feature_pb2.Features(feature={
'array': self._EncodedFloatFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'array': parsing_ops.FixedLenFeature(np_array.shape, dtypes.float32)
}
items_to_handlers = {'array': tfexample_decoder.Tensor('array'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_array] = decoder.decode(serialized_example, ['array'])
self.assertAllEqual(tf_array.eval(), np_array)
def testDecodeExampleWithInt64Tensor(self):
np_array = np.random.randint(1, 10, size=(2, 3, 1))
example = example_pb2.Example(features=feature_pb2.Features(feature={
'array': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'array': parsing_ops.FixedLenFeature(np_array.shape, dtypes.int64)
}
items_to_handlers = {'array': tfexample_decoder.Tensor('array'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_array] = decoder.decode(serialized_example, ['array'])
self.assertAllEqual(tf_array.eval(), np_array)
def testDecodeExampleWithVarLenTensor(self):
np_array = np.array([[[1], [2], [3]], [[4], [5], [6]]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {'labels': tfexample_decoder.Tensor('labels'),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels, np_array.flatten())
def testDecodeExampleWithFixLenTensorWithShape(self):
np_array = np.array([[1, 2, 3], [4, 5, 6]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels':
parsing_ops.FixedLenFeature(
np_array.shape, dtype=dtypes.int64),
}
items_to_handlers = {
'labels': tfexample_decoder.Tensor(
'labels', shape=np_array.shape),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels, np_array)
def testDecodeExampleWithVarLenTensorToDense(self):
np_array = np.array([[1, 2, 3], [4, 5, 6]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'labels': self._EncodedInt64Feature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'labels': tfexample_decoder.Tensor(
'labels', shape=np_array.shape),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels, np_array)
def testDecodeExampleShapeKeyTensor(self):
np_image = np.random.rand(2, 3, 1).astype('f')
np_labels = np.array([[[1], [2], [3]], [[4], [5], [6]]])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image': self._EncodedFloatFeature(np_image),
'image/shape': self._EncodedInt64Feature(np.array(np_image.shape)),
'labels': self._EncodedInt64Feature(np_labels),
'labels/shape': self._EncodedInt64Feature(np.array(np_labels.shape)),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
'image/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'image':
tfexample_decoder.Tensor(
'image', shape_keys='image/shape'),
'labels':
tfexample_decoder.Tensor(
'labels', shape_keys='labels/shape'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_image, tf_labels] = decoder.decode(serialized_example,
['image', 'labels'])
self.assertAllEqual(tf_image.eval(), np_image)
self.assertAllEqual(tf_labels.eval(), np_labels)
def testDecodeExampleMultiShapeKeyTensor(self):
np_image = np.random.rand(2, 3, 1).astype('f')
np_labels = np.array([[[1], [2], [3]], [[4], [5], [6]]])
height, width, depth = np_labels.shape
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image': self._EncodedFloatFeature(np_image),
'image/shape': self._EncodedInt64Feature(np.array(np_image.shape)),
'labels': self._EncodedInt64Feature(np_labels),
'labels/height': self._EncodedInt64Feature(np.array([height])),
'labels/width': self._EncodedInt64Feature(np.array([width])),
'labels/depth': self._EncodedInt64Feature(np.array([depth])),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image': parsing_ops.VarLenFeature(dtype=dtypes.float32),
'image/shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/height': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/width': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'labels/depth': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'image':
tfexample_decoder.Tensor(
'image', shape_keys='image/shape'),
'labels':
tfexample_decoder.Tensor(
'labels',
shape_keys=['labels/height', 'labels/width', 'labels/depth']),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_image, tf_labels] = decoder.decode(serialized_example,
['image', 'labels'])
self.assertAllEqual(tf_image.eval(), np_image)
self.assertAllEqual(tf_labels.eval(), np_labels)
def testDecodeExampleWithSparseTensor(self):
np_indices = np.array([[1], [2], [5]])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
}
items_to_handlers = {'labels': tfexample_decoder.SparseTensor(),}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels.indices, np_indices)
self.assertAllEqual(labels.values, np_values)
self.assertAllEqual(labels.dense_shape, np_values.shape)
def testDecodeExampleWithSparseTensorWithKeyShape(self):
np_indices = np.array([[1], [2], [5]])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
np_shape = np.array([6])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
'shape': self._EncodedInt64Feature(np_shape),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
'shape': parsing_ops.VarLenFeature(dtype=dtypes.int64),
}
items_to_handlers = {
'labels': tfexample_decoder.SparseTensor(shape_key='shape'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels.indices, np_indices)
self.assertAllEqual(labels.values, np_values)
self.assertAllEqual(labels.dense_shape, np_shape)
def testDecodeExampleWithSparseTensorWithGivenShape(self):
np_indices = np.array([[1], [2], [5]])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
np_shape = np.array([6])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
}
items_to_handlers = {
'labels': tfexample_decoder.SparseTensor(shape=np_shape),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllEqual(labels.indices, np_indices)
self.assertAllEqual(labels.values, np_values)
self.assertAllEqual(labels.dense_shape, np_shape)
def testDecodeExampleWithSparseTensorToDense(self):
np_indices = np.array([1, 2, 5])
np_values = np.array([0.1, 0.2, 0.6]).astype('f')
np_shape = np.array([6])
np_dense = np.array([0.0, 0.1, 0.2, 0.0, 0.0, 0.6]).astype('f')
example = example_pb2.Example(features=feature_pb2.Features(feature={
'indices': self._EncodedInt64Feature(np_indices),
'values': self._EncodedFloatFeature(np_values),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'indices': parsing_ops.VarLenFeature(dtype=dtypes.int64),
'values': parsing_ops.VarLenFeature(dtype=dtypes.float32),
}
items_to_handlers = {
'labels':
tfexample_decoder.SparseTensor(
shape=np_shape, densify=True),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_labels] = decoder.decode(serialized_example, ['labels'])
labels = tf_labels.eval()
self.assertAllClose(labels, np_dense)
def testDecodeExampleWithTensor(self):
tensor_shape = (2, 3, 1)
np_array = np.random.rand(2, 3, 1)
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/depth_map': self._EncodedFloatFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image/depth_map':
parsing_ops.FixedLenFeature(
tensor_shape,
dtypes.float32,
default_value=array_ops.zeros(tensor_shape))
}
items_to_handlers = {'depth': tfexample_decoder.Tensor('image/depth_map')}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_depth] = decoder.decode(serialized_example, ['depth'])
depth = tf_depth.eval()
self.assertAllClose(np_array, depth)
def testDecodeExampleWithItemHandlerCallback(self):
np.random.seed(0)
tensor_shape = (2, 3, 1)
np_array = np.random.rand(2, 3, 1)
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/depth_map': self._EncodedFloatFeature(np_array),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image/depth_map':
parsing_ops.FixedLenFeature(
tensor_shape,
dtypes.float32,
default_value=array_ops.zeros(tensor_shape))
}
def HandleDepth(keys_to_tensors):
depth = list(keys_to_tensors.values())[0]
depth += 1
return depth
items_to_handlers = {
'depth':
tfexample_decoder.ItemHandlerCallback('image/depth_map',
HandleDepth)
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_depth] = decoder.decode(serialized_example, ['depth'])
depth = tf_depth.eval()
self.assertAllClose(np_array, depth - 1)
def testDecodeImageWithItemHandlerCallback(self):
image_shape = (2, 3, 3)
for image_encoding in ['jpeg', 'png']:
image, serialized_example = self.GenerateImage(
image_format=image_encoding, image_shape=image_shape)
with self.test_session():
def ConditionalDecoding(keys_to_tensors):
"""See base class."""
image_buffer = keys_to_tensors['image/encoded']
image_format = keys_to_tensors['image/format']
def DecodePng():
return image_ops.decode_png(image_buffer, 3)
def DecodeJpg():
return image_ops.decode_jpeg(image_buffer, 3)
image = control_flow_ops.case(
{
math_ops.equal(image_format, 'png'): DecodePng,
},
default=DecodeJpg,
exclusive=True)
image = array_ops.reshape(image, image_shape)
return image
keys_to_features = {
'image/encoded':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=''),
'image/format':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value='jpeg')
}
items_to_handlers = {
'image':
tfexample_decoder.ItemHandlerCallback(
['image/encoded', 'image/format'], ConditionalDecoding)
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_image] = decoder.decode(serialized_example, ['image'])
decoded_image = tf_image.eval()
if image_encoding == 'jpeg':
# For jenkins:
image = image.astype(np.float32)
decoded_image = decoded_image.astype(np.float32)
self.assertAllClose(image, decoded_image, rtol=.5, atol=1.001)
else:
self.assertAllClose(image, decoded_image, atol=0)
def testDecodeExampleWithBoundingBox(self):
num_bboxes = 10
np_ymin = np.random.rand(num_bboxes, 1)
np_xmin = np.random.rand(num_bboxes, 1)
np_ymax = np.random.rand(num_bboxes, 1)
np_xmax = np.random.rand(num_bboxes, 1)
np_bboxes = np.hstack([np_ymin, np_xmin, np_ymax, np_xmax])
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/object/bbox/ymin': self._EncodedFloatFeature(np_ymin),
'image/object/bbox/xmin': self._EncodedFloatFeature(np_xmin),
'image/object/bbox/ymax': self._EncodedFloatFeature(np_ymax),
'image/object/bbox/xmax': self._EncodedFloatFeature(np_xmax),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
keys_to_features = {
'image/object/bbox/ymin': parsing_ops.VarLenFeature(dtypes.float32),
'image/object/bbox/xmin': parsing_ops.VarLenFeature(dtypes.float32),
'image/object/bbox/ymax': parsing_ops.VarLenFeature(dtypes.float32),
'image/object/bbox/xmax': parsing_ops.VarLenFeature(dtypes.float32),
}
items_to_handlers = {
'object/bbox':
tfexample_decoder.BoundingBox(['ymin', 'xmin', 'ymax', 'xmax'],
'image/object/bbox/'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
[tf_bboxes] = decoder.decode(serialized_example, ['object/bbox'])
bboxes = tf_bboxes.eval()
self.assertAllClose(np_bboxes, bboxes)
def testDecodeExampleWithRepeatedImages(self):
image_shape = (2, 3, 3)
image_format = 'png'
image, _ = self.GenerateImage(
image_format=image_format, image_shape=image_shape)
tf_encoded = self._Encoder(image, image_format)
with self.test_session():
tf_string = tf_encoded.eval()
example = example_pb2.Example(features=feature_pb2.Features(feature={
'image/encoded': feature_pb2.Feature(bytes_list=feature_pb2.BytesList(
value=[tf_string, tf_string])),
'image/format': self._StringFeature(image_format),
}))
serialized_example = example.SerializeToString()
with self.test_session():
serialized_example = array_ops.reshape(serialized_example, shape=[])
decoder = tfexample_decoder.TFExampleDecoder(
keys_to_features={
'image/encoded':
parsing_ops.FixedLenFeature(
(2,), dtypes.string),
'image/format':
parsing_ops.FixedLenFeature(
(), dtypes.string, default_value=image_format),
},
items_to_handlers={'image': tfexample_decoder.Image(repeated=True)})
[tf_image] = decoder.decode(serialized_example, ['image'])
output_image = tf_image.eval()
self.assertEqual(output_image.shape, (2, 2, 3, 3))
self.assertAllEqual(np.squeeze(output_image[0, :, :, :]), image)
self.assertAllEqual(np.squeeze(output_image[1, :, :, :]), image)
if __name__ == '__main__':
test.main()
| apache-2.0 |
ianawilson/kafka-python | kafka/common.py | 15 | 6023 | import inspect
import sys
from collections import namedtuple
###############
# Structs #
###############
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-MetadataAPI
MetadataRequest = namedtuple("MetadataRequest",
["topics"])
MetadataResponse = namedtuple("MetadataResponse",
["brokers", "topics"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-ProduceAPI
ProduceRequest = namedtuple("ProduceRequest",
["topic", "partition", "messages"])
ProduceResponse = namedtuple("ProduceResponse",
["topic", "partition", "error", "offset"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-FetchAPI
FetchRequest = namedtuple("FetchRequest",
["topic", "partition", "offset", "max_bytes"])
FetchResponse = namedtuple("FetchResponse",
["topic", "partition", "error", "highwaterMark", "messages"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetAPI
OffsetRequest = namedtuple("OffsetRequest",
["topic", "partition", "time", "max_offsets"])
OffsetResponse = namedtuple("OffsetResponse",
["topic", "partition", "error", "offsets"])
# https://cwiki.apache.org/confluence/display/KAFKA/A+Guide+To+The+Kafka+Protocol#AGuideToTheKafkaProtocol-OffsetCommit/FetchAPI
OffsetCommitRequest = namedtuple("OffsetCommitRequest",
["topic", "partition", "offset", "metadata"])
OffsetCommitResponse = namedtuple("OffsetCommitResponse",
["topic", "partition", "error"])
OffsetFetchRequest = namedtuple("OffsetFetchRequest",
["topic", "partition"])
OffsetFetchResponse = namedtuple("OffsetFetchResponse",
["topic", "partition", "offset", "metadata", "error"])
# Other useful structs
BrokerMetadata = namedtuple("BrokerMetadata",
["nodeId", "host", "port"])
TopicMetadata = namedtuple("TopicMetadata",
["topic", "error", "partitions"])
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
OffsetAndMessage = namedtuple("OffsetAndMessage",
["offset", "message"])
Message = namedtuple("Message",
["magic", "attributes", "key", "value"])
TopicAndPartition = namedtuple("TopicAndPartition",
["topic", "partition"])
KafkaMessage = namedtuple("KafkaMessage",
["topic", "partition", "offset", "key", "value"])
# Define retry policy for async producer
# Limit value: int >= 0, 0 means no retries
RetryOptions = namedtuple("RetryOptions",
["limit", "backoff_ms", "retry_on_timeouts"])
#################
# Exceptions #
#################
class KafkaError(RuntimeError):
pass
class BrokerResponseError(KafkaError):
pass
class UnknownError(BrokerResponseError):
errno = -1
message = 'UNKNOWN'
class OffsetOutOfRangeError(BrokerResponseError):
errno = 1
message = 'OFFSET_OUT_OF_RANGE'
class InvalidMessageError(BrokerResponseError):
errno = 2
message = 'INVALID_MESSAGE'
class UnknownTopicOrPartitionError(BrokerResponseError):
errno = 3
    message = 'UNKNOWN_TOPIC_OR_PARTITION'
class InvalidFetchRequestError(BrokerResponseError):
errno = 4
message = 'INVALID_FETCH_SIZE'
class LeaderNotAvailableError(BrokerResponseError):
errno = 5
message = 'LEADER_NOT_AVAILABLE'
class NotLeaderForPartitionError(BrokerResponseError):
errno = 6
message = 'NOT_LEADER_FOR_PARTITION'
class RequestTimedOutError(BrokerResponseError):
errno = 7
message = 'REQUEST_TIMED_OUT'
class BrokerNotAvailableError(BrokerResponseError):
errno = 8
message = 'BROKER_NOT_AVAILABLE'
class ReplicaNotAvailableError(BrokerResponseError):
errno = 9
message = 'REPLICA_NOT_AVAILABLE'
class MessageSizeTooLargeError(BrokerResponseError):
errno = 10
message = 'MESSAGE_SIZE_TOO_LARGE'
class StaleControllerEpochError(BrokerResponseError):
errno = 11
message = 'STALE_CONTROLLER_EPOCH'
class OffsetMetadataTooLargeError(BrokerResponseError):
errno = 12
message = 'OFFSET_METADATA_TOO_LARGE'
class StaleLeaderEpochCodeError(BrokerResponseError):
errno = 13
message = 'STALE_LEADER_EPOCH_CODE'
class KafkaUnavailableError(KafkaError):
pass
class KafkaTimeoutError(KafkaError):
pass
class FailedPayloadsError(KafkaError):
def __init__(self, payload, *args):
super(FailedPayloadsError, self).__init__(*args)
self.payload = payload
class ConnectionError(KafkaError):
pass
class BufferUnderflowError(KafkaError):
pass
class ChecksumError(KafkaError):
pass
class ConsumerFetchSizeTooSmall(KafkaError):
pass
class ConsumerNoMoreData(KafkaError):
pass
class ConsumerTimeout(KafkaError):
pass
class ProtocolError(KafkaError):
pass
class UnsupportedCodecError(KafkaError):
pass
class KafkaConfigurationError(KafkaError):
pass
class AsyncProducerQueueFull(KafkaError):
def __init__(self, failed_msgs, *args):
super(AsyncProducerQueueFull, self).__init__(*args)
self.failed_msgs = failed_msgs
def _iter_broker_errors():
for name, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, BrokerResponseError) and obj != BrokerResponseError:
yield obj
kafka_errors = dict([(x.errno, x) for x in _iter_broker_errors()])
def check_error(response):
if isinstance(response, Exception):
raise response
if response.error:
error_class = kafka_errors.get(response.error, UnknownError)
raise error_class(response)
RETRY_BACKOFF_ERROR_TYPES = (
KafkaUnavailableError, LeaderNotAvailableError,
ConnectionError, FailedPayloadsError
)
RETRY_REFRESH_ERROR_TYPES = (
NotLeaderForPartitionError, UnknownTopicOrPartitionError,
LeaderNotAvailableError, ConnectionError
)
RETRY_ERROR_TYPES = RETRY_BACKOFF_ERROR_TYPES + RETRY_REFRESH_ERROR_TYPES
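# Hedged usage sketch (not part of the original module): how the error registry
# above is typically exercised. The ProduceResponse values are fabricated
# examples, not real broker output.
if __name__ == '__main__':
    ok = ProduceResponse(topic='test-topic', partition=0, error=0, offset=42)
    check_error(ok)  # error == 0, so nothing is raised

    bad = ProduceResponse(topic='test-topic', partition=0, error=3, offset=-1)
    try:
        check_error(bad)
    except UnknownTopicOrPartitionError as e:
        # errno 3 maps to UnknownTopicOrPartitionError via kafka_errors
        print('raised: %r' % e)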
| apache-2.0 |
PaulWay/insights-core | insights/parsers/tests/test_rabbitmq_log.py | 1 | 1163 | from insights.parsers.rabbitmq_log import RabbitMQStartupLog
from insights.parsers.rabbitmq_log import RabbitMQStartupErrLog
from insights.tests import context_wrap
STARTUP_LOG = """
starting file handle cache server ...done
starting worker pool ...done
starting database ...done
starting empty DB check ...done
starting exchange recovery ...done
starting queue supervisor and queue recovery ...BOOT ERROR: FAILED
"""
STARTUP_ERR_LOG = """
Error: {node_start_failed,normal}
Crash dump was written to: erl_crash.dump
Kernel pid terminated (application_controller) ({application_start_failure,kernel,{shutdown,{kernel,start,[normal,[]]}}})
"""
def test_rabbitmq_startup_log():
log = RabbitMQStartupLog(context_wrap(STARTUP_LOG))
assert len(log.get('done')) == 5
def test_rabbitmq_start_err_log():
log = RabbitMQStartupErrLog(context_wrap(STARTUP_ERR_LOG))
assert len(log.get('Error')) == 1
| apache-2.0 |
SCP-028/UGA | protein_pka/mcce/mcce.py | 1 | 17127 | #!python3
"""
Predict protein pKa values based on the MCCE method.
http://pka.engr.ccny.cuny.edu/
Requires MCCE 3.0 to work: https://anaconda.org/SalahSalah/mcce/files
"""
import asyncio
import glob
import gzip
import locale
import logging
import math
import os
import re
import shutil
import subprocess
import sys
import time
from multiprocessing import Pool
from urllib.request import urlopen
import aioftp
import pandas as pd
import uvloop
# Sapelo Locale is broken, quick fix
locale.setlocale(locale.LC_ALL, "en_US.UTF-8")
# Set working directory
ROOTPATH = os.path.dirname(os.path.realpath(sys.argv[0]))
os.chdir(ROOTPATH)
# Log settings
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.FileHandler(f"./pKa_calculation_{__file__}.log")
handler.setLevel(logging.INFO)
formatter = logging.Formatter(
"%(asctime)s\t%(levelname)s\t"
"[%(filename)s:%(lineno)s -%(funcName)12s()]\t%(message)s"
)
handler.setFormatter(formatter)
logger.addHandler(handler)
class pdb:
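    """Track PDB IDs as they move through download -> unzip -> preprocess -> pKa calculation."""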
def __init__(self):
self.all_ids = []
self.download_ids = [] # Download -> Unzip -> Preprocess -> Calculate
self.unzip_ids = [] # Unzip -> Preprocess -> Calculate
self.preprocess_ids = [] # Preprocess -> Calculate
self.ready_ids = [] # Calculate
self.finished_ids = [] # Successfully calculated IDs
self.error_ids = [] # Error in download, unzip, or calculation
# IDs this script will work on (messy queue implementation)
self.working_ids = []
def load_id(self):
"""
First try to get existing pKa values,
then get the list of PDB files to download.
"""
for folder in ["./pdb", "./annotation", "./results"]:
try:
os.makedirs(folder)
except OSError:
pass
self.finished_ids = [id[-8:-4] for id in glob.glob("./results/*.pka")]
logger.debug(f"{len(self.finished_ids)} finished files.")
# Create file even at first run so that the results folder doesn't get deleted
with open("./results/finished_ids.list", "a") as f:
f.write("\n".join(self.finished_ids))
self.ready_ids = list(set(
[id[-12:-8].upper() for id in glob.glob("./pdb/*/*.pdb.bak")]) - set(self.finished_ids))
logger.debug(f"{len(self.ready_ids)} files ready to be calculated.")
self.preprocess_ids = list(set([id[-8:-4].upper() for id in glob.glob(
"./pdb/*/*.pdb") if "out" not in id]) - set(self.finished_ids) - set(self.ready_ids))
logger.debug(
f"{len(self.preprocess_ids)} files ready to be preprocessed.")
self.unzip_ids = [id[-11:-7].upper() for id in glob.glob("./*.ent.gz")]
logger.debug(f"{len(self.unzip_ids)} files ready to be unzipped.")
if not os.path.exists("./annotation/uniprot_id_mapping.dat"):
with urlopen("ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/idmapping/by_organism/HUMAN_9606_idmapping.dat.gz") as remotefile:
logger.debug(
"Saving UniProt ID mapping data since it doesn't exist...")
with open("./annotation/uniprot_id_mapping.dat.gz", "wb") as f:
f.write(remotefile.read())
with gzip.open(
"./annotation/uniprot_id_mapping.dat.gz", "rb") as inFile, open(
"./annotation/uniprot_id_mapping.dat", "wb") as outFile:
shutil.copyfileobj(inFile, outFile)
os.remove("./annotation/uniprot_id_mapping.dat.gz")
else:
logger.debug("UniProt ID mapping data exists.")
logger.debug("Reading all possible PDB IDs...")
annot = pd.read_csv("./annotation/uniprot_id_mapping.dat",
sep="\t", header=None,
names=["uniprot", "id", "value"])
self.all_ids = annot.loc[annot.id == "PDB", "value"].tolist()
self.download_ids = list(set(self.all_ids) - set(self.unzip_ids) - set(
self.preprocess_ids) - set(self.ready_ids) - set(self.finished_ids))
logger.info(
f"{len(self.download_ids)} PDB files need to be downloaded.")
def get_link(self, ids):
""" Get PDB file links from:
ftp://ftp.wwpdb.org/pub/pdb/data/structures/divided/pdb/ ,
and create folders to store the files.
Parameters
----------
ids: list
The PDB IDs to download.
Returns
-------
Links to download.
"""
if isinstance(ids, list):
ids = [id[:4].lower() for id in ids] # pdb file IDs
pdb_names = [f"{id}.ent.gz" for id in ids] # pdb filenames
# subdirectory of the pdb files
pdbDirs = [id[1:3].lower() for id in ids]
remoteaddr = [
f"ftp://ftp.wwpdb.org/pub/pdb/data/structures/divided/pdb/{pdbDir}/pdb{pdb_name}" for pdbDir, pdb_name in zip(pdbDirs, pdb_names)]
else:
raise TypeError(f"{id} is not a string or list.")
return remoteaddr
def make_dirs(self, ids):
"""Make sure the download directory exists."""
for id in ids:
try:
os.makedirs(os.path.join(ROOTPATH, "pdb", id.upper()))
except OSError:
pass
async def download_worker(self, session, url):
"""Download the given url to working directory."""
url = url[len("ftp://ftp.wwpdb.org"):]
logger.debug(f"Downloading {url}")
try:
await session.download(url)
self.unzip_ids.append(url[-11:-7].upper())
except Exception as e:
self.error_ids.append(url[-11:-7].upper())
logger.warning(f"Error when downloading {url}: {e}")
async def download_session(self, sem, work_queue):
""" Get urls from the queue and pass to worker.
Parameters
----------
sem: asyncio.Semaphore object
work_queue: asyncio.Queue object
"""
while not work_queue.empty():
url = await work_queue.get()
logger.debug(f"Got url from queue: {url}")
async with sem:
async with aioftp.ClientSession("ftp.wwpdb.org") as session:
await self.download_worker(session, url)
def download_queue(self, urls):
""" Create a queue to download all the given urls.
Parameters
----------
urls: list
A list of urls to download.
Returns
-------
Downloaded "*.ent.gz" files in working directory.
"""
logger.debug(f"{len(urls)} urls to download.")
loop = uvloop.new_event_loop()
asyncio.set_event_loop(loop)
q = asyncio.Queue()
sem = asyncio.Semaphore(10)
[q.put_nowait(url) for url in urls]
tasks = [asyncio.ensure_future(self.download_session(sem, q))
for _ in range(len(urls))]
loop.run_until_complete(asyncio.gather(*tasks))
# Zero-sleep to allow underlying connections to close
loop.run_until_complete(asyncio.sleep(0))
loop.close()
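    # Hedged usage sketch (not part of the original class): the intended call
    # order for the download helpers above, assuming `p = pdb()` has already
    # run load_id().
    #
    #   urls = p.get_link(p.download_ids)
    #   p.make_dirs(p.download_ids)
    #   p.download_queue(urls)
    #   # the "pdb<id>.ent.gz" files now sit in the working directory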
def check_mcce(self):
"""Check if MCCE 3.0 exists."""
if not os.path.exists(os.path.join(ROOTPATH, "mcce3.0")):
if not os.path.exists(os.path.join(ROOTPATH, "mcce3.0.tar.bz2")):
logger.debug("MCCE isn't downloaded yet. Retrieving...")
with urlopen("https://anaconda.org/SalahSalah/mcce/3.0/download/linux-64/mcce-3.0-0.tar.bz2") as remotefile:
with open("./mcce-3.0-0.tar.bz2", 'wb') as f:
f.write(remotefile.read())
subprocess.run(["tar", "-xjf", "mcce-3.0-0.tar.bz2"])
shutil.move("./info/recipe/mcce3.0", "./mcce3.0")
shutil.rmtree(os.path.join(ROOTPATH, "info"), ignore_errors=True)
shutil.rmtree(os.path.join(ROOTPATH, "bin"), ignore_errors=True)
else:
logger.info("MCCE 3.0 exists, proceeding to calculation...")
def unzip(self, id):
"""Unzip downloaded *.ent.gz file."""
try:
saved_pdb = os.path.join(ROOTPATH, "pdb", id, f"{id}.pdb")
with gzip.open(f"pdb{id.lower()}.ent.gz", "rb") as inFile, open(saved_pdb, "wb") as outFile:
shutil.copyfileobj(inFile, outFile)
os.remove(f"pdb{id.lower()}.ent.gz")
self.preprocess_ids.append(id)
except Exception as e:
self.error_ids.append(id)
logger.warning(f"Unzip of {id} unsuccessful: {e}")
def preprocess(self, id, backup=True):
"""
This program will:
1) strip lines other than ATOM and HETATM records
2) keep the first model of an NMR structure
3) delete H and D atoms
4) MSE to MET residue
5) keep only one atom alternate position
6) keep defined chains, if chain ID(s) are given in command
7) remove some cofactors and salt ions
Parameters
----------
id: str
The PDB ID to find the file.
backup: bool, optional
Whether to backup the original file or not. Default is True,
and save to "original.bak".
Returns
-------
        Nothing; the file is modified in place.
"""
removable_res = [
" ZN", "PCA", "XYP", " NA", " CL", " CA", " MG", " MN", "HOH"
]
model_start = False
newlines = []
ID = id.upper()
filepath = os.path.join(ROOTPATH, "pdb", ID, f"{ID}.pdb")
if backup:
shutil.copy2(filepath, f"{filepath}.bak")
with open(filepath) as f:
for line in f:
if line[:5] == "MODEL":
model_start = True
if model_start and line[:6] == "ENDMDL":
break
if line[:6] != "ATOM " and line[:6] != "HETATM":
continue # discard non ATOM records
if line[13] == "H" or line[12] == "H":
continue
if line[16] == "A":
line = f"{line[:16]} {line[17:]}"
elif line[16] != " ":
continue # delete this line, alternative posion is not A or empty
if line[:6] == "HETATM" and line[17:20] == "MSE":
if line[12:15] == "SE ":
line = f"ATOM {line[6:12]} SD{line[15:17]}MET{line[20:]}"
else:
line = f"ATOM {line[6:17]}MET{line[20:]}"
res = line[17:20]
if res in removable_res:
continue
newlines.append(line.rstrip())
with open(filepath, "w") as f:
f.write("\n".join(newlines))
logger.debug(f"{ID} preprocessing complete.")
def set_params(self, id, quickrun=True):
"""
Set the parameters for MCCE.
Parameters
----------
id: str
The PDB ID of the file.
quickrun: bool, optional
Use "run.prm.quick" or "run.prm.default".
Returns
-------
        run.prm: a parameter file written alongside the PDB file that points
        MCCE at it.
"""
pkgpath = os.path.join(ROOTPATH, "mcce3.0")
ID = id.upper()
filepath = os.path.join(ROOTPATH, "pdb", ID)
newlines = []
if quickrun:
shutil.copy2(
os.path.join(pkgpath, "run.prm.quick"),
os.path.join(filepath, "run.prm")
)
else:
            shutil.copy2(
                os.path.join(pkgpath, "run.prm.default"),
                os.path.join(filepath, "run.prm")
            )
with open(os.path.join(filepath, "run.prm")) as f:
for line in f:
line = line.rstrip()
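                # run.prm lines have the form "<value> ...description... (KEYNAME)";
                # rewrite the leading value token for the handful of keys used here.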
if line.endswith("(INPDB)"):
line = re.sub(r"^[^\s]+", fr"{id}.pdb", line)
if line.endswith(("(DO_PREMCCE)", "(DO_ROTAMERS)",
"(DO_ENERGY)", "(DO_MONTE)")):
line = re.sub(r"^f", r"t", line)
if line.endswith("(EPSILON_PROT)"):
line = re.sub(r"^[\d\.]+", r"8.0", line)
if line.startswith("/home/mcce/mcce3.0"):
line = re.sub(r"^/.*3\.0", pkgpath,
line)
newlines.append(line)
with open(os.path.join(filepath, "run.prm"), "w") as f:
f.write("\n".join(newlines))
self.ready_ids.append(ID)
logger.debug(f"Parameters set for {ID}.")
def split_ready_ids(self, num):
""" A naive queue implementation for multiple scripts.
Parameters
----------
num: int
Which part of the IDs to work on.
Returns
-------
A list of the actual IDs to work on, and save the lists of IDs for
other scripts to work with if this is the first instance.
"""
if os.path.isfile(os.path.join(ROOTPATH, "results", "working_ids.list")):
with open(os.path.join(ROOTPATH, "results", f"working_ids.list{num}"), "r") as f:
self.working_ids = [line.strip() for line in f]
else:
n = math.ceil(len(self.ready_ids) / 10)
self.working_ids = [self.ready_ids[i:i + n]
for i in range(0, len(self.ready_ids), n)]
metafile = []
for i, ids in enumerate(self.working_ids):
metafile.append(os.path.join(
ROOTPATH, "results", f"working_ids.list{i}"))
with open(os.path.join(ROOTPATH, "results", f"working_ids.list{i}"), "w") as f:
f.write("\n".join(ids))
logger.debug(
f"Saved {len(ids)} IDs to file working_ids.list{i} .")
with open(os.path.join(ROOTPATH, "results", "working_ids.list"), "w") as f:
f.write("\n".join(metafile))
self.working_ids = self.working_ids[num]
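        # Example (hypothetical): with 1000 ready IDs, instance 0 writes
        # working_ids.list0 ... working_ids.list9 (100 IDs each) and keeps
        # chunk 0; instances started with num=1..9 simply read their own chunk.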
def calc_pka(self, id, clean=True):
""" Calculate protein pKa values using MCCE.
https://sites.google.com/site/mccewiki/home
Parameters
----------
id: str
The PDB ID of the protein calculated.
clean: bool, optional
Only keep the PDB file, run log and pKa output.
Returns
-------
A set of files in a subdirectory named after the ID.
See user manual for detail.
"""
id = id.upper()
os.chdir(os.path.realpath(os.path.join(ROOTPATH, "pdb", id)))
logger.info(f"{id} calculation started.")
start = time.time()
with open(f"{id}.run.log", "w") as f:
subprocess.run(f"{ROOTPATH}/mcce3.0/mcce", stdout=f)
with open(f"{id}.run.log", "rb") as f:
last = f.readlines()[-1].decode().lstrip()
if last.startswith(("Fatal", "FATAL", "WARNING", "STOP")):
self.error_ids.append(id)
logger.warning(
f"{id} calculation aborted after {time.time() - start}s, due to {last}")
else:
self.finished_ids.append(id)
logger.info(
f"{id} calculation finished, used {time.time() - start}s.")
shutil.move("pK.out", os.path.join(
ROOTPATH, "results", f"{id}.pka"))
if clean:
del_list = [i for i in os.listdir() if i not in (
"pK.out", f"{id}.run.log", f"{id}.pdb.bak")]
            for item in del_list:
                os.remove(item)
if __name__ == "__main__":
x = pdb()
x.load_id()
urls = x.get_link(x.download_ids)
x.make_dirs(x.all_ids)
x.download_queue(urls)
x.check_mcce()
for id in x.unzip_ids:
x.unzip(id)
for id in x.preprocess_ids:
try:
x.preprocess(id)
x.set_params(id)
except Exception as e:
x.error_ids.append(id)
logger.warning(f"Preprocess of {id}: {e}")
# subprocess.run(["find", ".", "-type", "d", "-empty", "-delete"])
x.split_ready_ids(0) # 0 - 9, run 0 first to generate other lists
with Pool(os.cpu_count()) as p:
p.map(x.calc_pka, x.working_ids)
with open("./results/finished_ids.list", "a") as f:
f.write("\n".join(x.working_ids))
with open("./results/error_ids.list", "a") as f:
f.write("\n".join(x.error_ids))
| apache-2.0 |
Einsteinish/PyTune3 | utils/bootstrap_story_hash.py | 1 | 1046 | import time
import pymongo
from django.conf import settings
from apps.rss_feeds.models import MStory, Feed
db = settings.MONGODB
batch = 0
start = 0
for f in xrange(start, Feed.objects.latest('pk').pk):
if f < batch*100000: continue
start = time.time()
try:
cp1 = time.time() - start
# if feed.active_premium_subscribers < 1: continue
stories = MStory.objects.filter(story_feed_id=f, story_hash__exists=False)\
.only('id', 'story_feed_id', 'story_guid')\
.read_preference(pymongo.ReadPreference.SECONDARY)
cp2 = time.time() - start
count = 0
for story in stories:
count += 1
db.pytune.stories.update({"_id": story.id}, {"$set": {
"story_hash": story.feed_guid_hash
}})
cp3 = time.time() - start
print "%s: %3s stories (%s/%s/%s)" % (f, count, round(cp1, 2), round(cp2, 2), round(cp3, 2))
except Exception, e:
print " ***> (%s) %s" % (f, e)
| mit |
alvin319/CarnotKE | jyhton/lib-python/2.7/wsgiref/headers.py | 229 | 5879 | """Manage HTTP Response Headers
Much of this module is red-handedly pilfered from email.message in the stdlib,
so portions are Copyright (C) 2001,2002 Python Software Foundation, and were
written by Barry Warsaw.
"""
from types import ListType, TupleType
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
import re
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _formatparam(param, value=None, quote=1):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true.
"""
if value is not None and len(value) > 0:
if quote or tspecials.search(value):
value = value.replace('\\', '\\\\').replace('"', r'\"')
return '%s="%s"' % (param, value)
else:
return '%s=%s' % (param, value)
else:
return param
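# Example: _formatparam('filename', 'bud.gif') -> 'filename="bud.gif"', while
# _formatparam('charset', 'utf-8', quote=0) -> 'charset=utf-8'.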
class Headers:
"""Manage a collection of HTTP response headers"""
def __init__(self,headers):
if type(headers) is not ListType:
raise TypeError("Headers must be a list of name/value tuples")
self._headers = headers
def __len__(self):
"""Return the total number of headers, including duplicates."""
return len(self._headers)
def __setitem__(self, name, val):
"""Set the value of a header."""
del self[name]
self._headers.append((name, val))
def __delitem__(self,name):
"""Delete all occurrences of a header, if present.
Does *not* raise an exception if the header is missing.
"""
name = name.lower()
self._headers[:] = [kv for kv in self._headers if kv[0].lower() != name]
def __getitem__(self,name):
"""Get the first header value for 'name'
Return None if the header is missing instead of raising an exception.
        Note that if the header appeared multiple times, exactly which
        occurrence gets returned is undefined. Use get_all() to get all
the values matching a header field name.
"""
return self.get(name)
def has_key(self, name):
"""Return true if the message contains the header."""
return self.get(name) is not None
__contains__ = has_key
def get_all(self, name):
"""Return a list of all the values for the named field.
These will be sorted in the order they appeared in the original header
list or were added to this instance, and may contain duplicates. Any
fields deleted and re-inserted are always appended to the header list.
If no fields exist with the given name, returns an empty list.
"""
name = name.lower()
return [kv[1] for kv in self._headers if kv[0].lower()==name]
def get(self,name,default=None):
"""Get the first header value for 'name', or return 'default'"""
name = name.lower()
for k,v in self._headers:
if k.lower()==name:
return v
return default
def keys(self):
"""Return a list of all the header field names.
These will be sorted in the order they appeared in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [k for k, v in self._headers]
def values(self):
"""Return a list of all header values.
These will be sorted in the order they appeared in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [v for k, v in self._headers]
def items(self):
"""Get all the header fields and values.
These will be sorted in the order they were in the original header
list, or were added to this instance, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return self._headers[:]
def __repr__(self):
return "Headers(%r)" % self._headers
def __str__(self):
"""str() returns the formatted headers, complete with end line,
suitable for direct HTTP transmission."""
return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
def setdefault(self,name,value):
"""Return first matching header value for 'name', or 'value'
If there is no header named 'name', add a new header with name 'name'
and value 'value'."""
result = self.get(name)
if result is None:
self._headers.append((name,value))
return value
else:
return result
def add_header(self, _name, _value, **_params):
"""Extended header setting.
_name is the header field to add. keyword arguments can be used to set
additional parameters for the header field, with underscores converted
to dashes. Normally the parameter will be added as key="value" unless
value is None, in which case only the key will be added.
Example:
h.add_header('content-disposition', 'attachment', filename='bud.gif')
Note that unlike the corresponding 'email.message' method, this does
*not* handle '(charset, language, value)' tuples: all values must be
strings or None.
"""
parts = []
if _value is not None:
parts.append(_value)
for k, v in _params.items():
if v is None:
parts.append(k.replace('_', '-'))
else:
parts.append(_formatparam(k.replace('_', '-'), v))
self._headers.append((_name, "; ".join(parts)))
| apache-2.0 |
yannickcr/Sick-Beard | lib/subliminal/utils.py | 167 | 2027 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
import re
__all__ = ['get_keywords', 'split_keyword', 'to_unicode']
def get_keywords(guess):
"""Retrieve keywords from guessed informations
:param guess: guessed informations
:type guess: :class:`guessit.guess.Guess`
:return: lower case alphanumeric keywords
:rtype: set
"""
keywords = set()
for k in ['releaseGroup', 'screenSize', 'videoCodec', 'format']:
if k in guess:
keywords = keywords | split_keyword(guess[k].lower())
return keywords
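# e.g. a guess such as {'releaseGroup': 'LOL', 'videoCodec': 'x264'} yields
# set(['lol', 'x264'])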
def split_keyword(keyword):
"""Split a keyword in multiple ones on any non-alphanumeric character
:param string keyword: keyword
:return: keywords
:rtype: set
"""
split = set(re.findall(r'\w+', keyword))
return split
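# e.g. split_keyword('x264-2hd') == set(['x264', '2hd'])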
def to_unicode(data):
"""Convert a basestring to unicode
:param basestring data: data to decode
:return: data as unicode
:rtype: unicode
"""
if not isinstance(data, basestring):
raise ValueError('Basestring expected')
if isinstance(data, unicode):
return data
for encoding in ('utf-8', 'latin-1'):
try:
return unicode(data, encoding)
except UnicodeDecodeError:
pass
return unicode(data, 'utf-8', 'replace')
| gpl-3.0 |
pvizeli/hassio | hassio/core.py | 1 | 5205 | """Main file for HassIO."""
import asyncio
import logging
import aiohttp
import docker
from .addons import AddonManager
from .api import RestAPI
from .host_control import HostControl
from .const import (
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE,
STARTUP_INITIALIZE)
from .scheduler import Scheduler
from .dock.homeassistant import DockerHomeAssistant
from .dock.supervisor import DockerSupervisor
from .tasks import (
hassio_update, homeassistant_watchdog, homeassistant_setup,
api_sessions_cleanup)
from .tools import get_local_ip, fetch_timezone
_LOGGER = logging.getLogger(__name__)
class HassIO(object):
"""Main object of hassio."""
def __init__(self, loop, config):
"""Initialize hassio object."""
self.exit_code = 0
self.loop = loop
self.config = config
self.websession = aiohttp.ClientSession(loop=loop)
self.scheduler = Scheduler(loop)
self.api = RestAPI(config, loop)
self.dock = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
# init basic docker container
self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
self.homeassistant = DockerHomeAssistant(config, loop, self.dock)
# init HostControl
self.host_control = HostControl(loop)
# init addon system
self.addons = AddonManager(config, loop, self.dock)
async def setup(self):
"""Setup HassIO orchestration."""
# supervisor
if not await self.supervisor.attach():
_LOGGER.fatal("Can't attach to supervisor docker container!")
await self.supervisor.cleanup()
# set running arch
self.config.arch = self.supervisor.arch
# set api endpoint
self.config.api_endpoint = await get_local_ip(self.loop)
# update timezone
if self.config.timezone == 'UTC':
self.config.timezone = await fetch_timezone(self.websession)
# hostcontrol
await self.host_control.load()
# schedule update info tasks
self.scheduler.register_task(
self.host_control.load, RUN_UPDATE_INFO_TASKS)
# rest api views
self.api.register_host(self.host_control)
self.api.register_network(self.host_control)
self.api.register_supervisor(
self.supervisor, self.addons, self.host_control, self.websession)
self.api.register_homeassistant(self.homeassistant)
self.api.register_addons(self.addons)
self.api.register_security()
self.api.register_panel()
# schedule api session cleanup
self.scheduler.register_task(
api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
now=True)
# first start of supervisor?
if not await self.homeassistant.exists():
_LOGGER.info("No HomeAssistant docker found.")
await homeassistant_setup(
self.config, self.loop, self.homeassistant, self.websession)
else:
await self.homeassistant.attach()
# Load addons
await self.addons.prepare()
# schedule addon update task
self.scheduler.register_task(
self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
# schedule self update task
self.scheduler.register_task(
hassio_update(self.config, self.supervisor, self.websession),
RUN_UPDATE_SUPERVISOR_TASKS)
# start addon mark as initialize
await self.addons.auto_boot(STARTUP_INITIALIZE)
async def start(self):
"""Start HassIO orchestration."""
# on release channel, try update itself
# on beta channel, only read new versions
await asyncio.wait(
[hassio_update(self.config, self.supervisor, self.websession)()],
loop=self.loop
)
# start api
await self.api.start()
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
try:
# HomeAssistant is already running / supervisor have only reboot
if await self.homeassistant.is_running():
_LOGGER.info("HassIO reboot detected")
return
# start addon mark as before
await self.addons.auto_boot(STARTUP_BEFORE)
# run HomeAssistant
await self.homeassistant.run()
# start addon mark as after
await self.addons.auto_boot(STARTUP_AFTER)
finally:
# schedule homeassistant watchdog
self.scheduler.register_task(
homeassistant_watchdog(self.loop, self.homeassistant),
RUN_WATCHDOG_HOMEASSISTANT)
async def stop(self, exit_code=0):
"""Stop a running orchestration."""
# don't process scheduler anymore
self.scheduler.stop()
# process stop tasks
self.websession.close()
await self.api.stop()
self.exit_code = exit_code
self.loop.stop()
| bsd-3-clause |
yencarnacion/jaikuengine | .google_appengine/lib/yaml-3.10/yaml/reader.py | 424 | 6746 | # This module contains abstractions for the input stream. You don't have to
# looks further, there are no pretty code.
#
# We define two classes here.
#
# Mark(source, line, column)
# It's just a record and its only use is producing nice error messages.
# Parser does not use it for any other purposes.
#
# Reader(source, data)
# Reader determines the encoding of `data` and converts it to unicode.
# Reader provides the following methods and attributes:
# reader.peek(length=1) - return the next `length` characters
# reader.forward(length=1) - move the current position to `length` characters.
# reader.index - the number of the current character.
# reader.line, stream.column - the line and the column of the current character.
__all__ = ['Reader', 'ReaderError']
from error import YAMLError, Mark
import codecs, re
class ReaderError(YAMLError):
def __init__(self, name, position, character, encoding, reason):
self.name = name
self.character = character
self.position = position
self.encoding = encoding
self.reason = reason
def __str__(self):
if isinstance(self.character, str):
return "'%s' codec can't decode byte #x%02x: %s\n" \
" in \"%s\", position %d" \
% (self.encoding, ord(self.character), self.reason,
self.name, self.position)
else:
return "unacceptable character #x%04x: %s\n" \
" in \"%s\", position %d" \
% (self.character, self.reason,
self.name, self.position)
class Reader(object):
# Reader:
# - determines the data encoding and converts it to unicode,
# - checks if characters are in allowed range,
# - adds '\0' to the end.
# Reader accepts
# - a `str` object,
# - a `unicode` object,
# - a file-like object with its `read` method returning `str`,
# - a file-like object with its `read` method returning `unicode`.
# Yeah, it's ugly and slow.
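    # Example: Reader(u"- item\n").peek() returns u'-'; forward() then advances
    # pointer/index/line/column so error marks point at the right spot.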
def __init__(self, stream):
self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
self.buffer = u''
self.pointer = 0
self.raw_buffer = None
self.raw_decode = None
self.encoding = None
self.index = 0
self.line = 0
self.column = 0
if isinstance(stream, unicode):
self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+u'\0'
elif isinstance(stream, str):
self.name = "<string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = ''
self.determine_encoding()
def peek(self, index=0):
try:
return self.buffer[self.pointer+index]
except IndexError:
self.update(index+1)
return self.buffer[self.pointer+index]
def prefix(self, length=1):
if self.pointer+length >= len(self.buffer):
self.update(length)
return self.buffer[self.pointer:self.pointer+length]
def forward(self, length=1):
if self.pointer+length+1 >= len(self.buffer):
self.update(length+1)
while length:
ch = self.buffer[self.pointer]
self.pointer += 1
self.index += 1
if ch in u'\n\x85\u2028\u2029' \
or (ch == u'\r' and self.buffer[self.pointer] != u'\n'):
self.line += 1
self.column = 0
elif ch != u'\uFEFF':
self.column += 1
length -= 1
def get_mark(self):
if self.stream is None:
return Mark(self.name, self.index, self.line, self.column,
self.buffer, self.pointer)
else:
return Mark(self.name, self.index, self.line, self.column,
None, None)
def determine_encoding(self):
while not self.eof and len(self.raw_buffer) < 2:
self.update_raw()
if not isinstance(self.raw_buffer, unicode):
if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
self.raw_decode = codecs.utf_16_le_decode
self.encoding = 'utf-16-le'
elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
self.raw_decode = codecs.utf_16_be_decode
self.encoding = 'utf-16-be'
else:
self.raw_decode = codecs.utf_8_decode
self.encoding = 'utf-8'
self.update(1)
NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
def check_printable(self, data):
match = self.NON_PRINTABLE.search(data)
if match:
character = match.group()
position = self.index+(len(self.buffer)-self.pointer)+match.start()
raise ReaderError(self.name, position, ord(character),
'unicode', "special characters are not allowed")
def update(self, length):
if self.raw_buffer is None:
return
self.buffer = self.buffer[self.pointer:]
self.pointer = 0
while len(self.buffer) < length:
if not self.eof:
self.update_raw()
if self.raw_decode is not None:
try:
data, converted = self.raw_decode(self.raw_buffer,
'strict', self.eof)
except UnicodeDecodeError, exc:
character = exc.object[exc.start]
if self.stream is not None:
position = self.stream_pointer-len(self.raw_buffer)+exc.start
else:
position = exc.start
raise ReaderError(self.name, position, character,
exc.encoding, exc.reason)
else:
data = self.raw_buffer
converted = len(data)
self.check_printable(data)
self.buffer += data
self.raw_buffer = self.raw_buffer[converted:]
if self.eof:
self.buffer += u'\0'
self.raw_buffer = None
break
def update_raw(self, size=1024):
data = self.stream.read(size)
if data:
self.raw_buffer += data
self.stream_pointer += len(data)
else:
self.eof = True
#try:
# import psyco
# psyco.bind(Reader)
#except ImportError:
# pass
| apache-2.0 |
vmg/hg-stable | hgext/keyword.py | 92 | 27955 | # keyword.py - $Keyword$ expansion for Mercurial
#
# Copyright 2007-2012 Christian Ebert <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# $Id$
#
# Keyword expansion hack against the grain of a Distributed SCM
#
# There are many good reasons why this is not needed in a distributed
# SCM, still it may be useful in very small projects based on single
# files (like LaTeX packages), that are mostly addressed to an
# audience not running a version control system.
#
# For in-depth discussion refer to
# <http://mercurial.selenic.com/wiki/KeywordPlan>.
#
# Keyword expansion is based on Mercurial's changeset template mappings.
#
# Binary files are not touched.
#
# Files to act upon/ignore are specified in the [keyword] section.
# Customized keyword template mappings in the [keywordmaps] section.
#
# Run "hg help keyword" and "hg kwdemo" to get info on configuration.
'''expand keywords in tracked files
This extension expands RCS/CVS-like or self-customized $Keywords$ in
tracked text files selected by your configuration.
Keywords are only expanded in local repositories and not stored in the
change history. The mechanism can be regarded as a convenience for the
current user or for archive distribution.
Keywords expand to the changeset data pertaining to the latest change
relative to the working directory parent of each file.
Configuration is done in the [keyword], [keywordset] and [keywordmaps]
sections of hgrc files.
Example::
[keyword]
# expand keywords in every python file except those matching "x*"
**.py =
x* = ignore
[keywordset]
# prefer svn- over cvs-like default keywordmaps
svn = True
.. note::
The more specific you are in your filename patterns the less you
lose speed in huge repositories.
For [keywordmaps] template mapping and expansion demonstration and
control run :hg:`kwdemo`. See :hg:`help templates` for a list of
available templates and filters.
Three additional date template filters are provided:
:``utcdate``: "2006/09/18 15:13:13"
:``svnutcdate``: "2006-09-18 15:13:13Z"
:``svnisodate``: "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
The default template mappings (view with :hg:`kwdemo -d`) can be
replaced with customized keywords and templates. Again, run
:hg:`kwdemo` to control the results of your configuration changes.
Before changing/disabling active keywords, you must run :hg:`kwshrink`
to avoid storing expanded keywords in the change history.
To force expansion after enabling it, or a configuration change, run
:hg:`kwexpand`.
Expansions spanning more than one line and incremental expansions,
like CVS' $Log$, are not supported. A keyword template map "Log =
{desc}" expands to the first line of the changeset description.
'''
from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
from mercurial import localrepo, match, patch, templatefilters, templater, util
from mercurial import scmutil
from mercurial.hgweb import webcommands
from mercurial.i18n import _
import os, re, shutil, tempfile
commands.optionalrepo += ' kwdemo'
commands.inferrepo += ' kwexpand kwfiles kwshrink'
cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'
# hg commands that do not act on keywords
nokwcommands = ('add addremove annotate bundle export grep incoming init log'
' outgoing push tip verify convert email glog')
# hg commands that trigger expansion only when writing to working dir,
# not when reading filelog, and unexpand when reading from working dir
restricted = 'merge kwexpand kwshrink record qrecord resolve transplant'
# names of extensions using dorecord
recordextensions = 'record'
colortable = {
'kwfiles.enabled': 'green bold',
'kwfiles.deleted': 'cyan bold underline',
'kwfiles.enabledunknown': 'green',
'kwfiles.ignored': 'bold',
'kwfiles.ignoredunknown': 'none'
}
# date like in cvs' $Date
def utcdate(text):
''':utcdate: Date. Returns a UTC-date in this format: "2009/08/18 11:00:13".
'''
return util.datestr((util.parsedate(text)[0], 0), '%Y/%m/%d %H:%M:%S')
# date like in svn's $Date
def svnisodate(text):
''':svnisodate: Date. Returns a date in this format: "2009-08-18 13:00:13
+0200 (Tue, 18 Aug 2009)".
'''
return util.datestr(text, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
# date like in svn's $Id
def svnutcdate(text):
''':svnutcdate: Date. Returns a UTC-date in this format: "2009-08-18
11:00:13Z".
'''
return util.datestr((util.parsedate(text)[0], 0), '%Y-%m-%d %H:%M:%SZ')
templatefilters.filters.update({'utcdate': utcdate,
'svnisodate': svnisodate,
'svnutcdate': svnutcdate})
# make keyword tools accessible
kwtools = {'templater': None, 'hgcmd': ''}
def _defaultkwmaps(ui):
'''Returns default keywordmaps according to keywordset configuration.'''
templates = {
'Revision': '{node|short}',
'Author': '{author|user}',
}
kwsets = ({
'Date': '{date|utcdate}',
'RCSfile': '{file|basename},v',
'RCSFile': '{file|basename},v', # kept for backwards compatibility
# with hg-keyword
'Source': '{root}/{file},v',
'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
}, {
'Date': '{date|svnisodate}',
'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
'LastChangedRevision': '{node|short}',
'LastChangedBy': '{author|user}',
'LastChangedDate': '{date|svnisodate}',
})
templates.update(kwsets[ui.configbool('keywordset', 'svn')])
return templates
def _shrinktext(text, subfunc):
'''Helper for keyword expansion removal in text.
Depending on subfunc also returns number of substitutions.'''
return subfunc(r'$\1$', text)
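# e.g. with kwtemplater.rekwexp, '$Id: demo.txt,v 1a2b3c $' shrinks back to '$Id$'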
def _preselect(wstatus, changed):
'''Retrieves modified and added files from a working directory state
and returns the subset of each contained in given changed files
retrieved from a change context.'''
modified, added = wstatus[:2]
modified = [f for f in modified if f in changed]
added = [f for f in added if f in changed]
return modified, added
class kwtemplater(object):
'''
Sets up keyword templates, corresponding keyword regex, and
provides keyword substitution functions.
'''
def __init__(self, ui, repo, inc, exc):
self.ui = ui
self.repo = repo
self.match = match.match(repo.root, '', [], inc, exc)
self.restrict = kwtools['hgcmd'] in restricted.split()
self.postcommit = False
kwmaps = self.ui.configitems('keywordmaps')
if kwmaps: # override default templates
self.templates = dict((k, templater.parsestring(v, False))
for k, v in kwmaps)
else:
self.templates = _defaultkwmaps(self.ui)
@util.propertycache
def escape(self):
'''Returns bar-separated and escaped keywords.'''
return '|'.join(map(re.escape, self.templates.keys()))
@util.propertycache
def rekw(self):
'''Returns regex for unexpanded keywords.'''
return re.compile(r'\$(%s)\$' % self.escape)
@util.propertycache
def rekwexp(self):
'''Returns regex for expanded keywords.'''
return re.compile(r'\$(%s): [^$\n\r]*? \$' % self.escape)
def substitute(self, data, path, ctx, subfunc):
'''Replaces keywords in data with expanded template.'''
def kwsub(mobj):
kw = mobj.group(1)
ct = cmdutil.changeset_templater(self.ui, self.repo,
False, None, '', False)
ct.use_template(self.templates[kw])
self.ui.pushbuffer()
ct.show(ctx, root=self.repo.root, file=path)
ekw = templatefilters.firstline(self.ui.popbuffer())
return '$%s: %s $' % (kw, ekw)
return subfunc(kwsub, data)
def linkctx(self, path, fileid):
'''Similar to filelog.linkrev, but returns a changectx.'''
return self.repo.filectx(path, fileid=fileid).changectx()
def expand(self, path, node, data):
'''Returns data with keywords expanded.'''
if not self.restrict and self.match(path) and not util.binary(data):
ctx = self.linkctx(path, node)
return self.substitute(data, path, ctx, self.rekw.sub)
return data
def iskwfile(self, cand, ctx):
'''Returns subset of candidates which are configured for keyword
expansion but are not symbolic links.'''
return [f for f in cand if self.match(f) and 'l' not in ctx.flags(f)]
def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
'''Overwrites selected files expanding/shrinking keywords.'''
if self.restrict or lookup or self.postcommit: # exclude kw_copy
candidates = self.iskwfile(candidates, ctx)
if not candidates:
return
kwcmd = self.restrict and lookup # kwexpand/kwshrink
if self.restrict or expand and lookup:
mf = ctx.manifest()
if self.restrict or rekw:
re_kw = self.rekw
else:
re_kw = self.rekwexp
if expand:
msg = _('overwriting %s expanding keywords\n')
else:
msg = _('overwriting %s shrinking keywords\n')
for f in candidates:
if self.restrict:
data = self.repo.file(f).read(mf[f])
else:
data = self.repo.wread(f)
if util.binary(data):
continue
if expand:
if lookup:
ctx = self.linkctx(f, mf[f])
data, found = self.substitute(data, f, ctx, re_kw.subn)
elif self.restrict:
found = re_kw.search(data)
else:
data, found = _shrinktext(data, re_kw.subn)
if found:
self.ui.note(msg % f)
fp = self.repo.wopener(f, "wb", atomictemp=True)
fp.write(data)
fp.close()
if kwcmd:
self.repo.dirstate.normal(f)
elif self.postcommit:
self.repo.dirstate.normallookup(f)
def shrink(self, fname, text):
'''Returns text with all keyword substitutions removed.'''
if self.match(fname) and not util.binary(text):
return _shrinktext(text, self.rekwexp.sub)
return text
def shrinklines(self, fname, lines):
'''Returns lines with keyword substitutions removed.'''
if self.match(fname):
text = ''.join(lines)
if not util.binary(text):
return _shrinktext(text, self.rekwexp.sub).splitlines(True)
return lines
def wread(self, fname, data):
'''If in restricted mode returns data read from wdir with
keyword substitutions removed.'''
if self.restrict:
return self.shrink(fname, data)
return data
class kwfilelog(filelog.filelog):
'''
Subclass of filelog to hook into its read, add, cmp methods.
Keywords are "stored" unexpanded, and processed on reading.
'''
def __init__(self, opener, kwt, path):
super(kwfilelog, self).__init__(opener, path)
self.kwt = kwt
self.path = path
def read(self, node):
'''Expands keywords when reading filelog.'''
data = super(kwfilelog, self).read(node)
if self.renamed(node):
return data
return self.kwt.expand(self.path, node, data)
def add(self, text, meta, tr, link, p1=None, p2=None):
'''Removes keyword substitutions when adding to filelog.'''
text = self.kwt.shrink(self.path, text)
return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
def cmp(self, node, text):
'''Removes keyword substitutions for comparison.'''
text = self.kwt.shrink(self.path, text)
return super(kwfilelog, self).cmp(node, text)
def _status(ui, repo, wctx, kwt, *pats, **opts):
'''Bails out if [keyword] configuration is not active.
Returns status of working directory.'''
if kwt:
return repo.status(match=scmutil.match(wctx, pats, opts), clean=True,
unknown=opts.get('unknown') or opts.get('all'))
if ui.configitems('keyword'):
raise util.Abort(_('[keyword] patterns cannot match'))
raise util.Abort(_('no [keyword] patterns configured'))
def _kwfwrite(ui, repo, expand, *pats, **opts):
'''Selects files and passes them to kwtemplater.overwrite.'''
wctx = repo[None]
if len(wctx.parents()) > 1:
raise util.Abort(_('outstanding uncommitted merge'))
kwt = kwtools['templater']
wlock = repo.wlock()
try:
status = _status(ui, repo, wctx, kwt, *pats, **opts)
modified, added, removed, deleted, unknown, ignored, clean = status
if modified or added or removed or deleted:
raise util.Abort(_('outstanding uncommitted changes'))
kwt.overwrite(wctx, clean, True, expand)
finally:
wlock.release()
@command('kwdemo',
[('d', 'default', None, _('show default keyword template maps')),
('f', 'rcfile', '',
_('read maps from rcfile'), _('FILE'))],
_('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...'))
def demo(ui, repo, *args, **opts):
'''print [keywordmaps] configuration and an expansion example
Show current, custom, or default keyword template maps and their
expansions.
Extend the current configuration by specifying maps as arguments
and using -f/--rcfile to source an external hgrc file.
Use -d/--default to disable current configuration.
See :hg:`help templates` for information on templates and filters.
'''
def demoitems(section, items):
ui.write('[%s]\n' % section)
for k, v in sorted(items):
ui.write('%s = %s\n' % (k, v))
fn = 'demo.txt'
tmpdir = tempfile.mkdtemp('', 'kwdemo.')
ui.note(_('creating temporary repository at %s\n') % tmpdir)
repo = localrepo.localrepository(repo.baseui, tmpdir, True)
ui.setconfig('keyword', fn, '')
svn = ui.configbool('keywordset', 'svn')
# explicitly set keywordset for demo output
ui.setconfig('keywordset', 'svn', svn)
uikwmaps = ui.configitems('keywordmaps')
if args or opts.get('rcfile'):
ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
if uikwmaps:
ui.status(_('\textending current template maps\n'))
if opts.get('default') or not uikwmaps:
if svn:
ui.status(_('\toverriding default svn keywordset\n'))
else:
ui.status(_('\toverriding default cvs keywordset\n'))
if opts.get('rcfile'):
ui.readconfig(opts.get('rcfile'))
if args:
# simulate hgrc parsing
rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
fp = repo.opener('hgrc', 'w')
fp.writelines(rcmaps)
fp.close()
ui.readconfig(repo.join('hgrc'))
kwmaps = dict(ui.configitems('keywordmaps'))
elif opts.get('default'):
if svn:
ui.status(_('\n\tconfiguration using default svn keywordset\n'))
else:
ui.status(_('\n\tconfiguration using default cvs keywordset\n'))
kwmaps = _defaultkwmaps(ui)
if uikwmaps:
ui.status(_('\tdisabling current template maps\n'))
for k, v in kwmaps.iteritems():
ui.setconfig('keywordmaps', k, v)
else:
ui.status(_('\n\tconfiguration using current keyword template maps\n'))
if uikwmaps:
kwmaps = dict(uikwmaps)
else:
kwmaps = _defaultkwmaps(ui)
uisetup(ui)
reposetup(ui, repo)
ui.write('[extensions]\nkeyword =\n')
demoitems('keyword', ui.configitems('keyword'))
demoitems('keywordset', ui.configitems('keywordset'))
demoitems('keywordmaps', kwmaps.iteritems())
keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
repo.wopener.write(fn, keywords)
repo[None].add([fn])
ui.note(_('\nkeywords written to %s:\n') % fn)
ui.note(keywords)
repo.dirstate.setbranch('demobranch')
for name, cmd in ui.configitems('hooks'):
if name.split('.', 1)[0].find('commit') > -1:
repo.ui.setconfig('hooks', name, '')
msg = _('hg keyword configuration and expansion example')
ui.note("hg ci -m '%s'\n" % msg) # check-code-ignore
repo.commit(text=msg)
ui.status(_('\n\tkeywords expanded\n'))
ui.write(repo.wread(fn))
shutil.rmtree(tmpdir, ignore_errors=True)
@command('kwexpand', commands.walkopts, _('hg kwexpand [OPTION]... [FILE]...'))
def expand(ui, repo, *pats, **opts):
'''expand keywords in the working directory
Run after (re)enabling keyword expansion.
kwexpand refuses to run if given files contain local changes.
'''
# 3rd argument sets expansion to True
_kwfwrite(ui, repo, True, *pats, **opts)
@command('kwfiles',
[('A', 'all', None, _('show keyword status flags of all files')),
('i', 'ignore', None, _('show files excluded from expansion')),
('u', 'unknown', None, _('only show unknown (not tracked) files')),
] + commands.walkopts,
_('hg kwfiles [OPTION]... [FILE]...'))
def files(ui, repo, *pats, **opts):
'''show files configured for keyword expansion
List which files in the working directory are matched by the
[keyword] configuration patterns.
Useful to prevent inadvertent keyword expansion and to speed up
execution by including only files that are actual candidates for
expansion.
See :hg:`help keyword` on how to construct patterns both for
inclusion and exclusion of files.
With -A/--all and -v/--verbose the codes used to show the status
of files are::
K = keyword expansion candidate
k = keyword expansion candidate (not tracked)
I = ignored
i = ignored (not tracked)
'''
kwt = kwtools['templater']
wctx = repo[None]
status = _status(ui, repo, wctx, kwt, *pats, **opts)
cwd = pats and repo.getcwd() or ''
modified, added, removed, deleted, unknown, ignored, clean = status
files = []
if not opts.get('unknown') or opts.get('all'):
files = sorted(modified + added + clean)
kwfiles = kwt.iskwfile(files, wctx)
kwdeleted = kwt.iskwfile(deleted, wctx)
kwunknown = kwt.iskwfile(unknown, wctx)
if not opts.get('ignore') or opts.get('all'):
showfiles = kwfiles, kwdeleted, kwunknown
else:
showfiles = [], [], []
if opts.get('all') or opts.get('ignore'):
showfiles += ([f for f in files if f not in kwfiles],
[f for f in unknown if f not in kwunknown])
kwlabels = 'enabled deleted enabledunknown ignored ignoredunknown'.split()
kwstates = zip(kwlabels, 'K!kIi', showfiles)
fm = ui.formatter('kwfiles', opts)
fmt = '%.0s%s\n'
if opts.get('all') or ui.verbose:
fmt = '%s %s\n'
for kwstate, char, filenames in kwstates:
label = 'kwfiles.' + kwstate
for f in filenames:
fm.startitem()
fm.write('kwstatus path', fmt, char,
repo.pathto(f, cwd), label=label)
fm.end()
@command('kwshrink', commands.walkopts, _('hg kwshrink [OPTION]... [FILE]...'))
def shrink(ui, repo, *pats, **opts):
'''revert expanded keywords in the working directory
Must be run before changing/disabling active keywords.
kwshrink refuses to run if given files contain local changes.
'''
# 3rd argument sets expansion to False
_kwfwrite(ui, repo, False, *pats, **opts)
def uisetup(ui):
''' Monkeypatches dispatch._parse to retrieve user command.'''
def kwdispatch_parse(orig, ui, args):
'''Monkeypatch dispatch._parse to obtain running hg command.'''
cmd, func, args, options, cmdoptions = orig(ui, args)
kwtools['hgcmd'] = cmd
return cmd, func, args, options, cmdoptions
extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
def reposetup(ui, repo):
'''Sets up repo as kwrepo for keyword substitution.
Overrides file method to return kwfilelog instead of filelog
if file matches user configuration.
Wraps commit to overwrite configured files with updated
keyword substitutions.
Monkeypatches patch and webcommands.'''
try:
if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
or '.hg' in util.splitpath(repo.root)
or repo._url.startswith('bundle:')):
return
except AttributeError:
pass
inc, exc = [], ['.hg*']
for pat, opt in ui.configitems('keyword'):
if opt != 'ignore':
inc.append(pat)
else:
exc.append(pat)
if not inc:
return
kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
class kwrepo(repo.__class__):
def file(self, f):
if f[0] == '/':
f = f[1:]
return kwfilelog(self.sopener, kwt, f)
def wread(self, filename):
data = super(kwrepo, self).wread(filename)
return kwt.wread(filename, data)
def commit(self, *args, **opts):
# use custom commitctx for user commands
# other extensions can still wrap repo.commitctx directly
self.commitctx = self.kwcommitctx
try:
return super(kwrepo, self).commit(*args, **opts)
finally:
del self.commitctx
def kwcommitctx(self, ctx, error=False):
n = super(kwrepo, self).commitctx(ctx, error)
# no lock needed, only called from repo.commit() which already locks
if not kwt.postcommit:
restrict = kwt.restrict
kwt.restrict = True
kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
False, True)
kwt.restrict = restrict
return n
def rollback(self, dryrun=False, force=False):
wlock = self.wlock()
try:
if not dryrun:
changed = self['.'].files()
ret = super(kwrepo, self).rollback(dryrun, force)
if not dryrun:
ctx = self['.']
modified, added = _preselect(self[None].status(), changed)
kwt.overwrite(ctx, modified, True, True)
kwt.overwrite(ctx, added, True, False)
return ret
finally:
wlock.release()
# monkeypatches
def kwpatchfile_init(orig, self, ui, gp, backend, store, eolmode=None):
'''Monkeypatch/wrap patch.patchfile.__init__ to avoid
rejects or conflicts due to expanded keywords in working dir.'''
orig(self, ui, gp, backend, store, eolmode)
# shrink keywords read from working dir
self.lines = kwt.shrinklines(self.fname, self.lines)
def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
opts=None, prefix=''):
'''Monkeypatch patch.diff to avoid expansion.'''
kwt.restrict = True
return orig(repo, node1, node2, match, changes, opts, prefix)
def kwweb_skip(orig, web, req, tmpl):
'''Wraps webcommands.x turning off keyword expansion.'''
kwt.match = util.never
return orig(web, req, tmpl)
def kw_amend(orig, ui, repo, commitfunc, old, extra, pats, opts):
'''Wraps cmdutil.amend expanding keywords after amend.'''
wlock = repo.wlock()
try:
kwt.postcommit = True
newid = orig(ui, repo, commitfunc, old, extra, pats, opts)
if newid != old.node():
ctx = repo[newid]
kwt.restrict = True
kwt.overwrite(ctx, ctx.files(), False, True)
kwt.restrict = False
return newid
finally:
wlock.release()
def kw_copy(orig, ui, repo, pats, opts, rename=False):
'''Wraps cmdutil.copy so that copy/rename destinations do not
contain expanded keywords.
Note that the source of a regular file destination may also be a
symlink:
hg cp sym x -> x is symlink
cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
For the latter we have to follow the symlink to find out whether its
target is configured for expansion and we therefore must unexpand the
keywords in the destination.'''
wlock = repo.wlock()
try:
orig(ui, repo, pats, opts, rename)
if opts.get('dry_run'):
return
wctx = repo[None]
cwd = repo.getcwd()
def haskwsource(dest):
'''Returns true if dest is a regular file and configured for
expansion or a symlink which points to a file configured for
expansion. '''
source = repo.dirstate.copied(dest)
if 'l' in wctx.flags(source):
source = scmutil.canonpath(repo.root, cwd,
os.path.realpath(source))
return kwt.match(source)
candidates = [f for f in repo.dirstate.copies() if
'l' not in wctx.flags(f) and haskwsource(f)]
kwt.overwrite(wctx, candidates, False, False)
finally:
wlock.release()
def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
'''Wraps record.dorecord expanding keywords after recording.'''
wlock = repo.wlock()
try:
# record returns 0 even when nothing has changed
# therefore compare nodes before and after
kwt.postcommit = True
ctx = repo['.']
wstatus = repo[None].status()
ret = orig(ui, repo, commitfunc, *pats, **opts)
recctx = repo['.']
if ctx != recctx:
modified, added = _preselect(wstatus, recctx.files())
kwt.restrict = False
kwt.overwrite(recctx, modified, False, True)
kwt.overwrite(recctx, added, False, True, True)
kwt.restrict = True
return ret
finally:
wlock.release()
def kwfilectx_cmp(orig, self, fctx):
# keyword affects data size, comparing wdir and filelog size does
# not make sense
if (fctx._filerev is None and
(self._repo._encodefilterpats or
kwt.match(fctx.path()) and 'l' not in fctx.flags() or
self.size() - 4 == fctx.size()) or
self.size() == fctx.size()):
return self._filelog.cmp(self._filenode, fctx.data())
return True
extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
extensions.wrapfunction(patch, 'diff', kw_diff)
extensions.wrapfunction(cmdutil, 'amend', kw_amend)
extensions.wrapfunction(cmdutil, 'copy', kw_copy)
for c in 'annotate changeset rev filediff diff'.split():
extensions.wrapfunction(webcommands, c, kwweb_skip)
for name in recordextensions.split():
try:
record = extensions.find(name)
extensions.wrapfunction(record, 'dorecord', kw_dorecord)
except KeyError:
pass
repo.__class__ = kwrepo
| gpl-2.0 |
grimmjow8/ansible | lib/ansible/modules/cloud/amazon/rds_subnet_group.py | 25 | 5369 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: rds_subnet_group
version_added: "1.5"
short_description: manage RDS database subnet groups
description:
- Creates, modifies, and deletes RDS database subnet groups. This module has a dependency on python-boto >= 2.5.
options:
state:
description:
- Specifies whether the subnet should be present or absent.
required: true
default: present
aliases: []
choices: [ 'present' , 'absent' ]
name:
description:
- Database subnet group identifier.
required: true
default: null
aliases: []
description:
description:
- Database subnet group description. Only set when a new group is added.
required: false
default: null
aliases: []
subnets:
description:
- List of subnet IDs that make up the database subnet group.
required: false
default: null
aliases: []
author: "Scott Anderson (@tastychutney)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Add or change a subnet group
- rds_subnet_group:
state: present
name: norwegian-blue
description: My Fancy Ex Parrot Subnet Group
subnets:
- subnet-aaaaaaaa
- subnet-bbbbbbbb
# Remove a subnet group
- rds_subnet_group:
state: absent
name: norwegian-blue
'''
try:
import boto.rds
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state = dict(required=True, choices=['present', 'absent']),
name = dict(required=True),
description = dict(required=False),
subnets = dict(required=False, type='list'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
state = module.params.get('state')
group_name = module.params.get('name').lower()
group_description = module.params.get('description')
group_subnets = module.params.get('subnets') or {}
if state == 'present':
for required in ['name', 'description', 'subnets']:
if not module.params.get(required):
module.fail_json(msg = str("Parameter %s required for state='present'" % required))
else:
for not_allowed in ['description', 'subnets']:
if module.params.get(not_allowed):
module.fail_json(msg = str("Parameter %s not allowed for state='absent'" % not_allowed))
# Retrieve any AWS settings from the environment.
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)
if not region:
module.fail_json(msg = str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))
try:
conn = connect_to_aws(boto.rds, region, **aws_connect_kwargs)
except boto.exception.BotoServerError as e:
module.fail_json(msg = e.error_message)
try:
changed = False
exists = False
try:
matching_groups = conn.get_all_db_subnet_groups(group_name, max_records=100)
exists = len(matching_groups) > 0
except BotoServerError as e:
if e.error_code != 'DBSubnetGroupNotFoundFault':
module.fail_json(msg = e.error_message)
if state == 'absent':
if exists:
conn.delete_db_subnet_group(group_name)
changed = True
else:
if not exists:
new_group = conn.create_db_subnet_group(group_name, desc=group_description, subnet_ids=group_subnets)
changed = True
else:
# Sort the subnet groups before we compare them
matching_groups[0].subnet_ids.sort()
group_subnets.sort()
if ( (matching_groups[0].name != group_name) or (matching_groups[0].description != group_description) or (matching_groups[0].subnet_ids != group_subnets) ):
changed_group = conn.modify_db_subnet_group(group_name, description=group_description, subnet_ids=group_subnets)
changed = True
except BotoServerError as e:
module.fail_json(msg = e.error_message)
module.exit_json(changed=changed)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
Drahflow/lymph | lymph/cli/testing.py | 10 | 5373 | import collections
from contextlib import contextmanager
import sys
import os
import tempfile
import textwrap
from kazoo.client import KazooClient
from kazoo.handlers.gevent import SequentialGeventHandler
from pkg_resources import load_entry_point
from six import StringIO, integer_types
import yaml
from lymph.discovery.zookeeper import ZookeeperServiceRegistry
from lymph.events.null import NullEventSystem
from lymph.testing import LymphIntegrationTestCase
@contextmanager
def capture_output():
real_stdout = sys.stdout
real_stderr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()
try:
yield sys.stdout, sys.stderr
finally:
sys.stdout = real_stdout
sys.stderr = real_stderr
class CliWrapper(object):
Result = collections.namedtuple('CMDLineResult', 'returncode stdout stderr')
def __init__(self, config):
handle, self.config_file_name = tempfile.mkstemp()
with open(self.config_file_name, 'w') as f:
f.write(yaml.dump(config))
self.entry_point = load_entry_point('lymph', 'console_scripts', 'lymph')
def tear_down(self):
os.remove(self.config_file_name)
def __call__(self, cmd, config=True):
with capture_output() as (stdout, stderr):
if config:
cmd = cmd + ['--config=%s' % self.config_file_name]
try:
returncode = self.entry_point(cmd)
except SystemExit as ex:
# Docopt tries to exit on its own unfortunately
returncode = (ex.args[0] or 0) if ex.args else 0
if not isinstance(returncode, integer_types):
# According to sys.exit doc, any other object beside
# an integer or None result to an exit code equal to 1.
returncode = 1
return self.Result(
returncode or 0, stdout.getvalue(), stderr.getvalue())
class CliTestMixin(object):
cli_config = {}
_help_output = None
def setUp(self):
self.__clis = []
super(CliTestMixin, self).setUp()
@property
def cli(self):
cli = CliWrapper(self.cli_config)
self.__clis.append(cli)
return cli
def tearDown(self):
for cli in self.__clis:
cli.tear_down()
super(CliTestMixin, self).tearDown()
def assert_lines_equal(self, cmd, lines, config=True):
expected_lines = set(line for line in textwrap.dedent(lines).splitlines() if line.strip())
result = self.cli(cmd, config=config)
self.assertEqual(result.returncode, 0)
self.assertEqual(set(result.stdout.splitlines()), expected_lines)
def assert_first_line_equals(self, cmd, line, config=True):
result = self.cli(cmd, config=config)
self.assertEqual(result.returncode, 0)
self.assertEqual(result.stdout.splitlines()[0].strip(), line)
def assert_command_appears_in_command_list(self):
result = self.cli(['list'])
self.assertEqual(result.returncode, 0)
self.assertIn(self.command_name, result.stdout)
def assert_help_contains_usage_information(self):
output = self._get_help_output()
self.assertIn('Usage', output)
self.assertIn(self.command_name, output)
def assert_help_contains_parameter(self, parameter, default=None):
self.assert_help_contains(parameter)
if default is not None:
self.assert_help_contains(default)
def assert_help_contains(self, text):
self.assertIn(text, self._get_help_output())
def _get_help_output(self):
if self._help_output is None:
result = self.cli([self.command_name, '--help'])
self._help_output = result.stdout
return self._help_output
class CliIntegrationTestCase(CliTestMixin, LymphIntegrationTestCase):
use_zookeeper = True
def setUp(self):
super(CliIntegrationTestCase, self).setUp()
client = KazooClient(
hosts=self.hosts,
handler=SequentialGeventHandler(),
)
self.registry = ZookeeperServiceRegistry(client)
self.events = NullEventSystem()
self.cli_config = {
"container": {
"registry": {
"class": "lymph.discovery.zookeeper:ZookeeperServiceRegistry",
"zkclient": 'dep:kazoo',
},
"events": {
"class": "lymph.events.null:NullEventSystem",
},
},
"dependencies": {
"kazoo": {
"class": "kazoo.client:KazooClient",
"hosts": self.hosts,
}
}
}
class CommandFactory(object):
"""
Encapsulates the knowledge how to create a command instance.
Intended use is to support smaller unit tests which just need an instance
of a command class to try out some method.
It only supports to pass in parameters as keyword parameters into
the command constructor.
"""
def __init__(self, command_class):
self.command_class = command_class
def __call__(self, **kwargs):
kwargs.setdefault('args', {})
kwargs.setdefault('config', {})
kwargs.setdefault('terminal', None)
return self.command_class(**kwargs)
| apache-2.0 |
mzhaom/grpc | src/python/src/grpc/framework/face/future_invocation_asynchronous_event_service_test.py | 12 | 1982 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""One of the tests of the Face layer of RPC Framework."""
import unittest
from grpc.framework.face import _test_case
from grpc.framework.face.testing import future_invocation_asynchronous_event_service_test_case as test_case
class FutureInvocationAsynchronousEventServiceTest(
_test_case.FaceTestCase,
test_case.FutureInvocationAsynchronousEventServiceTestCase,
unittest.TestCase):
pass
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
GeoCat/QGIS | python/plugins/processing/gui/menus.py | 5 | 11535 | import os
from qgis.PyQt.QtCore import QCoreApplication
from qgis.PyQt.QtWidgets import QAction, QMenu
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtWidgets import QApplication
from processing.core.ProcessingConfig import ProcessingConfig, Setting
from processing.gui.MessageDialog import MessageDialog
from processing.gui.AlgorithmDialog import AlgorithmDialog
from qgis.utils import iface
from qgis.core import QgsApplication
from processing.gui.MessageBarProgress import MessageBarProgress
from processing.gui.AlgorithmExecutor import execute
from processing.gui.Postprocessing import handleAlgorithmResults
from processing.core.Processing import Processing
from processing.tools import dataobjects
algorithmsToolbar = None
menusSettingsGroup = 'Menus'
defaultMenuEntries = {}
vectorMenu = QApplication.translate('MainWindow', 'Vect&or')
analysisToolsMenu = vectorMenu + "/" + Processing.tr('&Analysis Tools')
defaultMenuEntries.update({'qgis:distancematrix': analysisToolsMenu,
'qgis:sumlinelengths': analysisToolsMenu,
'qgis:pointsinpolygon': analysisToolsMenu,
'qgis:countpointsinpolygon': analysisToolsMenu,
'qgis:listuniquevalues': analysisToolsMenu,
'qgis:basicstatisticsforfields': analysisToolsMenu,
'qgis:nearestneighbouranalysis': analysisToolsMenu,
'qgis:meancoordinates': analysisToolsMenu,
'qgis:lineintersections': analysisToolsMenu})
researchToolsMenu = vectorMenu + "/" + Processing.tr('&Research Tools')
defaultMenuEntries.update({'qgis:randomselection': researchToolsMenu,
'qgis:randomselectionwithinsubsets': researchToolsMenu,
'qgis:randompointsinextent': researchToolsMenu,
'qgis:randompointsinlayerbounds': researchToolsMenu,
'qgis:randompointsinsidepolygonsfixed': researchToolsMenu,
'qgis:randompointsinsidepolygonsvariable': researchToolsMenu,
'qgis:regularpoints': researchToolsMenu,
'qgis:vectorgrid': researchToolsMenu,
'qgis:selectbylocation': researchToolsMenu,
'qgis:polygonfromlayerextent': researchToolsMenu})
geoprocessingToolsMenu = vectorMenu + "/" + Processing.tr('&Geoprocessing Tools')
defaultMenuEntries.update({'qgis:convexhull': geoprocessingToolsMenu,
'qgis:fixeddistancebuffer': geoprocessingToolsMenu,
'qgis:variabledistancebuffer': geoprocessingToolsMenu,
'qgis:intersection': geoprocessingToolsMenu,
'qgis:union': geoprocessingToolsMenu,
'qgis:symmetricaldifference': geoprocessingToolsMenu,
'native:clip': geoprocessingToolsMenu,
'qgis:difference': geoprocessingToolsMenu,
'qgis:dissolve': geoprocessingToolsMenu,
'qgis:eliminateselectedpolygons': geoprocessingToolsMenu})
geometryToolsMenu = vectorMenu + "/" + Processing.tr('G&eometry Tools')
defaultMenuEntries.update({'qgis:checkvalidity': geometryToolsMenu,
'qgis:exportaddgeometrycolumns': geometryToolsMenu,
'qgis:centroids': geometryToolsMenu,
'qgis:delaunaytriangulation': geometryToolsMenu,
'qgis:voronoipolygons': geometryToolsMenu,
'qgis:simplifygeometries': geometryToolsMenu,
'qgis:densifygeometries': geometryToolsMenu,
'qgis:multiparttosingleparts': geometryToolsMenu,
'qgis:singlepartstomultipart': geometryToolsMenu,
'qgis:polygonstolines': geometryToolsMenu,
'qgis:linestopolygons': geometryToolsMenu,
'qgis:extractnodes': geometryToolsMenu})
managementToolsMenu = vectorMenu + "/" + Processing.tr('&Data Management Tools')
defaultMenuEntries.update({'qgis:definecurrentprojection': managementToolsMenu,
'qgis:joinattributesbylocation': managementToolsMenu,
'qgis:splitvectorlayer': managementToolsMenu,
'qgis:mergevectorlayers': managementToolsMenu,
'qgis:createspatialindex': managementToolsMenu})
rasterMenu = Processing.tr('&Raster')
projectionsMenu = rasterMenu + "/" + Processing.tr('Projections')
defaultMenuEntries.update({'gdal:warpreproject': projectionsMenu,
'gdal:assignprojection': projectionsMenu,
'gdal:extractprojection': projectionsMenu})
conversionMenu = rasterMenu + "/" + Processing.tr('Conversion')
defaultMenuEntries.update({'gdal:rasterize': conversionMenu,
'gdal:rasterize_over': conversionMenu,
'gdal:polygonize': conversionMenu,
'gdal:translate': conversionMenu,
'gdal:rgbtopct': conversionMenu,
'gdal:pcttorgb': conversionMenu})
extractionMenu = rasterMenu + "/" + Processing.tr('Extraction')
defaultMenuEntries.update({'gdal:contour': extractionMenu,
'gdal:cliprasterbyextent': extractionMenu,
'gdal:cliprasterbymasklayer': extractionMenu})
analysisMenu = rasterMenu + "/" + Processing.tr('Analysis')
defaultMenuEntries.update({'gdal:sieve': analysisMenu,
'gdal:nearblack': analysisMenu,
'gdal:fillnodata': analysisMenu,
'gdal:proximity': analysisMenu,
'gdal:griddatametrics': analysisMenu,
'gdal:gridaverage': analysisMenu,
'gdal:gridinvdist': analysisMenu,
'gdal:gridnearestneighbor': analysisMenu,
'gdal:aspect': analysisMenu,
'gdal:hillshade': analysisMenu,
'gdal:roughness': analysisMenu,
'gdal:slope': analysisMenu,
'gdal:tpi': analysisMenu,
'gdal:tri': analysisMenu})
miscMenu = rasterMenu + "/" + Processing.tr('Miscellaneous')
defaultMenuEntries.update({'gdal:buildvirtualraster': miscMenu,
'gdal:merge': miscMenu,
'gdal:rasterinfo': miscMenu,
'gdal:overviews': miscMenu,
'gdal:tileindex': miscMenu})
def initializeMenus():
for provider in QgsApplication.processingRegistry().providers():
for alg in provider.algorithms():
d = defaultMenuEntries.get(alg.id(), "")
setting = Setting(menusSettingsGroup, "MENU_" + alg.id(),
"Menu path", d)
ProcessingConfig.addSetting(setting)
setting = Setting(menusSettingsGroup, "BUTTON_" + alg.id(),
"Add button", False)
ProcessingConfig.addSetting(setting)
setting = Setting(menusSettingsGroup, "ICON_" + alg.id(),
"Icon", "", valuetype=Setting.FILE)
ProcessingConfig.addSetting(setting)
ProcessingConfig.readSettings()
def updateMenus():
removeMenus()
QCoreApplication.processEvents()
createMenus()
def createMenus():
for alg in QgsApplication.processingRegistry().algorithms():
menuPath = ProcessingConfig.getSetting("MENU_" + alg.id())
addButton = ProcessingConfig.getSetting("BUTTON_" + alg.id())
icon = ProcessingConfig.getSetting("ICON_" + alg.id())
if icon and os.path.exists(icon):
icon = QIcon(icon)
else:
icon = None
if menuPath:
paths = menuPath.split("/")
addAlgorithmEntry(alg, paths[0], paths[-1], addButton=addButton, icon=icon)
def removeMenus():
for alg in QgsApplication.processingRegistry().algorithms():
menuPath = ProcessingConfig.getSetting("MENU_" + alg.id())
if menuPath:
paths = menuPath.split("/")
removeAlgorithmEntry(alg, paths[0], paths[-1])
def addAlgorithmEntry(alg, menuName, submenuName, actionText=None, icon=None, addButton=False):
action = QAction(icon or alg.icon(), actionText or alg.displayName(), iface.mainWindow())
action.triggered.connect(lambda: _executeAlgorithm(alg))
action.setObjectName("mProcessingUserMenu_%s" % alg.id())
if menuName:
menu = getMenu(menuName, iface.mainWindow().menuBar())
submenu = getMenu(submenuName, menu)
submenu.addAction(action)
if addButton:
global algorithmsToolbar
if algorithmsToolbar is None:
algorithmsToolbar = iface.addToolBar('ProcessingAlgorithms')
algorithmsToolbar.addAction(action)
def removeAlgorithmEntry(alg, menuName, submenuName, actionText=None, delButton=True):
if menuName:
menu = getMenu(menuName, iface.mainWindow().menuBar())
subMenu = getMenu(submenuName, menu)
action = findAction(subMenu.actions(), alg, actionText)
if action is not None:
subMenu.removeAction(action)
if len(subMenu.actions()) == 0:
subMenu.deleteLater()
if delButton:
global algorithmsToolbar
if algorithmsToolbar is not None:
action = findAction(algorithmsToolbar.actions(), alg, actionText)
if action is not None:
algorithmsToolbar.removeAction(action)
def _executeAlgorithm(alg):
ok, message = alg.canExecute()
if not ok:
dlg = MessageDialog()
dlg.setTitle(Processing.tr('Missing dependency'))
dlg.setMessage(
Processing.tr('<h3>Missing dependency. This algorithm cannot '
'be run :-( </h3>\n{0}').format(message))
dlg.exec_()
return
if (alg.countVisibleParameters()) > 0:
dlg = alg.createCustomParametersWidget(None)
if not dlg:
dlg = AlgorithmDialog(alg)
canvas = iface.mapCanvas()
prevMapTool = canvas.mapTool()
dlg.show()
dlg.exec_()
# have to manually delete the dialog - otherwise it's owned by the
# iface mainWindow and never deleted
del dlg
if canvas.mapTool() != prevMapTool:
try:
canvas.mapTool().reset()
except:
pass
canvas.setMapTool(prevMapTool)
else:
feedback = MessageBarProgress()
context = dataobjects.createContext(feedback)
parameters = {}
ret, results = execute(alg, parameters, context, feedback)
handleAlgorithmResults(alg, context, feedback)
feedback.close()
def getMenu(name, parent):
menus = [c for c in parent.children() if isinstance(c, QMenu) and c.title() == name]
if menus:
return menus[0]
else:
return parent.addMenu(name)
def findAction(actions, alg, actionText=None):
for action in actions:
if action.text() in [actionText, alg.displayName(), alg.name()]:
return action
return None
| gpl-2.0 |
steventimberman/masterDebater | env/lib/python2.7/site-packages/django/db/models/sql/constants.py | 633 | 1039 | """
Constants specific to the SQL storage portion of the ORM.
"""
import re
# Valid query types (a set is used for speedy lookups). These are (currently)
# considered SQL-specific; other storage systems may choose to use different
# lookup types.
QUERY_TERMS = {
'exact', 'iexact', 'contains', 'icontains', 'gt', 'gte', 'lt', 'lte', 'in',
'startswith', 'istartswith', 'endswith', 'iendswith', 'range', 'year',
'month', 'day', 'week_day', 'hour', 'minute', 'second', 'isnull', 'search',
'regex', 'iregex',
}
# Size of each "chunk" for get_iterator calls.
# Larger values are slightly faster at the expense of more storage space.
GET_ITERATOR_CHUNK_SIZE = 100
# Namedtuples for sql.* internal use.
# How many results to expect from a cursor.execute call
MULTI = 'multi'
SINGLE = 'single'
CURSOR = 'cursor'
NO_RESULTS = 'no results'
ORDER_PATTERN = re.compile(r'\?|[-+]?[.\w]+$')
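# Illustrative examples (not part of the original module) of what ORDER_PATTERN
# accepts: "?" (random ordering) or an optionally signed dotted/word path.
#   ORDER_PATTERN.match('-created')      -> match
#   ORDER_PATTERN.match('author.name')   -> match
#   ORDER_PATTERN.match('name; SELECT')  -> None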
ORDER_DIR = {
'ASC': ('ASC', 'DESC'),
'DESC': ('DESC', 'ASC'),
}
# SQL join types.
INNER = 'INNER JOIN'
LOUTER = 'LEFT OUTER JOIN'
| mit |
thnee/ansible | test/units/modules/network/fortios/test_fortios_system_switch_interface.py | 21 | 9741 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_system_switch_interface
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_switch_interface.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_system_switch_interface_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_switch_interface_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_switch_interface_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'switch-interface', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_switch_interface_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
delete_method_mock.assert_called_with('system', 'switch-interface', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_switch_interface_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_system_switch_interface_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_switch_interface': {
'random_attribute_not_valid': 'tag',
'intra_switch_policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span_dest_port': 'test_value_6',
'span_direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_switch_interface.fortios_system(input_data, fos_instance)
expected_data = {
'intra-switch-policy': 'implicit',
'name': 'default_name_4',
'span': 'disable',
'span-dest-port': 'test_value_6',
'span-direction': 'rx',
'type': 'switch',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'switch-interface', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 |
2014c2g1/c2g1 | exts/w2/static/Brython2.0.0-20140209-164925/Lib/unittest/test/test_suite.py | 791 | 12066 | import unittest
import sys
from .support import LoggingResult, TestEquality
### Support code for Test_TestSuite
################################################################
class Test(object):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def test_3(self): pass
def runTest(self): pass
def _mk_TestSuite(*names):
return unittest.TestSuite(Test.Foo(n) for n in names)
################################################################
class Test_TestSuite(unittest.TestCase, TestEquality):
### Set up attributes needed by inherited tests
################################################################
# Used by TestEquality.test_eq
eq_pairs = [(unittest.TestSuite(), unittest.TestSuite())
,(unittest.TestSuite(), unittest.TestSuite([]))
,(_mk_TestSuite('test_1'), _mk_TestSuite('test_1'))]
# Used by TestEquality.test_ne
ne_pairs = [(unittest.TestSuite(), _mk_TestSuite('test_1'))
,(unittest.TestSuite([]), _mk_TestSuite('test_1'))
,(_mk_TestSuite('test_1', 'test_2'), _mk_TestSuite('test_1', 'test_3'))
,(_mk_TestSuite('test_1'), _mk_TestSuite('test_2'))]
################################################################
### /Set up attributes needed by inherited tests
### Tests for TestSuite.__init__
################################################################
# "class TestSuite([tests])"
#
# The tests iterable should be optional
def test_init__tests_optional(self):
suite = unittest.TestSuite()
self.assertEqual(suite.countTestCases(), 0)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# TestSuite should deal with empty tests iterables by allowing the
# creation of an empty suite
def test_init__empty_tests(self):
suite = unittest.TestSuite([])
self.assertEqual(suite.countTestCases(), 0)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# TestSuite should allow any iterable to provide tests
def test_init__tests_from_any_iterable(self):
def tests():
yield unittest.FunctionTestCase(lambda: None)
yield unittest.FunctionTestCase(lambda: None)
suite_1 = unittest.TestSuite(tests())
self.assertEqual(suite_1.countTestCases(), 2)
suite_2 = unittest.TestSuite(suite_1)
self.assertEqual(suite_2.countTestCases(), 2)
suite_3 = unittest.TestSuite(set(suite_1))
self.assertEqual(suite_3.countTestCases(), 2)
# "class TestSuite([tests])"
# ...
# "If tests is given, it must be an iterable of individual test cases
# or other test suites that will be used to build the suite initially"
#
# Does TestSuite() also allow other TestSuite() instances to be present
# in the tests iterable?
def test_init__TestSuite_instances_in_tests(self):
def tests():
ftc = unittest.FunctionTestCase(lambda: None)
yield unittest.TestSuite([ftc])
yield unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite(tests())
self.assertEqual(suite.countTestCases(), 2)
################################################################
### /Tests for TestSuite.__init__
# Container types should support the iter protocol
def test_iter(self):
test1 = unittest.FunctionTestCase(lambda: None)
test2 = unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite((test1, test2))
self.assertEqual(list(suite), [test1, test2])
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Presumably an empty TestSuite returns 0?
def test_countTestCases_zero_simple(self):
suite = unittest.TestSuite()
self.assertEqual(suite.countTestCases(), 0)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Presumably an empty TestSuite (even if it contains other empty
# TestSuite instances) returns 0?
def test_countTestCases_zero_nested(self):
class Test1(unittest.TestCase):
def test(self):
pass
suite = unittest.TestSuite([unittest.TestSuite()])
self.assertEqual(suite.countTestCases(), 0)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
def test_countTestCases_simple(self):
test1 = unittest.FunctionTestCase(lambda: None)
test2 = unittest.FunctionTestCase(lambda: None)
suite = unittest.TestSuite((test1, test2))
self.assertEqual(suite.countTestCases(), 2)
# "Return the number of tests represented by the this test object.
# ...this method is also implemented by the TestSuite class, which can
# return larger [greater than 1] values"
#
# Make sure this holds for nested TestSuite instances, too
def test_countTestCases_nested(self):
class Test1(unittest.TestCase):
def test1(self): pass
def test2(self): pass
test2 = unittest.FunctionTestCase(lambda: None)
test3 = unittest.FunctionTestCase(lambda: None)
child = unittest.TestSuite((Test1('test2'), test2))
parent = unittest.TestSuite((test3, child, Test1('test1')))
self.assertEqual(parent.countTestCases(), 4)
# "Run the tests associated with this suite, collecting the result into
# the test result object passed as result."
#
# And if there are no tests? What then?
def test_run__empty_suite(self):
events = []
result = LoggingResult(events)
suite = unittest.TestSuite()
suite.run(result)
self.assertEqual(events, [])
# "Note that unlike TestCase.run(), TestSuite.run() requires the
# "result object to be passed in."
def test_run__requires_result(self):
suite = unittest.TestSuite()
try:
suite.run()
except TypeError:
pass
else:
self.fail("Failed to raise TypeError")
# "Run the tests associated with this suite, collecting the result into
# the test result object passed as result."
def test_run(self):
events = []
result = LoggingResult(events)
class LoggingCase(unittest.TestCase):
def run(self, result):
events.append('run %s' % self._testMethodName)
def test1(self): pass
def test2(self): pass
tests = [LoggingCase('test1'), LoggingCase('test2')]
unittest.TestSuite(tests).run(result)
self.assertEqual(events, ['run test1', 'run test2'])
# "Add a TestCase ... to the suite"
def test_addTest__TestCase(self):
class Foo(unittest.TestCase):
def test(self): pass
test = Foo('test')
suite = unittest.TestSuite()
suite.addTest(test)
self.assertEqual(suite.countTestCases(), 1)
self.assertEqual(list(suite), [test])
# "Add a ... TestSuite to the suite"
def test_addTest__TestSuite(self):
class Foo(unittest.TestCase):
def test(self): pass
suite_2 = unittest.TestSuite([Foo('test')])
suite = unittest.TestSuite()
suite.addTest(suite_2)
self.assertEqual(suite.countTestCases(), 1)
self.assertEqual(list(suite), [suite_2])
# "Add all the tests from an iterable of TestCase and TestSuite
# instances to this test suite."
#
# "This is equivalent to iterating over tests, calling addTest() for
# each element"
def test_addTests(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
test_1 = Foo('test_1')
test_2 = Foo('test_2')
inner_suite = unittest.TestSuite([test_2])
def gen():
yield test_1
yield test_2
yield inner_suite
suite_1 = unittest.TestSuite()
suite_1.addTests(gen())
self.assertEqual(list(suite_1), list(gen()))
# "This is equivalent to iterating over tests, calling addTest() for
# each element"
suite_2 = unittest.TestSuite()
for t in gen():
suite_2.addTest(t)
self.assertEqual(suite_1, suite_2)
# "Add all the tests from an iterable of TestCase and TestSuite
# instances to this test suite."
#
# What happens if it doesn't get an iterable?
def test_addTest__noniterable(self):
suite = unittest.TestSuite()
try:
suite.addTests(5)
except TypeError:
pass
else:
self.fail("Failed to raise TypeError")
def test_addTest__noncallable(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTest, 5)
def test_addTest__casesuiteclass(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTest, Test_TestSuite)
self.assertRaises(TypeError, suite.addTest, unittest.TestSuite)
def test_addTests__string(self):
suite = unittest.TestSuite()
self.assertRaises(TypeError, suite.addTests, "foo")
def test_function_in_suite(self):
def f(_):
pass
suite = unittest.TestSuite()
suite.addTest(f)
# when the bug is fixed this line will not crash
suite.run(unittest.TestResult())
def test_basetestsuite(self):
class Test(unittest.TestCase):
wasSetUp = False
wasTornDown = False
@classmethod
def setUpClass(cls):
cls.wasSetUp = True
@classmethod
def tearDownClass(cls):
cls.wasTornDown = True
def testPass(self):
pass
def testFail(self):
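                # deliberately references an undefined name so that running the
                # suite records exactly one error (see result.errors below)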
fail
class Module(object):
wasSetUp = False
wasTornDown = False
@staticmethod
def setUpModule():
Module.wasSetUp = True
@staticmethod
def tearDownModule():
Module.wasTornDown = True
Test.__module__ = 'Module'
sys.modules['Module'] = Module
self.addCleanup(sys.modules.pop, 'Module')
suite = unittest.BaseTestSuite()
suite.addTests([Test('testPass'), Test('testFail')])
self.assertEqual(suite.countTestCases(), 2)
result = unittest.TestResult()
suite.run(result)
self.assertFalse(Module.wasSetUp)
self.assertFalse(Module.wasTornDown)
self.assertFalse(Test.wasSetUp)
self.assertFalse(Test.wasTornDown)
self.assertEqual(len(result.errors), 1)
self.assertEqual(len(result.failures), 0)
self.assertEqual(result.testsRun, 2)
def test_overriding_call(self):
class MySuite(unittest.TestSuite):
called = False
def __call__(self, *args, **kw):
self.called = True
unittest.TestSuite.__call__(self, *args, **kw)
suite = MySuite()
result = unittest.TestResult()
wrapper = unittest.TestSuite()
wrapper.addTest(suite)
wrapper(result)
self.assertTrue(suite.called)
# reusing results should be permitted even if abominable
self.assertFalse(result._testRunEntered)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
WasatchPhotonics/Foreman | ControlPower.py | 2 | 7442 | # Based on PowerControl from the Griddle, including writing syslog events for
# tracking the status of the phidget
import unittest, logging, serial, sys, time
#Phidget specific imports
from Phidgets.PhidgetException import PhidgetException
from Phidgets.Devices.InterfaceKit import InterfaceKit
ZERO_RELAY = 0
ONE_RELAY = 1
TWO_RELAY = 2
THREE_RELAY = 3
import logging
import logging.handlers
from WasatchLog import PrintLogHandler
log = logging.getLogger('MyLogger')
log.setLevel(logging.DEBUG)
handler = logging.handlers.SysLogHandler(address = '/dev/log')
log.addHandler(handler)
print_handler = PrintLogHandler()
log.addHandler(print_handler)
#log.debug('this is debug')
#log.critical('this is critical')
class Test(unittest.TestCase):
def test_01_open_phidget(self):
powercont = PowerControl()
self.assertTrue( powercont.open_phidget() )
self.assertTrue( powercont.close_phidget() )
def test_02_motor(self):
powercont = PowerControl()
self.assertTrue( powercont.open_phidget() )
self.assertTrue( powercont.motor_on() )
time.sleep(2)
self.assertTrue( powercont.motor_off() )
self.assertTrue( powercont.close_phidget() )
def test_03_cycle_zero(self):
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.zero_on())
time.sleep(3)
self.assertTrue(pc.zero_off())
def test_04_cycle_one(self):
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.one_on())
time.sleep(3)
self.assertTrue(pc.one_off())
def test_05_toggle_zero(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(ZERO_RELAY))
def test_05_zero_off(self):
log.info("Force zero off")
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.zero_off())
self.assertTrue(pc.close_phidget() )
def test_05_zero_on(self):
log.info("Force zero on")
pc = PowerControl()
self.assertTrue(pc.open_phidget())
self.assertTrue(pc.zero_on())
self.assertTrue(pc.close_phidget() )
def test_06_toggle_one(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(ONE_RELAY))
def test_07_toggle_two(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(TWO_RELAY))
def test_08_toggle_three(self):
pc = PowerControl()
self.assertTrue(pc.toggle_line(THREE_RELAY))
class PowerControl(object):
    ''' PowerControl class wraps control logic around the 1014_2 -
PhidgetInterfaceKit 0/0/4 4 relay device. '''
def __init__(self):
#log.info("Start of power control object")
pass
def open_phidget(self):
        ''' Based on the InterfaceKit-simple.py example from Phidgets, create a
        relay object, attach the handlers, open it and wait for the attachment.
        This function's primary purpose is to replace the prints with log
statements. '''
try:
self.interface = InterfaceKit()
except RuntimeError as e:
log.critical("Phidget runtime exception: %s" % e.details)
return 0
try:
self.interface.setOnAttachHandler( self.interfaceAttached )
self.interface.setOnDetachHandler( self.interfaceDetached )
self.interface.setOnErrorhandler( self.interfaceError )
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
return 0
try:
#print "Force open relay serial: 290968"
self.interface.openPhidget()
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
return 0
#log.info("Waiting for attach....")
try:
self.interface.waitForAttach(100)
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
try:
self.interface.closePhidget()
except PhidgetException as e:
log.critical("Close Exc. %i: %s" % (e.code, e.details))
return 0
return 1
#Event Handler Callback Functions
def interfaceAttached(self, e):
attached = e.device
#log.info("interface %i Attached!" % (attached.getSerialNum()))
def interfaceDetached(self, e):
detached = e.device
log.info("interface %i Detached!" % (detached.getSerialNum()))
def interfaceError(self, e):
try:
source = e.device
log.critical("Interface %i: Phidget Error %i: %s" % \
(source.getSerialNum(), e.eCode, e.description))
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
def close_phidget(self):
try:
self.interface.closePhidget()
except PhidgetException as e:
log.critical("Phidget Exception %i: %s" % (e.code, e.details))
return 0
return 1
def change_relay(self, relay=0, status=0):
''' Toggle the status of the phidget relay line to low(0) or high(1)'''
try:
self.interface.setOutputState(relay, status)
#self.emit_line_change(relay, status)
except Exception as e:
log.critical("Problem setting relay on %s" % e)
return 0
return 1
''' Convenience functions '''
def zero_on(self):
#log.info("Zero relay on")
return self.change_relay(relay=ZERO_RELAY, status=1)
def zero_off(self):
return self.change_relay(relay=ZERO_RELAY, status=0)
def one_on(self):
#log.info("one relay on")
return self.change_relay(relay=ONE_RELAY, status=1)
def one_off(self):
return self.change_relay(relay=ONE_RELAY, status=0)
def two_on(self):
#log.info("two relay on")
return self.change_relay(relay=TWO_RELAY, status=1)
def two_off(self):
return self.change_relay(relay=TWO_RELAY, status=0)
def three_on(self):
#log.info("two relay on")
return self.change_relay(relay=THREE_RELAY, status=1)
def three_off(self):
return self.change_relay(relay=THREE_RELAY, status=0)
def toggle_line(self, line=0):
''' Read the internal state of the specified line, then set the opposite
state for a toggle function'''
if not self.open_phidget():
log.critical("Problem opening phidget")
return 0
try:
curr_state = self.interface.getOutputState(line)
except Exception as e:
log.critical("Problem getting relay on %s" % e)
self.close_phidget()
return 0
if not self.change_relay(line, not curr_state):
log.critical("Problem changing relay")
return 0
if not self.close_phidget():
            log.critical("Problem closing phidget")
return 0
return 1
if __name__ == '__main__':
unittest.main()
| mit |
dh4nav/lammps | tools/i-pi/ipi/inputs/cell.py | 41 | 2337 | """Deals with creating the cell class.
Copyright (C) 2013, Joshua More and Michele Ceriotti
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Generates a cell class from a cell vector.
Classes:
InputCell: Deals with creating the Cell object from a file, and
writing the checkpoints.
"""
import numpy as np
from copy import copy
from ipi.engine.cell import *
from ipi.utils.inputvalue import *
from ipi.utils.units import UnitMap
from ipi.utils.messages import verbosity, warning
__all__ = [ 'InputCell' ]
class InputCell(InputArray):
"""Cell input class.
Handles generating the appropriate cell class from the xml input file,
and generating the xml checkpoint tags and data from an instance of the
object.
"""
attribs = copy(InputArray.attribs)
default_help = "Deals with the cell parameters. Takes as array which can be used to initialize the cell vector matrix."
default_label = "CELL"
def __init__(self, help=None, dimension=None, units=None, default=None, dtype=None):
"""Initializes InputCell.
Just calls the parent initialization function with appropriate arguments.
"""
super(InputCell,self).__init__(dtype=float, dimension="length", default=default, help=help)
def store(self, cell):
"""Takes a Cell instance and stores of minimal representation of it.
Args:
cell: A cell object.
"""
super(InputCell,self).store(cell.h)
self.shape.store((3,3))
def fetch(self):
"""Creates a cell object.
Returns:
A cell object of the appropriate type and with the appropriate
properties given the attributes of the InputCell object.
"""
h = super(InputCell,self).fetch()
h.shape = (3,3)
return Cell(h=h)
| gpl-2.0 |
sencha/chromium-spacewalk | tools/cr/cr/commands/prepare.py | 59 | 1781 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the prepare command."""
import cr
class PrepareCommand(cr.Command):
"""The implementation of the prepare command.
The prepare command is used to perform the steps needed to get an output
directory ready to use. These should not be the kind of things that need to
happen every time you build something, but the rarer things that you re-do
only when you get or add new source files, or change your build options.
  This delegates all its behavior to implementations of PrepareOut. These will
(mostly) be in the cr.actions package.
"""
def __init__(self):
super(PrepareCommand, self).__init__()
self.help = 'Prepares an output directory'
self.description = ("""
This does any preparation needed for the output directory, such as
running gyp.
""")
def Run(self):
self.Prepare()
@classmethod
def UpdateContext(cls):
for preparation in PrepareOut.Plugins():
preparation.UpdateContext()
@classmethod
def Prepare(cls):
cls.UpdateContext()
for preparation in PrepareOut.Plugins():
preparation.Prepare()
class PrepareOut(cr.Plugin, cr.Plugin.Type):
"""Base class for output directory preparation plugins.
See PrepareCommand for details.
"""
def UpdateContext(self):
"""Update the context if needed.
This is also used by commands that want the environment setup correctly, but
are not going to call Prepare directly (such as sync)."""
def Prepare(self):
"""All PrepareOut plugins must override this method to do their work."""
raise NotImplementedError('Must be overridden.')
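# Illustrative sketch (not part of cr): a concrete preparation step would
# subclass PrepareOut and override Prepare(), roughly like
#
#   class GypPrepareOut(PrepareOut):      # hypothetical name
#       def Prepare(self):
#           # run gyp (or another generator) for the selected output directory
#           ...
#
# Real implementations live in the cr.actions package, as noted above.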
| bsd-3-clause |
xerxes2/gpodder | src/gpodder/sync.py | 1 | 41915 | # -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2011 Thomas Perl and the gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# sync.py -- Device synchronization
# Thomas Perl <[email protected]> 2007-12-06
# based on libipodsync.py (2006-04-05 Thomas Perl)
import gpodder
from gpodder import util
from gpodder import services
from gpodder import libconverter
from gpodder.liblogger import log
import time
import calendar
_ = gpodder.gettext
gpod_available = True
try:
import gpod
except:
gpod_available = False
log('(gpodder.sync) Could not find gpod')
pymtp_available = True
try:
import gpodder.gpopymtp as pymtp
except:
pymtp_available = False
log('(gpodder.sync) Could not load gpopymtp (libmtp not installed?).')
try:
import eyeD3
except:
log( '(gpodder.sync) Could not find eyeD3')
try:
import Image
except:
log('(gpodder.sync) Could not find Python Imaging Library (PIL)')
# Register our dependencies for the synchronization module
services.dependency_manager.depend_on(_('iPod synchronization'), _('Support synchronization of podcasts to Apple iPod devices via libgpod.'), ['gpod', 'gst'], [])
services.dependency_manager.depend_on(_('iPod OGG converter'), _('Convert OGG podcasts to MP3 files on synchronization to iPods using oggdec and LAME.'), [], ['oggdec', 'lame'])
services.dependency_manager.depend_on(_('iPod video podcasts'), _('Detect video lengths via MPlayer, to synchronize video podcasts to iPods.'), [], ['mplayer'])
services.dependency_manager.depend_on(_('Rockbox cover art support'), _('Copy podcast cover art to filesystem-based MP3 players running Rockbox.org firmware. Needs Python Imaging.'), ['Image'], [])
import os
import os.path
import glob
import time
if pymtp_available:
class MTP(pymtp.MTP):
sep = os.path.sep
def __init__(self):
pymtp.MTP.__init__(self)
self.folders = {}
def connect(self):
pymtp.MTP.connect(self)
self.folders = self.unfold(self.mtp.LIBMTP_Get_Folder_List(self.device))
def get_folder_list(self):
return self.folders
def unfold(self, folder, path=''):
result = {}
while folder:
folder = folder.contents
name = self.sep.join([path, folder.name]).lstrip(self.sep)
result[name] = folder.folder_id
if folder.child:
result.update(self.unfold(folder.child, name))
folder = folder.sibling
return result
def mkdir(self, path):
folder_id = 0
prefix = []
parts = path.split(self.sep)
while parts:
prefix.append(parts[0])
tmpath = self.sep.join(prefix)
if self.folders.has_key(tmpath):
folder_id = self.folders[tmpath]
else:
folder_id = self.create_folder(parts[0], parent=folder_id)
# log('Creating subfolder %s in %s (id=%u)' % (parts[0], self.sep.join(prefix), folder_id))
tmpath = self.sep.join(prefix + [parts[0]])
self.folders[tmpath] = folder_id
# log(">>> %s = %s" % (tmpath, folder_id))
del parts[0]
# log('MTP.mkdir: %s = %u' % (path, folder_id))
return folder_id
def open_device(config):
device_type = config.device_type
if device_type == 'ipod':
return iPodDevice(config)
elif device_type == 'filesystem':
return MP3PlayerDevice(config)
elif device_type == 'mtp':
return MTPDevice(config)
else:
return None
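# Illustrative sketch (not part of gPodder): the typical call sequence for the
# Device API defined below; "config" and "episodes" are assumed to be provided
# by the caller.
def _example_device_sync(config, episodes):
    device = open_device(config)
    if device is None or not device.open():
        return False
    device.add_tracks(episodes)
    return device.close()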
def get_track_length(filename):
if util.find_command('mplayer') is not None:
try:
mplayer_output = os.popen('mplayer -msglevel all=-1 -identify -vo null -ao null -frames 0 "%s" 2>/dev/null' % filename).read()
return int(float(mplayer_output[mplayer_output.index('ID_LENGTH'):].splitlines()[0][10:])*1000)
except:
pass
else:
log('Please install MPlayer for track length detection.')
try:
eyed3_info = eyeD3.Mp3AudioFile(filename)
return int(eyed3_info.getPlayTime()*1000)
except:
pass
return int(60*60*1000*3) # Default is three hours (to be on the safe side)
class SyncTrack(object):
"""
This represents a track that is on a device. You need
to specify at least the following keyword arguments,
because these will be used to display the track in the
GUI. All other keyword arguments are optional and can
be used to reference internal objects, etc... See the
iPod synchronization code for examples.
Keyword arguments needed:
playcount (How often has the track been played?)
podcast (Which podcast is this track from? Or: Folder name)
released (The release date of the episode)
If any of these fields is unknown, it should not be
passed to the function (the values will default to None
for all required fields).
"""
def __init__(self, title, length, modified, **kwargs):
self.title = title
self.length = length
self.filesize = util.format_filesize(length)
self.modified = modified
# Set some (possible) keyword arguments to default values
self.playcount = None
self.podcast = None
self.released = None
# Convert keyword arguments to object attributes
self.__dict__.update(kwargs)
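# Illustrative sketch (not part of gPodder): constructing a SyncTrack with the
# optional keyword arguments described in the docstring above; all values here
# are made up.
def _example_sync_track():
    return SyncTrack('Some episode title', 12345678, '2011-01-01',
                     playcount=1, podcast='Some podcast', released='2010-12-24')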
class Device(services.ObservableService):
def __init__(self, config):
self._config = config
self.cancelled = False
self.allowed_types = ['audio', 'video']
self.errors = []
self.tracks_list = []
signals = ['progress', 'sub-progress', 'status', 'done', 'post-done']
services.ObservableService.__init__(self, signals)
def open(self):
pass
def cancel(self):
self.cancelled = True
self.notify('status', _('Cancelled by user'))
def close(self):
self.notify('status', _('Writing data to disk'))
if self._config.sync_disks_after_transfer and not gpodder.win32:
successful_sync = (os.system('sync') == 0)
else:
log('Not syncing disks. Unmount your device before unplugging.', sender=self)
successful_sync = True
self.notify('done')
self.notify('post-done', self, successful_sync)
return True
def add_tracks(self, tracklist=[], force_played=False):
for track in list(tracklist):
# Filter tracks that are not meant to be synchronized
does_not_exist = not track.was_downloaded(and_exists=True)
exclude_played = track.is_played and not force_played and \
self._config.only_sync_not_played
wrong_type = track.file_type() not in self.allowed_types
if does_not_exist or exclude_played or wrong_type:
log('Excluding %s from sync', track.title, sender=self)
tracklist.remove(track)
for id, track in enumerate(sorted(tracklist, key=lambda e: e.pubDate)):
if self.cancelled:
return False
self.notify('progress', id+1, len(tracklist))
added = self.add_track(track)
if self._config.on_sync_mark_played:
log('Marking as played on transfer: %s', track.url, sender=self)
track.mark(is_played=True)
if added and self._config.on_sync_delete and not track.is_locked:
log('Removing episode after transfer: %s', track.url, sender=self)
track.delete_from_disk()
return True
def convert_track(self, episode):
filename = episode.local_filename(create=False)
# The file has to exist, if we ought to transfer it, and therefore,
# local_filename(create=False) must never return None as filename
assert filename is not None
(fn, extension) = os.path.splitext(filename)
if libconverter.converters.has_converter(extension):
if self._config.disable_pre_sync_conversion:
log('Pre-sync conversion is not enabled, set disable_pre_sync_conversion to "False" to enable')
return filename
log('Converting: %s', filename, sender=self)
callback_status = lambda percentage: self.notify('sub-progress', int(percentage))
local_filename = libconverter.converters.convert(filename, callback=callback_status)
if local_filename is None:
log('Cannot convert %s', filename, sender=self)
return filename
return str(local_filename)
return filename
def remove_tracks(self, tracklist=[]):
for id, track in enumerate(tracklist):
if self.cancelled:
return False
self.notify('progress', id, len(tracklist))
self.remove_track(track)
return True
def get_all_tracks(self):
pass
def add_track(self, track):
pass
def remove_track(self, track):
pass
def get_free_space(self):
pass
def episode_on_device(self, episode):
return self._track_on_device(episode.title)
def _track_on_device(self, track_name):
for t in self.tracks_list:
title = t.title
if track_name == title:
return t
return None
class iPodDevice(Device):
def __init__(self, config):
Device.__init__(self, config)
self.mountpoint = str(self._config.ipod_mount)
self.itdb = None
self.podcast_playlist = None
def get_free_space(self):
# Reserve 10 MiB for iTunesDB writing (to be on the safe side)
RESERVED_FOR_ITDB = 1024*1024*10
return util.get_free_disk_space(self.mountpoint) - RESERVED_FOR_ITDB
def open(self):
Device.open(self)
if not gpod_available or not os.path.isdir(self.mountpoint):
return False
self.notify('status', _('Opening iPod database'))
self.itdb = gpod.itdb_parse(self.mountpoint, None)
if self.itdb is None:
return False
self.itdb.mountpoint = self.mountpoint
self.podcasts_playlist = gpod.itdb_playlist_podcasts(self.itdb)
self.master_playlist = gpod.itdb_playlist_mpl(self.itdb)
if self.podcasts_playlist:
self.notify('status', _('iPod opened'))
# build the initial tracks_list
self.tracks_list = self.get_all_tracks()
return True
else:
return False
def close(self):
if self.itdb is not None:
self.notify('status', _('Saving iPod database'))
gpod.itdb_write(self.itdb, None)
self.itdb = None
if self._config.ipod_write_gtkpod_extended:
self.notify('status', _('Writing extended gtkpod database'))
ext_filename = os.path.join(self.mountpoint, 'iPod_Control', 'iTunes', 'iTunesDB.ext')
idb_filename = os.path.join(self.mountpoint, 'iPod_Control', 'iTunes', 'iTunesDB')
if os.path.exists(ext_filename) and os.path.exists(idb_filename):
try:
db = gpod.ipod.Database(self.mountpoint)
gpod.gtkpod.parse(ext_filename, db, idb_filename)
gpod.gtkpod.write(ext_filename, db, idb_filename)
db.close()
except:
log('Error when writing iTunesDB.ext', sender=self, traceback=True)
else:
log('I could not find %s or %s. Will not update extended gtkpod DB.', ext_filename, idb_filename, sender=self)
else:
                log('Not writing extended gtkpod DB. Set "ipod_write_gtkpod_extended" to True if I should write it.', sender=self)
Device.close(self)
return True
def update_played_or_delete(self, channel, episodes, delete_from_db):
"""
        Check whether episodes on the iPod have been played, mark them as
        played in the local database, and delete them if required.
"""
for episode in episodes:
track = self.episode_on_device(episode)
if track:
gtrack = track.libgpodtrack
if gtrack.playcount > 0:
if delete_from_db and not gtrack.rating:
log('Deleting episode from db %s', gtrack.title, sender=self)
channel.delete_episode(episode)
else:
log('Marking episode as played %s', gtrack.title, sender=self)
episode.mark(is_played=True)
def purge(self):
for track in gpod.sw_get_playlist_tracks(self.podcasts_playlist):
if gpod.itdb_filename_on_ipod(track) is None:
log('Episode has no file: %s', track.title, sender=self)
# self.remove_track_gpod(track)
elif track.playcount > 0 and not track.rating:
log('Purging episode: %s', track.title, sender=self)
self.remove_track_gpod(track)
def get_all_tracks(self):
tracks = []
for track in gpod.sw_get_playlist_tracks(self.podcasts_playlist):
filename = gpod.itdb_filename_on_ipod(track)
if filename is None:
# This can happen if the episode is deleted on the device
log('Episode has no file: %s', track.title, sender=self)
self.remove_track_gpod(track)
continue
length = util.calculate_size(filename)
timestamp = util.file_modification_timestamp(filename)
modified = util.format_date(timestamp)
try:
released = gpod.itdb_time_mac_to_host(track.time_released)
released = util.format_date(released)
except ValueError, ve:
# timestamp out of range for platform time_t (bug 418)
log('Cannot convert track time: %s', ve, sender=self)
released = 0
t = SyncTrack(track.title, length, modified, modified_sort=timestamp, libgpodtrack=track, playcount=track.playcount, released=released, podcast=track.artist)
tracks.append(t)
return tracks
def remove_track(self, track):
self.notify('status', _('Removing %s') % track.title)
self.remove_track_gpod(track.libgpodtrack)
def remove_track_gpod(self, track):
filename = gpod.itdb_filename_on_ipod(track)
try:
gpod.itdb_playlist_remove_track(self.podcasts_playlist, track)
except:
log('Track %s not in playlist', track.title, sender=self)
gpod.itdb_track_unlink(track)
util.delete_file(filename)
def add_track(self, episode):
self.notify('status', _('Adding %s') % episode.title)
for track in gpod.sw_get_playlist_tracks(self.podcasts_playlist):
if episode.url == track.podcasturl:
if track.playcount > 0:
episode.mark(is_played=True)
# Mark as played on iPod if played locally (and set podcast flags)
self.set_podcast_flags(track, episode)
return True
original_filename = episode.local_filename(create=False)
# The file has to exist, if we ought to transfer it, and therefore,
# local_filename(create=False) must never return None as filename
assert original_filename is not None
local_filename = original_filename
if util.calculate_size(original_filename) > self.get_free_space():
log('Not enough space on %s, sync aborted...', self.mountpoint, sender = self)
d = {'episode': episode.title, 'mountpoint': self.mountpoint}
message =_('Error copying %(episode)s: Not enough free space on %(mountpoint)s')
self.errors.append(message % d)
self.cancelled = True
return False
local_filename = self.convert_track(episode)
(fn, extension) = os.path.splitext(local_filename)
if extension.lower().endswith('ogg'):
log('Cannot copy .ogg files to iPod.', sender=self)
return False
track = gpod.itdb_track_new()
# Add release time to track if pubDate has a valid value
if episode.pubDate > 0:
try:
# libgpod>= 0.5.x uses a new timestamp format
track.time_released = gpod.itdb_time_host_to_mac(int(episode.pubDate))
except:
# old (pre-0.5.x) libgpod versions expect mactime, so
# we're going to manually build a good mactime timestamp here :)
#
# + 2082844800 for unixtime => mactime (1970 => 1904)
track.time_released = int(episode.pubDate + 2082844800)
track.title = str(episode.title)
track.album = str(episode.channel.title)
track.artist = str(episode.channel.title)
track.description = str(util.remove_html_tags(episode.description))
track.podcasturl = str(episode.url)
track.podcastrss = str(episode.channel.url)
track.tracklen = get_track_length(local_filename)
track.size = os.path.getsize(local_filename)
if episode.file_type() == 'audio':
track.filetype = 'mp3'
track.mediatype = 0x00000004
elif episode.file_type() == 'video':
track.filetype = 'm4v'
track.mediatype = 0x00000006
self.set_podcast_flags(track, episode)
self.set_cover_art(track, local_filename)
gpod.itdb_track_add(self.itdb, track, -1)
gpod.itdb_playlist_add_track(self.master_playlist, track, -1)
gpod.itdb_playlist_add_track(self.podcasts_playlist, track, -1)
copied = gpod.itdb_cp_track_to_ipod(track, str(local_filename), None)
if copied and gpodder.user_hooks is not None:
gpodder.user_hooks.on_file_copied_to_ipod(self, local_filename)
# If the file has been converted, delete the temporary file here
if local_filename != original_filename:
util.delete_file(local_filename)
return True
def set_podcast_flags(self, track, episode):
try:
# Set blue bullet for unplayed tracks on 5G iPods
if episode.is_played:
track.mark_unplayed = 0x01
if track.playcount == 0:
track.playcount = 1
else:
if track.playcount > 0 or track.bookmark_time > 0:
#track is partially played so no blue bullet
track.mark_unplayed = 0x01
else:
#totally unplayed
track.mark_unplayed = 0x02
# Set several flags for to podcast values
track.remember_playback_position = 0x01
track.flag1 = 0x02
track.flag2 = 0x01
track.flag3 = 0x01
track.flag4 = 0x01
except:
log('Seems like your python-gpod is out-of-date.', sender=self)
def set_cover_art(self, track, local_filename):
try:
tag = eyeD3.Tag()
if tag.link(local_filename):
if 'APIC' in tag.frames and len(tag.frames['APIC']) > 0:
apic = tag.frames['APIC'][0]
extension = 'jpg'
if apic.mimeType == 'image/png':
extension = 'png'
                    cover_filename = '%s.cover.%s' % (local_filename, extension)
cover_file = open(cover_filename, 'w')
cover_file.write(apic.imageData)
cover_file.close()
gpod.itdb_track_set_thumbnails(track, cover_filename)
return True
except:
log('Error getting cover using eyeD3', sender=self)
try:
cover_filename = os.path.join(os.path.dirname(local_filename), 'folder.jpg')
if os.path.isfile(cover_filename):
gpod.itdb_track_set_thumbnails(track, cover_filename)
return True
except:
log('Error getting cover using channel cover', sender=self)
return False
class MP3PlayerDevice(Device):
# if different players use other filenames besides
# .scrobbler.log, add them to this list
scrobbler_log_filenames = ['.scrobbler.log']
def __init__(self, config):
Device.__init__(self, config)
self.destination = self._config.mp3_player_folder
self.buffer_size = 1024*1024 # 1 MiB
self.scrobbler_log = []
def get_free_space(self):
return util.get_free_disk_space(self.destination)
def open(self):
Device.open(self)
self.notify('status', _('Opening MP3 player'))
if util.directory_is_writable(self.destination):
self.notify('status', _('MP3 player opened'))
# build the initial tracks_list
self.tracks_list = self.get_all_tracks()
if self._config.mp3_player_use_scrobbler_log:
mp3_player_mount_point = util.find_mount_point(self.destination)
                # If a mount point cannot be found, look inside self.destination for scrobbler_log_filenames
# this prevents us from os.walk()'ing the entire / filesystem
if mp3_player_mount_point == '/':
mp3_player_mount_point = self.destination
log_location = self.find_scrobbler_log(mp3_player_mount_point)
if log_location is not None and self.load_audioscrobbler_log(log_location):
log('Using Audioscrobbler log data to mark tracks as played', sender=self)
return True
else:
return False
def add_track(self, episode):
self.notify('status', _('Adding %s') % episode.title.decode('utf-8', 'ignore'))
if self._config.fssync_channel_subfolders:
# Add channel title as subfolder
folder = episode.channel.title
# Clean up the folder name for use on limited devices
folder = util.sanitize_filename(folder, self._config.mp3_player_max_filename_length)
folder = os.path.join(self.destination, folder)
else:
folder = self.destination
folder = util.sanitize_encoding(folder)
from_file = util.sanitize_encoding(self.convert_track(episode))
filename_base = util.sanitize_filename(episode.sync_filename(self._config.custom_sync_name_enabled, self._config.custom_sync_name), self._config.mp3_player_max_filename_length)
to_file = filename_base + os.path.splitext(from_file)[1].lower()
# dirty workaround: on bad (empty) episode titles,
# we simply use the from_file basename
# (please, podcast authors, FIX YOUR RSS FEEDS!)
if os.path.splitext(to_file)[0] == '':
to_file = os.path.basename(from_file)
to_file = util.sanitize_encoding(os.path.join(folder, to_file))
if not os.path.exists(folder):
try:
os.makedirs(folder)
except:
log('Cannot create folder on MP3 player: %s', folder, sender=self)
return False
if self._config.mp3_player_use_scrobbler_log and not episode.is_played:
# FIXME: This misses some things when channel.title<>album tag which is what
# the scrobbling entity will be using.
if [episode.channel.title, episode.title] in self.scrobbler_log:
log('Marking "%s" from "%s" as played', episode.title, episode.channel.title, sender=self)
episode.mark(is_played=True)
if self._config.rockbox_copy_coverart and not os.path.exists(os.path.join(folder, 'cover.bmp')):
log('Creating Rockbox album art for "%s"', episode.channel.title, sender=self)
self.copy_player_cover_art(folder, from_file, \
'cover.bmp', 'BMP', self._config.rockbox_coverart_size)
if self._config.custom_player_copy_coverart \
and not os.path.exists(os.path.join(folder, \
self._config.custom_player_coverart_name)):
log('Creating custom player album art for "%s"',
episode.channel.title, sender=self)
self.copy_player_cover_art(folder, from_file, \
self._config.custom_player_coverart_name, \
self._config.custom_player_coverart_format, \
self._config.custom_player_coverart_size)
if not os.path.exists(to_file):
log('Copying %s => %s', os.path.basename(from_file), to_file.decode(util.encoding), sender=self)
copied = self.copy_file_progress(from_file, to_file)
if copied and gpodder.user_hooks is not None:
gpodder.user_hooks.on_file_copied_to_filesystem(self, from_file, to_file)
return copied
return True
def copy_file_progress(self, from_file, to_file):
try:
out_file = open(to_file, 'wb')
except IOError, ioerror:
d = {'filename': ioerror.filename, 'message': ioerror.strerror}
self.errors.append(_('Error opening %(filename)s: %(message)s') % d)
self.cancel()
return False
try:
in_file = open(from_file, 'rb')
except IOError, ioerror:
d = {'filename': ioerror.filename, 'message': ioerror.strerror}
self.errors.append(_('Error opening %(filename)s: %(message)s') % d)
self.cancel()
return False
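        # Determine the total size of the source file by seeking to its end,
        # then rewind and copy it across in buffer_size chunks, emitting a
        # 'sub-progress' notification after each chunk.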
in_file.seek(0, 2)
bytes = in_file.tell()
in_file.seek(0)
bytes_read = 0
s = in_file.read(self.buffer_size)
while s:
bytes_read += len(s)
try:
out_file.write(s)
except IOError, ioerror:
self.errors.append(ioerror.strerror)
try:
out_file.close()
except:
pass
try:
log('Trying to remove partially copied file: %s' % to_file, sender=self)
os.unlink( to_file)
log('Yeah! Unlinked %s at least..' % to_file, sender=self)
except:
log('Error while trying to unlink %s. OH MY!' % to_file, sender=self)
self.cancel()
return False
self.notify('sub-progress', int(min(100, 100*float(bytes_read)/float(bytes))))
s = in_file.read(self.buffer_size)
out_file.close()
in_file.close()
return True
def get_all_tracks(self):
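        # Scan the destination folder (one level deeper when per-channel
        # subfolders are enabled) and wrap each file found in a SyncTrack.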
tracks = []
if self._config.fssync_channel_subfolders:
files = glob.glob(os.path.join(self.destination, '*', '*'))
else:
files = glob.glob(os.path.join(self.destination, '*'))
for filename in files:
(title, extension) = os.path.splitext(os.path.basename(filename))
length = util.calculate_size(filename)
timestamp = util.file_modification_timestamp(filename)
modified = util.format_date(timestamp)
if self._config.fssync_channel_subfolders:
podcast_name = os.path.basename(os.path.dirname(filename))
else:
podcast_name = None
t = SyncTrack(title, length, modified, modified_sort=timestamp, filename=filename, podcast=podcast_name)
tracks.append(t)
return tracks
def episode_on_device(self, episode):
e = util.sanitize_filename(episode.sync_filename(self._config.custom_sync_name_enabled, self._config.custom_sync_name), self._config.mp3_player_max_filename_length)
return self._track_on_device(e)
def remove_track(self, track):
self.notify('status', _('Removing %s') % track.title)
util.delete_file(track.filename)
directory = os.path.dirname(track.filename)
if self.directory_is_empty(directory) and self._config.fssync_channel_subfolders:
try:
os.rmdir(directory)
except:
log('Cannot remove %s', directory, sender=self)
def directory_is_empty(self, directory):
files = glob.glob(os.path.join(directory, '*'))
dotfiles = glob.glob(os.path.join(directory, '.*'))
return len(files+dotfiles) == 0
def find_scrobbler_log(self, mount_point):
""" find an audioscrobbler log file from log_filenames in the mount_point dir """
for dirpath, dirnames, filenames in os.walk(mount_point):
for log_file in self.scrobbler_log_filenames:
filename = os.path.join(dirpath, log_file)
if os.path.isfile(filename):
return filename
# No scrobbler log on that device
return None
def copy_player_cover_art(self, destination, local_filename, \
cover_dst_name, cover_dst_format, \
cover_dst_size):
"""
Try to copy the channel cover to the podcast folder on the MP3
player. This makes the player, e.g. Rockbox (rockbox.org), display the
cover art in its interface.
You need the Python Imaging Library (PIL) installed to be able to
convert the cover file to a Bitmap file, which Rockbox needs.
"""
try:
cover_loc = os.path.join(os.path.dirname(local_filename), 'folder.jpg')
cover_dst = os.path.join(destination, cover_dst_name)
if os.path.isfile(cover_loc):
log('Creating cover art file on player', sender=self)
log('Cover art size is %s', cover_dst_size, sender=self)
size = (cover_dst_size, cover_dst_size)
try:
cover = Image.open(cover_loc)
cover.thumbnail(size)
cover.save(cover_dst, cover_dst_format)
except IOError:
log('Cannot create %s (PIL?)', cover_dst, traceback=True, sender=self)
return True
else:
log('No cover available to set as player cover', sender=self)
return True
except:
log('Error getting cover using channel cover', sender=self)
return False
def load_audioscrobbler_log(self, log_file):
""" Retrive track title and artist info for all the entries
in an audioscrobbler portable player format logfile
http://www.audioscrobbler.net/wiki/Portable_Player_Logging """
try:
log('Opening "%s" as AudioScrobbler log.', log_file, sender=self)
f = open(log_file, 'r')
entries = f.readlines()
f.close()
except IOError, ioerror:
log('Error: "%s" cannot be read.', log_file, sender=self)
return False
try:
# Scrobble Log Format: http://www.audioscrobbler.net/wiki/Portable_Player_Logging
# Notably some fields are optional so will appear as \t\t.
# Conforming scrobblers should strip any \t's from the actual fields.
for entry in entries:
entry = entry.split('\t')
                if len(entry)>=6:
                    artist, album, track, pos, length, rating = entry[:6]
# L means at least 50% of the track was listened to (S means < 50%)
if 'L' in rating:
# Whatever is writing the logs will only have the taginfo in the
# file to work from. Mostly album~=channel name
if len(track):
self.scrobbler_log.append([album, track])
else:
log('Skipping logging of %s (missing track)', album)
else:
log('Skipping scrobbler entry: %d elements %s', len(entry), entry)
except:
log('Error while parsing "%s".', log_file, sender=self)
return True
class MTPDevice(Device):
def __init__(self, config):
Device.__init__(self, config)
self.__model_name = None
try:
self.__MTPDevice = MTP()
except NameError, e:
# pymtp not available / not installed (see bug 924)
log('pymtp not found: %s', str(e), sender=self)
self.__MTPDevice = None
def __callback(self, sent, total):
if self.cancelled:
return -1
percentage = round(float(sent)/float(total)*100)
text = ('%i%%' % percentage)
self.notify('progress', sent, total, text)
def __date_to_mtp(self, date):
"""
        this function formats the given date and time into a string
        representation according to MTP specifications: YYYYMMDDThhmmss.s
        return
            the string representation of the given date
"""
if not date:
return ""
try:
d = time.gmtime(date)
return time.strftime("%Y%m%d-%H%M%S.0Z", d)
except Exception, exc:
            log('ERROR: An error has happened while trying to convert date to an mtp string (%s)', exc, sender=self)
return None
def __mtp_to_date(self, mtp):
"""
        this parses the MTP string representation of a date
        (YYYYMMDDThhmmss.s, as per the specification) into
        a python time value
"""
if not mtp:
return None
try:
mtp = mtp.replace(" ", "0") # replace blank with 0 to fix some invalid string
d = time.strptime(mtp[:8] + mtp[9:13],"%Y%m%d%H%M%S")
_date = calendar.timegm(d)
if len(mtp)==20:
                # TIME ZONE SHIFTING: the string contains an hour/minute shift relative to a time zone
try:
shift_direction=mtp[15]
hour_shift = int(mtp[16:18])
minute_shift = int(mtp[18:20])
shift_in_sec = hour_shift * 3600 + minute_shift * 60
if shift_direction == "+":
_date += shift_in_sec
elif shift_direction == "-":
_date -= shift_in_sec
else:
raise ValueError("Expected + or -")
except Exception, exc:
log('WARNING: ignoring invalid time zone information for %s (%s)', mtp, exc, sender=self)
return max( 0, _date )
except Exception, exc:
log('WARNING: the mtp date "%s" can not be parsed against mtp specification (%s)', mtp, exc, sender=self)
return None
def get_name(self):
"""
        this function tries to find a nice name for the device.
        First, it tries to find a friendly (user-assigned) name
        (this name can be set by other applications and is stored on the device).
        If no friendly name was assigned, it tries to get the model name (given by the vendor).
        If no name is found at all, a generic one is returned.
        Once found, the name is cached internally to avoid reading the device again
        return
            the name of the device
"""
if self.__model_name:
return self.__model_name
if self.__MTPDevice is None:
return _('MTP device')
self.__model_name = self.__MTPDevice.get_devicename() # actually libmtp.Get_Friendlyname
if not self.__model_name or self.__model_name == "?????":
self.__model_name = self.__MTPDevice.get_modelname()
if not self.__model_name:
self.__model_name = _('MTP device')
return self.__model_name
def open(self):
Device.open(self)
log("opening the MTP device", sender=self)
self.notify('status', _('Opening the MTP device'), )
try:
self.__MTPDevice.connect()
# build the initial tracks_list
self.tracks_list = self.get_all_tracks()
except Exception, exc:
log('unable to find an MTP device (%s)', exc, sender=self, traceback=True)
return False
self.notify('status', _('%s opened') % self.get_name())
return True
def close(self):
log("closing %s", self.get_name(), sender=self)
self.notify('status', _('Closing %s') % self.get_name())
try:
self.__MTPDevice.disconnect()
except Exception, exc:
log('unable to close %s (%s)', self.get_name(), exc, sender=self)
return False
self.notify('status', _('%s closed') % self.get_name())
Device.close(self)
return True
def add_track(self, episode):
self.notify('status', _('Adding %s...') % episode.title)
filename = str(self.convert_track(episode))
log("sending %s (%s).", filename, episode.title, sender=self)
try:
# verify free space
needed = util.calculate_size(filename)
free = self.get_free_space()
if needed > free:
log('Not enough space on device %s: %s available, but need at least %s', self.get_name(), util.format_filesize(free), util.format_filesize(needed), sender=self)
self.cancelled = True
return False
# fill metadata
metadata = pymtp.LIBMTP_Track()
metadata.title = str(episode.title)
metadata.artist = str(episode.channel.title)
metadata.album = str(episode.channel.title)
metadata.genre = "podcast"
metadata.date = self.__date_to_mtp(episode.pubDate)
metadata.duration = get_track_length(str(filename))
folder_name = ''
if episode.mimetype.startswith('audio/') and self._config.mtp_audio_folder:
folder_name = self._config.mtp_audio_folder
if episode.mimetype.startswith('video/') and self._config.mtp_video_folder:
folder_name = self._config.mtp_video_folder
if episode.mimetype.startswith('image/') and self._config.mtp_image_folder:
folder_name = self._config.mtp_image_folder
if folder_name != '' and self._config.mtp_podcast_folders:
folder_name += os.path.sep + str(episode.channel.title)
# log('Target MTP folder: %s' % folder_name)
if folder_name == '':
folder_id = 0
else:
folder_id = self.__MTPDevice.mkdir(folder_name)
# send the file
to_file = util.sanitize_filename(metadata.title) + episode.extension()
self.__MTPDevice.send_track_from_file(filename, to_file,
metadata, folder_id, callback=self.__callback)
if gpodder.user_hooks is not None:
gpodder.user_hooks.on_file_copied_to_mtp(self, filename, to_file)
except:
log('unable to add episode %s', episode.title, sender=self, traceback=True)
return False
return True
def remove_track(self, sync_track):
self.notify('status', _('Removing %s') % sync_track.mtptrack.title)
log("removing %s", sync_track.mtptrack.title, sender=self)
try:
self.__MTPDevice.delete_object(sync_track.mtptrack.item_id)
except Exception, exc:
            log('unable to remove file %s (%s)', sync_track.mtptrack.filename, exc, sender=self)
        log('%s removed', sync_track.mtptrack.title, sender=self)
def get_all_tracks(self):
try:
listing = self.__MTPDevice.get_tracklisting(callback=self.__callback)
except Exception, exc:
            log('unable to get file listing (%s)', exc, sender=self)
tracks = []
for track in listing:
title = track.title
if not title or title=="": title=track.filename
if len(title) > 50: title = title[0:49] + '...'
artist = track.artist
if artist and len(artist) > 50: artist = artist[0:49] + '...'
length = track.filesize
age_in_days = 0
date = self.__mtp_to_date(track.date)
if not date:
modified = track.date # not a valid mtp date. Display what mtp gave anyway
modified_sort = -1 # no idea how to sort invalid date
else:
modified = util.format_date(date)
modified_sort = date
t = SyncTrack(title, length, modified, modified_sort=modified_sort, mtptrack=track, podcast=artist)
tracks.append(t)
return tracks
def get_free_space(self):
if self.__MTPDevice is not None:
return self.__MTPDevice.get_freespace()
else:
return 0
| gpl-3.0 |
rebost/django | tests/modeltests/signals/tests.py | 40 | 5237 | from __future__ import absolute_import
from django.db.models import signals
from django.dispatch import receiver
from django.test import TestCase
from .models import Person, Car
# #8285: signals can be any callable
class PostDeleteHandler(object):
def __init__(self, data):
self.data = data
def __call__(self, signal, sender, instance, **kwargs):
self.data.append(
(instance, instance.id is None)
)
class MyReceiver(object):
def __init__(self, param):
self.param = param
self._run = False
def __call__(self, signal, sender, **kwargs):
self._run = True
signal.disconnect(receiver=self, sender=sender)
class SignalTests(TestCase):
def test_basic(self):
# Save up the number of connected signals so that we can check at the
# end that all the signals we register get properly unregistered (#9989)
pre_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
data = []
def pre_save_test(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("raw", False))
)
signals.pre_save.connect(pre_save_test)
def post_save_test(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("created"), kwargs.get("raw", False))
)
signals.post_save.connect(post_save_test)
def pre_delete_test(signal, sender, instance, **kwargs):
data.append(
(instance, instance.id is None)
)
signals.pre_delete.connect(pre_delete_test)
post_delete_test = PostDeleteHandler(data)
signals.post_delete.connect(post_delete_test)
# throw a decorator syntax receiver into the mix
@receiver(signals.pre_save)
def pre_save_decorator_test(signal, sender, instance, **kwargs):
data.append(instance)
@receiver(signals.pre_save, sender=Car)
def pre_save_decorator_sender_test(signal, sender, instance, **kwargs):
data.append(instance)
p1 = Person(first_name="John", last_name="Smith")
self.assertEqual(data, [])
p1.save()
self.assertEqual(data, [
(p1, False),
p1,
(p1, True, False),
])
data[:] = []
p1.first_name = "Tom"
p1.save()
self.assertEqual(data, [
(p1, False),
p1,
(p1, False, False),
])
data[:] = []
# Car signal (sender defined)
c1 = Car(make="Volkswagon", model="Passat")
c1.save()
self.assertEqual(data, [
(c1, False),
c1,
c1,
(c1, True, False),
])
data[:] = []
# Calling an internal method purely so that we can trigger a "raw" save.
p1.save_base(raw=True)
self.assertEqual(data, [
(p1, True),
p1,
(p1, False, True),
])
data[:] = []
p1.delete()
self.assertEqual(data, [
(p1, False),
(p1, False),
])
data[:] = []
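        # Saving with an explicit primary key that does not exist yet still
        # results in an INSERT, so the post_save handlers report created=True.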
p2 = Person(first_name="James", last_name="Jones")
p2.id = 99999
p2.save()
self.assertEqual(data, [
(p2, False),
p2,
(p2, True, False),
])
data[:] = []
p2.id = 99998
p2.save()
self.assertEqual(data, [
(p2, False),
p2,
(p2, True, False),
])
data[:] = []
p2.delete()
self.assertEqual(data, [
(p2, False),
(p2, False)
])
self.assertQuerysetEqual(
Person.objects.all(), [
"James Jones",
],
unicode
)
signals.post_delete.disconnect(post_delete_test)
signals.pre_delete.disconnect(pre_delete_test)
signals.post_save.disconnect(post_save_test)
signals.pre_save.disconnect(pre_save_test)
signals.pre_save.disconnect(pre_save_decorator_test)
signals.pre_save.disconnect(pre_save_decorator_sender_test, sender=Car)
# Check that all our signals got disconnected properly.
post_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
self.assertEqual(pre_signals, post_signals)
def test_disconnect_in_dispatch(self):
"""
Test that signals that disconnect when being called don't mess future
dispatching.
"""
a, b = MyReceiver(1), MyReceiver(2)
signals.post_save.connect(sender=Person, receiver=a)
signals.post_save.connect(sender=Person, receiver=b)
p = Person.objects.create(first_name='John', last_name='Smith')
self.assertTrue(a._run)
self.assertTrue(b._run)
self.assertEqual(signals.post_save.receivers, [])
| bsd-3-clause |
abhijithch/MozDef | alerts/cloudtrail.py | 12 | 1882 | #!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez [email protected]
from lib.alerttask import AlertTask
class AlertCloudtrail(AlertTask):
def main(self):
# look for events in last 160 hours
date_timedelta = dict(hours=160)
# Configure filters by importing a kibana dashboard
self.filtersFromKibanaDash('cloudtrail_dashboard.json', date_timedelta)
# Search events
self.searchEventsSimple()
self.walkEvents()
# Set alert properties
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail','aws']
severity = 'INFO'
summary = ('{0} called {1} from {2}'.format(event['_source']['userIdentity']['userName'], event['_source']['eventName'], event['_source']['sourceIPAddress']))
if event['_source']['eventName'] == 'RunInstances':
for i in event['_source']['responseElements']['instancesSet']['items']:
if 'privateDnsName' in i.keys():
summary += (' running {0} '.format(i['privateDnsName']))
elif 'instanceId' in i.keys():
summary += (' running {0} '.format(i['instanceId']))
else:
summary += (' running {0} '.format(flattenDict(i)))
if event['_source']['eventName'] == 'StartInstances':
for i in event['_source']['requestParameters']['instancesSet']['items']:
summary += (' starting {0} '.format(i['instanceId']))
# Create the alert object based on these properties
return self.createAlertDict(summary, category, tags, [event], severity) | mpl-2.0 |
kyrias/cjdns | node_build/dependencies/libuv/build/gyp/test/mac/gyptest-xcode-env-order.py | 119 | 3284 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that dependent Xcode settings are processed correctly.
"""
import TestGyp
import TestMac
import subprocess
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
CHDIR = 'xcode-env-order'
INFO_PLIST_PATH = 'Test.app/Contents/Info.plist'
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', test.ALL, chdir=CHDIR)
# Env vars in 'copies' filenames.
test.built_file_must_exist('Test-copy-brace/main.c', chdir=CHDIR)
test.built_file_must_exist('Test-copy-paren/main.c', chdir=CHDIR)
test.built_file_must_exist('Test-copy-bare/main.c', chdir=CHDIR)
# Env vars in 'actions' filenames and inline actions
test.built_file_must_exist('action-copy-brace.txt', chdir=CHDIR)
test.built_file_must_exist('action-copy-paren.txt', chdir=CHDIR)
test.built_file_must_exist('action-copy-bare.txt', chdir=CHDIR)
# Env vars in 'rules' filenames and inline actions
test.built_file_must_exist('rule-copy-brace.txt', chdir=CHDIR)
test.built_file_must_exist('rule-copy-paren.txt', chdir=CHDIR)
# TODO: see comment in test.gyp for this file.
#test.built_file_must_exist('rule-copy-bare.txt', chdir=CHDIR)
# Env vars in Info.plist.
info_plist = test.built_file_path(INFO_PLIST_PATH, chdir=CHDIR)
test.must_exist(info_plist)
test.must_contain(info_plist, '''\
\t<key>BraceProcessedKey1</key>
\t<string>D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BraceProcessedKey2</key>
\t<string>/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BraceProcessedKey3</key>
\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>ParenProcessedKey1</key>
\t<string>D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>ParenProcessedKey2</key>
\t<string>/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>ParenProcessedKey3</key>
\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey1</key>
\t<string>D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey2</key>
\t<string>/Source/Project/Test</string>''')
# NOTE: For bare variables, $PRODUCT_TYPE is not replaced! It _is_ replaced
# if it's not right at the start of the string (e.g. ':$PRODUCT_TYPE'), so
# this looks like an Xcode bug. This bug isn't emulated (yet?), so check this
# only for Xcode.
if test.format == 'xcode' and TestMac.Xcode.Version() < '0500':
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey3</key>
\t<string>$PRODUCT_TYPE:D:/Source/Project/Test</string>''')
else:
# The bug has been fixed by Xcode version 5.0.0.
test.must_contain(info_plist, '''\
\t<key>BareProcessedKey3</key>
\t<string>com.apple.product-type.application:D:/Source/Project/Test</string>''')
test.must_contain(info_plist, '''\
\t<key>MixedProcessedKey</key>
\t<string>/Source/Project:Test:mh_execute</string>''')
test.pass_test()
| gpl-3.0 |
bdero/edx-platform | cms/djangoapps/contentstore/features/component_settings_editor_helpers.py | 18 | 9372 | # disable missing docstring
# pylint: disable=C0111
from lettuce import world
from nose.tools import assert_equal, assert_in # pylint: disable=E0611
from terrain.steps import reload_the_page
from common import type_in_codemirror
from selenium.webdriver.common.keys import Keys
@world.absorb
def create_component_instance(step, category, component_type=None, is_advanced=False, advanced_component=None):
"""
Create a new component in a Unit.
Parameters
----------
category: component type (discussion, html, problem, video, advanced)
component_type: for components with multiple templates, the link text in the menu
is_advanced: for problems, is the desired component under the advanced menu?
advanced_component: for advanced components, the related value of policy key 'advanced_modules'
"""
assert_in(category, ['advanced', 'problem', 'html', 'video', 'discussion'])
component_button_css = 'span.large-{}-icon'.format(category.lower())
if category == 'problem':
module_css = 'div.xmodule_CapaModule'
elif category == 'advanced':
module_css = 'div.xmodule_{}Module'.format(advanced_component.title())
else:
module_css = 'div.xmodule_{}Module'.format(category.title())
# Count how many of that module is on the page. Later we will
# assert that one more was added.
# We need to use world.browser.find_by_css instead of world.css_find
# because it's ok if there are currently zero of them.
module_count_before = len(world.browser.find_by_css(module_css))
# Disable the jquery animation for the transition to the menus.
world.disable_jquery_animations()
world.css_click(component_button_css)
if category in ('problem', 'html', 'advanced'):
world.wait_for_invisible(component_button_css)
click_component_from_menu(category, component_type, is_advanced)
expected_count = module_count_before + 1
world.wait_for(
lambda _: len(world.css_find(module_css)) == expected_count,
timeout=20
)
@world.absorb
def click_new_component_button(step, component_button_css):
step.given('I have clicked the new unit button')
world.css_click(component_button_css)
def _click_advanced():
css = 'ul.problem-type-tabs a[href="#tab2"]'
world.css_click(css)
# Wait for the advanced tab items to be displayed
tab2_css = 'div.ui-tabs-panel#tab2'
world.wait_for_visible(tab2_css)
def _find_matching_link(category, component_type):
"""
Find the link with the specified text. There should be one and only one.
"""
# The tab shows links for the given category
links = world.css_find('div.new-component-{} a'.format(category))
# Find the link whose text matches what you're looking for
matched_links = [link for link in links if link.text == component_type]
# There should be one and only one
assert_equal(len(matched_links), 1)
return matched_links[0]
def click_component_from_menu(category, component_type, is_advanced):
"""
Creates a component for a category with more
than one template, i.e. HTML and Problem.
For some problem types, it is necessary to click to
the Advanced tab.
The component_type is the link text, e.g. "Blank Common Problem"
"""
if is_advanced:
# Sometimes this click does not work if you go too fast.
world.retry_on_exception(_click_advanced,
ignored_exceptions=AssertionError)
# Retry this in case the list is empty because you tried too fast.
link = world.retry_on_exception(
lambda: _find_matching_link(category, component_type),
ignored_exceptions=AssertionError
)
# Wait for the link to be clickable. If you go too fast it is not.
world.retry_on_exception(lambda: link.click())
@world.absorb
def edit_component_and_select_settings():
world.edit_component()
world.ensure_settings_visible()
@world.absorb
def ensure_settings_visible():
# Select the 'settings' tab if there is one (it isn't displayed if it is the only option)
settings_button = world.browser.find_by_css('.settings-button')
if len(settings_button) > 0:
world.css_click('.settings-button')
@world.absorb
def edit_component(index=0):
# Verify that the "loading" indication has been hidden.
world.wait_for_loading()
# Verify that the "edit" button is present.
world.wait_for(lambda _driver: world.css_visible('a.edit-button'))
world.css_click('a.edit-button', index)
world.wait_for_ajax_complete()
@world.absorb
def select_editor_tab(tab_name):
editor_tabs = world.browser.find_by_css('.editor-tabs a')
expected_tab_text = tab_name.strip().upper()
matching_tabs = [tab for tab in editor_tabs if tab.text.upper() == expected_tab_text]
assert len(matching_tabs) == 1
tab = matching_tabs[0]
tab.click()
world.wait_for_ajax_complete()
def enter_xml_in_advanced_problem(step, text):
"""
    Edits an advanced problem (assumes there is only one on the page),
types the provided XML, and saves the component.
"""
world.edit_component()
type_in_codemirror(0, text)
world.save_component()
@world.absorb
def verify_setting_entry(setting, display_name, value, explicitly_set):
"""
Verify the capa module fields are set as expected in the
Advanced Settings editor.
Parameters
----------
setting: the WebDriverElement object found in the browser
display_name: the string expected as the label
    value: the expected field value
explicitly_set: True if the value is expected to have been explicitly set
for the problem, rather than derived from the defaults. This is verified
by the existence of a "Clear" button next to the field value.
"""
assert_equal(display_name, setting.find_by_css('.setting-label')[0].html.strip())
# Check if the web object is a list type
# If so, we use a slightly different mechanism for determining its value
if setting.has_class('metadata-list-enum') or setting.has_class('metadata-dict') or setting.has_class('metadata-video-translations'):
list_value = ', '.join(ele.value for ele in setting.find_by_css('.list-settings-item'))
assert_equal(value, list_value)
elif setting.has_class('metadata-videolist-enum'):
list_value = ', '.join(ele.find_by_css('input')[0].value for ele in setting.find_by_css('.videolist-settings-item'))
assert_equal(value, list_value)
else:
assert_equal(value, setting.find_by_css('.setting-input')[0].value)
# VideoList doesn't have clear button
if not setting.has_class('metadata-videolist-enum'):
settingClearButton = setting.find_by_css('.setting-clear')[0]
assert_equal(explicitly_set, settingClearButton.has_class('active'))
assert_equal(not explicitly_set, settingClearButton.has_class('inactive'))
@world.absorb
def verify_all_setting_entries(expected_entries):
settings = world.browser.find_by_css('.wrapper-comp-setting')
assert_equal(len(expected_entries), len(settings))
for (counter, setting) in enumerate(settings):
world.verify_setting_entry(
setting, expected_entries[counter][0],
expected_entries[counter][1], expected_entries[counter][2]
)
@world.absorb
def save_component():
world.css_click("a.action-save")
world.wait_for_ajax_complete()
@world.absorb
def save_component_and_reopen(step):
save_component()
    # We have a known issue that modifications are still shown within the edit window after cancel
    # (though they are not persisted). Refresh the browser to make sure the changes WERE persisted after Save.
reload_the_page(step)
edit_component_and_select_settings()
@world.absorb
def cancel_component(step):
world.css_click("a.action-cancel")
    # We have a known issue that modifications are still shown within the edit window after cancel
    # (though they are not persisted). Refresh the browser to make sure the changes were not persisted.
reload_the_page(step)
@world.absorb
def revert_setting_entry(label):
get_setting_entry(label).find_by_css('.setting-clear')[0].click()
@world.absorb
def get_setting_entry(label):
def get_setting():
settings = world.css_find('.wrapper-comp-setting')
for setting in settings:
if setting.find_by_css('.setting-label')[0].value == label:
return setting
return None
return world.retry_on_exception(get_setting)
@world.absorb
def get_setting_entry_index(label):
def get_index():
settings = world.css_find('.metadata_edit .wrapper-comp-setting')
for index, setting in enumerate(settings):
if setting.find_by_css('.setting-label')[0].value == label:
return index
return None
return world.retry_on_exception(get_index)
@world.absorb
def set_field_value(index, value):
"""
Set the field to the specified value.
Note: we cannot use css_fill here because the value is not set
until after you move away from that field.
Instead we will find the element, set its value, then hit the Tab key
to get to the next field.
"""
elem = world.css_find('.metadata_edit div.wrapper-comp-setting input.setting-input')[index]
elem.value = value
elem.type(Keys.TAB)
| agpl-3.0 |
arrabito/DIRAC | DataManagementSystem/scripts/dirac-dms-set-replica-status.py | 9 | 2642 | #!/usr/bin/env python
########################################################################
# $HeadURL$
########################################################################
"""
Set the status of the replicas of given files at the provided SE
"""
__RCSID__ = "$Id$"
from DIRAC.Core.Base import Script
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'\nUsage:',
' %s [option|cfgfile] ... <LFN|File> SE Status' % Script.scriptName,
'Arguments:',
' LFN: LFN',
' File: File name containing a list of affected LFNs',
' SE: Name of Storage Element',
' Status: New Status for the replica' ] ) )
Script.parseCommandLine( ignoreErrors = False )
import DIRAC
from DIRAC import gConfig, gLogger
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
import os
args = Script.getPositionalArgs()
if not len( args ) == 3:
Script.showHelp()
inputFileName = args[0]
storageElement = args[1]
status = args[2]
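# The first positional argument is either a single LFN or the path of a local
# file containing one LFN per line.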
if os.path.exists( inputFileName ):
inputFile = open( inputFileName, 'r' )
string = inputFile.read()
inputFile.close()
lfns = sorted( string.splitlines() )
else:
lfns = [inputFileName]
fc = FileCatalog()
replicaDict = {}
res = fc.getReplicas( lfns, allStatus = True )
if not res['OK']:
gLogger.error( "Failed to get catalog replicas.", res['Message'] )
DIRAC.exit( -1 )
lfnDict = {}
for lfn, error in res['Value']['Failed'].items():
gLogger.error( "Failed to get replicas for file.", "%s:%s" % ( lfn, error ) )
for lfn, replicas in res['Value']['Successful'].items():
if not storageElement in replicas.keys():
gLogger.error( "LFN not registered at provided storage element." , "%s %s" % ( lfn, storageElement ) )
else:
lfnDict[lfn] = {'SE':storageElement, 'PFN':replicas[storageElement], 'Status':status}
if not lfnDict:
gLogger.error( "No files found at the supplied storage element." )
DIRAC.exit( 2 )
res = fc.setReplicaStatus( lfnDict )
if not res['OK']:
gLogger.error( "Failed to set catalog replica status.", res['Message'] )
DIRAC.exit( -1 )
for lfn, error in res['Value']['Failed'].items():
gLogger.error( "Failed to set replica status for file.", "%s:%s" % ( lfn, error ) )
gLogger.notice( "Successfully updated the status of %d files at %s." % ( len( res['Value']['Successful'].keys() ), storageElement ) )
DIRAC.exit( 0 )
| gpl-3.0 |
PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/test_rpn_target_assign_op.py | 2 | 24786 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid import Program, program_guard
from op_test import OpTest
from test_anchor_generator_op import anchor_generator_in_python
from test_generate_proposal_labels_op import _generate_groundtruth
from test_generate_proposal_labels_op import _bbox_overlaps, _box_to_delta
def rpn_target_assign(anchor_by_gt_overlap,
rpn_batch_size_per_im,
rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction,
use_random=True):
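    # Reference (numpy) implementation of the anchor assignment: anchors are
    # labelled foreground when their IoU with some gt box exceeds
    # rpn_positive_overlap (or when they are the best match for a gt box),
    # background when their best IoU falls below rpn_negative_overlap, and
    # both sets are subsampled to fit rpn_batch_size_per_im.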
anchor_to_gt_argmax = anchor_by_gt_overlap.argmax(axis=1)
anchor_to_gt_max = anchor_by_gt_overlap[np.arange(
anchor_by_gt_overlap.shape[0]), anchor_to_gt_argmax]
gt_to_anchor_argmax = anchor_by_gt_overlap.argmax(axis=0)
gt_to_anchor_max = anchor_by_gt_overlap[gt_to_anchor_argmax, np.arange(
anchor_by_gt_overlap.shape[1])]
anchors_with_max_overlap = np.where(
anchor_by_gt_overlap == gt_to_anchor_max)[0]
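    # Anchors that are the best match for some gt box are forced to be
    # foreground, regardless of the positive-overlap threshold.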
labels = np.ones((anchor_by_gt_overlap.shape[0], ), dtype=np.int32) * -1
labels[anchors_with_max_overlap] = 1
labels[anchor_to_gt_max >= rpn_positive_overlap] = 1
num_fg = int(rpn_fg_fraction * rpn_batch_size_per_im)
fg_inds = np.where(labels == 1)[0]
if len(fg_inds) > num_fg and use_random:
disable_inds = np.random.choice(
fg_inds, size=(len(fg_inds) - num_fg), replace=False)
else:
disable_inds = fg_inds[num_fg:]
labels[disable_inds] = -1
fg_inds = np.where(labels == 1)[0]
bbox_inside_weight = np.zeros((len(fg_inds), 4), dtype=np.float32)
num_bg = rpn_batch_size_per_im - np.sum(labels == 1)
bg_inds = np.where(anchor_to_gt_max < rpn_negative_overlap)[0]
if len(bg_inds) > num_bg and use_random:
enable_inds = bg_inds[np.random.randint(len(bg_inds), size=num_bg)]
else:
enable_inds = bg_inds[:num_bg]
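    # A sampled background index may coincide with a foreground index; each
    # such collision adds a placeholder ('fake') foreground entry so that
    # loc_index keeps the expected length, while the corresponding rows of
    # bbox_inside_weight stay zero so they carry no regression weight.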
fg_fake_inds = np.array([], np.int32)
fg_value = np.array([fg_inds[0]], np.int32)
fake_num = 0
for bg_id in enable_inds:
if bg_id in fg_inds:
fake_num += 1
fg_fake_inds = np.hstack([fg_fake_inds, fg_value])
labels[enable_inds] = 0
bbox_inside_weight[fake_num:, :] = 1
fg_inds = np.where(labels == 1)[0]
bg_inds = np.where(labels == 0)[0]
loc_index = np.hstack([fg_fake_inds, fg_inds])
score_index = np.hstack([fg_inds, bg_inds])
labels = labels[score_index]
assert not np.any(labels == -1), "Wrong labels with -1"
gt_inds = anchor_to_gt_argmax[loc_index]
return loc_index, score_index, labels, gt_inds, bbox_inside_weight
def get_anchor(n, c, h, w):
input_feat = np.random.random((n, c, h, w)).astype('float32')
anchors, _ = anchor_generator_in_python(
input_feat=input_feat,
anchor_sizes=[32., 64.],
aspect_ratios=[0.5, 1.0],
variances=[1.0, 1.0, 1.0, 1.0],
stride=[16.0, 16.0],
offset=0.5)
return anchors
def rpn_target_assign_in_python(all_anchors,
gt_boxes,
is_crowd,
im_info,
lod,
rpn_straddle_thresh,
rpn_batch_size_per_im,
rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction,
use_random=True):
anchor_num = all_anchors.shape[0]
batch_size = len(lod) - 1
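    # Handle one image at a time: optionally drop anchors that straddle the
    # image boundary, match the rest against that image's non-crowd gt boxes,
    # then offset the sampled indices by i * anchor_num so they index the
    # flattened multi-image anchor tensor before concatenating the batch.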
for i in range(batch_size):
im_height = im_info[i][0]
im_width = im_info[i][1]
im_scale = im_info[i][2]
if rpn_straddle_thresh >= 0:
# Only keep anchors inside the image by a margin of straddle_thresh
inds_inside = np.where(
(all_anchors[:, 0] >= -rpn_straddle_thresh) &
(all_anchors[:, 1] >= -rpn_straddle_thresh) & (
all_anchors[:, 2] < im_width + rpn_straddle_thresh) & (
all_anchors[:, 3] < im_height + rpn_straddle_thresh))[0]
# keep only inside anchors
inside_anchors = all_anchors[inds_inside, :]
else:
inds_inside = np.arange(all_anchors.shape[0])
inside_anchors = all_anchors
b, e = lod[i], lod[i + 1]
gt_boxes_slice = gt_boxes[b:e, :] * im_scale
is_crowd_slice = is_crowd[b:e]
not_crowd_inds = np.where(is_crowd_slice == 0)[0]
gt_boxes_slice = gt_boxes_slice[not_crowd_inds]
iou = _bbox_overlaps(inside_anchors, gt_boxes_slice)
loc_inds, score_inds, labels, gt_inds, bbox_inside_weight = \
rpn_target_assign(iou, rpn_batch_size_per_im,
rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction,
use_random)
# unmap to all anchor
loc_inds = inds_inside[loc_inds]
score_inds = inds_inside[score_inds]
sampled_gt = gt_boxes_slice[gt_inds]
sampled_anchor = all_anchors[loc_inds]
box_deltas = _box_to_delta(sampled_anchor, sampled_gt, [1., 1., 1., 1.])
if i == 0:
loc_indexes = loc_inds
score_indexes = score_inds
tgt_labels = labels
tgt_bboxes = box_deltas
bbox_inside_weights = bbox_inside_weight
else:
loc_indexes = np.concatenate(
[loc_indexes, loc_inds + i * anchor_num])
score_indexes = np.concatenate(
[score_indexes, score_inds + i * anchor_num])
tgt_labels = np.concatenate([tgt_labels, labels])
tgt_bboxes = np.vstack([tgt_bboxes, box_deltas])
bbox_inside_weights = np.vstack([bbox_inside_weights, \
bbox_inside_weight])
return loc_indexes, score_indexes, tgt_bboxes, tgt_labels, bbox_inside_weights
def retinanet_target_assign(anchor_by_gt_overlap, gt_labels, positive_overlap,
negative_overlap):
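    # Same matching scheme as rpn_target_assign above, but without
    # foreground/background subsampling; class labels are copied from
    # gt_labels for the foreground anchors, and the foreground count is
    # returned as well (compare the ForegroundNumber output below).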
anchor_to_gt_argmax = anchor_by_gt_overlap.argmax(axis=1)
anchor_to_gt_max = anchor_by_gt_overlap[np.arange(
anchor_by_gt_overlap.shape[0]), anchor_to_gt_argmax]
gt_to_anchor_argmax = anchor_by_gt_overlap.argmax(axis=0)
gt_to_anchor_max = anchor_by_gt_overlap[gt_to_anchor_argmax, np.arange(
anchor_by_gt_overlap.shape[1])]
anchors_with_max_overlap = np.where(
anchor_by_gt_overlap == gt_to_anchor_max)[0]
labels = np.ones((anchor_by_gt_overlap.shape[0], ), dtype=np.int32) * -1
labels[anchors_with_max_overlap] = 1
labels[anchor_to_gt_max >= positive_overlap] = 1
fg_inds = np.where(labels == 1)[0]
bbox_inside_weight = np.zeros((len(fg_inds), 4), dtype=np.float32)
bg_inds = np.where(anchor_to_gt_max < negative_overlap)[0]
enable_inds = bg_inds
fg_fake_inds = np.array([], np.int32)
fg_value = np.array([fg_inds[0]], np.int32)
fake_num = 0
for bg_id in enable_inds:
if bg_id in fg_inds:
fake_num += 1
fg_fake_inds = np.hstack([fg_fake_inds, fg_value])
labels[enable_inds] = 0
bbox_inside_weight[fake_num:, :] = 1
fg_inds = np.where(labels == 1)[0]
bg_inds = np.where(labels == 0)[0]
loc_index = np.hstack([fg_fake_inds, fg_inds])
score_index = np.hstack([fg_inds, bg_inds])
score_index_tmp = np.hstack([fg_inds])
labels = labels[score_index]
gt_inds = anchor_to_gt_argmax[loc_index]
label_inds = anchor_to_gt_argmax[score_index_tmp]
labels[0:len(fg_inds)] = np.squeeze(gt_labels[label_inds])
fg_num = len(fg_fake_inds) + len(fg_inds) + 1
assert not np.any(labels == -1), "Wrong labels with -1"
return loc_index, score_index, labels, gt_inds, bbox_inside_weight, fg_num
def retinanet_target_assign_in_python(all_anchors, gt_boxes, gt_labels,
is_crowd, im_info, lod, positive_overlap,
negative_overlap):
anchor_num = all_anchors.shape[0]
batch_size = len(lod) - 1
for i in range(batch_size):
im_scale = im_info[i][2]
inds_inside = np.arange(all_anchors.shape[0])
inside_anchors = all_anchors
b, e = lod[i], lod[i + 1]
gt_boxes_slice = gt_boxes[b:e, :] * im_scale
gt_labels_slice = gt_labels[b:e, :]
is_crowd_slice = is_crowd[b:e]
not_crowd_inds = np.where(is_crowd_slice == 0)[0]
gt_boxes_slice = gt_boxes_slice[not_crowd_inds]
gt_labels_slice = gt_labels_slice[not_crowd_inds]
iou = _bbox_overlaps(inside_anchors, gt_boxes_slice)
loc_inds, score_inds, labels, gt_inds, bbox_inside_weight, fg_num = \
retinanet_target_assign(iou, gt_labels_slice,
positive_overlap, negative_overlap)
# unmap to all anchor
loc_inds = inds_inside[loc_inds]
score_inds = inds_inside[score_inds]
sampled_gt = gt_boxes_slice[gt_inds]
sampled_anchor = all_anchors[loc_inds]
box_deltas = _box_to_delta(sampled_anchor, sampled_gt, [1., 1., 1., 1.])
if i == 0:
loc_indexes = loc_inds
score_indexes = score_inds
tgt_labels = labels
tgt_bboxes = box_deltas
bbox_inside_weights = bbox_inside_weight
fg_nums = [[fg_num]]
else:
loc_indexes = np.concatenate(
[loc_indexes, loc_inds + i * anchor_num])
score_indexes = np.concatenate(
[score_indexes, score_inds + i * anchor_num])
tgt_labels = np.concatenate([tgt_labels, labels])
tgt_bboxes = np.vstack([tgt_bboxes, box_deltas])
bbox_inside_weights = np.vstack([bbox_inside_weights, \
bbox_inside_weight])
fg_nums = np.concatenate([fg_nums, [[fg_num]]])
return loc_indexes, score_indexes, tgt_bboxes, tgt_labels, bbox_inside_weights, fg_nums
class TestRpnTargetAssignOp(OpTest):
def setUp(self):
n, c, h, w = 2, 4, 14, 14
all_anchors = get_anchor(n, c, h, w)
gt_num = 10
all_anchors = all_anchors.reshape(-1, 4)
anchor_num = all_anchors.shape[0]
images_shape = [[64, 64], [64, 64]]
#images_shape = [[64, 64]]
groundtruth, lod = _generate_groundtruth(images_shape, 3, 4)
lod = [0, 4, 8]
#lod = [0, 4]
im_info = np.ones((len(images_shape), 3)).astype(np.float32)
for i in range(len(images_shape)):
im_info[i, 0] = images_shape[i][0]
im_info[i, 1] = images_shape[i][1]
im_info[i, 2] = 0.8 #scale
gt_boxes = np.vstack([v['boxes'] for v in groundtruth])
is_crowd = np.hstack([v['is_crowd'] for v in groundtruth])
all_anchors = all_anchors.astype('float32')
gt_boxes = gt_boxes.astype('float32')
rpn_straddle_thresh = 0.0
rpn_batch_size_per_im = 256
rpn_positive_overlap = 0.7
rpn_negative_overlap = 0.3
rpn_fg_fraction = 0.5
use_random = False
loc_index, score_index, tgt_bbox, labels, bbox_inside_weights = \
rpn_target_assign_in_python(all_anchors, gt_boxes, is_crowd,
im_info, lod, rpn_straddle_thresh,
rpn_batch_size_per_im, rpn_positive_overlap,
rpn_negative_overlap,
rpn_fg_fraction, use_random)
labels = labels[:, np.newaxis]
self.op_type = "rpn_target_assign"
self.inputs = {
'Anchor': all_anchors,
'GtBoxes': (gt_boxes, [[4, 4]]),
'IsCrowd': (is_crowd, [[4, 4]]),
'ImInfo': (im_info, [[1, 1]])
}
self.attrs = {
'rpn_batch_size_per_im': rpn_batch_size_per_im,
'rpn_straddle_thresh': rpn_straddle_thresh,
'rpn_positive_overlap': rpn_positive_overlap,
'rpn_negative_overlap': rpn_negative_overlap,
'rpn_fg_fraction': rpn_fg_fraction,
'use_random': use_random
}
self.outputs = {
'LocationIndex': loc_index.astype('int32'),
'ScoreIndex': score_index.astype('int32'),
'TargetBBox': tgt_bbox.astype('float32'),
'TargetLabel': labels.astype('int32'),
'BBoxInsideWeight': bbox_inside_weights.astype('float32')
}
def test_check_output(self):
self.check_output()
class TestRetinanetTargetAssignOp(OpTest):
def setUp(self):
n, c, h, w = 2, 4, 14, 14
all_anchors = get_anchor(n, c, h, w)
gt_num = 10
all_anchors = all_anchors.reshape(-1, 4)
anchor_num = all_anchors.shape[0]
images_shape = [[64, 64], [64, 64]]
groundtruth, lod = _generate_groundtruth(images_shape, 3, 4)
lod = [0, 4, 8]
im_info = np.ones((len(images_shape), 3)).astype(np.float32)
for i in range(len(images_shape)):
im_info[i, 0] = images_shape[i][0]
im_info[i, 1] = images_shape[i][1]
im_info[i, 2] = 0.8 #scale
gt_boxes = np.vstack([v['boxes'] for v in groundtruth])
is_crowd = np.hstack([v['is_crowd'] for v in groundtruth])
gt_labels = np.vstack([
v['gt_classes'].reshape(len(v['gt_classes']), 1)
for v in groundtruth
])
gt_labels = gt_labels.reshape(len(gt_labels), 1)
all_anchors = all_anchors.astype('float32')
gt_boxes = gt_boxes.astype('float32')
gt_labels = gt_labels.astype('int32')
positive_overlap = 0.5
negative_overlap = 0.4
loc_index, score_index, tgt_bbox, labels, bbox_inside_weights, fg_num = \
retinanet_target_assign_in_python(all_anchors, gt_boxes, gt_labels, is_crowd,
im_info, lod, positive_overlap, negative_overlap)
labels = labels[:, np.newaxis]
self.op_type = "retinanet_target_assign"
self.inputs = {
'Anchor': all_anchors,
'GtBoxes': (gt_boxes, [[4, 4]]),
'GtLabels': (gt_labels, [[4, 4]]),
'IsCrowd': (is_crowd, [[4, 4]]),
'ImInfo': (im_info, [[1, 1]])
}
self.attrs = {
'positive_overlap': positive_overlap,
'negative_overlap': negative_overlap
}
self.outputs = {
'LocationIndex': loc_index.astype('int32'),
'ScoreIndex': score_index.astype('int32'),
'TargetBBox': tgt_bbox.astype('float32'),
'TargetLabel': labels.astype('int32'),
'BBoxInsideWeight': bbox_inside_weights.astype('float32'),
'ForegroundNumber': fg_num.astype('int32')
}
def test_check_output(self):
self.check_output()
class TestRetinanetTargetAssignOpError(unittest.TestCase):
def test_errors(self):
with program_guard(Program(), Program()):
bbox_pred1 = fluid.data(
name='bbox_pred1', shape=[1, 100, 4], dtype='float32')
cls_logits1 = fluid.data(
name='cls_logits1', shape=[1, 100, 10], dtype='float32')
anchor_box1 = fluid.data(
name='anchor_box1', shape=[100, 4], dtype='float32')
anchor_var1 = fluid.data(
name='anchor_var1', shape=[100, 4], dtype='float32')
gt_boxes1 = fluid.data(
name='gt_boxes1', shape=[10, 4], dtype='float32')
gt_labels1 = fluid.data(
name='gt_labels1', shape=[10, 1], dtype='int32')
is_crowd1 = fluid.data(name='is_crowd1', shape=[1], dtype='float32')
im_info1 = fluid.data(
name='im_info1', shape=[1, 3], dtype='float32')
# The `bbox_pred` must be Variable and the data type of `bbox_pred` Tensor
# one of float32 and float64.
def test_bbox_pred_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign([1], cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_bbox_pred_type)
def test_bbox_pred_tensor_dtype():
bbox_pred2 = fluid.data(
                    name='bbox_pred2', shape=[1, 100, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred2, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_bbox_pred_tensor_dtype)
# The `cls_logits` must be Variable and the data type of `cls_logits` Tensor
# one of float32 and float64.
def test_cls_logits_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, 2, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_cls_logits_type)
def test_cls_logits_tensor_dtype():
cls_logits2 = fluid.data(
name='cls_logits2', shape=[1, 100, 10], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits2, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_cls_logits_tensor_dtype)
# The `anchor_box` must be Variable and the data type of `anchor_box` Tensor
# one of float32 and float64.
def test_anchor_box_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, [5],
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_box_type)
def test_anchor_box_tensor_dtype():
anchor_box2 = fluid.data(
name='anchor_box2', shape=[100, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box2,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_box_tensor_dtype)
# The `anchor_var` must be Variable and the data type of `anchor_var` Tensor
# one of float32 and float64.
def test_anchor_var_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
5, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_var_type)
def test_anchor_var_tensor_dtype():
anchor_var2 = fluid.data(
name='anchor_var2', shape=[100, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var2, gt_boxes1, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_anchor_var_tensor_dtype)
# The `gt_boxes` must be Variable and the data type of `gt_boxes` Tensor
# one of float32 and float64.
def test_gt_boxes_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, [4], gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_boxes_type)
def test_gt_boxes_tensor_dtype():
gt_boxes2 = fluid.data(
name='gt_boxes2', shape=[10, 4], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes2, gt_labels1, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_boxes_tensor_dtype)
# The `gt_label` must be Variable and the data type of `gt_label` Tensor
# int32.
def test_gt_label_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, 9, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_label_type)
def test_gt_label_tensor_dtype():
gt_labels2 = fluid.data(
name='label2', shape=[10, 1], dtype='float32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels2, is_crowd1, im_info1, 10)
self.assertRaises(TypeError, test_gt_label_tensor_dtype)
# The `is_crowd` must be Variable and the data type of `is_crowd` Tensor
# int32.
def test_is_crowd_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, [10], im_info1, 10)
self.assertRaises(TypeError, test_is_crowd_type)
def test_is_crowd_tensor_dtype():
is_crowd2 = fluid.data(
name='is_crowd2', shape=[10, 1], dtype='float32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd2, im_info1, 10)
self.assertRaises(TypeError, test_is_crowd_tensor_dtype)
# The `im_info` must be Variable and the data type of `im_info` Tensor
# must be one of float32 and float64.
def test_im_info_type():
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, 1, 10)
self.assertRaises(TypeError, test_im_info_type)
def test_im_info_tensor_dtype():
im_info2 = fluid.data(
name='im_info2', shape=[1, 3], dtype='int32')
score_pred, loc_pred, score_target, loc_target, bbox_inside_weight, fg_num = \
fluid.layers.retinanet_target_assign(bbox_pred1, cls_logits1, anchor_box1,
anchor_var1, gt_boxes1, gt_labels1, is_crowd1, im_info2, 10)
self.assertRaises(TypeError, test_im_info_tensor_dtype)
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
pusnik/pyexchange | pyexchange/connection.py | 1 | 4274 | """
(c) 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
"""
import requests
from requests_ntlm import HttpNtlmAuth
from requests.auth import HTTPBasicAuth
import logging
from .exceptions import FailedExchangeException
log = logging.getLogger('pyexchange')
class ExchangeBaseConnection(object):
""" Base class for Exchange connections."""
def send(self, body, headers=None, retries=2, timeout=30):
raise NotImplementedError
class ExchangeNTLMAuthConnection(ExchangeBaseConnection):
""" Connection to Exchange that uses NTLM authentication """
def __init__(self, url, username, password, verify_certificate=True, **kwargs):
self.url = url
self.username = username
self.password = password
self.verify_certificate = verify_certificate
self.handler = None
self.session = None
self.password_manager = None
def build_password_manager(self):
if self.password_manager:
return self.password_manager
log.debug(u'Constructing NTLM auth password manager')
self.password_manager = HttpNtlmAuth(self.username, self.password)
return self.password_manager
def build_session(self):
if self.session:
return self.session
log.debug(u'Constructing NTLM auth opener')
self.password_manager = self.build_password_manager()
self.session = requests.Session()
self.session.auth = self.password_manager
return self.session
def send(self, body, headers=None, retries=2, timeout=30):
if not self.session:
self.session = self.build_session()
try:
response = self.session.post(self.url, data=body, headers=headers, verify = self.verify_certificate)
response.raise_for_status()
except requests.exceptions.RequestException as err:
log.debug(getattr(err.response, 'content', 'No response.'))
raise FailedExchangeException(u'Unable to connect to Exchange with NTLM: %s' % err)
log.info(u'Got response: {code}'.format(code=response.status_code))
log.debug(u'Got response headers: {headers}'.format(headers=response.headers))
log.debug(u'Got body: {body}'.format(body=response.text))
return response.content
class ExchangeBasicAuthConnection(ExchangeBaseConnection):
""" Connection to Exchange, Office365 that uses Basic authentication """
def __init__(self, url, username, password, verify_certificate=True, **kwargs):
self.url = url
self.username = username
self.password = password
self.verify_certificate = verify_certificate
self.handler = None
self.session = None
self.password_manager = None
def build_password_manager(self):
if self.password_manager:
return self.password_manager
log.debug(u'Constructing basic auth password manager')
self.password_manager = HTTPBasicAuth(self.username, self.password)
return self.password_manager
def build_session(self):
if self.session:
return self.session
log.debug(u'Constructing opener with Basic auth')
self.password_manager = self.build_password_manager()
self.session = requests.Session()
self.session.auth = self.password_manager
return self.session
def send(self, body, headers=None, retries=2, timeout=30):
if not self.session:
self.session = self.build_session()
try:
response = self.session.post(self.url, data=body, headers=headers, verify = self.verify_certificate)
response.raise_for_status()
except requests.exceptions.RequestException as err:
log.debug(err.response.content)
raise FailedExchangeException(u'Unable to connect to Exchange with Basic auth: %s' % err)
log.info(u'Got response: {code}'.format(code=response.status_code))
log.debug(u'Got response headers: {headers}'.format(headers=response.headers))
log.debug(u'Got body: {body}'.format(body=response.text))
return response.content
| apache-2.0 |
denny820909/builder | lib/python2.7/site-packages/Twisted-12.2.0-py2.7-linux-x86_64.egg/twisted/trial/test/test_test_visitor.py | 90 | 2282 | from twisted.trial import unittest
from twisted.trial.runner import TestSuite, suiteVisit
pyunit = __import__('unittest')
class MockVisitor(object):
def __init__(self):
self.calls = []
def __call__(self, testCase):
self.calls.append(testCase)
class TestTestVisitor(unittest.TestCase):
def setUp(self):
self.visitor = MockVisitor()
def test_visitCase(self):
"""
Test that C{visit} works for a single test case.
"""
testCase = TestTestVisitor('test_visitCase')
testCase.visit(self.visitor)
self.assertEqual(self.visitor.calls, [testCase])
def test_visitSuite(self):
"""
Test that C{visit} hits all tests in a suite.
"""
tests = [TestTestVisitor('test_visitCase'),
TestTestVisitor('test_visitSuite')]
testSuite = TestSuite(tests)
testSuite.visit(self.visitor)
self.assertEqual(self.visitor.calls, tests)
def test_visitEmptySuite(self):
"""
Test that C{visit} on an empty suite hits nothing.
"""
TestSuite().visit(self.visitor)
self.assertEqual(self.visitor.calls, [])
def test_visitNestedSuite(self):
"""
Test that C{visit} recurses through suites.
"""
tests = [TestTestVisitor('test_visitCase'),
TestTestVisitor('test_visitSuite')]
testSuite = TestSuite([TestSuite([test]) for test in tests])
testSuite.visit(self.visitor)
self.assertEqual(self.visitor.calls, tests)
def test_visitPyunitSuite(self):
"""
Test that C{suiteVisit} visits stdlib unittest suites
"""
test = TestTestVisitor('test_visitPyunitSuite')
suite = pyunit.TestSuite([test])
suiteVisit(suite, self.visitor)
self.assertEqual(self.visitor.calls, [test])
def test_visitPyunitCase(self):
"""
Test that a stdlib test case in a suite gets visited.
"""
class PyunitCase(pyunit.TestCase):
def test_foo(self):
pass
test = PyunitCase('test_foo')
TestSuite([test]).visit(self.visitor)
self.assertEqual(
[call.id() for call in self.visitor.calls], [test.id()])
| mit |
abztrakt/django-badger | badger/migrations/0006_auto__add_field_nomination_rejecter__add_field_nomination_rejection_re.py | 9 | 11582 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Nomination.rejected_by'
db.add_column('badger_nomination', 'rejected_by',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='nomination_rejected_by', null=True, to=orm['auth.User']),
keep_default=False)
# Adding field 'Nomination.rejected_reason'
db.add_column('badger_nomination', 'rejected_reason',
self.gf('django.db.models.fields.TextField')(default='', blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Nomination.rejected_by'
db.delete_column('badger_nomination', 'rejected_by_id')
# Deleting field 'Nomination.rejected_reason'
db.delete_column('badger_nomination', 'rejected_reason')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'badger.award': {
'Meta': {'ordering': "['-modified', '-created']", 'object_name': 'Award'},
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'claim_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'db_index': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'award_creator'", 'null': 'True', 'to': "orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'badger.badge': {
'Meta': {'ordering': "['-modified', '-created']", 'unique_together': "(('title', 'slug'),)", 'object_name': 'Badge'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'nominations_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'prerequisites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['badger.Badge']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'unique': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'badger.deferredaward': {
'Meta': {'ordering': "['-modified', '-created']", 'object_name': 'DeferredAward'},
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'claim_code': ('django.db.models.fields.CharField', [], {'default': "'xamuuk'", 'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'claim_group': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'db_index': 'True', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'reusable': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'badger.nomination': {
'Meta': {'object_name': 'Nomination'},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'approver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nomination_approver'", 'null': 'True', 'to': "orm['auth.User']"}),
'award': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Award']", 'null': 'True', 'blank': 'True'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nomination_creator'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'nominee': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'nomination_nominee'", 'to': "orm['auth.User']"}),
'rejected_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'nomination_rejected_by'", 'null': 'True', 'to': "orm['auth.User']"}),
'rejected_reason': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'badger.progress': {
'Meta': {'unique_together': "(('badge', 'user'),)", 'object_name': 'Progress'},
'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
'counter': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'notes': ('badger.models.JSONField', [], {'null': 'True', 'blank': 'True'}),
'percent': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'progress_user'", 'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['badger']
| bsd-3-clause |
smart-developerr/my-first-blog | Lib/site-packages/django/core/management/sql.py | 108 | 1972 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
def sql_flush(style, connection, only_django=False, reset_sequences=True, allow_cascade=False):
"""
Returns a list of the SQL statements used to flush the database.
If only_django is True, then only table names that have associated Django
models and are in INSTALLED_APPS will be included.
"""
if only_django:
tables = connection.introspection.django_table_names(only_existing=True, include_views=False)
else:
tables = connection.introspection.table_names(include_views=False)
seqs = connection.introspection.sequence_list() if reset_sequences else ()
statements = connection.ops.sql_flush(style, tables, seqs, allow_cascade)
return statements
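# Hedged usage sketch (added for illustration, not part of the original module):
# how the statements returned by sql_flush are typically executed.  ``using``
# names a configured database alias; the style object and connection come from
# Django itself, everything else here is illustrative.
def _example_sql_flush(using='default'):
    from django.core.management.color import no_style
    from django.db import connections
    connection = connections[using]
    statements = sql_flush(no_style(), connection, allow_cascade=False)
    cursor = connection.cursor()
    for statement in statements:
        cursor.execute(statement)
    return statements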
def emit_pre_migrate_signal(verbosity, interactive, db, **kwargs):
# Emit the pre_migrate signal for every application.
for app_config in apps.get_app_configs():
if app_config.models_module is None:
continue
if verbosity >= 2:
print("Running pre-migrate handlers for application %s" % app_config.label)
models.signals.pre_migrate.send(
sender=app_config,
app_config=app_config,
verbosity=verbosity,
interactive=interactive,
using=db,
**kwargs
)
def emit_post_migrate_signal(verbosity, interactive, db, **kwargs):
# Emit the post_migrate signal for every application.
for app_config in apps.get_app_configs():
if app_config.models_module is None:
continue
if verbosity >= 2:
print("Running post-migrate handlers for application %s" % app_config.label)
models.signals.post_migrate.send(
sender=app_config,
app_config=app_config,
verbosity=verbosity,
interactive=interactive,
using=db,
**kwargs
)
| gpl-3.0 |
angela278/UPDream | lib/requests/packages/chardet/euckrfreq.py | 3121 | 45978 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials, including literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Ideal Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ratio = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
EUCKR_TABLE_SIZE = 2352
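# Hedged illustration (added, not part of the original table): the ratios quoted
# above follow directly from the sampling figures, e.g. with the 512 most
# frequent characters covering 98.653% of the sample:
#
#   ideal_ratio  = 0.98653 / (1 - 0.98653)   # ~= 73.24
#   random_ratio = 512.0 / (2350 - 512)      # ~= 0.279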
# Char to FreqOrder table
EUCKRCharToFreqOrder = (
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
#Everything below is of no interest for detection purpose
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
| apache-2.0 |
stonegithubs/odoo | addons/board/__openerp__.py | 261 | 1647 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Dashboards',
'version': '1.0',
'category': 'Hidden',
'description': """
Lets the user create a custom dashboard.
========================================
Allows users to create custom dashboards.
""",
'author': 'OpenERP SA',
'depends': ['base', 'web'],
'data': [
'security/ir.model.access.csv',
'board_view.xml',
'board_mydashboard_view.xml',
'views/board.xml',
],
'qweb': ['static/src/xml/*.xml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
olivierdalang/QGIS | tests/src/python/test_qgsrasterlayer.py | 4 | 63707 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsRasterLayer.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from builtins import str
__author__ = 'Tim Sutton'
__date__ = '20/08/2012'
__copyright__ = 'Copyright 2012, The QGIS Project'
import qgis # NOQA
import os
import filecmp
from shutil import copyfile
from qgis.PyQt.QtCore import QSize, QFileInfo, Qt, QTemporaryDir
from qgis.PyQt.QtGui import (
QColor,
QImage,
QPainter,
QResizeEvent
)
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsRaster,
QgsRasterLayer,
QgsReadWriteContext,
QgsColorRampShader,
QgsContrastEnhancement,
QgsDataProvider,
QgsProject,
QgsMapSettings,
QgsPointXY,
QgsRasterMinMaxOrigin,
QgsRasterShader,
QgsRasterTransparency,
QgsRenderChecker,
QgsPalettedRasterRenderer,
QgsSingleBandGrayRenderer,
QgsSingleBandPseudoColorRenderer,
QgsLimitedRandomColorRamp,
QgsGradientColorRamp,
QgsHueSaturationFilter,
QgsCoordinateTransformContext,
QgsCoordinateReferenceSystem,
QgsRasterHistogram,
QgsCubicRasterResampler,
QgsBilinearRasterResampler,
QgsLayerDefinition
)
from utilities import unitTestDataPath
from qgis.testing import start_app, unittest
from qgis.testing.mocked import get_iface
# Convenience instances in case you may need them
# not used in this test
start_app()
class TestQgsRasterLayer(unittest.TestCase):
def setUp(self):
self.iface = get_iface()
QgsProject.instance().removeAllMapLayers()
self.iface.mapCanvas().viewport().resize(400, 400)
# For some reason the resizeEvent is not delivered, fake it
self.iface.mapCanvas().resizeEvent(QResizeEvent(QSize(400, 400), self.iface.mapCanvas().size()))
def testIdentify(self):
myPath = os.path.join(unitTestDataPath(), 'landsat.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
myPoint = QgsPointXY(786690, 3345803)
# print 'Extents: %s' % myRasterLayer.extent().toString()
# myResult, myRasterValues = myRasterLayer.identify(myPoint)
# assert myResult
myRasterValues = myRasterLayer.dataProvider().identify(myPoint, QgsRaster.IdentifyFormatValue).results()
assert len(myRasterValues) > 0
# Get the name of the first band
myBand = list(myRasterValues.keys())[0]
        # myExpectedName = 'Band 1'
myExpectedBand = 1
myMessage = 'Expected "%s" got "%s" for first raster band name' % (
myExpectedBand, myBand)
assert myExpectedBand == myBand, myMessage
# Convert each band value to a list of ints then to a string
myValues = list(myRasterValues.values())
myIntValues = []
for myValue in myValues:
myIntValues.append(int(myValue))
myValues = str(myIntValues)
myExpectedValues = '[127, 141, 112, 72, 86, 126, 156, 211, 170]'
myMessage = 'Expected: %s\nGot: %s' % (myValues, myExpectedValues)
self.assertEqual(myValues, myExpectedValues, myMessage)
def testTransparency(self):
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
renderer = QgsSingleBandGrayRenderer(myRasterLayer.dataProvider(), 1)
myRasterLayer.setRenderer(renderer)
myRasterLayer.setContrastEnhancement(
QgsContrastEnhancement.StretchToMinimumMaximum,
QgsRasterMinMaxOrigin.MinMax)
myContrastEnhancement = myRasterLayer.renderer().contrastEnhancement()
# print ("myContrastEnhancement.minimumValue = %.17g" %
# myContrastEnhancement.minimumValue())
# print ("myContrastEnhancement.maximumValue = %.17g" %
# myContrastEnhancement.maximumValue())
# Unfortunately the minimum/maximum values calculated in C++ and Python
# are slightly different (e.g. 3.3999999521443642e+38 x
# 3.3999999521444001e+38)
# It is not clear where the precision is lost.
# We set the same values as C++.
myContrastEnhancement.setMinimumValue(-3.3319999287625854e+38)
myContrastEnhancement.setMaximumValue(3.3999999521443642e+38)
# myType = myRasterLayer.dataProvider().dataType(1);
# myEnhancement = QgsContrastEnhancement(myType);
myTransparentSingleValuePixelList = []
rasterTransparency = QgsRasterTransparency()
myTransparentPixel1 = \
QgsRasterTransparency.TransparentSingleValuePixel()
myTransparentPixel1.min = -2.5840000772112106e+38
myTransparentPixel1.max = -1.0879999684602689e+38
myTransparentPixel1.percentTransparent = 50
myTransparentSingleValuePixelList.append(myTransparentPixel1)
myTransparentPixel2 = \
QgsRasterTransparency.TransparentSingleValuePixel()
myTransparentPixel2.min = 1.359999960575336e+37
myTransparentPixel2.max = 9.520000231087593e+37
myTransparentPixel2.percentTransparent = 70
myTransparentSingleValuePixelList.append(myTransparentPixel2)
rasterTransparency.setTransparentSingleValuePixelList(
myTransparentSingleValuePixelList)
rasterRenderer = myRasterLayer.renderer()
assert rasterRenderer
rasterRenderer.setRasterTransparency(rasterTransparency)
QgsProject.instance().addMapLayers([myRasterLayer, ])
myMapSettings = QgsMapSettings()
myMapSettings.setLayers([myRasterLayer])
myMapSettings.setExtent(myRasterLayer.extent())
myChecker = QgsRenderChecker()
myChecker.setControlName("expected_raster_transparency")
myChecker.setMapSettings(myMapSettings)
myResultFlag = myChecker.runTest("raster_transparency_python")
assert myResultFlag, "Raster transparency rendering test failed"
def testIssue7023(self):
"""Check if converting a raster from 1.8 to 2 works."""
myPath = os.path.join(unitTestDataPath('raster'),
'raster-palette-crash2.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
# crash on next line
QgsProject.instance().addMapLayers([myRasterLayer])
def testShaderCrash(self):
"""Check if we assign a shader and then reassign it no crash occurs."""
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
myRasterShader = QgsRasterShader()
myColorRampShader = QgsColorRampShader()
myColorRampShader.setColorRampType(QgsColorRampShader.Interpolated)
myItems = []
myItem = QgsColorRampShader.ColorRampItem(
10, QColor('#ffff00'), 'foo')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(
100, QColor('#ff00ff'), 'bar')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(
1000, QColor('#00ff00'), 'kazam')
myItems.append(myItem)
myColorRampShader.setColorRampItemList(myItems)
myRasterShader.setRasterShaderFunction(myColorRampShader)
myPseudoRenderer = QgsSingleBandPseudoColorRenderer(
myRasterLayer.dataProvider(), 1, myRasterShader)
myRasterLayer.setRenderer(myPseudoRenderer)
return
# ####### works first time #############
myRasterShader = QgsRasterShader()
myColorRampShader = QgsColorRampShader()
myColorRampShader.setColorRampType(QgsColorRampShader.Interpolated)
myItems = []
myItem = QgsColorRampShader.ColorRampItem(10,
QColor('#ffff00'), 'foo')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(100,
QColor('#ff00ff'), 'bar')
myItems.append(myItem)
myItem = QgsColorRampShader.ColorRampItem(1000,
QColor('#00ff00'), 'kazam')
myItems.append(myItem)
myColorRampShader.setColorRampItemList(myItems)
myRasterShader.setRasterShaderFunction(myColorRampShader)
# ####### crash on next line (fixed now)##################
myPseudoRenderer = QgsSingleBandPseudoColorRenderer(
myRasterLayer.dataProvider(), 1, myRasterShader)
myRasterLayer.setRenderer(myPseudoRenderer)
def onRendererChanged(self):
self.rendererChanged = True
def test_setRenderer(self):
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
layer = QgsRasterLayer(myPath, myBaseName)
self.rendererChanged = False
layer.rendererChanged.connect(self.onRendererChanged)
rShader = QgsRasterShader()
r = QgsSingleBandPseudoColorRenderer(layer.dataProvider(), 1, rShader)
layer.setRenderer(r)
assert self.rendererChanged
assert layer.renderer() == r
def testQgsRasterMinMaxOrigin(self):
mmo = QgsRasterMinMaxOrigin()
mmo_default = QgsRasterMinMaxOrigin()
self.assertEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertEqual(mmo.limits(), QgsRasterMinMaxOrigin.None_)
mmo.setLimits(QgsRasterMinMaxOrigin.CumulativeCut)
self.assertEqual(mmo.limits(), QgsRasterMinMaxOrigin.CumulativeCut)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertEqual(mmo.extent(), QgsRasterMinMaxOrigin.WholeRaster)
mmo.setExtent(QgsRasterMinMaxOrigin.UpdatedCanvas)
self.assertEqual(mmo.extent(), QgsRasterMinMaxOrigin.UpdatedCanvas)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertEqual(mmo.statAccuracy(), QgsRasterMinMaxOrigin.Estimated)
mmo.setStatAccuracy(QgsRasterMinMaxOrigin.Exact)
self.assertEqual(mmo.statAccuracy(), QgsRasterMinMaxOrigin.Exact)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertAlmostEqual(mmo.cumulativeCutLower(), 0.02)
mmo.setCumulativeCutLower(0.1)
self.assertAlmostEqual(mmo.cumulativeCutLower(), 0.1)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertAlmostEqual(mmo.cumulativeCutUpper(), 0.98)
mmo.setCumulativeCutUpper(0.9)
self.assertAlmostEqual(mmo.cumulativeCutUpper(), 0.9)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
self.assertAlmostEqual(mmo.stdDevFactor(), 2.0)
mmo.setStdDevFactor(2.5)
self.assertAlmostEqual(mmo.stdDevFactor(), 2.5)
self.assertNotEqual(mmo, mmo_default)
mmo = QgsRasterMinMaxOrigin()
mmo.setLimits(QgsRasterMinMaxOrigin.CumulativeCut)
mmo.setExtent(QgsRasterMinMaxOrigin.UpdatedCanvas)
mmo.setStatAccuracy(QgsRasterMinMaxOrigin.Exact)
mmo.setCumulativeCutLower(0.1)
mmo.setCumulativeCutUpper(0.9)
mmo.setStdDevFactor(2.5)
doc = QDomDocument()
parentElem = doc.createElement("test")
mmo.writeXml(doc, parentElem)
mmoUnserialized = QgsRasterMinMaxOrigin()
mmoUnserialized.readXml(parentElem)
self.assertEqual(mmo, mmoUnserialized)
def testPaletted(self):
""" test paletted raster renderer with raster with color table"""
path = os.path.join(unitTestDataPath('raster'),
'with_color_table.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 1,
[QgsPalettedRasterRenderer.Class(1, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')])
self.assertEqual(renderer.nColors(), 2)
self.assertEqual(renderer.usesBands(), [1])
# test labels
self.assertEqual(renderer.label(1), 'class 2')
self.assertEqual(renderer.label(3), 'class 1')
self.assertFalse(renderer.label(101))
# test legend symbology - should be sorted by value
legend = renderer.legendSymbologyItems()
self.assertEqual(legend[0][0], 'class 2')
self.assertEqual(legend[1][0], 'class 1')
self.assertEqual(legend[0][1].name(), '#00ff00')
self.assertEqual(legend[1][1].name(), '#ff0000')
# test retrieving classes
classes = renderer.classes()
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[0].label, 'class 2')
self.assertEqual(classes[1].label, 'class 1')
self.assertEqual(classes[0].color.name(), '#00ff00')
self.assertEqual(classes[1].color.name(), '#ff0000')
# test set label
# bad index
renderer.setLabel(1212, 'bad')
renderer.setLabel(3, 'new class')
self.assertEqual(renderer.label(3), 'new class')
# color ramp
r = QgsLimitedRandomColorRamp(5)
renderer.setSourceColorRamp(r)
self.assertEqual(renderer.sourceColorRamp().type(), 'random')
self.assertEqual(renderer.sourceColorRamp().count(), 5)
# clone
new_renderer = renderer.clone()
classes = new_renderer.classes()
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[0].label, 'class 2')
self.assertEqual(classes[1].label, 'new class')
self.assertEqual(classes[0].color.name(), '#00ff00')
self.assertEqual(classes[1].color.name(), '#ff0000')
self.assertEqual(new_renderer.sourceColorRamp().type(), 'random')
self.assertEqual(new_renderer.sourceColorRamp().count(), 5)
# write to xml and read
doc = QDomDocument('testdoc')
elem = doc.createElement('qgis')
renderer.writeXml(doc, elem)
restored = QgsPalettedRasterRenderer.create(elem.firstChild().toElement(), layer.dataProvider())
self.assertTrue(restored)
self.assertEqual(restored.usesBands(), [1])
classes = restored.classes()
self.assertTrue(classes)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[0].label, 'class 2')
self.assertEqual(classes[1].label, 'new class')
self.assertEqual(classes[0].color.name(), '#00ff00')
self.assertEqual(classes[1].color.name(), '#ff0000')
self.assertEqual(restored.sourceColorRamp().type(), 'random')
self.assertEqual(restored.sourceColorRamp().count(), 5)
# render test
layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_paletted_renderer")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_paletted_renderer"), "Paletted rendering test failed")
def testPalettedBand(self):
""" test paletted raster render band"""
path = os.path.join(unitTestDataPath(),
'landsat_4326.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 2,
[QgsPalettedRasterRenderer.Class(137, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(138, QColor(255, 0, 0), 'class 1'),
QgsPalettedRasterRenderer.Class(139, QColor(0, 0, 255), 'class 1')])
layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_paletted_renderer_band2")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_paletted_renderer_band2"), "Paletted rendering test failed")
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 3,
[QgsPalettedRasterRenderer.Class(120, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(123, QColor(255, 0, 0), 'class 1'),
QgsPalettedRasterRenderer.Class(124, QColor(0, 0, 255), 'class 1')])
layer.setRenderer(renderer)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_paletted_renderer_band3")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_paletted_renderer_band3"), "Paletted rendering test failed")
def testBrightnessContrastGamma(self):
""" test raster brightness/contrast/gamma filter"""
path = os.path.join(unitTestDataPath(),
'landsat_4326.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
layer.brightnessFilter().setContrast(100)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_contrast100")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_contrast100"), "Contrast (c = 100) rendering test failed")
layer.brightnessFilter().setContrast(-30)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_contrast30")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_contrast30"), "Contrast (c = -30) rendering test failed")
layer.brightnessFilter().setContrast(0)
layer.brightnessFilter().setBrightness(50)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_brightness50")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_brightness50"), "Brightness (b = 50) rendering test failed")
layer.brightnessFilter().setBrightness(-20)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_brightness20")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_brightness20"), "Brightness (b = -20) rendering test failed")
path = os.path.join(unitTestDataPath(),
'landsat-int16-b1.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
layer.brightnessFilter().setGamma(0.22)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_gamma022")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_gamma022"), "Gamma correction (gamma = 0.22) rendering test failed")
layer.brightnessFilter().setGamma(2.22)
ms = QgsMapSettings()
ms.setLayers([layer])
ms.setExtent(layer.extent())
checker = QgsRenderChecker()
checker.setControlName("expected_raster_gamma222")
checker.setMapSettings(ms)
self.assertTrue(checker.runTest("expected_raster_gamma222"), "Gamma correction (gamma = 2.22) rendering test failed")
def testPalettedColorTableToClassData(self):
entries = [QgsColorRampShader.ColorRampItem(5, QColor(255, 0, 0), 'item1'),
QgsColorRampShader.ColorRampItem(3, QColor(0, 255, 0), 'item2'),
QgsColorRampShader.ColorRampItem(6, QColor(0, 0, 255), 'item3'),
]
classes = QgsPalettedRasterRenderer.colorTableToClassData(entries)
self.assertEqual(classes[0].value, 5)
self.assertEqual(classes[1].value, 3)
self.assertEqual(classes[2].value, 6)
self.assertEqual(classes[0].label, 'item1')
self.assertEqual(classes[1].label, 'item2')
self.assertEqual(classes[2].label, 'item3')
self.assertEqual(classes[0].color.name(), '#ff0000')
self.assertEqual(classes[1].color.name(), '#00ff00')
self.assertEqual(classes[2].color.name(), '#0000ff')
# test #13263
path = os.path.join(unitTestDataPath('raster'),
'hub13263.vrt')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
classes = QgsPalettedRasterRenderer.colorTableToClassData(layer.dataProvider().colorTable(1))
self.assertEqual(len(classes), 4)
classes = QgsPalettedRasterRenderer.colorTableToClassData(layer.dataProvider().colorTable(15))
self.assertEqual(len(classes), 256)
def testLoadPalettedColorDataFromString(self):
"""
Test interpreting a bunch of color data format strings
"""
esri_clr_format = '1 255 255 0\n2 64 0 128\n3 255 32 32\n4 0 255 0\n5 0 0 255'
esri_clr_format_win = '1 255 255 0\r\n2 64 0 128\r\n3 255 32 32\r\n4 0 255 0\r\n5 0 0 255'
esri_clr_format_tab = '1\t255\t255\t0\n2\t64\t0\t128\n3\t255\t32\t32\n4\t0\t255\t0\n5\t0\t0\t255'
esri_clr_spaces = '1 255 255 0\n2 64 0 128\n3 255 32 32\n4 0 255 0\n5 0 0 255'
gdal_clr_comma = '1,255,255,0\n2,64,0,128\n3,255,32,32\n4,0,255,0\n5,0,0,255'
gdal_clr_colon = '1:255:255:0\n2:64:0:128\n3:255:32:32\n4:0:255:0\n5:0:0:255'
for f in [esri_clr_format,
esri_clr_format_win,
esri_clr_format_tab,
esri_clr_spaces,
gdal_clr_comma,
gdal_clr_colon]:
classes = QgsPalettedRasterRenderer.classDataFromString(f)
self.assertEqual(len(classes), 5)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].color.name(), '#ffff00')
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].color.name(), '#400080')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#ff2020')
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].color.name(), '#00ff00')
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].color.name(), '#0000ff')
grass_named_colors = '0 white\n1 yellow\n3 black\n6 blue\n9 magenta\n11 aqua\n13 grey\n14 gray\n15 orange\n19 brown\n21 purple\n22 violet\n24 indigo\n90 green\n180 cyan\n270 red\n'
classes = QgsPalettedRasterRenderer.classDataFromString(grass_named_colors)
self.assertEqual(len(classes), 16)
self.assertEqual(classes[0].value, 0)
self.assertEqual(classes[0].color.name(), '#ffffff')
self.assertEqual(classes[1].value, 1)
self.assertEqual(classes[1].color.name(), '#ffff00')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#000000')
self.assertEqual(classes[3].value, 6)
self.assertEqual(classes[3].color.name(), '#0000ff')
self.assertEqual(classes[4].value, 9)
self.assertEqual(classes[4].color.name(), '#ff00ff')
self.assertEqual(classes[5].value, 11)
self.assertEqual(classes[5].color.name(), '#00ffff')
self.assertEqual(classes[6].value, 13)
self.assertEqual(classes[6].color.name(), '#808080')
self.assertEqual(classes[7].value, 14)
self.assertEqual(classes[7].color.name(), '#808080')
self.assertEqual(classes[8].value, 15)
self.assertEqual(classes[8].color.name(), '#ffa500')
self.assertEqual(classes[9].value, 19)
self.assertEqual(classes[9].color.name(), '#a52a2a')
self.assertEqual(classes[10].value, 21)
self.assertEqual(classes[10].color.name(), '#800080')
self.assertEqual(classes[11].value, 22)
self.assertEqual(classes[11].color.name(), '#ee82ee')
self.assertEqual(classes[12].value, 24)
self.assertEqual(classes[12].color.name(), '#4b0082')
self.assertEqual(classes[13].value, 90)
self.assertEqual(classes[13].color.name(), '#008000')
self.assertEqual(classes[14].value, 180)
self.assertEqual(classes[14].color.name(), '#00ffff')
self.assertEqual(classes[15].value, 270)
self.assertEqual(classes[15].color.name(), '#ff0000')
gdal_alpha = '1:255:255:0:0\n2:64:0:128:50\n3:255:32:32:122\n4:0:255:0:200\n5:0:0:255:255'
classes = QgsPalettedRasterRenderer.classDataFromString(gdal_alpha)
self.assertEqual(len(classes), 5)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].color.name(), '#ffff00')
self.assertEqual(classes[0].color.alpha(), 0)
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].color.name(), '#400080')
self.assertEqual(classes[1].color.alpha(), 50)
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#ff2020')
self.assertEqual(classes[2].color.alpha(), 122)
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].color.name(), '#00ff00')
self.assertEqual(classes[3].color.alpha(), 200)
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].color.name(), '#0000ff')
self.assertEqual(classes[4].color.alpha(), 255)
# qgis style, with labels
qgis = '3 255 0 0 255 class 1\n4 0 255 0 200 class 2'
classes = QgsPalettedRasterRenderer.classDataFromString(qgis)
self.assertEqual(len(classes), 2)
self.assertEqual(classes[0].value, 3)
self.assertEqual(classes[0].color.name(), '#ff0000')
self.assertEqual(classes[0].color.alpha(), 255)
self.assertEqual(classes[0].label, 'class 1')
self.assertEqual(classes[1].value, 4)
self.assertEqual(classes[1].color.name(), '#00ff00')
self.assertEqual(classes[1].color.alpha(), 200)
self.assertEqual(classes[1].label, 'class 2')
# some bad inputs
bad = ''
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 0)
bad = '\n\n\n'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 0)
bad = 'x x x x'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 0)
bad = '1 255 0 0\n2 255 255\n3 255 0 255'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 2)
bad = '1 255 a 0'
classes = QgsPalettedRasterRenderer.classDataFromString(bad)
self.assertEqual(len(classes), 1)
def testLoadPalettedClassDataFromFile(self):
# bad file
classes = QgsPalettedRasterRenderer.classDataFromFile('ajdhjashjkdh kjahjkdhk')
self.assertEqual(len(classes), 0)
# good file!
path = os.path.join(unitTestDataPath('raster'),
'test.clr')
classes = QgsPalettedRasterRenderer.classDataFromFile(path)
self.assertEqual(len(classes), 10)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].color.name(), '#000000')
self.assertEqual(classes[0].color.alpha(), 255)
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].color.name(), '#c8c8c8')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].color.name(), '#006e00')
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].color.name(), '#6e4100')
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].color.name(), '#0000ff')
self.assertEqual(classes[4].color.alpha(), 255)
self.assertEqual(classes[5].value, 6)
self.assertEqual(classes[5].color.name(), '#0059ff')
self.assertEqual(classes[6].value, 7)
self.assertEqual(classes[6].color.name(), '#00aeff')
self.assertEqual(classes[7].value, 8)
self.assertEqual(classes[7].color.name(), '#00fff6')
self.assertEqual(classes[8].value, 9)
self.assertEqual(classes[8].color.name(), '#eeff00')
self.assertEqual(classes[9].value, 10)
self.assertEqual(classes[9].color.name(), '#ffb600')
def testPalettedClassDataToString(self):
classes = [QgsPalettedRasterRenderer.Class(1, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')]
self.assertEqual(QgsPalettedRasterRenderer.classDataToString(classes),
'1 0 255 0 255 class 2\n3 255 0 0 255 class 1')
# must be sorted by value to work OK in ArcMap
classes = [QgsPalettedRasterRenderer.Class(4, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')]
self.assertEqual(QgsPalettedRasterRenderer.classDataToString(classes),
'3 255 0 0 255 class 1\n4 0 255 0 255 class 2')
def testPalettedClassDataFromLayer(self):
# no layer
classes = QgsPalettedRasterRenderer.classDataFromRaster(None, 1)
self.assertFalse(classes)
# 10 class layer
path = os.path.join(unitTestDataPath('raster'),
'with_color_table.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer10 = QgsRasterLayer(path, base_name)
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 1)
self.assertEqual(len(classes), 10)
self.assertEqual(classes[0].value, 1)
self.assertEqual(classes[0].label, '1')
self.assertEqual(classes[1].value, 2)
self.assertEqual(classes[1].label, '2')
self.assertEqual(classes[2].value, 3)
self.assertEqual(classes[2].label, '3')
self.assertEqual(classes[3].value, 4)
self.assertEqual(classes[3].label, '4')
self.assertEqual(classes[4].value, 5)
self.assertEqual(classes[4].label, '5')
self.assertEqual(classes[5].value, 6)
self.assertEqual(classes[5].label, '6')
self.assertEqual(classes[6].value, 7)
self.assertEqual(classes[6].label, '7')
self.assertEqual(classes[7].value, 8)
self.assertEqual(classes[7].label, '8')
self.assertEqual(classes[8].value, 9)
self.assertEqual(classes[8].label, '9')
self.assertEqual(classes[9].value, 10)
self.assertEqual(classes[9].label, '10')
# bad band
self.assertFalse(QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 10101010))
# with ramp
r = QgsGradientColorRamp(QColor(200, 0, 0, 100), QColor(0, 200, 0, 200))
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 1, r)
self.assertEqual(len(classes), 10)
self.assertEqual(classes[0].color.name(), '#c80000')
self.assertEqual(classes[1].color.name(), '#b21600')
self.assertEqual(classes[2].color.name(), '#9c2c00')
self.assertEqual(classes[3].color.name(), '#854200')
self.assertEqual(classes[4].color.name(), '#6f5900')
self.assertEqual(classes[5].color.name(), '#596f00')
self.assertEqual(classes[6].color.name(), '#428500')
self.assertEqual(classes[7].color.name(), '#2c9c00')
self.assertEqual(classes[8].color.name(), '#16b200')
self.assertEqual(classes[9].color.name(), '#00c800')
# 30 class layer
path = os.path.join(unitTestDataPath('raster'),
'unique_1.tif')
info = QFileInfo(path)
base_name = info.baseName()
layer10 = QgsRasterLayer(path, base_name)
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer10.dataProvider(), 1)
self.assertEqual(len(classes), 30)
expected = [11, 21, 22, 24, 31, 82, 2002, 2004, 2014, 2019, 2027, 2029, 2030, 2080, 2081, 2082, 2088, 2092,
2097, 2098, 2099, 2105, 2108, 2110, 2114, 2118, 2126, 2152, 2184, 2220]
self.assertEqual([c.value for c in classes], expected)
# bad layer
path = os.path.join(unitTestDataPath('raster'),
'hub13263.vrt')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
classes = QgsPalettedRasterRenderer.classDataFromRaster(layer.dataProvider(), 1)
self.assertFalse(classes)
def testPalettedRendererWithNegativeColorValue(self):
""" test paletted raster renderer with negative values in color table"""
path = os.path.join(unitTestDataPath('raster'),
'hub13263.vrt')
info = QFileInfo(path)
base_name = info.baseName()
layer = QgsRasterLayer(path, base_name)
self.assertTrue(layer.isValid(), 'Raster not loaded: {}'.format(path))
renderer = QgsPalettedRasterRenderer(layer.dataProvider(), 1,
[QgsPalettedRasterRenderer.Class(-1, QColor(0, 255, 0), 'class 2'),
QgsPalettedRasterRenderer.Class(3, QColor(255, 0, 0), 'class 1')])
self.assertEqual(renderer.nColors(), 2)
self.assertEqual(renderer.usesBands(), [1])
def testClone(self):
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
layer = QgsRasterLayer(myPath, myBaseName)
renderer = layer.renderer().clone()
renderer.setOpacity(33.3)
layer.setRenderer(renderer)
# clone layer
clone = layer.clone()
# generate xml from layer
layer_doc = QDomDocument("doc")
layer_elem = layer_doc.createElement("maplayer")
layer.writeLayerXml(layer_elem, layer_doc, QgsReadWriteContext())
# generate xml from clone
clone_doc = QDomDocument("doc")
clone_elem = clone_doc.createElement("maplayer")
clone.writeLayerXml(clone_elem, clone_doc, QgsReadWriteContext())
# replace id within xml of clone
clone_id_elem = clone_elem.firstChildElement("id")
clone_id_elem_patch = clone_doc.createElement("id")
clone_id_elem_patch_value = clone_doc.createTextNode(layer.id())
clone_id_elem_patch.appendChild(clone_id_elem_patch_value)
clone_elem.replaceChild(clone_id_elem_patch, clone_id_elem)
# update doc
clone_doc.appendChild(clone_elem)
layer_doc.appendChild(layer_elem)
# compare xml documents
self.assertEqual(layer_doc.toString(), clone_doc.toString())
def testSetDataSource(self):
"""Test change data source"""
temp_dir = QTemporaryDir()
options = QgsDataProvider.ProviderOptions()
myPath = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
layer = QgsRasterLayer(myPath, myBaseName)
renderer = QgsSingleBandGrayRenderer(layer.dataProvider(), 2)
image = layer.previewAsImage(QSize(400, 400))
self.assertFalse(image.isNull())
self.assertTrue(image.save(os.path.join(temp_dir.path(), 'expected.png'), "PNG"))
layer.setDataSource(myPath.replace('4326.tif', '4326-BAD_SOURCE.tif'), 'bad_layer', 'gdal', options)
self.assertFalse(layer.isValid())
image = layer.previewAsImage(QSize(400, 400))
self.assertTrue(image.isNull())
layer.setDataSource(myPath.replace('4326-BAD_SOURCE.tif', '4326.tif'), 'bad_layer', 'gdal', options)
self.assertTrue(layer.isValid())
image = layer.previewAsImage(QSize(400, 400))
self.assertFalse(image.isNull())
self.assertTrue(image.save(os.path.join(temp_dir.path(), 'actual.png'), "PNG"))
        self.assertTrue(
            filecmp.cmp(os.path.join(temp_dir.path(), 'actual.png'), os.path.join(temp_dir.path(), 'expected.png'),
                        shallow=False))
def testWriteSld(self):
"""Test SLD generation for the XMLS fields geneerated at RasterLayer level and not to the deeper renderer level."""
myPath = os.path.join(unitTestDataPath(), 'landsat.tif')
myFileInfo = QFileInfo(myPath)
myBaseName = myFileInfo.baseName()
myRasterLayer = QgsRasterLayer(myPath, myBaseName)
myMessage = 'Raster not loaded: %s' % myPath
assert myRasterLayer.isValid(), myMessage
# do generic export with default layer values
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = root.elementsByTagName('sld:LayerFeatureConstraints')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = element.elementsByTagName('sld:FeatureTypeConstraint')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = root.elementsByTagName('sld:UserStyle')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
name = element.firstChildElement('sld:Name')
self.assertFalse(name.isNull())
self.assertEqual(name.text(), 'landsat')
abstract = element.firstChildElement('sld:Abstract')
self.assertTrue(abstract.isNull())
title = element.firstChildElement('sld:Title')
self.assertTrue(title.isNull())
featureTypeStyle = element.firstChildElement('sld:FeatureTypeStyle')
self.assertFalse(featureTypeStyle.isNull())
rule = featureTypeStyle.firstChildElement('sld:Rule')
self.assertFalse(rule.isNull())
temp = rule.firstChildElement('sld:MinScaleDenominator')
self.assertTrue(temp.isNull())
temp = rule.firstChildElement('sld:MaxScaleDenominator')
self.assertTrue(temp.isNull())
rasterSymbolizer = rule.firstChildElement('sld:RasterSymbolizer')
        self.assertFalse(rasterSymbolizer.isNull())
vendorOptions = rasterSymbolizer.elementsByTagName('sld:VendorOption')
self.assertTrue(vendorOptions.size() == 0)
# set no default values and check exported sld
myRasterLayer.setName('')
myRasterLayer.setAbstract('fake')
myRasterLayer.setTitle('fake')
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = root.elementsByTagName('sld:LayerFeatureConstraints')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = element.elementsByTagName('sld:FeatureTypeConstraint')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
elements = root.elementsByTagName('sld:UserStyle')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
# no generated if empty
name = element.firstChildElement('sld:Name')
self.assertTrue(name.isNull())
# generated if not empty
abstract = element.firstChildElement('sld:Abstract')
self.assertFalse(abstract.isNull())
self.assertEqual(abstract.text(), 'fake')
title = element.firstChildElement('sld:Title')
self.assertFalse(title.isNull())
self.assertEqual(title.text(), 'fake')
# if setScaleBasedVisibility is true print scales
myRasterLayer.setScaleBasedVisibility(True)
myRasterLayer.setMaximumScale(0.0001)
myRasterLayer.setMinimumScale(0.01)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:Rule')
self.assertEqual(len(elements), 1)
rule = elements.at(0).toElement()
self.assertFalse(rule.isNull())
temp = rule.firstChildElement('sld:MinScaleDenominator')
self.assertFalse(temp.isNull())
self.assertEqual(temp.text(), '0.0001')
temp = rule.firstChildElement('sld:MaxScaleDenominator')
self.assertFalse(temp.isNull())
self.assertEqual(temp.text(), '0.01')
# check non default hueSaturationFilter values
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleLightness)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', 'lightness')
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleLuminosity)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', 'luminosity')
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleAverage)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', 'average')
hue = myRasterLayer.hueSaturationFilter()
hue.setGrayscaleMode(QgsHueSaturationFilter.GrayscaleOff)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'grayScale', None)
# manage colorize vendorOption tags
hue = myRasterLayer.hueSaturationFilter()
hue.setColorizeOn(True)
hue.setColorizeStrength(50)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'colorizeOn', '1')
self.assertVendorOption(element, 'colorizeRed', '255')
self.assertVendorOption(element, 'colorizeGreen', '128')
self.assertVendorOption(element, 'colorizeBlue', '128')
self.assertVendorOption(element, 'colorizeStrength', '0.5')
self.assertVendorOption(element, 'saturation', '0.498039')
# other hue non default values, no colorize and saturation = 0
hue = myRasterLayer.hueSaturationFilter()
hue.setColorizeOn(False)
hue.setSaturation(0)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'colorizeOn', None)
self.assertVendorOption(element, 'colorizeRed', None)
self.assertVendorOption(element, 'colorizeGreen', None)
self.assertVendorOption(element, 'colorizeBlue', None)
self.assertVendorOption(element, 'colorizeStrength', None)
self.assertVendorOption(element, 'saturation', None)
self.assertVendorOption(element, 'brightness', None)
self.assertVendorOption(element, 'contrast', None)
# other hue non default values, no colorize and saturation = 100
hue = myRasterLayer.hueSaturationFilter()
hue.setColorizeOn(False)
hue.setSaturation(100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'colorizeOn', None)
self.assertVendorOption(element, 'colorizeRed', None)
self.assertVendorOption(element, 'colorizeGreen', None)
self.assertVendorOption(element, 'colorizeBlue', None)
self.assertVendorOption(element, 'colorizeStrength', None)
self.assertVendorOption(element, 'saturation', '1')
hue.setSaturation(-100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
self.assertVendorOption(root, 'saturation', '0')
# brightness filter default values
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertTrue(myRasterLayer.brightnessFilter().brightness() == 0)
self.assertTrue(myRasterLayer.brightnessFilter().contrast() == 0)
self.assertVendorOption(element, 'brightness', None)
self.assertVendorOption(element, 'contrast', None)
# brightness filter no default values
bf = myRasterLayer.brightnessFilter()
bf.setBrightness(-255)
bf.setContrast(-100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'brightness', '0')
self.assertVendorOption(element, 'contrast', '0')
bf.setBrightness(255)
bf.setContrast(100)
dom, root, errorMessage = self.layerToSld(myRasterLayer)
elements = dom.elementsByTagName('sld:RasterSymbolizer')
self.assertEqual(len(elements), 1)
element = elements.at(0).toElement()
self.assertFalse(element.isNull())
self.assertVendorOption(element, 'brightness', '1')
self.assertVendorOption(element, 'contrast', '1')
def assertVendorOption(self, root, name, expectedValue):
"""Set expectedValue=None to check that the vendor option is not present."""
vendorOptions = root.elementsByTagName('sld:VendorOption')
found = False
for vendorOptionIndex in range(vendorOptions.count()):
vendorOption = vendorOptions.at(vendorOptionIndex)
self.assertEqual('sld:VendorOption', vendorOption.nodeName())
if (vendorOption.attributes().namedItem('name').nodeValue() == name):
found = True
self.assertEqual(vendorOption.firstChild().nodeValue(), expectedValue)
if (expectedValue is None) and found:
self.fail("found VendorOption: {} where supposed not present".format(name))
if expectedValue and not found:
self.fail("Not found VendorOption: {}".format(name))
def layerToSld(self, layer, properties={}):
dom = QDomDocument()
root = dom.createElement("FakeRoot")
dom.appendChild(root)
errorMessage = ''
layer.writeSld(root, dom, errorMessage, properties)
return dom, root, errorMessage
def testHistogram(self):
"""Test histogram bindings regression GH #29700"""
l = QgsRasterLayer(unitTestDataPath('raster/landcover.img'), 'landcover')
self.assertTrue(l.isValid())
p = l.dataProvider()
# Note that this is not a correct use of the API: there is no
# need to call initHistogram(): it is called internally
# from p.histogram()
p.initHistogram(QgsRasterHistogram(), 1, 100)
h = p.histogram(1)
        self.assertEqual(len(h.histogramVector), 100)
        # Check it twice because it crashed in some circumstances with the old implementation
        self.assertEqual(len(h.histogramVector), 100)
def testInvalidLayerStyleRestoration(self):
"""
Test that styles are correctly restored from invalid layers
"""
source_path = os.path.join(unitTestDataPath('raster'),
'band1_float32_noct_epsg4326.tif')
# copy to temp path
tmp_dir = QTemporaryDir()
tmp_path = os.path.join(tmp_dir.path(), 'test_raster.tif')
copyfile(source_path, tmp_path)
rl = QgsRasterLayer(tmp_path, 'test_raster', 'gdal')
self.assertTrue(rl.isValid())
renderer = QgsSingleBandPseudoColorRenderer(rl.dataProvider(), 1)
color_ramp = QgsGradientColorRamp(QColor(255, 255, 0), QColor(0, 0, 255))
renderer.setClassificationMin(101)
renderer.setClassificationMax(131)
renderer.createShader(color_ramp)
renderer.setOpacity(0.6)
rl.setRenderer(renderer)
rl.resampleFilter().setZoomedInResampler(QgsCubicRasterResampler())
rl.resampleFilter().setZoomedOutResampler(QgsBilinearRasterResampler())
p = QgsProject()
p.addMapLayer(rl)
project_path = os.path.join(tmp_dir.path(), 'test_project.qgs')
self.assertTrue(p.write(project_path))
# simple case, layer still exists in same path
p2 = QgsProject()
self.assertTrue(p2.read(project_path))
self.assertEqual(len(p2.mapLayers()), 1)
rl2 = list(p2.mapLayers().values())[0]
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.6)
# now, remove raster
os.remove(tmp_path)
# reload project
p2 = QgsProject()
self.assertTrue(p2.read(project_path))
self.assertEqual(len(p2.mapLayers()), 1)
rl2 = list(p2.mapLayers().values())[0]
self.assertFalse(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# invalid layers should still have renderer available
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.6)
# make a little change
rl2.renderer().setOpacity(0.8)
# now, fix path
rl2.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# at this stage, the original style should be retained...
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
# the opacity change (and other renderer changes made while the layer was invalid) should be retained
self.assertEqual(rl2.renderer().opacity(), 0.8)
# break path
rl2.setDataSource(tmp_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
# and restore
rl2.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# at this stage, the original style should be recreated...
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.8)
# break again
rl2.setDataSource(tmp_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
# export via qlr, with broken path (but hopefully correct style)
doc = QgsLayerDefinition.exportLayerDefinitionLayers([rl2], QgsReadWriteContext())
layers = QgsLayerDefinition.loadLayerDefinitionLayers(doc, QgsReadWriteContext())
self.assertEqual(len(layers), 1)
rl2 = layers[0]
self.assertFalse(rl2.isValid())
# fix path
rl2.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertTrue(rl2.isValid())
self.assertEqual(rl2.name(), 'test_raster')
# at this stage, the original style should be recreated...
self.assertIsInstance(rl2.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl2.renderer().classificationMin(), 101)
self.assertEqual(rl2.renderer().classificationMax(), 131)
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl2.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl2.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl2.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl2.renderer().opacity(), 0.8)
# another test
rl = QgsRasterLayer(source_path, 'test_raster', 'gdal')
self.assertTrue(rl.isValid())
renderer = QgsSingleBandPseudoColorRenderer(rl.dataProvider(), 1)
color_ramp = QgsGradientColorRamp(QColor(255, 255, 0), QColor(0, 0, 255))
renderer.setClassificationMin(101)
renderer.setClassificationMax(131)
renderer.createShader(color_ramp)
renderer.setOpacity(0.6)
rl.setRenderer(renderer)
rl.resampleFilter().setZoomedInResampler(QgsCubicRasterResampler())
rl.resampleFilter().setZoomedOutResampler(QgsBilinearRasterResampler())
# break path
rl.setDataSource(tmp_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
# fix path
rl.setDataSource(source_path, 'test_raster', 'gdal', QgsDataProvider.ProviderOptions())
self.assertIsInstance(rl.renderer(), QgsSingleBandPseudoColorRenderer)
self.assertEqual(rl.renderer().classificationMin(), 101)
self.assertEqual(rl.renderer().classificationMax(), 131)
self.assertEqual(rl.renderer().shader().rasterShaderFunction().sourceColorRamp().color1().name(), '#ffff00')
self.assertEqual(rl.renderer().shader().rasterShaderFunction().sourceColorRamp().color2().name(), '#0000ff')
self.assertIsInstance(rl.resampleFilter().zoomedInResampler(), QgsCubicRasterResampler)
self.assertIsInstance(rl.resampleFilter().zoomedOutResampler(), QgsBilinearRasterResampler)
self.assertEqual(rl.renderer().opacity(), 0.6)
class TestQgsRasterLayerTransformContext(unittest.TestCase):
def setUp(self):
"""Prepare tc"""
super(TestQgsRasterLayerTransformContext, self).setUp()
self.ctx = QgsCoordinateTransformContext()
self.ctx.addSourceDestinationDatumTransform(QgsCoordinateReferenceSystem('EPSG:4326'),
QgsCoordinateReferenceSystem('EPSG:3857'), 1234, 1235)
self.ctx.addCoordinateOperation(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857'), 'test')
self.rpath = os.path.join(unitTestDataPath(), 'landsat.tif')
def testTransformContextIsSetInCtor(self):
"""Test transform context can be set from ctor"""
rl = QgsRasterLayer(self.rpath, 'raster')
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
options = QgsRasterLayer.LayerOptions(transformContext=self.ctx)
rl = QgsRasterLayer(self.rpath, 'raster', 'gdal', options)
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
def testTransformContextInheritsFromProject(self):
"""Test that when a layer is added to a project it inherits its context"""
rl = QgsRasterLayer(self.rpath, 'raster')
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p = QgsProject()
self.assertFalse(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.setTransformContext(self.ctx)
self.assertTrue(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.addMapLayers([rl])
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
def testTransformContextIsSyncedFromProject(self):
"""Test that when a layer is synced when project context changes"""
rl = QgsRasterLayer(self.rpath, 'raster')
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p = QgsProject()
self.assertFalse(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.setTransformContext(self.ctx)
self.assertTrue(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.addMapLayers([rl])
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
# Now change the project context
tc2 = QgsCoordinateTransformContext()
p.setTransformContext(tc2)
self.assertFalse(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
self.assertFalse(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
p.setTransformContext(self.ctx)
self.assertTrue(
p.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
self.assertTrue(
rl.transformContext().hasTransform(QgsCoordinateReferenceSystem('EPSG:4326'), QgsCoordinateReferenceSystem('EPSG:3857')))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
aristotle-tek/cuny-bdif | AWS/ec2/lib/boto-2.34.0/boto/ec2/cloudwatch/metric.py | 94 | 7491 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.ec2.cloudwatch.alarm import MetricAlarm
from boto.ec2.cloudwatch.dimension import Dimension
class Metric(object):
Statistics = ['Minimum', 'Maximum', 'Sum', 'Average', 'SampleCount']
Units = ['Seconds', 'Microseconds', 'Milliseconds', 'Bytes', 'Kilobytes',
'Megabytes', 'Gigabytes', 'Terabytes', 'Bits', 'Kilobits',
'Megabits', 'Gigabits', 'Terabits', 'Percent', 'Count',
'Bytes/Second', 'Kilobytes/Second', 'Megabytes/Second',
'Gigabytes/Second', 'Terabytes/Second', 'Bits/Second',
'Kilobits/Second', 'Megabits/Second', 'Gigabits/Second',
'Terabits/Second', 'Count/Second', None]
def __init__(self, connection=None):
self.connection = connection
self.name = None
self.namespace = None
self.dimensions = None
def __repr__(self):
return 'Metric:%s' % self.name
def startElement(self, name, attrs, connection):
if name == 'Dimensions':
self.dimensions = Dimension()
return self.dimensions
def endElement(self, name, value, connection):
if name == 'MetricName':
self.name = value
elif name == 'Namespace':
self.namespace = value
else:
setattr(self, name, value)
def query(self, start_time, end_time, statistics, unit=None, period=60):
"""
:type start_time: datetime
:param start_time: The time stamp to use for determining the
first datapoint to return. The value specified is
inclusive; results include datapoints with the time stamp
specified.
:type end_time: datetime
:param end_time: The time stamp to use for determining the
last datapoint to return. The value specified is
exclusive; results will include datapoints up to the time
stamp specified.
:type statistics: list
        :param statistics: A list of statistic names. Valid values:
Average | Sum | SampleCount | Maximum | Minimum
:type unit: string
        :param unit: The unit for the metric. Valid values are:
Seconds | Microseconds | Milliseconds | Bytes | Kilobytes |
Megabytes | Gigabytes | Terabytes | Bits | Kilobits |
Megabits | Gigabits | Terabits | Percent | Count |
Bytes/Second | Kilobytes/Second | Megabytes/Second |
Gigabytes/Second | Terabytes/Second | Bits/Second |
Kilobits/Second | Megabits/Second | Gigabits/Second |
Terabits/Second | Count/Second | None
:type period: integer
:param period: The granularity, in seconds, of the returned datapoints.
Period must be at least 60 seconds and must be a multiple
of 60. The default value is 60.
"""
if not isinstance(statistics, list):
statistics = [statistics]
return self.connection.get_metric_statistics(period,
start_time,
end_time,
self.name,
self.namespace,
statistics,
self.dimensions,
unit)
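    # Usage sketch (illustrative only; `metric`, `start` and `end` are
    # hypothetical names): given a Metric returned by
    # CloudWatchConnection.list_metrics() and two datetime objects, a query
    # for 5-minute averages might look like
    #
    #     datapoints = metric.query(start, end, ['Average'],
    #                               unit='Percent', period=300)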
def create_alarm(self, name, comparison, threshold,
period, evaluation_periods,
statistic, enabled=True, description=None,
dimensions=None, alarm_actions=None, ok_actions=None,
insufficient_data_actions=None, unit=None):
"""
Creates or updates an alarm and associates it with this metric.
Optionally, this operation can associate one or more
Amazon Simple Notification Service resources with the alarm.
When this operation creates an alarm, the alarm state is immediately
set to INSUFFICIENT_DATA. The alarm is evaluated and its StateValue is
        set appropriately. Any actions associated with the StateValue are then
executed.
When updating an existing alarm, its StateValue is left unchanged.
        :rtype: boto.ec2.cloudwatch.alarm.MetricAlarm
        :return: The MetricAlarm object that was created or updated.
"""
if not dimensions:
dimensions = self.dimensions
alarm = MetricAlarm(self.connection, name, self.name,
self.namespace, statistic, comparison,
threshold, period, evaluation_periods,
unit, description, dimensions,
alarm_actions, insufficient_data_actions,
ok_actions)
if self.connection.put_metric_alarm(alarm):
return alarm
def describe_alarms(self, period=None, statistic=None,
dimensions=None, unit=None):
"""
Retrieves all alarms for this metric. Specify a statistic, period,
or unit to filter the set of alarms further.
:type period: int
:param period: The period in seconds over which the statistic
is applied.
:type statistic: string
:param statistic: The statistic for the metric.
        :param dimensions: A dictionary containing name/value
pairs that will be used to filter the results. The key in
the dictionary is the name of a Dimension. The value in
the dictionary is either a scalar value of that Dimension
name that you want to filter on, a list of values to
filter on or None if you want all metrics with that
Dimension name.
:type unit: string
        :rtype: list
"""
return self.connection.describe_alarms_for_metric(self.name,
self.namespace,
period,
statistic,
dimensions,
unit)
| mit |
jyundt/oval | app/main/views.py | 1 | 16557 | from collections import OrderedDict
from itertools import groupby
from operator import itemgetter, and_
import datetime
from flask import render_template, redirect, request, url_for, current_app, flash
from slackclient import SlackClient
from sqlalchemy import extract, or_
from sqlalchemy import func
from app import db
from app.util import sort_and_rank
from . import main
from .forms import FeedbackForm
from ..email import send_feedback_email
from ..models import Course, RaceClass, Racer, Team, Race, Participant
def _gen_default(year, race_class_id, race_calendar):
"""Default error case for standings type parameter
It seems useful to create a full function here in case any logging,
or more important work should be done on error.
"""
return None
def _gen_race_calendar(year, race_class_id):
"""Returns the full calendar of dates for a class and year of racing
This is necessary because dates where individuals do not participate will
not exist in their individual results otherwise.
"""
dates = Race.query.with_entities(Race.date, Race.id)\
.filter(extract("year", Race.date) == year)\
.filter(Race.points_race == True)\
.filter(Race.class_id == race_class_id).all()
dates = sorted(dates, key=lambda x: x[0])
return dates
def _make_result(name, id_, rank, total_pts, pts, race_calendar, team_name, team_id):
"""Create result dictionary to make html templates more readable
"""
result = {"name": name,
"id": id_,
"rank": rank,
"total_pts": total_pts,
"race_pts": OrderedDict([(date, "-") for date,_ in race_calendar]),
"team_name": team_name,
"team_id": team_id}
for point, date in pts:
if point:
result["race_pts"][date] = point
return result
def _gen_team_standings(race_info, race_calendar):
"""Return team standings with individual race and total points
"""
# Sort race info first by team (for grouping below) then by date
# for table construction. Filter results not associated with a team.
team_race_info = sorted(
[ri for ri in race_info if ri.team_id],
key=lambda ri: (ri.team_id, ri.race_date))
def sum_team_points_by_date(team_results):
return [
(sum(ri.team_points or 0 for ri in dg), date)
for (team_id, date), dg in
groupby(team_results, key=lambda ri: (ri.team_id, ri.race_date))]
team_points_by_date = {
team_id: sum_team_points_by_date(g) for team_id, g
in groupby(team_race_info, key=lambda ri: ri.team_id)}
# Aggregate results by team
team_agg_info = [
(team_id, team_name, sum(ri.team_points or 0 for ri in g))
for ((team_id, team_name), g) in
groupby(team_race_info, key=lambda ri: (ri.team_id, ri.team_name))
]
# Filter to only teams that have points, and
# rank by total team points.
ranked_teams = sort_and_rank(
filter(itemgetter(2), team_agg_info),
key=itemgetter(2))
results = []
for rank, (team_id, team_name, total_pts) in ranked_teams:
result = _make_result(name=team_name, id_=team_id, rank=rank, total_pts=total_pts,
pts=team_points_by_date[team_id], race_calendar=race_calendar,
team_name=None, team_id=None)
results.append(result)
return results
def _gen_ind_standings(race_info, race_calendar):
"""Return top individual racer standings with individual race and total points
Note, individual placing tiebreak is by number of wins, followed by number of
seconds places, etc.
"""
# Sort race info first by racer (for grouping below) then by date
# for table construction.
racer_race_info = sorted(race_info, key=lambda ri: (ri.racer_id, ri.race_date))
# A list of per-race points for each racer
racer_race_points = {
racer_id: list((ri.points if not ri.points_dropped else '(%d)' % ri.points or 0, ri.race_date) for ri in g)
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)}
# Team info for each racer
racer_teams = {
racer_id: [(ri.team_name, ri.team_id) for ri in g]
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)
}
def placing_counts(placings):
# Helper to count placings
# Returns a tuple with the count of number of first places, then number
# of seconds, etc., up to the 8th place.
placings = filter(None, placings)
if not placings:
return ()
counts_by_place = {place: sum(1 for _ in g) for place, g in groupby(sorted(placings))}
assert min(counts_by_place.keys()) >= 1
return tuple(counts_by_place.get(place) or 0 for place in xrange(1, 9))
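    # Worked example (illustrative): placing_counts([1, 1, 3, 2]) returns
    # (2, 1, 1, 0, 0, 0, 0, 0), i.e. two wins, one second place and one
    # third, so racers on equal points are tie-broken by wins, then by
    # second places, and so on.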
# Group race results by racer
race_info_gby_racer = [
((racer_id, racer_name), list(g))
for ((racer_id, racer_name), g) in
groupby(racer_race_info, key=lambda ri: (ri.racer_id, ri.racer_name))]
# Aggregate points and placings by racer
racer_agg_info = [(
racer_id,
racer_name,
sum(r.points if r.points and not r.points_dropped else 0 for r in g),
placing_counts(r.place for r in g))
for (racer_id, racer_name), g in race_info_gby_racer]
# Filter to only racers that have any points,
# rank by total points then by placings.
ranked_racers = sort_and_rank(
filter(itemgetter(2), racer_agg_info),
key=itemgetter(2, 3))
results = []
for rank, (racer_id, racer_name, racer_points, _) in ranked_racers:
team = racer_teams[racer_id][-1] if racer_id in racer_teams else (None, None)
result = _make_result(name=racer_name, id_=racer_id, rank=rank, total_pts=racer_points,
pts=racer_race_points[racer_id], race_calendar=race_calendar,
team_name=team[0], team_id=team[1])
results.append(result)
return results
def _gen_mar_standings(race_info, race_calendar):
"""Return top MAR standings with individual race and total points
"""
# Sort race info first by racer (for grouping below) then by date
# for table construction.
racer_race_info = sorted(race_info, key=lambda ri: (ri.racer_id, ri.race_date))
# A list of per-race mar points for each racer
racer_race_mar_points = {
racer_id: list((ri.mar_points, ri.race_date) for ri in g)
for racer_id, g in groupby(racer_race_info, key=lambda ri: ri.racer_id)}
# Team info for each racer
racer_teams = {
racer_id: list((ri.team_name, ri.team_id) for ri in g)
for racer_id, g in groupby(racer_race_info, key=itemgetter(0))
}
# Aggregate mar points by racer
racer_agg_info = [
(racer_id, racer_name, sum(ri.mar_points or 0 for ri in g))
for (racer_id, racer_name), g in
groupby(racer_race_info, key=lambda ri: (ri.racer_id, ri.racer_name))]
# Filter to only racers that have any mar points,
# rank by total points.
ranked_racers = sort_and_rank(
filter(itemgetter(2), racer_agg_info),
key=itemgetter(2))
results = []
for rank, (racer_id, racer_name, racer_points) in ranked_racers:
team = racer_teams[racer_id][-1] if racer_id in racer_teams else (None, None)
result = _make_result(name=racer_name, id_=racer_id, rank=rank, total_pts=racer_points,
pts=racer_race_mar_points[racer_id], race_calendar=race_calendar,
team_name=team[0], team_id=team[1])
results.append(result)
return results
@main.route('/')
def index():
"""Fills and renders the front page index.html template
Only display recent results when they're within the past ~three months.
"""
recent_time = datetime.datetime.now() - datetime.timedelta(days=90)
recent_results = (
Race.query
.join(Participant, Race.id == Participant.race_id)
.filter(Race.date > recent_time)
.group_by(Race.id)
.having(func.count(Participant.id) > 0))
r1 = recent_results.subquery('r1')
r2 = recent_results.subquery('r2')
latest_races = (
db.session.query(r1)
.with_entities(
r1.c.id.label('id'),
r1.c.date.label('date'),
RaceClass.name.label('class_name'))
.join(r2, and_(r1.c.class_id == r2.c.class_id, r1.c.date < r2.c.date), isouter=True)
.join(RaceClass, RaceClass.id == r1.c.class_id)
.filter(r2.c.id.is_(None))
.order_by(r1.c.date.desc(), RaceClass.id))
races = latest_races.all()
return render_template('index.html', races=races)
@main.route('/standings/')
def standings():
years = sorted(set(
int(date.year) for (date,) in Race.query.with_entities(Race.date)
.filter_by(points_race=True)),
reverse=True)
try:
req_year = int(request.args.get('year'))
except (ValueError, TypeError):
req_year = None
year = req_year if req_year is not None else (years[0] if years else None)
race_classes = [
(race_class_id.id, race_class_id.name)
for race_class_id in
RaceClass.query.with_entities(
RaceClass.id, RaceClass.name)
.join(Race)
.join(Participant)
.filter(extract("year", Race.date) == year)
.filter(Race.points_race == True)
.group_by(RaceClass.id)
.order_by(RaceClass.name)]
year_race_class_ids = [race_class_id for race_class_id, _ in race_classes]
try:
req_race_class_id = int(request.args.get('race_class_id'))
except (ValueError, TypeError):
req_race_class_id = None
race_class_id = (
req_race_class_id if req_race_class_id in year_race_class_ids
else (year_race_class_ids[0] if year_race_class_ids else None))
if year is not None and race_class_id is not None:
race_info = (
Racer.query.with_entities(
Racer.id.label('racer_id'), Racer.name.label('racer_name'),
Race.date.label('race_date'), Participant.points,
Participant.team_points, Participant.mar_points,
Team.id.label('team_id'), Team.name.label('team_name'), Participant.place,
Participant.points_dropped)
.join(Participant)
.join(Team, isouter=True)
.join(Race)
.filter(Race.points_race == True)
.filter(extract("year", Race.date) == year)
.filter(Race.class_id == race_class_id)
.order_by(Racer.id, Race.date.desc())
.all())
race_calendar = _gen_race_calendar(year, race_class_id)
ind_standings = _gen_ind_standings(race_info, race_calendar)
team_standings = _gen_team_standings(race_info, race_calendar)
mar_standings = _gen_mar_standings(race_info, race_calendar)
results = (
('Individual', ind_standings),
('Team', team_standings),
('MAR', mar_standings))
return render_template(
'standings.html',
selected_year=year, selected_race_class_id=race_class_id,
years=years, race_classes=race_classes,
results=results, race_calendar=race_calendar)
return render_template('standings.html', selected_year=year, years=years)
@main.route('/results/')
def results():
years = sorted(set(
int(date.year) for (date,) in Race.query.with_entities(Race.date).all()),
reverse=True)
try:
req_year = int(request.args.get('year'))
except (ValueError, TypeError):
req_year = None
year = req_year if req_year is not None else (years[0] if years else None)
race_classes = [
(race_class_id.id, race_class_id.name)
for race_class_id in
RaceClass.query.with_entities(
RaceClass.id, RaceClass.name)
.join(Race)
.join(Participant)
.filter(extract("year", Race.date) == year)
.group_by(RaceClass.id)
.order_by(RaceClass.name)]
year_race_class_ids = [race_class_id for race_class_id, _ in race_classes]
try:
req_race_class_id = int(request.args.get('race_class_id'))
except (ValueError, TypeError):
req_race_class_id = None
race_class_id = (
req_race_class_id if req_race_class_id in year_race_class_ids
else (year_race_class_ids[0] if year_race_class_ids else None))
if year is not None and race_class_id is not None:
race_info = (Racer.query.with_entities(
Racer.id, Racer.name,
Team.id, Team.name,
Participant.place, Participant.mar_place,
Race.id, Race.date,
Race.course_id, Race.average_lap, Race.fast_lap,
Race.winning_time, Race.laps, Race.starters, Race.points_race,
RaceClass.id, RaceClass.name,
Course.name, Course.length_miles)
.join(Participant, Participant.racer_id == Racer.id)
.join(Team, Team.id == Participant.team_id, isouter=True)
.join(Race, Race.id == Participant.race_id)
.join(RaceClass, RaceClass.id == Race.class_id)
.join(Course, Course.id == Race.course_id)
.filter(or_(Participant.place == 1, Participant.mar_place == 1))
.filter(extract("year", Race.date) == year)
.filter(Race.class_id == race_class_id)
.order_by(Race.date)
.all())
race_info_by_date = [
(date, list(date_group))
for date, date_group in groupby(race_info, key=itemgetter(7))]
results = []
for date, date_group in race_info_by_date:
(race_id, race_date, course_id, average_lap, fast_lap, winning_time,
laps, starters, points_race, race_class_id, race_class_name,
course_name, course_length_miles) = date_group[0][6:]
winner = None
mar_winner = None
for maybe_winner in date_group:
racer_id, racer_name, team_id, team_name, place, mar_place = maybe_winner[0:6]
if place == 1:
winner = (racer_id, racer_name, team_id, team_name)
if mar_place == 1:
mar_winner = (racer_id, racer_name, team_id, team_name)
avg_lap = (average_lap.total_seconds()) if average_lap else (
(winning_time.total_seconds() / laps)
if (winning_time and laps) else None)
avg_speed = (
course_length_miles / (avg_lap / 3600)
if course_length_miles and avg_lap
else None)
results.insert(0, {
'race_id': race_id,
'date': date,
'course_name': course_name,
'winner': winner,
'mar_winner': mar_winner,
'fast_lap': fast_lap,
'avg_speed': avg_speed,
'starters': starters,
'points_race': points_race})
return render_template(
'results.html',
selected_year=year, selected_race_class_id=race_class_id,
years=years, race_classes=race_classes, results=results)
return render_template('results.html', selected_year=year, years=years)
@main.route('/feedback/', methods=['GET', 'POST'])
def send_feedback():
form = FeedbackForm()
if form.validate_on_submit():
name = form.name.data
replyaddress = form.replyaddress.data
subject = form.subject.data
feedback = form.feedback.data
send_feedback_email(name, replyaddress, subject, feedback)
message = "%s <%s> - %s: %s" % (name, replyaddress, subject, feedback)
token = current_app.config['SLACK_OAUTH_API_TOKEN']
sc = SlackClient(token)
sc.api_call("chat.postMessage", channel="#feedback", text=message,
username="Flask")
flash('Feedback sent!')
return redirect(url_for('main.index'))
return render_template('feedback.html', form=form)
@main.route('/robots.txt')
def serve_static():
return current_app.send_static_file('robots.txt')
@main.route('/favicon.ico')
def serve_favicon():
return current_app.send_static_file('favicon.ico')
| gpl-2.0 |
rec/echomesh | code/python/external/platform/darwin/numpy/distutils/environment.py | 13 | 2280 | import os
from distutils.dist import Distribution
__metaclass__ = type
class EnvironmentConfig(object):
def __init__(self, distutils_section='ALL', **kw):
self._distutils_section = distutils_section
self._conf_keys = kw
self._conf = None
self._hook_handler = None
def dump_variable(self, name):
conf_desc = self._conf_keys[name]
hook, envvar, confvar, convert = conf_desc
if not convert:
convert = lambda x : x
print('%s.%s:' % (self._distutils_section, name))
v = self._hook_handler(name, hook)
print(' hook : %s' % (convert(v),))
if envvar:
v = os.environ.get(envvar, None)
print(' environ: %s' % (convert(v),))
if confvar and self._conf:
v = self._conf.get(confvar, (None, None))[1]
print(' config : %s' % (convert(v),))
def dump_variables(self):
for name in self._conf_keys:
self.dump_variable(name)
def __getattr__(self, name):
try:
conf_desc = self._conf_keys[name]
except KeyError:
raise AttributeError(name)
return self._get_var(name, conf_desc)
def get(self, name, default=None):
try:
conf_desc = self._conf_keys[name]
except KeyError:
return default
var = self._get_var(name, conf_desc)
if var is None:
var = default
return var
def _get_var(self, name, conf_desc):
hook, envvar, confvar, convert = conf_desc
var = self._hook_handler(name, hook)
if envvar is not None:
var = os.environ.get(envvar, var)
if confvar is not None and self._conf:
var = self._conf.get(confvar, (None, var))[1]
if convert is not None:
var = convert(var)
return var
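    # Resolution order in _get_var: the hook result is computed first, an
    # environment variable named by `envvar` overrides it when set, a
    # distutils config entry named by `confvar` overrides both, and
    # `convert`, if supplied, is applied to whichever value wins.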
def clone(self, hook_handler):
ec = self.__class__(distutils_section=self._distutils_section,
**self._conf_keys)
ec._hook_handler = hook_handler
return ec
def use_distribution(self, dist):
if isinstance(dist, Distribution):
self._conf = dist.get_option_dict(self._distutils_section)
else:
self._conf = dist
| mit |
zstyblik/infernal-twin | build/pillow/build/lib.linux-i686-2.7/PIL/MpegImagePlugin.py | 26 | 1823 | #
# The Python Imaging Library.
# $Id$
#
# MPEG file handling
#
# History:
# 95-09-09 fl Created
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1995.
#
# See the README file for information on usage and redistribution.
#
__version__ = "0.1"
from PIL import Image, ImageFile
from PIL._binary import i8
#
# Bitstream parser
class BitStream(object):
def __init__(self, fp):
self.fp = fp
self.bits = 0
self.bitbuffer = 0
def next(self):
return i8(self.fp.read(1))
def peek(self, bits):
while self.bits < bits:
c = self.next()
if c < 0:
self.bits = 0
continue
self.bitbuffer = (self.bitbuffer << 8) + c
self.bits += 8
return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1
def skip(self, bits):
while self.bits < bits:
self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1))
self.bits += 8
self.bits = self.bits - bits
def read(self, bits):
v = self.peek(bits)
self.bits = self.bits - bits
return v
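# Reading sketch: MpegImageFile._open below uses this bit reader to pull the
# 32-bit sequence-header start code (compared against 0x1B3) and then two
# 12-bit fields giving the frame width and height.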
##
# Image plugin for MPEG streams. This plugin can identify a stream,
# but it cannot read it.
class MpegImageFile(ImageFile.ImageFile):
format = "MPEG"
format_description = "MPEG"
def _open(self):
s = BitStream(self.fp)
if s.read(32) != 0x1B3:
raise SyntaxError("not an MPEG file")
self.mode = "RGB"
self.size = s.read(12), s.read(12)
# --------------------------------------------------------------------
# Registry stuff
Image.register_open("MPEG", MpegImageFile)
Image.register_extension("MPEG", ".mpg")
Image.register_extension("MPEG", ".mpeg")
Image.register_mime("MPEG", "video/mpeg")
| gpl-3.0 |
fitzgen/servo | tests/wpt/css-tests/tools/pytest/testing/test_unittest.py | 171 | 22302 | from _pytest.main import EXIT_NOTESTSCOLLECTED
import pytest
def test_simple_unittest(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
def testpassing(self):
self.assertEquals('foo', 'foo')
def test_failing(self):
self.assertEquals('foo', 'bar')
""")
reprec = testdir.inline_run(testpath)
assert reprec.matchreport("testpassing").passed
assert reprec.matchreport("test_failing").failed
def test_runTest_method(testdir):
testdir.makepyfile("""
import unittest
class MyTestCaseWithRunTest(unittest.TestCase):
def runTest(self):
self.assertEquals('foo', 'foo')
class MyTestCaseWithoutRunTest(unittest.TestCase):
def runTest(self):
self.assertEquals('foo', 'foo')
def test_something(self):
pass
""")
result = testdir.runpytest("-v")
result.stdout.fnmatch_lines("""
*MyTestCaseWithRunTest::runTest*
*MyTestCaseWithoutRunTest::test_something*
*2 passed*
""")
def test_isclasscheck_issue53(testdir):
testpath = testdir.makepyfile("""
import unittest
class _E(object):
def __getattr__(self, tag):
pass
E = _E()
""")
result = testdir.runpytest(testpath)
assert result.ret == EXIT_NOTESTSCOLLECTED
def test_setup(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
def setUp(self):
self.foo = 1
def setup_method(self, method):
self.foo2 = 1
def test_both(self):
self.assertEquals(1, self.foo)
assert self.foo2 == 1
def teardown_method(self, method):
assert 0, "42"
""")
reprec = testdir.inline_run("-s", testpath)
assert reprec.matchreport("test_both", when="call").passed
rep = reprec.matchreport("test_both", when="teardown")
assert rep.failed and '42' in str(rep.longrepr)
def test_setUpModule(testdir):
testpath = testdir.makepyfile("""
l = []
def setUpModule():
l.append(1)
def tearDownModule():
del l[0]
def test_hello():
assert l == [1]
def test_world():
assert l == [1]
""")
result = testdir.runpytest(testpath)
result.stdout.fnmatch_lines([
"*2 passed*",
])
def test_setUpModule_failing_no_teardown(testdir):
testpath = testdir.makepyfile("""
l = []
def setUpModule():
0/0
def tearDownModule():
l.append(1)
def test_hello():
pass
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=0, failed=1)
call = reprec.getcalls("pytest_runtest_setup")[0]
assert not call.item.module.l
def test_new_instances(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
def test_func1(self):
self.x = 2
def test_func2(self):
assert not hasattr(self, 'x')
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=2)
def test_teardown(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
l = []
def test_one(self):
pass
def tearDown(self):
self.l.append(None)
class Second(unittest.TestCase):
def test_check(self):
self.assertEquals(MyTestCase.l, [None])
""")
reprec = testdir.inline_run(testpath)
passed, skipped, failed = reprec.countoutcomes()
assert failed == 0, failed
assert passed == 2
assert passed + skipped + failed == 2
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_skip_issue148(testdir):
testpath = testdir.makepyfile("""
import unittest
@unittest.skip("hello")
class MyTestCase(unittest.TestCase):
@classmethod
def setUpClass(self):
xxx
def test_one(self):
pass
@classmethod
def tearDownClass(self):
xxx
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(skipped=1)
def test_method_and_teardown_failing_reporting(testdir):
testdir.makepyfile("""
import unittest, pytest
class TC(unittest.TestCase):
def tearDown(self):
assert 0, "down1"
def test_method(self):
assert False, "down2"
""")
result = testdir.runpytest("-s")
assert result.ret == 1
result.stdout.fnmatch_lines([
"*tearDown*",
"*assert 0*",
"*test_method*",
"*assert False*",
"*1 failed*1 error*",
])
def test_setup_failure_is_shown(testdir):
testdir.makepyfile("""
import unittest
import pytest
class TC(unittest.TestCase):
def setUp(self):
assert 0, "down1"
def test_method(self):
print ("never42")
xyz
""")
result = testdir.runpytest("-s")
assert result.ret == 1
result.stdout.fnmatch_lines([
"*setUp*",
"*assert 0*down1*",
"*1 failed*",
])
assert 'never42' not in result.stdout.str()
def test_setup_setUpClass(testdir):
testpath = testdir.makepyfile("""
import unittest
import pytest
class MyTestCase(unittest.TestCase):
x = 0
@classmethod
def setUpClass(cls):
cls.x += 1
def test_func1(self):
assert self.x == 1
def test_func2(self):
assert self.x == 1
@classmethod
def tearDownClass(cls):
cls.x -= 1
def test_teareddown():
assert MyTestCase.x == 0
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=3)
def test_setup_class(testdir):
testpath = testdir.makepyfile("""
import unittest
import pytest
class MyTestCase(unittest.TestCase):
x = 0
def setup_class(cls):
cls.x += 1
def test_func1(self):
assert self.x == 1
def test_func2(self):
assert self.x == 1
def teardown_class(cls):
cls.x -= 1
def test_teareddown():
assert MyTestCase.x == 0
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=3)
@pytest.mark.parametrize("type", ['Error', 'Failure'])
def test_testcase_adderrorandfailure_defers(testdir, type):
testdir.makepyfile("""
from unittest import TestCase
import pytest
class MyTestCase(TestCase):
def run(self, result):
excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
try:
result.add%s(self, excinfo._excinfo)
except KeyboardInterrupt:
raise
except:
pytest.fail("add%s should not raise")
def test_hello(self):
pass
""" % (type, type))
result = testdir.runpytest()
assert 'should not raise' not in result.stdout.str()
@pytest.mark.parametrize("type", ['Error', 'Failure'])
def test_testcase_custom_exception_info(testdir, type):
testdir.makepyfile("""
from unittest import TestCase
import py, pytest
import _pytest._code
class MyTestCase(TestCase):
def run(self, result):
excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
# we fake an incompatible exception info
from _pytest.monkeypatch import monkeypatch
mp = monkeypatch()
def t(*args):
mp.undo()
raise TypeError()
mp.setattr(_pytest._code, 'ExceptionInfo', t)
try:
excinfo = excinfo._excinfo
result.add%(type)s(self, excinfo)
finally:
mp.undo()
def test_hello(self):
pass
""" % locals())
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"NOTE: Incompatible Exception Representation*",
"*ZeroDivisionError*",
"*1 failed*",
])
def test_testcase_totally_incompatible_exception_info(testdir):
item, = testdir.getitems("""
from unittest import TestCase
class MyTestCase(TestCase):
def test_hello(self):
pass
""")
item.addError(None, 42)
excinfo = item._excinfo.pop(0)
assert 'ERROR: Unknown Incompatible' in str(excinfo.getrepr())
def test_module_level_pytestmark(testdir):
testpath = testdir.makepyfile("""
import unittest
import pytest
pytestmark = pytest.mark.xfail
class MyTestCase(unittest.TestCase):
def test_func1(self):
assert 0
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testcase_skip_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
skip = 'dont run'
def test_func(self):
pass
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testfunction_skip_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
def test_func(self):
pass
test_func.skip = 'dont run'
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testcase_todo_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
todo = 'dont run'
def test_func(self):
assert 0
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
def test_trial_testfunction_todo_property(testdir):
pytest.importorskip('twisted.trial.unittest')
testpath = testdir.makepyfile("""
from twisted.trial import unittest
class MyTestCase(unittest.TestCase):
def test_func(self):
assert 0
test_func.todo = 'dont run'
""")
reprec = testdir.inline_run(testpath, "-s")
reprec.assertoutcome(skipped=1)
class TestTrialUnittest:
def setup_class(cls):
cls.ut = pytest.importorskip("twisted.trial.unittest")
def test_trial_testcase_runtest_not_collected(self, testdir):
testdir.makepyfile("""
from twisted.trial.unittest import TestCase
class TC(TestCase):
def test_hello(self):
pass
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
testdir.makepyfile("""
from twisted.trial.unittest import TestCase
class TC(TestCase):
def runTest(self):
pass
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_trial_exceptions_with_skips(self, testdir):
testdir.makepyfile("""
from twisted.trial import unittest
import pytest
class TC(unittest.TestCase):
def test_hello(self):
pytest.skip("skip_in_method")
@pytest.mark.skipif("sys.version_info != 1")
def test_hello2(self):
pass
@pytest.mark.xfail(reason="iwanto")
def test_hello3(self):
assert 0
def test_hello4(self):
pytest.xfail("i2wanto")
def test_trial_skip(self):
pass
test_trial_skip.skip = "trialselfskip"
def test_trial_todo(self):
assert 0
test_trial_todo.todo = "mytodo"
def test_trial_todo_success(self):
pass
test_trial_todo_success.todo = "mytodo"
class TC2(unittest.TestCase):
def setup_class(cls):
pytest.skip("skip_in_setup_class")
def test_method(self):
pass
""")
result = testdir.runpytest("-rxs")
assert result.ret == 0
result.stdout.fnmatch_lines_random([
"*XFAIL*test_trial_todo*",
"*trialselfskip*",
"*skip_in_setup_class*",
"*iwanto*",
"*i2wanto*",
"*sys.version_info*",
"*skip_in_method*",
"*4 skipped*3 xfail*1 xpass*",
])
def test_trial_error(self, testdir):
testdir.makepyfile("""
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.internet import reactor
class TC(TestCase):
def test_one(self):
crash
def test_two(self):
def f(_):
crash
d = Deferred()
d.addCallback(f)
reactor.callLater(0.3, d.callback, None)
return d
def test_three(self):
def f():
pass # will never get called
reactor.callLater(0.3, f)
# will crash at teardown
def test_four(self):
def f(_):
reactor.callLater(0.3, f)
crash
d = Deferred()
d.addCallback(f)
reactor.callLater(0.3, d.callback, None)
return d
# will crash both at test time and at teardown
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*ERRORS*",
"*DelayedCalls*",
"*test_four*",
"*NameError*crash*",
"*test_one*",
"*NameError*crash*",
"*test_three*",
"*DelayedCalls*",
"*test_two*",
"*crash*",
])
def test_trial_pdb(self, testdir):
p = testdir.makepyfile("""
from twisted.trial import unittest
import pytest
class TC(unittest.TestCase):
def test_hello(self):
assert 0, "hellopdb"
""")
child = testdir.spawn_pytest(p)
child.expect("hellopdb")
child.sendeof()
def test_djangolike_testcase(testdir):
# contributed from Morten Breekevold
testdir.makepyfile("""
from unittest import TestCase, main
class DjangoLikeTestCase(TestCase):
def setUp(self):
print ("setUp()")
def test_presetup_has_been_run(self):
print ("test_thing()")
self.assertTrue(hasattr(self, 'was_presetup'))
def tearDown(self):
print ("tearDown()")
def __call__(self, result=None):
try:
self._pre_setup()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
super(DjangoLikeTestCase, self).__call__(result)
try:
self._post_teardown()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
import sys
result.addError(self, sys.exc_info())
return
def _pre_setup(self):
print ("_pre_setup()")
self.was_presetup = True
def _post_teardown(self):
print ("_post_teardown()")
""")
result = testdir.runpytest("-s")
assert result.ret == 0
result.stdout.fnmatch_lines([
"*_pre_setup()*",
"*setUp()*",
"*test_thing()*",
"*tearDown()*",
"*_post_teardown()*",
])
def test_unittest_not_shown_in_traceback(testdir):
testdir.makepyfile("""
import unittest
class t(unittest.TestCase):
def test_hello(self):
x = 3
self.assertEquals(x, 4)
""")
res = testdir.runpytest()
assert "failUnlessEqual" not in res.stdout.str()
def test_unorderable_types(testdir):
testdir.makepyfile("""
import unittest
class TestJoinEmpty(unittest.TestCase):
pass
def make_test():
class Test(unittest.TestCase):
pass
Test.__name__ = "TestFoo"
return Test
TestFoo = make_test()
""")
result = testdir.runpytest()
assert "TypeError" not in result.stdout.str()
assert result.ret == EXIT_NOTESTSCOLLECTED
def test_unittest_typerror_traceback(testdir):
testdir.makepyfile("""
import unittest
class TestJoinEmpty(unittest.TestCase):
def test_hello(self, arg1):
pass
""")
result = testdir.runpytest()
assert "TypeError" in result.stdout.str()
assert result.ret == 1
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_unexpected_failure(testdir):
testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
@unittest.expectedFailure
def test_func1(self):
assert 0
@unittest.expectedFailure
def test_func2(self):
assert 1
""")
result = testdir.runpytest("-rxX")
result.stdout.fnmatch_lines([
"*XFAIL*MyTestCase*test_func1*",
"*XPASS*MyTestCase*test_func2*",
"*1 xfailed*1 xpass*",
])
@pytest.mark.parametrize('fix_type, stmt', [
('fixture', 'return'),
('yield_fixture', 'yield'),
])
def test_unittest_setup_interaction(testdir, fix_type, stmt):
testdir.makepyfile("""
import unittest
import pytest
class MyTestCase(unittest.TestCase):
@pytest.{fix_type}(scope="class", autouse=True)
def perclass(self, request):
request.cls.hello = "world"
{stmt}
@pytest.{fix_type}(scope="function", autouse=True)
def perfunction(self, request):
request.instance.funcname = request.function.__name__
{stmt}
def test_method1(self):
assert self.funcname == "test_method1"
assert self.hello == "world"
def test_method2(self):
assert self.funcname == "test_method2"
def test_classattr(self):
assert self.__class__.hello == "world"
""".format(fix_type=fix_type, stmt=stmt))
result = testdir.runpytest()
result.stdout.fnmatch_lines("*3 passed*")
def test_non_unittest_no_setupclass_support(testdir):
testpath = testdir.makepyfile("""
class TestFoo:
x = 0
@classmethod
def setUpClass(cls):
cls.x = 1
def test_method1(self):
assert self.x == 0
@classmethod
def tearDownClass(cls):
cls.x = 1
def test_not_teareddown():
assert TestFoo.x == 0
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=2)
def test_no_teardown_if_setupclass_failed(testdir):
testpath = testdir.makepyfile("""
import unittest
class MyTestCase(unittest.TestCase):
x = 0
@classmethod
def setUpClass(cls):
cls.x = 1
assert False
def test_func1(self):
cls.x = 10
@classmethod
def tearDownClass(cls):
cls.x = 100
def test_notTornDown():
assert MyTestCase.x == 1
""")
reprec = testdir.inline_run(testpath)
reprec.assertoutcome(passed=1, failed=1)
def test_issue333_result_clearing(testdir):
testdir.makeconftest("""
def pytest_runtest_call(__multicall__, item):
__multicall__.execute()
assert 0
""")
testdir.makepyfile("""
import unittest
class TestIt(unittest.TestCase):
def test_func(self):
0/0
""")
reprec = testdir.inline_run()
reprec.assertoutcome(failed=1)
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_raise_skip_issue748(testdir):
testdir.makepyfile(test_foo="""
import unittest
class MyTestCase(unittest.TestCase):
def test_one(self):
raise unittest.SkipTest('skipping due to reasons')
""")
result = testdir.runpytest("-v", '-rs')
result.stdout.fnmatch_lines("""
*SKIP*[1]*test_foo.py*skipping due to reasons*
*1 skipped*
""")
@pytest.mark.skipif("sys.version_info < (2,7)")
def test_unittest_skip_issue1169(testdir):
testdir.makepyfile(test_foo="""
import unittest
class MyTestCase(unittest.TestCase):
@unittest.skip("skipping due to reasons")
def test_skip(self):
self.fail()
""")
result = testdir.runpytest("-v", '-rs')
result.stdout.fnmatch_lines("""
*SKIP*[1]*skipping due to reasons*
*1 skipped*
""")
| mpl-2.0 |
napkindrawing/ansible | contrib/inventory/zone.py | 196 | 1490 | #!/usr/bin/env python
# (c) 2015, Dagobert Michelsen <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen, PIPE
import sys
import json
result = {}
result['all'] = {}
pipe = Popen(['zoneadm', 'list', '-ip'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = []
for l in pipe.stdout.readlines():
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
s = l.split(':')
if s[1] != 'global':
result['all']['hosts'].append(s[1])
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'zone'
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({'ansible_connection': 'zone'}))
else:
sys.stderr.write("Need an argument, either --list or --host <host>\n")
| gpl-3.0 |
dpiers/coderang-meteor | public/jsrepl/extern/python/unclosured/lib/python2.7/UserList.py | 327 | 3644 | """A more or less complete user-defined wrapper around list objects."""
import collections
class UserList(collections.MutableSequence):
def __init__(self, initlist=None):
self.data = []
if initlist is not None:
# XXX should this accept an arbitrary sequence?
if type(initlist) == type(self.data):
self.data[:] = initlist
elif isinstance(initlist, UserList):
self.data[:] = initlist.data[:]
else:
self.data = list(initlist)
def __repr__(self): return repr(self.data)
def __lt__(self, other): return self.data < self.__cast(other)
def __le__(self, other): return self.data <= self.__cast(other)
def __eq__(self, other): return self.data == self.__cast(other)
def __ne__(self, other): return self.data != self.__cast(other)
def __gt__(self, other): return self.data > self.__cast(other)
def __ge__(self, other): return self.data >= self.__cast(other)
def __cast(self, other):
if isinstance(other, UserList): return other.data
else: return other
def __cmp__(self, other):
return cmp(self.data, self.__cast(other))
__hash__ = None # Mutable sequence, so not hashable
def __contains__(self, item): return item in self.data
def __len__(self): return len(self.data)
def __getitem__(self, i): return self.data[i]
def __setitem__(self, i, item): self.data[i] = item
def __delitem__(self, i): del self.data[i]
def __getslice__(self, i, j):
i = max(i, 0); j = max(j, 0)
return self.__class__(self.data[i:j])
def __setslice__(self, i, j, other):
i = max(i, 0); j = max(j, 0)
if isinstance(other, UserList):
self.data[i:j] = other.data
elif isinstance(other, type(self.data)):
self.data[i:j] = other
else:
self.data[i:j] = list(other)
def __delslice__(self, i, j):
i = max(i, 0); j = max(j, 0)
del self.data[i:j]
def __add__(self, other):
if isinstance(other, UserList):
return self.__class__(self.data + other.data)
elif isinstance(other, type(self.data)):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + list(other))
def __radd__(self, other):
if isinstance(other, UserList):
return self.__class__(other.data + self.data)
elif isinstance(other, type(self.data)):
return self.__class__(other + self.data)
else:
return self.__class__(list(other) + self.data)
def __iadd__(self, other):
if isinstance(other, UserList):
self.data += other.data
elif isinstance(other, type(self.data)):
self.data += other
else:
self.data += list(other)
return self
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
def __imul__(self, n):
self.data *= n
return self
def append(self, item): self.data.append(item)
def insert(self, i, item): self.data.insert(i, item)
def pop(self, i=-1): return self.data.pop(i)
def remove(self, item): self.data.remove(item)
def count(self, item): return self.data.count(item)
def index(self, item, *args): return self.data.index(item, *args)
def reverse(self): self.data.reverse()
def sort(self, *args, **kwds): self.data.sort(*args, **kwds)
def extend(self, other):
if isinstance(other, UserList):
self.data.extend(other.data)
else:
self.data.extend(other)
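# Behaviour sketch (illustrative):
#   >>> u = UserList([1, 2])
#   >>> u + [3]
#   [1, 2, 3]
# Comparison and arithmetic operators defer to the wrapped `data` list, and
# binary operations such as __add__ return a new instance of the same class.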
| mit |
efiring/numpy-work | numpy/f2py/common_rules.py | 48 | 4630 | #!/usr/bin/env python
"""
Build common block mechanism for f2py2e.
Copyright 2000 Pearu Peterson all rights reserved,
Pearu Peterson <[email protected]>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/05/06 10:57:33 $
Pearu Peterson
"""
__version__ = "$Revision: 1.19 $"[10:-1]
import __version__
f2py_version = __version__.version
import pprint
import sys
errmess=sys.stderr.write
outmess=sys.stdout.write
show=pprint.pprint
from auxfuncs import *
import capi_maps
import func2subr
from crackfortran import rmbadname
##############
def findcommonblocks(block,top=1):
ret = []
if hascommon(block):
for n in block['common'].keys():
vars={}
for v in block['common'][n]:
vars[v]=block['vars'][v]
ret.append((n,block['common'][n],vars))
elif hasbody(block):
for b in block['body']:
ret=ret+findcommonblocks(b,0)
if top:
tret=[]
names=[]
for t in ret:
if t[0] not in names:
names.append(t[0])
tret.append(t)
return tret
return ret
def buildhooks(m):
ret = {'commonhooks':[],'initcommonhooks':[],'docs':['"COMMON blocks:\\n"']}
fwrap = ['']
def fadd(line,s=fwrap): s[0] = '%s\n %s'%(s[0],line)
chooks = ['']
def cadd(line,s=chooks): s[0] = '%s\n%s'%(s[0],line)
ihooks = ['']
def iadd(line,s=ihooks): s[0] = '%s\n%s'%(s[0],line)
doc = ['']
def dadd(line,s=doc): s[0] = '%s\n%s'%(s[0],line)
for (name,vnames,vars) in findcommonblocks(m):
lower_name = name.lower()
hnames,inames = [],[]
for n in vnames:
if isintent_hide(vars[n]): hnames.append(n)
else: inames.append(n)
if hnames:
outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n\t\t Hidden: %s\n'%(name,','.join(inames),','.join(hnames)))
else:
outmess('\t\tConstructing COMMON block support for "%s"...\n\t\t %s\n'%(name,','.join(inames)))
fadd('subroutine f2pyinit%s(setupfunc)'%name)
fadd('external setupfunc')
for n in vnames:
fadd(func2subr.var2fixfortran(vars,n))
if name=='_BLNK_':
fadd('common %s'%(','.join(vnames)))
else:
fadd('common /%s/ %s'%(name,','.join(vnames)))
fadd('call setupfunc(%s)'%(','.join(inames)))
fadd('end\n')
cadd('static FortranDataDef f2py_%s_def[] = {'%(name))
idims=[]
for n in inames:
ct = capi_maps.getctype(vars[n])
at = capi_maps.c2capi_map[ct]
dm = capi_maps.getarrdims(n,vars[n])
if dm['dims']: idims.append('(%s)'%(dm['dims']))
else: idims.append('')
dms=dm['dims'].strip()
if not dms: dms='-1'
cadd('\t{\"%s\",%s,{{%s}},%s},'%(n,dm['rank'],dms,at))
cadd('\t{NULL}\n};')
inames1 = rmbadname(inames)
inames1_tps = ','.join(map(lambda s:'char *'+s,inames1))
cadd('static void f2py_setup_%s(%s) {'%(name,inames1_tps))
cadd('\tint i_f2py=0;')
for n in inames1:
cadd('\tf2py_%s_def[i_f2py++].data = %s;'%(name,n))
cadd('}')
if '_' in lower_name:
F_FUNC='F_FUNC_US'
else:
F_FUNC='F_FUNC'
cadd('extern void %s(f2pyinit%s,F2PYINIT%s)(void(*)(%s));'\
%(F_FUNC,lower_name,name.upper(),
','.join(['char*']*len(inames1))))
cadd('static void f2py_init_%s(void) {'%name)
cadd('\t%s(f2pyinit%s,F2PYINIT%s)(f2py_setup_%s);'\
%(F_FUNC,lower_name,name.upper(),name))
cadd('}\n')
iadd('\tF2PyDict_SetItemString(d, \"%s\", PyFortranObject_New(f2py_%s_def,f2py_init_%s));'%(name,name,name))
tname = name.replace('_','\\_')
dadd('\\subsection{Common block \\texttt{%s}}\n'%(tname))
dadd('\\begin{description}')
for n in inames:
dadd('\\item[]{{}\\verb@%s@{}}'%(capi_maps.getarrdocsign(n,vars[n])))
if hasnote(vars[n]):
note = vars[n]['note']
if type(note) is type([]): note='\n'.join(note)
dadd('--- %s'%(note))
dadd('\\end{description}')
ret['docs'].append('"\t/%s/ %s\\n"'%(name,','.join(map(lambda v,d:v+d,inames,idims))))
ret['commonhooks']=chooks
ret['initcommonhooks']=ihooks
ret['latexdoc']=doc[0]
if len(ret['docs'])<=1: ret['docs']=''
return ret,fwrap[0]
| bsd-3-clause |
Smart-Torvy/torvy-home-assistant | tests/components/lock/test_demo.py | 23 | 1484 | """The tests for the Demo lock platform."""
import unittest
from homeassistant.bootstrap import setup_component
from homeassistant.components import lock
from tests.common import get_test_home_assistant
FRONT = 'lock.front_door'
KITCHEN = 'lock.kitchen_door'
class TestLockDemo(unittest.TestCase):
"""Test the demo lock."""
def setUp(self): # pylint: disable=invalid-name
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.assertTrue(setup_component(self.hass, lock.DOMAIN, {
'lock': {
'platform': 'demo'
}
}))
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_is_locked(self):
"""Test if lock is locked."""
self.assertTrue(lock.is_locked(self.hass, FRONT))
self.hass.states.is_state(FRONT, 'locked')
self.assertFalse(lock.is_locked(self.hass, KITCHEN))
self.hass.states.is_state(KITCHEN, 'unlocked')
def test_locking(self):
"""Test the locking of a lock."""
lock.lock(self.hass, KITCHEN)
self.hass.block_till_done()
self.assertTrue(lock.is_locked(self.hass, KITCHEN))
def test_unlocking(self):
"""Test the unlocking of a lock."""
lock.unlock(self.hass, FRONT)
self.hass.block_till_done()
self.assertFalse(lock.is_locked(self.hass, FRONT))
| mit |
ench0/external_chromium_org_third_party_skia | platform_tools/android/tests/makefile_writer_tests.py | 65 | 7127 | #!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Test makefile_writer.py
"""
import argparse
import os
import shutil
import sys
import tempfile
import test_variables
import unittest
import utils
sys.path.append(test_variables.GYP_GEN_DIR)
import makefile_writer
import tool_makefile_writer
import vars_dict_lib
MAKEFILE_NAME = test_variables.ANDROID_MK
REBASELINE_MSG = ('If you\'ve modified makefile_writer.py, run '
'"makefile_writer_tests.py --rebaseline" to rebaseline')
TOOL_DIR = 'tool'
def generate_dummy_vars_dict(name):
"""Create a VarsDict and fill it with dummy entries.
Args:
name: string to be appended to each entry, if not None.
Returns:
A VarsDict with dummy entries.
"""
vars_dict = vars_dict_lib.VarsDict()
for key in vars_dict.keys():
entry = key.lower()
if name:
entry += '_' + name
vars_dict[key].add(entry)
return vars_dict
def generate_write_local_vars_params():
"""Generator to compute params for write_local_vars tests.
Each iteration yields a new tuple: (filename, append, name), specific to a
way to call write_local_vars for the tests.
Yields:
filename: filename corresponding to the expectation file for this
combination of params to write_local_vars.
append: boolean to pass as append parameter to write_local_vars.
name: string to pass as name parameter to write_local_vars.
"""
for append in [ True, False ]:
for name in [ None, 'arm', 'foo' ]:
filename = 'write_local_vars'
if append:
filename += '_append'
else:
filename += '_no_append'
if name:
filename += '_' + name
else:
filename += '_no_name'
yield (filename, append, name)
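# For illustration, the generator above yields tuples such as
# ('write_local_vars_append_no_name', True, None) and
# ('write_local_vars_no_append_foo', False, 'foo'), pairing each expectation
# filename with the write_local_vars arguments that produce it.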
def generate_dummy_vars_dict_data(name, condition):
"""Create a dummy VarsDictData.
Create a dummy VarsDictData, using the name for both the contained
VarsDict and the VarsDictData
Args:
name: name used by both the returned VarsDictData and its contained
VarsDict.
condition: condition used by the returned VarsDictData.
Returns:
A VarsDictData with dummy values, using the passed in info.
"""
vars_dict = generate_dummy_vars_dict(name)
return makefile_writer.VarsDictData(vars_dict=vars_dict, name=name,
condition=condition)
def generate_dummy_makefile(target_dir):
"""Create a dummy makefile to demonstrate how it works.
Use dummy values unrelated to any gyp files. Its output should remain the
same unless/until makefile_writer.write_android_mk changes.
Args:
target_dir: directory in which to write the resulting Android.mk
"""
common_vars_dict = generate_dummy_vars_dict(None)
deviation_params = [('foo', 'COND'), ('bar', None)]
deviations = [generate_dummy_vars_dict_data(name, condition)
for (name, condition) in deviation_params]
makefile_writer.write_android_mk(target_dir=target_dir,
common=common_vars_dict,
deviations_from_common=deviations)
def generate_dummy_tool_makefile(target_dir):
"""Create a dummy makefile for a tool.
Args:
target_dir: directory in which to write the resulting Android.mk
"""
vars_dict = generate_dummy_vars_dict(None)
tool_makefile_writer.write_tool_android_mk(target_dir=target_dir,
var_dict=vars_dict)
class MakefileWriterTest(unittest.TestCase):
def test_write_group_empty(self):
f = tempfile.TemporaryFile()
assert f.tell() == 0
for empty in (None, []):
for truth in (True, False):
makefile_writer.write_group(f, 'name', empty, truth)
self.assertEqual(f.tell(), 0)
f.close()
def test_write_group(self):
animals = ('dog', 'cat', 'mouse', 'elephant')
fd, filename = tempfile.mkstemp()
with open(filename, 'w') as f:
makefile_writer.write_group(f, 'animals', animals, False)
os.close(fd)
# Now confirm that it matches expectations
utils.compare_to_expectation(filename, 'animals.txt', self.assertTrue)
with open(filename, 'w') as f:
makefile_writer.write_group(f, 'animals_append', animals, True)
# Now confirm that it matches expectations
utils.compare_to_expectation(filename, 'animals_append.txt',
self.assertTrue)
os.remove(filename)
def test_write_local_vars(self):
vars_dict = generate_dummy_vars_dict(None)
# Compare various ways of calling write_local_vars to expectations.
for (filename, append, name) in generate_write_local_vars_params():
fd, outfile = tempfile.mkstemp()
with open(outfile, 'w') as f:
makefile_writer.write_local_vars(f, vars_dict, append, name)
os.close(fd)
# Compare to the expected file.
utils.compare_to_expectation(outfile, filename, self.assertTrue,
REBASELINE_MSG)
# KNOWN_TARGETS is always a key in the input VarsDict, but it should not
# be written to the resulting file.
# Note that this assumes none of our dummy entries is 'KNOWN_TARGETS'.
known_targets_name = 'KNOWN_TARGETS'
self.assertEqual(len(vars_dict[known_targets_name]), 1)
with open(outfile, 'r') as f:
self.assertNotIn(known_targets_name, f.read())
os.remove(outfile)
def test_write_android_mk(self):
outdir = tempfile.mkdtemp()
generate_dummy_makefile(outdir)
utils.compare_to_expectation(os.path.join(outdir, MAKEFILE_NAME),
MAKEFILE_NAME, self.assertTrue, REBASELINE_MSG)
shutil.rmtree(outdir)
def test_tool_writer(self):
outdir = tempfile.mkdtemp()
tool_dir = os.path.join(outdir, TOOL_DIR)
os.mkdir(tool_dir)
generate_dummy_tool_makefile(tool_dir)
utils.compare_to_expectation(os.path.join(tool_dir, MAKEFILE_NAME),
os.path.join(TOOL_DIR, MAKEFILE_NAME),
self.assertTrue, REBASELINE_MSG)
def main():
loader = unittest.TestLoader()
suite = loader.loadTestsFromTestCase(MakefileWriterTest)
results = unittest.TextTestRunner(verbosity=2).run(suite)
print repr(results)
if not results.wasSuccessful():
raise Exception('failed one or more unittests')
def rebaseline():
generate_dummy_makefile(utils.EXPECTATIONS_DIR)
vars_dict = generate_dummy_vars_dict(None)
for (filename, append, name) in generate_write_local_vars_params():
with open(os.path.join(utils.EXPECTATIONS_DIR, filename), 'w') as f:
makefile_writer.write_local_vars(f, vars_dict, append, name)
generate_dummy_tool_makefile(os.path.join(utils.EXPECTATIONS_DIR, TOOL_DIR))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-r', '--rebaseline', help='Rebaseline expectations.',
action='store_true')
args = parser.parse_args()
if args.rebaseline:
rebaseline()
else:
main()
| bsd-3-clause |
ykaneko/quantum | quantum/scheduler/dhcp_agent_scheduler.py | 2 | 4728 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
from sqlalchemy.orm import exc
from sqlalchemy.sql import exists
from quantum.common import constants
from quantum.db import agents_db
from quantum.db import agentschedulers_db
from quantum.db import models_v2
from quantum.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ChanceScheduler(object):
"""Allocate a DHCP agent for a network in a random way.
    A more sophisticated scheduler (similar to the filter scheduler in nova?)
    can be introduced later.
"""
def schedule(self, plugin, context, network):
"""Schedule the network to an active DHCP agent if there
is no active DHCP agent hosting it.
"""
#TODO(gongysh) don't schedule the networks with only
# subnets whose enable_dhcp is false
with context.session.begin(subtransactions=True):
dhcp_agents = plugin.get_dhcp_agents_hosting_networks(
context, [network['id']], active=True)
if dhcp_agents:
LOG.debug(_('Network %s is hosted already'),
network['id'])
return
enabled_dhcp_agents = plugin.get_agents_db(
context, filters={
'agent_type': [constants.AGENT_TYPE_DHCP],
'admin_state_up': [True]})
if not enabled_dhcp_agents:
LOG.warn(_('No enabled DHCP agents'))
return
active_dhcp_agents = [enabled_dhcp_agent for enabled_dhcp_agent in
enabled_dhcp_agents if not
agents_db.AgentDbMixin.is_agent_down(
enabled_dhcp_agent['heartbeat_timestamp'])]
if not active_dhcp_agents:
LOG.warn(_('No active DHCP agents'))
return
chosen_agent = random.choice(active_dhcp_agents)
binding = agentschedulers_db.NetworkDhcpAgentBinding()
binding.dhcp_agent = chosen_agent
binding.network_id = network['id']
context.session.add(binding)
LOG.debug(_('Network %(network_id)s is scheduled to be hosted by '
'DHCP agent %(agent_id)s'),
{'network_id': network['id'],
'agent_id': chosen_agent['id']})
return chosen_agent
def auto_schedule_networks(self, plugin, context, host):
"""Schedule non-hosted networks to the DHCP agent on
the specified host.
"""
with context.session.begin(subtransactions=True):
query = context.session.query(agents_db.Agent)
query = query.filter(agents_db.Agent.agent_type ==
constants.AGENT_TYPE_DHCP,
agents_db.Agent.host == host,
agents_db.Agent.admin_state_up == True)
try:
dhcp_agent = query.one()
except (exc.MultipleResultsFound, exc.NoResultFound):
LOG.warn(_('No enabled DHCP agent on host %s'),
host)
return False
if agents_db.AgentDbMixin.is_agent_down(
dhcp_agent.heartbeat_timestamp):
LOG.warn(_('DHCP agent %s is not active'), dhcp_agent.id)
#TODO(gongysh) consider the disabled agent's network
net_stmt = ~exists().where(
models_v2.Network.id ==
agentschedulers_db.NetworkDhcpAgentBinding.network_id)
net_ids = context.session.query(
models_v2.Network.id).filter(net_stmt).all()
if not net_ids:
LOG.debug(_('No non-hosted networks'))
return False
for net_id in net_ids:
binding = agentschedulers_db.NetworkDhcpAgentBinding()
binding.dhcp_agent = dhcp_agent
binding.network_id = net_id[0]
context.session.add(binding)
return True
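# Rough usage sketch (the plugin, context and host values are hypothetical
# and not defined in this module; in practice ChanceScheduler is driven by
# the Quantum plugin rather than called directly):
#
#   scheduler = ChanceScheduler()
#   agent = scheduler.schedule(plugin, context, {'id': network_id})
#   scheduler.auto_schedule_networks(plugin, context, 'network-node-1')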
| apache-2.0 |
dennis-sheil/commandergenius | project/jni/python/src/Lib/plat-mac/Carbon/QuickTime.py | 81 | 129090 | # Generated from 'Movies.h'
def FOUR_CHAR_CODE(x): return x
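# FOUR_CHAR_CODE is an identity helper: in this generated Python module the
# four-character OSType codes are simply kept as their string literals
# (e.g. 'moov'), so the constants below evaluate to those strings.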
xmlIdentifierUnrecognized = -1
kControllerMinimum = -0xf777
notImplementedMusicOSErr = -2071
cantSendToSynthesizerOSErr = -2072
cantReceiveFromSynthesizerOSErr = -2073
illegalVoiceAllocationOSErr = -2074
illegalPartOSErr = -2075
illegalChannelOSErr = -2076
illegalKnobOSErr = -2077
illegalKnobValueOSErr = -2078
illegalInstrumentOSErr = -2079
illegalControllerOSErr = -2080
midiManagerAbsentOSErr = -2081
synthesizerNotRespondingOSErr = -2082
synthesizerOSErr = -2083
illegalNoteChannelOSErr = -2084
noteChannelNotAllocatedOSErr = -2085
tunePlayerFullOSErr = -2086
tuneParseOSErr = -2087
MovieFileType = FOUR_CHAR_CODE('MooV')
MovieScrapType = FOUR_CHAR_CODE('moov')
MovieResourceType = FOUR_CHAR_CODE('moov')
MovieForwardPointerResourceType = FOUR_CHAR_CODE('fore')
MovieBackwardPointerResourceType = FOUR_CHAR_CODE('back')
MovieResourceAtomType = FOUR_CHAR_CODE('moov')
MovieDataAtomType = FOUR_CHAR_CODE('mdat')
FreeAtomType = FOUR_CHAR_CODE('free')
SkipAtomType = FOUR_CHAR_CODE('skip')
WideAtomPlaceholderType = FOUR_CHAR_CODE('wide')
MediaHandlerType = FOUR_CHAR_CODE('mhlr')
DataHandlerType = FOUR_CHAR_CODE('dhlr')
VideoMediaType = FOUR_CHAR_CODE('vide')
SoundMediaType = FOUR_CHAR_CODE('soun')
TextMediaType = FOUR_CHAR_CODE('text')
BaseMediaType = FOUR_CHAR_CODE('gnrc')
MPEGMediaType = FOUR_CHAR_CODE('MPEG')
MusicMediaType = FOUR_CHAR_CODE('musi')
TimeCodeMediaType = FOUR_CHAR_CODE('tmcd')
SpriteMediaType = FOUR_CHAR_CODE('sprt')
FlashMediaType = FOUR_CHAR_CODE('flsh')
MovieMediaType = FOUR_CHAR_CODE('moov')
TweenMediaType = FOUR_CHAR_CODE('twen')
ThreeDeeMediaType = FOUR_CHAR_CODE('qd3d')
SkinMediaType = FOUR_CHAR_CODE('skin')
HandleDataHandlerSubType = FOUR_CHAR_CODE('hndl')
PointerDataHandlerSubType = FOUR_CHAR_CODE('ptr ')
NullDataHandlerSubType = FOUR_CHAR_CODE('null')
ResourceDataHandlerSubType = FOUR_CHAR_CODE('rsrc')
URLDataHandlerSubType = FOUR_CHAR_CODE('url ')
WiredActionHandlerType = FOUR_CHAR_CODE('wire')
VisualMediaCharacteristic = FOUR_CHAR_CODE('eyes')
AudioMediaCharacteristic = FOUR_CHAR_CODE('ears')
kCharacteristicCanSendVideo = FOUR_CHAR_CODE('vsnd')
kCharacteristicProvidesActions = FOUR_CHAR_CODE('actn')
kCharacteristicNonLinear = FOUR_CHAR_CODE('nonl')
kCharacteristicCanStep = FOUR_CHAR_CODE('step')
kCharacteristicHasNoDuration = FOUR_CHAR_CODE('noti')
kCharacteristicHasSkinData = FOUR_CHAR_CODE('skin')
kCharacteristicProvidesKeyFocus = FOUR_CHAR_CODE('keyf')
kUserDataMovieControllerType = FOUR_CHAR_CODE('ctyp')
kUserDataName = FOUR_CHAR_CODE('name')
kUserDataTextAlbum = FOUR_CHAR_CODE('\xa9alb')
kUserDataTextArtist = FOUR_CHAR_CODE('\xa9ART')
kUserDataTextAuthor = FOUR_CHAR_CODE('\xa9aut')
kUserDataTextChapter = FOUR_CHAR_CODE('\xa9chp')
kUserDataTextComment = FOUR_CHAR_CODE('\xa9cmt')
kUserDataTextComposer = FOUR_CHAR_CODE('\xa9com')
kUserDataTextCopyright = FOUR_CHAR_CODE('\xa9cpy')
kUserDataTextCreationDate = FOUR_CHAR_CODE('\xa9day')
kUserDataTextDescription = FOUR_CHAR_CODE('\xa9des')
kUserDataTextDirector = FOUR_CHAR_CODE('\xa9dir')
kUserDataTextDisclaimer = FOUR_CHAR_CODE('\xa9dis')
kUserDataTextEncodedBy = FOUR_CHAR_CODE('\xa9enc')
kUserDataTextFullName = FOUR_CHAR_CODE('\xa9nam')
kUserDataTextGenre = FOUR_CHAR_CODE('\xa9gen')
kUserDataTextHostComputer = FOUR_CHAR_CODE('\xa9hst')
kUserDataTextInformation = FOUR_CHAR_CODE('\xa9inf')
kUserDataTextKeywords = FOUR_CHAR_CODE('\xa9key')
kUserDataTextMake = FOUR_CHAR_CODE('\xa9mak')
kUserDataTextModel = FOUR_CHAR_CODE('\xa9mod')
kUserDataTextOriginalArtist = FOUR_CHAR_CODE('\xa9ope')
kUserDataTextOriginalFormat = FOUR_CHAR_CODE('\xa9fmt')
kUserDataTextOriginalSource = FOUR_CHAR_CODE('\xa9src')
kUserDataTextPerformers = FOUR_CHAR_CODE('\xa9prf')
kUserDataTextProducer = FOUR_CHAR_CODE('\xa9prd')
kUserDataTextProduct = FOUR_CHAR_CODE('\xa9PRD')
kUserDataTextSoftware = FOUR_CHAR_CODE('\xa9swr')
kUserDataTextSpecialPlaybackRequirements = FOUR_CHAR_CODE('\xa9req')
kUserDataTextTrack = FOUR_CHAR_CODE('\xa9trk')
kUserDataTextWarning = FOUR_CHAR_CODE('\xa9wrn')
kUserDataTextWriter = FOUR_CHAR_CODE('\xa9wrt')
kUserDataTextURLLink = FOUR_CHAR_CODE('\xa9url')
kUserDataTextEditDate1 = FOUR_CHAR_CODE('\xa9ed1')
kUserDataUnicodeBit = 1L << 7
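# Note: the kUserDataText* codes above embed '\xa9' (the Mac Roman copyright
# character) as their first byte, which is QuickTime's convention for user
# data text items (e.g. the full-name item '\xa9nam').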
DoTheRightThing = 0
kQTNetworkStatusNoNetwork = -2
kQTNetworkStatusUncertain = -1
kQTNetworkStatusNotConnected = 0
kQTNetworkStatusConnected = 1
kMusicFlagDontPlay2Soft = 1L << 0
kMusicFlagDontSlaveToMovie = 1L << 1
dfDontDisplay = 1 << 0
dfDontAutoScale = 1 << 1
dfClipToTextBox = 1 << 2
dfUseMovieBGColor = 1 << 3
dfShrinkTextBoxToFit = 1 << 4
dfScrollIn = 1 << 5
dfScrollOut = 1 << 6
dfHorizScroll = 1 << 7
dfReverseScroll = 1 << 8
dfContinuousScroll = 1 << 9
dfFlowHoriz = 1 << 10
dfContinuousKaraoke = 1 << 11
dfDropShadow = 1 << 12
dfAntiAlias = 1 << 13
dfKeyedText = 1 << 14
dfInverseHilite = 1 << 15
dfTextColorHilite = 1 << 16
searchTextDontGoToFoundTime = 1L << 16
searchTextDontHiliteFoundText = 1L << 17
searchTextOneTrackOnly = 1L << 18
searchTextEnabledTracksOnly = 1L << 19
kTextTextHandle = 1
kTextTextPtr = 2
kTextTEStyle = 3
kTextSelection = 4
kTextBackColor = 5
kTextForeColor = 6
kTextFace = 7
kTextFont = 8
kTextSize = 9
kTextAlignment = 10
kTextHilite = 11
kTextDropShadow = 12
kTextDisplayFlags = 13
kTextScroll = 14
kTextRelativeScroll = 15
kTextHyperTextFace = 16
kTextHyperTextColor = 17
kTextKeyEntry = 18
kTextMouseDown = 19
kTextTextBox = 20
kTextEditState = 21
kTextLength = 22
k3DMediaRendererEntry = FOUR_CHAR_CODE('rend')
k3DMediaRendererName = FOUR_CHAR_CODE('name')
k3DMediaRendererCode = FOUR_CHAR_CODE('rcod')
movieProgressOpen = 0
movieProgressUpdatePercent = 1
movieProgressClose = 2
progressOpFlatten = 1
progressOpInsertTrackSegment = 2
progressOpInsertMovieSegment = 3
progressOpPaste = 4
progressOpAddMovieSelection = 5
progressOpCopy = 6
progressOpCut = 7
progressOpLoadMovieIntoRam = 8
progressOpLoadTrackIntoRam = 9
progressOpLoadMediaIntoRam = 10
progressOpImportMovie = 11
progressOpExportMovie = 12
mediaQualityDraft = 0x0000
mediaQualityNormal = 0x0040
mediaQualityBetter = 0x0080
mediaQualityBest = 0x00C0
kQTEventPayloadIsQTList = 1L << 0
kActionMovieSetVolume = 1024
kActionMovieSetRate = 1025
kActionMovieSetLoopingFlags = 1026
kActionMovieGoToTime = 1027
kActionMovieGoToTimeByName = 1028
kActionMovieGoToBeginning = 1029
kActionMovieGoToEnd = 1030
kActionMovieStepForward = 1031
kActionMovieStepBackward = 1032
kActionMovieSetSelection = 1033
kActionMovieSetSelectionByName = 1034
kActionMoviePlaySelection = 1035
kActionMovieSetLanguage = 1036
kActionMovieChanged = 1037
kActionMovieRestartAtTime = 1038
kActionMovieGotoNextChapter = 1039
kActionMovieGotoPreviousChapter = 1040
kActionMovieGotoFirstChapter = 1041
kActionMovieGotoLastChapter = 1042
kActionMovieGotoChapterByIndex = 1043
kActionMovieSetScale = 1044
kActionTrackSetVolume = 2048
kActionTrackSetBalance = 2049
kActionTrackSetEnabled = 2050
kActionTrackSetMatrix = 2051
kActionTrackSetLayer = 2052
kActionTrackSetClip = 2053
kActionTrackSetCursor = 2054
kActionTrackSetGraphicsMode = 2055
kActionTrackSetIdleFrequency = 2056
kActionTrackSetBassTreble = 2057
kActionSpriteSetMatrix = 3072
kActionSpriteSetImageIndex = 3073
kActionSpriteSetVisible = 3074
kActionSpriteSetLayer = 3075
kActionSpriteSetGraphicsMode = 3076
kActionSpritePassMouseToCodec = 3078
kActionSpriteClickOnCodec = 3079
kActionSpriteTranslate = 3080
kActionSpriteScale = 3081
kActionSpriteRotate = 3082
kActionSpriteStretch = 3083
kActionSpriteSetCanBeHitTested = 3094
kActionQTVRSetPanAngle = 4096
kActionQTVRSetTiltAngle = 4097
kActionQTVRSetFieldOfView = 4098
kActionQTVRShowDefaultView = 4099
kActionQTVRGoToNodeID = 4100
kActionQTVREnableHotSpot = 4101
kActionQTVRShowHotSpots = 4102
kActionQTVRTranslateObject = 4103
kActionQTVRSetViewState = 4109
kActionMusicPlayNote = 5120
kActionMusicSetController = 5121
kActionCase = 6144
kActionWhile = 6145
kActionGoToURL = 6146
kActionSendQTEventToSprite = 6147
kActionDebugStr = 6148
kActionPushCurrentTime = 6149
kActionPushCurrentTimeWithLabel = 6150
kActionPopAndGotoTopTime = 6151
kActionPopAndGotoLabeledTime = 6152
kActionStatusString = 6153
kActionSendQTEventToTrackObject = 6154
kActionAddChannelSubscription = 6155
kActionRemoveChannelSubscription = 6156
kActionOpenCustomActionHandler = 6157
kActionDoScript = 6158
kActionDoCompressedActions = 6159
kActionSendAppMessage = 6160
kActionLoadComponent = 6161
kActionSetFocus = 6162
kActionDontPassKeyEvent = 6163
kActionSetRandomSeed = 6164
kActionSpriteTrackSetVariable = 7168
kActionSpriteTrackNewSprite = 7169
kActionSpriteTrackDisposeSprite = 7170
kActionSpriteTrackSetVariableToString = 7171
kActionSpriteTrackConcatVariables = 7172
kActionSpriteTrackSetVariableToMovieURL = 7173
kActionSpriteTrackSetVariableToMovieBaseURL = 7174
kActionSpriteTrackSetAllSpritesHitTestingMode = 7181
kActionSpriteTrackNewImage = 7182
kActionSpriteTrackDisposeImage = 7183
kActionApplicationNumberAndString = 8192
kActionQD3DNamedObjectTranslateTo = 9216
kActionQD3DNamedObjectScaleTo = 9217
kActionQD3DNamedObjectRotateTo = 9218
kActionFlashTrackSetPan = 10240
kActionFlashTrackSetZoom = 10241
kActionFlashTrackSetZoomRect = 10242
kActionFlashTrackGotoFrameNumber = 10243
kActionFlashTrackGotoFrameLabel = 10244
kActionFlashTrackSetFlashVariable = 10245
kActionFlashTrackDoButtonActions = 10246
kActionMovieTrackAddChildMovie = 11264
kActionMovieTrackLoadChildMovie = 11265
kActionMovieTrackLoadChildMovieWithQTListParams = 11266
kActionTextTrackPasteText = 12288
kActionTextTrackSetTextBox = 12291
kActionTextTrackSetTextStyle = 12292
kActionTextTrackSetSelection = 12293
kActionTextTrackSetBackgroundColor = 12294
kActionTextTrackSetForegroundColor = 12295
kActionTextTrackSetFace = 12296
kActionTextTrackSetFont = 12297
kActionTextTrackSetSize = 12298
kActionTextTrackSetAlignment = 12299
kActionTextTrackSetHilite = 12300
kActionTextTrackSetDropShadow = 12301
kActionTextTrackSetDisplayFlags = 12302
kActionTextTrackSetScroll = 12303
kActionTextTrackRelativeScroll = 12304
kActionTextTrackFindText = 12305
kActionTextTrackSetHyperTextFace = 12306
kActionTextTrackSetHyperTextColor = 12307
kActionTextTrackKeyEntry = 12308
kActionTextTrackMouseDown = 12309
kActionTextTrackSetEditable = 12310
kActionListAddElement = 13312
kActionListRemoveElements = 13313
kActionListSetElementValue = 13314
kActionListPasteFromXML = 13315
kActionListSetMatchingFromXML = 13316
kActionListSetFromURL = 13317
kActionListExchangeLists = 13318
kActionListServerQuery = 13319
kOperandExpression = 1
kOperandConstant = 2
kOperandSubscribedToChannel = 3
kOperandUniqueCustomActionHandlerID = 4
kOperandCustomActionHandlerIDIsOpen = 5
kOperandConnectionSpeed = 6
kOperandGMTDay = 7
kOperandGMTMonth = 8
kOperandGMTYear = 9
kOperandGMTHours = 10
kOperandGMTMinutes = 11
kOperandGMTSeconds = 12
kOperandLocalDay = 13
kOperandLocalMonth = 14
kOperandLocalYear = 15
kOperandLocalHours = 16
kOperandLocalMinutes = 17
kOperandLocalSeconds = 18
kOperandRegisteredForQuickTimePro = 19
kOperandPlatformRunningOn = 20
kOperandQuickTimeVersion = 21
kOperandComponentVersion = 22
kOperandOriginalHandlerRefcon = 23
kOperandTicks = 24
kOperandMaxLoadedTimeInMovie = 25
kOperandEventParameter = 26
kOperandFreeMemory = 27
kOperandNetworkStatus = 28
kOperandQuickTimeVersionRegistered = 29
kOperandSystemVersion = 30
kOperandMovieVolume = 1024
kOperandMovieRate = 1025
kOperandMovieIsLooping = 1026
kOperandMovieLoopIsPalindrome = 1027
kOperandMovieTime = 1028
kOperandMovieDuration = 1029
kOperandMovieTimeScale = 1030
kOperandMovieWidth = 1031
kOperandMovieHeight = 1032
kOperandMovieLoadState = 1033
kOperandMovieTrackCount = 1034
kOperandMovieIsActive = 1035
kOperandMovieName = 1036
kOperandMovieID = 1037
kOperandMovieChapterCount = 1038
kOperandMovieChapterIndex = 1039
kOperandMovieChapterName = 1040
kOperandMovieChapterNameByIndex = 1041
kOperandMovieChapterIndexByName = 1042
kOperandMovieAnnotation = 1043
kOperandMovieConnectionFlags = 1044
kOperandMovieConnectionString = 1045
kOperandTrackVolume = 2048
kOperandTrackBalance = 2049
kOperandTrackEnabled = 2050
kOperandTrackLayer = 2051
kOperandTrackWidth = 2052
kOperandTrackHeight = 2053
kOperandTrackDuration = 2054
kOperandTrackName = 2055
kOperandTrackID = 2056
kOperandTrackIdleFrequency = 2057
kOperandTrackBass = 2058
kOperandTrackTreble = 2059
kOperandSpriteBoundsLeft = 3072
kOperandSpriteBoundsTop = 3073
kOperandSpriteBoundsRight = 3074
kOperandSpriteBoundsBottom = 3075
kOperandSpriteImageIndex = 3076
kOperandSpriteVisible = 3077
kOperandSpriteLayer = 3078
kOperandSpriteTrackVariable = 3079
kOperandSpriteTrackNumSprites = 3080
kOperandSpriteTrackNumImages = 3081
kOperandSpriteID = 3082
kOperandSpriteIndex = 3083
kOperandSpriteFirstCornerX = 3084
kOperandSpriteFirstCornerY = 3085
kOperandSpriteSecondCornerX = 3086
kOperandSpriteSecondCornerY = 3087
kOperandSpriteThirdCornerX = 3088
kOperandSpriteThirdCornerY = 3089
kOperandSpriteFourthCornerX = 3090
kOperandSpriteFourthCornerY = 3091
kOperandSpriteImageRegistrationPointX = 3092
kOperandSpriteImageRegistrationPointY = 3093
kOperandSpriteTrackSpriteIDAtPoint = 3094
kOperandSpriteName = 3095
kOperandSpriteCanBeHitTested = 3105
kOperandSpriteTrackAllSpritesHitTestingMode = 3106
kOperandSpriteTrackImageIDByIndex = 3107
kOperandSpriteTrackImageIndexByID = 3108
kOperandQTVRPanAngle = 4096
kOperandQTVRTiltAngle = 4097
kOperandQTVRFieldOfView = 4098
kOperandQTVRNodeID = 4099
kOperandQTVRHotSpotsVisible = 4100
kOperandQTVRViewCenterH = 4101
kOperandQTVRViewCenterV = 4102
kOperandQTVRViewStateCount = 4103
kOperandQTVRViewState = 4104
kOperandMouseLocalHLoc = 5120
kOperandMouseLocalVLoc = 5121
kOperandKeyIsDown = 5122
kOperandRandom = 5123
kOperandCanHaveFocus = 5124
kOperandHasFocus = 5125
kOperandTextTrackEditable = 6144
kOperandTextTrackCopyText = 6145
kOperandTextTrackStartSelection = 6146
kOperandTextTrackEndSelection = 6147
kOperandTextTrackTextBoxLeft = 6148
kOperandTextTrackTextBoxTop = 6149
kOperandTextTrackTextBoxRight = 6150
kOperandTextTrackTextBoxBottom = 6151
kOperandTextTrackTextLength = 6152
kOperandListCountElements = 7168
kOperandListGetElementPathByIndex = 7169
kOperandListGetElementValue = 7170
kOperandListCopyToXML = 7171
kOperandSin = 8192
kOperandCos = 8193
kOperandTan = 8194
kOperandATan = 8195
kOperandATan2 = 8196
kOperandDegreesToRadians = 8197
kOperandRadiansToDegrees = 8198
kOperandSquareRoot = 8199
kOperandExponent = 8200
kOperandLog = 8201
kOperandFlashTrackVariable = 9216
kOperandStringLength = 10240
kOperandStringCompare = 10241
kOperandStringSubString = 10242
kOperandStringConcat = 10243
kFirstMovieAction = kActionMovieSetVolume
kLastMovieAction = kActionMovieSetScale
kFirstTrackAction = kActionTrackSetVolume
kLastTrackAction = kActionTrackSetBassTreble
kFirstSpriteAction = kActionSpriteSetMatrix
kLastSpriteAction = kActionSpriteSetCanBeHitTested
kFirstQTVRAction = kActionQTVRSetPanAngle
kLastQTVRAction = kActionQTVRSetViewState
kFirstMusicAction = kActionMusicPlayNote
kLastMusicAction = kActionMusicSetController
kFirstSystemAction = kActionCase
kLastSystemAction = kActionSetRandomSeed
kFirstSpriteTrackAction = kActionSpriteTrackSetVariable
kLastSpriteTrackAction = kActionSpriteTrackDisposeImage
kFirstApplicationAction = kActionApplicationNumberAndString
kLastApplicationAction = kActionApplicationNumberAndString
kFirstQD3DNamedObjectAction = kActionQD3DNamedObjectTranslateTo
kLastQD3DNamedObjectAction = kActionQD3DNamedObjectRotateTo
kFirstFlashTrackAction = kActionFlashTrackSetPan
kLastFlashTrackAction = kActionFlashTrackDoButtonActions
kFirstMovieTrackAction = kActionMovieTrackAddChildMovie
kLastMovieTrackAction = kActionMovieTrackLoadChildMovieWithQTListParams
kFirstTextTrackAction = kActionTextTrackPasteText
kLastTextTrackAction = kActionTextTrackSetEditable
kFirstMultiTargetAction = kActionListAddElement
kLastMultiTargetAction = kActionListServerQuery
kFirstAction = kFirstMovieAction
kLastAction = kLastMultiTargetAction
kTargetMovie = FOUR_CHAR_CODE('moov')
kTargetMovieName = FOUR_CHAR_CODE('mona')
kTargetMovieID = FOUR_CHAR_CODE('moid')
kTargetRootMovie = FOUR_CHAR_CODE('moro')
kTargetParentMovie = FOUR_CHAR_CODE('mopa')
kTargetChildMovieTrackName = FOUR_CHAR_CODE('motn')
kTargetChildMovieTrackID = FOUR_CHAR_CODE('moti')
kTargetChildMovieTrackIndex = FOUR_CHAR_CODE('motx')
kTargetChildMovieMovieName = FOUR_CHAR_CODE('momn')
kTargetChildMovieMovieID = FOUR_CHAR_CODE('momi')
kTargetTrackName = FOUR_CHAR_CODE('trna')
kTargetTrackID = FOUR_CHAR_CODE('trid')
kTargetTrackType = FOUR_CHAR_CODE('trty')
kTargetTrackIndex = FOUR_CHAR_CODE('trin')
kTargetSpriteName = FOUR_CHAR_CODE('spna')
kTargetSpriteID = FOUR_CHAR_CODE('spid')
kTargetSpriteIndex = FOUR_CHAR_CODE('spin')
kTargetQD3DNamedObjectName = FOUR_CHAR_CODE('nana')
kTargetCurrentQTEventParams = FOUR_CHAR_CODE('evpa')
kQTEventType = FOUR_CHAR_CODE('evnt')
kAction = FOUR_CHAR_CODE('actn')
kWhichAction = FOUR_CHAR_CODE('whic')
kActionParameter = FOUR_CHAR_CODE('parm')
kActionTarget = FOUR_CHAR_CODE('targ')
kActionFlags = FOUR_CHAR_CODE('flag')
kActionParameterMinValue = FOUR_CHAR_CODE('minv')
kActionParameterMaxValue = FOUR_CHAR_CODE('maxv')
kActionListAtomType = FOUR_CHAR_CODE('list')
kExpressionContainerAtomType = FOUR_CHAR_CODE('expr')
kConditionalAtomType = FOUR_CHAR_CODE('test')
kOperatorAtomType = FOUR_CHAR_CODE('oper')
kOperandAtomType = FOUR_CHAR_CODE('oprn')
kCommentAtomType = FOUR_CHAR_CODE('why ')
kCustomActionHandler = FOUR_CHAR_CODE('cust')
kCustomHandlerID = FOUR_CHAR_CODE('id ')
kCustomHandlerDesc = FOUR_CHAR_CODE('desc')
kQTEventRecordAtomType = FOUR_CHAR_CODE('erec')
kQTEventMouseClick = FOUR_CHAR_CODE('clik')
kQTEventMouseClickEnd = FOUR_CHAR_CODE('cend')
kQTEventMouseClickEndTriggerButton = FOUR_CHAR_CODE('trig')
kQTEventMouseEnter = FOUR_CHAR_CODE('entr')
kQTEventMouseExit = FOUR_CHAR_CODE('exit')
kQTEventMouseMoved = FOUR_CHAR_CODE('move')
kQTEventFrameLoaded = FOUR_CHAR_CODE('fram')
kQTEventIdle = FOUR_CHAR_CODE('idle')
kQTEventKey = FOUR_CHAR_CODE('key ')
kQTEventMovieLoaded = FOUR_CHAR_CODE('load')
kQTEventRequestToModifyMovie = FOUR_CHAR_CODE('reqm')
kQTEventListReceived = FOUR_CHAR_CODE('list')
kQTEventKeyUp = FOUR_CHAR_CODE('keyU')
kActionFlagActionIsDelta = 1L << 1
kActionFlagParameterWrapsAround = 1L << 2
kActionFlagActionIsToggle = 1L << 3
kStatusStringIsURLLink = 1L << 1
kStatusStringIsStreamingStatus = 1L << 2
kStatusHasCodeNumber = 1L << 3
kStatusIsError = 1L << 4
kScriptIsUnknownType = 1L << 0
kScriptIsJavaScript = 1L << 1
kScriptIsLingoEvent = 1L << 2
kScriptIsVBEvent = 1L << 3
kScriptIsProjectorCommand = 1L << 4
kScriptIsAppleScript = 1L << 5
kQTRegistrationDialogTimeOutFlag = 1 << 0
kQTRegistrationDialogShowDialog = 1 << 1
kQTRegistrationDialogForceDialog = 1 << 2
kOperatorAdd = FOUR_CHAR_CODE('add ')
kOperatorSubtract = FOUR_CHAR_CODE('sub ')
kOperatorMultiply = FOUR_CHAR_CODE('mult')
kOperatorDivide = FOUR_CHAR_CODE('div ')
kOperatorOr = FOUR_CHAR_CODE('or ')
kOperatorAnd = FOUR_CHAR_CODE('and ')
kOperatorNot = FOUR_CHAR_CODE('not ')
kOperatorLessThan = FOUR_CHAR_CODE('< ')
kOperatorLessThanEqualTo = FOUR_CHAR_CODE('<= ')
kOperatorEqualTo = FOUR_CHAR_CODE('= ')
kOperatorNotEqualTo = FOUR_CHAR_CODE('!= ')
kOperatorGreaterThan = FOUR_CHAR_CODE('> ')
kOperatorGreaterThanEqualTo = FOUR_CHAR_CODE('>= ')
kOperatorModulo = FOUR_CHAR_CODE('mod ')
kOperatorIntegerDivide = FOUR_CHAR_CODE('idiv')
kOperatorAbsoluteValue = FOUR_CHAR_CODE('abs ')
kOperatorNegate = FOUR_CHAR_CODE('neg ')
kPlatformMacintosh = 1
kPlatformWindows = 2
kSystemIsWindows9x = 0x00010000
kSystemIsWindowsNT = 0x00020000
kMediaPropertyNonLinearAtomType = FOUR_CHAR_CODE('nonl')
kMediaPropertyHasActions = 105
loopTimeBase = 1
palindromeLoopTimeBase = 2
maintainTimeBaseZero = 4
triggerTimeFwd = 0x0001
triggerTimeBwd = 0x0002
triggerTimeEither = 0x0003
triggerRateLT = 0x0004
triggerRateGT = 0x0008
triggerRateEqual = 0x0010
triggerRateLTE = triggerRateLT | triggerRateEqual
triggerRateGTE = triggerRateGT | triggerRateEqual
triggerRateNotEqual = triggerRateGT | triggerRateEqual | triggerRateLT
triggerRateChange = 0
triggerAtStart = 0x0001
triggerAtStop = 0x0002
timeBaseBeforeStartTime = 1
timeBaseAfterStopTime = 2
callBackAtTime = 1
callBackAtRate = 2
callBackAtTimeJump = 3
callBackAtExtremes = 4
callBackAtTimeBaseDisposed = 5
callBackAtInterrupt = 0x8000
callBackAtDeferredTask = 0x4000
qtcbNeedsRateChanges = 1
qtcbNeedsTimeChanges = 2
qtcbNeedsStartStopChanges = 4
keepInRam = 1 << 0
unkeepInRam = 1 << 1
flushFromRam = 1 << 2
loadForwardTrackEdits = 1 << 3
loadBackwardTrackEdits = 1 << 4
newMovieActive = 1 << 0
newMovieDontResolveDataRefs = 1 << 1
newMovieDontAskUnresolvedDataRefs = 1 << 2
newMovieDontAutoAlternates = 1 << 3
newMovieDontUpdateForeBackPointers = 1 << 4
newMovieDontAutoUpdateClock = 1 << 5
newMovieAsyncOK = 1 << 8
newMovieIdleImportOK = 1 << 10
newMovieDontInteractWithUser = 1 << 11
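# The newMovie* constants are bit flags intended to be OR'ed together when
# opening a movie; an illustrative combination (not taken from this file):
#   flags = newMovieActive | newMovieDontAskUnresolvedDataRefs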
trackUsageInMovie = 1 << 1
trackUsageInPreview = 1 << 2
trackUsageInPoster = 1 << 3
mediaSampleNotSync = 1 << 0
mediaSampleShadowSync = 1 << 1
pasteInParallel = 1 << 0
showUserSettingsDialog = 1 << 1
movieToFileOnlyExport = 1 << 2
movieFileSpecValid = 1 << 3
nextTimeMediaSample = 1 << 0
nextTimeMediaEdit = 1 << 1
nextTimeTrackEdit = 1 << 2
nextTimeSyncSample = 1 << 3
nextTimeStep = 1 << 4
nextTimeEdgeOK = 1 << 14
nextTimeIgnoreActiveSegment = 1 << 15
createMovieFileDeleteCurFile = 1L << 31
createMovieFileDontCreateMovie = 1L << 30
createMovieFileDontOpenFile = 1L << 29
createMovieFileDontCreateResFile = 1L << 28
flattenAddMovieToDataFork = 1L << 0
flattenActiveTracksOnly = 1L << 2
flattenDontInterleaveFlatten = 1L << 3
flattenFSSpecPtrIsDataRefRecordPtr = 1L << 4
flattenCompressMovieResource = 1L << 5
flattenForceMovieResourceBeforeMovieData = 1L << 6
movieInDataForkResID = -1
mcTopLeftMovie = 1 << 0
mcScaleMovieToFit = 1 << 1
mcWithBadge = 1 << 2
mcNotVisible = 1 << 3
mcWithFrame = 1 << 4
movieScrapDontZeroScrap = 1 << 0
movieScrapOnlyPutMovie = 1 << 1
dataRefSelfReference = 1 << 0
dataRefWasNotResolved = 1 << 1
kMovieAnchorDataRefIsDefault = 1 << 0
hintsScrubMode = 1 << 0
hintsLoop = 1 << 1
hintsDontPurge = 1 << 2
hintsUseScreenBuffer = 1 << 5
hintsAllowInterlace = 1 << 6
hintsUseSoundInterp = 1 << 7
hintsHighQuality = 1 << 8
hintsPalindrome = 1 << 9
hintsInactive = 1 << 11
hintsOffscreen = 1 << 12
hintsDontDraw = 1 << 13
hintsAllowBlacklining = 1 << 14
hintsDontUseVideoOverlaySurface = 1 << 16
hintsIgnoreBandwidthRestrictions = 1 << 17
hintsPlayingEveryFrame = 1 << 18
hintsAllowDynamicResize = 1 << 19
hintsSingleField = 1 << 20
hintsNoRenderingTimeOut = 1 << 21
hintsFlushVideoInsteadOfDirtying = 1 << 22
hintsEnableSubPixelPositioning = 1L << 23
mediaHandlerFlagBaseClient = 1
movieTrackMediaType = 1 << 0
movieTrackCharacteristic = 1 << 1
movieTrackEnabledOnly = 1 << 2
kMovieControlOptionHideController = (1L << 0)
kMovieControlOptionLocateTopLeft = (1L << 1)
kMovieControlOptionEnableEditing = (1L << 2)
kMovieControlOptionHandleEditingHI = (1L << 3)
kMovieControlOptionSetKeysEnabled = (1L << 4)
kMovieControlOptionManuallyIdled = (1L << 5)
kMovieControlDataMovieController = FOUR_CHAR_CODE('mc ')
kMovieControlDataMovie = FOUR_CHAR_CODE('moov')
kMovieControlDataManualIdling = FOUR_CHAR_CODE('manu')
movieDrawingCallWhenChanged = 0
movieDrawingCallAlways = 1
kQTCloneShareSamples = 1 << 0
kQTCloneDontCopyEdits = 1 << 1
kGetMovieImporterValidateToFind = 1L << 0
kGetMovieImporterAllowNewFile = 1L << 1
kGetMovieImporterDontConsiderGraphicsImporters = 1L << 2
kGetMovieImporterDontConsiderFileOnlyImporters = 1L << 6
kGetMovieImporterAutoImportOnly = 1L << 10
kQTGetMIMETypeInfoIsQuickTimeMovieType = FOUR_CHAR_CODE('moov')
kQTGetMIMETypeInfoIsUnhelpfulType = FOUR_CHAR_CODE('dumb')
kQTCopyUserDataReplace = FOUR_CHAR_CODE('rplc')
kQTCopyUserDataMerge = FOUR_CHAR_CODE('merg')
kMovieLoadStateError = -1L
kMovieLoadStateLoading = 1000
kMovieLoadStateLoaded = 2000
kMovieLoadStatePlayable = 10000
kMovieLoadStatePlaythroughOK = 20000
kMovieLoadStateComplete = 100000L
kQTDontUseDataToFindImporter = 1L << 0
kQTDontLookForMovieImporterIfGraphicsImporterFound = 1L << 1
kQTAllowOpeningStillImagesAsMovies = 1L << 2
kQTAllowImportersThatWouldCreateNewFile = 1L << 3
kQTAllowAggressiveImporters = 1L << 4
preloadAlways = 1L << 0
preloadOnlyIfEnabled = 1L << 1
fullScreenHideCursor = 1L << 0
fullScreenAllowEvents = 1L << 1
fullScreenDontChangeMenuBar = 1L << 2
fullScreenPreflightSize = 1L << 3
movieExecuteWiredActionDontExecute = 1L << 0
kRefConNavigationNext = 0
kRefConNavigationPrevious = 1
kRefConPropertyCanHaveFocus = 1
kRefConPropertyHasFocus = 2
kTrackFocusCanEditFlag = FOUR_CHAR_CODE('kedt')
kTrackDefaultFocusFlags = FOUR_CHAR_CODE('kfoc')
kTrackFocusDefaultRefcon = FOUR_CHAR_CODE('kref')
kTrackFocusOn = 1
kTrackHandlesTabs = 2
kFlashTrackPropertyAcceptAllClicks = FOUR_CHAR_CODE('clik')
kBackgroundSpriteLayerNum = 32767
kSpritePropertyMatrix = 1
kSpritePropertyImageDescription = 2
kSpritePropertyImageDataPtr = 3
kSpritePropertyVisible = 4
kSpritePropertyLayer = 5
kSpritePropertyGraphicsMode = 6
kSpritePropertyImageDataSize = 7
kSpritePropertyActionHandlingSpriteID = 8
kSpritePropertyCanBeHitTested = 9
kSpritePropertyImageIndex = 100
kSpriteTrackPropertyBackgroundColor = 101
kSpriteTrackPropertyOffscreenBitDepth = 102
kSpriteTrackPropertySampleFormat = 103
kSpriteTrackPropertyScaleSpritesToScaleWorld = 104
kSpriteTrackPropertyHasActions = 105
kSpriteTrackPropertyVisible = 106
kSpriteTrackPropertyQTIdleEventsFrequency = 107
kSpriteTrackPropertyAllSpritesHitTestingMode = 108
kSpriteTrackPropertyPreferredDepthInterpretationMode = 109
kSpriteImagePropertyRegistrationPoint = 1000
kSpriteImagePropertyGroupID = 1001
kSpriteTrackPreferredDepthCompatibilityMode = 0
kSpriteTrackPreferredDepthModernMode = 1
kSpriteHitTestUseSpritesOwnPropertiesMode = 0
kSpriteHitTestTreatAllSpritesAsHitTestableMode = 1
kSpriteHitTestTreatAllSpritesAsNotHitTestableMode = 2
kNoQTIdleEvents = -1
kGetSpriteWorldInvalidRegionAndLeaveIntact = -1L
kGetSpriteWorldInvalidRegionAndThenSetEmpty = -2L
kOnlyDrawToSpriteWorld = 1L << 0
kSpriteWorldPreflight = 1L << 1
kSpriteWorldDidDraw = 1L << 0
kSpriteWorldNeedsToDraw = 1L << 1
kKeyFrameAndSingleOverride = 1L << 1
kKeyFrameAndAllOverrides = 1L << 2
kScaleSpritesToScaleWorld = 1L << 1
kSpriteWorldHighQuality = 1L << 2
kSpriteWorldDontAutoInvalidate = 1L << 3
kSpriteWorldInvisible = 1L << 4
kSpriteWorldDirtyInsteadOfFlush = 1L << 5
kParentAtomIsContainer = 0
kTweenRecordNoFlags = 0
kTweenRecordIsAtInterruptTime = 0x00000001
kEffectNameAtom = FOUR_CHAR_CODE('name')
kEffectTypeAtom = FOUR_CHAR_CODE('type')
kEffectManufacturerAtom = FOUR_CHAR_CODE('manu')
pdActionConfirmDialog = 1
pdActionSetAppleMenu = 2
pdActionSetEditMenu = 3
pdActionGetDialogValues = 4
pdActionSetPreviewUserItem = 5
pdActionSetPreviewPicture = 6
pdActionSetColorPickerEventProc = 7
pdActionSetDialogTitle = 8
pdActionGetSubPanelMenu = 9
pdActionActivateSubPanel = 10
pdActionConductStopAlert = 11
pdActionModelessCallback = 12
pdActionFetchPreview = 13
pdActionSetDialogSettings = 14
pdActionGetDialogSettings = 15
pdActionGetNextSample = 16
pdActionGetPreviousSample = 17
pdActionCompactSample = 18
pdActionSetEditCallout = 19
pdActionSetSampleTime = 20
pdActionDoEditCommand = 21
pdActionGetSubPanelMenuValue = 22
pdActionCustomNewControl = 23
pdActionCustomDisposeControl = 24
pdActionCustomPositionControl = 25
pdActionCustomShowHideControl = 26
pdActionCustomHandleEvent = 27
pdActionCustomSetFocus = 28
pdActionCustomSetEditMenu = 29
pdActionCustomSetPreviewPicture = 30
pdActionCustomSetEditCallout = 31
pdActionCustomGetEnableValue = 32
pdActionCustomSetSampleTime = 33
pdActionCustomGetValue = 34
pdActionCustomDoEditCommand = 35
pdSampleTimeDisplayOptionsNone = 0x00000000
pdActionFocusOff = 0
pdActionFocusFirst = 1
pdActionFocusLast = 2
pdActionFocusForward = 3
pdActionFocusBackward = 4
elOptionsIncludeNoneInList = 0x00000001
pdOptionsCollectOneValue = 0x00000001
pdOptionsAllowOptionalInterpolations = 0x00000002
pdOptionsModalDialogBox = 0x00000004
pdOptionsEditCurrentEffectOnly = 0x00000008
pdOptionsHidePreview = 0x00000010
effectIsRealtime = 0
kAccessKeyAtomType = FOUR_CHAR_CODE('acky')
kAccessKeySystemFlag = 1L << 0
ConnectionSpeedPrefsType = FOUR_CHAR_CODE('cspd')
BandwidthManagementPrefsType = FOUR_CHAR_CODE('bwmg')
kQTIdlePriority = 10
kQTNonRealTimePriority = 20
kQTRealTimeSharedPriority = 25
kQTRealTimePriority = 30
kQTBandwidthNotifyNeedToStop = 1L << 0
kQTBandwidthNotifyGoodToGo = 1L << 1
kQTBandwidthChangeRequest = 1L << 2
kQTBandwidthQueueRequest = 1L << 3
kQTBandwidthScheduledRequest = 1L << 4
kQTBandwidthVoluntaryRelease = 1L << 5
kITextRemoveEverythingBut = 0 << 1
kITextRemoveLeaveSuggestedAlternate = 1 << 1
kITextAtomType = FOUR_CHAR_CODE('itxt')
kITextStringAtomType = FOUR_CHAR_CODE('text')
kQTParseTextHREFText = FOUR_CHAR_CODE('text')
kQTParseTextHREFBaseURL = FOUR_CHAR_CODE('burl')
kQTParseTextHREFClickPoint = FOUR_CHAR_CODE('clik')
kQTParseTextHREFUseAltDelim = FOUR_CHAR_CODE('altd')
kQTParseTextHREFDelimiter = FOUR_CHAR_CODE('delm')
kQTParseTextHREFRecomposeHREF = FOUR_CHAR_CODE('rhrf')
kQTParseTextHREFURL = FOUR_CHAR_CODE('url ')
kQTParseTextHREFTarget = FOUR_CHAR_CODE('targ')
kQTParseTextHREFChapter = FOUR_CHAR_CODE('chap')
kQTParseTextHREFIsAutoHREF = FOUR_CHAR_CODE('auto')
kQTParseTextHREFIsServerMap = FOUR_CHAR_CODE('smap')
kQTParseTextHREFHREF = FOUR_CHAR_CODE('href')
kQTParseTextHREFEMBEDArgs = FOUR_CHAR_CODE('mbed')
kTrackReferenceChapterList = FOUR_CHAR_CODE('chap')
kTrackReferenceTimeCode = FOUR_CHAR_CODE('tmcd')
kTrackReferenceModifier = FOUR_CHAR_CODE('ssrc')
kTrackModifierInput = 0x696E
kTrackModifierType = 0x7479
kTrackModifierReference = FOUR_CHAR_CODE('ssrc')
kTrackModifierObjectID = FOUR_CHAR_CODE('obid')
kTrackModifierInputName = FOUR_CHAR_CODE('name')
kInputMapSubInputID = FOUR_CHAR_CODE('subi')
kTrackModifierTypeMatrix = 1
kTrackModifierTypeClip = 2
kTrackModifierTypeGraphicsMode = 5
kTrackModifierTypeVolume = 3
kTrackModifierTypeBalance = 4
kTrackModifierTypeImage = FOUR_CHAR_CODE('vide')
kTrackModifierObjectMatrix = 6
kTrackModifierObjectGraphicsMode = 7
kTrackModifierType3d4x4Matrix = 8
kTrackModifierCameraData = 9
kTrackModifierSoundLocalizationData = 10
kTrackModifierObjectImageIndex = 11
kTrackModifierObjectLayer = 12
kTrackModifierObjectVisible = 13
kTrackModifierAngleAspectCamera = 14
kTrackModifierPanAngle = FOUR_CHAR_CODE('pan ')
kTrackModifierTiltAngle = FOUR_CHAR_CODE('tilt')
kTrackModifierVerticalFieldOfViewAngle = FOUR_CHAR_CODE('fov ')
kTrackModifierObjectQTEventSend = FOUR_CHAR_CODE('evnt')
kTrackModifierObjectCanBeHitTested = 15
kTweenTypeShort = 1
kTweenTypeLong = 2
kTweenTypeFixed = 3
kTweenTypePoint = 4
kTweenTypeQDRect = 5
kTweenTypeQDRegion = 6
kTweenTypeMatrix = 7
kTweenTypeRGBColor = 8
kTweenTypeGraphicsModeWithRGBColor = 9
kTweenTypeQTFloatSingle = 10
kTweenTypeQTFloatDouble = 11
kTweenTypeFixedPoint = 12
kTweenType3dScale = FOUR_CHAR_CODE('3sca')
kTweenType3dTranslate = FOUR_CHAR_CODE('3tra')
kTweenType3dRotate = FOUR_CHAR_CODE('3rot')
kTweenType3dRotateAboutPoint = FOUR_CHAR_CODE('3rap')
kTweenType3dRotateAboutAxis = FOUR_CHAR_CODE('3rax')
kTweenType3dRotateAboutVector = FOUR_CHAR_CODE('3rvc')
kTweenType3dQuaternion = FOUR_CHAR_CODE('3qua')
kTweenType3dMatrix = FOUR_CHAR_CODE('3mat')
kTweenType3dCameraData = FOUR_CHAR_CODE('3cam')
kTweenType3dAngleAspectCameraData = FOUR_CHAR_CODE('3caa')
kTweenType3dSoundLocalizationData = FOUR_CHAR_CODE('3slc')
kTweenTypePathToMatrixTranslation = FOUR_CHAR_CODE('gxmt')
kTweenTypePathToMatrixRotation = FOUR_CHAR_CODE('gxpr')
kTweenTypePathToMatrixTranslationAndRotation = FOUR_CHAR_CODE('gxmr')
kTweenTypePathToFixedPoint = FOUR_CHAR_CODE('gxfp')
kTweenTypePathXtoY = FOUR_CHAR_CODE('gxxy')
kTweenTypePathYtoX = FOUR_CHAR_CODE('gxyx')
kTweenTypeAtomList = FOUR_CHAR_CODE('atom')
kTweenTypePolygon = FOUR_CHAR_CODE('poly')
kTweenTypeMultiMatrix = FOUR_CHAR_CODE('mulm')
kTweenTypeSpin = FOUR_CHAR_CODE('spin')
kTweenType3dMatrixNonLinear = FOUR_CHAR_CODE('3nlr')
kTweenType3dVRObject = FOUR_CHAR_CODE('3vro')
kTweenEntry = FOUR_CHAR_CODE('twen')
kTweenData = FOUR_CHAR_CODE('data')
kTweenType = FOUR_CHAR_CODE('twnt')
kTweenStartOffset = FOUR_CHAR_CODE('twst')
kTweenDuration = FOUR_CHAR_CODE('twdu')
kTweenFlags = FOUR_CHAR_CODE('flag')
kTweenOutputMin = FOUR_CHAR_CODE('omin')
kTweenOutputMax = FOUR_CHAR_CODE('omax')
kTweenSequenceElement = FOUR_CHAR_CODE('seqe')
kTween3dInitialCondition = FOUR_CHAR_CODE('icnd')
kTweenInterpolationID = FOUR_CHAR_CODE('intr')
kTweenRegionData = FOUR_CHAR_CODE('qdrg')
kTweenPictureData = FOUR_CHAR_CODE('PICT')
kListElementType = FOUR_CHAR_CODE('type')
kListElementDataType = FOUR_CHAR_CODE('daty')
kNameAtom = FOUR_CHAR_CODE('name')
kInitialRotationAtom = FOUR_CHAR_CODE('inro')
kNonLinearTweenHeader = FOUR_CHAR_CODE('nlth')
kTweenReturnDelta = 1L << 0
kQTRestrictionClassSave = FOUR_CHAR_CODE('save')
kQTRestrictionSaveDontAddMovieResource = (1L << 0)
kQTRestrictionSaveDontFlatten = (1L << 1)
kQTRestrictionSaveDontExport = (1L << 2)
kQTRestrictionSaveDontExtract = (1L << 3)
kQTRestrictionClassEdit = FOUR_CHAR_CODE('edit')
kQTRestrictionEditDontCopy = (1L << 0)
kQTRestrictionEditDontCut = (1L << 1)
kQTRestrictionEditDontPaste = (1L << 2)
kQTRestrictionEditDontClear = (1L << 3)
kQTRestrictionEditDontModify = (1L << 4)
kQTRestrictionEditDontExtract = (1L << 5)
videoFlagDontLeanAhead = 1L << 0
txtProcDefaultDisplay = 0
txtProcDontDisplay = 1
txtProcDoDisplay = 2
findTextEdgeOK = 1 << 0
findTextCaseSensitive = 1 << 1
findTextReverseSearch = 1 << 2
findTextWrapAround = 1 << 3
findTextUseOffset = 1 << 4
dropShadowOffsetType = FOUR_CHAR_CODE('drpo')
dropShadowTranslucencyType = FOUR_CHAR_CODE('drpt')
spriteHitTestBounds = 1L << 0
spriteHitTestImage = 1L << 1
spriteHitTestInvisibleSprites = 1L << 2
spriteHitTestIsClick = 1L << 3
spriteHitTestLocInDisplayCoordinates = 1L << 4
spriteHitTestTreatAllSpritesAsHitTestable = 1L << 5
kSpriteAtomType = FOUR_CHAR_CODE('sprt')
kSpriteImagesContainerAtomType = FOUR_CHAR_CODE('imct')
kSpriteImageAtomType = FOUR_CHAR_CODE('imag')
kSpriteImageDataAtomType = FOUR_CHAR_CODE('imda')
kSpriteImageDataRefAtomType = FOUR_CHAR_CODE('imre')
kSpriteImageDataRefTypeAtomType = FOUR_CHAR_CODE('imrt')
kSpriteImageGroupIDAtomType = FOUR_CHAR_CODE('imgr')
kSpriteImageRegistrationAtomType = FOUR_CHAR_CODE('imrg')
kSpriteImageDefaultImageIndexAtomType = FOUR_CHAR_CODE('defi')
kSpriteSharedDataAtomType = FOUR_CHAR_CODE('dflt')
kSpriteNameAtomType = FOUR_CHAR_CODE('name')
kSpriteImageNameAtomType = FOUR_CHAR_CODE('name')
kSpriteUsesImageIDsAtomType = FOUR_CHAR_CODE('uses')
kSpriteBehaviorsAtomType = FOUR_CHAR_CODE('beha')
kSpriteImageBehaviorAtomType = FOUR_CHAR_CODE('imag')
kSpriteCursorBehaviorAtomType = FOUR_CHAR_CODE('crsr')
kSpriteStatusStringsBehaviorAtomType = FOUR_CHAR_CODE('sstr')
kSpriteVariablesContainerAtomType = FOUR_CHAR_CODE('vars')
kSpriteStringVariableAtomType = FOUR_CHAR_CODE('strv')
kSpriteFloatingPointVariableAtomType = FOUR_CHAR_CODE('flov')
kMovieMediaDataReference = FOUR_CHAR_CODE('mmdr')
kMovieMediaDefaultDataReferenceID = FOUR_CHAR_CODE('ddri')
kMovieMediaSlaveTime = FOUR_CHAR_CODE('slti')
kMovieMediaSlaveAudio = FOUR_CHAR_CODE('slau')
kMovieMediaSlaveGraphicsMode = FOUR_CHAR_CODE('slgr')
kMovieMediaAutoPlay = FOUR_CHAR_CODE('play')
kMovieMediaLoop = FOUR_CHAR_CODE('loop')
kMovieMediaUseMIMEType = FOUR_CHAR_CODE('mime')
kMovieMediaTitle = FOUR_CHAR_CODE('titl')
kMovieMediaAltText = FOUR_CHAR_CODE('altt')
kMovieMediaClipBegin = FOUR_CHAR_CODE('clpb')
kMovieMediaClipDuration = FOUR_CHAR_CODE('clpd')
kMovieMediaRegionAtom = FOUR_CHAR_CODE('regi')
kMovieMediaSlaveTrackDuration = FOUR_CHAR_CODE('sltr')
kMovieMediaEnableFrameStepping = FOUR_CHAR_CODE('enfs')
kMovieMediaBackgroundColor = FOUR_CHAR_CODE('bkcl')
kMovieMediaPrerollTime = FOUR_CHAR_CODE('prer')
kMovieMediaFitNone = 0
kMovieMediaFitScroll = FOUR_CHAR_CODE('scro')
kMovieMediaFitClipIfNecessary = FOUR_CHAR_CODE('hidd')
kMovieMediaFitFill = FOUR_CHAR_CODE('fill')
kMovieMediaFitMeet = FOUR_CHAR_CODE('meet')
kMovieMediaFitSlice = FOUR_CHAR_CODE('slic')
kMovieMediaSpatialAdjustment = FOUR_CHAR_CODE('fit ')
kMovieMediaRectangleAtom = FOUR_CHAR_CODE('rect')
kMovieMediaTop = FOUR_CHAR_CODE('top ')
kMovieMediaLeft = FOUR_CHAR_CODE('left')
kMovieMediaWidth = FOUR_CHAR_CODE('wd ')
kMovieMediaHeight = FOUR_CHAR_CODE('ht ')
kMoviePropertyDuration = FOUR_CHAR_CODE('dura')
kMoviePropertyTimeScale = FOUR_CHAR_CODE('tims')
kMoviePropertyTime = FOUR_CHAR_CODE('timv')
kMoviePropertyNaturalBounds = FOUR_CHAR_CODE('natb')
kMoviePropertyMatrix = FOUR_CHAR_CODE('mtrx')
kMoviePropertyTrackList = FOUR_CHAR_CODE('tlst')
kTrackPropertyMediaType = FOUR_CHAR_CODE('mtyp')
kTrackPropertyInstantiation = FOUR_CHAR_CODE('inst')
MovieControllerComponentType = FOUR_CHAR_CODE('play')
kMovieControllerQTVRFlag = 1 << 0
kMovieControllerDontDisplayToUser = 1 << 1
mcActionIdle = 1
mcActionDraw = 2
mcActionActivate = 3
mcActionDeactivate = 4
mcActionMouseDown = 5
mcActionKey = 6
mcActionPlay = 8
mcActionGoToTime = 12
mcActionSetVolume = 14
mcActionGetVolume = 15
mcActionStep = 18
mcActionSetLooping = 21
mcActionGetLooping = 22
mcActionSetLoopIsPalindrome = 23
mcActionGetLoopIsPalindrome = 24
mcActionSetGrowBoxBounds = 25
mcActionControllerSizeChanged = 26
mcActionSetSelectionBegin = 29
mcActionSetSelectionDuration = 30
mcActionSetKeysEnabled = 32
mcActionGetKeysEnabled = 33
mcActionSetPlaySelection = 34
mcActionGetPlaySelection = 35
mcActionSetUseBadge = 36
mcActionGetUseBadge = 37
mcActionSetFlags = 38
mcActionGetFlags = 39
mcActionSetPlayEveryFrame = 40
mcActionGetPlayEveryFrame = 41
mcActionGetPlayRate = 42
mcActionShowBalloon = 43
mcActionBadgeClick = 44
mcActionMovieClick = 45
mcActionSuspend = 46
mcActionResume = 47
mcActionSetControllerKeysEnabled = 48
mcActionGetTimeSliderRect = 49
mcActionMovieEdited = 50
mcActionGetDragEnabled = 51
mcActionSetDragEnabled = 52
mcActionGetSelectionBegin = 53
mcActionGetSelectionDuration = 54
mcActionPrerollAndPlay = 55
mcActionGetCursorSettingEnabled = 56
mcActionSetCursorSettingEnabled = 57
mcActionSetColorTable = 58
mcActionLinkToURL = 59
mcActionCustomButtonClick = 60
mcActionForceTimeTableUpdate = 61
mcActionSetControllerTimeLimits = 62
mcActionExecuteAllActionsForQTEvent = 63
mcActionExecuteOneActionForQTEvent = 64
mcActionAdjustCursor = 65
mcActionUseTrackForTimeTable = 66
mcActionClickAndHoldPoint = 67
mcActionShowMessageString = 68
mcActionShowStatusString = 69
mcActionGetExternalMovie = 70
mcActionGetChapterTime = 71
mcActionPerformActionList = 72
mcActionEvaluateExpression = 73
mcActionFetchParameterAs = 74
mcActionGetCursorByID = 75
mcActionGetNextURL = 76
mcActionMovieChanged = 77
mcActionDoScript = 78
mcActionRestartAtTime = 79
mcActionGetIndChapter = 80
mcActionLinkToURLExtended = 81
mcActionSetVolumeStep = 82
mcActionAutoPlay = 83
mcActionPauseToBuffer = 84
mcActionAppMessageReceived = 85
mcActionEvaluateExpressionWithType = 89
mcActionGetMovieName = 90
mcActionGetMovieID = 91
mcActionGetMovieActive = 92
mcFlagSuppressMovieFrame = 1 << 0
mcFlagSuppressStepButtons = 1 << 1
mcFlagSuppressSpeakerButton = 1 << 2
mcFlagsUseWindowPalette = 1 << 3
mcFlagsDontInvalidate = 1 << 4
mcFlagsUseCustomButton = 1 << 5
mcPositionDontInvalidate = 1 << 5
kMCIEEnabledButtonPicture = 1
kMCIEDisabledButtonPicture = 2
kMCIEDepressedButtonPicture = 3
kMCIEEnabledSizeBoxPicture = 4
kMCIEDisabledSizeBoxPicture = 5
kMCIEEnabledUnavailableButtonPicture = 6
kMCIEDisabledUnavailableButtonPicture = 7
kMCIESoundSlider = 128
kMCIESoundThumb = 129
kMCIEColorTable = 256
kMCIEIsFlatAppearance = 257
kMCIEDoButtonIconsDropOnDepress = 258
mcInfoUndoAvailable = 1 << 0
mcInfoCutAvailable = 1 << 1
mcInfoCopyAvailable = 1 << 2
mcInfoPasteAvailable = 1 << 3
mcInfoClearAvailable = 1 << 4
mcInfoHasSound = 1 << 5
mcInfoIsPlaying = 1 << 6
mcInfoIsLooping = 1 << 7
mcInfoIsInPalindrome = 1 << 8
mcInfoEditingEnabled = 1 << 9
mcInfoMovieIsInteractive = 1 << 10
mcMenuUndo = 1
mcMenuCut = 3
mcMenuCopy = 4
mcMenuPaste = 5
mcMenuClear = 6
kQTAppMessageSoftwareChanged = 1
kQTAppMessageWindowCloseRequested = 3
kQTAppMessageExitFullScreenRequested = 4
kQTAppMessageDisplayChannels = 5
kQTAppMessageEnterFullScreenRequested = 6
kFetchAsBooleanPtr = 1
kFetchAsShortPtr = 2
kFetchAsLongPtr = 3
kFetchAsMatrixRecordPtr = 4
kFetchAsModifierTrackGraphicsModeRecord = 5
kFetchAsHandle = 6
kFetchAsStr255 = 7
kFetchAsFloatPtr = 8
kFetchAsPointPtr = 9
kFetchAsNewAtomContainer = 10
kFetchAsQTEventRecordPtr = 11
kFetchAsFixedPtr = 12
kFetchAsSetControllerValuePtr = 13
kFetchAsRgnHandle = 14
kFetchAsComponentDescriptionPtr = 15
kFetchAsCString = 16
kQTCursorOpenHand = -19183
kQTCursorClosedHand = -19182
kQTCursorPointingHand = -19181
kQTCursorRightArrow = -19180
kQTCursorLeftArrow = -19179
kQTCursorDownArrow = -19178
kQTCursorUpArrow = -19177
kQTCursorIBeam = -19176
kControllerUnderstandsIdleManagers = 1 << 0
kVideoMediaResetStatisticsSelect = 0x0105
kVideoMediaGetStatisticsSelect = 0x0106
kVideoMediaGetStallCountSelect = 0x010E
kVideoMediaSetCodecParameterSelect = 0x010F
kVideoMediaGetCodecParameterSelect = 0x0110
kTextMediaSetTextProcSelect = 0x0101
kTextMediaAddTextSampleSelect = 0x0102
kTextMediaAddTESampleSelect = 0x0103
kTextMediaAddHiliteSampleSelect = 0x0104
kTextMediaDrawRawSelect = 0x0109
kTextMediaSetTextPropertySelect = 0x010A
kTextMediaRawSetupSelect = 0x010B
kTextMediaRawIdleSelect = 0x010C
kTextMediaGetTextPropertySelect = 0x010D
kTextMediaFindNextTextSelect = 0x0105
kTextMediaHiliteTextSampleSelect = 0x0106
kTextMediaSetTextSampleDataSelect = 0x0107
kSpriteMediaSetPropertySelect = 0x0101
kSpriteMediaGetPropertySelect = 0x0102
kSpriteMediaHitTestSpritesSelect = 0x0103
kSpriteMediaCountSpritesSelect = 0x0104
kSpriteMediaCountImagesSelect = 0x0105
kSpriteMediaGetIndImageDescriptionSelect = 0x0106
kSpriteMediaGetDisplayedSampleNumberSelect = 0x0107
kSpriteMediaGetSpriteNameSelect = 0x0108
kSpriteMediaGetImageNameSelect = 0x0109
kSpriteMediaSetSpritePropertySelect = 0x010A
kSpriteMediaGetSpritePropertySelect = 0x010B
kSpriteMediaHitTestAllSpritesSelect = 0x010C
kSpriteMediaHitTestOneSpriteSelect = 0x010D
kSpriteMediaSpriteIndexToIDSelect = 0x010E
kSpriteMediaSpriteIDToIndexSelect = 0x010F
kSpriteMediaGetSpriteActionsForQTEventSelect = 0x0110
kSpriteMediaSetActionVariableSelect = 0x0111
kSpriteMediaGetActionVariableSelect = 0x0112
kSpriteMediaGetIndImagePropertySelect = 0x0113
kSpriteMediaNewSpriteSelect = 0x0114
kSpriteMediaDisposeSpriteSelect = 0x0115
kSpriteMediaSetActionVariableToStringSelect = 0x0116
kSpriteMediaGetActionVariableAsStringSelect = 0x0117
kSpriteMediaNewImageSelect = 0x011B
kSpriteMediaDisposeImageSelect = 0x011C
kSpriteMediaImageIndexToIDSelect = 0x011D
kSpriteMediaImageIDToIndexSelect = 0x011E
kFlashMediaSetPanSelect = 0x0101
kFlashMediaSetZoomSelect = 0x0102
kFlashMediaSetZoomRectSelect = 0x0103
kFlashMediaGetRefConBoundsSelect = 0x0104
kFlashMediaGetRefConIDSelect = 0x0105
kFlashMediaIDToRefConSelect = 0x0106
kFlashMediaGetDisplayedFrameNumberSelect = 0x0107
kFlashMediaFrameNumberToMovieTimeSelect = 0x0108
kFlashMediaFrameLabelToMovieTimeSelect = 0x0109
kFlashMediaGetFlashVariableSelect = 0x010A
kFlashMediaSetFlashVariableSelect = 0x010B
kFlashMediaDoButtonActionsSelect = 0x010C
kFlashMediaGetSupportedSwfVersionSelect = 0x010D
kMovieMediaGetChildDoMCActionCallbackSelect = 0x0102
kMovieMediaGetDoMCActionCallbackSelect = 0x0103
kMovieMediaGetCurrentMoviePropertySelect = 0x0104
kMovieMediaGetCurrentTrackPropertySelect = 0x0105
kMovieMediaGetChildMovieDataReferenceSelect = 0x0106
kMovieMediaSetChildMovieDataReferenceSelect = 0x0107
kMovieMediaLoadChildMovieFromDataReferenceSelect = 0x0108
kMedia3DGetNamedObjectListSelect = 0x0101
kMedia3DGetRendererListSelect = 0x0102
kMedia3DGetCurrentGroupSelect = 0x0103
kMedia3DTranslateNamedObjectToSelect = 0x0104
kMedia3DScaleNamedObjectToSelect = 0x0105
kMedia3DRotateNamedObjectToSelect = 0x0106
kMedia3DSetCameraDataSelect = 0x0107
kMedia3DGetCameraDataSelect = 0x0108
kMedia3DSetCameraAngleAspectSelect = 0x0109
kMedia3DGetCameraAngleAspectSelect = 0x010A
kMedia3DSetCameraRangeSelect = 0x010D
kMedia3DGetCameraRangeSelect = 0x010E
kMedia3DGetViewObjectSelect = 0x010F
kMCSetMovieSelect = 0x0002
kMCGetIndMovieSelect = 0x0005
kMCRemoveAllMoviesSelect = 0x0006
kMCRemoveAMovieSelect = 0x0003
kMCRemoveMovieSelect = 0x0006
kMCIsPlayerEventSelect = 0x0007
kMCSetActionFilterSelect = 0x0008
kMCDoActionSelect = 0x0009
kMCSetControllerAttachedSelect = 0x000A
kMCIsControllerAttachedSelect = 0x000B
kMCSetControllerPortSelect = 0x000C
kMCGetControllerPortSelect = 0x000D
kMCSetVisibleSelect = 0x000E
kMCGetVisibleSelect = 0x000F
kMCGetControllerBoundsRectSelect = 0x0010
kMCSetControllerBoundsRectSelect = 0x0011
kMCGetControllerBoundsRgnSelect = 0x0012
kMCGetWindowRgnSelect = 0x0013
kMCMovieChangedSelect = 0x0014
kMCSetDurationSelect = 0x0015
kMCGetCurrentTimeSelect = 0x0016
kMCNewAttachedControllerSelect = 0x0017
kMCDrawSelect = 0x0018
kMCActivateSelect = 0x0019
kMCIdleSelect = 0x001A
kMCKeySelect = 0x001B
kMCClickSelect = 0x001C
kMCEnableEditingSelect = 0x001D
kMCIsEditingEnabledSelect = 0x001E
kMCCopySelect = 0x001F
kMCCutSelect = 0x0020
kMCPasteSelect = 0x0021
kMCClearSelect = 0x0022
kMCUndoSelect = 0x0023
kMCPositionControllerSelect = 0x0024
kMCGetControllerInfoSelect = 0x0025
kMCSetClipSelect = 0x0028
kMCGetClipSelect = 0x0029
kMCDrawBadgeSelect = 0x002A
kMCSetUpEditMenuSelect = 0x002B
kMCGetMenuStringSelect = 0x002C
kMCSetActionFilterWithRefConSelect = 0x002D
kMCPtInControllerSelect = 0x002E
kMCInvalidateSelect = 0x002F
kMCAdjustCursorSelect = 0x0030
kMCGetInterfaceElementSelect = 0x0031
kMCGetDoActionsProcSelect = 0x0032
kMCAddMovieSegmentSelect = 0x0033
kMCTrimMovieSegmentSelect = 0x0034
kMCSetIdleManagerSelect = 0x0035
kMCSetControllerCapabilitiesSelect = 0x0036
kMusicMediaGetIndexedTunePlayerSelect = 0x0101
kRawCodecType = FOUR_CHAR_CODE('raw ')
kCinepakCodecType = FOUR_CHAR_CODE('cvid')
kGraphicsCodecType = FOUR_CHAR_CODE('smc ')
kAnimationCodecType = FOUR_CHAR_CODE('rle ')
kVideoCodecType = FOUR_CHAR_CODE('rpza')
kComponentVideoCodecType = FOUR_CHAR_CODE('yuv2')
kJPEGCodecType = FOUR_CHAR_CODE('jpeg')
kMotionJPEGACodecType = FOUR_CHAR_CODE('mjpa')
kMotionJPEGBCodecType = FOUR_CHAR_CODE('mjpb')
kSGICodecType = FOUR_CHAR_CODE('.SGI')
kPlanarRGBCodecType = FOUR_CHAR_CODE('8BPS')
kMacPaintCodecType = FOUR_CHAR_CODE('PNTG')
kGIFCodecType = FOUR_CHAR_CODE('gif ')
kPhotoCDCodecType = FOUR_CHAR_CODE('kpcd')
kQuickDrawGXCodecType = FOUR_CHAR_CODE('qdgx')
kAVRJPEGCodecType = FOUR_CHAR_CODE('avr ')
kOpenDMLJPEGCodecType = FOUR_CHAR_CODE('dmb1')
kBMPCodecType = FOUR_CHAR_CODE('WRLE')
kWindowsRawCodecType = FOUR_CHAR_CODE('WRAW')
kVectorCodecType = FOUR_CHAR_CODE('path')
kQuickDrawCodecType = FOUR_CHAR_CODE('qdrw')
kWaterRippleCodecType = FOUR_CHAR_CODE('ripl')
kFireCodecType = FOUR_CHAR_CODE('fire')
kCloudCodecType = FOUR_CHAR_CODE('clou')
kH261CodecType = FOUR_CHAR_CODE('h261')
kH263CodecType = FOUR_CHAR_CODE('h263')
kDVCNTSCCodecType = FOUR_CHAR_CODE('dvc ')
kDVCPALCodecType = FOUR_CHAR_CODE('dvcp')
kDVCProPALCodecType = FOUR_CHAR_CODE('dvpp')
kBaseCodecType = FOUR_CHAR_CODE('base')
kFLCCodecType = FOUR_CHAR_CODE('flic')
kTargaCodecType = FOUR_CHAR_CODE('tga ')
kPNGCodecType = FOUR_CHAR_CODE('png ')
kTIFFCodecType = FOUR_CHAR_CODE('tiff')
kComponentVideoSigned = FOUR_CHAR_CODE('yuvu')
kComponentVideoUnsigned = FOUR_CHAR_CODE('yuvs')
kCMYKCodecType = FOUR_CHAR_CODE('cmyk')
kMicrosoftVideo1CodecType = FOUR_CHAR_CODE('msvc')
kSorensonCodecType = FOUR_CHAR_CODE('SVQ1')
kSorenson3CodecType = FOUR_CHAR_CODE('SVQ3')
kIndeo4CodecType = FOUR_CHAR_CODE('IV41')
kMPEG4VisualCodecType = FOUR_CHAR_CODE('mp4v')
k64ARGBCodecType = FOUR_CHAR_CODE('b64a')
k48RGBCodecType = FOUR_CHAR_CODE('b48r')
k32AlphaGrayCodecType = FOUR_CHAR_CODE('b32a')
k16GrayCodecType = FOUR_CHAR_CODE('b16g')
kMpegYUV420CodecType = FOUR_CHAR_CODE('myuv')
kYUV420CodecType = FOUR_CHAR_CODE('y420')
kSorensonYUV9CodecType = FOUR_CHAR_CODE('syv9')
k422YpCbCr8CodecType = FOUR_CHAR_CODE('2vuy')
k444YpCbCr8CodecType = FOUR_CHAR_CODE('v308')
k4444YpCbCrA8CodecType = FOUR_CHAR_CODE('v408')
k422YpCbCr16CodecType = FOUR_CHAR_CODE('v216')
k422YpCbCr10CodecType = FOUR_CHAR_CODE('v210')
k444YpCbCr10CodecType = FOUR_CHAR_CODE('v410')
k4444YpCbCrA8RCodecType = FOUR_CHAR_CODE('r408')
kBlurImageFilterType = FOUR_CHAR_CODE('blur')
kSharpenImageFilterType = FOUR_CHAR_CODE('shrp')
kEdgeDetectImageFilterType = FOUR_CHAR_CODE('edge')
kEmbossImageFilterType = FOUR_CHAR_CODE('embs')
kConvolveImageFilterType = FOUR_CHAR_CODE('genk')
kAlphaGainImageFilterType = FOUR_CHAR_CODE('gain')
kRGBColorBalanceImageFilterType = FOUR_CHAR_CODE('rgbb')
kHSLColorBalanceImageFilterType = FOUR_CHAR_CODE('hslb')
kColorSyncImageFilterType = FOUR_CHAR_CODE('sync')
kFilmNoiseImageFilterType = FOUR_CHAR_CODE('fmns')
kSolarizeImageFilterType = FOUR_CHAR_CODE('solr')
kColorTintImageFilterType = FOUR_CHAR_CODE('tint')
kLensFlareImageFilterType = FOUR_CHAR_CODE('lens')
kBrightnessContrastImageFilterType = FOUR_CHAR_CODE('brco')
kAlphaCompositorTransitionType = FOUR_CHAR_CODE('blnd')
kCrossFadeTransitionType = FOUR_CHAR_CODE('dslv')
kChannelCompositeEffectType = FOUR_CHAR_CODE('chan')
kChromaKeyTransitionType = FOUR_CHAR_CODE('ckey')
kImplodeTransitionType = FOUR_CHAR_CODE('mplo')
kExplodeTransitionType = FOUR_CHAR_CODE('xplo')
kGradientTransitionType = FOUR_CHAR_CODE('matt')
kPushTransitionType = FOUR_CHAR_CODE('push')
kSlideTransitionType = FOUR_CHAR_CODE('slid')
kWipeTransitionType = FOUR_CHAR_CODE('smpt')
kIrisTransitionType = FOUR_CHAR_CODE('smp2')
kRadialTransitionType = FOUR_CHAR_CODE('smp3')
kMatrixTransitionType = FOUR_CHAR_CODE('smp4')
kZoomTransitionType = FOUR_CHAR_CODE('zoom')
kTravellingMatteEffectType = FOUR_CHAR_CODE('trav')
kCMYKPixelFormat = FOUR_CHAR_CODE('cmyk')
k64ARGBPixelFormat = FOUR_CHAR_CODE('b64a')
k48RGBPixelFormat = FOUR_CHAR_CODE('b48r')
k32AlphaGrayPixelFormat = FOUR_CHAR_CODE('b32a')
k16GrayPixelFormat = FOUR_CHAR_CODE('b16g')
k422YpCbCr8PixelFormat = FOUR_CHAR_CODE('2vuy')
k4444YpCbCrA8PixelFormat = FOUR_CHAR_CODE('v408')
k4444YpCbCrA8RPixelFormat = FOUR_CHAR_CODE('r408')
kYUV420PixelFormat = FOUR_CHAR_CODE('y420')
codecInfoDoes1 = (1L << 0)
codecInfoDoes2 = (1L << 1)
codecInfoDoes4 = (1L << 2)
codecInfoDoes8 = (1L << 3)
codecInfoDoes16 = (1L << 4)
codecInfoDoes32 = (1L << 5)
codecInfoDoesDither = (1L << 6)
codecInfoDoesStretch = (1L << 7)
codecInfoDoesShrink = (1L << 8)
codecInfoDoesMask = (1L << 9)
codecInfoDoesTemporal = (1L << 10)
codecInfoDoesDouble = (1L << 11)
codecInfoDoesQuad = (1L << 12)
codecInfoDoesHalf = (1L << 13)
codecInfoDoesQuarter = (1L << 14)
codecInfoDoesRotate = (1L << 15)
codecInfoDoesHorizFlip = (1L << 16)
codecInfoDoesVertFlip = (1L << 17)
codecInfoHasEffectParameterList = (1L << 18)
codecInfoDoesBlend = (1L << 19)
codecInfoDoesWarp = (1L << 20)
codecInfoDoesRecompress = (1L << 21)
codecInfoDoesSpool = (1L << 22)
codecInfoDoesRateConstrain = (1L << 23)
codecInfoDepth1 = (1L << 0)
codecInfoDepth2 = (1L << 1)
codecInfoDepth4 = (1L << 2)
codecInfoDepth8 = (1L << 3)
codecInfoDepth16 = (1L << 4)
codecInfoDepth32 = (1L << 5)
codecInfoDepth24 = (1L << 6)
codecInfoDepth33 = (1L << 7)
codecInfoDepth34 = (1L << 8)
codecInfoDepth36 = (1L << 9)
codecInfoDepth40 = (1L << 10)
codecInfoStoresClut = (1L << 11)
codecInfoDoesLossless = (1L << 12)
codecInfoSequenceSensitive = (1L << 13)
codecFlagUseImageBuffer = (1L << 0)
codecFlagUseScreenBuffer = (1L << 1)
codecFlagUpdatePrevious = (1L << 2)
codecFlagNoScreenUpdate = (1L << 3)
codecFlagWasCompressed = (1L << 4)
codecFlagDontOffscreen = (1L << 5)
codecFlagUpdatePreviousComp = (1L << 6)
codecFlagForceKeyFrame = (1L << 7)
codecFlagOnlyScreenUpdate = (1L << 8)
codecFlagLiveGrab = (1L << 9)
codecFlagDiffFrame = (1L << 9)
codecFlagDontUseNewImageBuffer = (1L << 10)
codecFlagInterlaceUpdate = (1L << 11)
codecFlagCatchUpDiff = (1L << 12)
codecFlagSupportDisable = (1L << 13)
codecFlagReenable = (1L << 14)
codecFlagOutUpdateOnNextIdle = (1L << 9)
codecFlagOutUpdateOnDataSourceChange = (1L << 10)
codecFlagSequenceSensitive = (1L << 11)
codecFlagOutUpdateOnTimeChange = (1L << 12)
codecFlagImageBufferNotSourceImage = (1L << 13)
codecFlagUsedNewImageBuffer = (1L << 14)
codecFlagUsedImageBuffer = (1L << 15)
codecMinimumDataSize = 32768L
compressorComponentType = FOUR_CHAR_CODE('imco')
decompressorComponentType = FOUR_CHAR_CODE('imdc')
codecLosslessQuality = 0x00000400
codecMaxQuality = 0x000003FF
codecMinQuality = 0x00000000
codecLowQuality = 0x00000100
codecNormalQuality = 0x00000200
codecHighQuality = 0x00000300
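# The codec*Quality constants form an ascending scale from codecMinQuality
# (0x000) through codecMaxQuality (0x3FF) up to codecLosslessQuality (0x400);
# they are the values typically passed as spatial/temporal quality settings
# in QuickTime compression calls.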
codecLockBitsShieldCursor = (1 << 0)
codecCompletionSource = (1 << 0)
codecCompletionDest = (1 << 1)
codecCompletionDontUnshield = (1 << 2)
codecCompletionWentOffscreen = (1 << 3)
codecCompletionUnlockBits = (1 << 4)
codecCompletionForceChainFlush = (1 << 5)
codecCompletionDropped = (1 << 6)
codecProgressOpen = 0
codecProgressUpdatePercent = 1
codecProgressClose = 2
defaultDither = 0
forceDither = 1
suppressDither = 2
useColorMatching = 4
callStdBits = 1
callOldBits = 2
noDefaultOpcodes = 4
graphicsModeStraightAlpha = 256
graphicsModePreWhiteAlpha = 257
graphicsModePreBlackAlpha = 258
graphicsModeComposition = 259
graphicsModeStraightAlphaBlend = 260
graphicsModePreMulColorAlpha = 261
evenField1ToEvenFieldOut = 1 << 0
evenField1ToOddFieldOut = 1 << 1
oddField1ToEvenFieldOut = 1 << 2
oddField1ToOddFieldOut = 1 << 3
evenField2ToEvenFieldOut = 1 << 4
evenField2ToOddFieldOut = 1 << 5
oddField2ToEvenFieldOut = 1 << 6
oddField2ToOddFieldOut = 1 << 7
icmFrameTimeHasVirtualStartTimeAndDuration = 1 << 0
codecDSequenceDisableOverlaySurface = (1L << 5)
codecDSequenceSingleField = (1L << 6)
codecDSequenceBidirectionalPrediction = (1L << 7)
codecDSequenceFlushInsteadOfDirtying = (1L << 8)
codecDSequenceEnableSubPixelPositioning = (1L << 9)
kICMSequenceTaskWeight = FOUR_CHAR_CODE('twei')
kICMSequenceTaskName = FOUR_CHAR_CODE('tnam')
kICMSequenceUserPreferredCodecs = FOUR_CHAR_CODE('punt')
kImageDescriptionSampleFormat = FOUR_CHAR_CODE('idfm')
kImageDescriptionClassicAtomFormat = FOUR_CHAR_CODE('atom')
kImageDescriptionQTAtomFormat = FOUR_CHAR_CODE('qtat')
kImageDescriptionEffectDataFormat = FOUR_CHAR_CODE('fxat')
kImageDescriptionPrivateDataFormat = FOUR_CHAR_CODE('priv')
kImageDescriptionAlternateCodec = FOUR_CHAR_CODE('subs')
kImageDescriptionColorSpace = FOUR_CHAR_CODE('cspc')
sfpItemPreviewAreaUser = 11
sfpItemPreviewStaticText = 12
sfpItemPreviewDividerUser = 13
sfpItemCreatePreviewButton = 14
sfpItemShowPreviewButton = 15
kICMPixelFormatIsPlanarMask = 0x0F
kICMPixelFormatIsIndexed = (1L << 4)
kICMPixelFormatIsSupportedByQD = (1L << 5)
kICMPixelFormatIsMonochrome = (1L << 6)
kICMPixelFormatHasAlphaChannel = (1L << 7)
kICMGetChainUltimateParent = 0
kICMGetChainParent = 1
kICMGetChainChild = 2
kICMGetChainUltimateChild = 3
kDontUseValidateToFindGraphicsImporter = 1L << 0
kICMTempThenAppMemory = 1L << 12
kICMAppThenTempMemory = 1L << 13
kQTUsePlatformDefaultGammaLevel = 0
kQTUseSourceGammaLevel = -1L
kQTCCIR601VideoGammaLevel = 0x00023333
identityMatrixType = 0x00
translateMatrixType = 0x01
scaleMatrixType = 0x02
scaleTranslateMatrixType = 0x03
linearMatrixType = 0x04
linearTranslateMatrixType = 0x05
perspectiveMatrixType = 0x06
GraphicsImporterComponentType = FOUR_CHAR_CODE('grip')
graphicsImporterUsesImageDecompressor = 1L << 23
quickTimeImageFileImageDescriptionAtom = FOUR_CHAR_CODE('idsc')
quickTimeImageFileImageDataAtom = FOUR_CHAR_CODE('idat')
quickTimeImageFileMetaDataAtom = FOUR_CHAR_CODE('meta')
quickTimeImageFileColorSyncProfileAtom = FOUR_CHAR_CODE('iicc')
graphicsImporterDrawsAllPixels = 0
graphicsImporterDoesntDrawAllPixels = 1
graphicsImporterDontKnowIfDrawAllPixels = 2
kGraphicsImporterDontDoGammaCorrection = 1L << 0
kGraphicsImporterTrustResolutionFromFile = 1L << 1
kGraphicsImporterEnableSubPixelPositioning = 1L << 2
kGraphicsExportGroup = FOUR_CHAR_CODE('expo')
kGraphicsExportFileType = FOUR_CHAR_CODE('ftyp')
kGraphicsExportMIMEType = FOUR_CHAR_CODE('mime')
kGraphicsExportExtension = FOUR_CHAR_CODE('ext ')
kGraphicsExportDescription = FOUR_CHAR_CODE('desc')
kQTPhotoshopLayerMode = FOUR_CHAR_CODE('lmod')
kQTPhotoshopLayerOpacity = FOUR_CHAR_CODE('lopa')
kQTPhotoshopLayerClipping = FOUR_CHAR_CODE('lclp')
kQTPhotoshopLayerFlags = FOUR_CHAR_CODE('lflg')
kQTPhotoshopLayerName = FOUR_CHAR_CODE('\xa9lnm')
kQTPhotoshopLayerUnicodeName = FOUR_CHAR_CODE('luni')
kQTIndexedImageType = FOUR_CHAR_CODE('nth?')
kQTIndexedImageIsThumbnail = FOUR_CHAR_CODE('n=th')
kQTIndexedImageIsLayer = FOUR_CHAR_CODE('n=ly')
kQTIndexedImageIsPage = FOUR_CHAR_CODE('n=pg')
kQTIndexedImageIsMultiResolution = FOUR_CHAR_CODE('n=rs')
kQTTIFFUserDataPrefix = 0x74690000
kQTTIFFExifUserDataPrefix = 0x65780000
kQTTIFFExifGPSUserDataPrefix = 0x67700000
kQTAlphaMode = FOUR_CHAR_CODE('almo')
kQTAlphaModePreMulColor = FOUR_CHAR_CODE('almp')
kUserDataIPTC = FOUR_CHAR_CODE('iptc')
kQTTIFFUserDataOrientation = 0x74690112
kQTTIFFUserDataTransferFunction = 0x7469012D
kQTTIFFUserDataWhitePoint = 0x7469013E
kQTTIFFUserDataPrimaryChromaticities = 0x7469013F
kQTTIFFUserDataTransferRange = 0x74690156
kQTTIFFUserDataYCbCrPositioning = 0x74690213
kQTTIFFUserDataReferenceBlackWhite = 0x74690214
kQTTIFFUserDataModelPixelScale = 0x7469830E
kQTTIFFUserDataModelTransformation = 0x746985D8
kQTTIFFUserDataModelTiepoint = 0x74698482
kQTTIFFUserDataGeoKeyDirectory = 0x746987AF
kQTTIFFUserDataGeoDoubleParams = 0x746987B0
kQTTIFFUserDataGeoAsciiParams = 0x746987B1
kQTTIFFUserDataIntergraphMatrix = 0x74698480
kQTExifUserDataExifVersion = 0x65789000
kQTExifUserDataFlashPixVersion = 0x6578A000
kQTExifUserDataColorSpace = 0x6578A001
kQTExifUserDataComponentsConfiguration = 0x65789101
kQTExifUserDataCompressedBitsPerPixel = 0x65789102
kQTExifUserDataPixelXDimension = 0x6578A002
kQTExifUserDataPixelYDimension = 0x6578A003
kQTExifUserDataMakerNote = 0x6578927C
kQTExifUserDataUserComment = 0x6578928C
kQTExifUserDataRelatedSoundFile = 0x6578A004
kQTExifUserDataDateTimeOriginal = 0x65789003
kQTExifUserDataDateTimeDigitized = 0x65789004
kQTExifUserDataSubSecTime = 0x65789290
kQTExifUserDataSubSecTimeOriginal = 0x65789291
kQTExifUserDataSubSecTimeDigitized = 0x65789292
kQTExifUserDataExposureTime = 0x6578829A
kQTExifUserDataFNumber = 0x6578829D
kQTExifUserDataExposureProgram = 0x65788822
kQTExifUserDataSpectralSensitivity = 0x65788824
kQTExifUserDataISOSpeedRatings = 0x65788827
kQTExifUserDataShutterSpeedValue = 0x65789201
kQTExifUserDataApertureValue = 0x65789202
kQTExifUserDataBrightnessValue = 0x65789203
kQTExifUserDataExposureBiasValue = 0x65789204
kQTExifUserDataMaxApertureValue = 0x65789205
kQTExifUserDataSubjectDistance = 0x65789206
kQTExifUserDataMeteringMode = 0x65789207
kQTExifUserDataLightSource = 0x65789208
kQTExifUserDataFlash = 0x65789209
kQTExifUserDataFocalLength = 0x6578920A
kQTExifUserDataFlashEnergy = 0x6578A20B
kQTExifUserDataFocalPlaneXResolution = 0x6578A20E
kQTExifUserDataFocalPlaneYResolution = 0x6578A20F
kQTExifUserDataFocalPlaneResolutionUnit = 0x6578A210
kQTExifUserDataSubjectLocation = 0x6578A214
kQTExifUserDataExposureIndex = 0x6578A215
kQTExifUserDataSensingMethod = 0x6578A217
kQTExifUserDataFileSource = 0x6578A300
kQTExifUserDataSceneType = 0x6578A301
kQTExifUserDataGPSVersionID = 0x06770000
kQTExifUserDataGPSLatitudeRef = 0x06770001
kQTExifUserDataGPSLatitude = 0x06770002
kQTExifUserDataGPSLongitudeRef = 0x06770003
kQTExifUserDataGPSLongitude = 0x06770004
kQTExifUserDataGPSAltitudeRef = 0x06770005
kQTExifUserDataGPSAltitude = 0x06770006
kQTExifUserDataGPSTimeStamp = 0x06770007
kQTExifUserDataGPSSatellites = 0x06770008
kQTExifUserDataGPSStatus = 0x06770009
kQTExifUserDataGPSMeasureMode = 0x0677000A
kQTExifUserDataGPSDOP = 0x0677000B
kQTExifUserDataGPSSpeedRef = 0x0677000C
kQTExifUserDataGPSSpeed = 0x0677000D
kQTExifUserDataGPSTrackRef = 0x0677000E
kQTExifUserDataGPSTrack = 0x0677000F
kQTExifUserDataGPSImgDirectionRef = 0x06770010
kQTExifUserDataGPSImgDirection = 0x06770011
kQTExifUserDataGPSMapDatum = 0x06770012
kQTExifUserDataGPSDestLatitudeRef = 0x06770013
kQTExifUserDataGPSDestLatitude = 0x06770014
kQTExifUserDataGPSDestLongitudeRef = 0x06770015
kQTExifUserDataGPSDestLongitude = 0x06770016
kQTExifUserDataGPSDestBearingRef = 0x06770017
kQTExifUserDataGPSDestBearing = 0x06770018
kQTExifUserDataGPSDestDistanceRef = 0x06770019
kQTExifUserDataGPSDestDistance = 0x0677001A
GraphicsExporterComponentType = FOUR_CHAR_CODE('grex')
kBaseGraphicsExporterSubType = FOUR_CHAR_CODE('base')
graphicsExporterIsBaseExporter = 1L << 0
graphicsExporterCanTranscode = 1L << 1
graphicsExporterUsesImageCompressor = 1L << 2
kQTResolutionSettings = FOUR_CHAR_CODE('reso')
kQTTargetDataSize = FOUR_CHAR_CODE('dasz')
kQTDontRecompress = FOUR_CHAR_CODE('dntr')
kQTInterlaceStyle = FOUR_CHAR_CODE('ilac')
kQTColorSyncProfile = FOUR_CHAR_CODE('iccp')
kQTThumbnailSettings = FOUR_CHAR_CODE('thum')
kQTEnableExif = FOUR_CHAR_CODE('exif')
kQTMetaData = FOUR_CHAR_CODE('meta')
kQTTIFFCompressionMethod = FOUR_CHAR_CODE('tifc')
kQTTIFFCompression_None = 1
kQTTIFFCompression_PackBits = 32773L
kQTTIFFLittleEndian = FOUR_CHAR_CODE('tife')
kQTPNGFilterPreference = FOUR_CHAR_CODE('pngf')
kQTPNGFilterBestForColorType = FOUR_CHAR_CODE('bflt')
kQTPNGFilterNone = 0
kQTPNGFilterSub = 1
kQTPNGFilterUp = 2
kQTPNGFilterAverage = 3
kQTPNGFilterPaeth = 4
kQTPNGFilterAdaptivePerRow = FOUR_CHAR_CODE('aflt')
kQTPNGInterlaceStyle = FOUR_CHAR_CODE('ilac')
kQTPNGInterlaceNone = 0
kQTPNGInterlaceAdam7 = 1
ImageTranscodererComponentType = FOUR_CHAR_CODE('imtc')
kGraphicsImportSetDataReferenceSelect = 0x0001
kGraphicsImportGetDataReferenceSelect = 0x0002
kGraphicsImportSetDataFileSelect = 0x0003
kGraphicsImportGetDataFileSelect = 0x0004
kGraphicsImportSetDataHandleSelect = 0x0005
kGraphicsImportGetDataHandleSelect = 0x0006
kGraphicsImportGetImageDescriptionSelect = 0x0007
kGraphicsImportGetDataOffsetAndSizeSelect = 0x0008
kGraphicsImportReadDataSelect = 0x0009
kGraphicsImportSetClipSelect = 0x000A
kGraphicsImportGetClipSelect = 0x000B
kGraphicsImportSetSourceRectSelect = 0x000C
kGraphicsImportGetSourceRectSelect = 0x000D
kGraphicsImportGetNaturalBoundsSelect = 0x000E
kGraphicsImportDrawSelect = 0x000F
kGraphicsImportSetGWorldSelect = 0x0010
kGraphicsImportGetGWorldSelect = 0x0011
kGraphicsImportSetMatrixSelect = 0x0012
kGraphicsImportGetMatrixSelect = 0x0013
kGraphicsImportSetBoundsRectSelect = 0x0014
kGraphicsImportGetBoundsRectSelect = 0x0015
kGraphicsImportSaveAsPictureSelect = 0x0016
kGraphicsImportSetGraphicsModeSelect = 0x0017
kGraphicsImportGetGraphicsModeSelect = 0x0018
kGraphicsImportSetQualitySelect = 0x0019
kGraphicsImportGetQualitySelect = 0x001A
kGraphicsImportSaveAsQuickTimeImageFileSelect = 0x001B
kGraphicsImportSetDataReferenceOffsetAndLimitSelect = 0x001C
kGraphicsImportGetDataReferenceOffsetAndLimitSelect = 0x001D
kGraphicsImportGetAliasedDataReferenceSelect = 0x001E
kGraphicsImportValidateSelect = 0x001F
kGraphicsImportGetMetaDataSelect = 0x0020
kGraphicsImportGetMIMETypeListSelect = 0x0021
kGraphicsImportDoesDrawAllPixelsSelect = 0x0022
kGraphicsImportGetAsPictureSelect = 0x0023
kGraphicsImportExportImageFileSelect = 0x0024
kGraphicsImportGetExportImageTypeListSelect = 0x0025
kGraphicsImportDoExportImageFileDialogSelect = 0x0026
kGraphicsImportGetExportSettingsAsAtomContainerSelect = 0x0027
kGraphicsImportSetExportSettingsFromAtomContainerSelect = 0x0028
kGraphicsImportSetProgressProcSelect = 0x0029
kGraphicsImportGetProgressProcSelect = 0x002A
kGraphicsImportGetImageCountSelect = 0x002B
kGraphicsImportSetImageIndexSelect = 0x002C
kGraphicsImportGetImageIndexSelect = 0x002D
kGraphicsImportGetDataOffsetAndSize64Select = 0x002E
kGraphicsImportReadData64Select = 0x002F
kGraphicsImportSetDataReferenceOffsetAndLimit64Select = 0x0030
kGraphicsImportGetDataReferenceOffsetAndLimit64Select = 0x0031
kGraphicsImportGetDefaultMatrixSelect = 0x0032
kGraphicsImportGetDefaultClipSelect = 0x0033
kGraphicsImportGetDefaultGraphicsModeSelect = 0x0034
kGraphicsImportGetDefaultSourceRectSelect = 0x0035
kGraphicsImportGetColorSyncProfileSelect = 0x0036
kGraphicsImportSetDestRectSelect = 0x0037
kGraphicsImportGetDestRectSelect = 0x0038
kGraphicsImportSetFlagsSelect = 0x0039
kGraphicsImportGetFlagsSelect = 0x003A
kGraphicsImportGetBaseDataOffsetAndSize64Select = 0x003D
kGraphicsImportSetImageIndexToThumbnailSelect = 0x003E
kGraphicsExportDoExportSelect = 0x0001
kGraphicsExportCanTranscodeSelect = 0x0002
kGraphicsExportDoTranscodeSelect = 0x0003
kGraphicsExportCanUseCompressorSelect = 0x0004
kGraphicsExportDoUseCompressorSelect = 0x0005
kGraphicsExportDoStandaloneExportSelect = 0x0006
kGraphicsExportGetDefaultFileTypeAndCreatorSelect = 0x0007
kGraphicsExportGetDefaultFileNameExtensionSelect = 0x0008
kGraphicsExportGetMIMETypeListSelect = 0x0009
kGraphicsExportRequestSettingsSelect = 0x000B
kGraphicsExportSetSettingsFromAtomContainerSelect = 0x000C
kGraphicsExportGetSettingsAsAtomContainerSelect = 0x000D
kGraphicsExportGetSettingsAsTextSelect = 0x000E
kGraphicsExportSetDontRecompressSelect = 0x000F
kGraphicsExportGetDontRecompressSelect = 0x0010
kGraphicsExportSetInterlaceStyleSelect = 0x0011
kGraphicsExportGetInterlaceStyleSelect = 0x0012
kGraphicsExportSetMetaDataSelect = 0x0013
kGraphicsExportGetMetaDataSelect = 0x0014
kGraphicsExportSetTargetDataSizeSelect = 0x0015
kGraphicsExportGetTargetDataSizeSelect = 0x0016
kGraphicsExportSetCompressionMethodSelect = 0x0017
kGraphicsExportGetCompressionMethodSelect = 0x0018
kGraphicsExportSetCompressionQualitySelect = 0x0019
kGraphicsExportGetCompressionQualitySelect = 0x001A
kGraphicsExportSetResolutionSelect = 0x001B
kGraphicsExportGetResolutionSelect = 0x001C
kGraphicsExportSetDepthSelect = 0x001D
kGraphicsExportGetDepthSelect = 0x001E
kGraphicsExportSetColorSyncProfileSelect = 0x0021
kGraphicsExportGetColorSyncProfileSelect = 0x0022
kGraphicsExportSetProgressProcSelect = 0x0023
kGraphicsExportGetProgressProcSelect = 0x0024
kGraphicsExportSetInputDataReferenceSelect = 0x0025
kGraphicsExportGetInputDataReferenceSelect = 0x0026
kGraphicsExportSetInputFileSelect = 0x0027
kGraphicsExportGetInputFileSelect = 0x0028
kGraphicsExportSetInputHandleSelect = 0x0029
kGraphicsExportGetInputHandleSelect = 0x002A
kGraphicsExportSetInputPtrSelect = 0x002B
kGraphicsExportGetInputPtrSelect = 0x002C
kGraphicsExportSetInputGraphicsImporterSelect = 0x002D
kGraphicsExportGetInputGraphicsImporterSelect = 0x002E
kGraphicsExportSetInputPictureSelect = 0x002F
kGraphicsExportGetInputPictureSelect = 0x0030
kGraphicsExportSetInputGWorldSelect = 0x0031
kGraphicsExportGetInputGWorldSelect = 0x0032
kGraphicsExportSetInputPixmapSelect = 0x0033
kGraphicsExportGetInputPixmapSelect = 0x0034
kGraphicsExportSetInputOffsetAndLimitSelect = 0x0035
kGraphicsExportGetInputOffsetAndLimitSelect = 0x0036
kGraphicsExportMayExporterReadInputDataSelect = 0x0037
kGraphicsExportGetInputDataSizeSelect = 0x0038
kGraphicsExportReadInputDataSelect = 0x0039
kGraphicsExportGetInputImageDescriptionSelect = 0x003A
kGraphicsExportGetInputImageDimensionsSelect = 0x003B
kGraphicsExportGetInputImageDepthSelect = 0x003C
kGraphicsExportDrawInputImageSelect = 0x003D
kGraphicsExportSetOutputDataReferenceSelect = 0x003E
kGraphicsExportGetOutputDataReferenceSelect = 0x003F
kGraphicsExportSetOutputFileSelect = 0x0040
kGraphicsExportGetOutputFileSelect = 0x0041
kGraphicsExportSetOutputHandleSelect = 0x0042
kGraphicsExportGetOutputHandleSelect = 0x0043
kGraphicsExportSetOutputOffsetAndMaxSizeSelect = 0x0044
kGraphicsExportGetOutputOffsetAndMaxSizeSelect = 0x0045
kGraphicsExportSetOutputFileTypeAndCreatorSelect = 0x0046
kGraphicsExportGetOutputFileTypeAndCreatorSelect = 0x0047
kGraphicsExportWriteOutputDataSelect = 0x0048
kGraphicsExportSetOutputMarkSelect = 0x0049
kGraphicsExportGetOutputMarkSelect = 0x004A
kGraphicsExportReadOutputDataSelect = 0x004B
kGraphicsExportSetThumbnailEnabledSelect = 0x004C
kGraphicsExportGetThumbnailEnabledSelect = 0x004D
kGraphicsExportSetExifEnabledSelect = 0x004E
kGraphicsExportGetExifEnabledSelect = 0x004F
kImageTranscoderBeginSequenceSelect = 0x0001
kImageTranscoderConvertSelect = 0x0002
kImageTranscoderDisposeDataSelect = 0x0003
kImageTranscoderEndSequenceSelect = 0x0004
clockComponentType = FOUR_CHAR_CODE('clok')
systemTickClock = FOUR_CHAR_CODE('tick')
systemSecondClock = FOUR_CHAR_CODE('seco')
systemMillisecondClock = FOUR_CHAR_CODE('mill')
systemMicrosecondClock = FOUR_CHAR_CODE('micr')
kClockRateIsLinear = 1
kClockImplementsCallBacks = 2
kClockCanHandleIntermittentSound = 4
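# Standard Compression dialog component ('scdi') constants.  The sc* preference
# flag values below are bitmasks and are normally combined with bitwise OR.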
StandardCompressionType = FOUR_CHAR_CODE('scdi')
StandardCompressionSubType = FOUR_CHAR_CODE('imag')
StandardCompressionSubTypeSound = FOUR_CHAR_CODE('soun')
scListEveryCodec = 1L << 1
scAllowZeroFrameRate = 1L << 2
scAllowZeroKeyFrameRate = 1L << 3
scShowBestDepth = 1L << 4
scUseMovableModal = 1L << 5
scDisableFrameRateItem = 1L << 6
scShowDataRateAsKilobits = 1L << 7
scPreferCropping = 1 << 0
scPreferScaling = 1 << 1
scPreferScalingAndCropping = scPreferScaling | scPreferCropping
scDontDetermineSettingsFromTestImage = 1 << 2
scTestImageWidth = 80
scTestImageHeight = 80
scOKItem = 1
scCancelItem = 2
scCustomItem = 3
scUserCancelled = 1
scPositionRect = 2
scPositionDialog = 3
scSetTestImagePictHandle = 4
scSetTestImagePictFile = 5
scSetTestImagePixMap = 6
scGetBestDeviceRect = 7
scRequestImageSettings = 10
scCompressImage = 11
scCompressPicture = 12
scCompressPictureFile = 13
scRequestSequenceSettings = 14
scCompressSequenceBegin = 15
scCompressSequenceFrame = 16
scCompressSequenceEnd = 17
scDefaultPictHandleSettings = 18
scDefaultPictFileSettings = 19
scDefaultPixMapSettings = 20
scGetInfo = 21
scSetInfo = 22
scNewGWorld = 23
scSpatialSettingsType = FOUR_CHAR_CODE('sptl')
scTemporalSettingsType = FOUR_CHAR_CODE('tprl')
scDataRateSettingsType = FOUR_CHAR_CODE('drat')
scColorTableType = FOUR_CHAR_CODE('clut')
scProgressProcType = FOUR_CHAR_CODE('prog')
scExtendedProcsType = FOUR_CHAR_CODE('xprc')
scPreferenceFlagsType = FOUR_CHAR_CODE('pref')
scSettingsStateType = FOUR_CHAR_CODE('ssta')
scSequenceIDType = FOUR_CHAR_CODE('sequ')
scWindowPositionType = FOUR_CHAR_CODE('wndw')
scCodecFlagsType = FOUR_CHAR_CODE('cflg')
scCodecSettingsType = FOUR_CHAR_CODE('cdec')
scForceKeyValueType = FOUR_CHAR_CODE('ksim')
scSoundSampleRateType = FOUR_CHAR_CODE('ssrt')
scSoundSampleSizeType = FOUR_CHAR_CODE('ssss')
scSoundChannelCountType = FOUR_CHAR_CODE('sscc')
scSoundCompressionType = FOUR_CHAR_CODE('ssct')
scCompressionListType = FOUR_CHAR_CODE('ctyl')
scCodecManufacturerType = FOUR_CHAR_CODE('cmfr')
scSoundVBRCompressionOK = FOUR_CHAR_CODE('cvbr')
scSoundInputSampleRateType = FOUR_CHAR_CODE('ssir')
scSoundSampleRateChangeOK = FOUR_CHAR_CODE('rcok')
scAvailableCompressionListType = FOUR_CHAR_CODE('avai')
scGetCompression = 1
scShowMotionSettings = 1L << 0
scSettingsChangedItem = -1
scCompressFlagIgnoreIdenticalFrames = 1
kQTSettingsVideo = FOUR_CHAR_CODE('vide')
kQTSettingsSound = FOUR_CHAR_CODE('soun')
kQTSettingsComponentVersion = FOUR_CHAR_CODE('vers')
TweenComponentType = FOUR_CHAR_CODE('twen')
TCSourceRefNameType = FOUR_CHAR_CODE('name')
tcDropFrame = 1 << 0
tc24HourMax = 1 << 1
tcNegTimesOK = 1 << 2
tcCounter = 1 << 3
tctNegFlag = 0x80
tcdfShowTimeCode = 1 << 0
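# Movie import ('eat ') and movie export ('spit') component constants.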
MovieImportType = FOUR_CHAR_CODE('eat ')
MovieExportType = FOUR_CHAR_CODE('spit')
canMovieImportHandles = 1 << 0
canMovieImportFiles = 1 << 1
hasMovieImportUserInterface = 1 << 2
canMovieExportHandles = 1 << 3
canMovieExportFiles = 1 << 4
hasMovieExportUserInterface = 1 << 5
movieImporterIsXMLBased = 1 << 5
dontAutoFileMovieImport = 1 << 6
canMovieExportAuxDataHandle = 1 << 7
canMovieImportValidateHandles = 1 << 8
canMovieImportValidateFile = 1 << 9
dontRegisterWithEasyOpen = 1 << 10
canMovieImportInPlace = 1 << 11
movieImportSubTypeIsFileExtension = 1 << 12
canMovieImportPartial = 1 << 13
hasMovieImportMIMEList = 1 << 14
canMovieImportAvoidBlocking = 1 << 15
canMovieExportFromProcedures = 1 << 15
canMovieExportValidateMovie = 1L << 16
movieImportMustGetDestinationMediaType = 1L << 16
movieExportNeedsResourceFork = 1L << 17
canMovieImportDataReferences = 1L << 18
movieExportMustGetSourceMediaType = 1L << 19
canMovieImportWithIdle = 1L << 20
canMovieImportValidateDataReferences = 1L << 21
reservedForUseByGraphicsImporters = 1L << 23
movieImportCreateTrack = 1
movieImportInParallel = 2
movieImportMustUseTrack = 4
movieImportWithIdle = 16
movieImportResultUsedMultipleTracks = 8
movieImportResultNeedIdles = 32
movieImportResultComplete = 64
kMovieExportTextOnly = 0
kMovieExportAbsoluteTime = 1
kMovieExportRelativeTime = 2
kMIDIImportSilenceBefore = 1 << 0
kMIDIImportSilenceAfter = 1 << 1
kMIDIImport20Playable = 1 << 2
kMIDIImportWantLyrics = 1 << 3
kQTMediaConfigResourceType = FOUR_CHAR_CODE('mcfg')
kQTMediaConfigResourceVersion = 2
kQTMediaGroupResourceType = FOUR_CHAR_CODE('mgrp')
kQTMediaGroupResourceVersion = 1
kQTBrowserInfoResourceType = FOUR_CHAR_CODE('brws')
kQTBrowserInfoResourceVersion = 1
kQTMediaMIMEInfoHasChanged = (1L << 1)
kQTMediaFileInfoHasChanged = (1L << 2)
kQTMediaConfigCanUseApp = (1L << 18)
kQTMediaConfigCanUsePlugin = (1L << 19)
kQTMediaConfigUNUSED = (1L << 20)
kQTMediaConfigBinaryFile = (1L << 23)
kQTMediaConfigTextFile = 0
kQTMediaConfigMacintoshFile = (1L << 24)
kQTMediaConfigAssociateByDefault = (1L << 27)
kQTMediaConfigUseAppByDefault = (1L << 28)
kQTMediaConfigUsePluginByDefault = (1L << 29)
kQTMediaConfigDefaultsMask = (kQTMediaConfigUseAppByDefault | kQTMediaConfigUsePluginByDefault)
kQTMediaConfigDefaultsShift = 12
kQTMediaConfigHasFileHasQTAtoms = (1L << 30)
kQTMediaConfigStreamGroupID = FOUR_CHAR_CODE('strm')
kQTMediaConfigInteractiveGroupID = FOUR_CHAR_CODE('intr')
kQTMediaConfigVideoGroupID = FOUR_CHAR_CODE('eyes')
kQTMediaConfigAudioGroupID = FOUR_CHAR_CODE('ears')
kQTMediaConfigMPEGGroupID = FOUR_CHAR_CODE('mpeg')
kQTMediaConfigMP3GroupID = FOUR_CHAR_CODE('mp3 ')
kQTMediaConfigImageGroupID = FOUR_CHAR_CODE('ogle')
kQTMediaConfigMiscGroupID = FOUR_CHAR_CODE('misc')
kQTMediaInfoNetGroup = FOUR_CHAR_CODE('net ')
kQTMediaInfoWinGroup = FOUR_CHAR_CODE('win ')
kQTMediaInfoMacGroup = FOUR_CHAR_CODE('mac ')
kQTMediaInfoMiscGroup = 0x3F3F3F3F
kMimeInfoMimeTypeTag = FOUR_CHAR_CODE('mime')
kMimeInfoFileExtensionTag = FOUR_CHAR_CODE('ext ')
kMimeInfoDescriptionTag = FOUR_CHAR_CODE('desc')
kMimeInfoGroupTag = FOUR_CHAR_CODE('grop')
kMimeInfoDoNotOverrideExistingFileTypeAssociation = FOUR_CHAR_CODE('nofa')
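# Macintosh file type codes (OSTypes) for file formats handled by QuickTime importers and exporters.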
kQTFileTypeAIFF = FOUR_CHAR_CODE('AIFF')
kQTFileTypeAIFC = FOUR_CHAR_CODE('AIFC')
kQTFileTypeDVC = FOUR_CHAR_CODE('dvc!')
kQTFileTypeMIDI = FOUR_CHAR_CODE('Midi')
kQTFileTypePicture = FOUR_CHAR_CODE('PICT')
kQTFileTypeMovie = FOUR_CHAR_CODE('MooV')
kQTFileTypeText = FOUR_CHAR_CODE('TEXT')
kQTFileTypeWave = FOUR_CHAR_CODE('WAVE')
kQTFileTypeSystemSevenSound = FOUR_CHAR_CODE('sfil')
kQTFileTypeMuLaw = FOUR_CHAR_CODE('ULAW')
kQTFileTypeAVI = FOUR_CHAR_CODE('VfW ')
kQTFileTypeSoundDesignerII = FOUR_CHAR_CODE('Sd2f')
kQTFileTypeAudioCDTrack = FOUR_CHAR_CODE('trak')
kQTFileTypePICS = FOUR_CHAR_CODE('PICS')
kQTFileTypeGIF = FOUR_CHAR_CODE('GIFf')
kQTFileTypePNG = FOUR_CHAR_CODE('PNGf')
kQTFileTypeTIFF = FOUR_CHAR_CODE('TIFF')
kQTFileTypePhotoShop = FOUR_CHAR_CODE('8BPS')
kQTFileTypeSGIImage = FOUR_CHAR_CODE('.SGI')
kQTFileTypeBMP = FOUR_CHAR_CODE('BMPf')
kQTFileTypeJPEG = FOUR_CHAR_CODE('JPEG')
kQTFileTypeJFIF = FOUR_CHAR_CODE('JPEG')
kQTFileTypeMacPaint = FOUR_CHAR_CODE('PNTG')
kQTFileTypeTargaImage = FOUR_CHAR_CODE('TPIC')
kQTFileTypeQuickDrawGXPicture = FOUR_CHAR_CODE('qdgx')
kQTFileTypeQuickTimeImage = FOUR_CHAR_CODE('qtif')
kQTFileType3DMF = FOUR_CHAR_CODE('3DMF')
kQTFileTypeFLC = FOUR_CHAR_CODE('FLC ')
kQTFileTypeFlash = FOUR_CHAR_CODE('SWFL')
kQTFileTypeFlashPix = FOUR_CHAR_CODE('FPix')
kQTFileTypeMP4 = FOUR_CHAR_CODE('mpg4')
kQTSettingsDVExportNTSC = FOUR_CHAR_CODE('dvcv')
kQTSettingsDVExportLockedAudio = FOUR_CHAR_CODE('lock')
kQTSettingsEffect = FOUR_CHAR_CODE('effe')
kQTSettingsGraphicsFileImportSequence = FOUR_CHAR_CODE('sequ')
kQTSettingsGraphicsFileImportSequenceEnabled = FOUR_CHAR_CODE('enab')
kQTSettingsMovieExportEnableVideo = FOUR_CHAR_CODE('envi')
kQTSettingsMovieExportEnableSound = FOUR_CHAR_CODE('enso')
kQTSettingsMovieExportSaveOptions = FOUR_CHAR_CODE('save')
kQTSettingsMovieExportSaveForInternet = FOUR_CHAR_CODE('fast')
kQTSettingsMovieExportSaveCompressedMovie = FOUR_CHAR_CODE('cmpm')
kQTSettingsMIDI = FOUR_CHAR_CODE('MIDI')
kQTSettingsMIDISettingFlags = FOUR_CHAR_CODE('sttg')
kQTSettingsText = FOUR_CHAR_CODE('text')
kQTSettingsTextDescription = FOUR_CHAR_CODE('desc')
kQTSettingsTextSize = FOUR_CHAR_CODE('size')
kQTSettingsTextSettingFlags = FOUR_CHAR_CODE('sttg')
kQTSettingsTextTimeFraction = FOUR_CHAR_CODE('timf')
kQTSettingsTime = FOUR_CHAR_CODE('time')
kQTSettingsTimeDuration = FOUR_CHAR_CODE('dura')
kQTSettingsAudioCDTrack = FOUR_CHAR_CODE('trak')
kQTSettingsAudioCDTrackRateShift = FOUR_CHAR_CODE('rshf')
kQTSettingsDVExportDVFormat = FOUR_CHAR_CODE('dvcf')
kQTPresetsListResourceType = FOUR_CHAR_CODE('stg#')
kQTPresetsPlatformListResourceType = FOUR_CHAR_CODE('stgp')
kQTPresetInfoIsDivider = 1
kQTMovieExportSourceInfoResourceType = FOUR_CHAR_CODE('src#')
kQTMovieExportSourceInfoIsMediaType = 1L << 0
kQTMovieExportSourceInfoIsMediaCharacteristic = 1L << 1
kQTMovieExportSourceInfoIsSourceType = 1L << 2
movieExportUseConfiguredSettings = FOUR_CHAR_CODE('ucfg')
movieExportWidth = FOUR_CHAR_CODE('wdth')
movieExportHeight = FOUR_CHAR_CODE('hegt')
movieExportDuration = FOUR_CHAR_CODE('dura')
movieExportVideoFilter = FOUR_CHAR_CODE('iflt')
movieExportTimeScale = FOUR_CHAR_CODE('tmsc')
kQTBrowserInfoCanUseSystemFolderPlugin = (1L << 0)
kQTPreFlightOpenComponent = (1L << 1)
pnotComponentWantsEvents = 1
pnotComponentNeedsNoCache = 2
ShowFilePreviewComponentType = FOUR_CHAR_CODE('pnot')
CreateFilePreviewComponentType = FOUR_CHAR_CODE('pmak')
DataCompressorComponentType = FOUR_CHAR_CODE('dcom')
DataDecompressorComponentType = FOUR_CHAR_CODE('ddec')
AppleDataCompressorSubType = FOUR_CHAR_CODE('adec')
zlibDataCompressorSubType = FOUR_CHAR_CODE('zlib')
kDataHCanRead = 1L << 0
kDataHSpecialRead = 1L << 1
kDataHSpecialReadFile = 1L << 2
kDataHCanWrite = 1L << 3
kDataHSpecialWrite = 1 << 4
kDataHSpecialWriteFile = 1 << 5
kDataHCanStreamingWrite = 1 << 6
kDataHMustCheckDataRef = 1 << 7
kDataRefExtensionChokeSpeed = FOUR_CHAR_CODE('chok')
kDataRefExtensionFileName = FOUR_CHAR_CODE('fnam')
kDataRefExtensionMIMEType = FOUR_CHAR_CODE('mime')
kDataRefExtensionMacOSFileType = FOUR_CHAR_CODE('ftyp')
kDataRefExtensionInitializationData = FOUR_CHAR_CODE('data')
kDataRefExtensionQuickTimeMediaType = FOUR_CHAR_CODE('mtyp')
kDataHChokeToMovieDataRate = 1 << 0
kDataHChokeToParam = 1 << 1
kDataHExtendedSchedule = FOUR_CHAR_CODE('xtnd')
kDataHInfoFlagNeverStreams = 1 << 0
kDataHInfoFlagCanUpdateDataRefs = 1 << 1
kDataHInfoFlagNeedsNetworkBandwidth = 1 << 2
kDataHFileTypeMacOSFileType = FOUR_CHAR_CODE('ftyp')
kDataHFileTypeExtension = FOUR_CHAR_CODE('fext')
kDataHFileTypeMIME = FOUR_CHAR_CODE('mime')
kDataHCreateFileButDontCreateResFile = (1L << 0)
kDataHMovieUsageDoAppendMDAT = 1L << 0
kDataHTempUseSameDirectory = 1L << 0
kDataHTempUseSameVolume = 1L << 1
kDataHTempCreateFile = 1L << 2
kDataHTempOpenFile = 1L << 3
kDataHGetDataRateInfiniteRate = 0x7FFFFFFF
kDataHSetTimeHintsSkipBandwidthRequest = 1 << 0
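# Video digitizer ('vdig') component constants: input standards, input connectors,
# play-through modes and capability flags.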
videoDigitizerComponentType = FOUR_CHAR_CODE('vdig')
vdigInterfaceRev = 2
ntscIn = 0
currentIn = 0
palIn = 1
secamIn = 2
ntscReallyIn = 3
compositeIn = 0
sVideoIn = 1
rgbComponentIn = 2
rgbComponentSyncIn = 3
yuvComponentIn = 4
yuvComponentSyncIn = 5
tvTunerIn = 6
sdiIn = 7
vdPlayThruOff = 0
vdPlayThruOn = 1
vdDigitizerBW = 0
vdDigitizerRGB = 1
vdBroadcastMode = 0
vdVTRMode = 1
vdUseAnyField = 0
vdUseOddField = 1
vdUseEvenField = 2
vdTypeBasic = 0
vdTypeAlpha = 1
vdTypeMask = 2
vdTypeKey = 3
digiInDoesNTSC = 1L << 0
digiInDoesPAL = 1L << 1
digiInDoesSECAM = 1L << 2
digiInDoesGenLock = 1L << 7
digiInDoesComposite = 1L << 8
digiInDoesSVideo = 1L << 9
digiInDoesComponent = 1L << 10
digiInVTR_Broadcast = 1L << 11
digiInDoesColor = 1L << 12
digiInDoesBW = 1L << 13
digiInSignalLock = 1L << 31
digiOutDoes1 = 1L << 0
digiOutDoes2 = 1L << 1
digiOutDoes4 = 1L << 2
digiOutDoes8 = 1L << 3
digiOutDoes16 = 1L << 4
digiOutDoes32 = 1L << 5
digiOutDoesDither = 1L << 6
digiOutDoesStretch = 1L << 7
digiOutDoesShrink = 1L << 8
digiOutDoesMask = 1L << 9
digiOutDoesDouble = 1L << 11
digiOutDoesQuad = 1L << 12
digiOutDoesQuarter = 1L << 13
digiOutDoesSixteenth = 1L << 14
digiOutDoesRotate = 1L << 15
digiOutDoesHorizFlip = 1L << 16
digiOutDoesVertFlip = 1L << 17
digiOutDoesSkew = 1L << 18
digiOutDoesBlend = 1L << 19
digiOutDoesWarp = 1L << 20
digiOutDoesHW_DMA = 1L << 21
digiOutDoesHWPlayThru = 1L << 22
digiOutDoesILUT = 1L << 23
digiOutDoesKeyColor = 1L << 24
digiOutDoesAsyncGrabs = 1L << 25
digiOutDoesUnreadableScreenBits = 1L << 26
digiOutDoesCompress = 1L << 27
digiOutDoesCompressOnly = 1L << 28
digiOutDoesPlayThruDuringCompress = 1L << 29
digiOutDoesCompressPartiallyVisible = 1L << 30
digiOutDoesNotNeedCopyOfCompressData = 1L << 31
dmaDepth1 = 1
dmaDepth2 = 2
dmaDepth4 = 4
dmaDepth8 = 8
dmaDepth16 = 16
dmaDepth32 = 32
dmaDepth2Gray = 64
dmaDepth4Gray = 128
dmaDepth8Gray = 256
kVDIGControlledFrameRate = -1
vdDeviceFlagShowInputsAsDevices = (1 << 0)
vdDeviceFlagHideDevice = (1 << 1)
vdFlagCaptureStarting = (1 << 0)
vdFlagCaptureStopping = (1 << 1)
vdFlagCaptureIsForPreview = (1 << 2)
vdFlagCaptureIsForRecord = (1 << 3)
vdFlagCaptureLowLatency = (1 << 4)
vdFlagCaptureAlwaysUseTimeBase = (1 << 5)
vdFlagCaptureSetSettingsBegin = (1 << 6)
vdFlagCaptureSetSettingsEnd = (1 << 7)
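# XML parser component ('pars' / 'xml ') constants.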
xmlParseComponentType = FOUR_CHAR_CODE('pars')
xmlParseComponentSubType = FOUR_CHAR_CODE('xml ')
xmlIdentifierInvalid = 0
xmlIdentifierUnrecognized = -1L     # (long)0xFFFFFFFF in the C headers
xmlContentTypeInvalid = 0
xmlContentTypeElement = 1
xmlContentTypeCharData = 2
elementFlagAlwaysSelfContained = 1L << 0
elementFlagPreserveWhiteSpace = 1L << 1
xmlParseFlagAllowUppercase = 1L << 0
xmlParseFlagAllowUnquotedAttributeValues = 1L << 1
xmlParseFlagEventParseOnly = 1L << 2
attributeValueKindCharString = 0
attributeValueKindInteger = 1L << 0
attributeValueKindPercent = 1L << 1
attributeValueKindBoolean = 1L << 2
attributeValueKindOnOff = 1L << 3
attributeValueKindColor = 1L << 4
attributeValueKindEnum = 1L << 5
attributeValueKindCaseSensEnum = 1L << 6
MAX_ATTRIBUTE_VALUE_KIND = attributeValueKindCaseSensEnum
nameSpaceIDNone = 0
element_xml = 1
attr_src = 1
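# Sequence grabber ('barg') component, channel ('sgch') and panel ('sgpn') constants.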
SeqGrabComponentType = FOUR_CHAR_CODE('barg')
SeqGrabChannelType = FOUR_CHAR_CODE('sgch')
SeqGrabPanelType = FOUR_CHAR_CODE('sgpn')
SeqGrabCompressionPanelType = FOUR_CHAR_CODE('cmpr')
SeqGrabSourcePanelType = FOUR_CHAR_CODE('sour')
seqGrabToDisk = 1
seqGrabToMemory = 2
seqGrabDontUseTempMemory = 4
seqGrabAppendToFile = 8
seqGrabDontAddMovieResource = 16
seqGrabDontMakeMovie = 32
seqGrabPreExtendFile = 64
seqGrabDataProcIsInterruptSafe = 128
seqGrabDataProcDoesOverlappingReads = 256
seqGrabRecord = 1
seqGrabPreview = 2
seqGrabPlayDuringRecord = 4
seqGrabLowLatencyCapture = 8
seqGrabAlwaysUseTimeBase = 16
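# Example (illustrative only): the seqGrab* usage values above are bitmask flags
# and can be OR'ed together, e.g. a channel that both records and previews:
#     usage = seqGrabRecord | seqGrabPreview        # == 3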
seqGrabHasBounds = 1
seqGrabHasVolume = 2
seqGrabHasDiscreteSamples = 4
seqGrabDoNotBufferizeData = 8
seqGrabCanMoveWindowWhileRecording = 16
grabPictOffScreen = 1
grabPictIgnoreClip = 2
grabPictCurrentImage = 4
sgFlagControlledGrab = (1 << 0)
sgFlagAllowNonRGBPixMaps = (1 << 1)
sgDeviceInputNameFlagInputUnavailable = (1 << 0)
sgDeviceNameFlagDeviceUnavailable = (1 << 0)
sgDeviceNameFlagShowInputsAsDevices = (1 << 1)
sgDeviceListWithIcons = (1 << 0)
sgDeviceListDontCheckAvailability = (1 << 1)
sgDeviceListIncludeInputs = (1 << 2)
seqGrabWriteAppend = 0
seqGrabWriteReserve = 1
seqGrabWriteFill = 2
seqGrabUnpause = 0
seqGrabPause = 1
seqGrabPauseForMenu = 3
channelFlagDontOpenResFile = 2
channelFlagHasDependency = 4
sgPanelFlagForPanel = 1
seqGrabSettingsPreviewOnly = 1
channelPlayNormal = 0
channelPlayFast = 1
channelPlayHighQuality = 2
channelPlayAllData = 4
sgSetSettingsBegin = (1 << 0)
sgSetSettingsEnd = (1 << 1)
kSGSmallestDITLSize = -1
kSGLargestDITLSize = -2
sgChannelAtom = FOUR_CHAR_CODE('chan')
sgChannelSettingsAtom = FOUR_CHAR_CODE('ctom')
sgChannelDescription = FOUR_CHAR_CODE('cdsc')
sgChannelSettings = FOUR_CHAR_CODE('cset')
sgDeviceNameType = FOUR_CHAR_CODE('name')
sgDeviceDisplayNameType = FOUR_CHAR_CODE('dnam')
sgDeviceUIDType = FOUR_CHAR_CODE('duid')
sgInputUIDType = FOUR_CHAR_CODE('iuid')
sgUsageType = FOUR_CHAR_CODE('use ')
sgPlayFlagsType = FOUR_CHAR_CODE('plyf')
sgClipType = FOUR_CHAR_CODE('clip')
sgMatrixType = FOUR_CHAR_CODE('mtrx')
sgVolumeType = FOUR_CHAR_CODE('volu')
sgPanelSettingsAtom = FOUR_CHAR_CODE('ptom')
sgPanelDescription = FOUR_CHAR_CODE('pdsc')
sgPanelSettings = FOUR_CHAR_CODE('pset')
sgcSoundCompressionType = FOUR_CHAR_CODE('scmp')
sgcSoundCodecSettingsType = FOUR_CHAR_CODE('cdec')
sgcSoundSampleRateType = FOUR_CHAR_CODE('srat')
sgcSoundChannelCountType = FOUR_CHAR_CODE('schn')
sgcSoundSampleSizeType = FOUR_CHAR_CODE('ssiz')
sgcSoundInputType = FOUR_CHAR_CODE('sinp')
sgcSoundGainType = FOUR_CHAR_CODE('gain')
sgcVideoHueType = FOUR_CHAR_CODE('hue ')
sgcVideoSaturationType = FOUR_CHAR_CODE('satr')
sgcVideoContrastType = FOUR_CHAR_CODE('trst')
sgcVideoSharpnessType = FOUR_CHAR_CODE('shrp')
sgcVideoBrigtnessType = FOUR_CHAR_CODE('brit')
sgcVideoBlackLevelType = FOUR_CHAR_CODE('blkl')
sgcVideoWhiteLevelType = FOUR_CHAR_CODE('whtl')
sgcVideoInputType = FOUR_CHAR_CODE('vinp')
sgcVideoFormatType = FOUR_CHAR_CODE('vstd')
sgcVideoFilterType = FOUR_CHAR_CODE('vflt')
sgcVideoRectType = FOUR_CHAR_CODE('vrct')
sgcVideoDigitizerType = FOUR_CHAR_CODE('vdig')
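# QuickTime video output ('vout') component constants.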
QTVideoOutputComponentType = FOUR_CHAR_CODE('vout')
QTVideoOutputComponentBaseSubType = FOUR_CHAR_CODE('base')
kQTVideoOutputDontDisplayToUser = 1L << 0
kQTVODisplayModeItem = FOUR_CHAR_CODE('qdmi')
kQTVODimensions = FOUR_CHAR_CODE('dimn')
kQTVOResolution = FOUR_CHAR_CODE('resl')
kQTVORefreshRate = FOUR_CHAR_CODE('refr')
kQTVOPixelType = FOUR_CHAR_CODE('pixl')
kQTVOName = FOUR_CHAR_CODE('name')
kQTVODecompressors = FOUR_CHAR_CODE('deco')
kQTVODecompressorType = FOUR_CHAR_CODE('dety')
kQTVODecompressorContinuous = FOUR_CHAR_CODE('cont')
kQTVODecompressorComponent = FOUR_CHAR_CODE('cmpt')
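# The k*Select values that follow are component selector (dispatch) codes for the
# component interfaces declared above; they are normally only needed when
# delegating or overriding component calls.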
kClockGetTimeSelect = 0x0001
kClockNewCallBackSelect = 0x0002
kClockDisposeCallBackSelect = 0x0003
kClockCallMeWhenSelect = 0x0004
kClockCancelCallBackSelect = 0x0005
kClockRateChangedSelect = 0x0006
kClockTimeChangedSelect = 0x0007
kClockSetTimeBaseSelect = 0x0008
kClockStartStopChangedSelect = 0x0009
kClockGetRateSelect = 0x000A
kSCGetCompressionExtendedSelect = 0x0001
kSCPositionRectSelect = 0x0002
kSCPositionDialogSelect = 0x0003
kSCSetTestImagePictHandleSelect = 0x0004
kSCSetTestImagePictFileSelect = 0x0005
kSCSetTestImagePixMapSelect = 0x0006
kSCGetBestDeviceRectSelect = 0x0007
kSCRequestImageSettingsSelect = 0x000A
kSCCompressImageSelect = 0x000B
kSCCompressPictureSelect = 0x000C
kSCCompressPictureFileSelect = 0x000D
kSCRequestSequenceSettingsSelect = 0x000E
kSCCompressSequenceBeginSelect = 0x000F
kSCCompressSequenceFrameSelect = 0x0010
kSCCompressSequenceEndSelect = 0x0011
kSCDefaultPictHandleSettingsSelect = 0x0012
kSCDefaultPictFileSettingsSelect = 0x0013
kSCDefaultPixMapSettingsSelect = 0x0014
kSCGetInfoSelect = 0x0015
kSCSetInfoSelect = 0x0016
kSCNewGWorldSelect = 0x0017
kSCSetCompressFlagsSelect = 0x0018
kSCGetCompressFlagsSelect = 0x0019
kSCGetSettingsAsTextSelect = 0x001A
kSCGetSettingsAsAtomContainerSelect = 0x001B
kSCSetSettingsFromAtomContainerSelect = 0x001C
kSCCompressSequenceFrameAsyncSelect = 0x001D
kSCAsyncIdleSelect = 0x001E
kTweenerInitializeSelect = 0x0001
kTweenerDoTweenSelect = 0x0002
kTweenerResetSelect = 0x0003
kTCGetCurrentTimeCodeSelect = 0x0101
kTCGetTimeCodeAtTimeSelect = 0x0102
kTCTimeCodeToStringSelect = 0x0103
kTCTimeCodeToFrameNumberSelect = 0x0104
kTCFrameNumberToTimeCodeSelect = 0x0105
kTCGetSourceRefSelect = 0x0106
kTCSetSourceRefSelect = 0x0107
kTCSetTimeCodeFlagsSelect = 0x0108
kTCGetTimeCodeFlagsSelect = 0x0109
kTCSetDisplayOptionsSelect = 0x010A
kTCGetDisplayOptionsSelect = 0x010B
kMovieImportHandleSelect = 0x0001
kMovieImportFileSelect = 0x0002
kMovieImportSetSampleDurationSelect = 0x0003
kMovieImportSetSampleDescriptionSelect = 0x0004
kMovieImportSetMediaFileSelect = 0x0005
kMovieImportSetDimensionsSelect = 0x0006
kMovieImportSetChunkSizeSelect = 0x0007
kMovieImportSetProgressProcSelect = 0x0008
kMovieImportSetAuxiliaryDataSelect = 0x0009
kMovieImportSetFromScrapSelect = 0x000A
kMovieImportDoUserDialogSelect = 0x000B
kMovieImportSetDurationSelect = 0x000C
kMovieImportGetAuxiliaryDataTypeSelect = 0x000D
kMovieImportValidateSelect = 0x000E
kMovieImportGetFileTypeSelect = 0x000F
kMovieImportDataRefSelect = 0x0010
kMovieImportGetSampleDescriptionSelect = 0x0011
kMovieImportGetMIMETypeListSelect = 0x0012
kMovieImportSetOffsetAndLimitSelect = 0x0013
kMovieImportGetSettingsAsAtomContainerSelect = 0x0014
kMovieImportSetSettingsFromAtomContainerSelect = 0x0015
kMovieImportSetOffsetAndLimit64Select = 0x0016
kMovieImportIdleSelect = 0x0017
kMovieImportValidateDataRefSelect = 0x0018
kMovieImportGetLoadStateSelect = 0x0019
kMovieImportGetMaxLoadedTimeSelect = 0x001A
kMovieImportEstimateCompletionTimeSelect = 0x001B
kMovieImportSetDontBlockSelect = 0x001C
kMovieImportGetDontBlockSelect = 0x001D
kMovieImportSetIdleManagerSelect = 0x001E
kMovieImportSetNewMovieFlagsSelect = 0x001F
kMovieImportGetDestinationMediaTypeSelect = 0x0020
kMovieExportToHandleSelect = 0x0080
kMovieExportToFileSelect = 0x0081
kMovieExportGetAuxiliaryDataSelect = 0x0083
kMovieExportSetProgressProcSelect = 0x0084
kMovieExportSetSampleDescriptionSelect = 0x0085
kMovieExportDoUserDialogSelect = 0x0086
kMovieExportGetCreatorTypeSelect = 0x0087
kMovieExportToDataRefSelect = 0x0088
kMovieExportFromProceduresToDataRefSelect = 0x0089
kMovieExportAddDataSourceSelect = 0x008A
kMovieExportValidateSelect = 0x008B
kMovieExportGetSettingsAsAtomContainerSelect = 0x008C
kMovieExportSetSettingsFromAtomContainerSelect = 0x008D
kMovieExportGetFileNameExtensionSelect = 0x008E
kMovieExportGetShortFileTypeStringSelect = 0x008F
kMovieExportGetSourceMediaTypeSelect = 0x0090
kMovieExportSetGetMoviePropertyProcSelect = 0x0091
kTextExportGetDisplayDataSelect = 0x0100
kTextExportGetTimeFractionSelect = 0x0101
kTextExportSetTimeFractionSelect = 0x0102
kTextExportGetSettingsSelect = 0x0103
kTextExportSetSettingsSelect = 0x0104
kMIDIImportGetSettingsSelect = 0x0100
kMIDIImportSetSettingsSelect = 0x0101
kMovieExportNewGetDataAndPropertiesProcsSelect = 0x0100
kMovieExportDisposeGetDataAndPropertiesProcsSelect = 0x0101
kGraphicsImageImportSetSequenceEnabledSelect = 0x0100
kGraphicsImageImportGetSequenceEnabledSelect = 0x0101
kPreviewShowDataSelect = 0x0001
kPreviewMakePreviewSelect = 0x0002
kPreviewMakePreviewReferenceSelect = 0x0003
kPreviewEventSelect = 0x0004
kDataCodecDecompressSelect = 0x0001
kDataCodecGetCompressBufferSizeSelect = 0x0002
kDataCodecCompressSelect = 0x0003
kDataCodecBeginInterruptSafeSelect = 0x0004
kDataCodecEndInterruptSafeSelect = 0x0005
kDataCodecDecompressPartialSelect = 0x0006
kDataCodecCompressPartialSelect = 0x0007
kDataHGetDataSelect = 0x0002
kDataHPutDataSelect = 0x0003
kDataHFlushDataSelect = 0x0004
kDataHOpenForWriteSelect = 0x0005
kDataHCloseForWriteSelect = 0x0006
kDataHOpenForReadSelect = 0x0008
kDataHCloseForReadSelect = 0x0009
kDataHSetDataRefSelect = 0x000A
kDataHGetDataRefSelect = 0x000B
kDataHCompareDataRefSelect = 0x000C
kDataHTaskSelect = 0x000D
kDataHScheduleDataSelect = 0x000E
kDataHFinishDataSelect = 0x000F
kDataHFlushCacheSelect = 0x0010
kDataHResolveDataRefSelect = 0x0011
kDataHGetFileSizeSelect = 0x0012
kDataHCanUseDataRefSelect = 0x0013
kDataHGetVolumeListSelect = 0x0014
kDataHWriteSelect = 0x0015
kDataHPreextendSelect = 0x0016
kDataHSetFileSizeSelect = 0x0017
kDataHGetFreeSpaceSelect = 0x0018
kDataHCreateFileSelect = 0x0019
kDataHGetPreferredBlockSizeSelect = 0x001A
kDataHGetDeviceIndexSelect = 0x001B
kDataHIsStreamingDataHandlerSelect = 0x001C
kDataHGetDataInBufferSelect = 0x001D
kDataHGetScheduleAheadTimeSelect = 0x001E
kDataHSetCacheSizeLimitSelect = 0x001F
kDataHGetCacheSizeLimitSelect = 0x0020
kDataHGetMovieSelect = 0x0021
kDataHAddMovieSelect = 0x0022
kDataHUpdateMovieSelect = 0x0023
kDataHDoesBufferSelect = 0x0024
kDataHGetFileNameSelect = 0x0025
kDataHGetAvailableFileSizeSelect = 0x0026
kDataHGetMacOSFileTypeSelect = 0x0027
kDataHGetMIMETypeSelect = 0x0028
kDataHSetDataRefWithAnchorSelect = 0x0029
kDataHGetDataRefWithAnchorSelect = 0x002A
kDataHSetMacOSFileTypeSelect = 0x002B
kDataHSetTimeBaseSelect = 0x002C
kDataHGetInfoFlagsSelect = 0x002D
kDataHScheduleData64Select = 0x002E
kDataHWrite64Select = 0x002F
kDataHGetFileSize64Select = 0x0030
kDataHPreextend64Select = 0x0031
kDataHSetFileSize64Select = 0x0032
kDataHGetFreeSpace64Select = 0x0033
kDataHAppend64Select = 0x0034
kDataHReadAsyncSelect = 0x0035
kDataHPollReadSelect = 0x0036
kDataHGetDataAvailabilitySelect = 0x0037
kDataHGetFileSizeAsyncSelect = 0x003A
kDataHGetDataRefAsTypeSelect = 0x003B
kDataHSetDataRefExtensionSelect = 0x003C
kDataHGetDataRefExtensionSelect = 0x003D
kDataHGetMovieWithFlagsSelect = 0x003E
kDataHGetFileTypeOrderingSelect = 0x0040
kDataHCreateFileWithFlagsSelect = 0x0041
kDataHGetMIMETypeAsyncSelect = 0x0042
kDataHGetInfoSelect = 0x0043
kDataHSetIdleManagerSelect = 0x0044
kDataHDeleteFileSelect = 0x0045
kDataHSetMovieUsageFlagsSelect = 0x0046
kDataHUseTemporaryDataRefSelect = 0x0047
kDataHGetTemporaryDataRefCapabilitiesSelect = 0x0048
kDataHRenameFileSelect = 0x0049
kDataHPlaybackHintsSelect = 0x0103
kDataHPlaybackHints64Select = 0x010E
kDataHGetDataRateSelect = 0x0110
kDataHSetTimeHintsSelect = 0x0111
kVDGetMaxSrcRectSelect = 0x0001
kVDGetActiveSrcRectSelect = 0x0002
kVDSetDigitizerRectSelect = 0x0003
kVDGetDigitizerRectSelect = 0x0004
kVDGetVBlankRectSelect = 0x0005
kVDGetMaskPixMapSelect = 0x0006
kVDGetPlayThruDestinationSelect = 0x0008
kVDUseThisCLUTSelect = 0x0009
kVDSetInputGammaValueSelect = 0x000A
kVDGetInputGammaValueSelect = 0x000B
kVDSetBrightnessSelect = 0x000C
kVDGetBrightnessSelect = 0x000D
kVDSetContrastSelect = 0x000E
kVDSetHueSelect = 0x000F
kVDSetSharpnessSelect = 0x0010
kVDSetSaturationSelect = 0x0011
kVDGetContrastSelect = 0x0012
kVDGetHueSelect = 0x0013
kVDGetSharpnessSelect = 0x0014
kVDGetSaturationSelect = 0x0015
kVDGrabOneFrameSelect = 0x0016
kVDGetMaxAuxBufferSelect = 0x0017
kVDGetDigitizerInfoSelect = 0x0019
kVDGetCurrentFlagsSelect = 0x001A
kVDSetKeyColorSelect = 0x001B
kVDGetKeyColorSelect = 0x001C
kVDAddKeyColorSelect = 0x001D
kVDGetNextKeyColorSelect = 0x001E
kVDSetKeyColorRangeSelect = 0x001F
kVDGetKeyColorRangeSelect = 0x0020
kVDSetDigitizerUserInterruptSelect = 0x0021
kVDSetInputColorSpaceModeSelect = 0x0022
kVDGetInputColorSpaceModeSelect = 0x0023
kVDSetClipStateSelect = 0x0024
kVDGetClipStateSelect = 0x0025
kVDSetClipRgnSelect = 0x0026
kVDClearClipRgnSelect = 0x0027
kVDGetCLUTInUseSelect = 0x0028
kVDSetPLLFilterTypeSelect = 0x0029
kVDGetPLLFilterTypeSelect = 0x002A
kVDGetMaskandValueSelect = 0x002B
kVDSetMasterBlendLevelSelect = 0x002C
kVDSetPlayThruDestinationSelect = 0x002D
kVDSetPlayThruOnOffSelect = 0x002E
kVDSetFieldPreferenceSelect = 0x002F
kVDGetFieldPreferenceSelect = 0x0030
kVDPreflightDestinationSelect = 0x0032
kVDPreflightGlobalRectSelect = 0x0033
kVDSetPlayThruGlobalRectSelect = 0x0034
kVDSetInputGammaRecordSelect = 0x0035
kVDGetInputGammaRecordSelect = 0x0036
kVDSetBlackLevelValueSelect = 0x0037
kVDGetBlackLevelValueSelect = 0x0038
kVDSetWhiteLevelValueSelect = 0x0039
kVDGetWhiteLevelValueSelect = 0x003A
kVDGetVideoDefaultsSelect = 0x003B
kVDGetNumberOfInputsSelect = 0x003C
kVDGetInputFormatSelect = 0x003D
kVDSetInputSelect = 0x003E
kVDGetInputSelect = 0x003F
kVDSetInputStandardSelect = 0x0040
kVDSetupBuffersSelect = 0x0041
kVDGrabOneFrameAsyncSelect = 0x0042
kVDDoneSelect = 0x0043
kVDSetCompressionSelect = 0x0044
kVDCompressOneFrameAsyncSelect = 0x0045
kVDCompressDoneSelect = 0x0046
kVDReleaseCompressBufferSelect = 0x0047
kVDGetImageDescriptionSelect = 0x0048
kVDResetCompressSequenceSelect = 0x0049
kVDSetCompressionOnOffSelect = 0x004A
kVDGetCompressionTypesSelect = 0x004B
kVDSetTimeBaseSelect = 0x004C
kVDSetFrameRateSelect = 0x004D
kVDGetDataRateSelect = 0x004E
kVDGetSoundInputDriverSelect = 0x004F
kVDGetDMADepthsSelect = 0x0050
kVDGetPreferredTimeScaleSelect = 0x0051
kVDReleaseAsyncBuffersSelect = 0x0052
kVDSetDataRateSelect = 0x0054
kVDGetTimeCodeSelect = 0x0055
kVDUseSafeBuffersSelect = 0x0056
kVDGetSoundInputSourceSelect = 0x0057
kVDGetCompressionTimeSelect = 0x0058
kVDSetPreferredPacketSizeSelect = 0x0059
kVDSetPreferredImageDimensionsSelect = 0x005A
kVDGetPreferredImageDimensionsSelect = 0x005B
kVDGetInputNameSelect = 0x005C
kVDSetDestinationPortSelect = 0x005D
kVDGetDeviceNameAndFlagsSelect = 0x005E
kVDCaptureStateChangingSelect = 0x005F
kVDGetUniqueIDsSelect = 0x0060
kVDSelectUniqueIDsSelect = 0x0061
kXMLParseDataRefSelect = 0x0001
kXMLParseFileSelect = 0x0002
kXMLParseDisposeXMLDocSelect = 0x0003
kXMLParseGetDetailedParseErrorSelect = 0x0004
kXMLParseAddElementSelect = 0x0005
kXMLParseAddAttributeSelect = 0x0006
kXMLParseAddMultipleAttributesSelect = 0x0007
kXMLParseAddAttributeAndValueSelect = 0x0008
kXMLParseAddMultipleAttributesAndValuesSelect = 0x0009
kXMLParseAddAttributeValueKindSelect = 0x000A
kXMLParseAddNameSpaceSelect = 0x000B
kXMLParseSetOffsetAndLimitSelect = 0x000C
kXMLParseSetEventParseRefConSelect = 0x000D
kXMLParseSetStartDocumentHandlerSelect = 0x000E
kXMLParseSetEndDocumentHandlerSelect = 0x000F
kXMLParseSetStartElementHandlerSelect = 0x0010
kXMLParseSetEndElementHandlerSelect = 0x0011
kXMLParseSetCharDataHandlerSelect = 0x0012
kXMLParseSetPreprocessInstructionHandlerSelect = 0x0013
kXMLParseSetCommentHandlerSelect = 0x0014
kXMLParseSetCDataHandlerSelect = 0x0015
kSGInitializeSelect = 0x0001
kSGSetDataOutputSelect = 0x0002
kSGGetDataOutputSelect = 0x0003
kSGSetGWorldSelect = 0x0004
kSGGetGWorldSelect = 0x0005
kSGNewChannelSelect = 0x0006
kSGDisposeChannelSelect = 0x0007
kSGStartPreviewSelect = 0x0010
kSGStartRecordSelect = 0x0011
kSGIdleSelect = 0x0012
kSGStopSelect = 0x0013
kSGPauseSelect = 0x0014
kSGPrepareSelect = 0x0015
kSGReleaseSelect = 0x0016
kSGGetMovieSelect = 0x0017
kSGSetMaximumRecordTimeSelect = 0x0018
kSGGetMaximumRecordTimeSelect = 0x0019
kSGGetStorageSpaceRemainingSelect = 0x001A
kSGGetTimeRemainingSelect = 0x001B
kSGGrabPictSelect = 0x001C
kSGGetLastMovieResIDSelect = 0x001D
kSGSetFlagsSelect = 0x001E
kSGGetFlagsSelect = 0x001F
kSGSetDataProcSelect = 0x0020
kSGNewChannelFromComponentSelect = 0x0021
kSGDisposeDeviceListSelect = 0x0022
kSGAppendDeviceListToMenuSelect = 0x0023
kSGSetSettingsSelect = 0x0024
kSGGetSettingsSelect = 0x0025
kSGGetIndChannelSelect = 0x0026
kSGUpdateSelect = 0x0027
kSGGetPauseSelect = 0x0028
kSGSettingsDialogSelect = 0x0029
kSGGetAlignmentProcSelect = 0x002A
kSGSetChannelSettingsSelect = 0x002B
kSGGetChannelSettingsSelect = 0x002C
kSGGetModeSelect = 0x002D
kSGSetDataRefSelect = 0x002E
kSGGetDataRefSelect = 0x002F
kSGNewOutputSelect = 0x0030
kSGDisposeOutputSelect = 0x0031
kSGSetOutputFlagsSelect = 0x0032
kSGSetChannelOutputSelect = 0x0033
kSGGetDataOutputStorageSpaceRemainingSelect = 0x0034
kSGHandleUpdateEventSelect = 0x0035
kSGSetOutputNextOutputSelect = 0x0036
kSGGetOutputNextOutputSelect = 0x0037
kSGSetOutputMaximumOffsetSelect = 0x0038
kSGGetOutputMaximumOffsetSelect = 0x0039
kSGGetOutputDataReferenceSelect = 0x003A
kSGWriteExtendedMovieDataSelect = 0x003B
kSGGetStorageSpaceRemaining64Select = 0x003C
kSGGetDataOutputStorageSpaceRemaining64Select = 0x003D
kSGWriteMovieDataSelect = 0x0100
kSGAddFrameReferenceSelect = 0x0101
kSGGetNextFrameReferenceSelect = 0x0102
kSGGetTimeBaseSelect = 0x0103
kSGSortDeviceListSelect = 0x0104
kSGAddMovieDataSelect = 0x0105
kSGChangedSourceSelect = 0x0106
kSGAddExtendedFrameReferenceSelect = 0x0107
kSGGetNextExtendedFrameReferenceSelect = 0x0108
kSGAddExtendedMovieDataSelect = 0x0109
kSGAddOutputDataRefToMediaSelect = 0x010A
kSGSetSettingsSummarySelect = 0x010B
kSGSetChannelUsageSelect = 0x0080
kSGGetChannelUsageSelect = 0x0081
kSGSetChannelBoundsSelect = 0x0082
kSGGetChannelBoundsSelect = 0x0083
kSGSetChannelVolumeSelect = 0x0084
kSGGetChannelVolumeSelect = 0x0085
kSGGetChannelInfoSelect = 0x0086
kSGSetChannelPlayFlagsSelect = 0x0087
kSGGetChannelPlayFlagsSelect = 0x0088
kSGSetChannelMaxFramesSelect = 0x0089
kSGGetChannelMaxFramesSelect = 0x008A
kSGSetChannelRefConSelect = 0x008B
kSGSetChannelClipSelect = 0x008C
kSGGetChannelClipSelect = 0x008D
kSGGetChannelSampleDescriptionSelect = 0x008E
kSGGetChannelDeviceListSelect = 0x008F
kSGSetChannelDeviceSelect = 0x0090
kSGSetChannelMatrixSelect = 0x0091
kSGGetChannelMatrixSelect = 0x0092
kSGGetChannelTimeScaleSelect = 0x0093
kSGChannelPutPictureSelect = 0x0094
kSGChannelSetRequestedDataRateSelect = 0x0095
kSGChannelGetRequestedDataRateSelect = 0x0096
kSGChannelSetDataSourceNameSelect = 0x0097
kSGChannelGetDataSourceNameSelect = 0x0098
kSGChannelSetCodecSettingsSelect = 0x0099
kSGChannelGetCodecSettingsSelect = 0x009A
kSGGetChannelTimeBaseSelect = 0x009B
kSGGetChannelRefConSelect = 0x009C
kSGGetChannelDeviceAndInputNamesSelect = 0x009D
kSGSetChannelDeviceInputSelect = 0x009E
kSGSetChannelSettingsStateChangingSelect = 0x009F
kSGInitChannelSelect = 0x0180
kSGWriteSamplesSelect = 0x0181
kSGGetDataRateSelect = 0x0182
kSGAlignChannelRectSelect = 0x0183
kSGPanelGetDitlSelect = 0x0200
kSGPanelGetTitleSelect = 0x0201
kSGPanelCanRunSelect = 0x0202
kSGPanelInstallSelect = 0x0203
kSGPanelEventSelect = 0x0204
kSGPanelItemSelect = 0x0205
kSGPanelRemoveSelect = 0x0206
kSGPanelSetGrabberSelect = 0x0207
kSGPanelSetResFileSelect = 0x0208
kSGPanelGetSettingsSelect = 0x0209
kSGPanelSetSettingsSelect = 0x020A
kSGPanelValidateInputSelect = 0x020B
kSGPanelSetEventFilterSelect = 0x020C
kSGPanelGetDITLForSizeSelect = 0x020D
kSGGetSrcVideoBoundsSelect = 0x0100
kSGSetVideoRectSelect = 0x0101
kSGGetVideoRectSelect = 0x0102
kSGGetVideoCompressorTypeSelect = 0x0103
kSGSetVideoCompressorTypeSelect = 0x0104
kSGSetVideoCompressorSelect = 0x0105
kSGGetVideoCompressorSelect = 0x0106
kSGGetVideoDigitizerComponentSelect = 0x0107
kSGSetVideoDigitizerComponentSelect = 0x0108
kSGVideoDigitizerChangedSelect = 0x0109
kSGSetVideoBottlenecksSelect = 0x010A
kSGGetVideoBottlenecksSelect = 0x010B
kSGGrabFrameSelect = 0x010C
kSGGrabFrameCompleteSelect = 0x010D
kSGDisplayFrameSelect = 0x010E
kSGCompressFrameSelect = 0x010F
kSGCompressFrameCompleteSelect = 0x0110
kSGAddFrameSelect = 0x0111
kSGTransferFrameForCompressSelect = 0x0112
kSGSetCompressBufferSelect = 0x0113
kSGGetCompressBufferSelect = 0x0114
kSGGetBufferInfoSelect = 0x0115
kSGSetUseScreenBufferSelect = 0x0116
kSGGetUseScreenBufferSelect = 0x0117
kSGGrabCompressCompleteSelect = 0x0118
kSGDisplayCompressSelect = 0x0119
kSGSetFrameRateSelect = 0x011A
kSGGetFrameRateSelect = 0x011B
kSGSetPreferredPacketSizeSelect = 0x0121
kSGGetPreferredPacketSizeSelect = 0x0122
kSGSetUserVideoCompressorListSelect = 0x0123
kSGGetUserVideoCompressorListSelect = 0x0124
kSGSetSoundInputDriverSelect = 0x0100
kSGGetSoundInputDriverSelect = 0x0101
kSGSoundInputDriverChangedSelect = 0x0102
kSGSetSoundRecordChunkSizeSelect = 0x0103
kSGGetSoundRecordChunkSizeSelect = 0x0104
kSGSetSoundInputRateSelect = 0x0105
kSGGetSoundInputRateSelect = 0x0106
kSGSetSoundInputParametersSelect = 0x0107
kSGGetSoundInputParametersSelect = 0x0108
kSGSetAdditionalSoundRatesSelect = 0x0109
kSGGetAdditionalSoundRatesSelect = 0x010A
kSGSetFontNameSelect = 0x0100
kSGSetFontSizeSelect = 0x0101
kSGSetTextForeColorSelect = 0x0102
kSGSetTextBackColorSelect = 0x0103
kSGSetJustificationSelect = 0x0104
kSGGetTextReturnToSpaceValueSelect = 0x0105
kSGSetTextReturnToSpaceValueSelect = 0x0106
kSGGetInstrumentSelect = 0x0100
kSGSetInstrumentSelect = 0x0101
kQTVideoOutputGetDisplayModeListSelect = 0x0001
kQTVideoOutputGetCurrentClientNameSelect = 0x0002
kQTVideoOutputSetClientNameSelect = 0x0003
kQTVideoOutputGetClientNameSelect = 0x0004
kQTVideoOutputBeginSelect = 0x0005
kQTVideoOutputEndSelect = 0x0006
kQTVideoOutputSetDisplayModeSelect = 0x0007
kQTVideoOutputGetDisplayModeSelect = 0x0008
kQTVideoOutputCustomConfigureDisplaySelect = 0x0009
kQTVideoOutputSaveStateSelect = 0x000A
kQTVideoOutputRestoreStateSelect = 0x000B
kQTVideoOutputGetGWorldSelect = 0x000C
kQTVideoOutputGetGWorldParametersSelect = 0x000D
kQTVideoOutputGetIndSoundOutputSelect = 0x000E
kQTVideoOutputGetClockSelect = 0x000F
kQTVideoOutputSetEchoPortSelect = 0x0010
kQTVideoOutputGetIndImageDecompressorSelect = 0x0011
kQTVideoOutputBaseSetEchoPortSelect = 0x0012
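# Derived media handler capability flags (handler*) plus media task and hit-test flags.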
handlerHasSpatial = 1 << 0
handlerCanClip = 1 << 1
handlerCanMatte = 1 << 2
handlerCanTransferMode = 1 << 3
handlerNeedsBuffer = 1 << 4
handlerNoIdle = 1 << 5
handlerNoScheduler = 1 << 6
handlerWantsTime = 1 << 7
handlerCGrafPortOnly = 1 << 8
handlerCanSend = 1 << 9
handlerCanHandleComplexMatrix = 1 << 10
handlerWantsDestinationPixels = 1 << 11
handlerCanSendImageData = 1 << 12
handlerCanPicSave = 1 << 13
mMustDraw = 1 << 3
mAtEnd = 1 << 4
mPreflightDraw = 1 << 5
mSyncDrawing = 1 << 6
mPrecompositeOnly = 1 << 9
mSoundOnly = 1 << 10
mDoIdleActionsBeforeDraws = 1 << 11
mDisableIdleActions = 1 << 12
mDidDraw = 1 << 0
mNeedsToDraw = 1 << 2
mDrawAgain = 1 << 3
mPartialDraw = 1 << 4
mWantIdleActions = 1 << 5
forceUpdateRedraw = 1 << 0
forceUpdateNewBuffer = 1 << 1
mHitTestBounds = 1L << 0
mHitTestImage = 1L << 1
mHitTestInvisible = 1L << 2
mHitTestIsClick = 1L << 3
mOpaque = 1L << 0
mInvisible = 1L << 1
kMediaQTIdleFrequencySelector = FOUR_CHAR_CODE('idfq')
kMediaVideoParamBrightness = 1
kMediaVideoParamContrast = 2
kMediaVideoParamHue = 3
kMediaVideoParamSharpness = 4
kMediaVideoParamSaturation = 5
kMediaVideoParamBlackLevel = 6
kMediaVideoParamWhiteLevel = 7
kMHInfoEncodedFrameRate = FOUR_CHAR_CODE('orat')
kEmptyPurgableChunksOverAllowance = 1
kCallComponentExecuteWiredActionSelect = -9
kMediaSetChunkManagementFlagsSelect = 0x0415
kMediaGetChunkManagementFlagsSelect = 0x0416
kMediaSetPurgeableChunkMemoryAllowanceSelect = 0x0417
kMediaGetPurgeableChunkMemoryAllowanceSelect = 0x0418
kMediaEmptyAllPurgeableChunksSelect = 0x0419
kMediaInitializeSelect = 0x0501
kMediaSetHandlerCapabilitiesSelect = 0x0502
kMediaIdleSelect = 0x0503
kMediaGetMediaInfoSelect = 0x0504
kMediaPutMediaInfoSelect = 0x0505
kMediaSetActiveSelect = 0x0506
kMediaSetRateSelect = 0x0507
kMediaGGetStatusSelect = 0x0508
kMediaTrackEditedSelect = 0x0509
kMediaSetMediaTimeScaleSelect = 0x050A
kMediaSetMovieTimeScaleSelect = 0x050B
kMediaSetGWorldSelect = 0x050C
kMediaSetDimensionsSelect = 0x050D
kMediaSetClipSelect = 0x050E
kMediaSetMatrixSelect = 0x050F
kMediaGetTrackOpaqueSelect = 0x0510
kMediaSetGraphicsModeSelect = 0x0511
kMediaGetGraphicsModeSelect = 0x0512
kMediaGSetVolumeSelect = 0x0513
kMediaSetSoundBalanceSelect = 0x0514
kMediaGetSoundBalanceSelect = 0x0515
kMediaGetNextBoundsChangeSelect = 0x0516
kMediaGetSrcRgnSelect = 0x0517
kMediaPrerollSelect = 0x0518
kMediaSampleDescriptionChangedSelect = 0x0519
kMediaHasCharacteristicSelect = 0x051A
kMediaGetOffscreenBufferSizeSelect = 0x051B
kMediaSetHintsSelect = 0x051C
kMediaGetNameSelect = 0x051D
kMediaForceUpdateSelect = 0x051E
kMediaGetDrawingRgnSelect = 0x051F
kMediaGSetActiveSegmentSelect = 0x0520
kMediaInvalidateRegionSelect = 0x0521
kMediaGetNextStepTimeSelect = 0x0522
kMediaSetNonPrimarySourceDataSelect = 0x0523
kMediaChangedNonPrimarySourceSelect = 0x0524
kMediaTrackReferencesChangedSelect = 0x0525
kMediaGetSampleDataPointerSelect = 0x0526
kMediaReleaseSampleDataPointerSelect = 0x0527
kMediaTrackPropertyAtomChangedSelect = 0x0528
kMediaSetTrackInputMapReferenceSelect = 0x0529
kMediaSetVideoParamSelect = 0x052B
kMediaGetVideoParamSelect = 0x052C
kMediaCompareSelect = 0x052D
kMediaGetClockSelect = 0x052E
kMediaSetSoundOutputComponentSelect = 0x052F
kMediaGetSoundOutputComponentSelect = 0x0530
kMediaSetSoundLocalizationDataSelect = 0x0531
kMediaGetInvalidRegionSelect = 0x053C
kMediaSampleDescriptionB2NSelect = 0x053E
kMediaSampleDescriptionN2BSelect = 0x053F
kMediaQueueNonPrimarySourceDataSelect = 0x0540
kMediaFlushNonPrimarySourceDataSelect = 0x0541
kMediaGetURLLinkSelect = 0x0543
kMediaMakeMediaTimeTableSelect = 0x0545
kMediaHitTestForTargetRefConSelect = 0x0546
kMediaHitTestTargetRefConSelect = 0x0547
kMediaGetActionsForQTEventSelect = 0x0548
kMediaDisposeTargetRefConSelect = 0x0549
kMediaTargetRefConsEqualSelect = 0x054A
kMediaSetActionsCallbackSelect = 0x054B
kMediaPrePrerollBeginSelect = 0x054C
kMediaPrePrerollCancelSelect = 0x054D
kMediaEnterEmptyEditSelect = 0x054F
kMediaCurrentMediaQueuedDataSelect = 0x0550
kMediaGetEffectiveVolumeSelect = 0x0551
kMediaResolveTargetRefConSelect = 0x0552
kMediaGetSoundLevelMeteringEnabledSelect = 0x0553
kMediaSetSoundLevelMeteringEnabledSelect = 0x0554
kMediaGetSoundLevelMeterInfoSelect = 0x0555
kMediaGetEffectiveSoundBalanceSelect = 0x0556
kMediaSetScreenLockSelect = 0x0557
kMediaSetDoMCActionCallbackSelect = 0x0558
kMediaGetErrorStringSelect = 0x0559
kMediaGetSoundEqualizerBandsSelect = 0x055A
kMediaSetSoundEqualizerBandsSelect = 0x055B
kMediaGetSoundEqualizerBandLevelsSelect = 0x055C
kMediaDoIdleActionsSelect = 0x055D
kMediaSetSoundBassAndTrebleSelect = 0x055E
kMediaGetSoundBassAndTrebleSelect = 0x055F
kMediaTimeBaseChangedSelect = 0x0560
kMediaMCIsPlayerEventSelect = 0x0561
kMediaGetMediaLoadStateSelect = 0x0562
kMediaVideoOutputChangedSelect = 0x0563
kMediaEmptySampleCacheSelect = 0x0564
kMediaGetPublicInfoSelect = 0x0565
kMediaSetPublicInfoSelect = 0x0566
kMediaGetUserPreferredCodecsSelect = 0x0567
kMediaSetUserPreferredCodecsSelect = 0x0568
kMediaRefConSetPropertySelect = 0x0569
kMediaRefConGetPropertySelect = 0x056A
kMediaNavigateTargetRefConSelect = 0x056B
kMediaGGetIdleManagerSelect = 0x056C
kMediaGSetIdleManagerSelect = 0x056D
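# QuickTime Music Architecture constants: atomic instrument atom types (kai*),
# music and instrument component types, and MIDI controller numbers.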
kaiToneDescType = FOUR_CHAR_CODE('tone')
kaiNoteRequestInfoType = FOUR_CHAR_CODE('ntrq')
kaiKnobListType = FOUR_CHAR_CODE('knbl')
kaiKeyRangeInfoType = FOUR_CHAR_CODE('sinf')
kaiSampleDescType = FOUR_CHAR_CODE('sdsc')
kaiSampleInfoType = FOUR_CHAR_CODE('smin')
kaiSampleDataType = FOUR_CHAR_CODE('sdat')
kaiSampleDataQUIDType = FOUR_CHAR_CODE('quid')
kaiInstInfoType = FOUR_CHAR_CODE('iinf')
kaiPictType = FOUR_CHAR_CODE('pict')
kaiWriterType = FOUR_CHAR_CODE('\xa9wrt')
kaiCopyrightType = FOUR_CHAR_CODE('\xa9cpy')
kaiOtherStrType = FOUR_CHAR_CODE('str ')
kaiInstrumentRefType = FOUR_CHAR_CODE('iref')
kaiInstGMQualityType = FOUR_CHAR_CODE('qual')
kaiLibraryInfoType = FOUR_CHAR_CODE('linf')
kaiLibraryDescType = FOUR_CHAR_CODE('ldsc')
kInstKnobMissingUnknown = 0
kInstKnobMissingDefault = (1 << 0)
kMusicLoopTypeNormal = 0
kMusicLoopTypePalindrome = 1
instSamplePreProcessFlag = 1 << 0
kQTMIDIComponentType = FOUR_CHAR_CODE('midi')
kOMSComponentSubType = FOUR_CHAR_CODE('OMS ')
kFMSComponentSubType = FOUR_CHAR_CODE('FMS ')
kMIDIManagerComponentSubType = FOUR_CHAR_CODE('mmgr')
kOSXMIDIComponentSubType = FOUR_CHAR_CODE('osxm')
kMusicPacketPortLost = 1
kMusicPacketPortFound = 2
kMusicPacketTimeGap = 3
kAppleSysexID = 0x11
kAppleSysexCmdSampleSize = 0x0001
kAppleSysexCmdSampleBreak = 0x0002
kAppleSysexCmdAtomicInstrument = 0x0010
kAppleSysexCmdDeveloper = 0x7F00
kSynthesizerConnectionFMS = 1
kSynthesizerConnectionMMgr = 2
kSynthesizerConnectionOMS = 4
kSynthesizerConnectionQT = 8
kSynthesizerConnectionOSXMIDI = 16
kSynthesizerConnectionUnavailable = 256
kMusicComponentType = FOUR_CHAR_CODE('musi')
kInstrumentComponentType = FOUR_CHAR_CODE('inst')
kSoftSynthComponentSubType = FOUR_CHAR_CODE('ss ')
kGMSynthComponentSubType = FOUR_CHAR_CODE('gm ')
kSynthesizerDynamicVoice = 1 << 0
kSynthesizerUsesMIDIPort = 1 << 1
kSynthesizerMicrotone = 1 << 2
kSynthesizerHasSamples = 1 << 3
kSynthesizerMixedDrums = 1 << 4
kSynthesizerSoftware = 1 << 5
kSynthesizerHardware = 1 << 6
kSynthesizerDynamicChannel = 1 << 7
kSynthesizerHogsSystemChannel = 1 << 8
kSynthesizerHasSystemChannel = 1 << 9
kSynthesizerSlowSetPart = 1 << 10
kSynthesizerOffline = 1 << 12
kSynthesizerGM = 1 << 14
kSynthesizerDLS = 1 << 15
kSynthesizerSoundLocalization = 1 << 16
kControllerModulationWheel = 1
kControllerBreath = 2
kControllerFoot = 4
kControllerPortamentoTime = 5
kControllerVolume = 7
kControllerBalance = 8
kControllerPan = 10
kControllerExpression = 11
kControllerLever1 = 16
kControllerLever2 = 17
kControllerLever3 = 18
kControllerLever4 = 19
kControllerLever5 = 80
kControllerLever6 = 81
kControllerLever7 = 82
kControllerLever8 = 83
kControllerPitchBend = 32
kControllerAfterTouch = 33
kControllerPartTranspose = 40
kControllerTuneTranspose = 41
kControllerPartVolume = 42
kControllerTuneVolume = 43
kControllerSustain = 64
kControllerPortamento = 65
kControllerSostenuto = 66
kControllerSoftPedal = 67
kControllerReverb = 91
kControllerTremolo = 92
kControllerChorus = 93
kControllerCeleste = 94
kControllerPhaser = 95
kControllerEditPart = 113
kControllerMasterTune = 114
kControllerMasterTranspose = 114
kControllerMasterVolume = 115
kControllerMasterCPULoad = 116
kControllerMasterPolyphony = 117
kControllerMasterFeatures = 118
kQTMSKnobStartID = 0x02000000
kQTMSKnobVolumeAttackTimeID = 0x02000001
kQTMSKnobVolumeDecayTimeID = 0x02000002
kQTMSKnobVolumeSustainLevelID = 0x02000003
kQTMSKnobVolumeRelease1RateID = 0x02000004
kQTMSKnobVolumeDecayKeyScalingID = 0x02000005
kQTMSKnobVolumeReleaseTimeID = 0x02000006
kQTMSKnobVolumeLFODelayID = 0x02000007
kQTMSKnobVolumeLFORampTimeID = 0x02000008
kQTMSKnobVolumeLFOPeriodID = 0x02000009
kQTMSKnobVolumeLFOShapeID = 0x0200000A
kQTMSKnobVolumeLFODepthID = 0x0200000B
kQTMSKnobVolumeOverallID = 0x0200000C
kQTMSKnobVolumeVelocity127ID = 0x0200000D
kQTMSKnobVolumeVelocity96ID = 0x0200000E
kQTMSKnobVolumeVelocity64ID = 0x0200000F
kQTMSKnobVolumeVelocity32ID = 0x02000010
kQTMSKnobVolumeVelocity16ID = 0x02000011
kQTMSKnobPitchTransposeID = 0x02000012
kQTMSKnobPitchLFODelayID = 0x02000013
kQTMSKnobPitchLFORampTimeID = 0x02000014
kQTMSKnobPitchLFOPeriodID = 0x02000015
kQTMSKnobPitchLFOShapeID = 0x02000016
kQTMSKnobPitchLFODepthID = 0x02000017
kQTMSKnobPitchLFOQuantizeID = 0x02000018
kQTMSKnobStereoDefaultPanID = 0x02000019
kQTMSKnobStereoPositionKeyScalingID = 0x0200001A
kQTMSKnobPitchLFOOffsetID = 0x0200001B
kQTMSKnobExclusionGroupID = 0x0200001C
kQTMSKnobSustainTimeID = 0x0200001D
kQTMSKnobSustainInfiniteID = 0x0200001E
kQTMSKnobVolumeLFOStereoID = 0x0200001F
kQTMSKnobVelocityLowID = 0x02000020
kQTMSKnobVelocityHighID = 0x02000021
kQTMSKnobVelocitySensitivityID = 0x02000022
kQTMSKnobPitchSensitivityID = 0x02000023
kQTMSKnobVolumeLFODepthFromWheelID = 0x02000024
kQTMSKnobPitchLFODepthFromWheelID = 0x02000025
kQTMSKnobVolumeExpOptionsID = 0x02000026
kQTMSKnobEnv1AttackTimeID = 0x02000027
kQTMSKnobEnv1DecayTimeID = 0x02000028
kQTMSKnobEnv1SustainLevelID = 0x02000029
kQTMSKnobEnv1SustainTimeID = 0x0200002A
kQTMSKnobEnv1SustainInfiniteID = 0x0200002B
kQTMSKnobEnv1ReleaseTimeID = 0x0200002C
kQTMSKnobEnv1ExpOptionsID = 0x0200002D
kQTMSKnobEnv2AttackTimeID = 0x0200002E
kQTMSKnobEnv2DecayTimeID = 0x0200002F
kQTMSKnobEnv2SustainLevelID = 0x02000030
kQTMSKnobEnv2SustainTimeID = 0x02000031
kQTMSKnobEnv2SustainInfiniteID = 0x02000032
kQTMSKnobEnv2ReleaseTimeID = 0x02000033
kQTMSKnobEnv2ExpOptionsID = 0x02000034
kQTMSKnobPitchEnvelopeID = 0x02000035
kQTMSKnobPitchEnvelopeDepthID = 0x02000036
kQTMSKnobFilterKeyFollowID = 0x02000037
kQTMSKnobFilterTransposeID = 0x02000038
kQTMSKnobFilterQID = 0x02000039
kQTMSKnobFilterFrequencyEnvelopeID = 0x0200003A
kQTMSKnobFilterFrequencyEnvelopeDepthID = 0x0200003B
kQTMSKnobFilterQEnvelopeID = 0x0200003C
kQTMSKnobFilterQEnvelopeDepthID = 0x0200003D
kQTMSKnobReverbThresholdID = 0x0200003E
kQTMSKnobVolumeAttackVelScalingID = 0x0200003F
kQTMSKnobLastIDPlus1 = 0x02000040
kControllerMaximum = 0x00007FFF
# kControllerMinimum = (long)0xFFFF8000
kVoiceCountDynamic = -1
kFirstGMInstrument = 0x00000001
kLastGMInstrument = 0x00000080
kFirstGSInstrument = 0x00000081
kLastGSInstrument = 0x00003FFF
kFirstDrumkit = 0x00004000
kLastDrumkit = 0x00004080
kFirstROMInstrument = 0x00008000
kLastROMInstrument = 0x0000FFFF
kFirstUserInstrument = 0x00010000
kLastUserInstrument = 0x0001FFFF
kInstrumentMatchSynthesizerType = 1
kInstrumentMatchSynthesizerName = 2
kInstrumentMatchName = 4
kInstrumentMatchNumber = 8
kInstrumentMatchGMNumber = 16
kInstrumentMatchGSNumber = 32
kKnobBasic = 8
kKnobReadOnly = 16
kKnobInterruptUnsafe = 32
kKnobKeyrangeOverride = 64
kKnobGroupStart = 128
kKnobFixedPoint8 = 1024
kKnobFixedPoint16 = 2048
kKnobTypeNumber = 0 << 12
kKnobTypeGroupName = 1 << 12
kKnobTypeBoolean = 2 << 12
kKnobTypeNote = 3 << 12
kKnobTypePan = 4 << 12
kKnobTypeInstrument = 5 << 12
kKnobTypeSetting = 6 << 12
kKnobTypeMilliseconds = 7 << 12
kKnobTypePercentage = 8 << 12
kKnobTypeHertz = 9 << 12
kKnobTypeButton = 10 << 12
kUnknownKnobValue = 0x7FFFFFFF
kDefaultKnobValue = 0x7FFFFFFE
notImplementedMusicErr = (0x80000000 | (0xFFFF & (notImplementedMusicOSErr)))
cantSendToSynthesizerErr = (0x80000000 | (0xFFFF & (cantSendToSynthesizerOSErr)))
cantReceiveFromSynthesizerErr = (0x80000000 | (0xFFFF & (cantReceiveFromSynthesizerOSErr)))
illegalVoiceAllocationErr = (0x80000000 | (0xFFFF & (illegalVoiceAllocationOSErr)))
illegalPartErr = (0x80000000 | (0xFFFF & (illegalPartOSErr)))
illegalChannelErr = (0x80000000 | (0xFFFF & (illegalChannelOSErr)))
illegalKnobErr = (0x80000000 | (0xFFFF & (illegalKnobOSErr)))
illegalKnobValueErr = (0x80000000 | (0xFFFF & (illegalKnobValueOSErr)))
illegalInstrumentErr = (0x80000000 | (0xFFFF & (illegalInstrumentOSErr)))
illegalControllerErr = (0x80000000 | (0xFFFF & (illegalControllerOSErr)))
midiManagerAbsentErr = (0x80000000 | (0xFFFF & (midiManagerAbsentOSErr)))
synthesizerNotRespondingErr = (0x80000000 | (0xFFFF & (synthesizerNotRespondingOSErr)))
synthesizerErr = (0x80000000 | (0xFFFF & (synthesizerOSErr)))
illegalNoteChannelErr = (0x80000000 | (0xFFFF & (illegalNoteChannelOSErr)))
noteChannelNotAllocatedErr = (0x80000000 | (0xFFFF & (noteChannelNotAllocatedOSErr)))
tunePlayerFullErr = (0x80000000 | (0xFFFF & (tunePlayerFullOSErr)))
tuneParseErr = (0x80000000 | (0xFFFF & (tuneParseOSErr)))
kGetAtomicInstNoExpandedSamples = 1 << 0
kGetAtomicInstNoOriginalSamples = 1 << 1
kGetAtomicInstNoSamples = kGetAtomicInstNoExpandedSamples | kGetAtomicInstNoOriginalSamples
kGetAtomicInstNoKnobList = 1 << 2
kGetAtomicInstNoInstrumentInfo = 1 << 3
kGetAtomicInstOriginalKnobList = 1 << 4
kGetAtomicInstAllKnobs = 1 << 5
kSetAtomicInstKeepOriginalInstrument = 1 << 0
kSetAtomicInstShareAcrossParts = 1 << 1
kSetAtomicInstCallerTosses = 1 << 2
kSetAtomicInstCallerGuarantees = 1 << 3
kSetAtomicInstInterruptSafe = 1 << 4
kSetAtomicInstDontPreprocess = 1 << 7
kInstrumentNamesModifiable = 1
kInstrumentNamesBoth = 2
kGenericMusicComponentSubtype = FOUR_CHAR_CODE('gene')
kGenericMusicKnob = 1
kGenericMusicInstrumentKnob = 2
kGenericMusicDrumKnob = 3
kGenericMusicGlobalController = 4
kGenericMusicResFirst = 0
kGenericMusicResMiscStringList = 1
kGenericMusicResMiscLongList = 2
kGenericMusicResInstrumentList = 3
kGenericMusicResDrumList = 4
kGenericMusicResInstrumentKnobDescriptionList = 5
kGenericMusicResDrumKnobDescriptionList = 6
kGenericMusicResKnobDescriptionList = 7
kGenericMusicResBitsLongList = 8
kGenericMusicResModifiableInstrumentHW = 9
kGenericMusicResGMTranslation = 10
kGenericMusicResROMInstrumentData = 11
kGenericMusicResAboutPICT = 12
kGenericMusicResLast = 13
kGenericMusicMiscLongFirst = 0
kGenericMusicMiscLongVoiceCount = 1
kGenericMusicMiscLongPartCount = 2
kGenericMusicMiscLongModifiableInstrumentCount = 3
kGenericMusicMiscLongChannelMask = 4
kGenericMusicMiscLongDrumPartCount = 5
kGenericMusicMiscLongModifiableDrumCount = 6
kGenericMusicMiscLongDrumChannelMask = 7
kGenericMusicMiscLongOutputCount = 8
kGenericMusicMiscLongLatency = 9
kGenericMusicMiscLongFlags = 10
kGenericMusicMiscLongFirstGMHW = 11
kGenericMusicMiscLongFirstGMDrumHW = 12
kGenericMusicMiscLongFirstUserHW = 13
kGenericMusicMiscLongLast = 14
kMusicGenericRange = 0x0100
kMusicDerivedRange = 0x0200
kGenericMusicDoMIDI = 1 << 0
kGenericMusicBank0 = 1 << 1
kGenericMusicBank32 = 1 << 2
kGenericMusicErsatzMIDI = 1 << 3
kGenericMusicCallKnobs = 1 << 4
kGenericMusicCallParts = 1 << 5
kGenericMusicCallInstrument = 1 << 6
kGenericMusicCallNumber = 1 << 7
kGenericMusicCallROMInstrument = 1 << 8
kGenericMusicAllDefaults = 1 << 9
kGetInstrumentInfoNoBuiltIn = 1 << 0
kGetInstrumentInfoMidiUserInst = 1 << 1
kGetInstrumentInfoNoIText = 1 << 2
kNoteRequestNoGM = 1
kNoteRequestNoSynthType = 2
kNoteRequestSynthMustMatch = 4
kNoteRequestSpecifyMIDIChannel = 0x80
kPickDontMix = 1
kPickSameSynth = 2
kPickUserInsts = 4
kPickEditAllowEdit = 8
kPickEditAllowPick = 16
kPickEditSynthGlobal = 32
kPickEditControllers = 64
kNoteAllocatorComponentType = FOUR_CHAR_CODE('nota')
kNADummyOneSelect = 29
kNADummyTwoSelect = 30
kTuneQueueDepth = 8
kTunePlayerComponentType = FOUR_CHAR_CODE('tune')
kTuneStartNow = 1
kTuneDontClipNotes = 2
kTuneExcludeEdgeNotes = 4
kTuneQuickStart = 8
kTuneLoopUntil = 16
kTunePlayDifference = 32
kTunePlayConcurrent = 64
kTuneStartNewMaster = 16384
kTuneStopFade = 1
kTuneStopSustain = 2
kTuneStopInstant = 4
kTuneStopReleaseChannels = 8
kTuneMixMute = 1
kTuneMixSolo = 2
kRestEventType = 0x00000000
kNoteEventType = 0x00000001
kControlEventType = 0x00000002
kMarkerEventType = 0x00000003
kUndefined1EventType = 0x00000008
kXNoteEventType = 0x00000009
kXControlEventType = 0x0000000A
kKnobEventType = 0x0000000B
kUndefined2EventType = 0x0000000C
kUndefined3EventType = 0x0000000D
kUndefined4EventType = 0x0000000E
kGeneralEventType = 0x0000000F
kXEventLengthBits = 0x00000002
kGeneralEventLengthBits = 0x00000003
kEventLen = 1L
kXEventLen = 2L
kRestEventLen = kEventLen
kNoteEventLen = kEventLen
kControlEventLen = kEventLen
kMarkerEventLen = kEventLen
kXNoteEventLen = kXEventLen
kXControlEventLen = kXEventLen
kGeneralEventLen = kXEventLen
kEventLengthFieldPos = 30
kEventLengthFieldWidth = 2
kEventTypeFieldPos = 29
kEventTypeFieldWidth = 3
kXEventTypeFieldPos = 28
kXEventTypeFieldWidth = 4
kEventPartFieldPos = 24
kEventPartFieldWidth = 5
kXEventPartFieldPos = 16
kXEventPartFieldWidth = 12
kRestEventDurationFieldPos = 0
kRestEventDurationFieldWidth = 24
kRestEventDurationMax = ((1L << kRestEventDurationFieldWidth) - 1)
kNoteEventPitchFieldPos = 18
kNoteEventPitchFieldWidth = 6
kNoteEventPitchOffset = 32
kNoteEventVolumeFieldPos = 11
kNoteEventVolumeFieldWidth = 7
kNoteEventVolumeOffset = 0
kNoteEventDurationFieldPos = 0
kNoteEventDurationFieldWidth = 11
kNoteEventDurationMax = ((1L << kNoteEventDurationFieldWidth) - 1)
kXNoteEventPitchFieldPos = 0
kXNoteEventPitchFieldWidth = 16
kXNoteEventDurationFieldPos = 0
kXNoteEventDurationFieldWidth = 22
kXNoteEventDurationMax = ((1L << kXNoteEventDurationFieldWidth) - 1)
kXNoteEventVolumeFieldPos = 22
kXNoteEventVolumeFieldWidth = 7
kControlEventControllerFieldPos = 16
kControlEventControllerFieldWidth = 8
kControlEventValueFieldPos = 0
kControlEventValueFieldWidth = 16
kXControlEventControllerFieldPos = 0
kXControlEventControllerFieldWidth = 16
kXControlEventValueFieldPos = 0
kXControlEventValueFieldWidth = 16
kKnobEventValueHighFieldPos = 0
kKnobEventValueHighFieldWidth = 16
kKnobEventKnobFieldPos = 16
kKnobEventKnobFieldWidth = 14
kKnobEventValueLowFieldPos = 0
kKnobEventValueLowFieldWidth = 16
kMarkerEventSubtypeFieldPos = 16
kMarkerEventSubtypeFieldWidth = 8
kMarkerEventValueFieldPos = 0
kMarkerEventValueFieldWidth = 16
kGeneralEventSubtypeFieldPos = 16
kGeneralEventSubtypeFieldWidth = 14
kGeneralEventLengthFieldPos = 0
kGeneralEventLengthFieldWidth = 16
kEndMarkerValue = 0x00000060
kEndMarkerValue = 0x60000000
# _ext = qtma_EXT(*lP
# ulen = (_ext < 2) ? 1 : 2
# ulen = (unsigned short)qtma_EXT(*lP
# ulen = lP[1]
# _ext = qtma_EXT(*lP
# ulen = (_ext < 2) ? 1 : 2
# ulen = (unsigned short)qtma_EXT(*lP
# ulen = lP[-1]
# x = (kRestEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# x = (kNoteEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# x = (kControlEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# x = (kMarkerEventType << kEventTypeFieldPos) \
# x = EndianU32_NtoB(x) )
# w1 = (kXNoteEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kXEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
# w1 = (kXControlEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kXEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
# w1 = (kKnobEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kXEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
# w1 = (kGeneralEventType << kXEventTypeFieldPos) \
# w1 = EndianU32_NtoB(w1)
# w2 = (kGeneralEventLengthBits << kEventLengthFieldPos) \
# w2 = EndianU32_NtoB(w2) )
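# Illustrative sketch only (not part of the original generated bindings): the
# commented-out fragments above are remnants of the C event-stuffing macros
# (qtma_StuffRestEvent and friends) that the header scanner could not
# translate. Using the field-position and field-width constants defined
# earlier, a plain note event word can be packed in pure Python; the helper
# name below is hypothetical and callers are expected to pass in-range values.
def _qtma_stuff_note_event(part, pitch, volume, duration):
    # Pack part, pitch, volume and duration into one 32-bit note event word
    # (host byte order; the original macros additionally byte-swap the result
    # with EndianU32_NtoB).
    return ((kNoteEventType << kEventTypeFieldPos) |
            (part << kEventPartFieldPos) |
            ((pitch - kNoteEventPitchOffset) << kNoteEventPitchFieldPos) |
            (volume << kNoteEventVolumeFieldPos) |
            (duration & kNoteEventDurationMax))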
kGeneralEventNoteRequest = 1
kGeneralEventPartKey = 4
kGeneralEventTuneDifference = 5
kGeneralEventAtomicInstrument = 6
kGeneralEventKnob = 7
kGeneralEventMIDIChannel = 8
kGeneralEventPartChange = 9
kGeneralEventNoOp = 10
kGeneralEventUsedNotes = 11
kGeneralEventPartMix = 12
kMarkerEventEnd = 0
kMarkerEventBeat = 1
kMarkerEventTempo = 2
kCurrentlyNativeEndian = 1
kCurrentlyNotNativeEndian = 2
kQTMIDIGetMIDIPortsSelect = 0x0001
kQTMIDIUseSendPortSelect = 0x0002
kQTMIDISendMIDISelect = 0x0003
kMusicGetDescriptionSelect = 0x0001
kMusicGetPartSelect = 0x0002
kMusicSetPartSelect = 0x0003
kMusicSetPartInstrumentNumberSelect = 0x0004
kMusicGetPartInstrumentNumberSelect = 0x0005
kMusicStorePartInstrumentSelect = 0x0006
kMusicGetPartAtomicInstrumentSelect = 0x0009
kMusicSetPartAtomicInstrumentSelect = 0x000A
kMusicGetPartKnobSelect = 0x0010
kMusicSetPartKnobSelect = 0x0011
kMusicGetKnobSelect = 0x0012
kMusicSetKnobSelect = 0x0013
kMusicGetPartNameSelect = 0x0014
kMusicSetPartNameSelect = 0x0015
kMusicFindToneSelect = 0x0016
kMusicPlayNoteSelect = 0x0017
kMusicResetPartSelect = 0x0018
kMusicSetPartControllerSelect = 0x0019
kMusicGetPartControllerSelect = 0x001A
kMusicGetMIDIProcSelect = 0x001B
kMusicSetMIDIProcSelect = 0x001C
kMusicGetInstrumentNamesSelect = 0x001D
kMusicGetDrumNamesSelect = 0x001E
kMusicGetMasterTuneSelect = 0x001F
kMusicSetMasterTuneSelect = 0x0020
kMusicGetInstrumentAboutInfoSelect = 0x0022
kMusicGetDeviceConnectionSelect = 0x0023
kMusicUseDeviceConnectionSelect = 0x0024
kMusicGetKnobSettingStringsSelect = 0x0025
kMusicGetMIDIPortsSelect = 0x0026
kMusicSendMIDISelect = 0x0027
kMusicStartOfflineSelect = 0x0029
kMusicSetOfflineTimeToSelect = 0x002A
kMusicGetInstrumentKnobDescriptionSelect = 0x002B
kMusicGetDrumKnobDescriptionSelect = 0x002C
kMusicGetKnobDescriptionSelect = 0x002D
kMusicGetInfoTextSelect = 0x002E
kMusicGetInstrumentInfoSelect = 0x002F
kMusicTaskSelect = 0x0031
kMusicSetPartInstrumentNumberInterruptSafeSelect = 0x0032
kMusicSetPartSoundLocalizationSelect = 0x0033
kMusicGenericConfigureSelect = 0x0100
kMusicGenericGetPartSelect = 0x0101
kMusicGenericGetKnobListSelect = 0x0102
kMusicGenericSetResourceNumbersSelect = 0x0103
kMusicDerivedMIDISendSelect = 0x0200
kMusicDerivedSetKnobSelect = 0x0201
kMusicDerivedSetPartSelect = 0x0202
kMusicDerivedSetInstrumentSelect = 0x0203
kMusicDerivedSetPartInstrumentNumberSelect = 0x0204
kMusicDerivedSetMIDISelect = 0x0205
kMusicDerivedStorePartInstrumentSelect = 0x0206
kMusicDerivedOpenResFileSelect = 0x0207
kMusicDerivedCloseResFileSelect = 0x0208
kNARegisterMusicDeviceSelect = 0x0000
kNAUnregisterMusicDeviceSelect = 0x0001
kNAGetRegisteredMusicDeviceSelect = 0x0002
kNASaveMusicConfigurationSelect = 0x0003
kNANewNoteChannelSelect = 0x0004
kNADisposeNoteChannelSelect = 0x0005
kNAGetNoteChannelInfoSelect = 0x0006
kNAPrerollNoteChannelSelect = 0x0007
kNAUnrollNoteChannelSelect = 0x0008
kNASetNoteChannelVolumeSelect = 0x000B
kNAResetNoteChannelSelect = 0x000C
kNAPlayNoteSelect = 0x000D
kNASetControllerSelect = 0x000E
kNASetKnobSelect = 0x000F
kNAFindNoteChannelToneSelect = 0x0010
kNASetInstrumentNumberSelect = 0x0011
kNAPickInstrumentSelect = 0x0012
kNAPickArrangementSelect = 0x0013
kNAStuffToneDescriptionSelect = 0x001B
kNACopyrightDialogSelect = 0x001C
kNAGetIndNoteChannelSelect = 0x001F
kNAGetMIDIPortsSelect = 0x0021
kNAGetNoteRequestSelect = 0x0022
kNASendMIDISelect = 0x0023
kNAPickEditInstrumentSelect = 0x0024
kNANewNoteChannelFromAtomicInstrumentSelect = 0x0025
kNASetAtomicInstrumentSelect = 0x0026
kNAGetKnobSelect = 0x0028
kNATaskSelect = 0x0029
kNASetNoteChannelBalanceSelect = 0x002A
kNASetInstrumentNumberInterruptSafeSelect = 0x002B
kNASetNoteChannelSoundLocalizationSelect = 0x002C
kNAGetControllerSelect = 0x002D
kTuneSetHeaderSelect = 0x0004
kTuneGetTimeBaseSelect = 0x0005
kTuneSetTimeScaleSelect = 0x0006
kTuneGetTimeScaleSelect = 0x0007
kTuneGetIndexedNoteChannelSelect = 0x0008
kTuneQueueSelect = 0x000A
kTuneInstantSelect = 0x000B
kTuneGetStatusSelect = 0x000C
kTuneStopSelect = 0x000D
kTuneSetVolumeSelect = 0x0010
kTuneGetVolumeSelect = 0x0011
kTunePrerollSelect = 0x0012
kTuneUnrollSelect = 0x0013
kTuneSetNoteChannelsSelect = 0x0014
kTuneSetPartTransposeSelect = 0x0015
kTuneGetNoteAllocatorSelect = 0x0017
kTuneSetSofterSelect = 0x0018
kTuneTaskSelect = 0x0019
kTuneSetBalanceSelect = 0x001A
kTuneSetSoundLocalizationSelect = 0x001B
kTuneSetHeaderWithSizeSelect = 0x001C
kTuneSetPartMixSelect = 0x001D
kTuneGetPartMixSelect = 0x001E
| lgpl-2.1 |
dang03/son-cli | src/son/validate/util.py | 5 | 4770 | # Copyright (c) 2015 SONATA-NFV, UBIWHERE
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, UBIWHERE
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
import os
import yaml
import logging
from son.validate import event
log = logging.getLogger(__name__)
evtlog = event.get_logger('validator.events')
def read_descriptor_files(files):
"""
Loads the VNF descriptors provided in the file list. It builds a
    dictionary of the loaded descriptor files. Each entry is keyed
    by the VNF combo ID, in the format 'vendor.name.version'.
:param files: filename list of descriptors
:return: Dictionary of descriptors. None if unsuccessful.
"""
descriptors = {}
for file in files:
content = read_descriptor_file(file)
if not content:
continue
did = descriptor_id(content)
if did in descriptors.keys():
log.error("Duplicate descriptor in files: '{0}' <==> '{1}'"
.format(file, descriptors[did]))
continue
descriptors[did] = file
return descriptors
def read_descriptor_file(file):
"""
Reads a SONATA descriptor from a file.
:param file: descriptor filename
:return: descriptor dictionary
"""
with open(file, 'r') as _file:
try:
descriptor = yaml.load(_file)
except yaml.YAMLError as exc:
evtlog.log("Invalid descriptor",
"Error parsing descriptor file: {0}".format(exc),
file,
'evt_invalid_descriptor')
return
if not descriptor:
evtlog.log("Invalid descriptor",
"Couldn't read descriptor file: '{0}'".format(file),
file,
'evt_invalid_descriptor')
return
if 'vendor' not in descriptor or \
'name' not in descriptor or \
'version' not in descriptor:
log.warning("Invalid SONATA descriptor file: '{0}'. Missing "
"'vendor', 'name' or 'version'. Ignoring."
.format(file))
return
return descriptor
def descriptor_id(descriptor):
"""
Provides the descriptor id of the specified descriptor content
:param descriptor: descriptor content dict
:return: descriptor id
"""
return build_descriptor_id(descriptor['vendor'],
descriptor['name'],
descriptor['version'])
def build_descriptor_id(vendor, name, version):
"""
Assemble the descriptor id based on its vendor, name and version.
:param vendor: descriptor vendor
:param name: descriptor name
:param version: descriptor version
:return: descriptor id
"""
return vendor + '.' + name + '.' + version
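# Illustrative usage (hypothetical values, not part of the original module):
#
#   build_descriptor_id('eu.sonata-nfv', 'sonata-demo', '0.2')
#   # -> 'eu.sonata-nfv.sonata-demo.0.2'
#
# which is the key format used by read_descriptor_files() above.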
def list_files(path, extension):
"""
Retrieves a list of files with the specified extension in a given
directory path.
:param path: directory to search for files
:param extension: extension of files
:return: list of files
"""
file_list = []
for root, dirs, files in os.walk(path):
for file in files:
if file.endswith(extension):
file_list.append(os.path.join(root, file))
return file_list
def strip_root(path):
"""
Remove leading slash of a path
"""
if type(path) is not str:
return path
return path[1:] if path[0] == '/' else path
class CountCalls(object):
"""Decorator to determine number of calls for a method"""
def __init__(self, method):
self.method = method
self.counter = 0
def __call__(self, *args, **kwargs):
self.counter += 1
return self.method(*args, **kwargs)
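# Illustrative usage (not part of the original module): CountCalls can wrap a
# plain function and expose how many times it has been invoked, e.g.
#
#   @CountCalls
#   def ping():
#       return 'pong'
#
#   ping(); ping()
#   ping.counter  # -> 2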
| apache-2.0 |
tobes/py3status | py3status/modules/gpmdp.py | 2 | 2162 | # -*- coding: utf-8 -*-
"""
Display song currently playing in Google Play Music Desktop Player.
Configuration parameters:
cache_timeout: refresh interval for this module (default 5)
format: specify the items and ordering of the data in the status bar.
These area 1:1 match to gpmdp-remote's options
(default '♫ {info}')
Format placeholders:
{info} Print info about now playing song
{title} Print current song title
{artist} Print current song artist
{album} Print current song album
{album_art} Print current song album art URL
{time_current} Print current song time in milliseconds
{time_total} Print total song time in milliseconds
{status} Print whether GPMDP is paused or playing
{current} Print now playing song in "artist - song" format
Requires:
gpmdp: https://www.googleplaymusicdesktopplayer.com/
gpmdp-remote: https://github.com/iandrewt/gpmdp-remote
@author Aaron Fields https://twitter.com/spirotot
@license BSD
SAMPLE OUTPUT
{'full_text': '♫ Now Playing: The Show Goes On by Lupe Fiasco'}
"""
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = u'♫ {info}'
def gpmdp(self):
def _run_cmd(cmd):
return self.py3.command_output(['gpmdp-remote', cmd]).strip()
full_text = ''
if _run_cmd('status') == 'Playing':
cmds = ['info', 'title', 'artist', 'album', 'status', 'current',
'time_total', 'time_current', 'album_art']
data = {}
for cmd in cmds:
if self.py3.format_contains(self.format, '{0}'.format(cmd)):
data[cmd] = _run_cmd(cmd)
full_text = self.py3.safe_format(self.format, data)
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': full_text
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| bsd-3-clause |
UTDS16/collab-texter | ctxt/client/resources_rc.py | 1 | 19028 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.7.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x05\x88\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xff\x61\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x69\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x30\x2d\x63\x30\x36\x30\x20\x36\x31\
\x2e\x31\x33\x34\x37\x37\x37\x2c\x20\x32\x30\x31\x30\x2f\x30\x32\
\x2f\x31\x32\x2d\x31\x37\x3a\x33\x32\x3a\x30\x30\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x52\x69\x67\x68\x74\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x72\x69\x67\x68\x74\x73\x2f\x22\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\
\x3a\x73\x74\x52\x65\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\
\x63\x65\x52\x65\x66\x23\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\
\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x70\x52\x69\x67\x68\x74\x73\x3a\x4d\x61\x72\x6b\x65\
\x64\x3d\x22\x46\x61\x6c\x73\x65\x22\x20\x78\x6d\x70\x4d\x4d\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\
\x64\x69\x64\x3a\x41\x36\x32\x31\x39\x37\x43\x46\x36\x41\x30\x38\
\x31\x31\x44\x46\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\
\x30\x36\x46\x41\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x41\x36\x32\x31\x39\x37\x43\x45\x36\x41\x30\x38\x31\x31\x44\x46\
\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\x30\x36\x46\x41\
\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\
\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\
\x6f\x70\x20\x43\x53\x33\x20\x57\x69\x6e\x64\x6f\x77\x73\x22\x3e\
\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\
\x72\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\
\x63\x65\x49\x44\x3d\x22\x75\x75\x69\x64\x3a\x41\x43\x31\x46\x32\
\x45\x38\x33\x33\x32\x34\x41\x44\x46\x31\x31\x41\x41\x42\x38\x43\
\x35\x33\x39\x30\x44\x38\x35\x42\x35\x42\x33\x22\x20\x73\x74\x52\
\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x75\
\x75\x69\x64\x3a\x43\x39\x44\x33\x34\x39\x36\x36\x34\x41\x33\x43\
\x44\x44\x31\x31\x42\x30\x38\x41\x42\x42\x42\x43\x46\x46\x31\x37\
\x32\x31\x35\x36\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\
\x3d\x22\x72\x22\x3f\x3e\xa6\x19\x69\x4a\x00\x00\x01\xb5\x49\x44\
\x41\x54\x78\xda\x94\xd3\xcf\x4b\x02\x41\x14\x07\xf0\x67\xa6\x46\
\x10\x11\x45\x08\x5d\x0a\x3a\x48\x14\x66\x24\x1d\x3b\xc5\x6a\x3f\
\x50\x0f\x46\x60\x52\x68\x7a\x13\x21\xeb\x10\x75\xe8\x58\xa7\xfe\
\x0b\xa1\x83\x78\xc8\xe8\xec\xc9\x8b\x10\x96\x75\x88\xec\x87\x12\
\x68\x20\x7a\x08\x35\x74\x9d\xbe\x23\x53\x08\x65\x5b\x03\x1f\x66\
\xf6\xed\xee\xdb\xb7\x6f\x67\xd5\xf4\xbf\x61\x81\x53\xc8\xc2\xb8\
\x58\xff\x79\x58\xe1\x32\x18\x0c\x32\x3e\x0b\xd6\x2e\x85\x9b\x16\
\xe1\x4a\x38\x0e\x04\x02\xd3\xc5\x62\x91\xbc\x5e\xef\x34\x8e\xd5\
\xa0\x52\x29\x24\xb8\x09\x85\x42\x13\x95\x4a\x85\xaa\xd5\x6a\x4b\
\xad\x56\x23\x59\x96\x49\xa7\xd3\x51\x24\x12\xb9\x55\x2b\x24\xc8\
\x26\x12\x89\x59\xb3\xd9\x3c\x58\x2a\x95\x88\x27\xaa\xd7\xeb\xa4\
\xd5\x6a\x29\x1a\x8d\xde\xe1\xfc\xae\x52\x02\x7e\xd1\x73\x32\x99\
\x9c\x33\x99\x4c\x03\xbc\x02\x15\x8a\x8e\xc5\x62\x0f\x88\x6f\x43\
\xac\x53\x0f\x26\x61\x5f\xac\xcf\xf8\xc5\xe1\x70\x38\xcb\x9f\xcc\
\x18\xef\x21\x75\x8b\xf8\xf7\x2e\xc3\x16\x1c\x88\x24\xed\xc3\x01\
\x79\x49\x92\x78\x86\xfc\x4f\x4f\x4d\xf9\xfd\x7e\x7e\x32\x05\xbe\
\x0e\x95\xad\x42\x55\xcc\xdf\x46\xda\x66\xb3\x33\xa7\x73\x95\x27\
\x49\xc3\x92\x88\x4f\xc1\x61\xa7\x26\x7d\x36\x71\x19\x24\xbd\x7e\
\x64\xe8\xe5\x25\x4f\x46\xa3\x71\x38\x93\xb9\x9f\x41\x4c\x86\x39\
\x88\xc0\xeb\x4f\x09\x78\x13\x57\xe0\xc4\xed\xde\x34\x34\x1a\x0c\
\x5d\xee\xa6\x72\xf9\x8d\xe6\xe7\x17\x0c\x88\x87\xa0\x00\xd7\x9d\
\x2a\xe0\x1b\xe9\xd1\xe3\xf1\x8d\xe6\x72\x05\x6c\x94\xf7\x56\x50\
\xa3\xe1\x4d\x66\xd4\x6c\x32\x8a\xc7\x2f\x9e\x70\x30\xf6\xdb\xb7\
\xb6\x43\xce\xe5\xf2\x32\x8b\xc5\xc9\x1c\x8e\x75\xde\x83\x5c\x1b\
\xbb\xd2\x4f\xe2\x12\x65\x16\xd6\xd6\x7c\x4c\xac\x37\xa0\x1f\xfa\
\xa0\x17\x7a\x40\x2b\xbe\xbf\x5a\x54\xfe\xf5\x0a\x35\xd8\x6b\xd5\
\x4c\x74\x04\x3b\x70\x0e\x0d\xa1\xde\x36\xcb\x6d\x9a\x3c\xc1\x87\
\x00\x03\x00\xd2\x66\x9b\xc8\xef\x51\x79\x01\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x5a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xff\x61\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x69\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x30\x2d\x63\x30\x36\x30\x20\x36\x31\
\x2e\x31\x33\x34\x37\x37\x37\x2c\x20\x32\x30\x31\x30\x2f\x30\x32\
\x2f\x31\x32\x2d\x31\x37\x3a\x33\x32\x3a\x30\x30\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x52\x69\x67\x68\x74\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x72\x69\x67\x68\x74\x73\x2f\x22\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\
\x3a\x73\x74\x52\x65\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\
\x63\x65\x52\x65\x66\x23\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\
\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x70\x52\x69\x67\x68\x74\x73\x3a\x4d\x61\x72\x6b\x65\
\x64\x3d\x22\x46\x61\x6c\x73\x65\x22\x20\x78\x6d\x70\x4d\x4d\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\
\x64\x69\x64\x3a\x41\x36\x32\x31\x39\x37\x43\x42\x36\x41\x30\x38\
\x31\x31\x44\x46\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\
\x30\x36\x46\x41\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x41\x36\x32\x31\x39\x37\x43\x41\x36\x41\x30\x38\x31\x31\x44\x46\
\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\x30\x36\x46\x41\
\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\
\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\
\x6f\x70\x20\x43\x53\x33\x20\x57\x69\x6e\x64\x6f\x77\x73\x22\x3e\
\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\
\x72\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\
\x63\x65\x49\x44\x3d\x22\x75\x75\x69\x64\x3a\x41\x43\x31\x46\x32\
\x45\x38\x33\x33\x32\x34\x41\x44\x46\x31\x31\x41\x41\x42\x38\x43\
\x35\x33\x39\x30\x44\x38\x35\x42\x35\x42\x33\x22\x20\x73\x74\x52\
\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x75\
\x75\x69\x64\x3a\x43\x39\x44\x33\x34\x39\x36\x36\x34\x41\x33\x43\
\x44\x44\x31\x31\x42\x30\x38\x41\x42\x42\x42\x43\x46\x46\x31\x37\
\x32\x31\x35\x36\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\
\x3d\x22\x72\x22\x3f\x3e\xf9\x57\x69\x37\x00\x00\x01\x87\x49\x44\
\x41\x54\x78\xda\x9c\xd2\x41\x4b\x02\x41\x14\x00\xe0\xb7\x96\x19\
\x4b\x4b\x44\x8a\xd8\x25\xb0\xa4\x43\xae\x7f\xc1\x10\x84\x2d\x74\
\xad\x24\xa5\x6d\x0b\x22\xc8\x50\x7f\x40\x7f\xa1\x5b\xff\xa2\x6e\
\x12\x75\x0a\x84\xa4\x83\xa7\x8a\x08\x11\x6f\x8b\x20\x2c\xd1\x41\
\x51\x56\x49\x73\x7a\x13\x63\xc8\x92\xb6\x38\xf0\xb1\x6f\x67\xdf\
\x1b\x66\xde\x0e\xc0\xe4\x43\x42\x4f\x93\x16\x6f\xa2\x67\xb6\x88\
\xe5\x82\x17\xf6\x1c\x8e\x2d\x8d\x2d\xf4\x9a\xc9\x64\x08\x7d\x32\
\xbf\xc5\xb6\x31\x45\x6f\xcc\x45\x2a\x95\x0a\xe8\xba\x0e\xaa\xaa\
\x06\x58\x0d\x37\x48\xe4\x46\x14\x6b\xd9\x6c\xb6\x64\x18\x06\xb4\
\x5a\x2d\x68\xb7\xdb\xd0\xe9\x74\x80\x10\x02\x82\x20\x40\x2e\x97\
\x2b\x61\x8e\x7f\xd4\x96\xcb\xe9\x74\x9a\x6e\x77\x3d\x91\x48\x10\
\xbf\x5f\x24\x1e\x8f\x87\x38\x9d\x2e\x22\x8a\x22\x9d\x2f\xa3\xc8\
\xb8\x33\x57\x22\x91\x28\x91\x65\x99\x26\x27\x83\xc1\x0d\xe2\xf5\
\xae\x10\x9f\x6f\x8d\xbe\x57\xcc\xc5\xe6\x1e\x44\xe9\x5c\xbd\xde\
\x04\x4d\xab\x41\x38\x2c\x5d\x17\x0a\x0f\xfb\x6e\xf7\x12\xf0\xfc\
\xdc\x20\xff\x6e\xd4\x02\x32\xba\x4c\x26\x0f\x56\xfb\x7d\x0e\x6c\
\x36\x3b\x34\x1a\x06\x84\x42\xd2\x55\xb1\xf8\x28\xf2\xbc\xf0\x67\
\xd3\x87\x9b\xa8\xa9\xea\xf1\xb2\xae\x7f\x60\xc3\x3e\x7f\x26\x1c\
\x0e\x3b\x70\x1c\x87\xcd\x03\xc8\xe7\x6f\xe9\x1f\xf0\xa2\x9b\x51\
\x67\x8f\xa1\xaa\xa2\x9c\x10\x49\xda\x23\xf1\xf8\x21\x3d\x73\x75\
\x48\xec\xbf\x0b\xb3\x83\x6a\x94\xa2\x9c\x12\x16\x6f\x5b\xbd\x6d\
\x71\xf4\x8e\x76\x99\x41\x6c\x69\x1c\xa1\x26\x3a\x43\x2e\xb4\x88\
\x16\xd0\x3c\xa2\x9d\xe3\xd1\x2c\x9a\x41\xd3\x68\xca\x7c\x13\xbb\
\xe8\x1c\xdd\xa3\x2f\xd4\x63\x73\x3d\x53\xdc\x65\xdf\x07\xfa\x74\
\x81\x6f\x01\x06\x00\x5c\x52\x83\xd4\xd9\xb0\x72\xcb\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\xf0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xff\x61\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x69\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x30\x2d\x63\x30\x36\x30\x20\x36\x31\
\x2e\x31\x33\x34\x37\x37\x37\x2c\x20\x32\x30\x31\x30\x2f\x30\x32\
\x2f\x31\x32\x2d\x31\x37\x3a\x33\x32\x3a\x30\x30\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x52\x69\x67\x68\x74\x73\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x72\x69\x67\x68\x74\x73\x2f\x22\x20\x78\x6d\x6c\
\x6e\x73\x3a\x78\x6d\x70\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\
\x70\x2f\x31\x2e\x30\x2f\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\
\x3a\x73\x74\x52\x65\x66\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\
\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\
\x31\x2e\x30\x2f\x73\x54\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\
\x63\x65\x52\x65\x66\x23\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\
\x70\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\
\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\
\x20\x78\x6d\x70\x52\x69\x67\x68\x74\x73\x3a\x4d\x61\x72\x6b\x65\
\x64\x3d\x22\x46\x61\x6c\x73\x65\x22\x20\x78\x6d\x70\x4d\x4d\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\
\x64\x69\x64\x3a\x41\x36\x32\x31\x39\x37\x44\x33\x36\x41\x30\x38\
\x31\x31\x44\x46\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\
\x30\x36\x46\x41\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\
\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\
\x41\x36\x32\x31\x39\x37\x44\x32\x36\x41\x30\x38\x31\x31\x44\x46\
\x42\x35\x33\x44\x41\x46\x41\x32\x44\x31\x41\x33\x30\x36\x46\x41\
\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\
\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\
\x6f\x70\x20\x43\x53\x33\x20\x57\x69\x6e\x64\x6f\x77\x73\x22\x3e\
\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\
\x72\x6f\x6d\x20\x73\x74\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\
\x63\x65\x49\x44\x3d\x22\x75\x75\x69\x64\x3a\x41\x43\x31\x46\x32\
\x45\x38\x33\x33\x32\x34\x41\x44\x46\x31\x31\x41\x41\x42\x38\x43\
\x35\x33\x39\x30\x44\x38\x35\x42\x35\x42\x33\x22\x20\x73\x74\x52\
\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x75\
\x75\x69\x64\x3a\x43\x39\x44\x33\x34\x39\x36\x36\x34\x41\x33\x43\
\x44\x44\x31\x31\x42\x30\x38\x41\x42\x42\x42\x43\x46\x46\x31\x37\
\x32\x31\x35\x36\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\x65\x74\
\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\x6e\x64\
\x3d\x22\x72\x22\x3f\x3e\xa9\x98\xa9\xe8\x00\x00\x02\x1d\x49\x44\
\x41\x54\x78\xda\x94\x92\x4d\x68\x13\x41\x18\x86\xdf\x24\xed\xae\
\x25\x41\x23\x1e\x54\xd0\x5b\x91\xda\x2a\x34\x60\xf0\xd8\x8b\x76\
\x8d\x3f\xd4\x1f\x52\x90\x52\xb0\x8d\x8d\xa7\x10\x6b\xec\x41\x94\
\x22\xf5\xa2\x20\x88\x3f\x97\x22\xf6\x64\x03\x8a\x21\x07\x23\xa5\
\x01\xcf\xb9\x44\xa4\x5a\x3d\x78\x10\x0c\x1e\xd2\xc0\x36\xb1\x6a\
\x12\xdd\x8d\xeb\xfb\x99\x39\x04\xb5\x44\x07\x9e\x9d\xd9\x6f\xe6\
\x7b\xe7\x9b\x77\xc6\x83\xff\x6b\x87\xc8\x23\x52\x20\xdd\x6a\xfc\
\xcf\x2d\x44\x5e\xc6\xe3\x71\x47\x7a\x45\xc8\xdd\x26\xe9\x30\x79\
\xa5\xb8\x11\x8b\xc5\xfa\x4d\xd3\x44\x24\x12\xe9\xe7\xbf\x87\xb8\
\x5c\x6d\x04\xde\x24\x12\x89\xde\x6a\xb5\x8a\x5a\xad\xf6\x8b\x7a\
\xbd\x8e\x46\xa3\x01\x5d\xd7\x91\x4a\xa5\xde\x7a\xda\x08\x14\x72\
\xb9\xdc\xbe\x60\x30\xb8\xa5\x5c\x2e\x43\x84\x2c\xcb\x82\xa6\x69\
\x48\xa7\xd3\xef\x38\x3f\xd5\x4e\x40\x16\x7d\xc8\xe7\xf3\xfb\x03\
\x81\xc0\x66\xa9\xc0\xc5\xa2\x33\x99\xcc\x7b\xc6\x2f\x90\xcc\x7a\
\x1e\xec\x21\x97\xd5\xf8\xa9\x2c\x4e\x26\x93\x05\xd9\xd9\x71\xc4\
\x43\x74\xa8\xf8\x9f\x2e\x93\xb3\xe4\x8a\x12\x69\x6d\x27\x48\xd1\
\x30\x0c\x51\x28\xfe\x6d\xd7\xa5\x68\x34\x2a\x93\x4b\x64\x62\x9d\
\xca\x86\x49\x4d\xfa\x3e\x7e\x4e\xfd\x36\xb9\x3c\x34\x74\xdc\x09\
\x87\x87\x45\x64\x99\x1c\x51\xf1\xbd\xe4\x6a\xaf\x7c\x48\x58\xf5\
\x8f\x49\x52\x9d\x45\xda\x51\xb9\xd7\x4a\xe5\x33\x2a\x95\x4f\x18\
\x1c\x0c\xf5\x65\xb3\x0b\x37\x19\xdb\x49\xb6\x93\x27\xdc\xcd\xb7\
\x1b\x38\x47\x0c\x49\xe0\x2e\x8b\xec\x66\xc5\xc4\x63\xe4\xd6\xe8\
\xe8\x99\x1e\xdb\x76\xe8\x72\x07\x45\xbe\x60\x60\xe0\x60\x0f\xe3\
\x09\xb2\x72\x12\x78\xbd\x8b\xc9\xdf\x81\xd3\xdf\x80\x7b\x16\x30\
\xc6\xf1\xd6\x06\x63\x22\x70\x67\x7c\x7c\xa2\xbb\x58\x5c\x65\x72\
\x27\x7c\xbe\x4d\xf0\x7a\x37\xc2\xed\xee\xa4\x48\x48\xde\xfb\x94\
\xd5\x3c\xb8\x51\x07\x66\xc8\xf4\x1a\xb0\x8d\x42\xb7\x25\x26\x47\
\x98\x9c\x9b\xbb\x7f\x77\x64\x24\xb2\xc3\x34\xd7\xd0\xd5\xa5\xf3\
\x91\x3c\xfc\xd8\xe2\xcd\xa4\xad\x9c\xfb\x01\xbc\x40\x73\xd7\x59\
\x56\x30\x23\xcf\x58\x2a\xf0\x12\x6d\x7e\xfe\x41\xc9\xef\xf7\x4b\
\x72\xa9\xe5\x1a\xc5\xbb\xec\x02\xb0\x61\x05\x78\x5e\x05\xce\xf3\
\xfe\x4a\xab\xc0\x35\x8e\xa7\xbf\xd2\x07\x11\x61\x55\xb8\xd4\xf4\
\x05\xd7\xc9\x45\xf2\x8c\xd8\x0a\x39\x81\x4d\x27\x35\x9a\x35\xa6\
\x03\x07\xd4\xda\x45\x3f\x2b\xf9\x29\xc0\x00\x30\x59\xbc\x44\x5c\
\x68\x4c\x23\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x0a\
\x04\xb7\xe4\xfe\
\x00\x63\
\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x69\x00\x6f\x00\x6e\
\x00\x0a\
\x0a\x68\x2a\x84\
\x00\x64\
\x00\x69\x00\x73\x00\x63\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\
\x00\x07\
\x0a\x65\x4b\x64\
\x00\x63\
\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\
\x00\x05\
\x00\x6c\x99\x62\
\x00\x65\
\x00\x72\x00\x72\x00\x6f\x00\x72\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00\x02\
\x00\x00\x00\x48\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xea\
\x00\x00\x00\x34\x00\x00\x00\x00\x00\x01\x00\x00\x05\x8c\
\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| mit |
jmcarbo/openerp7 | openerp/addons/mrp_byproduct/mrp_byproduct.py | 29 | 8840 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv import osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
class mrp_subproduct(osv.osv):
_name = 'mrp.subproduct'
_description = 'Byproduct'
_columns={
'product_id': fields.many2one('product.product', 'Product', required=True),
'product_qty': fields.float('Product Qty', digits_compute=dp.get_precision('Product Unit of Measure'), required=True),
'product_uom': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
'subproduct_type': fields.selection([('fixed','Fixed'),('variable','Variable')], 'Quantity Type', required=True, help="Define how the quantity of byproducts will be set on the production orders using this BoM.\
'Fixed' depicts a situation where the quantity of created byproduct is always equal to the quantity set on the BoM, regardless of how many are created in the production order.\
 By contrast, 'Variable' means that the quantity will be computed as\
'(quantity of byproduct set on the BoM / quantity of manufactured product set on the BoM * quantity of manufactured product in the production order.)'"),
'bom_id': fields.many2one('mrp.bom', 'BoM'),
}
_defaults={
'subproduct_type': 'variable',
'product_qty': lambda *a: 1.0,
}
def onchange_product_id(self, cr, uid, ids, product_id, context=None):
""" Changes UoM if product_id changes.
@param product_id: Changed product_id
@return: Dictionary of changed values
"""
if product_id:
prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
v = {'product_uom': prod.uom_id.id}
return {'value': v}
return {}
def onchange_uom(self, cr, uid, ids, product_id, product_uom, context=None):
res = {'value':{}}
if not product_uom or not product_id:
return res
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
uom = self.pool.get('product.uom').browse(cr, uid, product_uom, context=context)
if uom.category_id.id != product.uom_id.category_id.id:
res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
res['value'].update({'product_uom': product.uom_id.id})
return res
mrp_subproduct()
class mrp_bom(osv.osv):
_name = 'mrp.bom'
_description = 'Bill of Material'
_inherit='mrp.bom'
_columns={
'sub_products':fields.one2many('mrp.subproduct', 'bom_id', 'Byproducts'),
}
mrp_bom()
class mrp_production(osv.osv):
_description = 'Production'
_inherit= 'mrp.production'
def action_confirm(self, cr, uid, ids, context=None):
""" Confirms production order and calculates quantity based on subproduct_type.
@return: Newly generated picking Id.
"""
picking_id = super(mrp_production,self).action_confirm(cr, uid, ids, context=context)
product_uom_obj = self.pool.get('product.uom')
for production in self.browse(cr, uid, ids):
source = production.product_id.property_stock_production.id
if not production.bom_id:
continue
for sub_product in production.bom_id.sub_products:
product_uom_factor = product_uom_obj._compute_qty(cr, uid, production.product_uom.id, production.product_qty, production.bom_id.product_uom.id)
qty1 = sub_product.product_qty
qty2 = production.product_uos and production.product_uos_qty or False
product_uos_factor = 0.0
if qty2 and production.bom_id.product_uos.id:
product_uos_factor = product_uom_obj._compute_qty(cr, uid, production.product_uos.id, production.product_uos_qty, production.bom_id.product_uos.id)
if sub_product.subproduct_type == 'variable':
if production.product_qty:
qty1 *= product_uom_factor / (production.bom_id.product_qty or 1.0)
if production.product_uos_qty:
qty2 *= product_uos_factor / (production.bom_id.product_uos_qty or 1.0)
data = {
'name': 'PROD:'+production.name,
'date': production.date_planned,
'product_id': sub_product.product_id.id,
'product_qty': qty1,
'product_uom': sub_product.product_uom.id,
'product_uos_qty': qty2,
'product_uos': production.product_uos and production.product_uos.id or False,
'location_id': source,
'location_dest_id': production.location_dest_id.id,
'move_dest_id': production.move_prod_id.id,
'state': 'waiting',
'production_id': production.id
}
self.pool.get('stock.move').create(cr, uid, data)
return picking_id
def _get_subproduct_factor(self, cr, uid, production_id, move_id=None, context=None):
"""Compute the factor to compute the qty of procucts to produce for the given production_id. By default,
it's always equal to the quantity encoded in the production order or the production wizard, but with
the module mrp_byproduct installed it can differ for byproducts having type 'variable'.
:param production_id: ID of the mrp.order
:param move_id: ID of the stock move that needs to be produced. Identify the product to produce.
:return: The factor to apply to the quantity that we should produce for the given production order and stock move.
"""
sub_obj = self.pool.get('mrp.subproduct')
move_obj = self.pool.get('stock.move')
production_obj = self.pool.get('mrp.production')
production_browse = production_obj.browse(cr, uid, production_id, context=context)
move_browse = move_obj.browse(cr, uid, move_id, context=context)
subproduct_factor = 1
sub_id = sub_obj.search(cr, uid,[('product_id', '=', move_browse.product_id.id),('bom_id', '=', production_browse.bom_id.id), ('subproduct_type', '=', 'variable')], context=context)
if sub_id:
subproduct_record = sub_obj.browse(cr ,uid, sub_id[0], context=context)
if subproduct_record.bom_id.product_qty:
subproduct_factor = subproduct_record.product_qty / subproduct_record.bom_id.product_qty
return subproduct_factor
return super(mrp_production, self)._get_subproduct_factor(cr, uid, production_id, move_id, context=context)
mrp_production()
class change_production_qty(osv.osv_memory):
_inherit = 'change.production.qty'
def _update_product_to_produce(self, cr, uid, prod, qty, context=None):
bom_obj = self.pool.get('mrp.bom')
move_lines_obj = self.pool.get('stock.move')
prod_obj = self.pool.get('mrp.production')
for m in prod.move_created_ids:
if m.product_id.id == prod.product_id.id:
move_lines_obj.write(cr, uid, [m.id], {'product_qty': qty})
else:
for sub_product_line in prod.bom_id.sub_products:
if sub_product_line.product_id.id == m.product_id.id:
factor = prod_obj._get_subproduct_factor(cr, uid, prod.id, m.id, context=context)
subproduct_qty = sub_product_line.subproduct_type == 'variable' and qty * factor or sub_product_line.product_qty
move_lines_obj.write(cr, uid, [m.id], {'product_qty': subproduct_qty})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
google/grr | grr/server/grr_response_server/bin/config_updater_util.py | 1 | 44739 | #!/usr/bin/env python
"""Utilities for modifying the GRR server configuration."""
import argparse
import getpass
import os
import re
import shutil
import socket
import subprocess
import sys
import time
from typing import Optional, Text, Generator
from urllib import parse as urlparse
import MySQLdb
from MySQLdb.constants import CR as mysql_conn_errors
from MySQLdb.constants import ER as general_mysql_errors
import pkg_resources
# pylint: disable=unused-import,g-bad-import-order
from grr_response_server import server_plugins
# pylint: enable=g-bad-import-order,unused-import
from google.protobuf import text_format
from grr_api_client import errors as api_errors
from grr_api_client import root as api_root
from grr_response_client_builder import repacking
from grr_response_core import config as grr_config
from grr_response_core.lib.util import compatibility
from grr_response_server import maintenance_utils
from grr_response_server import server_startup
from grr_response_server.bin import config_updater_keys_util
from fleetspeak.src.config.proto.fleetspeak_config import config_pb2
from fleetspeak.src.server.grpcservice.proto.fleetspeak_grpcservice import grpcservice_pb2
from fleetspeak.src.server.proto.fleetspeak_server import server_pb2
from fleetspeak.src.server.proto.fleetspeak_server import services_pb2
try:
# Importing readline enables the raw_input calls to have history etc.
import readline # pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top
except ImportError:
# readline is not bundled with Python on Windows. Simply ignoring failing
# import then.
pass
# These control retry behavior when checking that GRR can connect to
# MySQL during config initialization.
_MYSQL_MAX_RETRIES = 2
_MYSQL_RETRY_WAIT_SECS = 2
# Python hacks or executables larger than this limit will not be uploaded.
_MAX_SIGNED_BINARY_BYTES = 100 << 20 # 100 MiB
class ConfigInitError(Exception):
"""Exception raised to abort config initialization."""
def __init__(self):
super().__init__(
"Aborting config initialization. Please run 'grr_config_updater "
"initialize' to retry initialization.")
class BinaryTooLargeError(Exception):
"""Exception raised when trying to upload overly large binaries."""
class UserAlreadyExistsError(Exception):
"""Exception raised when trying to create an already-existing user."""
class UserNotFoundError(Exception):
"""Exception raised when trying to fetch a non-existent user."""
def __init__(self, username):
super().__init__("User '%s' does not exist." % username)
def ImportConfig(filename, config):
"""Reads an old config file and imports keys and user accounts."""
sections_to_import = ["PrivateKeys"]
entries_to_import = [
"Client.executable_signing_public_key", "CA.certificate",
"Frontend.certificate"
]
options_imported = 0
old_config = grr_config.CONFIG.MakeNewConfig()
old_config.Initialize(filename)
for entry in old_config.raw_data:
try:
section = entry.split(".")[0]
if section in sections_to_import or entry in entries_to_import:
config.Set(entry, old_config.Get(entry))
print("Imported %s." % entry)
options_imported += 1
except Exception as e: # pylint: disable=broad-except
print("Exception during import of %s: %s" % (entry, e))
return options_imported
def RetryQuestion(question_text, output_re="", default_val=None):
"""Continually ask a question until the output_re is matched."""
while True:
if default_val is not None:
new_text = "%s [%s]: " % (question_text, default_val)
else:
new_text = "%s: " % question_text
# pytype: disable=wrong-arg-count
output = input(new_text) or str(default_val)
# pytype: enable=wrong-arg-count
output = output.strip()
if not output_re or re.match(output_re, output):
break
else:
print("Invalid input, must match %s" % output_re)
return output
def RetryBoolQuestion(question_text, default_bool):
if not isinstance(default_bool, bool):
raise ValueError("default_bool should be a boolean, not %s" %
type(default_bool))
default_val = "Y" if default_bool else "N"
prompt_suff = "[Yn]" if default_bool else "[yN]"
return RetryQuestion("%s %s: " % (question_text, prompt_suff), "[yY]|[nN]",
default_val)[0].upper() == "Y"
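# Illustrative behaviour (hypothetical prompt text, not part of the original
# module): RetryBoolQuestion("Use Fleetspeak?", True) prompts with "[Yn]" and
# returns True for an empty answer or "y"/"Y", False for "n"/"N"; any other
# input is re-asked by RetryQuestion above.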
def RetryIntQuestion(question_text: str, default_int: int) -> int:
return int(RetryQuestion(question_text, "^[0-9]+$", str(default_int)))
def GetPassword(question_text: str) -> str:
# TODO(hanuszczak): Incorrect type specification for `getpass`.
# pytype: disable=wrong-arg-types
return getpass.getpass(prompt=question_text)
# pytype: enable=wrong-arg-types
def ConfigureHostnames(config, external_hostname: Optional[Text] = None):
"""This configures the hostnames stored in the config."""
if not external_hostname:
try:
external_hostname = socket.gethostname()
except (OSError, IOError):
print("Sorry, we couldn't guess your hostname.\n")
external_hostname = RetryQuestion(
"Please enter your hostname e.g. "
"grr.example.com", "^[\\.A-Za-z0-9-]+$", external_hostname)
print("""\n\n-=Server URL=-
The Server URL specifies the URL that the clients will connect to in
order to communicate with the server. For best results this should be publicly
accessible. By default this will be port 8080 with the URL ending in /control.
""")
frontend_url = RetryQuestion("Frontend URL", "^http://.*/$",
"http://%s:8080/" % external_hostname)
config.Set("Client.server_urls", [frontend_url])
frontend_port = urlparse.urlparse(frontend_url).port or grr_config.CONFIG.Get(
"Frontend.bind_port")
config.Set("Frontend.bind_port", frontend_port)
print("""\n\n-=AdminUI URL=-:
The UI URL specifies where the Administrative Web Interface can be found.
""")
ui_url = RetryQuestion("AdminUI URL", "^http[s]*://.*$",
"http://%s:8000" % external_hostname)
config.Set("AdminUI.url", ui_url)
ui_port = urlparse.urlparse(ui_url).port or grr_config.CONFIG.Get(
"AdminUI.port")
config.Set("AdminUI.port", ui_port)
def CheckMySQLConnection(db_options):
"""Checks whether a connection can be established to MySQL.
Args:
db_options: A dict mapping GRR MySQL config options to their values.
Returns:
A boolean indicating whether a connection could be made to a MySQL server
instance with the given options.
"""
for tries_left in range(_MYSQL_MAX_RETRIES, -1, -1):
try:
connection_options = dict(
host=db_options["Mysql.host"],
db=db_options["Mysql.database_name"],
user=db_options["Mysql.database_username"],
passwd=db_options["Mysql.database_password"],
charset="utf8")
if "Mysql.port" in db_options:
connection_options["port"] = db_options["Mysql.port"]
if "Mysql.unix_socket" in db_options:
connection_options["unix_socket"] = db_options["Mysql.unix_socket"]
ssl_enabled = "Mysql.client_key_path" in db_options
if ssl_enabled:
connection_options["ssl"] = {
"key": db_options["Mysql.client_key_path"],
"cert": db_options["Mysql.client_cert_path"],
"ca": db_options["Mysql.ca_cert_path"],
}
connection = MySQLdb.connect(**connection_options)
if ssl_enabled:
cursor = connection.cursor()
cursor.execute("SHOW VARIABLES LIKE 'have_ssl'")
res = cursor.fetchone()
if res[0] == "have_ssl" and res[1] == "YES":
print("SSL enabled successfully.")
else:
print("Unable to establish SSL connection to MySQL.")
return False
return True
except MySQLdb.OperationalError as mysql_op_error:
if len(mysql_op_error.args) < 2:
# We expect the exception's arguments to be an error-code and
# an error message.
print("Unexpected exception type received from MySQL. %d attempts "
"left: %s" % (tries_left, mysql_op_error))
time.sleep(_MYSQL_RETRY_WAIT_SECS)
continue
if mysql_op_error.args[0] == mysql_conn_errors.CONNECTION_ERROR:
print("Failed to connect to MySQL. Is it running? %d attempts left." %
tries_left)
elif mysql_op_error.args[0] == mysql_conn_errors.UNKNOWN_HOST:
print("Unknown-hostname error encountered while trying to connect to "
"MySQL.")
return False # No need for retry.
elif mysql_op_error.args[0] == general_mysql_errors.BAD_DB_ERROR:
# GRR db doesn't exist yet. That's expected if this is the initial
# setup.
return True
elif mysql_op_error.args[0] in (
general_mysql_errors.ACCESS_DENIED_ERROR,
general_mysql_errors.DBACCESS_DENIED_ERROR):
print("Permission error encountered while trying to connect to "
"MySQL: %s" % mysql_op_error)
return False # No need for retry.
else:
print("Unexpected operational error encountered while trying to "
"connect to MySQL. %d attempts left: %s" %
(tries_left, mysql_op_error))
except MySQLdb.Error as mysql_error:
print("Unexpected error encountered while trying to connect to MySQL. "
"%d attempts left: %s" % (tries_left, mysql_error))
time.sleep(_MYSQL_RETRY_WAIT_SECS)
return False
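# Illustrative call (all values are placeholders, not defaults of this module):
#   CheckMySQLConnection({
#       "Mysql.host": "127.0.0.1",
#       "Mysql.database_name": "grr",
#       "Mysql.database_username": "grr",
#       "Mysql.database_password": "secret",
#   })
# This returns True on a successful connection (or when only the GRR database
# is missing, which is expected on first setup) and False for unknown-host,
# access-denied, or repeated connection failures.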
def ConfigureMySQLDatastore(config):
"""Prompts the user for configuration details for a MySQL datastore."""
db_options = {}
db_options["Database.implementation"] = "MysqlDB"
db_options["Blobstore.implementation"] = "DbBlobStore"
print("GRR will use MySQL as its database backend. Enter connection details:")
datastore_init_complete = False
while not datastore_init_complete:
db_options["Mysql.host"] = RetryQuestion("MySQL Host", "^[\\.A-Za-z0-9-]+$",
config["Mysql.host"])
db_options["Mysql.port"] = int(
RetryQuestion("MySQL Port (0 for local socket)", "^[0-9]+$",
config["Mysql.port"]))
db_options["Mysql.database"] = RetryQuestion("MySQL Database",
"^[A-Za-z0-9-]+$",
config["Mysql.database_name"])
db_options["Mysql.database_name"] = db_options["Mysql.database"]
db_options["Mysql.username"] = RetryQuestion(
"MySQL Username", "[A-Za-z0-9-@]+$", config["Mysql.database_username"])
db_options["Mysql.database_username"] = db_options["Mysql.username"]
db_options["Mysql.password"] = GetPassword(
"Please enter password for database user %s: " %
db_options["Mysql.username"])
db_options["Mysql.database_password"] = db_options["Mysql.password"]
use_ssl = RetryBoolQuestion("Configure SSL connections for MySQL?", False)
if use_ssl:
db_options["Mysql.client_key_path"] = RetryQuestion(
"Path to the client private key file",
default_val=config["Mysql.client_key_path"])
db_options["Mysql.client_cert_path"] = RetryQuestion(
"Path to the client certificate file",
default_val=config["Mysql.client_cert_path"])
db_options["Mysql.ca_cert_path"] = RetryQuestion(
"Path to the CA certificate file",
default_val=config["Mysql.ca_cert_path"])
if CheckMySQLConnection(db_options):
print("Successfully connected to MySQL with the provided details.")
datastore_init_complete = True
else:
print("Error: Could not connect to MySQL with the provided details.")
should_retry = RetryBoolQuestion(
"Re-enter MySQL details? Answering 'no' will abort config "
"initialization: ", True)
if should_retry:
db_options.clear()
else:
raise ConfigInitError()
for option, value in db_options.items():
config.Set(option, value)
class FleetspeakConfig:
"""Wraps the bundled fleetspeak configuration."""
def __init__(self):
self.use_fleetspeak: bool = False
    self.external_hostname: Optional[str] = None
    self.admin_port = 4444
    self.grr_port = 11111
    self.https_port = 4443
    self.mysql_username: Optional[str] = None
    self.mysql_password: Optional[str] = None
    self.mysql_host: Optional[str] = None
    self.mysql_port = 3306
    self.mysql_database: Optional[str] = None
    self.mysql_unix_socket: Optional[str] = None
self.config_dir = "/etc/fleetspeak-server"
def Prompt(self, config):
"""Sets up the in-memory configuration interactively."""
if self._IsFleetspeakPresent():
self.use_fleetspeak = RetryBoolQuestion(
"Use Fleetspeak (EXPERIMENTAL, next generation communication "
"framework)?", False)
else:
self.use_fleetspeak = False
print("Fleetspeak (EXPERIMENTAL, optional, next generation "
"communication framework) seems to be missing.")
print("Skipping Fleetspeak configuration.\n")
if self.use_fleetspeak:
try:
self.external_hostname = socket.gethostname()
except (OSError, IOError):
self.external_hostname = ""
print("Sorry, we couldn't guess your hostname.\n")
self.external_hostname = RetryQuestion(
"Please enter your hostname e.g. "
"grr.example.com", "^[\\.A-Za-z0-9-]+$", self.external_hostname)
self.https_port = RetryIntQuestion("Fleetspeak public HTTPS port",
self.https_port)
self._PromptMySQL(config)
def Write(self, config):
if self.use_fleetspeak:
self._WriteEnabled(config)
else:
self._WriteDisabled(config)
def _ConfigPath(self, *path_components: str) -> str:
return os.path.join(self.config_dir, *path_components)
def _IsFleetspeakPresent(self) -> bool:
if not os.path.exists(self._ConfigPath()):
return False
if not shutil.which("fleetspeak-config"):
return False
return True
def _PromptMySQLOnce(self, config):
"""Prompt the MySQL configuration once."""
self.mysql_host = RetryQuestion("Fleetspeak MySQL Host",
"^[\\.A-Za-z0-9-]+$", self.mysql_host or
config["Mysql.host"])
self.mysql_port = RetryIntQuestion(
"Fleetspeak MySQL Port (0 for local socket)", self.mysql_port or
0) or None
if self.mysql_port is None:
# golang's mysql connector needs the socket specified explicitly.
self.mysql_unix_socket = RetryQuestion(
"Fleetspeak MySQL local socket path", ".+",
self._FindMysqlUnixSocket() or "")
self.mysql_database = RetryQuestion("Fleetspeak MySQL Database",
"^[A-Za-z0-9-]+$",
self.mysql_database or "fleetspeak")
self.mysql_username = RetryQuestion(
"Fleetspeak MySQL Username", "[A-Za-z0-9-@]+$", self.mysql_username or
config["Mysql.database_username"])
self.mysql_password = GetPassword(
f"Please enter password for database user {self.mysql_username}: ")
def _PromptMySQL(self, config):
"""Prompts the MySQL configuration, retrying if the configuration is invalid."""
while True:
self._PromptMySQLOnce(config)
if self._CheckMySQLConnection():
print("Successfully connected to MySQL with the given configuration.")
return
else:
print("Error: Could not connect to MySQL with the given configuration.")
retry = RetryBoolQuestion("Do you want to retry MySQL configuration?",
True)
if not retry:
raise ConfigInitError()
def _WriteDisabled(self, config):
config.Set("Server.fleetspeak_enabled", False)
config.Set("Client.fleetspeak_enabled", False)
config.Set("ClientBuilder.fleetspeak_bundled", False)
config.Set("Server.fleetspeak_server", "")
if self._IsFleetspeakPresent():
with open(self._ConfigPath("disabled"), "w") as f:
f.write("The existence of this file disables the "
"fleetspeak-server.service systemd unit.\n")
def _WriteEnabled(self, config):
"""Applies the in-memory configuration for the use_fleetspeak case."""
service_config = services_pb2.ServiceConfig(name="GRR", factory="GRPC")
grpc_config = grpcservice_pb2.Config(
target="localhost:{}".format(self.grr_port), insecure=True)
service_config.config.Pack(grpc_config)
server_conf = server_pb2.ServerConfig(services=[service_config])
server_conf.broadcast_poll_time.seconds = 1
with open(self._ConfigPath("server.services.config"), "w") as f:
f.write(text_format.MessageToString(server_conf))
cp = config_pb2.Config()
cp.configuration_name = "Fleetspeak"
if self.mysql_unix_socket:
cp.components_config.mysql_data_source_name = (
"{user}:{password}@unix({socket})/{db}".format(
user=self.mysql_username,
password=self.mysql_password,
socket=self.mysql_unix_socket,
db=self.mysql_database))
else:
cp.components_config.mysql_data_source_name = (
"{user}:{password}@tcp({host}:{port})/{db}".format(
user=self.mysql_username,
password=self.mysql_password,
host=self.mysql_host,
port=self.mysql_port,
db=self.mysql_database))
cp.components_config.https_config.listen_address = "{}:{}".format(
self.external_hostname, self.https_port)
cp.components_config.https_config.disable_streaming = False
cp.components_config.admin_config.listen_address = "localhost:{}".format(
self.admin_port)
cp.public_host_port.append(cp.components_config.https_config.listen_address)
cp.server_component_configuration_file = self._ConfigPath(
"server.components.config")
cp.trusted_cert_file = self._ConfigPath("trusted_cert.pem")
cp.trusted_cert_key_file = self._ConfigPath("trusted_cert_key.pem")
cp.server_cert_file = self._ConfigPath("server_cert.pem")
cp.server_cert_key_file = self._ConfigPath("server_cert_key.pem")
cp.linux_client_configuration_file = self._ConfigPath("linux_client.config")
cp.windows_client_configuration_file = self._ConfigPath(
"windows_client.config")
cp.darwin_client_configuration_file = self._ConfigPath(
"darwin_client.config")
p = subprocess.Popen(["fleetspeak-config", "-config", "/dev/stdin"],
stdin=subprocess.PIPE)
p.communicate(input=text_format.MessageToString(cp).encode())
if p.wait() != 0:
raise RuntimeError("fleetspeak-config command failed.")
# These modules don't exist on Windows, so importing locally.
# pylint: disable=g-import-not-at-top
import grp
import pwd
# pylint: enable=g-import-not-at-top
if (os.geteuid() == 0 and pwd.getpwnam("fleetspeak") and
grp.getgrnam("fleetspeak")):
subprocess.check_call(
["chown", "-R", "fleetspeak:fleetspeak",
self._ConfigPath()])
try:
os.unlink(self._ConfigPath("disabled"))
except FileNotFoundError:
pass
config.Set("Server.fleetspeak_enabled", True)
config.Set("Client.fleetspeak_enabled", True)
config.Set("ClientBuilder.fleetspeak_bundled", True)
config.Set(
"Target:Linux", {
"ClientBuilder.fleetspeak_client_config":
cp.linux_client_configuration_file
})
config.Set(
"Target:Windows", {
"ClientBuilder.fleetspeak_client_config":
cp.windows_client_configuration_file
})
config.Set(
"Target:Darwin", {
"ClientBuilder.fleetspeak_client_config":
cp.darwin_client_configuration_file
})
config.Set("Server.fleetspeak_server",
cp.components_config.admin_config.listen_address)
config.Set("FleetspeakFrontend Context",
{"Server.fleetspeak_message_listen_address": grpc_config.target})
def _CheckMySQLConnection(self):
"""Checks the MySQL configuration by attempting a connection."""
db_options = {
"Mysql.host": self.mysql_host,
"Mysql.database_name": self.mysql_database,
"Mysql.database_username": self.mysql_username,
"Mysql.database_password": self.mysql_password,
}
if self.mysql_port is not None:
db_options["Mysql.port"] = self.mysql_port
if self.mysql_unix_socket is not None:
db_options["Mysql.unix_socket"] = self.mysql_unix_socket
# In Python, localhost is automatically mapped to connecting via the UNIX
# domain socket.
# However, for Go we require a TCP connection at the moment.
# So if the host is localhost, try to connect to 127.0.0.1 to force TCP.
if db_options["Mysql.host"] == "localhost" and "Mysql.port" in db_options:
db_options_localhost = dict(db_options)
db_options_localhost["Mysql.host"] = "127.0.0.1"
if CheckMySQLConnection(db_options_localhost):
return True
return CheckMySQLConnection(db_options)
def _ListUnixSockets(self) -> Generator[str, None, None]:
"""Returns paths of all active UNIX sockets."""
# Example /proc/net/unix:
#
# Num RefCount Protocol Flags Type St Inode Path
# [...]
# 0000000000000000: 00000002 00000000 00010000 0001 01 42013 \
# /run/mysqld/mysqld.sock
# [...]
hex_digit = "[0-9a-fA-F]"
regex = re.compile(f"^{hex_digit}+: ({hex_digit}+ +){{6}}(.*)$")
with open("/proc/net/unix") as f:
for line in f:
line = line.strip("\n")
match = regex.match(line)
if match:
yield match.group(2)
def _FindMysqlUnixSocket(self) -> Optional[str]:
for socket_path in self._ListUnixSockets():
if "mysql" in socket_path:
return socket_path
return None
def ConfigureDatastore(config):
"""Guides the user through configuration of the datastore."""
print("\n\n-=GRR Datastore=-\n"
"For GRR to work each GRR server has to be able to communicate with\n"
"the datastore. To do this we need to configure a datastore.\n")
existing_datastore = grr_config.CONFIG.Get("Datastore.implementation")
if not existing_datastore or existing_datastore == "FakeDataStore":
ConfigureMySQLDatastore(config)
return
print("Found existing settings:\n REL_DB MySQL database")
if existing_datastore == "SqliteDataStore":
set_up_mysql = RetryBoolQuestion(
"The SQLite datastore is no longer supported. Would you like to\n"
"set up a MySQL datastore? Answering 'no' will abort config "
"initialization.", True)
if set_up_mysql:
print("\nPlease note that no data will be migrated from SQLite to "
"MySQL.\n")
ConfigureMySQLDatastore(config)
else:
raise ConfigInitError()
elif existing_datastore == "MySQLAdvancedDataStore":
set_up_mysql = RetryBoolQuestion(
"The MySQLAdvancedDataStore is no longer supported. Would you like to\n"
"set up a new MySQL datastore? Answering 'no' will abort config "
"initialization.", True)
if set_up_mysql:
print("\nPlease note that no data will be migrated from the old data "
"store.\n")
ConfigureMySQLDatastore(config)
else:
raise ConfigInitError()
def ConfigureUrls(config, external_hostname: Optional[Text] = None):
"""Guides the user through configuration of various URLs used by GRR."""
print("\n\n-=GRR URLs=-\n"
"For GRR to work each client has to be able to communicate with the\n"
"server. To do this we normally need a public dns name or IP address\n"
"to communicate with. In the standard configuration this will be used\n"
"to host both the client facing server and the admin user interface.\n")
existing_ui_urn = grr_config.CONFIG.Get("AdminUI.url", default=None)
existing_frontend_urns = grr_config.CONFIG.Get("Client.server_urls")
if not existing_frontend_urns:
# Port from older deprecated setting Client.control_urls.
existing_control_urns = grr_config.CONFIG.Get(
"Client.control_urls", default=None)
if existing_control_urns is not None:
existing_frontend_urns = []
for existing_control_urn in existing_control_urns:
if not existing_control_urn.endswith("control"):
raise RuntimeError("Invalid existing control URL: %s" %
existing_control_urn)
existing_frontend_urns.append(
existing_control_urn.rsplit("/", 1)[0] + "/")
config.Set("Client.server_urls", existing_frontend_urns)
config.Set("Client.control_urls", ["deprecated use Client.server_urls"])
if not existing_frontend_urns or not existing_ui_urn:
ConfigureHostnames(config, external_hostname=external_hostname)
else:
print("Found existing settings:\n AdminUI URL: %s\n "
"Frontend URL(s): %s\n" % (existing_ui_urn, existing_frontend_urns))
if not RetryBoolQuestion("Do you want to keep this configuration?", True):
ConfigureHostnames(config, external_hostname=external_hostname)
def ConfigureEmails(config):
"""Guides the user through email setup."""
print("\n\n-=GRR Emails=-\n"
"GRR needs to be able to send emails for various logging and\n"
"alerting functions. The email domain will be appended to GRR\n"
"usernames when sending emails to users.\n")
existing_log_domain = grr_config.CONFIG.Get("Logging.domain", default=None)
existing_al_email = grr_config.CONFIG.Get(
"Monitoring.alert_email", default=None)
existing_em_email = grr_config.CONFIG.Get(
"Monitoring.emergency_access_email", default=None)
if existing_log_domain and existing_al_email and existing_em_email:
print("Found existing settings:\n"
" Email Domain: %s\n Alert Email Address: %s\n"
" Emergency Access Email Address: %s\n" %
(existing_log_domain, existing_al_email, existing_em_email))
if RetryBoolQuestion("Do you want to keep this configuration?", True):
return
print("\n\n-=Monitoring/Email Domain=-\n"
"Emails concerning alerts or updates must be sent to this domain.\n")
domain = RetryQuestion("Email Domain e.g example.com",
"^([\\.A-Za-z0-9-]+)*$",
grr_config.CONFIG.Get("Logging.domain"))
config.Set("Logging.domain", domain)
print("\n\n-=Alert Email Address=-\n"
"Address where monitoring events get sent, e.g. crashed clients, \n"
"broken server, etc.\n")
email = RetryQuestion("Alert Email Address", "", "grr-monitoring@%s" % domain)
config.Set("Monitoring.alert_email", email)
print("\n\n-=Emergency Email Address=-\n"
"Address where high priority events such as an emergency ACL bypass "
"are sent.\n")
emergency_email = RetryQuestion("Emergency Access Email Address", "",
"grr-emergency@%s" % domain)
config.Set("Monitoring.emergency_access_email", emergency_email)
def InstallTemplatePackage():
"""Call pip to install the templates."""
virtualenv_bin = os.path.dirname(sys.executable)
extension = os.path.splitext(sys.executable)[1]
pip = "%s/pip%s" % (virtualenv_bin, extension)
# Install the GRR server component to satisfy the dependency below.
major_minor_version = ".".join(
pkg_resources.get_distribution("grr-response-core").version.split(".")
[0:2])
# Note that this version spec requires a recent version of pip
subprocess.check_call([
sys.executable, pip, "install", "--upgrade", "-f",
"https://storage.googleapis.com/releases.grr-response.com/index.html",
"grr-response-templates==%s.*" % major_minor_version
])
def FinalizeConfigInit(config,
admin_password: Optional[Text] = None,
redownload_templates: bool = False,
repack_templates: bool = True,
prompt: bool = True):
"""Performs the final steps of config initialization."""
config.Set("Server.initialized", True)
print("\nWriting configuration to %s." % config["Config.writeback"])
config.Write()
print("Initializing the datastore.")
# Reload the config and initialize the GRR database.
server_startup.Init()
print("\nStep 3: Adding GRR Admin User")
try:
CreateUser("admin", password=admin_password, is_admin=True)
except UserAlreadyExistsError:
if prompt:
# pytype: disable=wrong-arg-count
if ((input("User 'admin' already exists, do you want to "
"reset the password? [yN]: ").upper() or "N") == "Y"):
UpdateUser("admin", password=admin_password, is_admin=True)
# pytype: enable=wrong-arg-count
else:
UpdateUser("admin", password=admin_password, is_admin=True)
print("\nStep 4: Repackaging clients with new configuration.")
if prompt:
redownload_templates = RetryBoolQuestion(
"Server debs include client templates. Re-download templates?", False)
repack_templates = RetryBoolQuestion("Repack client templates?", True)
if redownload_templates:
InstallTemplatePackage()
# Build debug binaries, then build release binaries.
if repack_templates:
repacking.TemplateRepacker().RepackAllTemplates(upload=True)
print("\nGRR Initialization complete! You can edit the new configuration "
"in %s.\n" % config["Config.writeback"])
if prompt and os.geteuid() == 0:
restart = RetryBoolQuestion(
"Restart service for the new configuration "
"to take effect?", True)
if restart:
for service in ("grr-server", "fleetspeak-server"):
try:
print(f"Restarting service: {service}.")
subprocess.check_call(["service", service, "restart"])
except subprocess.CalledProcessError as e:
print(f"Failed to restart: {service}.")
print(e, file=sys.stderr)
else:
print("Please restart the service for the new configuration to take "
"effect.\n")
def Initialize(config=None,
external_hostname: Optional[Text] = None,
admin_password: Optional[Text] = None,
redownload_templates: bool = False,
repack_templates: bool = True):
"""Initialize or update a GRR configuration."""
print("Checking write access on config %s" % config["Config.writeback"])
if not os.access(config.parser.config_path, os.W_OK):
raise IOError("Config not writeable (need sudo?)")
print("\nStep 0: Importing Configuration from previous installation.")
options_imported = 0
prev_config_file = config.Get("ConfigUpdater.old_config", default=None)
if prev_config_file and os.access(prev_config_file, os.R_OK):
print("Found config file %s." % prev_config_file)
# pytype: disable=wrong-arg-count
if input("Do you want to import this configuration? "
"[yN]: ").upper() == "Y":
options_imported = ImportConfig(prev_config_file, config)
# pytype: enable=wrong-arg-count
else:
print("No old config file found.")
print("\nStep 1: Setting Basic Configuration Parameters")
print("We are now going to configure the server using a bunch of questions.")
fs_config = FleetspeakConfig()
fs_config.Prompt(config)
ConfigureDatastore(config)
ConfigureUrls(config, external_hostname=external_hostname)
ConfigureEmails(config)
print("\nStep 2: Key Generation")
if config.Get("PrivateKeys.server_key", default=None):
if options_imported > 0:
print("Since you have imported keys from another installation in the "
"last step,\nyou probably do not want to generate new keys now.")
# pytype: disable=wrong-arg-count
if (input("You already have keys in your config, do you want to"
" overwrite them? [yN]: ").upper() or "N") == "Y":
config_updater_keys_util.GenerateKeys(config, overwrite_keys=True)
# pytype: enable=wrong-arg-count
else:
config_updater_keys_util.GenerateKeys(config)
fs_config.Write(config)
FinalizeConfigInit(
config,
admin_password=admin_password,
redownload_templates=redownload_templates,
repack_templates=repack_templates,
prompt=True)
def InitializeNoPrompt(
config=None,
external_hostname: Optional[Text] = None,
admin_password: Optional[Text] = None,
mysql_hostname: Optional[Text] = None,
mysql_port: Optional[int] = None,
mysql_username: Optional[Text] = None,
mysql_password: Optional[Text] = None,
mysql_db: Optional[Text] = None,
mysql_client_key_path: Optional[Text] = None,
mysql_client_cert_path: Optional[Text] = None,
mysql_ca_cert_path: Optional[Text] = None,
redownload_templates: bool = False,
repack_templates: bool = True,
use_fleetspeak: bool = False,
mysql_fleetspeak_db: Optional[Text] = None,
):
"""Initialize GRR with no prompts.
Args:
config: config object
external_hostname: A hostname.
admin_password: A password used for the admin user.
mysql_hostname: A hostname used for establishing connection to MySQL.
mysql_port: A port used for establishing connection to MySQL.
mysql_username: A username used for establishing connection to MySQL.
mysql_password: A password used for establishing connection to MySQL.
mysql_db: Name of the MySQL database to use.
mysql_client_key_path: The path name of the client private key file.
mysql_client_cert_path: The path name of the client public key certificate.
mysql_ca_cert_path: The path name of the CA certificate file.
redownload_templates: Indicates whether templates should be re-downloaded.
repack_templates: Indicates whether templates should be re-packed.
use_fleetspeak: Whether to use Fleetspeak.
mysql_fleetspeak_db: Name of the MySQL database to use for Fleetspeak.
Raises:
ValueError: if required flags are not provided, or if the config has
already been initialized.
IOError: if config is not writeable
ConfigInitError: if GRR is unable to connect to a running MySQL instance.
This method does the minimum work necessary to configure GRR without any user
prompting, relying heavily on config default values. User must supply the
external hostname, admin password, and MySQL password; everything else is set
automatically.
"""
if config["Server.initialized"]:
raise ValueError("Config has already been initialized.")
if not external_hostname:
raise ValueError(
"--noprompt set, but --external_hostname was not provided.")
if not admin_password:
raise ValueError("--noprompt set, but --admin_password was not provided.")
if mysql_password is None:
raise ValueError("--noprompt set, but --mysql_password was not provided.")
print("Checking write access on config %s" % config.parser)
if not os.access(config.parser.config_path, os.W_OK):
raise IOError("Config not writeable (need sudo?)")
config_dict = {}
config_dict["Database.implementation"] = "MysqlDB"
config_dict["Blobstore.implementation"] = "DbBlobStore"
config_dict["Mysql.host"] = mysql_hostname or config["Mysql.host"]
config_dict["Mysql.port"] = mysql_port or config["Mysql.port"]
config_dict["Mysql.database_name"] = config_dict[
"Mysql.database"] = mysql_db or config["Mysql.database_name"]
config_dict["Mysql.database_username"] = config_dict["Mysql.username"] = (
mysql_username or config["Mysql.database_username"])
config_dict["Client.server_urls"] = [
"http://%s:%s/" % (external_hostname, config["Frontend.bind_port"])
]
config_dict["AdminUI.url"] = "http://%s:%s" % (external_hostname,
config["AdminUI.port"])
config_dict["Logging.domain"] = external_hostname
config_dict["Monitoring.alert_email"] = ("grr-monitoring@%s" %
external_hostname)
config_dict["Monitoring.emergency_access_email"] = ("grr-emergency@%s" %
external_hostname)
# Print all configuration options, except for the MySQL password.
print("Setting configuration as:\n\n%s" % config_dict)
config_dict["Mysql.database_password"] = config_dict[
"Mysql.password"] = mysql_password
if mysql_client_key_path is not None:
config_dict["Mysql.client_key_path"] = mysql_client_key_path
config_dict["Mysql.client_cert_path"] = mysql_client_cert_path
config_dict["Mysql.ca_cert_path"] = mysql_ca_cert_path
if CheckMySQLConnection(config_dict):
print("Successfully connected to MySQL with the given configuration.")
else:
print("Error: Could not connect to MySQL with the given configuration.")
raise ConfigInitError()
for key, value in config_dict.items():
config.Set(key, value)
config_updater_keys_util.GenerateKeys(config)
fs_config = FleetspeakConfig()
fs_config.use_fleetspeak = use_fleetspeak
fs_config.external_hostname = external_hostname
fs_config.mysql_username = mysql_username
fs_config.mysql_password = mysql_password
fs_config.mysql_host = mysql_hostname
if mysql_port:
fs_config.mysql_port = mysql_port
fs_config.mysql_database = mysql_fleetspeak_db
fs_config.Write(config)
FinalizeConfigInit(
config,
admin_password=admin_password,
redownload_templates=redownload_templates,
repack_templates=repack_templates,
prompt=False)
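# Illustrative non-interactive invocation (all values are placeholders):
#   InitializeNoPrompt(config, external_hostname="grr.example.com",
#                      admin_password="...", mysql_password="...")
# Besides the config object, only these three values are mandatory; everything
# else falls back to the config defaults, as described in the docstring above.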
def UploadSignedBinary(source_path,
binary_type,
platform,
upload_subdirectory=""):
"""Signs a binary and uploads it to the datastore.
Args:
source_path: Path to the binary to upload.
binary_type: Type of the binary, e.g python-hack or executable.
platform: Client platform where the binary is intended to be run.
upload_subdirectory: Path of a subdirectory to upload the binary to,
relative to the canonical path for binaries of the given type and
platform.
Raises:
BinaryTooLargeError: If the binary to upload is too large.
"""
file_size = os.path.getsize(source_path)
if file_size > _MAX_SIGNED_BINARY_BYTES:
raise BinaryTooLargeError(
"File [%s] is of size %d (bytes), which exceeds the allowed maximum "
"of %d bytes." % (source_path, file_size, _MAX_SIGNED_BINARY_BYTES))
context = ["Platform:%s" % platform.title(), "Client Context"]
signing_key = grr_config.CONFIG.Get(
"PrivateKeys.executable_signing_private_key", context=context)
root_api = maintenance_utils.InitGRRRootAPI()
binary_path = "/".join([
platform.lower(),
upload_subdirectory,
os.path.basename(source_path),
])
binary = root_api.GrrBinary(binary_type, binary_path)
with open(source_path, "rb") as fd:
binary.Upload(
fd,
sign_fn=binary.DefaultUploadSigner(
private_key=signing_key.GetRawPrivateKey()))
print("Uploaded %s to %s" % (binary_type, binary_path))
def CreateUser(username, password=None, is_admin=False):
"""Creates a new GRR user."""
grr_api = maintenance_utils.InitGRRRootAPI()
try:
user_exists = grr_api.GrrUser(username).Get() is not None
except api_errors.ResourceNotFoundError:
user_exists = False
if user_exists:
raise UserAlreadyExistsError("User '%s' already exists." % username)
user_type, password = _GetUserTypeAndPassword(
username, password=password, is_admin=is_admin)
grr_api.CreateGrrUser(
username=username, user_type=user_type, password=password)
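# Illustrative call (username and password are placeholders):
#   CreateUser("analyst", password="hunter2", is_admin=False)
# This raises UserAlreadyExistsError if the user is already present.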
def UpdateUser(username, password=None, is_admin=False):
"""Updates the password or privilege-level for a user."""
user_type, password = _GetUserTypeAndPassword(
username, password=password, is_admin=is_admin)
grr_api = maintenance_utils.InitGRRRootAPI()
grr_user = grr_api.GrrUser(username).Get()
grr_user.Modify(user_type=user_type, password=password)
def GetUserSummary(username):
"""Returns a string with summary info for a user."""
grr_api = maintenance_utils.InitGRRRootAPI()
try:
return _Summarize(grr_api.GrrUser(username).Get().data)
except api_errors.ResourceNotFoundError:
raise UserNotFoundError(username)
def GetAllUserSummaries():
"""Returns a string containing summary info for all GRR users."""
grr_api = maintenance_utils.InitGRRRootAPI()
user_wrappers = sorted(grr_api.ListGrrUsers(), key=lambda x: x.username)
summaries = [_Summarize(w.data) for w in user_wrappers]
return "\n\n".join(summaries)
def _Summarize(user_info):
"""Returns a string with summary info for a user."""
return "Username: %s\nIs Admin: %s" % (user_info.username, user_info.user_type
== api_root.GrrUser.USER_TYPE_ADMIN)
def DeleteUser(username):
"""Deletes a GRR user from the datastore."""
grr_api = maintenance_utils.InitGRRRootAPI()
try:
grr_api.GrrUser(username).Get().Delete()
except api_errors.ResourceNotFoundError:
raise UserNotFoundError(username)
def _GetUserTypeAndPassword(username, password=None, is_admin=False):
"""Returns the user-type and password for a user.
Args:
username: Username for the user.
password: Password for the user. If None, or not provided, we will prompt
for one via the terminal.
is_admin: Indicates whether the user should have admin privileges.
"""
if is_admin:
user_type = api_root.GrrUser.USER_TYPE_ADMIN
else:
user_type = api_root.GrrUser.USER_TYPE_STANDARD
if password is None:
password = GetPassword("Please enter password for user '%s':" % username)
return user_type, password
def SwitchToRelDB(config):
"""Switches a given config from using AFF4 to using REL_DB."""
print("***************************************************************\n"
"Make sure to back up the existing configuration writeback file.\n"
"Writeback file path:\n%s\n"
"***************************************************************\n" %
config["Config.writeback"])
RetryBoolQuestion("Continue?", True)
config.Set("Database.implementation", "MysqlDB")
if (config["Blobstore.implementation"] != "DbBlobStore" or RetryBoolQuestion(
"You have a custom 'Blobstore.implementation' setting. Do you want\n"
"to switch to DbBlobStore (default option for REL_DB, meaning that\n"
"blobs will be stored inside the MySQL database)?", True)):
config.Set("Blobstore.implementation", "DbBlobStore")
if (RetryBoolQuestion(
"Do you want to use a different MySQL database for the REL_DB datastore?",
True)):
db_name = RetryQuestion("MySQL Database", "^[A-Za-z0-9-]+$",
config["Mysql.database_name"])
else:
db_name = config["Mysql.database_name"]
config.Set("Mysql.database", db_name)
if (input("Do you want to use previously set up MySQL username and password\n"
"to connect to MySQL database '%s'? [Yn]: " % db_name).upper() or
"Y") == "Y":
username = config["Mysql.database_username"]
password = config["Mysql.database_password"]
else:
username = RetryQuestion("MySQL Username", "[A-Za-z0-9-@]+$",
config["Mysql.database_username"])
password = GetPassword("Please enter password for database user %s: " %
username)
config.Set("Mysql.username", username)
config.Set("Mysql.password", password)
print("Configuration updated.")
def ArgparseBool(raw_value):
"""Returns the boolean value of a raw argparse value.
When defining an argument with argparse, you would think it natural to
be able to set the type to 'bool' and then proceed to set it to
'True' and 'False' via the command line. Unfortunately, that is not possible.
Argparse will silently cast the raw string value of the argument by
calling 'bool()', meaning 'False' gets converted to True. This function is
meant to be used in place of the 'bool' builtin when defining argparse
arguments.
Args:
raw_value: The raw value of the argument, which is a string passed in via
the command line.
Raises:
ArgumentTypeError: If the raw value passed in is not a string equal to
'True' or 'False'.
"""
if not isinstance(raw_value, str):
raise argparse.ArgumentTypeError("Unexpected type: %s. Expected a string." %
compatibility.GetName(type(raw_value)))
if raw_value.lower() == "true":
return True
elif raw_value.lower() == "false":
return False
else:
raise argparse.ArgumentTypeError(
"Invalid value encountered. Expected 'True' or 'False'.")
| apache-2.0 |
ondra-novak/chromium.src | tools/telemetry/telemetry/core/platform/profiler/strace_profiler.py | 43 | 7827 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import re
import signal
import subprocess
import sys
import tempfile
from telemetry.core.platform import profiler
from telemetry.timeline import model
# Parses one line of strace output, for example:
# 6052 1311456063.159722 read(8, "\1\0\0\0\0\0\0\0", 8) = 8 <0.000022>
_STRACE_LINE_RE = re.compile(
'^(?P<tid>\d+)\s+'
'(?P<ts>\d+)'
'(?P<micro>.\d+)\s+'
'(?P<func>.*?)'
'[(](?P<args>.*?)[)]\s+=\s+'
'(?P<ret>.*?)\s+'
'<(?P<dur>[\d.]+)>$')
_UNFINISHED_LINE_RE = re.compile(
'^(?P<tid>\d+)\s+'
'(?P<line>.*?)'
'<unfinished ...>$')
_RESUMED_LINE_RE = re.compile(
'^(?P<tid>\d+)\s+'
'(?P<ts>\d+)'
'(?P<micro>.\d+)\s+'
'<[.][.][.]\s(?P<func>.*?)\sresumed>'
'(?P<line>.*?)$')
_KILLED_LINE_RE = re.compile(
'^(?P<tid>\d+)\s+'
'(?P<ts>\d+)'
'(?P<micro>.\d+)\s+'
'[+][+][+] killed by SIGKILL [+][+][+]$')
def _StraceToChromeTrace(pid, infile):
"""Returns chrometrace json format for |infile| strace output."""
# Map of fd:file_name for open file descriptors. Useful for displaying
# file name instead of the descriptor number.
fd_map = {}
# Map of tid:interrupted_call for the interrupted call on each thread. It is
# possible to context switch during a system call. In this case we must
# match up the lines.
interrupted_call_map = {}
out = []
with open(infile, 'r') as f:
for line in f.readlines():
# Ignore kill lines for now.
m = _KILLED_LINE_RE.match(line)
if m:
continue
# If this line is interrupted, then remember it and continue.
m = _UNFINISHED_LINE_RE.match(line)
if m:
assert m.group('tid') not in interrupted_call_map
interrupted_call_map[m.group('tid')] = line
continue
# If this is a resume of a previous line, stitch it together.
interrupted = False
m = _RESUMED_LINE_RE.match(line)
if m:
interrupted = True
assert m.group('tid') in interrupted_call_map
line = interrupted_call_map[m.group('tid')].replace(
'<unfinished ...>', m.group('line'))
del interrupted_call_map[m.group('tid')]
# At this point we can do a normal match.
m = _STRACE_LINE_RE.match(line)
if not m:
if ('exit' not in line and
'Profiling timer expired' not in line and
'<unavailable>' not in line):
logging.warn('Failed to parse line: %s' % line)
continue
ts_begin = int(1000000 * (int(m.group('ts')) + float(m.group('micro'))))
ts_end = ts_begin + int(1000000 * float(m.group('dur')))
tid = int(m.group('tid'))
function_name = unicode(m.group('func'), errors='ignore')
function_args = unicode(m.group('args'), errors='ignore')
ret = unicode(m.group('ret'), errors='ignore')
cat = 'strace'
possible_fd_arg = None
first_arg = function_args.split(',')[0]
if first_arg and first_arg.strip().isdigit():
possible_fd_arg = first_arg.strip()
if function_name == 'open' and ret.isdigit():
# 1918 1311606151.649379 open("/foo/bar.so", O_RDONLY) = 7 <0.000088>
fd_map[ret] = first_arg
args = {
'args': function_args,
'ret': ret,
}
if interrupted:
args['interrupted'] = True
if possible_fd_arg and possible_fd_arg in fd_map:
args['fd%s' % first_arg] = fd_map[possible_fd_arg]
out.append({
'cat': cat,
'pid': pid,
'tid': tid,
'ts': ts_begin,
'ph': 'B', # Begin
'name': function_name,
})
out.append({
'cat': cat,
'pid': pid,
'tid': tid,
'ts': ts_end,
'ph': 'E', # End
'name': function_name,
'args': args,
})
return out
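# For the read() example line near the top of this file (and assuming the
# profiled pid is 6052), the function emits a Begin/End event pair in Chrome
# trace format, roughly:
#   {"cat": "strace", "pid": 6052, "tid": 6052, "ts": <start microseconds>,
#    "ph": "B", "name": "read"}
#   {"cat": "strace", "pid": 6052, "tid": 6052, "ts": <start + 22>,
#    "ph": "E", "name": "read", "args": {"args": "...", "ret": "8"}}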
def _GenerateTraceMetadata(timeline_model):
out = []
for process in timeline_model.processes:
out.append({
'name': 'process_name',
'ph': 'M', # Metadata
'pid': process,
'args': {
'name': timeline_model.processes[process].name
}
})
for thread in timeline_model.processes[process].threads:
out.append({
'name': 'thread_name',
'ph': 'M', # Metadata
'pid': process,
'tid': thread,
'args': {
'name': timeline_model.processes[process].threads[thread].name
}
})
return out
class _SingleProcessStraceProfiler(object):
"""An internal class for using perf for a given process."""
def __init__(self, pid, output_file, platform_backend):
self._pid = pid
self._platform_backend = platform_backend
self._output_file = output_file
self._tmp_output_file = tempfile.NamedTemporaryFile('w', 0)
self._proc = subprocess.Popen(
['strace', '-ttt', '-f', '-T', '-p', str(pid), '-o', output_file],
stdout=self._tmp_output_file, stderr=subprocess.STDOUT)
def CollectProfile(self):
if ('renderer' in self._output_file and
not self._platform_backend.GetCommandLine(self._pid)):
logging.warning('Renderer was swapped out during profiling. '
'To collect a full profile rerun with '
'"--extra-browser-args=--single-process"')
self._proc.send_signal(signal.SIGINT)
exit_code = self._proc.wait()
try:
if exit_code:
raise Exception('strace failed with exit code %d. Output:\n%s' % (
exit_code, self._GetStdOut()))
finally:
self._tmp_output_file.close()
return _StraceToChromeTrace(self._pid, self._output_file)
def _GetStdOut(self):
self._tmp_output_file.flush()
try:
with open(self._tmp_output_file.name) as f:
return f.read()
except IOError:
return ''
class StraceProfiler(profiler.Profiler):
def __init__(self, browser_backend, platform_backend, output_path, state):
super(StraceProfiler, self).__init__(
browser_backend, platform_backend, output_path, state)
assert self._browser_backend.supports_tracing
self._browser_backend.StartTracing(None, 10)
process_output_file_map = self._GetProcessOutputFileMap()
self._process_profilers = []
self._output_file = output_path + '.json'
for pid, output_file in process_output_file_map.iteritems():
if 'zygote' in output_file:
continue
self._process_profilers.append(
_SingleProcessStraceProfiler(pid, output_file, platform_backend))
@classmethod
def name(cls):
return 'strace'
@classmethod
def is_supported(cls, browser_type):
if sys.platform != 'linux2':
return False
# TODO(tonyg): This should be supported on android and cros.
if (browser_type.startswith('android') or
browser_type.startswith('cros')):
return False
return True
@classmethod
def CustomizeBrowserOptions(cls, browser_type, options):
options.AppendExtraBrowserArgs([
'--no-sandbox',
'--allow-sandbox-debugging'
])
def CollectProfile(self):
print 'Processing trace...'
out_json = []
for single_process in self._process_profilers:
out_json.extend(single_process.CollectProfile())
timeline_data = self._browser_backend.StopTracing()
timeline_model = model.TimelineModel(timeline_data)
out_json.extend(_GenerateTraceMetadata(timeline_model))
with open(self._output_file, 'w') as f:
f.write(json.dumps(out_json, separators=(',', ':')))
print 'Trace saved as %s' % self._output_file
print 'To view, open in chrome://tracing'
return [self._output_file]
| bsd-3-clause |
40223125/w17t2 | static/Brython3.1.1-20150328-091302/Lib/stat.py | 765 | 4304 | """Constants/functions for interpreting results of os.stat() and os.lstat().
Suggested usage: from stat import *
"""
# Indices for stat struct members in the tuple returned by os.stat()
ST_MODE = 0
ST_INO = 1
ST_DEV = 2
ST_NLINK = 3
ST_UID = 4
ST_GID = 5
ST_SIZE = 6
ST_ATIME = 7
ST_MTIME = 8
ST_CTIME = 9
# Extract bits from the mode
def S_IMODE(mode):
"""Return the portion of the file's mode that can be set by
os.chmod().
"""
return mode & 0o7777
def S_IFMT(mode):
"""Return the portion of the file's mode that describes the
file type.
"""
return mode & 0o170000
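# Example (not part of the original module): for a regular file with mode
# 0o755, S_IMODE(0o100755) == 0o755 and S_IFMT(0o100755) == S_IFREG.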
# Constants used as S_IFMT() for various file types
# (not all are implemented on all systems)
S_IFDIR = 0o040000 # directory
S_IFCHR = 0o020000 # character device
S_IFBLK = 0o060000 # block device
S_IFREG = 0o100000 # regular file
S_IFIFO = 0o010000 # fifo (named pipe)
S_IFLNK = 0o120000 # symbolic link
S_IFSOCK = 0o140000 # socket file
# Functions to test for each file type
def S_ISDIR(mode):
"""Return True if mode is from a directory."""
return S_IFMT(mode) == S_IFDIR
def S_ISCHR(mode):
"""Return True if mode is from a character special device file."""
return S_IFMT(mode) == S_IFCHR
def S_ISBLK(mode):
"""Return True if mode is from a block special device file."""
return S_IFMT(mode) == S_IFBLK
def S_ISREG(mode):
"""Return True if mode is from a regular file."""
return S_IFMT(mode) == S_IFREG
def S_ISFIFO(mode):
"""Return True if mode is from a FIFO (named pipe)."""
return S_IFMT(mode) == S_IFIFO
def S_ISLNK(mode):
"""Return True if mode is from a symbolic link."""
return S_IFMT(mode) == S_IFLNK
def S_ISSOCK(mode):
"""Return True if mode is from a socket."""
return S_IFMT(mode) == S_IFSOCK
# Names for permission bits
S_ISUID = 0o4000 # set UID bit
S_ISGID = 0o2000 # set GID bit
S_ENFMT = S_ISGID # file locking enforcement
S_ISVTX = 0o1000 # sticky bit
S_IREAD = 0o0400 # Unix V7 synonym for S_IRUSR
S_IWRITE = 0o0200 # Unix V7 synonym for S_IWUSR
S_IEXEC = 0o0100 # Unix V7 synonym for S_IXUSR
S_IRWXU = 0o0700 # mask for owner permissions
S_IRUSR = 0o0400 # read by owner
S_IWUSR = 0o0200 # write by owner
S_IXUSR = 0o0100 # execute by owner
S_IRWXG = 0o0070 # mask for group permissions
S_IRGRP = 0o0040 # read by group
S_IWGRP = 0o0020 # write by group
S_IXGRP = 0o0010 # execute by group
S_IRWXO = 0o0007 # mask for others (not in group) permissions
S_IROTH = 0o0004 # read by others
S_IWOTH = 0o0002 # write by others
S_IXOTH = 0o0001 # execute by others
# Names for file flags
UF_NODUMP = 0x00000001 # do not dump file
UF_IMMUTABLE = 0x00000002 # file may not be changed
UF_APPEND = 0x00000004 # file may only be appended to
UF_OPAQUE = 0x00000008 # directory is opaque when viewed through a union stack
UF_NOUNLINK = 0x00000010 # file may not be renamed or deleted
UF_COMPRESSED = 0x00000020 # OS X: file is hfs-compressed
UF_HIDDEN = 0x00008000 # OS X: file should not be displayed
SF_ARCHIVED = 0x00010000 # file may be archived
SF_IMMUTABLE = 0x00020000 # file may not be changed
SF_APPEND = 0x00040000 # file may only be appended to
SF_NOUNLINK = 0x00100000 # file may not be renamed or deleted
SF_SNAPSHOT = 0x00200000 # file is a snapshot file
_filemode_table = (
((S_IFLNK, "l"),
(S_IFREG, "-"),
(S_IFBLK, "b"),
(S_IFDIR, "d"),
(S_IFCHR, "c"),
(S_IFIFO, "p")),
((S_IRUSR, "r"),),
((S_IWUSR, "w"),),
((S_IXUSR|S_ISUID, "s"),
(S_ISUID, "S"),
(S_IXUSR, "x")),
((S_IRGRP, "r"),),
((S_IWGRP, "w"),),
((S_IXGRP|S_ISGID, "s"),
(S_ISGID, "S"),
(S_IXGRP, "x")),
((S_IROTH, "r"),),
((S_IWOTH, "w"),),
((S_IXOTH|S_ISVTX, "t"),
(S_ISVTX, "T"),
(S_IXOTH, "x"))
)
def filemode(mode):
"""Convert a file's mode to a string of the form '-rwxrwxrwx'."""
perm = []
for table in _filemode_table:
for bit, char in table:
if mode & bit == bit:
perm.append(char)
break
else:
perm.append("-")
return "".join(perm)
| gpl-3.0 |
Fafou/Sick-Beard | lib/requests/packages/chardet2/langhungarianmodel.py | 63 | 12559 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
win1250HungarianCharToOrderMap = ( \
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences: 5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = ( \
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
Latin2HungarianModel = { \
'charToOrderMap': Latin2_HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "ISO-8859-2"
}
Win1250HungarianModel = { \
'charToOrderMap': win1250HungarianCharToOrderMap,
'precedenceMatrix': HungarianLangModel,
'mTypicalPositiveRatio': 0.947368,
'keepEnglishLetter': True,
'charsetName': "windows-1250"
}
| gpl-3.0 |
abhishek-ch/hue | desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/Random/test_random.py | 117 | 7111 | # -*- coding: utf-8 -*-
#
# SelfTest/Util/test_generic.py: Self-test for the Crypto.Random.new() function
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Random.new()"""
__revision__ = "$Id$"
import unittest
import sys
if sys.version_info[0] == 2 and sys.version_info[1] == 1:
from Crypto.Util.py21compat import *
from Crypto.Util.py3compat import *
class SimpleTest(unittest.TestCase):
def runTest(self):
"""Crypto.Random.new()"""
# Import the Random module and try to use it
from Crypto import Random
randobj = Random.new()
x = randobj.read(16)
y = randobj.read(16)
self.assertNotEqual(x, y)
z = Random.get_random_bytes(16)
self.assertNotEqual(x, z)
self.assertNotEqual(y, z)
# Test the Random.random module, which
# implements a subset of Python's random API
# Not implemented:
# seed(), getstate(), setstate(), jumpahead()
# random(), uniform(), triangular(), betavariate()
# expovariate(), gammavariate(), gauss(),
# lognormvariate(), normalvariate(),
# vonmisesvariate(), paretovariate()
# weibullvariate()
# WichmannHill(), whseed(), SystemRandom()
from Crypto.Random import random
x = random.getrandbits(16*8)
y = random.getrandbits(16*8)
self.assertNotEqual(x, y)
# Test randrange
if x>y:
start = y
stop = x
else:
start = x
stop = y
for step in range(1,10):
x = random.randrange(start,stop,step)
y = random.randrange(start,stop,step)
self.assertNotEqual(x, y)
self.assertEqual(start <= x < stop, True)
self.assertEqual(start <= y < stop, True)
self.assertEqual((x - start) % step, 0)
self.assertEqual((y - start) % step, 0)
for i in range(10):
self.assertEqual(random.randrange(1,2), 1)
self.assertRaises(ValueError, random.randrange, start, start)
self.assertRaises(ValueError, random.randrange, stop, start, step)
self.assertRaises(TypeError, random.randrange, start, stop, step, step)
self.assertRaises(TypeError, random.randrange, start, stop, "1")
self.assertRaises(TypeError, random.randrange, "1", stop, step)
self.assertRaises(TypeError, random.randrange, 1, "2", step)
self.assertRaises(ValueError, random.randrange, start, stop, 0)
# Test randint
x = random.randint(start,stop)
y = random.randint(start,stop)
self.assertNotEqual(x, y)
self.assertEqual(start <= x <= stop, True)
self.assertEqual(start <= y <= stop, True)
for i in range(10):
self.assertEqual(random.randint(1,1), 1)
self.assertRaises(ValueError, random.randint, stop, start)
self.assertRaises(TypeError, random.randint, start, stop, step)
self.assertRaises(TypeError, random.randint, "1", stop)
self.assertRaises(TypeError, random.randint, 1, "2")
# Test choice
seq = range(10000)
x = random.choice(seq)
y = random.choice(seq)
self.assertNotEqual(x, y)
self.assertEqual(x in seq, True)
self.assertEqual(y in seq, True)
for i in range(10):
self.assertEqual(random.choice((1,2,3)) in (1,2,3), True)
self.assertEqual(random.choice([1,2,3]) in [1,2,3], True)
if sys.version_info[0] == 3:
self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True)
self.assertEqual(1, random.choice([1]))
self.assertRaises(IndexError, random.choice, [])
self.assertRaises(TypeError, random.choice, 1)
# Test shuffle. Lacks random parameter to specify function.
# Make copies of seq
seq = range(500)
x = list(seq)
y = list(seq)
random.shuffle(x)
random.shuffle(y)
self.assertNotEqual(x, y)
self.assertEqual(len(seq), len(x))
self.assertEqual(len(seq), len(y))
for i in range(len(seq)):
self.assertEqual(x[i] in seq, True)
self.assertEqual(y[i] in seq, True)
self.assertEqual(seq[i] in x, True)
self.assertEqual(seq[i] in y, True)
z = [1]
random.shuffle(z)
self.assertEqual(z, [1])
if sys.version_info[0] == 3:
z = bytearray(b('12'))
random.shuffle(z)
self.assertEqual(b('1') in z, True)
self.assertRaises(TypeError, random.shuffle, b('12'))
self.assertRaises(TypeError, random.shuffle, 1)
self.assertRaises(TypeError, random.shuffle, "1")
self.assertRaises(TypeError, random.shuffle, (1,2))
# 2to3 wraps a list() around it, alas - but I want to shoot
# myself in the foot here! :D
# if sys.version_info[0] == 3:
# self.assertRaises(TypeError, random.shuffle, range(3))
# Test sample
x = random.sample(seq, 20)
y = random.sample(seq, 20)
self.assertNotEqual(x, y)
for i in range(20):
self.assertEqual(x[i] in seq, True)
self.assertEqual(y[i] in seq, True)
z = random.sample([1], 1)
self.assertEqual(z, [1])
z = random.sample((1,2,3), 1)
self.assertEqual(z[0] in (1,2,3), True)
z = random.sample("123", 1)
self.assertEqual(z[0] in "123", True)
z = random.sample(range(3), 1)
self.assertEqual(z[0] in range(3), True)
if sys.version_info[0] == 3:
z = random.sample(b("123"), 1)
self.assertEqual(z[0] in b("123"), True)
z = random.sample(bytearray(b("123")), 1)
self.assertEqual(z[0] in bytearray(b("123")), True)
self.assertRaises(TypeError, random.sample, 1)
def get_tests(config={}):
return [SimpleTest()]
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| apache-2.0 |
mindofmatthew/three.js | utils/converters/obj/convert_obj_three.py | 160 | 48659 | """Convert Wavefront OBJ / MTL files into Three.js (JSON model version, to be used with ascii / binary loader)
-------------------------
How to use this converter
-------------------------
python convert_obj_three.py -i infile.obj -o outfile.js [-m "morphfiles*.obj"] [-c "morphcolors*.obj"] [-a center|centerxz|top|bottom|none] [-s smooth|flat] [-t ascii|binary] [-d invert|normal] [-b] [-e]
Notes:
- flags
-i infile.obj input OBJ file
-o outfile.js output JS file
-m "morphfiles*.obj" morph OBJ files (can use wildcards, enclosed in quotes multiple patterns separate by space)
-c "morphcolors*.obj" morph colors OBJ files (can use wildcards, enclosed in quotes multiple patterns separate by space)
-a center|centerxz|top|bottom|none model alignment
-s smooth|flat smooth = export vertex normals, flat = no normals (face normals computed in loader)
-t ascii|binary export ascii or binary format (ascii has more features, binary just supports vertices, faces, normals, uvs and materials)
-d invert|normal invert transparency
-b bake material colors into face colors
-x 10.0 scale and truncate
-f 2 morph frame sampling step
- by default:
use smooth shading (if there were vertex normals in the original model)
will be in ASCII format
original model is assumed to use non-inverted transparency / dissolve (0.0 fully transparent, 1.0 fully opaque)
no face colors baking
no scale and truncate
morph frame step = 1 (all files will be processed)
- binary conversion will create two files:
outfile.js (materials)
outfile.bin (binary buffers)
--------------------------------------------------
How to use generated JS file in your HTML document
--------------------------------------------------
<script type="text/javascript" src="Three.js"></script>
...
<script type="text/javascript">
...
// load ascii model
var jsonLoader = new THREE.JSONLoader();
jsonLoader.load( "Model_ascii.js", createScene );
// load binary model
var binLoader = new THREE.BinaryLoader();
binLoader.load( "Model_bin.js", createScene );
function createScene( geometry, materials ) {
var mesh = new THREE.Mesh( geometry, new THREE.MeshFaceMaterial( materials ) );
}
...
</script>
-------------------------------------
Parsers based on formats descriptions
-------------------------------------
http://en.wikipedia.org/wiki/Obj
http://en.wikipedia.org/wiki/Material_Template_Library
-------------------
Current limitations
-------------------
- for the moment, only diffuse color and texture are used
(will need to extend shaders / renderers / materials in Three)
- texture coordinates can be wrong in canvas renderer
(there is crude normalization, but it doesn't
work for all cases)
- smoothing can be turned on/off only for the whole mesh
----------------------------------------------
How to get proper OBJ + MTL files with Blender
----------------------------------------------
0. Remove default cube (press DEL and ENTER)
1. Import / create model
2. Select all meshes (Select -> Select All by Type -> Mesh)
3. Export to OBJ (File -> Export -> Wavefront .obj)
- enable following options in exporter
Material Groups
Rotate X90
Apply Modifiers
High Quality Normals
Copy Images
Selection Only
Objects as OBJ Objects
UVs
Normals
Materials
- select empty folder
- give your exported file name with "obj" extension
- click on "Export OBJ" button
4. Your model now consists of all the files in this folder (OBJ, MTL, and a number of images)
- this converter assumes all files staying in the same folder,
(OBJ / MTL files use relative paths)
- for WebGL, textures must be power of 2 sized
------
Author
------
AlteredQualia http://alteredqualia.com
"""
import fileinput
import operator
import random
import os.path
import getopt
import sys
import struct
import math
import glob
# #####################################################
# Configuration
# #####################################################
ALIGN = "none" # center centerxz bottom top none
SHADING = "smooth" # smooth flat
TYPE = "ascii" # ascii binary
TRANSPARENCY = "normal" # normal invert
TRUNCATE = False
SCALE = 1.0
FRAMESTEP = 1
BAKE_COLORS = False
# default colors for debugging (each material gets one distinct color):
# white, red, green, blue, yellow, cyan, magenta
COLORS = [0xeeeeee, 0xee0000, 0x00ee00, 0x0000ee, 0xeeee00, 0x00eeee, 0xee00ee]
# #####################################################
# Templates
# #####################################################
TEMPLATE_FILE_ASCII = u"""\
{
"metadata" :
{
"formatVersion" : 3.1,
"sourceFile" : "%(fname)s",
"generatedBy" : "OBJConverter",
"vertices" : %(nvertex)d,
"faces" : %(nface)d,
"normals" : %(nnormal)d,
"colors" : %(ncolor)d,
"uvs" : %(nuv)d,
"materials" : %(nmaterial)d
},
"scale" : %(scale)f,
"materials": [%(materials)s],
"vertices": [%(vertices)s],
"morphTargets": [%(morphTargets)s],
"morphColors": [%(morphColors)s],
"normals": [%(normals)s],
"colors": [%(colors)s],
"uvs": [[%(uvs)s]],
"faces": [%(faces)s]
}
"""
TEMPLATE_FILE_BIN = u"""\
{
"metadata" :
{
"formatVersion" : 3.1,
"sourceFile" : "%(fname)s",
"generatedBy" : "OBJConverter",
"vertices" : %(nvertex)d,
"faces" : %(nface)d,
"normals" : %(nnormal)d,
"uvs" : %(nuv)d,
"materials" : %(nmaterial)d
},
"materials": [%(materials)s],
"buffers": "%(buffers)s"
}
"""
TEMPLATE_VERTEX = "%f,%f,%f"
TEMPLATE_VERTEX_TRUNCATE = "%d,%d,%d"
TEMPLATE_N = "%.5g,%.5g,%.5g"
TEMPLATE_UV = "%.5g,%.5g"
TEMPLATE_COLOR = "%.3g,%.3g,%.3g"
TEMPLATE_COLOR_DEC = "%d"
TEMPLATE_MORPH_VERTICES = '\t{ "name": "%s", "vertices": [%s] }'
TEMPLATE_MORPH_COLORS = '\t{ "name": "%s", "colors": [%s] }'
# #####################################################
# Utils
# #####################################################
def file_exists(filename):
"""Return true if file exists and is accessible for reading.
Should be safer than just testing for existence due to links and
permissions magic on Unix filesystems.
@rtype: boolean
"""
try:
f = open(filename, 'r')
f.close()
return True
except IOError:
return False
def get_name(fname):
"""Create model name based of filename ("path/fname.js" -> "fname").
"""
return os.path.splitext(os.path.basename(fname))[0]
def bbox(vertices):
"""Compute bounding box of vertex array.
"""
if len(vertices)>0:
minx = maxx = vertices[0][0]
miny = maxy = vertices[0][1]
minz = maxz = vertices[0][2]
for v in vertices[1:]:
if v[0]<minx:
minx = v[0]
elif v[0]>maxx:
maxx = v[0]
if v[1]<miny:
miny = v[1]
elif v[1]>maxy:
maxy = v[1]
if v[2]<minz:
minz = v[2]
elif v[2]>maxz:
maxz = v[2]
return { 'x':[minx,maxx], 'y':[miny,maxy], 'z':[minz,maxz] }
else:
return { 'x':[0,0], 'y':[0,0], 'z':[0,0] }
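# Illustrative check of the helper above (values derived from the code, not from the original docs):
#   bbox([[0, 1, 2], [3, -1, 5]]) == { 'x': [0, 3], 'y': [-1, 1], 'z': [2, 5] }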
def translate(vertices, t):
"""Translate array of vertices by vector t.
"""
for i in xrange(len(vertices)):
vertices[i][0] += t[0]
vertices[i][1] += t[1]
vertices[i][2] += t[2]
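# Illustrative: translate(verts, [1, 2, 3]) shifts every vertex in place,
# e.g. [[0, 0, 0]] becomes [[1, 2, 3]] (derived from the loop above).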
def center(vertices):
"""Center model (middle of bounding box).
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = bb['y'][0] + (bb['y'][1] - bb['y'][0])/2.0
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def top(vertices):
"""Align top of the model with the floor (Y-axis) and center it around X and Z.
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = bb['y'][1]
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def bottom(vertices):
"""Align bottom of the model with the floor (Y-axis) and center it around X and Z.
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = bb['y'][0]
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def centerxz(vertices):
"""Center model around X and Z.
"""
bb = bbox(vertices)
cx = bb['x'][0] + (bb['x'][1] - bb['x'][0])/2.0
cy = 0
cz = bb['z'][0] + (bb['z'][1] - bb['z'][0])/2.0
translate(vertices, [-cx,-cy,-cz])
def normalize(v):
"""Normalize 3d vector"""
l = math.sqrt(v[0]*v[0] + v[1]*v[1] + v[2]*v[2])
if l:
v[0] /= l
v[1] /= l
v[2] /= l
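# Illustrative: normalize(v) rescales v in place to unit length,
# e.g. [3.0, 0.0, 4.0] becomes [0.6, 0.0, 0.8]; a zero vector is left unchanged.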
def veckey3(v):
return round(v[0], 6), round(v[1], 6), round(v[2], 6)
# #####################################################
# MTL parser
# #####################################################
def texture_relative_path(fullpath):
texture_file = os.path.basename(fullpath.replace("\\", "/"))
return texture_file
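# Illustrative: texture_relative_path("C:\\textures\\wood.jpg") -> "wood.jpg"
# (backslashes are converted first, so Windows-style MTL paths also reduce to the basename).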
def parse_mtl(fname):
"""Parse MTL file.
"""
materials = {}
previous_line = ""
for line in fileinput.input(fname):
line = previous_line + line
if line[-2:-1] == '\\':
previous_line = line[:-2]
continue
previous_line = ""
# Only split once initially for single-parameter tags that might have additional spaces in
# their values (i.e. "newmtl Material with spaces").
chunks = line.split(None, 1)
if len(chunks) > 0:
if len(chunks) > 1:
chunks[1] = chunks[1].strip()
# Material start
# newmtl identifier
if chunks[0] == "newmtl":
if len(chunks) > 1:
identifier = chunks[1]
else:
identifier = ""
if not identifier in materials:
materials[identifier] = {}
# Diffuse texture
# map_Kd texture_diffuse.jpg
if chunks[0] == "map_Kd" and len(chunks) == 2:
materials[identifier]["mapDiffuse"] = texture_relative_path(chunks[1])
# Ambient texture
# map_Ka texture_ambient.jpg
if chunks[0] == "map_Ka" and len(chunks) == 2:
materials[identifier]["mapAmbient"] = texture_relative_path(chunks[1])
# Specular texture
# map_Ks texture_specular.jpg
if chunks[0] == "map_Ks" and len(chunks) == 2:
materials[identifier]["mapSpecular"] = texture_relative_path(chunks[1])
# Alpha texture
# map_d texture_alpha.png
if chunks[0] == "map_d" and len(chunks) == 2:
materials[identifier]["transparent"] = True
materials[identifier]["mapAlpha"] = texture_relative_path(chunks[1])
# Bump texture
# map_bump texture_bump.jpg or bump texture_bump.jpg
if (chunks[0] == "map_bump" or chunks[0] == "bump") and len(chunks) == 2:
materials[identifier]["mapBump"] = texture_relative_path(chunks[1])
# Split the remaining parameters.
if len(chunks) > 1:
chunks = [chunks[0]] + chunks[1].split()
# Diffuse color
# Kd 1.000 1.000 1.000
if chunks[0] == "Kd" and len(chunks) == 4:
materials[identifier]["colorDiffuse"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
# Ambient color
# Ka 1.000 1.000 1.000
if chunks[0] == "Ka" and len(chunks) == 4:
materials[identifier]["colorAmbient"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
# Specular color
# Ks 1.000 1.000 1.000
if chunks[0] == "Ks" and len(chunks) == 4:
materials[identifier]["colorSpecular"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
# Specular coefficient
# Ns 154.000
if chunks[0] == "Ns" and len(chunks) == 2:
materials[identifier]["specularCoef"] = float(chunks[1])
# Transparency
# Tr 0.9 or d 0.9
if (chunks[0] == "Tr" or chunks[0] == "d") and len(chunks) == 2:
materials[identifier]["transparent"] = True
if TRANSPARENCY == "invert":
materials[identifier]["transparency"] = 1.0 - float(chunks[1])
else:
materials[identifier]["transparency"] = float(chunks[1])
# Optical density
# Ni 1.0
if chunks[0] == "Ni" and len(chunks) == 2:
materials[identifier]["opticalDensity"] = float(chunks[1])
# Illumination
# illum 2
#
# 0. Color on and Ambient off
# 1. Color on and Ambient on
# 2. Highlight on
# 3. Reflection on and Ray trace on
# 4. Transparency: Glass on, Reflection: Ray trace on
# 5. Reflection: Fresnel on and Ray trace on
# 6. Transparency: Refraction on, Reflection: Fresnel off and Ray trace on
# 7. Transparency: Refraction on, Reflection: Fresnel on and Ray trace on
# 8. Reflection on and Ray trace off
# 9. Transparency: Glass on, Reflection: Ray trace off
# 10. Casts shadows onto invisible surfaces
if chunks[0] == "illum" and len(chunks) == 2:
materials[identifier]["illumination"] = int(chunks[1])
return materials
# #####################################################
# OBJ parser
# #####################################################
def parse_vertex(text):
"""Parse text chunk specifying single vertex.
Possible formats:
vertex index
vertex index / texture index
vertex index / texture index / normal index
vertex index / / normal index
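Illustrative results (derived from the parsing code below):
"5" -> { 'v':5, 't':0, 'n':0 }
"5/2" -> { 'v':5, 't':2, 'n':0 }
"5/2/3" -> { 'v':5, 't':2, 'n':3 }
"5//3" -> { 'v':5, 't':0, 'n':3 }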
"""
v = 0
t = 0
n = 0
chunks = text.split("/")
v = int(chunks[0])
if len(chunks) > 1:
if chunks[1]:
t = int(chunks[1])
if len(chunks) > 2:
if chunks[2]:
n = int(chunks[2])
return { 'v':v, 't':t, 'n':n }
def parse_obj(fname):
"""Parse OBJ file.
"""
vertices = []
normals = []
uvs = []
faces = []
materials = {}
material = ""
mcounter = 0
mcurrent = 0
mtllib = ""
# current face state
group = 0
object = 0
smooth = 0
previous_line = ""
for line in fileinput.input(fname):
line = previous_line + line
if line[-2:-1] == '\\':
previous_line = line[:-2]
continue
previous_line = ""
# Only split once initially for single-parameter tags that might have additional spaces in
# their values (i.e. "usemtl Material with spaces").
chunks = line.split(None, 1)
if len(chunks) > 0:
if len(chunks) > 1:
chunks[1] = chunks[1].strip()
# Group
if chunks[0] == "g" and len(chunks) == 2:
group = chunks[1]
# Object
if chunks[0] == "o" and len(chunks) == 2:
object = chunks[1]
# Materials definition
if chunks[0] == "mtllib" and len(chunks) == 2:
mtllib = chunks[1]
# Material
if chunks[0] == "usemtl":
if len(chunks) > 1:
material = chunks[1]
else:
material = ""
if not material in materials:
mcurrent = mcounter
materials[material] = mcounter
mcounter += 1
else:
mcurrent = materials[material]
# Split the remaining parameters.
if len(chunks) > 1:
chunks = [chunks[0]] + chunks[1].split()
# Vertices as (x,y,z) coordinates
# v 0.123 0.234 0.345
if chunks[0] == "v" and len(chunks) == 4:
x = float(chunks[1])
y = float(chunks[2])
z = float(chunks[3])
vertices.append([x,y,z])
# Normals in (x,y,z) form; normals might not be unit
# vn 0.707 0.000 0.707
if chunks[0] == "vn" and len(chunks) == 4:
x = float(chunks[1])
y = float(chunks[2])
z = float(chunks[3])
normals.append([x,y,z])
# Texture coordinates in (u,v[,w]) coordinates, w is optional
# vt 0.500 -1.352 [0.234]
if chunks[0] == "vt" and len(chunks) >= 3:
u = float(chunks[1])
v = float(chunks[2])
w = 0
if len(chunks)>3:
w = float(chunks[3])
uvs.append([u,v,w])
# Face
if chunks[0] == "f" and len(chunks) >= 4:
vertex_index = []
uv_index = []
normal_index = []
# Precompute vert / normal / uv lists
# for negative index lookup
vertlen = len(vertices) + 1
normlen = len(normals) + 1
uvlen = len(uvs) + 1
for v in chunks[1:]:
vertex = parse_vertex(v)
if vertex['v']:
if vertex['v'] < 0:
vertex['v'] += vertlen
vertex_index.append(vertex['v'])
if vertex['t']:
if vertex['t'] < 0:
vertex['t'] += uvlen
uv_index.append(vertex['t'])
if vertex['n']:
if vertex['n'] < 0:
vertex['n'] += normlen
normal_index.append(vertex['n'])
faces.append({
'vertex':vertex_index,
'uv':uv_index,
'normal':normal_index,
'material':mcurrent,
'group':group,
'object':object,
'smooth':smooth,
})
# Smooth shading
if chunks[0] == "s" and len(chunks) == 2:
smooth = chunks[1]
return faces, vertices, uvs, normals, materials, mtllib
# #####################################################
# Generator - faces
# #####################################################
def setBit(value, position, on):
if on:
mask = 1 << position
return (value | mask)
else:
mask = ~(1 << position)
return (value & mask)
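# Illustrative: setBit(0, 1, True) -> 2 (bit 1 set), setBit(3, 0, False) -> 2 (bit 0 cleared);
# used below to pack the per-face feature flags into a single faceType integer.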
def generate_face(f, fc):
isTriangle = ( len(f['vertex']) == 3 )
if isTriangle:
nVertices = 3
else:
nVertices = 4
hasMaterial = True # for the moment OBJs without materials get default material
hasFaceUvs = False # not supported in OBJ
hasFaceVertexUvs = ( len(f['uv']) >= nVertices )
hasFaceNormals = False # don't export any face normals (as they are computed in engine)
hasFaceVertexNormals = ( len(f["normal"]) >= nVertices and SHADING == "smooth" )
hasFaceColors = BAKE_COLORS
hasFaceVertexColors = False # not supported in OBJ
faceType = 0
faceType = setBit(faceType, 0, not isTriangle)
faceType = setBit(faceType, 1, hasMaterial)
faceType = setBit(faceType, 2, hasFaceUvs)
faceType = setBit(faceType, 3, hasFaceVertexUvs)
faceType = setBit(faceType, 4, hasFaceNormals)
faceType = setBit(faceType, 5, hasFaceVertexNormals)
faceType = setBit(faceType, 6, hasFaceColors)
faceType = setBit(faceType, 7, hasFaceVertexColors)
faceData = []
# order is important, must match order in JSONLoader
# face type
# vertex indices
# material index
# face uvs index
# face vertex uvs indices
# face normal index
# face vertex normals indices
# face color index
# face vertex colors indices
faceData.append(faceType)
# must clamp in case of polygons bigger than quads
for i in xrange(nVertices):
index = f['vertex'][i] - 1
faceData.append(index)
faceData.append( f['material'] )
if hasFaceVertexUvs:
for i in xrange(nVertices):
index = f['uv'][i] - 1
faceData.append(index)
if hasFaceVertexNormals:
for i in xrange(nVertices):
index = f['normal'][i] - 1
faceData.append(index)
if hasFaceColors:
index = fc['material']
faceData.append(index)
return ",".join( map(str, faceData) )
# #####################################################
# Generator - chunks
# #####################################################
def hexcolor(c):
return ( int(c[0] * 255) << 16 ) + ( int(c[1] * 255) << 8 ) + int(c[2] * 255)
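# Illustrative: hexcolor([1.0, 0.5, 0.0]) -> 0xFF7F00 (r/g/b floats in 0..1 packed into one int).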
def generate_vertex(v, option_vertices_truncate, scale):
if not option_vertices_truncate:
return TEMPLATE_VERTEX % (v[0], v[1], v[2])
else:
return TEMPLATE_VERTEX_TRUNCATE % (scale * v[0], scale * v[1], scale * v[2])
def generate_normal(n):
return TEMPLATE_N % (n[0], n[1], n[2])
def generate_uv(uv):
return TEMPLATE_UV % (uv[0], uv[1])
def generate_color_rgb(c):
return TEMPLATE_COLOR % (c[0], c[1], c[2])
def generate_color_decimal(c):
return TEMPLATE_COLOR_DEC % hexcolor(c)
# #####################################################
# Morphs
# #####################################################
def generate_morph_vertex(name, vertices):
vertex_string = ",".join(generate_vertex(v, TRUNCATE, SCALE) for v in vertices)
return TEMPLATE_MORPH_VERTICES % (name, vertex_string)
def generate_morph_color(name, colors):
color_string = ",".join(generate_color_rgb(c) for c in colors)
return TEMPLATE_MORPH_COLORS % (name, color_string)
def extract_material_colors(materials, mtlfilename, basename):
"""Extract diffuse colors from MTL materials
"""
if not materials:
materials = { 'default': 0 }
mtl = create_materials(materials, mtlfilename, basename)
mtlColorArraySrt = []
for m in mtl:
if m in materials:
index = materials[m]
color = mtl[m].get("colorDiffuse", [1,0,0])
mtlColorArraySrt.append([index, color])
mtlColorArraySrt.sort()
mtlColorArray = [x[1] for x in mtlColorArraySrt]
return mtlColorArray
def extract_face_colors(faces, material_colors):
"""Extract colors from materials and assign them to faces
"""
faceColors = []
for face in faces:
material_index = face['material']
faceColors.append(material_colors[material_index])
return faceColors
def generate_morph_targets(morphfiles, n_vertices, infile):
skipOriginalMorph = False
norminfile = os.path.normpath(infile)
morphVertexData = []
for mfilepattern in morphfiles.split():
matches = glob.glob(mfilepattern)
matches.sort()
indices = range(0, len(matches), FRAMESTEP)
for i in indices:
path = matches[i]
normpath = os.path.normpath(path)
if normpath != norminfile or not skipOriginalMorph:
name = os.path.basename(normpath)
morphFaces, morphVertices, morphUvs, morphNormals, morphMaterials, morphMtllib = parse_obj(normpath)
n_morph_vertices = len(morphVertices)
if n_vertices != n_morph_vertices:
print "WARNING: skipping morph [%s] with different number of vertices [%d] than the original model [%d]" % (name, n_morph_vertices, n_vertices)
else:
if ALIGN == "center":
center(morphVertices)
elif ALIGN == "centerxz":
centerxz(morphVertices)
elif ALIGN == "bottom":
bottom(morphVertices)
elif ALIGN == "top":
top(morphVertices)
morphVertexData.append((get_name(name), morphVertices))
print "adding [%s] with %d vertices" % (name, n_morph_vertices)
morphTargets = ""
if len(morphVertexData):
morphTargets = "\n%s\n\t" % ",\n".join(generate_morph_vertex(name, vertices) for name, vertices in morphVertexData)
return morphTargets
def generate_morph_colors(colorfiles, n_vertices, n_faces):
morphColorData = []
colorFaces = []
materialColors = []
for mfilepattern in colorfiles.split():
matches = glob.glob(mfilepattern)
matches.sort()
for path in matches:
normpath = os.path.normpath(path)
name = os.path.basename(normpath)
morphFaces, morphVertices, morphUvs, morphNormals, morphMaterials, morphMtllib = parse_obj(normpath)
n_morph_vertices = len(morphVertices)
n_morph_faces = len(morphFaces)
if n_vertices != n_morph_vertices:
print "WARNING: skipping morph color map [%s] with different number of vertices [%d] than the original model [%d]" % (name, n_morph_vertices, n_vertices)
elif n_faces != n_morph_faces:
print "WARNING: skipping morph color map [%s] with different number of faces [%d] than the original model [%d]" % (name, n_morph_faces, n_faces)
else:
morphMaterialColors = extract_material_colors(morphMaterials, morphMtllib, normpath)
morphFaceColors = extract_face_colors(morphFaces, morphMaterialColors)
morphColorData.append((get_name(name), morphFaceColors))
# take first color map for baking into face colors
if len(colorFaces) == 0:
colorFaces = morphFaces
materialColors = morphMaterialColors
print "adding [%s] with %d face colors" % (name, len(morphFaceColors))
morphColors = ""
if len(morphColorData):
morphColors = "\n%s\n\t" % ",\n".join(generate_morph_color(name, colors) for name, colors in morphColorData)
return morphColors, colorFaces, materialColors
# #####################################################
# Materials
# #####################################################
def generate_color(i):
"""Generate hex color corresponding to integer.
Colors should have well defined ordering.
First N colors are hardcoded, then colors are random
(must seed random number generator with deterministic value
before getting colors).
"""
if i < len(COLORS):
#return "0x%06x" % COLORS[i]
return COLORS[i]
else:
#return "0x%06x" % int(0xffffff * random.random())
return int(0xffffff * random.random())
def value2string(v):
if type(v)==str and v[0:2] != "0x":
return '"%s"' % v
elif type(v) == bool:
return str(v).lower()
return str(v)
def generate_materials(mtl, materials):
"""Generate JS array of materials objects
JS material objects are basically prettified one-to-one
mappings of MTL properties in JSON format.
"""
mtl_array = []
for m in mtl:
if m in materials:
index = materials[m]
# add debug information
# materials should be sorted according to how
# they appeared in OBJ file (for the first time)
# this index is identifier used in face definitions
mtl[m]['DbgName'] = m
mtl[m]['DbgIndex'] = index
mtl[m]['DbgColor'] = generate_color(index)
if BAKE_COLORS:
mtl[m]['vertexColors'] = "face"
mtl_raw = ",\n".join(['\t"%s" : %s' % (n, value2string(v)) for n,v in sorted(mtl[m].items())])
mtl_string = "\t{\n%s\n\t}" % mtl_raw
mtl_array.append([index, mtl_string])
return ",\n\n".join([m for i,m in sorted(mtl_array)])
def generate_mtl(materials):
"""Generate dummy materials (if there is no MTL file).
"""
mtl = {}
for m in materials:
index = materials[m]
mtl[m] = {
'DbgName': m,
'DbgIndex': index,
'DbgColor': generate_color(index)
}
return mtl
def generate_materials_string(materials, mtlfilename, basename):
"""Generate final materials string.
"""
if not materials:
materials = { 'default': 0 }
mtl = create_materials(materials, mtlfilename, basename)
return generate_materials(mtl, materials)
def create_materials(materials, mtlfilename, basename):
"""Parse MTL file and create mapping between its materials and OBJ materials.
Eventual edge cases are handled here (missing materials, missing MTL file).
"""
random.seed(42) # to get well defined color order for debug colors
# default materials with debug colors for when
# there is no specified MTL / MTL loading failed,
# or if there were no materials / null materials
mtl = generate_mtl(materials)
if mtlfilename:
# create full pathname for MTL (included from OBJ)
path = os.path.dirname(basename)
fname = os.path.join(path, mtlfilename)
if file_exists(fname):
# override default materials with real ones from MTL
# (where they exist, otherwise keep defaults)
mtl.update(parse_mtl(fname))
else:
print "Couldn't find [%s]" % fname
return mtl
# #####################################################
# Faces
# #####################################################
def is_triangle_flat(f):
return len(f['vertex'])==3 and not (f["normal"] and SHADING == "smooth") and not f['uv']
def is_triangle_flat_uv(f):
return len(f['vertex'])==3 and not (f["normal"] and SHADING == "smooth") and len(f['uv'])==3
def is_triangle_smooth(f):
return len(f['vertex'])==3 and f["normal"] and SHADING == "smooth" and not f['uv']
def is_triangle_smooth_uv(f):
return len(f['vertex'])==3 and f["normal"] and SHADING == "smooth" and len(f['uv'])==3
def is_quad_flat(f):
return len(f['vertex'])==4 and not (f["normal"] and SHADING == "smooth") and not f['uv']
def is_quad_flat_uv(f):
return len(f['vertex'])==4 and not (f["normal"] and SHADING == "smooth") and len(f['uv'])==4
def is_quad_smooth(f):
return len(f['vertex'])==4 and f["normal"] and SHADING == "smooth" and not f['uv']
def is_quad_smooth_uv(f):
return len(f['vertex'])==4 and f["normal"] and SHADING == "smooth" and len(f['uv'])==4
def sort_faces(faces):
data = {
'triangles_flat': [],
'triangles_flat_uv': [],
'triangles_smooth': [],
'triangles_smooth_uv': [],
'quads_flat': [],
'quads_flat_uv': [],
'quads_smooth': [],
'quads_smooth_uv': []
}
for f in faces:
if is_triangle_flat(f):
data['triangles_flat'].append(f)
elif is_triangle_flat_uv(f):
data['triangles_flat_uv'].append(f)
elif is_triangle_smooth(f):
data['triangles_smooth'].append(f)
elif is_triangle_smooth_uv(f):
data['triangles_smooth_uv'].append(f)
elif is_quad_flat(f):
data['quads_flat'].append(f)
elif is_quad_flat_uv(f):
data['quads_flat_uv'].append(f)
elif is_quad_smooth(f):
data['quads_smooth'].append(f)
elif is_quad_smooth_uv(f):
data['quads_smooth_uv'].append(f)
return data
# #####################################################
# API - ASCII converter
# #####################################################
def convert_ascii(infile, morphfiles, colorfiles, outfile):
"""Convert infile.obj to outfile.js
Here is where everything happens. If you need to automate conversions,
just import this file as Python module and call this method.
"""
if not file_exists(infile):
print "Couldn't find [%s]" % infile
return
# parse OBJ / MTL files
faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
n_vertices = len(vertices)
n_faces = len(faces)
# align model
if ALIGN == "center":
center(vertices)
elif ALIGN == "centerxz":
centerxz(vertices)
elif ALIGN == "bottom":
bottom(vertices)
elif ALIGN == "top":
top(vertices)
# generate normals string
nnormal = 0
normals_string = ""
if SHADING == "smooth":
normals_string = ",".join(generate_normal(n) for n in normals)
nnormal = len(normals)
# extract morph vertices
morphTargets = generate_morph_targets(morphfiles, n_vertices, infile)
# extract morph colors
morphColors, colorFaces, materialColors = generate_morph_colors(colorfiles, n_vertices, n_faces)
# generate colors string
ncolor = 0
colors_string = ""
if len(colorFaces) < len(faces):
colorFaces = faces
materialColors = extract_material_colors(materials, mtllib, infile)
if BAKE_COLORS:
colors_string = ",".join(generate_color_decimal(c) for c in materialColors)
ncolor = len(materialColors)
# generate ascii model string
text = TEMPLATE_FILE_ASCII % {
"name" : get_name(outfile),
"fname" : os.path.basename(infile),
"nvertex" : len(vertices),
"nface" : len(faces),
"nuv" : len(uvs),
"nnormal" : nnormal,
"ncolor" : ncolor,
"nmaterial" : len(materials),
"materials" : generate_materials_string(materials, mtllib, infile),
"normals" : normals_string,
"colors" : colors_string,
"uvs" : ",".join(generate_uv(uv) for uv in uvs),
"vertices" : ",".join(generate_vertex(v, TRUNCATE, SCALE) for v in vertices),
"morphTargets" : morphTargets,
"morphColors" : morphColors,
"faces" : ",".join(generate_face(f, fc) for f, fc in zip(faces, colorFaces)),
"scale" : SCALE
}
out = open(outfile, "w")
out.write(text)
out.close()
print "%d vertices, %d faces, %d materials" % (len(vertices), len(faces), len(materials))
# #############################################################################
# API - Binary converter
# #############################################################################
def dump_materials_to_buffer(faces, buffer):
for f in faces:
data = struct.pack('<H',
f['material'])
buffer.append(data)
def dump_vertices3_to_buffer(faces, buffer):
for f in faces:
vi = f['vertex']
data = struct.pack('<III',
vi[0]-1, vi[1]-1, vi[2]-1)
buffer.append(data)
def dump_vertices4_to_buffer(faces, buffer):
for f in faces:
vi = f['vertex']
data = struct.pack('<IIII',
vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1)
buffer.append(data)
def dump_normals3_to_buffer(faces, buffer):
for f in faces:
ni = f['normal']
data = struct.pack('<III',
ni[0]-1, ni[1]-1, ni[2]-1)
buffer.append(data)
def dump_normals4_to_buffer(faces, buffer):
for f in faces:
ni = f['normal']
data = struct.pack('<IIII',
ni[0]-1, ni[1]-1, ni[2]-1, ni[3]-1)
buffer.append(data)
def dump_uvs3_to_buffer(faces, buffer):
for f in faces:
ui = f['uv']
data = struct.pack('<III',
ui[0]-1, ui[1]-1, ui[2]-1)
buffer.append(data)
def dump_uvs4_to_buffer(faces, buffer):
for f in faces:
ui = f['uv']
data = struct.pack('<IIII',
ui[0]-1, ui[1]-1, ui[2]-1, ui[3]-1)
buffer.append(data)
def add_padding(buffer, n):
if n % 4:
for i in range(4 - n % 4):
data = struct.pack('<B', 0)
buffer.append(data)
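# Illustrative: pads the buffer to a 4-byte boundary, e.g. add_padding(buffer, 6)
# appends two zero bytes so the next section starts 4-byte aligned.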
def convert_binary(infile, outfile):
"""Convert infile.obj to outfile.js + outfile.bin
"""
if not file_exists(infile):
print "Couldn't find [%s]" % infile
return
binfile = get_name(outfile) + ".bin"
faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
if ALIGN == "center":
center(vertices)
elif ALIGN == "centerxz":
centerxz(vertices)
elif ALIGN == "bottom":
bottom(vertices)
elif ALIGN == "top":
top(vertices)
sfaces = sort_faces(faces)
if SHADING == "smooth":
nnormals = len(normals)
else:
nnormals = 0
# ###################
# generate JS file
# ###################
text = TEMPLATE_FILE_BIN % {
"name" : get_name(outfile),
"materials" : generate_materials_string(materials, mtllib, infile),
"buffers" : binfile,
"fname" : os.path.basename(infile),
"nvertex" : len(vertices),
"nface" : len(faces),
"nmaterial" : len(materials),
"nnormal" : nnormals,
"nuv" : len(uvs)
}
out = open(outfile, "w")
out.write(text)
out.close()
# ###################
# generate BIN file
# ###################
buffer = []
# header
# ------
header_bytes = struct.calcsize('<12s')
header_bytes += struct.calcsize('<BBBBBBBB')
header_bytes += struct.calcsize('<IIIIIIIIIII')
# signature
signature = struct.pack('<12s', 'Three.js 003')
# metadata (all data is little-endian)
vertex_coordinate_bytes = 4
normal_coordinate_bytes = 1
uv_coordinate_bytes = 4
vertex_index_bytes = 4
normal_index_bytes = 4
uv_index_bytes = 4
material_index_bytes = 2
# header_bytes unsigned char 1
# vertex_coordinate_bytes unsigned char 1
# normal_coordinate_bytes unsigned char 1
# uv_coordinate_bytes unsigned char 1
# vertex_index_bytes unsigned char 1
# normal_index_bytes unsigned char 1
# uv_index_bytes unsigned char 1
# material_index_bytes unsigned char 1
bdata = struct.pack('<BBBBBBBB', header_bytes,
vertex_coordinate_bytes,
normal_coordinate_bytes,
uv_coordinate_bytes,
vertex_index_bytes,
normal_index_bytes,
uv_index_bytes,
material_index_bytes)
ntri_flat = len(sfaces['triangles_flat'])
ntri_smooth = len(sfaces['triangles_smooth'])
ntri_flat_uv = len(sfaces['triangles_flat_uv'])
ntri_smooth_uv = len(sfaces['triangles_smooth_uv'])
nquad_flat = len(sfaces['quads_flat'])
nquad_smooth = len(sfaces['quads_smooth'])
nquad_flat_uv = len(sfaces['quads_flat_uv'])
nquad_smooth_uv = len(sfaces['quads_smooth_uv'])
# nvertices unsigned int 4
# nnormals unsigned int 4
# nuvs unsigned int 4
# ntri_flat unsigned int 4
# ntri_smooth unsigned int 4
# ntri_flat_uv unsigned int 4
# ntri_smooth_uv unsigned int 4
# nquad_flat unsigned int 4
# nquad_smooth unsigned int 4
# nquad_flat_uv unsigned int 4
# nquad_smooth_uv unsigned int 4
ndata = struct.pack('<IIIIIIIIIII', len(vertices),
nnormals,
len(uvs),
ntri_flat,
ntri_smooth,
ntri_flat_uv,
ntri_smooth_uv,
nquad_flat,
nquad_smooth,
nquad_flat_uv,
nquad_smooth_uv)
buffer.append(signature)
buffer.append(bdata)
buffer.append(ndata)
# 1. vertices
# ------------
# x float 4
# y float 4
# z float 4
for v in vertices:
data = struct.pack('<fff', v[0], v[1], v[2])
buffer.append(data)
# 2. normals
# ---------------
# x signed char 1
# y signed char 1
# z signed char 1
if SHADING == "smooth":
for n in normals:
normalize(n)
data = struct.pack('<bbb', math.floor(n[0]*127+0.5),
math.floor(n[1]*127+0.5),
math.floor(n[2]*127+0.5))
buffer.append(data)
add_padding(buffer, nnormals * 3)
# 3. uvs
# -----------
# u float 4
# v float 4
for uv in uvs:
data = struct.pack('<ff', uv[0], uv[1])
buffer.append(data)
# padding
#data = struct.pack('<BB', 0, 0)
#buffer.append(data)
# 4. flat triangles (vertices + materials)
# ------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# ------------------
# m unsigned short 2
dump_vertices3_to_buffer(sfaces['triangles_flat'], buffer)
dump_materials_to_buffer(sfaces['triangles_flat'], buffer)
add_padding(buffer, ntri_flat * 2)
# 5. smooth triangles (vertices + materials + normals)
# -------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# -------------------
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# -------------------
# m unsigned short 2
dump_vertices3_to_buffer(sfaces['triangles_smooth'], buffer)
dump_normals3_to_buffer(sfaces['triangles_smooth'], buffer)
dump_materials_to_buffer(sfaces['triangles_smooth'], buffer)
add_padding(buffer, ntri_smooth * 2)
# 6. flat triangles uv (vertices + materials + uvs)
# --------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# --------------------
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
# --------------------
# m unsigned short 2
dump_vertices3_to_buffer(sfaces['triangles_flat_uv'], buffer)
dump_uvs3_to_buffer(sfaces['triangles_flat_uv'], buffer)
dump_materials_to_buffer(sfaces['triangles_flat_uv'], buffer)
add_padding(buffer, ntri_flat_uv * 2)
# 7. smooth triangles uv (vertices + materials + normals + uvs)
# ----------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# --------------------
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# --------------------
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
# --------------------
# m unsigned short 2
dump_vertices3_to_buffer(sfaces['triangles_smooth_uv'], buffer)
dump_normals3_to_buffer(sfaces['triangles_smooth_uv'], buffer)
dump_uvs3_to_buffer(sfaces['triangles_smooth_uv'], buffer)
dump_materials_to_buffer(sfaces['triangles_smooth_uv'], buffer)
add_padding(buffer, ntri_smooth_uv * 2)
# 8. flat quads (vertices + materials)
# ------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# --------------------
# m unsigned short 2
dump_vertices4_to_buffer(sfaces['quads_flat'], buffer)
dump_materials_to_buffer(sfaces['quads_flat'], buffer)
add_padding(buffer, nquad_flat * 2)
# 9. smooth quads (vertices + materials + normals)
# -------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# --------------------
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# nd unsigned int 4
# --------------------
# m unsigned short 2
dump_vertices4_to_buffer(sfaces['quads_smooth'], buffer)
dump_normals4_to_buffer(sfaces['quads_smooth'], buffer)
dump_materials_to_buffer(sfaces['quads_smooth'], buffer)
add_padding(buffer, nquad_smooth * 2)
# 10. flat quads uv (vertices + materials + uvs)
# ------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# --------------------
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
# ud unsigned int 4
# --------------------
# m unsigned short 2
dump_vertices4_to_buffer(sfaces['quads_flat_uv'], buffer)
dump_uvs4_to_buffer(sfaces['quads_flat_uv'], buffer)
dump_materials_to_buffer(sfaces['quads_flat_uv'], buffer)
add_padding(buffer, nquad_flat_uv * 2)
# 11. smooth quads uv
# -------------------
# a unsigned int 4
# b unsigned int 4
# c unsigned int 4
# d unsigned int 4
# --------------------
# na unsigned int 4
# nb unsigned int 4
# nc unsigned int 4
# nd unsigned int 4
# --------------------
# ua unsigned int 4
# ub unsigned int 4
# uc unsigned int 4
# ud unsigned int 4
# --------------------
# m unsigned short 2
dump_vertices4_to_buffer(sfaces['quads_smooth_uv'], buffer)
dump_normals4_to_buffer(sfaces['quads_smooth_uv'], buffer)
dump_uvs4_to_buffer(sfaces['quads_smooth_uv'], buffer)
dump_materials_to_buffer(sfaces['quads_smooth_uv'], buffer)
add_padding(buffer, nquad_smooth_uv * 2)
path = os.path.dirname(outfile)
fname = os.path.join(path, binfile)
out = open(fname, "wb")
out.write("".join(buffer))
out.close()
# #############################################################################
# Helpers
# #############################################################################
def usage():
print "Usage: %s -i filename.obj -o filename.js [-m morphfiles*.obj] [-c morphcolors*.obj] [-a center|top|bottom] [-s flat|smooth] [-t binary|ascii] [-d invert|normal]" % os.path.basename(sys.argv[0])
# #####################################################
# Main
# #####################################################
if __name__ == "__main__":
# get parameters from the command line
try:
opts, args = getopt.getopt(sys.argv[1:], "hbi:m:c:b:o:a:s:t:d:x:f:", ["help", "bakecolors", "input=", "morphs=", "colors=", "output=", "align=", "shading=", "type=", "dissolve=", "truncatescale=", "framestep="])
except getopt.GetoptError:
usage()
sys.exit(2)
infile = outfile = ""
morphfiles = ""
colorfiles = ""
for o, a in opts:
if o in ("-h", "--help"):
usage()
sys.exit()
elif o in ("-i", "--input"):
infile = a
elif o in ("-m", "--morphs"):
morphfiles = a
elif o in ("-c", "--colors"):
colorfiles = a
elif o in ("-o", "--output"):
outfile = a
elif o in ("-a", "--align"):
if a in ("top", "bottom", "center", "centerxz", "none"):
ALIGN = a
elif o in ("-s", "--shading"):
if a in ("flat", "smooth"):
SHADING = a
elif o in ("-t", "--type"):
if a in ("binary", "ascii"):
TYPE = a
elif o in ("-d", "--dissolve"):
if a in ("normal", "invert"):
TRANSPARENCY = a
elif o in ("-b", "--bakecolors"):
BAKE_COLORS = True
elif o in ("-x", "--truncatescale"):
TRUNCATE = True
SCALE = float(a)
elif o in ("-f", "--framestep"):
FRAMESTEP = int(a)
if infile == "" or outfile == "":
usage()
sys.exit(2)
print "Converting [%s] into [%s] ..." % (infile, outfile)
if morphfiles:
print "Morphs [%s]" % morphfiles
if colorfiles:
print "Colors [%s]" % colorfiles
if TYPE == "ascii":
convert_ascii(infile, morphfiles, colorfiles, outfile)
elif TYPE == "binary":
convert_binary(infile, outfile)
| mit |
InAnimaTe/CouchPotatoServer | libs/CodernityDB/indexcreator.py | 81 | 25444 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011-2013 Codernity (http://codernity.com)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import tokenize
import token
import uuid
class IndexCreatorException(Exception):
def __init__(self, ex, line=None):
self.ex = ex
self.line = line
def __str__(self):
if self.line:
return repr(self.ex + "(in line: %d)" % self.line)
return repr(self.ex)
class IndexCreatorFunctionException(IndexCreatorException):
pass
class IndexCreatorValueException(IndexCreatorException):
pass
class Parser(object):
def __init__(self):
pass
def parse(self, data, name=None):
if not name:
self.name = "_" + uuid.uuid4().hex
else:
self.name = name
self.ind = 0
self.stage = 0
self.logic = ['and', 'or', 'in']
self.logic2 = ['&', '|']
self.allowed_props = {'TreeBasedIndex': ['type', 'name', 'key_format', 'node_capacity', 'pointer_format', 'meta_format'],
'HashIndex': ['type', 'name', 'key_format', 'hash_lim', 'entry_line_format'],
'MultiHashIndex': ['type', 'name', 'key_format', 'hash_lim', 'entry_line_format'],
'MultiTreeBasedIndex': ['type', 'name', 'key_format', 'node_capacity', 'pointer_format', 'meta_format']
}
self.funcs = {'md5': (['md5'], ['.digest()']),
'len': (['len'], []),
'str': (['str'], []),
'fix_r': (['self.fix_r'], []),
'prefix': (['self.prefix'], []),
'infix': (['self.infix'], []),
'suffix': (['self.suffix'], [])
}
self.handle_int_imports = {'infix': "from itertools import izip\n"}
self.funcs_with_body = {'fix_r':
(""" def fix_r(self,s,l):
e = len(s)
if e == l:
return s
elif e > l:
return s[:l]
else:
return s.rjust(l,'_')\n""", False),
'prefix':
(""" def prefix(self,s,m,l,f):
t = len(s)
if m < 1:
m = 1
o = set()
if t > l:
s = s[:l]
t = l
while m <= t:
o.add(s.rjust(f,'_'))
s = s[:-1]
t -= 1
return o\n""", False),
'suffix':
(""" def suffix(self,s,m,l,f):
t = len(s)
if m < 1:
m = 1
o = set()
if t > l:
s = s[t-l:]
t = len(s)
while m <= t:
o.add(s.rjust(f,'_'))
s = s[1:]
t -= 1
return o\n""", False),
'infix':
(""" def infix(self,s,m,l,f):
t = len(s)
o = set()
for x in xrange(m - 1, l):
t = (s, )
for y in xrange(0, x):
t += (s[y + 1:],)
o.update(set(''.join(x).rjust(f, '_').lower() for x in izip(*t)))
return o\n""", False)}
self.none = ['None', 'none', 'null']
self.props_assign = ['=', ':']
self.all_adj_num_comp = {token.NUMBER: (
token.NUMBER, token.NAME, '-', '('),
token.NAME: (token.NUMBER, token.NAME, '-', '('),
')': (token.NUMBER, token.NAME, '-', '(')
}
self.all_adj_num_op = {token.NUMBER: (token.NUMBER, token.NAME, '('),
token.NAME: (token.NUMBER, token.NAME, '('),
')': (token.NUMBER, token.NAME, '(')
}
self.allowed_adjacent = {
"<=": self.all_adj_num_comp,
">=": self.all_adj_num_comp,
">": self.all_adj_num_comp,
"<": self.all_adj_num_comp,
"==": {token.NUMBER: (token.NUMBER, token.NAME, '('),
token.NAME: (token.NUMBER, token.NAME, token.STRING, '('),
token.STRING: (token.NAME, token.STRING, '('),
')': (token.NUMBER, token.NAME, token.STRING, '('),
']': (token.NUMBER, token.NAME, token.STRING, '(')
},
"+": {token.NUMBER: (token.NUMBER, token.NAME, '('),
token.NAME: (token.NUMBER, token.NAME, token.STRING, '('),
token.STRING: (token.NAME, token.STRING, '('),
')': (token.NUMBER, token.NAME, token.STRING, '('),
']': (token.NUMBER, token.NAME, token.STRING, '(')
},
"-": {token.NUMBER: (token.NUMBER, token.NAME, '('),
token.NAME: (token.NUMBER, token.NAME, '('),
')': (token.NUMBER, token.NAME, '('),
'<': (token.NUMBER, token.NAME, '('),
'>': (token.NUMBER, token.NAME, '('),
'<=': (token.NUMBER, token.NAME, '('),
'>=': (token.NUMBER, token.NAME, '('),
'==': (token.NUMBER, token.NAME, '('),
']': (token.NUMBER, token.NAME, '(')
},
"*": self.all_adj_num_op,
"/": self.all_adj_num_op,
"%": self.all_adj_num_op,
",": {token.NUMBER: (token.NUMBER, token.NAME, token.STRING, '{', '[', '('),
token.NAME: (token.NUMBER, token.NAME, token.STRING, '(', '{', '['),
token.STRING: (token.NAME, token.STRING, token.NUMBER, '(', '{', '['),
')': (token.NUMBER, token.NAME, token.STRING, '(', '{', '['),
']': (token.NUMBER, token.NAME, token.STRING, '(', '{', '['),
'}': (token.NUMBER, token.NAME, token.STRING, '(', '{', '[')
}
}
def is_num(s):
m = re.search('[^0-9*()+\-\s/]+', s)
return not m
def is_string(s):
m = re.search('\s*(?P<a>[\'\"]+).*?(?P=a)\s*', s)
return m
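# Illustrative (derived from the regexes above): is_num("3*(2+1)/4") is True,
# is_num("len(a)") is False; is_string("'abc'") matches, is_string("abc") does not.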
data = re.split('make_key_value\:', data)
if len(data) < 2:
raise IndexCreatorFunctionException(
"Couldn't find a definition of make_key_value function!\n")
spl1 = re.split('make_key\:', data[0])
spl2 = re.split('make_key\:', data[1])
self.funcs_rev = False
if len(spl1) > 1:
data = [spl1[0]] + [data[1]] + [spl1[1]]
self.funcs_rev = True
elif len(spl2) > 1:
data = [data[0]] + spl2
else:
data.append("key")
if data[1] == re.search('\s*', data[1], re.S | re.M).group(0):
raise IndexCreatorFunctionException("Empty function body ",
len(re.split('\n', data[0])) + (len(re.split('\n', data[2])) if self.funcs_rev else 1) - 1)
if data[2] == re.search('\s*', data[2], re.S | re.M).group(0):
raise IndexCreatorFunctionException("Empty function body ",
len(re.split('\n', data[0])) + (1 if self.funcs_rev else len(re.split('\n', data[1]))) - 1)
if data[0] == re.search('\s*', data[0], re.S | re.M).group(0):
raise IndexCreatorValueException("You didn't set any properity or you set them not at the begining of the code\n")
data = [re.split(
'\n', data[0]), re.split('\n', data[1]), re.split('\n', data[2])]
self.cnt_lines = (len(data[0]), len(data[1]), len(data[2]))
ind = 0
self.predata = data
self.data = [[], [], []]
for i, v in enumerate(self.predata[0]):
for k, w in enumerate(self.predata[0][i]):
if self.predata[0][i][k] in self.props_assign:
if not is_num(self.predata[0][i][k + 1:]) and self.predata[0][i].strip()[:4] != 'type' and self.predata[0][i].strip()[:4] != 'name':
s = self.predata[0][i][k + 1:]
self.predata[0][i] = self.predata[0][i][:k + 1]
m = re.search('\s+', s.strip())
if not is_string(s) and not m:
s = "'" + s.strip() + "'"
self.predata[0][i] += s
break
for n, i in enumerate(self.predata):
for k in i:
k = k.strip()
if k:
self.data[ind].append(k)
self.check_enclosures(k, n)
ind += 1
return self.parse_ex()
def readline(self, stage):
def foo():
if len(self.data[stage]) <= self.ind:
self.ind = 0
return ""
else:
self.ind += 1
return self.data[stage][self.ind - 1]
return foo
def add(self, l, i):
def add_aux(*args):
# print args,self.ind
if len(l[i]) < self.ind:
l[i].append([])
l[i][self.ind - 1].append(args)
return add_aux
def parse_ex(self):
self.index_name = ""
self.index_type = ""
self.curLine = -1
self.con = -1
self.brackets = -1
self.curFunc = None
self.colons = 0
self.line_cons = ([], [], [])
self.pre_tokens = ([], [], [])
self.known_dicts_in_mkv = []
self.prop_name = True
self.prop_assign = False
self.is_one_arg_enough = False
self.funcs_stack = []
self.last_line = [-1, -1, -1]
self.props_set = []
self.custom_header = set()
self.tokens = []
self.tokens_head = ['# %s\n' % self.name, 'class %s(' % self.name, '):\n', ' def __init__(self, *args, **kwargs): ']
for i in xrange(3):
tokenize.tokenize(self.readline(i), self.add(self.pre_tokens, i))
# tokenize treats some keywords in the wrong way, that's why we
# have to change some of them
for nk, k in enumerate(self.pre_tokens[i]):
for na, a in enumerate(k):
if a[0] == token.NAME and a[1] in self.logic:
self.pre_tokens[i][nk][
na] = (token.OP, a[1], a[2], a[3], a[4])
for i in self.pre_tokens[1]:
self.line_cons[1].append(self.check_colons(i, 1))
self.check_adjacents(i, 1)
if self.check_for_2nd_arg(i) == -1 and not self.is_one_arg_enough:
raise IndexCreatorValueException("No 2nd value to return (did u forget about ',None'?", self.cnt_line_nr(i[0][4], 1))
self.is_one_arg_enough = False
for i in self.pre_tokens[2]:
self.line_cons[2].append(self.check_colons(i, 2))
self.check_adjacents(i, 2)
for i in self.pre_tokens[0]:
self.handle_prop_line(i)
self.cur_brackets = 0
self.tokens += ['\n super(%s, self).__init__(*args, **kwargs)\n def make_key_value(self, data): ' % self.name]
for i in self.pre_tokens[1]:
for k in i:
self.handle_make_value(*k)
self.curLine = -1
self.con = -1
self.cur_brackets = 0
self.tokens += ['\n def make_key(self, key):']
for i in self.pre_tokens[2]:
for k in i:
self.handle_make_key(*k)
if self.index_type == "":
raise IndexCreatorValueException("Missing index type definition\n")
if self.index_name == "":
raise IndexCreatorValueException("Missing index name\n")
self.tokens_head[0] = "# " + self.index_name + "\n" + \
self.tokens_head[0]
for i in self.funcs_with_body:
if self.funcs_with_body[i][1]:
self.tokens_head.insert(4, self.funcs_with_body[i][0])
if None in self.custom_header:
self.custom_header.remove(None)
if self.custom_header:
s = ' custom_header = """'
for i in self.custom_header:
s += i
s += '"""\n'
self.tokens_head.insert(4, s)
if self.index_type in self.allowed_props:
for i in self.props_set:
if i not in self.allowed_props[self.index_type]:
raise IndexCreatorValueException("Properity %s is not allowed for index type: %s" % (i, self.index_type))
# print "".join(self.tokens_head)
# print "----------"
# print (" ".join(self.tokens))
return "".join(self.custom_header), "".join(self.tokens_head) + (" ".join(self.tokens))
# has to be run BEFORE tokenize
def check_enclosures(self, d, st):
encs = []
contr = {'(': ')', '{': '}', '[': ']', "'": "'", '"': '"'}
ends = [')', '}', ']', "'", '"']
for i in d:
if len(encs) > 0 and encs[-1] in ['"', "'"]:
if encs[-1] == i:
del encs[-1]
elif i in contr:
encs += [i]
elif i in ends:
if len(encs) < 1 or contr[encs[-1]] != i:
raise IndexCreatorValueException("Missing opening enclosure for \'%s\'" % i, self.cnt_line_nr(d, st))
del encs[-1]
if len(encs) > 0:
raise IndexCreatorValueException("Missing closing enclosure for \'%s\'" % encs[0], self.cnt_line_nr(d, st))
def check_adjacents(self, d, st):
def std_check(d, n):
if n == 0:
prev = -1
else:
prev = d[n - 1][1] if d[n - 1][0] == token.OP else d[n - 1][0]
cur = d[n][1] if d[n][0] == token.OP else d[n][0]
# there always is an endmarker at the end, but this is a precaution
if n + 2 > len(d):
nex = -1
else:
nex = d[n + 1][1] if d[n + 1][0] == token.OP else d[n + 1][0]
if prev not in self.allowed_adjacent[cur]:
raise IndexCreatorValueException("Wrong left value of the %s" % cur, self.cnt_line_nr(line, st))
# there is an assumption that the whole data always ends with a 0 marker; the idea probably needs a rewrite to allow more whitespace
# between tokens, so it will be handled anyway
elif nex not in self.allowed_adjacent[cur][prev]:
raise IndexCreatorValueException("Wrong right value of the %s" % cur, self.cnt_line_nr(line, st))
for n, (t, i, _, _, line) in enumerate(d):
if t == token.NAME or t == token.STRING:
if n + 1 < len(d) and d[n + 1][0] in [token.NAME, token.STRING]:
raise IndexCreatorValueException("Did you forget about an operator in between?", self.cnt_line_nr(line, st))
elif i in self.allowed_adjacent:
std_check(d, n)
def check_colons(self, d, st):
cnt = 0
br = 0
def check_ret_args_nr(a, s):
c_b_cnt = 0
s_b_cnt = 0
n_b_cnt = 0
comas_cnt = 0
for _, i, _, _, line in a:
if c_b_cnt == n_b_cnt == s_b_cnt == 0:
if i == ',':
comas_cnt += 1
if (s == 1 and comas_cnt > 1) or (s == 2 and comas_cnt > 0):
raise IndexCreatorFunctionException("Too much arguments to return", self.cnt_line_nr(line, st))
if s == 0 and comas_cnt > 0:
raise IndexCreatorValueException("A coma here doesn't make any sense", self.cnt_line_nr(line, st))
elif i == ':':
if s == 0:
raise IndexCreatorValueException("A colon here doesn't make any sense", self.cnt_line_nr(line, st))
raise IndexCreatorFunctionException("Two colons don't make any sense", self.cnt_line_nr(line, st))
if i == '{':
c_b_cnt += 1
elif i == '}':
c_b_cnt -= 1
elif i == '(':
n_b_cnt += 1
elif i == ')':
n_b_cnt -= 1
elif i == '[':
s_b_cnt += 1
elif i == ']':
s_b_cnt -= 1
def check_if_empty(a):
for i in a:
if i not in [token.NEWLINE, token.INDENT, token.ENDMARKER]:
return False
return True
if st == 0:
check_ret_args_nr(d, st)
return
for n, i in enumerate(d):
if i[1] == ':':
if br == 0:
if len(d) < n or check_if_empty(d[n + 1:]):
raise IndexCreatorValueException(
"Empty return value", self.cnt_line_nr(i[4], st))
elif len(d) >= n:
check_ret_args_nr(d[n + 1:], st)
return cnt
else:
cnt += 1
elif i[1] == '{':
br += 1
elif i[1] == '}':
br -= 1
check_ret_args_nr(d, st)
return -1
def check_for_2nd_arg(self, d):
c_b_cnt = 0 # curly brackets counter '{}'
s_b_cnt = 0 # square brackets counter '[]'
n_b_cnt = 0 # normal brackets counter '()'
def check_2nd_arg(d, ind):
d = d[ind[0]:]
for t, i, (n, r), _, line in d:
if i == '{' or i is None:
return 0
elif t == token.NAME:
self.known_dicts_in_mkv.append((i, (n, r)))
return 0
elif t == token.STRING or t == token.NUMBER:
raise IndexCreatorValueException("Second return value of make_key_value function has to be a dictionary!", self.cnt_line_nr(line, 1))
for ind in enumerate(d):
t, i, _, _, _ = ind[1]
if s_b_cnt == n_b_cnt == c_b_cnt == 0:
if i == ',':
return check_2nd_arg(d, ind)
elif (t == token.NAME and i not in self.funcs) or i == '{':
self.is_one_arg_enough = True
if i == '{':
c_b_cnt += 1
self.is_one_arg_enough = True
elif i == '}':
c_b_cnt -= 1
elif i == '(':
n_b_cnt += 1
elif i == ')':
n_b_cnt -= 1
elif i == '[':
s_b_cnt += 1
elif i == ']':
s_b_cnt -= 1
return -1
def cnt_line_nr(self, l, stage):
nr = -1
for n, i in enumerate(self.predata[stage]):
# print i,"|||",i.strip(),"|||",l
if l == i.strip():
nr = n
if nr == -1:
return -1
if stage == 0:
return nr + 1
elif stage == 1:
return nr + self.cnt_lines[0] + (self.cnt_lines[2] - 1 if self.funcs_rev else 0)
elif stage == 2:
return nr + self.cnt_lines[0] + (self.cnt_lines[1] - 1 if not self.funcs_rev else 0)
return -1
def handle_prop_line(self, d):
d_len = len(d)
if d[d_len - 1][0] == token.ENDMARKER:
d_len -= 1
if d_len < 3:
raise IndexCreatorValueException("Can't handle property assignment", self.cnt_line_nr(d[0][4], 0))
if not d[1][1] in self.props_assign:
raise IndexCreatorValueException(
"Did you forget : or =?", self.cnt_line_nr(d[0][4], 0))
if d[0][0] == token.NAME or d[0][0] == token.STRING:
if d[0][1] in self.props_set:
raise IndexCreatorValueException("Property %s is set more than once" % d[0][1], self.cnt_line_nr(d[0][4], 0))
self.props_set += [d[0][1]]
if d[0][1] == "type" or d[0][1] == "name":
t, tk, _, _, line = d[2]
if d_len > 3:
raise IndexCreatorValueException(
"Wrong value to assign", self.cnt_line_nr(line, 0))
if t == token.STRING:
m = re.search('\s*(?P<a>[\'\"]+)(.*?)(?P=a)\s*', tk)
if m:
tk = m.groups()[1]
elif t != token.NAME:
raise IndexCreatorValueException(
"Wrong value to assign", self.cnt_line_nr(line, 0))
if d[0][1] == "type":
if d[2][1] == "TreeBasedIndex":
self.custom_header.add("from CodernityDB.tree_index import TreeBasedIndex\n")
elif d[2][1] == "MultiTreeBasedIndex":
self.custom_header.add("from CodernityDB.tree_index import MultiTreeBasedIndex\n")
elif d[2][1] == "MultiHashIndex":
self.custom_header.add("from CodernityDB.hash_index import MultiHashIndex\n")
self.tokens_head.insert(2, tk)
self.index_type = tk
else:
self.index_name = tk
return
else:
self.tokens += ['\n kwargs["' + d[0][1] + '"]']
else:
raise IndexCreatorValueException("Can't handle property assignment", self.cnt_line_nr(d[0][4], 0))
self.tokens += ['=']
self.check_adjacents(d[2:], 0)
self.check_colons(d[2:], 0)
for i in d[2:]:
self.tokens += [i[1]]
def generate_func(self, t, tk, pos_start, pos_end, line, hdata, stage):
if self.last_line[stage] != -1 and pos_start[0] > self.last_line[stage] and line != '':
raise IndexCreatorFunctionException("This line will never be executed!", self.cnt_line_nr(line, stage))
if t == 0:
return
if pos_start[1] == 0:
if self.line_cons[stage][pos_start[0] - 1] == -1:
self.tokens += ['\n return']
self.last_line[stage] = pos_start[0]
else:
self.tokens += ['\n if']
elif tk == ':' and self.line_cons[stage][pos_start[0] - 1] > -1:
if self.line_cons[stage][pos_start[0] - 1] == 0:
self.tokens += [':\n return']
return
self.line_cons[stage][pos_start[0] - 1] -= 1
if tk in self.logic2:
# print tk
if line[pos_start[1] - 1] != tk and line[pos_start[1] + 1] != tk:
self.tokens += [tk]
if line[pos_start[1] - 1] != tk and line[pos_start[1] + 1] == tk:
if tk == '&':
self.tokens += ['and']
else:
self.tokens += ['or']
return
if self.brackets != 0:
def search_through_known_dicts(a):
for i, (n, r) in self.known_dicts_in_mkv:
if i == tk and r > pos_start[1] and n == pos_start[0] and hdata == 'data':
return True
return False
if t == token.NAME and len(self.funcs_stack) > 0 and self.funcs_stack[-1][0] == 'md5' and search_through_known_dicts(tk):
raise IndexCreatorValueException("Second value returned by make_key_value for sure isn't a dictionary ", self.cnt_line_nr(line, 1))
if tk == ')':
self.cur_brackets -= 1
if len(self.funcs_stack) > 0 and self.cur_brackets == self.funcs_stack[-1][1]:
self.tokens += [tk]
self.tokens += self.funcs[self.funcs_stack[-1][0]][1]
del self.funcs_stack[-1]
return
if tk == '(':
self.cur_brackets += 1
if tk in self.none:
self.tokens += ['None']
return
if t == token.NAME and tk not in self.logic and tk != hdata:
if tk not in self.funcs:
self.tokens += [hdata + '["' + tk + '"]']
else:
self.tokens += self.funcs[tk][0]
if tk in self.funcs_with_body:
self.funcs_with_body[tk] = (
self.funcs_with_body[tk][0], True)
self.custom_header.add(self.handle_int_imports.get(tk))
self.funcs_stack += [(tk, self.cur_brackets)]
else:
self.tokens += [tk]
def handle_make_value(self, t, tk, pos_start, pos_end, line):
self.generate_func(t, tk, pos_start, pos_end, line, 'data', 1)
def handle_make_key(self, t, tk, pos_start, pos_end, line):
self.generate_func(t, tk, pos_start, pos_end, line, 'key', 2)
| gpl-3.0 |
rahul003/mxnet | plugin/opencv/opencv.py | 61 | 6214 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable=too-many-arguments,no-member,invalid-name
"""OpenCV plugin for MXNet"""
import random
import ctypes
import cv2
import mxnet as mx
from mxnet.base import _LIB
from mxnet.base import mx_uint, NDArrayHandle, check_call
def imdecode(str_img, flag=1):
"""Decode image from str buffer.
Wrapper for cv2.imdecode that uses mx.nd.NDArray
Parameters
----------
str_img : str
str buffer read from image file
flag : int
same as flag for cv2.imdecode
Returns
-------
img : NDArray
decoded image in (width, height, channels)
with BGR color channel order
"""
hdl = NDArrayHandle()
check_call(_LIB.MXCVImdecode(ctypes.c_char_p(str_img),
mx_uint(len(str_img)),
flag, ctypes.byref(hdl)))
return mx.nd.NDArray(hdl)
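# A minimal usage sketch for imdecode (illustrative only; the file name below
# is a placeholder, not something this module ships with):
#
#   with open('photo.jpg', 'rb') as f:
#       buf = f.read()
#   img = imdecode(buf, flag=1)  # flag=1 decodes to 3-channel BGR, like cv2.IMREAD_COLOR
#   print(img.shape)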
def resize(src, size, interpolation=cv2.INTER_LINEAR):
"""Resize image to the given size.
Wrapper for cv2.resize that uses mx.nd.NDArray
Parameters
----------
src : NDArray
image in (width, height, channels)
size : tuple
target size in (width, height)
interpolation : int
same as interpolation for cv2.resize
Returns
-------
img : NDArray
resized image
"""
hdl = NDArrayHandle()
check_call(_LIB.MXCVResize(src.handle, mx_uint(size[0]), mx_uint(size[1]),
interpolation, ctypes.byref(hdl)))
return mx.nd.NDArray(hdl)
def copyMakeBorder(src, top, bot, left, right, border_type=cv2.BORDER_CONSTANT, value=0):
"""Pad image border
Wrapper for cv2.copyMakeBorder that uses mx.nd.NDArray
Parameters
----------
src : NDArray
Image in (width, height, channels).
Other parameters are the same as for cv2.copyMakeBorder
Returns
-------
img : NDArray
padded image
"""
hdl = NDArrayHandle()
check_call(_LIB.MXCVcopyMakeBorder(src.handle, ctypes.c_int(top), ctypes.c_int(bot),
ctypes.c_int(left), ctypes.c_int(right),
ctypes.c_int(border_type), ctypes.c_double(value),
ctypes.byref(hdl)))
return mx.nd.NDArray(hdl)
def scale_down(src_size, size):
"""Scale down crop size if it's bigger than image size"""
w, h = size
sw, sh = src_size
if sh < h:
w, h = float(w*sh)/h, sh
if sw < w:
w, h = sw, float(h*sw)/w
return int(w), int(h)
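# Worked example for scale_down (a sketch added for clarity, not original code):
# with a 100x50 source and a requested 80x80 crop, the crop shrinks to fit:
#
#   scale_down((100, 50), (80, 80))  # -> (50, 50)
#
# the height is capped at 50 first, and the width is scaled by the same ratio.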
def fixed_crop(src, x0, y0, w, h, size=None, interpolation=cv2.INTER_CUBIC):
"""Crop src at fixed location, and (optionally) resize it to size"""
out = mx.nd.crop(src, begin=(y0, x0, 0), end=(y0+h, x0+w, int(src.shape[2])))
if size is not None and (w, h) != size:
out = resize(out, size, interpolation=interpolation)
return out
def random_crop(src, size):
"""Randomly crop src with size. Upsample result if src is smaller than size"""
h, w, _ = src.shape
new_w, new_h = scale_down((w, h), size)
x0 = random.randint(0, w - new_w)
y0 = random.randint(0, h - new_h)
out = fixed_crop(src, x0, y0, new_w, new_h, size)
return out, (x0, y0, new_w, new_h)
def color_normalize(src, mean, std):
"""Normalize src with mean and std"""
src -= mean
src /= std
return src
def random_size_crop(src, size, min_area=0.25, ratio=(3.0/4.0, 4.0/3.0)):
"""Randomly crop src with size. Randomize area and aspect ratio"""
h, w, _ = src.shape
area = w*h
for _ in range(10):
new_area = random.uniform(min_area, 1.0) * area
new_ratio = random.uniform(*ratio)
new_w = int(new_area*new_ratio)
new_h = int(new_area/new_ratio)
if random.uniform(0., 1.) < 0.5:
new_w, new_h = new_h, new_w
if new_w > w or new_h > h:
continue
x0 = random.randint(0, w - new_w)
y0 = random.randint(0, h - new_h)
out = fixed_crop(src, x0, y0, new_w, new_h, size)
return out, (x0, y0, new_w, new_h)
return random_crop(src, size)
class ImageListIter(mx.io.DataIter):
"""An example image iterator using opencv plugin"""
def __init__(self, root, flist, batch_size, size, mean=None):
mx.io.DataIter.__init__(self)
self.root = root
self.list = [line.strip() for line in open(flist).readlines()]
self.cur = 0
self.batch_size = batch_size
self.size = size
if mean is not None:
self.mean = mx.nd.array(mean)
else:
self.mean = None
def reset(self):
"""Reset iterator position to 0"""
self.cur = 0
def next(self):
"""Move iterator position forward"""
batch = mx.nd.zeros((self.batch_size, self.size[1], self.size[0], 3))
i = self.cur
for i in range(self.cur, min(len(self.list), self.cur+self.batch_size)):
str_img = open(self.root+self.list[i]+'.jpg').read()
img = imdecode(str_img, 1)
img, _ = random_crop(img, self.size)
batch[i - self.cur] = img
batch = mx.nd.transpose(batch, axes=(0, 3, 1, 2))
ret = mx.io.DataBatch(data=[batch],
label=[],
pad=self.batch_size-(i-self.cur),
index=None)
self.cur = i
return ret
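# Rough usage sketch for ImageListIter (illustrative; the paths, list-file name
# and mean values here are assumptions, not taken from this module):
#
#   it = ImageListIter(root='./images/', flist='train.lst',
#                      batch_size=32, size=(224, 224),
#                      mean=[123.68, 116.78, 103.94])
#   it.reset()
#   batch = it.next()  # DataBatch whose data is shaped (32, 3, 224, 224)
#
# next() expects every line of flist to name an image relative to root without
# the '.jpg' extension, since the extension is appended when the file is read.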
| apache-2.0 |
openmv/micropython | tests/basics/string_format_error.py | 62 | 1415 | # tests for errors in {} format string
try:
'{0:0}'.format('zzz')
except (ValueError):
print('ValueError')
try:
'{1:}'.format(1)
except IndexError:
print('IndexError')
try:
'}'.format('zzzz')
except ValueError:
print('ValueError')
# end of format parsing conversion specifier
try:
'{!'.format('a')
except ValueError:
print('ValueError')
# unknown conversion specifier
try:
'abc{!d}'.format('1')
except ValueError:
print('ValueError')
try:
'{abc'.format('zzzz')
except ValueError:
print('ValueError')
# expected ':' after specifier
try:
'{!s :}'.format(2)
except ValueError:
print('ValueError')
try:
'{}{0}'.format(1, 2)
except ValueError:
print('ValueError')
try:
'{1:}'.format(1)
except IndexError:
print('IndexError')
try:
'{ 0 :*^10}'.format(12)
except KeyError:
print('KeyError')
try:
'{0}{}'.format(1)
except ValueError:
print('ValueError')
try:
'{}{}'.format(1)
except IndexError:
print('IndexError')
try:
'{0:+s}'.format('1')
except ValueError:
print('ValueError')
try:
'{0:+c}'.format(1)
except ValueError:
print('ValueError')
try:
'{0:s}'.format(1)
except ValueError:
print('ValueError')
try:
'{:*"1"}'.format('zz')
except ValueError:
print('ValueError')
# unknown format code for str arg
try:
'{:X}'.format('zz')
except ValueError:
print('ValueError')
| mit |
jezdez/kuma | vendor/packages/pygments/lexers/_lua_builtins.py | 43 | 6965 | # -*- coding: utf-8 -*-
"""
pygments.lexers._lua_builtins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file contains the names and modules of Lua functions.
It is able to re-generate itself, but for adding new functions you
probably have to add some callbacks (see function module_callbacks).
Do not edit the MODULES dict by hand.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from __future__ import print_function
MODULES = {'basic': ('_G',
'_VERSION',
'assert',
'collectgarbage',
'dofile',
'error',
'getfenv',
'getmetatable',
'ipairs',
'load',
'loadfile',
'loadstring',
'next',
'pairs',
'pcall',
'print',
'rawequal',
'rawget',
'rawset',
'select',
'setfenv',
'setmetatable',
'tonumber',
'tostring',
'type',
'unpack',
'xpcall'),
'coroutine': ('coroutine.create',
'coroutine.resume',
'coroutine.running',
'coroutine.status',
'coroutine.wrap',
'coroutine.yield'),
'debug': ('debug.debug',
'debug.getfenv',
'debug.gethook',
'debug.getinfo',
'debug.getlocal',
'debug.getmetatable',
'debug.getregistry',
'debug.getupvalue',
'debug.setfenv',
'debug.sethook',
'debug.setlocal',
'debug.setmetatable',
'debug.setupvalue',
'debug.traceback'),
'io': ('io.close',
'io.flush',
'io.input',
'io.lines',
'io.open',
'io.output',
'io.popen',
'io.read',
'io.tmpfile',
'io.type',
'io.write'),
'math': ('math.abs',
'math.acos',
'math.asin',
'math.atan2',
'math.atan',
'math.ceil',
'math.cosh',
'math.cos',
'math.deg',
'math.exp',
'math.floor',
'math.fmod',
'math.frexp',
'math.huge',
'math.ldexp',
'math.log10',
'math.log',
'math.max',
'math.min',
'math.modf',
'math.pi',
'math.pow',
'math.rad',
'math.random',
'math.randomseed',
'math.sinh',
'math.sin',
'math.sqrt',
'math.tanh',
'math.tan'),
'modules': ('module',
'require',
'package.cpath',
'package.loaded',
'package.loadlib',
'package.path',
'package.preload',
'package.seeall'),
'os': ('os.clock',
'os.date',
'os.difftime',
'os.execute',
'os.exit',
'os.getenv',
'os.remove',
'os.rename',
'os.setlocale',
'os.time',
'os.tmpname'),
'string': ('string.byte',
'string.char',
'string.dump',
'string.find',
'string.format',
'string.gmatch',
'string.gsub',
'string.len',
'string.lower',
'string.match',
'string.rep',
'string.reverse',
'string.sub',
'string.upper'),
'table': ('table.concat',
'table.insert',
'table.maxn',
'table.remove',
'table.sort')}
if __name__ == '__main__': # pragma: no cover
import re
try:
from urllib import urlopen
except ImportError:
from urllib.request import urlopen
import pprint
# you can't generally find out what module a function belongs to if you
# have only its name. Because of this, here are some callback functions
# that recognize if a given function belongs to a specific module
def module_callbacks():
def is_in_coroutine_module(name):
return name.startswith('coroutine.')
def is_in_modules_module(name):
if name in ['require', 'module'] or name.startswith('package'):
return True
else:
return False
def is_in_string_module(name):
return name.startswith('string.')
def is_in_table_module(name):
return name.startswith('table.')
def is_in_math_module(name):
return name.startswith('math')
def is_in_io_module(name):
return name.startswith('io.')
def is_in_os_module(name):
return name.startswith('os.')
def is_in_debug_module(name):
return name.startswith('debug.')
return {'coroutine': is_in_coroutine_module,
'modules': is_in_modules_module,
'string': is_in_string_module,
'table': is_in_table_module,
'math': is_in_math_module,
'io': is_in_io_module,
'os': is_in_os_module,
'debug': is_in_debug_module}
def get_newest_version():
f = urlopen('http://www.lua.org/manual/')
r = re.compile(r'^<A HREF="(\d\.\d)/">Lua \1</A>')
for line in f:
m = r.match(line)
if m is not None:
return m.groups()[0]
def get_lua_functions(version):
f = urlopen('http://www.lua.org/manual/%s/' % version)
r = re.compile(r'^<A HREF="manual.html#pdf-(.+)">\1</A>')
functions = []
for line in f:
m = r.match(line)
if m is not None:
functions.append(m.groups()[0])
return functions
def get_function_module(name):
for mod, cb in module_callbacks().items():
if cb(name):
return mod
if '.' in name:
return name.split('.')[0]
else:
return 'basic'
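# Illustrative behaviour of get_function_module (examples added for clarity,
# not produced by the generator itself):
#
#   get_function_module('string.find')      # -> 'string'    (callback match)
#   get_function_module('coroutine.yield')  # -> 'coroutine'
#   get_function_module('foo.bar')          # -> 'foo'       (falls back to the prefix)
#   get_function_module('print')            # -> 'basic'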
def regenerate(filename, modules):
with open(filename) as fp:
content = fp.read()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
with open(filename, 'w') as fp:
fp.write(header)
fp.write('MODULES = %s\n\n' % pprint.pformat(modules))
fp.write(footer)
def run():
version = get_newest_version()
print('> Downloading function index for Lua %s' % version)
functions = get_lua_functions(version)
print('> %d functions found:' % len(functions))
modules = {}
for full_function_name in functions:
print('>> %s' % full_function_name)
m = get_function_module(full_function_name)
modules.setdefault(m, []).append(full_function_name)
regenerate(__file__, modules)
run()
| mpl-2.0 |
DukeOfHazard/crits | crits/core/views.py | 7 | 83579 | import datetime
import json
import logging
from bson import json_util
from dateutil.parser import parse
from time import gmtime, strftime
from django.conf import settings
from django.contrib.auth.decorators import user_passes_test
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.template.loader import render_to_string
from crits.actors.actor import ActorThreatIdentifier
from crits.actors.forms import AddActorForm, AddActorIdentifierTypeForm
from crits.actors.forms import AddActorIdentifierForm, AttributeIdentifierForm
from crits.backdoors.forms import AddBackdoorForm
from crits.campaigns.campaign import Campaign
from crits.campaigns.forms import AddCampaignForm, CampaignForm
from crits.certificates.forms import UploadCertificateForm
from crits.comments.forms import AddCommentForm, InlineCommentForm
from crits.config.config import CRITsConfig
from crits.core.data_tools import json_handler
from crits.core.forms import SourceAccessForm, AddSourceForm, AddUserRoleForm
from crits.core.forms import SourceForm, DownloadFileForm, AddReleasabilityForm
from crits.core.forms import TicketForm
from crits.core.handlers import add_releasability, add_releasability_instance
from crits.core.handlers import remove_releasability, remove_releasability_instance
from crits.core.handlers import add_new_source, generate_counts_jtable
from crits.core.handlers import source_add_update, source_remove, source_remove_all
from crits.core.handlers import modify_bucket_list, promote_bucket_list
from crits.core.handlers import download_object_handler, unflatten
from crits.core.handlers import modify_sector_list
from crits.core.handlers import generate_bucket_jtable, generate_bucket_csv
from crits.core.handlers import generate_sector_jtable, generate_sector_csv
from crits.core.handlers import generate_dashboard, generate_global_search
from crits.core.handlers import login_user, reset_user_password
from crits.core.handlers import generate_user_profile, generate_user_preference
from crits.core.handlers import modify_source_access, get_bucket_autocomplete
from crits.core.handlers import dns_timeline, email_timeline, indicator_timeline
from crits.core.handlers import generate_users_jtable, generate_items_jtable
from crits.core.handlers import toggle_item_state, download_grid_file
from crits.core.handlers import get_data_for_item, generate_audit_jtable
from crits.core.handlers import details_from_id, status_update
from crits.core.handlers import get_favorites, favorite_update
from crits.core.handlers import generate_favorites_jtable
from crits.core.handlers import ticket_add, ticket_update, ticket_remove
from crits.core.handlers import description_update
from crits.core.source_access import SourceAccess
from crits.core.user import CRITsUser
from crits.core.user_role import UserRole
from crits.core.user_tools import user_can_view_data, is_admin, user_sources
from crits.core.user_tools import user_is_admin, get_user_list, get_nav_template
from crits.core.user_tools import get_user_role, get_user_email_notification
from crits.core.user_tools import get_user_info, get_user_organization
from crits.core.user_tools import is_user_subscribed, unsubscribe_user
from crits.core.user_tools import subscribe_user, subscribe_to_source
from crits.core.user_tools import unsubscribe_from_source, is_user_subscribed_to_source
from crits.core.user_tools import add_new_user_role, change_user_password, toggle_active
from crits.core.user_tools import save_user_secret
from crits.core.user_tools import toggle_user_preference, update_user_preference
from crits.core.user_tools import get_api_key_by_name, create_api_key_by_name
from crits.core.user_tools import revoke_api_key_by_name, make_default_api_key_by_name
from crits.core.class_mapper import class_from_id
from crits.domains.forms import TLDUpdateForm, AddDomainForm
from crits.emails.forms import EmailUploadForm, EmailEMLForm, EmailYAMLForm, EmailRawUploadForm, EmailOutlookForm
from crits.events.forms import EventForm
from crits.exploits.forms import AddExploitForm
from crits.indicators.forms import UploadIndicatorCSVForm, UploadIndicatorTextForm
from crits.indicators.forms import UploadIndicatorForm, NewIndicatorActionForm
from crits.indicators.indicator import IndicatorAction
from crits.ips.forms import AddIPForm
from crits.locations.forms import AddLocationForm
from crits.notifications.handlers import get_user_notifications
from crits.notifications.handlers import remove_user_from_notification
from crits.notifications.handlers import remove_user_notifications
from crits.objects.forms import AddObjectForm
from crits.pcaps.forms import UploadPcapForm
from crits.raw_data.forms import UploadRawDataFileForm, UploadRawDataForm
from crits.raw_data.forms import NewRawDataTypeForm
from crits.raw_data.raw_data import RawDataType
from crits.relationships.forms import ForgeRelationshipForm
from crits.samples.forms import UploadFileForm
from crits.screenshots.forms import AddScreenshotForm
from crits.targets.forms import TargetInfoForm
from crits.vocabulary.sectors import Sectors
logger = logging.getLogger(__name__)
@user_passes_test(user_can_view_data)
def update_object_description(request):
"""
Update the description of a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
type_ = request.POST['type']
id_ = request.POST['id']
description = request.POST['description']
analyst = request.user.username
return HttpResponse(json.dumps(description_update(type_,
id_,
description,
analyst)),
mimetype="application/json")
else:
return render_to_response("error.html",
{"error" : 'Expected AJAX POST.'},
RequestContext(request))
@user_passes_test(user_can_view_data)
def toggle_favorite(request):
"""
Toggle favorite in a user profile.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
type_ = request.POST['type']
id_ = request.POST['id']
analyst = request.user.username
return HttpResponse(json.dumps(favorite_update(type_,
id_,
analyst)),
mimetype="application/json")
else:
return render_to_response("error.html",
{"error" : 'Expected AJAX POST.'},
RequestContext(request))
@user_passes_test(user_can_view_data)
def favorites(request):
"""
Get favorites for a user.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
analyst = request.user.username
return HttpResponse(json.dumps(get_favorites(analyst)),
mimetype="application/json")
else:
return render_to_response("error.html",
{"error" : 'Expected AJAX POST.'},
RequestContext(request))
@user_passes_test(user_can_view_data)
def favorites_list(request, ctype=None, option=None):
"""
Get favorites for a user for jtable.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
return generate_favorites_jtable(request, ctype, option)
@user_passes_test(user_can_view_data)
def get_dialog(request):
"""
Get a specific dialog for rendering in the UI.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
dialog = request.GET.get('dialog', '')
# Regex in urls.py doesn't seem to be working, should sanity check dialog
return render_to_response(dialog + ".html",
{"error" : 'Dialog not found'},
RequestContext(request))
@user_passes_test(user_can_view_data)
def update_status(request, type_, id_):
"""
Update the status of a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param type_: The top-level object to update.
:type type_: str
:param id_: The ObjectId to search for.
:type id_: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
value = request.POST['value']
analyst = request.user.username
return HttpResponse(json.dumps(status_update(type_,
id_,
value,
analyst)),
mimetype="application/json")
else:
return render_to_response("error.html",
{"error" : 'Expected AJAX POST.'},
RequestContext(request))
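# Sketch of how this view can be exercised from a test (the URL, credentials
# and status value are assumptions; the real route lives in urls.py):
#
#   from django.test import Client
#   c = Client()
#   c.login(username='analyst', password='secret')
#   c.post('/update_status/Sample/%s/' % obj_id,
#          {'value': 'Analyzed'},
#          HTTP_X_REQUESTED_WITH='XMLHttpRequest')
#
# The extra header makes request.is_ajax() true; the POST must carry 'value'.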
@user_passes_test(user_can_view_data)
def get_item_data(request):
"""
Get basic data for an item. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
item_id = request.POST.get('id','')
item_type = request.POST.get('type','')
# Right now we pass the id/type for the data we want
# If we write a function that doesn't pass these values,
# then grab them from the cookies
if not item_id:
item_id = request.COOKIES.get('crits_rel_id','')
if not item_type:
item_type = request.COOKIES.get('crits_rel_type','')
response = get_data_for_item(item_type, item_id)
return HttpResponse(json.dumps(response, default=json_handler),
content_type="application/json")
@user_passes_test(user_can_view_data)
def global_search_listing(request):
"""
Return results for a global search.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
# For object searches
if 'q' not in request.GET:
return render_to_response("error.html",
{"error" : 'No valid search criteria'},
RequestContext(request))
args = generate_global_search(request)
# If we matched a single ObjectID
if 'url' in args:
return redirect(args['url'], args['key'])
# For all other searches
if 'Result' in args and args['Result'] == "ERROR":
return render_to_response("error.html",
{"error": args['Message']},
RequestContext(request))
return render_to_response("search_listing.html",
args,
RequestContext(request))
def about(request):
"""
Return the About page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
return render_to_response('about.html',
{},
RequestContext(request))
def help(request):
"""
Return the Help page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
return render_to_response('help.html',
{},
RequestContext(request))
# Mongo Auth
def login(request):
"""
Authenticate a user.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
# Gather basic request information
crits_config = CRITsConfig.objects().first()
url = request.GET.get('next')
user_agent = request.META.get('HTTP_USER_AGENT', '')
remote_addr = request.META.get('REMOTE_ADDR', '')
accept_language = request.META.get('HTTP_ACCEPT_LANGUAGE', '')
next_url = request.REQUEST.get('next', None)
# Setup defaults
username = None
login = True
show_auth = True
message = crits_config.crits_message
token_message = """
<b>If you are not using TOTP or not sure what TOTP is,<br />leave the Token field empty.</b><br />
If you are setting up TOTP for the first time, please enter a PIN above.<br />
If you are already setup with TOTP, please enter your PIN + Key above."""
response = {}
# Check for remote user being enabled and check for user
if crits_config.remote_user:
show_auth = False
username = request.META.get(settings.REMOTE_USER_META,None)
if username:
resp = login_user(username, None, next_url, user_agent,
remote_addr, accept_language, request,
totp_pass=None)
if resp['success']:
return HttpResponseRedirect(resp['message'])
else:
# Login failed, set messages/settings and continue
message = resp['message']
login = False
if resp['type'] == "totp_required":
login = True
else:
logger.warn("REMOTE_USER enabled, but no user passed.")
message = 'REMOTE_USER not provided. Please notify an admin.'
return render_to_response('login.html',
{'next': url,
'theme': 'default',
'login': False,
'show_auth': False,
'message': message,
'token_message': token_message},
RequestContext(request))
# Attempt authentication
if request.method == 'POST' and request.is_ajax():
next_url = request.POST.get('next_url', None)
# Get username from form if this is not Remote User
if not crits_config.remote_user:
username = request.POST.get('username', None)
# Even if it is remote user, try to get password.
# Remote user will not have one so we pass None.
password = request.POST.get('password', None)
# TOTP can still be required for Remote Users
totp_pass = request.POST.get('totp_pass', None)
if (not username or
(not totp_pass and crits_config.totp_web == 'Required')):
response['success'] = False
response['message'] = 'Unknown user or bad password.'
return HttpResponse(json.dumps(response),
mimetype="application/json")
# This causes auth failures with LDAP and upper-case name parts
#username = username.lower()
# login_user will handle the following cases:
# - User logging in with no TOTP enabled.
# - User logging in with TOTP enabled.
# - User logging in and setting up TOTP for the first time.
# It should return the string to use for setting up their
# authenticator and then prompt the user to submit pin + token.
resp = login_user(username, password, next_url, user_agent,
remote_addr, accept_language, request,
totp_pass=totp_pass)
return HttpResponse(json.dumps(resp), mimetype="application/json")
# Display template for authentication
return render_to_response('login.html',
{'next': url,
'theme': 'default',
'login': login,
'show_auth': show_auth,
'message': message,
'token_message': token_message},
RequestContext(request))
def reset_password(request):
"""
Reset a user password.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
action = request.POST.get('action', None)
username = request.POST.get('username', None)
email = request.POST.get('email', None)
submitted_rcode = request.POST.get('reset_code', None)
new_p = request.POST.get('new_p', None)
new_p_c = request.POST.get('new_p_c', None)
analyst = request.user.username
return reset_user_password(username=username,
action=action,
email=email,
submitted_rcode=submitted_rcode,
new_p=new_p,
new_p_c=new_p_c,
analyst=analyst)
return render_to_response('login.html',
{'reset': True},
RequestContext(request))
@user_passes_test(user_can_view_data)
def profile(request, user=None):
"""
Render the User Profile page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param user: The user to render the profile page for.
:type user: str
:returns: :class:`django.http.HttpResponse`
"""
if user and is_admin(request.user.username):
username = user
else:
username = request.user.username
args = generate_user_profile(username, request)
if 'status' in args and args['status'] == "ERROR":
return render_to_response('error.html',
{'data': request,
'error': "Invalid request"},
RequestContext(request))
return render_to_response('profile.html',
args,
RequestContext(request))
@user_passes_test(user_can_view_data)
def dashboard(request):
"""
Render the Dashboard.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
return generate_dashboard(request)
@user_passes_test(user_can_view_data)
def counts_listing(request,option=None):
"""
Render the Counts jtable.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
return generate_counts_jtable(request, option)
@user_passes_test(user_can_view_data)
def source_releasability(request):
"""
Modify a top-level object's releasability. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
type_ = request.POST.get('type', None)
id_ = request.POST.get('id', None)
name = request.POST.get('name', None)
action = request.POST.get('action', None)
date = request.POST.get('date', datetime.datetime.now())
if not isinstance(date, datetime.datetime):
date = parse(date, fuzzy=True)
user = str(request.user.username)
if not type_ or not id_ or not name or not action:
error = "Modifying releasability requires a type, id, source, and action"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
if action == "add":
result = add_releasability(type_, id_, name, user)
elif action == "add_instance":
result = add_releasability_instance(type_, id_, name, user)
elif action == "remove":
result = remove_releasability(type_, id_, name, user)
elif action == "remove_instance":
result = remove_releasability_instance(type_, id_, name, date, user)
else:
error = "Unknown releasability action: %s" % action
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
if result['success']:
subscription = {
'type': type_,
'id': id_
}
html = render_to_string('releasability_header_widget.html',
{'releasability': result['obj'],
'subscription': subscription},
RequestContext(request))
response = {'success': result['success'],
'html': html}
else:
response = {'success': result['success'],
'error': result['message']}
return HttpResponse(json.dumps(response),
mimetype="application/json")
else:
error = "Expected AJAX POST!"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
def source_access(request):
"""
Modify a user's profile. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if not is_admin(request.user.username):
error = "You do not have permission to use this feature!"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
if request.method == 'POST' and request.is_ajax():
form = SourceAccessForm(request.POST)
if form.is_valid():
data = form.cleaned_data
result = modify_source_access(request.user.username,
data)
if result['success']:
message = '<div>User modified successfully!</div>'
result['message'] = message
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
return HttpResponse(json.dumps({'form':form.as_table()}),
mimetype="application/json")
else:
error = "Expected AJAX POST!"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_is_admin)
def source_add(request):
"""
Add a source to CRITs. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
source_form = AddSourceForm(request.POST)
analyst = request.user.username
if source_form.is_valid():
result = add_new_source(source_form.cleaned_data['source'],
analyst)
if result:
msg = ('<div>Source added successfully! Add this source to '
'users to utilize it.</div>')
message = {'message': msg,
'success': True}
else:
message = {'message': '<div>Source addition failed!</div>',
'success': False}
else:
message = {'success': False,
'form': source_form.as_table()}
return HttpResponse(json.dumps(message),
mimetype="application/json")
return render_to_response("error.html",
{"error" : 'Expected AJAX POST' },
RequestContext(request))
@user_passes_test(user_is_admin)
def user_role_add(request):
"""
Add a user role to CRITs. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
role_form = AddUserRoleForm(request.POST)
analyst = request.user.username
if role_form.is_valid() and is_admin(request.user.username):
result = add_new_user_role(role_form.cleaned_data['role'],
analyst)
if result:
message = {'message': '<div>User role added successfully!</div>',
'success': True}
else:
message = {'message': '<div>User role addition failed!</div>',
'success': False}
else:
message = {'success': False,
'form': role_form.as_table()}
return HttpResponse(json.dumps(message),
mimetype="application/json")
return render_to_response("error.html",
{"error" : 'Expected AJAX POST'},
RequestContext(request))
@user_passes_test(user_can_view_data)
def add_update_source(request, method, obj_type, obj_id):
"""
Add/Update a source for a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param method: Whether this is an "add" or "update".
:type method: str
:param obj_type: The type of top-level object.
:type obj_type: str
:param obj_id: The ObjectId to search for.
:type obj_id: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
form = SourceForm(request.user.username, request.POST)
if form.is_valid():
data = form.cleaned_data
analyst = request.user.username
# check to see that this user can already see the object
if (data['name'] in user_sources(analyst)):
if method == "add":
date = datetime.datetime.now()
else:
date = datetime.datetime.strptime(data['date'],
settings.PY_DATETIME_FORMAT)
result = source_add_update(obj_type,
obj_id,
method,
data['name'],
method=data['method'],
reference=data['reference'],
date=date,
analyst=analyst)
if 'object' in result:
if method == "add":
result['header'] = result['object'].name
result['data_field'] = 'name'
result['html'] = render_to_string('sources_header_widget.html',
{'source': result['object'],
'obj_type': obj_type,
'obj_id': obj_id},
RequestContext(request))
else:
result['html'] = render_to_string('sources_row_widget.html',
{'source': result['object'],
'instance': result['instance'],
'obj_type': obj_type,
'obj_id': obj_id},
RequestContext(request))
return HttpResponse(json.dumps(result,
default=json_handler),
mimetype='application/json')
else:
return HttpResponse(json.dumps({'success': False,
'form': form.as_table()}),
mimetype='application/json')
else:
return HttpResponse(json.dumps({'success': False,
'form':form.as_table()}),
mimetype='application/json')
return HttpResponse({})
@user_passes_test(user_can_view_data)
def remove_source(request, obj_type, obj_id):
"""
Remove a source from a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param obj_type: The type of top-level object.
:type obj_type: str
:param obj_id: The ObjectId to search for.
:type obj_id: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
if is_admin(request.user.username):
date = datetime.datetime.strptime(request.POST['key'],
settings.PY_DATETIME_FORMAT)
name = request.POST['name']
result = source_remove(obj_type,
obj_id,
name,
date,
'%s' % request.user.username)
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "You do not have permission to remove this item"
return render_to_response("error.html",
{'error': error},
RequestContext(request))
return HttpResponse({})
@user_passes_test(user_can_view_data)
def remove_all_source(request, obj_type, obj_id):
"""
Remove all sources from a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param obj_type: The type of top-level object.
:type obj_type: str
:param obj_id: The ObjectId to search for.
:type obj_id: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
if is_admin(request.user.username):
name = request.POST['key']
result = source_remove_all(obj_type,
obj_id,
name, '%s' % request.user.username)
result['last'] = True
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "You do not have permission to remove this item"
return render_to_response("error.html",
{'error': error},
RequestContext(request))
return HttpResponse({})
@user_passes_test(user_can_view_data)
def bucket_promote(request):
"""
Promote a bucket to a Campaign. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
bucket = request.GET.get("name", None)
if not bucket:
return render_to_response("error.html",
{'error': 'Need a bucket.'},
RequestContext(request))
form = CampaignForm(request.POST)
if form.is_valid():
analyst = request.user.username
confidence = form.cleaned_data['confidence']
name = form.cleaned_data['name']
related = form.cleaned_data['related']
description = form.cleaned_data['description']
result = promote_bucket_list(bucket,
confidence,
name,
related,
description,
analyst)
return HttpResponse(json.dumps(result), mimetype="application/json")
@user_passes_test(user_can_view_data)
def bucket_modify(request):
"""
Modify a bucket list for a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
tags = request.POST['tags'].split(",")
oid = request.POST['oid']
itype = request.POST['itype']
modify_bucket_list(itype, oid, tags, request.user.username)
return HttpResponse({})
@user_passes_test(user_can_view_data)
def bucket_list(request, option=None):
"""
Generate the jtable data for rendering in the list template.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
if option == "csv":
return generate_bucket_csv(request)
return generate_bucket_jtable(request, option)
@user_passes_test(user_can_view_data)
def download_object(request):
"""
Download a top-level object.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method != "POST":
return render_to_response("error.html",
{"error" : "Expecting POST."},
RequestContext(request))
form = DownloadFileForm(request.POST)
if form.is_valid():
total_limit = form.cleaned_data['total_limit']
depth_limit = form.cleaned_data['depth_limit']
rel_limit = form.cleaned_data['rel_limit']
bin_fmt = form.cleaned_data['bin_fmt']
rst_fmt = form.cleaned_data['rst_fmt']
objects = form.cleaned_data['objects']
obj_type = form.cleaned_data['obj_type']
obj_id = form.cleaned_data['obj_id']
crits_config = CRITsConfig.objects().first()
total_max = getattr(crits_config, 'total_max', settings.TOTAL_MAX)
depth_max = getattr(crits_config, 'depth_max', settings.DEPTH_MAX)
rel_max = getattr(crits_config, 'rel_max', settings.REL_MAX)
try:
total_limit = int(total_limit)
depth_limit = int(depth_limit)
rel_limit = int(rel_limit)
if total_limit < 0 or depth_limit < 0 or rel_limit < 0:
raise ValueError("Limits must be positive integers.")
except:
return render_to_response("error.html",
{"error" : "Limits must be positive integers."},
RequestContext(request))
# Don't exceed the configured maximums. This is done in the view
# so that scripts can enforce their own limits.
if total_limit > total_max:
total_limit = total_max
if depth_limit > depth_max:
depth_limit = depth_max
if rel_limit > rel_max:
rel_limit = rel_max
sources = user_sources(request.user.username)
if not sources:
return render_to_response("error.html",
{"error" : "No matching data."},
RequestContext(request))
result = download_object_handler(total_limit,
depth_limit,
rel_limit,
rst_fmt,
bin_fmt,
objects,
[(obj_type, obj_id)],
sources)
if not result['success']:
return render_to_response("error.html",
{"error" : "No matching data."},
RequestContext(request))
response = HttpResponse()
response['mimetype'] = result['mimetype']
response['Content-Disposition'] = 'attachment; filename=%s' % result['filename']
response.write(result['data'])
return response
else:
return render_to_response("error.html",
{"error" : "Invalid form."},
RequestContext(request))
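# Field names below come straight from DownloadFileForm as used above; the
# concrete values and the idea of POSTing a ready-made payload are only an
# illustration, not taken from this module:
#
#   payload = {'obj_type': 'Sample', 'obj_id': obj_id,
#              'total_limit': 250, 'depth_limit': 3, 'rel_limit': 50,
#              'rst_fmt': 'zip', 'bin_fmt': 'raw', 'objects': []}
#
# POSTing this form data to the view returns an attachment whose name and
# MIME type come from download_object_handler()'s result.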
@user_passes_test(user_can_view_data)
def timeline(request, data_type="dns"):
"""
Render the timeline.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param data_type: The type of data to include in the timeline.
:type data_type: str
:returns: :class:`django.http.HttpResponse`
"""
format = request.GET.get("format", "none")
analyst = request.user.username
sources = user_sources(analyst)
query = {}
params = {}
if request.GET.get("campaign"):
query["campaign.name"] = request.GET.get("campaign")
params["campaign"] = query["campaign.name"]
if request.GET.get("backdoor"):
query["backdoor.name"] = request.GET.get("backdoor")
params["backdoor"] = query["backdoor.name"]
query["source.name"] = {"$in": sources}
page_title = data_type
if format == "json":
timeglider = []
tline = {}
tline['id'] = "tline"
tline['focus_date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
tline['initial_zoom'] = "20"
tline['timezone'] = strftime("%z", gmtime())
events = []
# DNS data
if data_type == "dns":
tline['title'] = "DNS"
events = dns_timeline(query, analyst, sources)
# Email data
elif data_type == "email":
tline['title'] = "Emails"
events = email_timeline(query, analyst, sources)
# Indicator data
elif data_type == "indicator":
tline['title'] = "Indicators"
tline['initial_zoom'] = "14"
events = indicator_timeline(query, analyst, sources)
tline['events'] = events
timeglider.append(tline)
return HttpResponse(json.dumps(timeglider,
default=json_util.default),
mimetype="application/json")
else:
return render_to_response('timeline.html',
{'data_type': data_type,
'params': json.dumps(params),
'page_title': page_title},
RequestContext(request))
def base_context(request):
"""
Set of common content to include in the Response so it is always available
to every template on every page. This is included in settings.py in the
TEMPLATE_CONTEXT_PROCESSORS.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: dict
"""
crits_config = CRITsConfig.objects().first()
base_context = {}
classification = getattr(crits_config,
'classification',
settings.CLASSIFICATION)
instance_name = getattr(crits_config,
'instance_name',
settings.INSTANCE_NAME)
company_name = getattr(crits_config,
'company_name',
settings.COMPANY_NAME)
crits_version = settings.CRITS_VERSION
enable_toasts = getattr(crits_config,
'enable_toasts',
settings.ENABLE_TOASTS)
git_branch = getattr(crits_config,
'git_branch',
settings.GIT_BRANCH)
git_hash = getattr(crits_config,
'git_hash',
settings.GIT_HASH)
git_hash_long = getattr(crits_config,
'git_hash_long',
settings.GIT_HASH_LONG)
git_repo_url = getattr(crits_config,
'git_repo_url',
settings.GIT_REPO_URL)
hide_git_hash = getattr(crits_config,
'hide_git_hash',
settings.HIDE_GIT_HASH)
splunk_url = getattr(crits_config,
'splunk_search_url',
settings.SPLUNK_SEARCH_URL)
secure_cookie = getattr(crits_config,
'secure_cookie',
settings.SECURE_COOKIE)
mongo_database = settings.MONGO_DATABASE
base_context['crits_config'] = crits_config
base_context['current_datetime'] = datetime.datetime.now()
base_context['classification'] = classification.upper()
base_context['instance_name'] = instance_name
base_context['company_name'] = company_name
base_context['crits_version'] = crits_version
base_context['enable_toasts'] = enable_toasts
if git_repo_url:
base_context['git_repo_link'] = "<a href='"+git_repo_url+"/commit/"+git_hash_long+"'>"+git_branch+':'+git_hash+"</a>"
else:
base_context['git_repo_link'] = "%s:%s" % (git_branch, git_hash)
base_context['hide_git_hash'] = hide_git_hash
base_context['splunk_search_url'] = splunk_url
base_context['mongo_database'] = mongo_database
base_context['secure_cookie'] = secure_cookie
base_context['service_nav_templates'] = settings.SERVICE_NAV_TEMPLATES
base_context['service_cp_templates'] = settings.SERVICE_CP_TEMPLATES
base_context['service_tab_templates'] = settings.SERVICE_TAB_TEMPLATES
if request.user.is_authenticated():
user = request.user.username
# Forms that don't require a user
base_context['add_indicator_action'] = NewIndicatorActionForm()
base_context['add_target'] = TargetInfoForm()
base_context['campaign_add'] = AddCampaignForm()
base_context['comment_add'] = AddCommentForm()
base_context['inline_comment_add'] = InlineCommentForm()
base_context['campaign_form'] = CampaignForm()
base_context['location_add'] = AddLocationForm()
base_context['add_raw_data_type'] = NewRawDataTypeForm()
base_context['relationship_form'] = ForgeRelationshipForm()
base_context['source_access'] = SourceAccessForm()
base_context['upload_tlds'] = TLDUpdateForm()
base_context['user_role_add'] = AddUserRoleForm()
base_context['new_ticket'] = TicketForm(initial={'date': datetime.datetime.now()})
base_context['add_actor_identifier_type'] = AddActorIdentifierTypeForm()
base_context['attribute_actor_identifier'] = AttributeIdentifierForm()
# Forms that require a user
try:
base_context['actor_add'] = AddActorForm(user)
except Exception, e:
logger.warning("Base Context AddActorForm Error: %s" % e)
try:
base_context['add_actor_identifier'] = AddActorIdentifierForm(user)
except Exception, e:
logger.warning("Base Context AddActorIdentifierForm Error: %s" % e)
try:
base_context['backdoor_add'] = AddBackdoorForm(user)
except Exception, e:
logger.warning("Base Context AddBackdoorForm Error: %s" % e)
try:
base_context['exploit_add'] = AddExploitForm(user)
except Exception, e:
logger.warning("Base Context AddExploitForm Error: %s" % e)
try:
base_context['add_domain'] = AddDomainForm(user)
except Exception, e:
logger.warning("Base Context AddDomainForm Error: %s" % e)
try:
base_context['ip_form'] = AddIPForm(user, None)
except Exception, e:
logger.warning("Base Context AddIPForm Error: %s" % e)
try:
base_context['source_add'] = SourceForm(user,
initial={'analyst': user})
except Exception, e:
logger.warning("Base Context SourceForm Error: %s" % e)
try:
base_context['upload_cert'] = UploadCertificateForm(user)
except Exception, e:
logger.warning("Base Context UploadCertificateForm Error: %s" % e)
try:
base_context['upload_csv'] = UploadIndicatorCSVForm(user)
except Exception, e:
logger.warning("Base Context UploadIndicatorCSVForm Error: %s" % e)
try:
base_context['upload_email_outlook'] = EmailOutlookForm(user)
except Exception, e:
logger.warning("Base Context EmailOutlookForm Error: %s" % e)
try:
base_context['upload_email_eml'] = EmailEMLForm(user)
except Exception, e:
logger.warning("Base Context EmailEMLForm Error: %s" % e)
try:
base_context['upload_email_fields'] = EmailUploadForm(user)
except Exception, e:
logger.warning("Base Context EmailUploadForm Error: %s" % e)
try:
base_context['upload_email_yaml'] = EmailYAMLForm(user)
except Exception, e:
logger.warning("Base Context EmailYAMLForm Error: %s" % e)
try:
base_context['upload_email_raw'] = EmailRawUploadForm(user)
except Exception, e:
logger.warning("Base Context EmailRawUploadForm Error: %s" % e)
try:
base_context['upload_event'] = EventForm(user)
except Exception, e:
logger.warning("Base Context EventForm Error: %s" % e)
try:
base_context['upload_ind'] = UploadIndicatorForm(user)
except Exception, e:
logger.warning("Base Context UploadIndicatorForm Error: %s" % e)
try:
base_context['upload_pcap'] = UploadPcapForm(user)
except Exception, e:
logger.warning("Base Context UploadPcapForm Error: %s" % e)
try:
base_context['upload_text'] = UploadIndicatorTextForm(user)
except Exception, e:
logger.warning("Base Context UploadIndicatorTextForm Error: %s" % e)
try:
base_context['upload_sample'] = UploadFileForm(user)
except Exception, e:
logger.warning("Base Context UploadFileForm Error: %s" % e)
try:
base_context['object_form'] = AddObjectForm(user, None)
except Exception, e:
logger.warning("Base Context AddObjectForm Error: %s" % e)
try:
base_context['releasability_form'] = AddReleasabilityForm(user)
except Exception, e:
logger.warning("Base Context AddReleasabilityForm Error: %s" % e)
try:
base_context['screenshots_form'] = AddScreenshotForm(user)
except Exception, e:
logger.warning("Base Context AddScreenshotForm Error: %s" % e)
try:
base_context['upload_raw_data'] = UploadRawDataForm(user)
except Exception, e:
logger.warning("Base Context UploadRawDataForm Error: %s" % e)
try:
base_context['upload_raw_data_file'] = UploadRawDataFileForm(user)
except Exception, e:
logger.warning("Base Context UploadRawDataFileForm Error: %s" % e)
# Other info acquired from functions
try:
base_context['user_list'] = get_user_list()
except Exception, e:
logger.warning("Base Context get_user_list Error: %s" % e)
try:
base_context['email_notifications'] = get_user_email_notification(user)
except Exception, e:
logger.warning("Base Context get_user_email_notification Error: %s" % e)
try:
base_context['user_notifications'] = get_user_notifications(user,
count=True)
except Exception, e:
logger.warning("Base Context get_user_notifications Error: %s" % e)
try:
base_context['user_organization'] = get_user_organization(user)
except Exception, e:
logger.warning("Base Context get_user_organization Error: %s" % e)
try:
base_context['user_role'] = get_user_role(user)
except Exception, e:
logger.warning("Base Context get_user_role Error: %s" % e)
try:
base_context['user_source_list'] = user_sources(user)
except Exception, e:
logger.warning("Base Context user_sources Error: %s" % e)
nav_template = get_nav_template(request.user.prefs.nav)
if nav_template != None:
base_context['nav_template'] = nav_template
base_context['newer_notifications_location'] = request.user.prefs.toast_notifications.get('newer_notifications_location', 'top')
base_context['initial_notifications_display'] = request.user.prefs.toast_notifications.get('initial_notifications_display', 'show')
base_context['max_visible_notifications'] = request.user.prefs.toast_notifications.get('max_visible_notifications', 5)
base_context['notification_anchor_location'] = request.user.prefs.toast_notifications.get('notification_anchor_location', 'bottom_right')
base_context['nav_config'] = {'text_color': request.user.prefs.nav.get('text_color'),
'background_color': request.user.prefs.nav.get('background_color'),
'hover_text_color': request.user.prefs.nav.get('hover_text_color'),
'hover_background_color': request.user.prefs.nav.get('hover_background_color')}
if is_admin(request.user.username):
try:
base_context['source_create'] = AddSourceForm()
except Exception, e:
logger.warning("Base Context AddSourceForm Error: %s" % e)
base_context['category_list'] = [
{'collection': '', 'name': ''},
{'collection': settings.COL_BACKDOORS,
'name': 'Backdoors'},
{'collection': settings.COL_CAMPAIGNS,
'name': 'Campaigns'},
{'collection': settings.COL_EVENT_TYPES,
'name': 'Event Types'},
{'collection': settings.COL_IDB_ACTIONS,
'name': 'Indicator Actions'},
{'collection': settings.COL_INTERNAL_LOCATIONS,
'name': 'Internal Locations'},
{'collection': settings.COL_OBJECT_TYPES,
'name': 'Object Types'},
{'collection': settings.COL_RAW_DATA_TYPES,
'name': 'Raw Data Types'},
{'collection': settings.COL_RELATIONSHIP_TYPES,
'name': 'Relationship Types'},
{'collection': settings.COL_SOURCE_ACCESS,
'name': 'Sources'},
{'collection': settings.COL_USER_ROLES,
'name': 'User Roles'}
]
return base_context
@user_passes_test(user_can_view_data)
def user_context(request):
"""
Set of common content about the user to include in the Response so it is
always available to every template on every page. This is included in
settings.py in the TEMPLATE_CONTEXT_PROCESSORS.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: dict
"""
context = {}
try:
context['admin'] = is_admin(request.user.username)
except:
context['admin'] = False
# Get user theme
user = CRITsUser.objects(username=request.user.username).first()
context['theme'] = user.get_preference('ui', 'theme', 'default')
favorite_count = 0
favorites = user.favorites.to_dict()
for favorite in favorites.values():
favorite_count += len(favorite)
context['user_favorites'] = user.favorites.to_json()
context['favorite_count'] = favorite_count
return context
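# As the docstrings above note, these two context processors are registered in
# settings.py. A sketch of that wiring (the surrounding entries are assumed):
#
#   TEMPLATE_CONTEXT_PROCESSORS = (
#       'django.contrib.auth.context_processors.auth',
#       ...
#       'crits.core.views.base_context',
#       'crits.core.views.user_context',
#   )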
@user_passes_test(user_can_view_data)
def get_user_source_list(request):
"""
Get a user's source list. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
user_source_access = user_sources('%s' % request.user.username)
message = {'success': True,
'data': user_source_access}
return HttpResponse(json.dumps(message),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_is_admin)
def user_source_access(request, username=None):
"""
Get a user's source access list. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param username: The user to get the sources for.
:type username: str.
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
if not username:
username = request.POST.get('username', None)
user = get_user_info(username)
if user:
user = user.to_dict()
if 'sources' not in user:
user['sources'] = ''
else:
user = {'username': '',
'sources': '',
'organization': settings.COMPANY_NAME}
form = SourceAccessForm(initial=user)
message = {'success': True,
'message': form.as_table()}
return HttpResponse(json.dumps(message),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def user_preference_toggle(request, section, setting):
"""
Toggle a preference for a user. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param section: The preferences section to toggle.
:type section: str.
:param setting: The setting to toggle.
:type setting: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
pref = generate_user_preference(request, section, 'toggle', setting)
if not pref or 'toggle' not in pref:
error = "Unexpected Preference Toggle Received in AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
result = toggle_user_preference(request.user.username, section, setting, is_enabled=pref.get('enabled'))
if result['success']:
result["message"] = "(Saved)"
if result['state']:
result["text"] = "Enabled"
result["title"]= "Click to Disable"
else:
result["text"] = "Disabled"
result["title"]= "Click to Enable"
if 'reload' in pref:
result["reload"] = pref['reload']
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
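# Illustrative response shape for user_preference_toggle (a sketch inferred
# from the code above, not part of the original source): a successful toggle
# returns JSON along the lines of
#   {"success": true, "message": "(Saved)", "state": true,
#    "text": "Enabled", "title": "Click to Disable"}
# with "text"/"title" flipped when the preference ends up disabled.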
@user_passes_test(user_can_view_data)
def user_preference_update(request, section):
"""
Update a user preference. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param section: The preferences section to toggle.
:type section: str.
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
result = {}
pref = generate_user_preference(request,section)
if not pref or 'formclass' not in pref or not callable(pref['formclass']):
error = "Unexpected Form Received in AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
form = (pref['formclass'])(request, request.POST)
if form.is_valid():
data = form.cleaned_data
# Incoming attributes may be flattened, e.g.
# option.one.sub.key = value
# So this will unflatten them into a option: {one: a} dict
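            # Hypothetical illustration (not part of the original source): a
            # POST carrying {'toast_notifications.max_visible_notifications': 5}
            # would unflatten to
            # {'toast_notifications': {'max_visible_notifications': 5}}.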
values = unflatten( data )
result = update_user_preference(request.user.username, section, values)
result['values'] = values
if result['success']:
result["message"] = "(Saved)"
if 'reload' in pref:
result["reload"] = pref['reload']
else:
result['success'] = False
pref['form'] = form # Inject our form instance with validation results
result['html'] = render_to_string("preferences_widget.html",
{'pref': pref},
RequestContext(request))
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def clear_user_notifications(request):
"""
Clear a user's notifications.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
remove_user_notifications("%s" % request.user.username)
return HttpResponseRedirect(reverse('crits.core.views.profile') + '#notifications_button')
@user_passes_test(user_can_view_data)
def delete_user_notification(request, type_, oid):
"""
Delete a user notification. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param type_: The top-level object type.
:type type_: str
:param oid: The ObjectId.
:type oid: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
result = remove_user_from_notification("%s" % request.user.username,
oid,
type_)
message = "<p style=\"text-align: center;\">You have no new notifications!</p>"
result['message'] = message
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def change_subscription(request, stype, oid):
"""
Subscribe/unsubscribe a user from this top-level object. Should be an AJAX
POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param stype: The CRITs type of the top-level object.
:type stype: str
:param oid: The ObjectId of the top-level object.
:type oid: str
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
username = "%s" % request.user.username
message = ""
if is_user_subscribed(username, stype, oid):
unsubscribe_user(username, stype, oid)
message = ("<span class=\"ui-icon ui-icon-signal-diag subscription_link"
"_disable\" title=\"Subscribe\"></span>")
else:
subscribe_user(username, stype, oid)
message = ("<span class=\"ui-icon ui-icon-close subscription_link"
"_enable\" title=\"Unsubscribe\"></span>")
result = {'success': True,
'message': message}
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def source_subscription(request):
"""
Subscribe/unsubscribe a user from this source. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
username = "%s" % request.user.username
user_source_access = user_sources(username)
source = request.POST['source']
if source not in user_source_access:
error = "You do not have access to that source."
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
message = ""
if is_user_subscribed_to_source(username, source):
unsubscribe_from_source(username, source)
message = "unsubscribed"
else:
subscribe_to_source(username, source)
message = "subscribed"
result = {'success': True, 'message': message}
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
def collections(request):
"""
Set of common content about collections to include in the Response so it is
always available to every template on every page. This is included in
settings.py in the TEMPLATE_CONTEXT_PROCESSORS.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: dict
"""
colls = {}
colls['COL_CERTIFICATES'] = settings.COL_CERTIFICATES
colls['COL_EMAIL'] = settings.COL_EMAIL
colls['COL_EVENTS'] = settings.COL_EVENTS
colls['COL_DOMAINS'] = settings.COL_DOMAINS
colls['COL_INDICATORS'] = settings.COL_INDICATORS
colls['COL_IPS'] = settings.COL_IPS
colls['COL_PCAPS'] = settings.COL_PCAPS
colls['COL_RAW_DATA'] = settings.COL_RAW_DATA
colls['COL_SAMPLES'] = settings.COL_SAMPLES
colls['COL_TARGETS'] = settings.COL_TARGETS
return colls
@user_passes_test(user_can_view_data)
def change_password(request):
"""
Change a user's password. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
username = request.user.username
current_p = request.POST['current_p']
new_p = request.POST['new_p']
new_p_c = request.POST['new_p_c']
result = change_user_password(username,
current_p,
new_p,
new_p_c)
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def change_totp_pin(request):
"""
Change a user's TOTP pin. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
username = request.user.username
new_pin = request.POST.get('new_pin', None)
if new_pin:
result = save_user_secret(username, new_pin, "crits", (200,200))
if result['success']:
result['message'] = "Secret: %s" % result['secret']
if result['qr_img']:
qr_img = result['qr_img']
result['qr_img'] = '<br /><img src="data:image/png;base64,'
result['qr_img'] += '%s" />' % qr_img
else:
result['message'] = "Secret generation failed"
else:
result = {'message': "Please provide a pin"}
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_is_admin)
def control_panel(request):
"""
Render the control panel.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
return render_to_response("control_panel.html",
{},
RequestContext(request))
@user_passes_test(user_is_admin)
def users_listing(request, option=None):
"""
Generate the jtable data for rendering in the list template.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
return generate_users_jtable(request, option)
@user_passes_test(user_is_admin)
def toggle_user_active(request):
"""
Toggle a user active/inactive. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
user = request.POST.get('username', None)
analyst = request.user.username
if not user:
result = {'success': False}
else:
toggle_active(user, analyst)
result = {'success': True}
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_is_admin)
def item_editor(request):
"""
Render the item editor control panel page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
counts = {}
obj_list = [ActorThreatIdentifier,
Campaign,
IndicatorAction,
RawDataType,
SourceAccess,
UserRole]
for col_obj in obj_list:
counts[col_obj._meta['crits_type']] = col_obj.objects().count()
return render_to_response("item_editor.html",
{'counts': counts},
RequestContext(request))
@user_passes_test(user_is_admin)
def items_listing(request, itype, option=None):
"""
Generate the jtable data for rendering in the list template.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param itype: The item type.
:type itype: str
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
return generate_items_jtable(request, itype, option)
@user_passes_test(user_is_admin)
def audit_listing(request, option=None):
"""
Generate the jtable data for rendering in the list template.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
return generate_audit_jtable(request, option)
@user_passes_test(user_can_view_data)
def toggle_item_active(request):
"""
Toggle an item active/inactive. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == 'POST' and request.is_ajax():
type_ = request.POST.get('coll', None)
oid = request.POST.get('oid', None)
analyst = request.user.username
if not oid or not type_:
result = {'success': False}
else:
result = toggle_item_state(type_, oid, analyst)
return HttpResponse(json.dumps(result), mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def download_file(request, sample_md5):
"""
Download a file. Used mainly for situations where you are not using the
standard download file modal form in the UI.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param sample_md5: The MD5 of the file to download.
:type sample_md5: str
:returns: :class:`django.http.HttpResponse`
"""
dtype = request.GET.get("type", "sample")
if dtype in ('object', 'pcap', 'cert'):
return download_grid_file(request, dtype, sample_md5)
else:
return render_to_response('error.html',
{'data': request,
'error': "Unknown Type: %s" % dtype},
RequestContext(request))
@user_passes_test(user_can_view_data)
def details(request, type_=None, id_=None):
"""
    Redirect to the details page. This is useful when you know the type and ID
    but not the information normally needed to reach the details page.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param type_: The CRITs type of the top-level object.
:type type_: str
:param id_: The ObjectId to search for.
:type id_: str
:returns: :class:`django.http.HttpResponseRedirect`
"""
if not type_ or not id_:
return render_to_response('error.html',
{'error': "Need a type and id to redirect to."},
RequestContext(request))
redir = details_from_id(type_, id_)
if redir:
return HttpResponseRedirect(redir)
else:
return render_to_response('error.html',
{'error': "No details page exists for type %s" % type_},
RequestContext(request))
@user_passes_test(user_can_view_data)
def add_update_ticket(request, method, type_=None, id_=None):
"""
Add/update/remove a ticket for a top-level object.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param method: Whether this is an "add", "update", or "remove".
:type method: str
:param type_: The CRITs type of the top-level object.
:type type_: str
:param id_: The ObjectId to search for.
:type id_: str
:returns: :class:`django.http.HttpResponseRedirect`
"""
if method == "remove" and request.method == "POST" and request.is_ajax():
analyst = request.user.username
if is_admin(analyst):
date = datetime.datetime.strptime(request.POST['key'],
settings.PY_DATETIME_FORMAT)
date = date.replace(microsecond=date.microsecond/1000*1000)
result = ticket_remove(type_, id_, date, analyst)
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "You do not have permission to remove this item."
return render_to_response("error.html",
{'error': error},
RequestContext(request))
if request.method == "POST" and request.is_ajax():
form = TicketForm(request.POST)
if form.is_valid():
data = form.cleaned_data
add = {
'ticket_number': data['ticket_number'],
'analyst': request.user.username
}
if method == "add":
add['date'] = datetime.datetime.now()
result = ticket_add(type_, id_, add)
else:
date = datetime.datetime.strptime(data['date'],
settings.PY_DATETIME_FORMAT)
date = date.replace(microsecond=date.microsecond/1000*1000)
add['date'] = date
result = ticket_update(type_, id_, add)
crits_config = CRITsConfig.objects().first()
if 'object' in result:
result['html'] = render_to_string('tickets_row_widget.html',
{'ticket': result['object'],
                                               'admin': is_admin(request.user.username),
'crits_config': crits_config,
'obj_type': type_,
'obj': class_from_id(type_, id_)})
return HttpResponse(json.dumps(result,
default=json_handler),
mimetype="application/json")
else: #invalid form
return HttpResponse(json.dumps({'success':False,
'form': form.as_table()}),
mimetype="application/json")
#default. Should we do anything else here?
return HttpResponse({})
@user_passes_test(user_can_view_data)
def get_search_help(request):
"""
Render the search help box. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
result = {'template': render_to_string('search_help.html', {})}
return HttpResponse(json.dumps(result, default=json_handler),
mimetype="application/json")
@user_passes_test(user_can_view_data)
def get_api_key(request):
"""
Get an API key for a user. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
if request.method == "POST" and request.is_ajax():
username = request.user.username
name = request.POST.get('name', None)
if not name:
return HttpResponse(json.dumps({'success': False,
'message': 'Need a name.'}),
mimetype="application/json")
result = get_api_key_by_name(username, name)
if result:
return HttpResponse(json.dumps({'success': True,
'message': result}),
mimetype="application/json")
else:
return HttpResponse(json.dumps({'success': False,
'message': 'No key for that name.'}),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def create_api_key(request):
"""
Create an API key for a user. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
if request.method == "POST" and request.is_ajax():
username = request.user.username
name = request.POST.get('name', None)
if not name:
return HttpResponse(json.dumps({'success': False,
'message': 'Need a name.'}),
mimetype="application/json")
result = create_api_key_by_name(username, name)
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def make_default_api_key(request):
"""
Set an API key as default for a user. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
if request.method == "POST" and request.is_ajax():
username = request.user.username
name = request.POST.get('name', None)
if not name:
return HttpResponse(json.dumps({'success': False,
'message': 'Need a name.'}),
mimetype="application/json")
result = make_default_api_key_by_name(username, name)
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def revoke_api_key(request):
"""
Revoke an API key for a user. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponseRedirect`
"""
if request.method == "POST" and request.is_ajax():
username = request.user.username
name = request.POST.get('name', None)
if not name:
return HttpResponse(json.dumps({'success': False,
'message': 'Need a name.'}),
mimetype="application/json")
result = revoke_api_key_by_name(username, name)
return HttpResponse(json.dumps(result),
mimetype="application/json")
else:
error = "Expected AJAX POST"
return render_to_response("error.html",
{"error" : error },
RequestContext(request))
@user_passes_test(user_can_view_data)
def sector_modify(request):
"""
Modify a sectors list for a top-level object. Should be an AJAX POST.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
sectors = request.POST['sectors'].split(",")
oid = request.POST['oid']
itype = request.POST['itype']
modify_sector_list(itype, oid, sectors, request.user.username)
return HttpResponse({})
@user_passes_test(user_can_view_data)
def sector_list(request, option=None):
"""
Generate the jtable data for rendering in the list template.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
if option == "csv":
return generate_sector_csv(request)
return generate_sector_jtable(request, option)
@user_passes_test(user_can_view_data)
def get_available_sectors(request):
"""
Get the available sectors to use.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
return HttpResponse(
json.dumps(Sectors.values(sort=True), default=json_handler),
content_type='application/json'
)
return HttpResponse({})
@user_passes_test(user_can_view_data)
def bucket_autocomplete(request):
"""
Get the list of current buckets to autocomplete.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
if request.method == "POST" and request.is_ajax():
term = request.POST.get('term', None)
if term:
return get_bucket_autocomplete(term)
return HttpResponse({})
| mit |
petemounce/ansible | lib/ansible/modules/cloud/openstack/os_port.py | 11 | 12625 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_port
short_description: Add/Update/Delete ports from an OpenStack cloud.
extends_documentation_fragment: openstack
author: "Davide Agnello (@dagnello)"
version_added: "2.0"
description:
- Add, Update or Remove ports from an OpenStack cloud. A I(state) of
'present' will ensure the port is created or updated if required.
options:
network:
description:
- Network ID or name this port belongs to.
required: true
name:
description:
- Name that has to be given to the port.
required: false
default: None
fixed_ips:
description:
- Desired IP and/or subnet for this port. Subnet is referenced by
subnet_id and IP is referenced by ip_address.
required: false
default: None
admin_state_up:
description:
- Sets admin state.
required: false
default: None
mac_address:
description:
- MAC address of this port.
required: false
default: None
security_groups:
description:
- Security group(s) ID(s) or name(s) associated with the port (comma
separated string or YAML list)
required: false
default: None
no_security_groups:
description:
- Do not associate a security group with this port.
required: false
default: False
allowed_address_pairs:
description:
- "Allowed address pairs list. Allowed address pairs are supported with
dictionary structure.
e.g. allowed_address_pairs:
- ip_address: 10.1.0.12
mac_address: ab:cd:ef:12:34:56
- ip_address: ..."
required: false
default: None
extra_dhcp_opts:
description:
- "Extra dhcp options to be assigned to this port. Extra options are
supported with dictionary structure.
e.g. extra_dhcp_opts:
- opt_name: opt name1
opt_value: value1
- opt_name: ..."
required: false
default: None
device_owner:
description:
- The ID of the entity that uses this port.
required: false
default: None
device_id:
description:
- Device ID of device using this port.
required: false
default: None
state:
description:
- Should the resource be present or absent.
choices: [present, absent]
default: present
availability_zone:
description:
      - Ignored. Present for backwards compatibility.
required: false
'''
EXAMPLES = '''
# Create a port
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
network: foo
# Create a port with a static IP
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
network: foo
fixed_ips:
- ip_address: 10.1.0.21
# Create a port with No security groups
- os_port:
state: present
auth:
auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
network: foo
no_security_groups: True
# Update the existing 'port1' port with multiple security groups (version 1)
- os_port:
state: present
auth:
      auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
security_groups: 1496e8c7-4918-482a-9172-f4f00fc4a3a5,057d4bdf-6d4d-472...
# Update the existing 'port1' port with multiple security groups (version 2)
- os_port:
state: present
auth:
      auth_url: https://region-b.geo-1.identity.hpcloudsvc.com:35357/v2.0/
username: admin
password: admin
project_name: admin
name: port1
security_groups:
- 1496e8c7-4918-482a-9172-f4f00fc4a3a5
- 057d4bdf-6d4d-472...
'''
RETURN = '''
id:
description: Unique UUID.
returned: success
type: string
name:
description: Name given to the port.
returned: success
type: string
network_id:
description: Network ID this port belongs in.
returned: success
type: string
security_groups:
description: Security group(s) associated with this port.
returned: success
type: list
status:
description: Port's status.
returned: success
type: string
fixed_ips:
description: Fixed ip(s) associated with this port.
returned: success
type: list
tenant_id:
description: Tenant id associated with this port.
returned: success
type: string
allowed_address_pairs:
description: Allowed address pairs with this port.
returned: success
type: list
admin_state_up:
description: Admin state up flag for this port.
returned: success
type: bool
'''
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
def _needs_update(module, port, cloud):
"""Check for differences in the updatable values.
NOTE: We don't currently allow name updates.
"""
compare_simple = ['admin_state_up',
'mac_address',
'device_owner',
'device_id']
compare_dict = ['allowed_address_pairs',
'extra_dhcp_opts']
compare_list = ['security_groups']
for key in compare_simple:
if module.params[key] is not None and module.params[key] != port[key]:
return True
for key in compare_dict:
if module.params[key] is not None and module.params[key] != port[key]:
return True
for key in compare_list:
if module.params[key] is not None and (set(module.params[key]) !=
set(port[key])):
return True
# NOTE: if port was created or updated with 'no_security_groups=True',
# subsequent updates without 'no_security_groups' flag or
# 'no_security_groups=False' and no specified 'security_groups', will not
# result in an update to the port where the default security group is
# applied.
if module.params['no_security_groups'] and port['security_groups'] != []:
return True
if module.params['fixed_ips'] is not None:
for item in module.params['fixed_ips']:
if 'ip_address' in item:
# if ip_address in request does not match any in existing port,
# update is required.
if not any(match['ip_address'] == item['ip_address']
for match in port['fixed_ips']):
return True
if 'subnet_id' in item:
return True
for item in port['fixed_ips']:
# if ip_address in existing port does not match any in request,
# update is required.
if not any(match.get('ip_address') == item['ip_address']
for match in module.params['fixed_ips']):
return True
return False
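# Illustrative note (an assumption, not from the original module): with
#   module.params['fixed_ips'] = [{'ip_address': '10.1.0.21'}]
# and an existing port whose only fixed IP is 10.1.0.21, _needs_update()
# returns False; requesting a different address, or including a subnet_id
# entry, forces an update instead.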
def _system_state_change(module, port, cloud):
state = module.params['state']
if state == 'present':
if not port:
return True
return _needs_update(module, port, cloud)
if state == 'absent' and port:
return True
return False
def _compose_port_args(module, cloud):
port_kwargs = {}
optional_parameters = ['name',
'fixed_ips',
'admin_state_up',
'mac_address',
'security_groups',
'allowed_address_pairs',
'extra_dhcp_opts',
'device_owner',
'device_id']
for optional_param in optional_parameters:
if module.params[optional_param] is not None:
port_kwargs[optional_param] = module.params[optional_param]
if module.params['no_security_groups']:
port_kwargs['security_groups'] = []
return port_kwargs
def get_security_group_id(module, cloud, security_group_name_or_id):
security_group = cloud.get_security_group(security_group_name_or_id)
if not security_group:
module.fail_json(msg="Security group: %s, was not found"
% security_group_name_or_id)
return security_group['id']
def main():
argument_spec = openstack_full_argument_spec(
network=dict(required=False),
name=dict(required=False),
fixed_ips=dict(type='list', default=None),
admin_state_up=dict(type='bool', default=None),
mac_address=dict(default=None),
security_groups=dict(default=None, type='list'),
no_security_groups=dict(default=False, type='bool'),
allowed_address_pairs=dict(type='list', default=None),
extra_dhcp_opts=dict(type='list', default=None),
device_owner=dict(default=None),
device_id=dict(default=None),
state=dict(default='present', choices=['absent', 'present']),
)
module_kwargs = openstack_module_kwargs(
mutually_exclusive=[
['no_security_groups', 'security_groups'],
]
)
module = AnsibleModule(argument_spec,
supports_check_mode=True,
**module_kwargs)
if not HAS_SHADE:
module.fail_json(msg='shade is required for this module')
name = module.params['name']
state = module.params['state']
try:
cloud = shade.openstack_cloud(**module.params)
if module.params['security_groups']:
            # translate security_groups to UUIDs if names were provided
module.params['security_groups'] = [
get_security_group_id(module, cloud, v)
for v in module.params['security_groups']
]
port = None
network_id = None
if name:
port = cloud.get_port(name)
if module.check_mode:
module.exit_json(changed=_system_state_change(module, port, cloud))
changed = False
if state == 'present':
if not port:
network = module.params['network']
if not network:
module.fail_json(
msg="Parameter 'network' is required in Port Create"
)
port_kwargs = _compose_port_args(module, cloud)
network_object = cloud.get_network(network)
if network_object:
network_id = network_object['id']
else:
module.fail_json(
msg="Specified network was not found."
)
port = cloud.create_port(network_id, **port_kwargs)
changed = True
else:
if _needs_update(module, port, cloud):
port_kwargs = _compose_port_args(module, cloud)
port = cloud.update_port(port['id'], **port_kwargs)
changed = True
module.exit_json(changed=changed, id=port['id'], port=port)
if state == 'absent':
if port:
cloud.delete_port(port['id'])
changed = True
module.exit_json(changed=changed)
except shade.OpenStackCloudException as e:
module.fail_json(msg=str(e))
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
nfallen/servo | tests/wpt/css-tests/css21_dev/xhtml1print/reference/support/fonts/makegsubfonts.py | 820 | 14309 |
import os
import textwrap
from xml.etree import ElementTree
from fontTools.ttLib import TTFont, newTable
from fontTools.misc.psCharStrings import T2CharString
from fontTools.ttLib.tables.otTables import GSUB,\
ScriptList, ScriptRecord, Script, DefaultLangSys,\
FeatureList, FeatureRecord, Feature,\
LookupList, Lookup, AlternateSubst, SingleSubst
# paths
directory = os.path.dirname(__file__)
shellSourcePath = os.path.join(directory, "gsubtest-shell.ttx")
shellTempPath = os.path.join(directory, "gsubtest-shell.otf")
featureList = os.path.join(directory, "gsubtest-features.txt")
javascriptData = os.path.join(directory, "gsubtest-features.js")
outputPath = os.path.join(os.path.dirname(directory), "gsubtest-lookup%d")
baseCodepoint = 0xe000
# -------
# Features
# -------
f = open(featureList, "rb")
text = f.read()
f.close()
mapping = []
for line in text.splitlines():
line = line.strip()
if not line:
continue
if line.startswith("#"):
continue
# parse
values = line.split("\t")
tag = values.pop(0)
mapping.append(tag);
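# Illustrative sketch of the expected gsubtest-features.txt layout (an
# assumption, not taken from the actual data file): one tab-separated record
# per OpenType feature, with the four-letter tag in the first field, e.g.
#   liga<TAB>Standard Ligatures
#   smcp<TAB>Small Capitals
# Only the leading tag is kept in `mapping`; any remaining fields are ignored.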
# --------
# Outlines
# --------
def addGlyphToCFF(glyphName=None, program=None, private=None, globalSubrs=None, charStringsIndex=None, topDict=None, charStrings=None):
charString = T2CharString(program=program, private=private, globalSubrs=globalSubrs)
charStringsIndex.append(charString)
glyphID = len(topDict.charset)
charStrings.charStrings[glyphName] = glyphID
topDict.charset.append(glyphName)
def makeLookup1():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup1")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
# bump this up so that the sequence is the same as the lookup 3 font
cp += 3
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 1
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = SingleSubst()
subtable.Format = 2
subtable.LookupType = 1
subtable.mapping = {
"%s.pass" % tag : "%s.fail" % tag,
"%s.fail" % tag : "%s.pass" % tag,
}
lookup.SubTable.append(subtable)
path = outputPath % 1 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeLookup3():
# make a variation of the shell TTX data
f = open(shellSourcePath)
ttxData = f.read()
f.close()
ttxData = ttxData.replace("__familyName__", "gsubtest-lookup3")
tempShellSourcePath = shellSourcePath + ".temp"
f = open(tempShellSourcePath, "wb")
f.write(ttxData)
f.close()
# compile the shell
shell = TTFont(sfntVersion="OTTO")
shell.importXML(tempShellSourcePath)
shell.save(shellTempPath)
os.remove(tempShellSourcePath)
# load the shell
shell = TTFont(shellTempPath)
# grab the PASS and FAIL data
hmtx = shell["hmtx"]
glyphSet = shell.getGlyphSet()
failGlyph = glyphSet["F"]
failGlyph.decompile()
failGlyphProgram = list(failGlyph.program)
failGlyphMetrics = hmtx["F"]
passGlyph = glyphSet["P"]
passGlyph.decompile()
passGlyphProgram = list(passGlyph.program)
passGlyphMetrics = hmtx["P"]
# grab some tables
hmtx = shell["hmtx"]
cmap = shell["cmap"]
# start the glyph order
existingGlyphs = [".notdef", "space", "F", "P"]
glyphOrder = list(existingGlyphs)
# start the CFF
cff = shell["CFF "].cff
globalSubrs = cff.GlobalSubrs
topDict = cff.topDictIndex[0]
topDict.charset = existingGlyphs
private = topDict.Private
charStrings = topDict.CharStrings
charStringsIndex = charStrings.charStringsIndex
features = sorted(mapping)
# build the outline, hmtx and cmap data
cp = baseCodepoint
for index, tag in enumerate(features):
# tag.pass
glyphName = "%s.pass" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
# tag.fail
glyphName = "%s.fail" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
# tag.default
glyphName = "%s.default" % tag
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=passGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = passGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# tag.alt1,2,3
for i in range(1,4):
glyphName = "%s.alt%d" % (tag, i)
glyphOrder.append(glyphName)
addGlyphToCFF(
glyphName=glyphName,
program=failGlyphProgram,
private=private,
globalSubrs=globalSubrs,
charStringsIndex=charStringsIndex,
topDict=topDict,
charStrings=charStrings
)
hmtx[glyphName] = failGlyphMetrics
for table in cmap.tables:
if table.format == 4:
table.cmap[cp] = glyphName
else:
raise NotImplementedError, "Unsupported cmap table format: %d" % table.format
cp += 1
# set the glyph order
shell.setGlyphOrder(glyphOrder)
# start the GSUB
shell["GSUB"] = newTable("GSUB")
gsub = shell["GSUB"].table = GSUB()
gsub.Version = 1.0
# make a list of all the features we will make
featureCount = len(features)
# set up the script list
scriptList = gsub.ScriptList = ScriptList()
scriptList.ScriptCount = 1
scriptList.ScriptRecord = []
scriptRecord = ScriptRecord()
scriptList.ScriptRecord.append(scriptRecord)
scriptRecord.ScriptTag = "DFLT"
script = scriptRecord.Script = Script()
defaultLangSys = script.DefaultLangSys = DefaultLangSys()
defaultLangSys.FeatureCount = featureCount
defaultLangSys.FeatureIndex = range(defaultLangSys.FeatureCount)
defaultLangSys.ReqFeatureIndex = 65535
defaultLangSys.LookupOrder = None
script.LangSysCount = 0
script.LangSysRecord = []
# set up the feature list
featureList = gsub.FeatureList = FeatureList()
featureList.FeatureCount = featureCount
featureList.FeatureRecord = []
for index, tag in enumerate(features):
# feature record
featureRecord = FeatureRecord()
featureRecord.FeatureTag = tag
feature = featureRecord.Feature = Feature()
featureList.FeatureRecord.append(featureRecord)
# feature
feature.FeatureParams = None
feature.LookupCount = 1
feature.LookupListIndex = [index]
# write the lookups
lookupList = gsub.LookupList = LookupList()
lookupList.LookupCount = featureCount
lookupList.Lookup = []
for tag in features:
# lookup
lookup = Lookup()
lookup.LookupType = 3
lookup.LookupFlag = 0
lookup.SubTableCount = 1
lookup.SubTable = []
lookupList.Lookup.append(lookup)
# subtable
subtable = AlternateSubst()
subtable.Format = 1
subtable.LookupType = 3
subtable.alternates = {
"%s.default" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt1" % tag : ["%s.pass" % tag, "%s.fail" % tag, "%s.fail" % tag],
"%s.alt2" % tag : ["%s.fail" % tag, "%s.pass" % tag, "%s.fail" % tag],
"%s.alt3" % tag : ["%s.fail" % tag, "%s.fail" % tag, "%s.pass" % tag]
}
lookup.SubTable.append(subtable)
path = outputPath % 3 + ".otf"
if os.path.exists(path):
os.remove(path)
shell.save(path)
# get rid of the shell
if os.path.exists(shellTempPath):
os.remove(shellTempPath)
def makeJavascriptData():
features = sorted(mapping)
outStr = []
outStr.append("")
outStr.append("/* This file is autogenerated by makegsubfonts.py */")
outStr.append("")
outStr.append("/* ")
outStr.append(" Features defined in gsubtest fonts with associated base")
outStr.append(" codepoints for each feature:")
outStr.append("")
outStr.append(" cp = codepoint for feature featX")
outStr.append("")
outStr.append(" cp default PASS")
outStr.append(" cp featX=1 FAIL")
outStr.append(" cp featX=2 FAIL")
outStr.append("")
outStr.append(" cp+1 default FAIL")
outStr.append(" cp+1 featX=1 PASS")
outStr.append(" cp+1 featX=2 FAIL")
outStr.append("")
outStr.append(" cp+2 default FAIL")
outStr.append(" cp+2 featX=1 FAIL")
outStr.append(" cp+2 featX=2 PASS")
outStr.append("")
outStr.append("*/")
outStr.append("")
outStr.append("var gFeatures = {");
cp = baseCodepoint
taglist = []
for tag in features:
taglist.append("\"%s\": 0x%x" % (tag, cp))
cp += 4
outStr.append(textwrap.fill(", ".join(taglist), initial_indent=" ", subsequent_indent=" "))
outStr.append("};");
outStr.append("");
if os.path.exists(javascriptData):
os.remove(javascriptData)
f = open(javascriptData, "wb")
f.write("\n".join(outStr))
f.close()
# build fonts
print "Making lookup type 1 font..."
makeLookup1()
print "Making lookup type 3 font..."
makeLookup3()
# output javascript data
print "Making javascript data file..."
makeJavascriptData() | mpl-2.0 |
mnahm5/django-estore | Lib/site-packages/boto/kinesis/__init__.py | 145 | 1652 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the Amazon Kinesis service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
from boto.kinesis.layer1 import KinesisConnection
return get_regions('kinesis', connection_cls=KinesisConnection)
def connect_to_region(region_name, **kw_params):
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
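# Illustrative usage (a sketch, not part of the original module):
#   import boto.kinesis
#   conn = boto.kinesis.connect_to_region('us-east-1')
#   if conn is None:
#       raise RuntimeError('no such Kinesis region')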
| mit |
yavuzovski/playground | python/django/RESTTest/.venv/lib/python3.4/site-packages/django/contrib/gis/geos/linestring.py | 136 | 6019 | from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.coordseq import GEOSCoordSeq
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry, LinearGeometryMixin
from django.contrib.gis.geos.point import Point
from django.contrib.gis.shortcuts import numpy
from django.utils.six.moves import range
class LineString(LinearGeometryMixin, GEOSGeometry):
_init_func = capi.create_linestring
_minlength = 2
has_cs = True
def __init__(self, *args, **kwargs):
"""
Initializes on the given sequence -- may take lists, tuples, NumPy arrays
of X,Y pairs, or Point objects. If Point objects are used, ownership is
_not_ transferred to the LineString object.
Examples:
ls = LineString((1, 1), (2, 2))
ls = LineString([(1, 1), (2, 2)])
ls = LineString(array([(1, 1), (2, 2)]))
ls = LineString(Point(1, 1), Point(2, 2))
"""
# If only one argument provided, set the coords array appropriately
if len(args) == 1:
coords = args[0]
else:
coords = args
if not (isinstance(coords, (tuple, list)) or numpy and isinstance(coords, numpy.ndarray)):
raise TypeError('Invalid initialization input for LineStrings.')
# If SRID was passed in with the keyword arguments
srid = kwargs.get('srid')
ncoords = len(coords)
if not ncoords:
super(LineString, self).__init__(self._init_func(None), srid=srid)
return
if ncoords < self._minlength:
raise ValueError(
'%s requires at least %d points, got %s.' % (
self.__class__.__name__,
self._minlength,
ncoords,
)
)
if isinstance(coords, (tuple, list)):
# Getting the number of coords and the number of dimensions -- which
# must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
ndim = None
# Incrementing through each of the coordinates and verifying
for coord in coords:
if not isinstance(coord, (tuple, list, Point)):
raise TypeError('Each coordinate should be a sequence (list or tuple)')
if ndim is None:
ndim = len(coord)
self._checkdim(ndim)
elif len(coord) != ndim:
raise TypeError('Dimension mismatch.')
numpy_coords = False
else:
shape = coords.shape # Using numpy's shape.
if len(shape) != 2:
raise TypeError('Too many dimensions.')
self._checkdim(shape[1])
ndim = shape[1]
numpy_coords = True
# Creating a coordinate sequence object because it is easier to
# set the points using GEOSCoordSeq.__setitem__().
cs = GEOSCoordSeq(capi.create_cs(ncoords, ndim), z=bool(ndim == 3))
for i in range(ncoords):
if numpy_coords:
cs[i] = coords[i, :]
elif isinstance(coords[i], Point):
cs[i] = coords[i].tuple
else:
cs[i] = coords[i]
# Calling the base geometry initialization with the returned pointer
# from the function.
super(LineString, self).__init__(self._init_func(cs.ptr), srid=srid)
def __iter__(self):
"Allows iteration over this LineString."
for i in range(len(self)):
yield self[i]
def __len__(self):
"Returns the number of points in this LineString."
return len(self._cs)
def _get_single_external(self, index):
return self._cs[index]
_get_single_internal = _get_single_external
def _set_list(self, length, items):
ndim = self._cs.dims
hasz = self._cs.hasz # I don't understand why these are different
# create a new coordinate sequence and populate accordingly
cs = GEOSCoordSeq(capi.create_cs(length, ndim), z=hasz)
for i, c in enumerate(items):
cs[i] = c
ptr = self._init_func(cs.ptr)
if ptr:
capi.destroy_geom(self.ptr)
self.ptr = ptr
self._post_init(self.srid)
else:
# can this happen?
raise GEOSException('Geometry resulting from slice deletion was invalid.')
def _set_single(self, index, value):
self._checkindex(index)
self._cs[index] = value
def _checkdim(self, dim):
if dim not in (2, 3):
raise TypeError('Dimension mismatch.')
# #### Sequence Properties ####
@property
def tuple(self):
"Returns a tuple version of the geometry from the coordinate sequence."
return self._cs.tuple
coords = tuple
def _listarr(self, func):
"""
Internal routine that returns a sequence (list) corresponding with
the given function. Will return a numpy array if possible.
"""
lst = [func(i) for i in range(len(self))]
if numpy:
return numpy.array(lst) # ARRRR!
else:
return lst
@property
def array(self):
"Returns a numpy array for the LineString."
return self._listarr(self._cs.__getitem__)
@property
def x(self):
"Returns a list or numpy array of the X variable."
return self._listarr(self._cs.getX)
@property
def y(self):
"Returns a list or numpy array of the Y variable."
return self._listarr(self._cs.getY)
@property
def z(self):
"Returns a list or numpy array of the Z variable."
if not self.hasz:
return None
else:
return self._listarr(self._cs.getZ)
# LinearRings are LineStrings used within Polygons.
class LinearRing(LineString):
_minlength = 4
_init_func = capi.create_linearring
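# Illustrative usage (a sketch, not part of the original module):
#   from django.contrib.gis.geos import LineString, LinearRing
#   ls = LineString((0, 0), (1, 1), srid=4326)
#   ring = LinearRing((0, 0), (0, 1), (1, 1), (0, 0))  # needs at least 4 points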
| gpl-3.0 |
Snyder005/StatisticalMethods | examples/XrayImage/cluster.py | 10 | 5264 | import astropy.io.fits as pyfits
import numpy as np
import os
# ====================================================================
# Functions for realizing the model:
def beta_model_profile(r, S0, rc, beta):
'''
The fabled beta model, radial profile S(r)
'''
return S0 * (1.0 + (r/rc)**2)**(-3.0*beta + 0.5)
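# Illustrative check (an assumption, not part of the original source): with
# beta = 2/3 the profile falls off as S0 * (1 + (r/rc)**2)**-1.5, so
#   beta_model_profile(0.0, 1.0, 10.0, 2.0/3.0)  -> 1.0
#   beta_model_profile(10.0, 1.0, 10.0, 2.0/3.0) -> 2.0**-1.5, about 0.354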
def beta_model_image(x, y, x0, y0, S0, rc, beta):
'''
Here, x and y are arrays ("meshgrids" or "ramps") containing x and y pixel numbers,
and the other arguments are galaxy cluster beta model parameters.
Returns a surface brightness image of the same shape as x and y.
'''
return beta_model_profile(np.sqrt((x-x0)**2 + (y-y0)**2), S0, rc, beta)
def model_image(x, y, ex, pb, x0, y0, S0, rc, beta, b):
'''
Here, x, y, ex and pb are images, all of the same shape, and the other args are
cluster model and X-ray background parameters. ex is the (constant) exposure map
and pb is the (constant) particle background map.
'''
return (beta_model_image(x, y, x0, y0, S0, rc, beta) + b) * ex + pb
# ====================================================================
class XrayData:
def __init__(self):
self.pars = np.zeros(6)
return
def read_in_data(self):
# Download the data if we don't already have it
self.targdir = 'a1835_xmm/'
os.system('mkdir -p ' + self.targdir)
imagefile = 'P0098010101M2U009IMAGE_3000.FTZ'
expmapfile = 'P0098010101M2U009EXPMAP3000.FTZ'
bkgmapfile = 'P0098010101M2X000BKGMAP3000.FTZ'
remotedir = 'http://heasarc.gsfc.nasa.gov/FTP/xmm/data/rev0/0098010101/PPS/'
for filename in [imagefile,expmapfile,bkgmapfile]:
path = self.targdir + filename
url = remotedir + filename
if not os.path.isfile(path): # i.e. if the file does not exist already:
                os.system('wget -nd -O ' + path + ' ' + url)
# Read in the data
self.imfits = pyfits.open(self.targdir + imagefile)
self.im = self.imfits[0].data
self.pbfits = pyfits.open(self.targdir + bkgmapfile)
self.pb = self.pbfits[0].data
self.exfits = pyfits.open(self.targdir + expmapfile)
self.ex = self.exfits[0].data
return
def set_up_maps(self):
# Make x and y ramps
self.x = np.array([np.arange(self.im.shape[0]) for j in np.arange(self.im.shape[1])])
self.y = np.array([[j for i in np.arange(self.im.shape[1])] for j in np.arange(self.im.shape[0])])
### mask a list of circular regions covering non-cluster sources
maskfile = 'M2ptsrc.txt'
mask = np.loadtxt(self.targdir + maskfile)
for reg in mask:
distance2 = (self.x-(reg[0]-1.0))**2 + (self.y-(reg[1]-1.0))**2
self.ex[distance2 <= reg[2]**2] = 0.0
# helpful mask image to keep track of which pixels we can ignore
self.mask = self.ex * 0.0
self.mask[self.ex > 0.0] = 1.0
return
def set_pars(self,x0,y0,S0,rc,beta,b):
self.pars[0] = x0
self.pars[1] = y0
self.pars[2] = S0
self.pars[3] = rc
self.pars[4] = beta
self.pars[5] = b
return
def make_mean_image(self):
x0 = self.pars[0]
y0 = self.pars[1]
S0 = self.pars[2]
rc = self.pars[3]
beta = self.pars[4]
b = self.pars[5]
self.mu = model_image(self.x,self.y,self.ex,self.pb,x0,y0,S0,rc,beta,b)
return
def make_mock_data(self):
self.mock = np.random.poisson(self.mu,self.mu.shape)
return
def evaluate_log_prior(self):
# Uniform in all parameters...
return 0.0
def evaluate_log_likelihood(self):
self.make_mean_image()
# Return un-normalized Poisson sampling distribution:
# log (\mu^N e^{-\mu} / N!) = N log \mu - \mu + constant
return np.sum(self.im * np.log(self.mu) - self.mu)
def evaluate_unnormalised_log_posterior(self,x0,y0,S0,rc,beta,b):
self.set_pars(x0,y0,S0,rc,beta,b)
return self.evaluate_log_likelihood() + self.evaluate_log_prior()
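# Illustrative usage sketch (the parameter values below are hypothetical, not
# taken from the original analysis):
#   data = XrayData()
#   data.read_in_data()
#   data.set_up_maps()
#   logp = data.evaluate_unnormalised_log_posterior(328, 348, 0.01, 12.0, 0.7, 5e-4)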
# ====================================================================
'''
Adam's routines for input into MCMC:
def lnpost(params, data):
# assumes S0 is a free parameter
x0 = params[0]
y0 = params[1]
S0 = params[2]
rc = params[3]
beta = params[4]
bg = params[5]
if x0 < 0. or x0 >= data.im.shape[0] or y0 < 0. or y0 > data.im.shape[1] or S0 <= 0. or rc <= 0. or beta <= 0.0:
return -np.inf
mod = modelImage(data, x0, y0, S0, rc, beta, bg)
if np.min(mod) <= 0.0:
return -np.inf
return np.sum( (-mod + data.im * np.log(mod)) * data.mask )
def lnpost2(params, data):
# assumes log(S0) is a free parameter
x0 = params[0]
y0 = params[1]
S0 = np.exp(params[2])
rc = params[3]
beta = params[4]
bg = params[5]
if x0 < 0. or x0 >= data.im.shape[0] or y0 < 0. or y0 > data.im.shape[1] or S0 <= 0. or rc <= 0. or beta <= 0.0:
return -np.inf
mod = modelImage(data, x0, y0, S0, rc, beta, bg)
if np.min(mod) <= 0.0:
return -np.inf
return np.sum( (-mod + data.im * np.log(mod)) * data.mask )
'''
| gpl-2.0 |
beni55/django | tests/template_tests/filter_tests/test_unordered_list.py | 14 | 6527 | from django.template.defaultfilters import unordered_list
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import python_2_unicode_compatible
from django.utils.safestring import mark_safe
from ..utils import setup
class UnorderedListTests(SimpleTestCase):
@setup({'unordered_list01': '{{ a|unordered_list }}'})
def test_unordered_list01(self):
output = self.engine.render_to_string('unordered_list01', {'a': ['x>', ['<y']]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@ignore_warnings(category=RemovedInDjango20Warning)
@setup({'unordered_list02': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
def test_unordered_list02(self):
output = self.engine.render_to_string('unordered_list02', {'a': ['x>', ['<y']]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list03': '{{ a|unordered_list }}'})
def test_unordered_list03(self):
output = self.engine.render_to_string('unordered_list03', {'a': ['x>', [mark_safe('<y')]]})
        self.assertEqual(output, '\t<li>x&gt;\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list04': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
def test_unordered_list04(self):
output = self.engine.render_to_string('unordered_list04', {'a': ['x>', [mark_safe('<y')]]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list05': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
def test_unordered_list05(self):
output = self.engine.render_to_string('unordered_list05', {'a': ['x>', ['<y']]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@ignore_warnings(category=RemovedInDjango20Warning)
class DeprecatedUnorderedListSyntaxTests(SimpleTestCase):
@setup({'unordered_list01': '{{ a|unordered_list }}'})
def test_unordered_list01(self):
output = self.engine.render_to_string('unordered_list01', {'a': ['x>', [['<y', []]]]})
        self.assertEqual(output, '\t<li>x&gt;\n\t<ul>\n\t\t<li>&lt;y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list02': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
def test_unordered_list02(self):
output = self.engine.render_to_string('unordered_list02', {'a': ['x>', [['<y', []]]]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list03': '{{ a|unordered_list }}'})
def test_unordered_list03(self):
output = self.engine.render_to_string('unordered_list03', {'a': ['x>', [[mark_safe('<y'), []]]]})
        self.assertEqual(output, '\t<li>x&gt;\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list04': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
def test_unordered_list04(self):
output = self.engine.render_to_string('unordered_list04', {'a': ['x>', [[mark_safe('<y'), []]]]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
@setup({'unordered_list05': '{% autoescape off %}{{ a|unordered_list }}{% endautoescape %}'})
def test_unordered_list05(self):
output = self.engine.render_to_string('unordered_list05', {'a': ['x>', [['<y', []]]]})
self.assertEqual(output, '\t<li>x>\n\t<ul>\n\t\t<li><y</li>\n\t</ul>\n\t</li>')
class FunctionTests(SimpleTestCase):
def test_list(self):
self.assertEqual(unordered_list(['item 1', 'item 2']), '\t<li>item 1</li>\n\t<li>item 2</li>')
def test_nested(self):
self.assertEqual(
unordered_list(['item 1', ['item 1.1']]),
'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>',
)
def test_nested2(self):
self.assertEqual(
unordered_list(['item 1', ['item 1.1', 'item1.2'], 'item 2']),
'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t\t<li>item1.2'
'</li>\n\t</ul>\n\t</li>\n\t<li>item 2</li>',
)
def test_nested_multiple(self):
self.assertEqual(
unordered_list(['item 1', ['item 1.1', ['item 1.1.1', ['item 1.1.1.1']]]]),
'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1\n\t\t<ul>\n\t\t\t<li>'
'item 1.1.1\n\t\t\t<ul>\n\t\t\t\t<li>item 1.1.1.1</li>\n\t\t\t'
'</ul>\n\t\t\t</li>\n\t\t</ul>\n\t\t</li>\n\t</ul>\n\t</li>',
)
def test_nested_multiple2(self):
self.assertEqual(
unordered_list(['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]),
'\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>'
'Lawrence</li>\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>'
'\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>',
)
def test_ulitem(self):
@python_2_unicode_compatible
class ULItem(object):
def __init__(self, title):
self.title = title
def __str__(self):
return 'ulitem-%s' % str(self.title)
a = ULItem('a')
b = ULItem('b')
self.assertEqual(unordered_list([a, b]), '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')
def item_generator():
yield a
yield b
self.assertEqual(unordered_list(item_generator()), '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')
@ignore_warnings(category=RemovedInDjango20Warning)
def test_legacy(self):
"""
Old format for unordered lists should still work
"""
self.assertEqual(unordered_list(['item 1', []]), '\t<li>item 1</li>')
self.assertEqual(
unordered_list(['item 1', [['item 1.1', []]]]),
'\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1</li>\n\t</ul>\n\t</li>',
)
self.assertEqual(
unordered_list(['item 1', [['item 1.1', []],
['item 1.2', []]]]), '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1'
'</li>\n\t\t<li>item 1.2</li>\n\t</ul>\n\t</li>',
)
self.assertEqual(
unordered_list(['States', [['Kansas', [['Lawrence', []], ['Topeka', []]]], ['Illinois', []]]]),
'\t<li>States\n\t<ul>\n\t\t<li>Kansas\n\t\t<ul>\n\t\t\t<li>Lawrence</li>'
'\n\t\t\t<li>Topeka</li>\n\t\t</ul>\n\t\t</li>\n\t\t<li>Illinois</li>\n\t</ul>\n\t</li>',
)
| bsd-3-clause |
Veske/POL-POM-5 | phoenicis-bash/src/main/python/BashBinder/CommandParser.py | 3 | 9173 | #!/usr/bin/env python
# coding=utf-8
# Copyright (C) 2015 Pâris Quentin
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
from Environment.EnvironmentFormatter import EnvironmentFormatter
from com.playonlinux.framework import Downloader
from com.playonlinux.core.scripts import ScriptFailureException
from com.playonlinux.framework import WineVersion
from com.playonlinux.framework import Wine
from com.playonlinux.framework import WineShortcut
from com.playonlinux.core.utils import Architecture
from java.net import URL
from java.io import File, FileOutputStream
class CommandParser(object):
def __init__(self, setupWindowManager, command):
self.command = command
self.splitCommand = self.command.split("\t")
self.setupWindowManager = setupWindowManager
def getCookie(self):
return self.splitCommand[0]
def getCommand(self):
return self.splitCommand[1]
def executeCommand(self):
commandExecutor = CommandParser.CommandExecutor(self.splitCommand, self.setupWindowManager)
return getattr(commandExecutor, self.getCommand())()
class CommandExecutor():
def __init__(self, command, setupWindowManager):
self.command = command
self.setupWindowManager = setupWindowManager
def POL_SetupWindow_Init(self):
setupWindowId = self.command[2]
if("TITLE" in os.environ.keys()):
windowTitle = os.environ["TITLE"]
else:
windowTitle = "${application.name} Wizard";
self.setupWindowManager.newWindow(setupWindowId, windowTitle).init()
def POL_SetupWindow_message(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
self.setupWindowManager.getWindow(setupWindowId).message(textToShow)
def POL_SetupWindow_presentation(self):
setupWindowId = self.command[2]
programName = self.command[3]
programEditor = self.command[4]
editorURL = self.command[5]
scriptorName = self.command[6]
prefixName = self.command[7]
self.setupWindowManager.getWindow(setupWindowId).presentation(programName, programEditor, editorURL, scriptorName, prefixName)
def POL_SetupWindow_free_presentation(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
self.setupWindowManager.getWindow(setupWindowId).presentation(textToShow)
def POL_SetupWindow_wait(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
self.setupWindowManager.getWindow(setupWindowId).wait(textToShow)
def POL_SetupWindow_browse(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
try:
currentDirectory = self.command[4]
except IndexError:
currentDirectory = ""
            # 'allowedFiles' was used below without ever being assigned (a NameError at
            # runtime); read it like the other optional tab-separated arguments.
            try:
                allowedFiles = self.command[5]
            except IndexError:
                allowedFiles = ""
            return self.setupWindowManager.getWindow(setupWindowId).browse(textToShow, currentDirectory, allowedFiles)
def POL_SetupWindow_textbox(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
try:
defaultValue = self.command[4]
except IndexError:
defaultValue = ""
return self.setupWindowManager.getWindow(setupWindowId).textbox(textToShow, defaultValue)
def POL_SetupWindow_menu(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
try:
separator = self.command[5]
except IndexError:
separator = "~"
items = self.command[4].split(separator)
return self.setupWindowManager.getWindow(setupWindowId).menu(textToShow, items)
def POL_SetupWindow_Close(self):
setupWindowId = self.command[2]
self.setupWindowManager.getWindow(setupWindowId).close()
def POL_Download(self):
setupWindowId = self.command[2]
url = self.command[3]
currentDirectory = self.command[4]
try:
checkSum = self.command[5]
except IndexError:
checkSum = ""
setupWindow = self.setupWindowManager.getWindow(setupWindowId)
localFile = os.path.join(currentDirectory,
Downloader.wizard(setupWindow).findFileNameFromURL(URL(url)))
downloader = Downloader.wizard(setupWindow).get(url, localFile)
if(checkSum != ""):
downloader.check(checkSum)
def POL_SetupWindow_licence(self):
setupWindowId = self.command[2]
textToShow = self.command[3]
licenceFilePath = self.command[5]
self.setupWindowManager.getWindow(setupWindowId).licenceFile(textToShow, licenceFilePath)
def POL_Throw(self):
raise ScriptFailureException(self.command[3])
def POL_Print(self):
message = self.command[3]
self.setupWindowManager.template.log(message)
def POL_Wine_InstallVersion(self):
setupWindowId = self.command[2]
version = self.command[3]
arch = self.command[4]
wineVersion = WineVersion(version, "upstream-%s" % arch,
self.setupWindowManager.getWindow(setupWindowId))
wineVersion.install()
def POL_Wine_PrefixCreate(self):
setupWindowId = self.command[2]
setupWindow = self.setupWindowManager.getWindow(setupWindowId)
prefixName = self.command[3]
version = self.command[4]
try:
arch = self.command[5]
arch = str(Architecture.fromWinePackageName(arch).name())
except IndexError:
arch = None
if(arch is not None):
Wine.wizard(setupWindow).selectPrefix(prefixName).createPrefix(version, "upstream", arch)
else:
Wine.wizard(setupWindow).selectPrefix(prefixName).createPrefix(version, arch)
def POL_Wine(self):
setupWindowId = self.command[2]
setupWindow = self.setupWindowManager.getWindow(setupWindowId)
workingDirectory = self.command[3]
prefixName = self.command[4]
fifoOut = self.command[5]
fifoErr = self.command[6]
env = EnvironmentFormatter.getEnvironmentVarsFromBashBase64EncodedString(self.command[7])
prgmName = self.command[8]
args = self.command[9::1]
return Wine.wizard(setupWindow).selectPrefix(prefixName)\
.withErrorStream(FileOutputStream(File(fifoErr)))\
.withOutputStream(FileOutputStream(File(fifoOut)))\
.runForeground(
workingDirectory,
prgmName,
args,
env
).getLastReturnCode()
def POL_Config_PrefixRead(self):
setupWindowId = self.command[2]
setupWindow = self.setupWindowManager.getWindow(setupWindowId)
prefixName = self.command[3]
key = self.command[4]
return Wine.wizard(setupWindow).selectPrefix(prefixName).config().readValue(key)
def POL_Config_PrefixWrite(self):
setupWindowId = self.command[2]
setupWindow = self.setupWindowManager.getWindow(setupWindowId)
prefixName = self.command[3]
key = self.command[4]
value = self.command[5]
return Wine(setupWindow).selectPrefix(prefixName).config().writeValue(key, value)
def POL_Shortcut(self):
setupWindowId = self.command[2]
setupWindow = self.setupWindowManager.getWindow(setupWindowId)
winePrefix = self.command[3]
binary = self.command[4]
shortcutName = self.command[5]
websiteIcon = self.command[6]
argument = self.command[7]
categories = self.command[8]
WineShortcut.wizard(setupWindow)\
.withArguments([argument])\
.withExecutableName(binary)\
.withWinePrefix(winePrefix)\
.withName(shortcutName)\
.create() | gpl-3.0 |
Kaisuke5/chainer | chainer/functions/array/split_axis.py | 13 | 3058 | import collections
import six
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class SplitAxis(function.Function):
"""Function that splits multiple arrays towards the specified axis."""
def __init__(self, indices_or_sections, axis):
if not isinstance(indices_or_sections, (int, collections.Iterable)):
raise TypeError('indices_or_sections must be integer or 1-D array')
self.indices_or_sections = indices_or_sections
self.axis = axis
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 1)
type_check.expect(in_types[0].ndim >= self.axis)
if isinstance(self.indices_or_sections, collections.Iterable):
max_index = type_check.Variable(
self.indices_or_sections[-1], 'max_index')
type_check.expect(in_types[0].shape[self.axis] > max_index)
else:
sections = type_check.Variable(
self.indices_or_sections, 'sections')
type_check.expect(in_types[0].shape[self.axis] % sections == 0)
def forward(self, x):
if isinstance(self.indices_or_sections, collections.Iterable):
cdimx = x[0].shape[self.axis]
ind = list(self.indices_or_sections)
ind.append(cdimx)
prev_i = 0
for i in ind:
cdimy = max(0, min(i, cdimx) - prev_i)
if cdimy == 0:
raise ValueError('Not support if shape contains 0')
prev_i = i
xp = cuda.get_array_module(*x)
return tuple(xp.split(x[0], self.indices_or_sections, self.axis))
def backward(self, x, gys):
xp = cuda.get_array_module(*x)
if any(gy is None for gy in gys):
gx = xp.zeros_like(x[0])
gxs = xp.split(gx, self.indices_or_sections, self.axis)
for gxi, gy in six.moves.zip(gxs, gys):
if gy is None:
continue
gxi[:] = gy
return gx,
else:
return xp.concatenate(gys, axis=self.axis),
def split_axis(x, indices_or_sections, axis):
"""Splits given variables along an axis.
Args:
x (tuple of Variables): Variables to be split.
indices_or_sections (int or 1-D array): If this argument is an integer,
N, the array will be divided into N equal arrays along axis.
If it is a 1-D array of sorted integers, it
indicates the positions where the array is split.
axis (int): Axis that the input array is split along.
Returns:
``tuple`` or ``Variable``: Tuple of :class:`~chainer.Variable` objects
if the number of outputs is more than 1 or
:class:`~chainer.Variable` otherwise.
.. note::
This function raises ``ValueError`` if at least
        one of the outputs is split to zero-size
(i.e. `axis`-th value of its shape is zero).
"""
return SplitAxis(indices_or_sections, axis)(x)
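# Editor's hedged usage sketch (not part of the original file): splitting a
# (2, 6) array into three (2, 2) chunks along axis 1.
#
#   import numpy
#   from chainer import Variable
#   x = Variable(numpy.arange(12, dtype=numpy.float32).reshape(2, 6))
#   y0, y1, y2 = split_axis(x, 3, 1)   # each output has shape (2, 2)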
| mit |
abhishekgahlot/youtube-dl | youtube_dl/extractor/testtube.py | 109 | 3295 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
qualities,
)
class TestTubeIE(InfoExtractor):
_VALID_URL = r'https?://testtube\.com/[^/?#]+/(?P<id>[^/?#]+)'
_TESTS = [{
'url': 'https://testtube.com/dnews/5-weird-ways-plants-can-eat-animals?utm_source=FB&utm_medium=DNews&utm_campaign=DNewsSocial',
'info_dict': {
'id': '60163',
'display_id': '5-weird-ways-plants-can-eat-animals',
'duration': 275,
'ext': 'webm',
'title': '5 Weird Ways Plants Can Eat Animals',
'description': 'Why have some plants evolved to eat meat?',
'thumbnail': 're:^https?://.*\.jpg$',
'uploader': 'DNews',
'uploader_id': 'dnews',
},
}, {
'url': 'https://testtube.com/iflscience/insane-jet-ski-flipping',
'info_dict': {
'id': 'fAGfJ4YjVus',
'ext': 'mp4',
'title': 'Flipping Jet-Ski Skills | Outrageous Acts of Science',
'uploader': 'Science Channel',
'uploader_id': 'ScienceChannel',
'upload_date': '20150203',
'description': 'md5:e61374030015bae1d2e22f096d4769d6',
}
}]
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
youtube_url = self._html_search_regex(
r'<iframe[^>]+src="((?:https?:)?//www.youtube.com/embed/[^"]+)"',
webpage, 'youtube iframe', default=None)
if youtube_url:
return self.url_result(youtube_url, 'Youtube', video_id=display_id)
video_id = self._search_regex(
r"player\.loadRevision3Item\('video_id',\s*([0-9]+)\);",
webpage, 'video ID')
all_info = self._download_json(
'https://testtube.com/api/getPlaylist.json?api_key=ba9c741bce1b9d8e3defcc22193f3651b8867e62&codecs=h264,vp8,theora&video_id=%s' % video_id,
video_id)
info = all_info['items'][0]
formats = []
for vcodec, fdatas in info['media'].items():
for name, fdata in fdatas.items():
formats.append({
'format_id': '%s-%s' % (vcodec, name),
'url': fdata['url'],
'vcodec': vcodec,
'tbr': fdata.get('bitrate'),
})
self._sort_formats(formats)
duration = int_or_none(info.get('duration'))
images = info.get('images')
thumbnails = None
preference = qualities(['mini', 'small', 'medium', 'large'])
if images:
thumbnails = [{
'id': thumbnail_id,
'url': img_url,
'preference': preference(thumbnail_id)
} for thumbnail_id, img_url in images.items()]
return {
'id': video_id,
'display_id': display_id,
'title': info['title'],
'description': info.get('summary'),
'thumbnails': thumbnails,
'uploader': info.get('show', {}).get('name'),
'uploader_id': info.get('show', {}).get('slug'),
'duration': duration,
'formats': formats,
}
| unlicense |
OndrejIT/pyload | module/plugins/crypter/GoogledriveComDereferer.py | 6 | 2747 | # -*- coding: utf-8 -*-
from module.network.HTTPRequest import BadHeader
from ..internal.Crypter import Crypter
from ..internal.misc import json
class GoogledriveComDereferer(Crypter):
__name__ = "GoogledriveComDereferer"
__type__ = "crypter"
__version__ = "0.01"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?(?:drive|docs)\.google\.com/open\?(?:.+;)?id=(?P<ID>[-\w]+)'
__config__ = [("activated", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("folder_per_package", "Default;Yes;No", "Create folder for each package", "Default"),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10)]
__description__ = """Drive.google.com dereferer plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]"),
("GammaC0de", "nitzo2001[AT]yahoo[DOT]com")]
NAME_PATTERN = r"folderName: '(?P<N>.+?)'"
OFFLINE_PATTERN = r'<TITLE>'
API_URL = "https://www.googleapis.com/drive/v3/"
API_KEY = "AIzaSyAcA9c4evtwSY1ifuvzo6HKBkeot5Bk_U4"
def api_response(self, cmd, **kwargs):
kwargs['key'] = self.API_KEY
try:
json_data = json.loads(self.load("%s%s" % (self.API_URL, cmd),
get=kwargs))
self.log_debug("API response: %s" % json_data)
return json_data
except BadHeader, e:
try:
json_data = json.loads(e.content)
self.log_error("API Error: %s" % cmd,
json_data['error']['message'],
"ID: %s" % self.info['pattern']['ID'],
"Error code: %s" % e.code)
except ValueError:
self.log_error("API Error: %s" % cmd,
e,
"ID: %s" % self.info['pattern']['ID'],
"Error code: %s" % e.code)
return None
def decrypt(self, pyfile):
json_data = self.api_response("files/%s" % self.info['pattern']['ID'])
if json_data is None:
self.fail("API error")
if 'error' in json_data:
if json_data['error']['code'] == 404:
self.offline()
else:
self.fail(json_data['error']['message'])
link = "https://drive.google.com/%s/%s" % \
(("file/d" if json_data['mimeType'] != "application/vnd.google-apps.folder" else "drive/folders"),
self.info['pattern']['ID'])
self.packages = [(pyfile.package().folder, [link], pyfile.package().name)]
| gpl-3.0 |
phoebusliang/parallel-lettuce | tests/integration/lib/Django-1.2.5/django/core/xheaders.py | 518 | 1157 | """
Pages in Django can be served up with custom HTTP headers containing useful
information about those pages -- namely, the content type and object ID.
This module contains utility functions for retrieving and doing interesting
things with these special "X-Headers" (so called because the HTTP spec demands
that custom headers are prefixed with "X-").
Next time you're at slashdot.org, watch out for X-Fry and X-Bender. :)
"""
def populate_xheaders(request, response, model, object_id):
"""
Adds the "X-Object-Type" and "X-Object-Id" headers to the given
HttpResponse according to the given model and object_id -- but only if the
given HttpRequest object has an IP address within the INTERNAL_IPS setting
or if the request is from a logged in staff member.
"""
from django.conf import settings
if (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
or (hasattr(request, 'user') and request.user.is_active
and request.user.is_staff)):
response['X-Object-Type'] = "%s.%s" % (model._meta.app_label, model._meta.object_name.lower())
response['X-Object-Id'] = str(object_id)
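# Editor's hedged usage sketch (not part of the original file): a view would
# typically call populate_xheaders() on the response it is about to return.
# `MyModel` and the template name below are placeholders.
#
#   from django.shortcuts import render_to_response
#   from django.core.xheaders import populate_xheaders
#
#   def detail(request, object_id):
#       obj = MyModel.objects.get(pk=object_id)
#       response = render_to_response('myapp/detail.html', {'object': obj})
#       populate_xheaders(request, response, MyModel, obj.pk)
#       return response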
| gpl-3.0 |
JPJPJPOPOP/zulip | scripts/nagios/cron_file_helper.py | 52 | 1044 | import time
# Avoid requiring the typing module to be installed
if False:
from typing import Tuple
def nagios_from_file(results_file):
# type: (str) -> Tuple[int, str]
"""Returns a nagios-appropriate string and return code obtained by
parsing the desired file on disk. The file on disk should be of format
%s|%s % (timestamp, nagios_string)
This file is created by various nagios checking cron jobs such as
check-rabbitmq-queues and check-rabbitmq-consumers"""
data = open(results_file).read().strip()
pieces = data.split('|')
if not len(pieces) == 4:
state = 'UNKNOWN'
ret = 3
data = "Results file malformed"
else:
timestamp = int(pieces[0])
time_diff = time.time() - timestamp
if time_diff > 60 * 2:
ret = 3
state = 'UNKNOWN'
data = "Results file is stale"
else:
ret = int(pieces[1])
state = pieces[2]
data = pieces[3]
return (ret, "%s: %s" % (state, data))
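# Editor's hedged example (not part of the original file): given the parsing
# above, a results file is expected to hold a single line of four
# '|'-separated fields -- timestamp, numeric return code, state and text --
# for instance (values and path below are illustrative only):
#
#   1500000000|0|OK|queues are empty
#
#   nagios_from_file('/var/lib/nagios_state/check-rabbitmq-queues')
#   # -> (0, 'OK: queues are empty') when the file is fresh enough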
| apache-2.0 |
hachreak/invenio-ext | invenio_ext/sqlalchemy/types/marshal_binary.py | 5 | 1985 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012, 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Implement compressed column type."""
from sqlalchemy.types import LargeBinary, TypeDecorator
from invenio_utils.serializers import ZlibMarshal
class MarshalBinary(TypeDecorator):
"""Implement compressed column type."""
impl = LargeBinary
def __init__(self, default_value, force_type=None, *args, **kwargs):
"""Initialize default value and type."""
super(MarshalBinary, self).__init__(*args, **kwargs)
self.default_value = default_value
self.force_type = force_type if force_type is not None else lambda x: x
def process_bind_param(self, value, dialect):
"""Compress data in column."""
if value is not None:
value = ZlibMarshal.dumps(self.force_type(value))
return value
return value
def process_result_value(self, value, dialect):
"""Load comressed data from column."""
if value is not None:
try:
value = ZlibMarshal.loads(value)
except:
value = None
return value if value is not None else \
(self.default_value() if callable(self.default_value) else
self.default_value)
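# Editor's hedged usage sketch (not part of the original file): a model column
# storing a zlib-compressed, marshalled value that falls back to an empty dict
# when the stored value is NULL or unreadable.  The `db` import path is the
# usual one in Invenio but is an assumption here.
#
#   from invenio_ext.sqlalchemy import db
#
#   class Record(db.Model):
#       id = db.Column(db.Integer, primary_key=True)
#       additional_info = db.Column(MarshalBinary(default_value=dict, force_type=dict))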
| gpl-2.0 |
CanalTP/navitia | source/jormungandr/tests/kirin_realtime_tests.py | 1 | 155835 | # Copyright (c) 2001-2015, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# channel `#navitia` on riot https://riot.im/app/#/room/#navitia:matrix.org
# https://groups.google.com/d/forum/navitia
# www.navitia.io
# Note: the tests_mechanism should be the first
# import for the conf to be loaded correctly when only this test is run
from __future__ import absolute_import
from copy import deepcopy
from datetime import datetime
import uuid
from tests.tests_mechanism import dataset
from jormungandr.utils import str_to_time_stamp, make_namedtuple
from tests import gtfs_realtime_pb2, kirin_pb2
from tests.check_utils import (
get_not_null,
journey_basic_query,
isochrone_basic_query,
get_used_vj,
get_arrivals,
get_valid_time,
is_valid_disruption,
check_journey,
Journey,
Section,
SectionStopDT,
is_valid_graphical_isochrone,
sub_query,
has_the_disruption,
get_disruptions_by_id,
)
from tests.rabbitmq_utils import RabbitMQCnxFixture, rt_topic
from shapely.geometry import asShape
UpdatedStopTime = make_namedtuple(
'UpdatedStopTime',
'stop_id',
'arrival',
'departure',
arrival_delay=0,
departure_delay=0,
message=None,
departure_skipped=False,
arrival_skipped=False,
is_added=False,
is_detour=False,
)
class MockKirinDisruptionsFixture(RabbitMQCnxFixture):
"""
    Mock a kirin disruption message, in order to check the api
"""
def _make_mock_item(self, *args, **kwargs):
return make_mock_kirin_item(*args, **kwargs)
def tstamp(str):
"""just to have clearer tests"""
return str_to_time_stamp(str)
def _dt(h, m, s):
"""syntaxic sugar"""
return datetime(1900, 1, 1, hour=h, minute=m, second=s)
MAIN_ROUTING_TEST_SETTING = {
'main_routing_test': {'kraken_args': ['--BROKER.rt_topics=' + rt_topic, 'spawn_maintenance_worker']}
}
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnVJDeletion(MockKirinDisruptionsFixture):
def test_vj_deletion(self):
"""
send a mock kirin vj cancellation and test that the vj is not taken
"""
response = self.query_region(journey_basic_query + "&data_freshness=realtime")
isochrone = self.query_region(isochrone_basic_query + "&data_freshness=realtime")
# with no cancellation, we have 2 journeys, one direct and one with the vj:A:0
assert get_arrivals(response) == ['20120614T080222', '20120614T080436']
assert get_used_vj(response) == [['vehicle_journey:vjA'], []]
# Disruption impacting lines A, B, C starts at 06:00 and ends at 11:59:59
# Get VJ at 12:00 and disruption doesn't appear
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T120000')
assert len(pt_response['disruptions']) == 0
is_valid_graphical_isochrone(isochrone, self.tester, isochrone_basic_query + "&data_freshness=realtime")
geojson = isochrone['isochrones'][0]['geojson']
multi_poly = asShape(geojson)
        # we have 4 departures and 1 disruption (linked to line A departure)
departures = self.query_region("stop_points/stop_point:stopB/departures?_current_datetime=20120614T0800")
assert len(departures['disruptions']) == 1
assert len(departures['departures']) == 4
# A new disruption impacting vjA is created between 08:01:00 and 08:01:01
self.send_mock("vjA", "20120614", 'canceled', disruption_id='disruption_bob')
def _check_train_cancel_disruption(dis):
is_valid_disruption(dis, chaos_disrup=False)
assert dis['contributor'] == rt_topic
assert dis['disruption_id'] == 'disruption_bob'
assert dis['severity']['effect'] == 'NO_SERVICE'
assert len(dis['impacted_objects']) == 1
ptobj = dis['impacted_objects'][0]['pt_object']
assert ptobj['embedded_type'] == 'trip'
assert ptobj['id'] == 'vjA'
assert ptobj['name'] == 'vjA'
# for cancellation we do not output the impacted stops
assert 'impacted_stops' not in dis['impacted_objects'][0]
# We should see the disruption
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 1
_check_train_cancel_disruption(pt_response['disruptions'][0])
# and we should be able to query for the vj's disruption
disrup_response = self.query_region('vehicle_journeys/vehicle_journey:vjA/disruptions')
assert len(disrup_response['disruptions']) == 1
_check_train_cancel_disruption(disrup_response['disruptions'][0])
traffic_reports_response = self.query_region('traffic_reports?_current_datetime=20120614T0800')
traffic_reports = get_not_null(traffic_reports_response, 'traffic_reports')
assert len(traffic_reports) == 1
vjs = get_not_null(traffic_reports[0], "vehicle_journeys")
assert len(vjs) == 1
assert vjs[0]['id'] == 'vehicle_journey:vjA'
new_response = self.query_region(journey_basic_query + "&data_freshness=realtime")
assert set(get_arrivals(new_response)) == set(['20120614T080436', '20120614T080223'])
assert get_used_vj(new_response) == [['vehicle_journey:vjM'], []]
isochrone_realtime = self.query_region(isochrone_basic_query + "&data_freshness=realtime")
is_valid_graphical_isochrone(
isochrone_realtime, self.tester, isochrone_basic_query + "&data_freshness=realtime"
)
geojson_realtime = isochrone_realtime['isochrones'][0]['geojson']
multi_poly_realtime = asShape(geojson_realtime)
isochrone_base_schedule = self.query_region(isochrone_basic_query + "&data_freshness=base_schedule")
is_valid_graphical_isochrone(
isochrone_base_schedule, self.tester, isochrone_basic_query + "&data_freshness=base_schedule"
)
geojson_base_schedule = isochrone_base_schedule['isochrones'][0]['geojson']
multi_poly_base_schedule = asShape(geojson_base_schedule)
assert not multi_poly.difference(multi_poly_realtime).is_empty
assert multi_poly.equals(multi_poly_base_schedule)
# We have one less departure (vjA because of disruption)
# The disruption doesn't appear because the lines departing aren't impacted during the period
departures = self.query_region("stop_points/stop_point:stopB/departures?_current_datetime=20120614T0800")
assert len(departures['disruptions']) == 0
assert len(departures['departures']) == 3
        # In base schedule we still see all 4 departures (the cancelled vjA included), plus the new disruption
departures = self.query_region(
"stop_points/stop_point:stopB/departures?_current_datetime=20120614T0800&data_freshness=base_schedule"
)
assert len(departures['disruptions']) == 2
assert len(departures['departures']) == 4
        # it should not have changed anything for the base schedule
new_base = self.query_region(journey_basic_query + "&data_freshness=base_schedule")
assert get_arrivals(new_base) == ['20120614T080222', '20120614T080436']
assert get_used_vj(new_base) == [['vehicle_journey:vjA'], []]
# see http://jira.canaltp.fr/browse/NAVP-266,
# _current_datetime is needed to make it work
# assert len(new_base['disruptions']) == 1
# remove links as the calling url is not the same
for j in new_base['journeys']:
j.pop('links', None)
for j in response['journeys']:
j.pop('links', None)
assert new_base['journeys'] == response['journeys']
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestMainStopAreaWeightFactorWithKirinUpdate(MockKirinDisruptionsFixture):
def test_main_stop_area_weight_factor_with_kirin_update(self):
response = self.query_region("places?type[]=stop_area&q=stop")
places = response['places']
assert len(places) == 3
assert places[0]['id'] == 'stopA'
assert places[1]['id'] == 'stopB'
assert places[2]['id'] == 'stopC'
# only used to activate the autocomplete rebuild process
self.send_mock("id", "20120614", 'type', disruption_id='disruption_bob')
response = self.query_region("places?type[]=stop_area&q=stop&_main_stop_area_weight_factor=5")
places = response['places']
assert len(places) == 3
assert places[0]['id'] == 'stopC'
assert places[1]['id'] == 'stopA'
assert places[2]['id'] == 'stopB'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestAutocompleteOnWaysWithKirinUpdate(MockKirinDisruptionsFixture):
def test_autocomplete_on_ways_with_kirin_update(self):
response = self.query_region("places?&q=rue ts")
places = response['places']
assert len(places) == 1
assert places[0]['embedded_type'] == 'address'
assert places[0]['name'] == 'rue ts (Condom)'
# only used to activate the autocomplete rebuild process
self.send_mock("id", "20120614", 'type', disruption_id='disruption_bob')
# After injection of realtime, we should not return way with visible=false.
response = self.query_region("places?&q=rue ts")
places = response['places']
assert len(places) == 1
assert places[0]['embedded_type'] == 'address'
assert places[0]['name'] == 'rue ts (Condom)'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnVJDelay(MockKirinDisruptionsFixture):
def test_vj_delay(self):
"""
send a mock kirin vj delay and test that the vj is not taken
"""
response = self.query_region(journey_basic_query + "&data_freshness=realtime")
# with no cancellation, we have 2 journeys, one direct and one with the vj:A:0
assert get_arrivals(response) == ['20120614T080222', '20120614T080436']
assert get_used_vj(response) == [['vehicle_journey:vjA'], []]
        # we have 4 departures and 1 disruption (linked to the first passage)
departures = self.query_region("stop_points/stop_point:stopB/departures?_current_datetime=20120614T0800")
assert len(departures['disruptions']) == 1
assert len(departures['departures']) == 4
assert departures['departures'][0]['stop_date_time']['departure_date_time'] == '20120614T080100'
pt_response = self.query_region('vehicle_journeys')
initial_nb_vehicle_journeys = len(pt_response['vehicle_journeys'])
assert initial_nb_vehicle_journeys == 9
# no disruption yet
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 0
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080224"),
departure=tstamp("20120614T080225"),
arrival_delay=60 + 24,
departure_delay=60 + 25,
message='cow on tracks',
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
arrival_delay=3 * 60 + 58,
departure_delay=3 * 60 + 58,
),
],
disruption_id='vjA_delayed',
)
        # A new vj is created, which is the vj with the impact of the disruption applied
pt_response = self.query_region('vehicle_journeys')
assert len(pt_response['vehicle_journeys']) == (initial_nb_vehicle_journeys + 1)
vj_ids = [vj['id'] for vj in pt_response['vehicle_journeys']]
assert 'vehicle_journey:vjA:modified:0:vjA_delayed' in vj_ids
def _check_train_delay_disruption(dis):
is_valid_disruption(dis, chaos_disrup=False)
assert dis['disruption_id'] == 'vjA_delayed'
assert dis['severity']['effect'] == 'SIGNIFICANT_DELAYS'
assert len(dis['impacted_objects']) == 1
ptobj = dis['impacted_objects'][0]['pt_object']
assert ptobj['embedded_type'] == 'trip'
assert ptobj['id'] == 'vjA'
assert ptobj['name'] == 'vjA'
# for delay we should have detail on the impacted stops
impacted_objs = get_not_null(dis['impacted_objects'][0], 'impacted_stops')
assert len(impacted_objs) == 2
imp_obj1 = impacted_objs[0]
assert get_valid_time(get_not_null(imp_obj1, 'amended_arrival_time')) == _dt(h=8, m=2, s=24)
assert get_valid_time(get_not_null(imp_obj1, 'amended_departure_time')) == _dt(h=8, m=2, s=25)
assert get_not_null(imp_obj1, 'cause') == 'cow on tracks'
assert get_not_null(imp_obj1, 'departure_status') == 'delayed'
assert get_not_null(imp_obj1, 'arrival_status') == 'delayed'
assert get_not_null(imp_obj1, 'stop_time_effect') == 'delayed'
assert get_valid_time(get_not_null(imp_obj1, 'base_arrival_time')) == _dt(8, 1, 0)
assert get_valid_time(get_not_null(imp_obj1, 'base_departure_time')) == _dt(8, 1, 0)
imp_obj2 = impacted_objs[1]
assert get_valid_time(get_not_null(imp_obj2, 'amended_arrival_time')) == _dt(h=8, m=4, s=0)
assert get_valid_time(get_not_null(imp_obj2, 'amended_departure_time')) == _dt(h=8, m=4, s=0)
assert imp_obj2['cause'] == ''
assert get_not_null(imp_obj1, 'stop_time_effect') == 'delayed'
assert get_not_null(imp_obj1, 'departure_status') == 'delayed'
assert get_not_null(imp_obj1, 'arrival_status') == 'delayed'
assert get_valid_time(get_not_null(imp_obj2, 'base_departure_time')) == _dt(8, 1, 2)
assert get_valid_time(get_not_null(imp_obj2, 'base_arrival_time')) == _dt(8, 1, 2)
# we should see the disruption
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 1
_check_train_delay_disruption(pt_response['disruptions'][0])
# In order to not disturb the test, line M which was added afterwards for shared section tests, is forbidden here
new_response = self.query_region(journey_basic_query + "&data_freshness=realtime&forbidden_uris[]=M&")
assert get_arrivals(new_response) == ['20120614T080436', '20120614T080520']
assert get_used_vj(new_response) == [[], ['vehicle_journey:vjA:modified:0:vjA_delayed']]
pt_journey = new_response['journeys'][1]
check_journey(
pt_journey,
Journey(
sections=[
Section(
departure_date_time='20120614T080208',
arrival_date_time='20120614T080225',
base_departure_date_time=None,
base_arrival_date_time=None,
stop_date_times=[],
),
Section(
departure_date_time='20120614T080225',
arrival_date_time='20120614T080400',
base_departure_date_time='20120614T080100',
base_arrival_date_time='20120614T080102',
stop_date_times=[
SectionStopDT(
departure_date_time='20120614T080225',
arrival_date_time='20120614T080224',
base_departure_date_time='20120614T080100',
base_arrival_date_time='20120614T080100',
),
SectionStopDT(
departure_date_time='20120614T080400',
arrival_date_time='20120614T080400',
base_departure_date_time='20120614T080102',
base_arrival_date_time='20120614T080102',
),
],
),
Section(
departure_date_time='20120614T080400',
arrival_date_time='20120614T080520',
base_departure_date_time=None,
base_arrival_date_time=None,
stop_date_times=[],
),
]
),
)
        # it should not have changed anything for the base schedule
new_base = self.query_region(journey_basic_query + "&data_freshness=base_schedule")
assert get_arrivals(new_base) == ['20120614T080222', '20120614T080436']
        assert get_used_vj(new_base) == [['vehicle_journey:vjA'], []]
# we have one delayed departure
departures = self.query_region("stop_points/stop_point:stopB/departures?_current_datetime=20120614T0800")
assert len(departures['disruptions']) == 2
assert len(departures['departures']) == 4
assert departures['departures'][1]['stop_date_time']['departure_date_time'] == '20120614T080225'
# Same as realtime except the departure date time
departures = self.query_region(
"stop_points/stop_point:stopB/departures?_current_datetime=20120614T0800&data_freshness=base_schedule"
)
assert len(departures['disruptions']) == 2
assert len(departures['departures']) == 4
assert departures['departures'][0]['stop_date_time']['departure_date_time'] == '20120614T080100'
# We send again the same disruption
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080224"),
departure=tstamp("20120614T080225"),
arrival_delay=60 + 24,
departure_delay=60 + 25,
message='cow on tracks',
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
arrival_delay=3 * 60 + 58,
departure_delay=3 * 60 + 58,
),
],
disruption_id='vjA_delayed',
)
# A new vj is created, but a useless vj has been cleaned, so the number of vj does not change
pt_response = self.query_region('vehicle_journeys')
assert len(pt_response['vehicle_journeys']) == (initial_nb_vehicle_journeys + 1)
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 1
_check_train_delay_disruption(pt_response['disruptions'][0])
# So the first real-time vj created for the first disruption should be deactivated
# In order to not disturb the test, line M which was added afterwards for shared section tests, is forbidden here
new_response = self.query_region(journey_basic_query + "&data_freshness=realtime&forbidden_uris[]=M&")
assert get_arrivals(new_response) == ['20120614T080436', '20120614T080520']
        assert get_used_vj(new_response) == [[], ['vehicle_journey:vjA:modified:1:vjA_delayed']]
        # it should not have changed anything for the base schedule
new_base = self.query_region(journey_basic_query + "&data_freshness=base_schedule")
assert get_arrivals(new_base) == ['20120614T080222', '20120614T080436']
        assert get_used_vj(new_base) == [['vehicle_journey:vjA'], []]
# we then try to send a delay on another train.
# we should not have lost the first delay
self.send_mock(
"vjB",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
tstamp("20120614T180224"),
tstamp("20120614T180225"),
arrival_delay=60 + 24,
departure_delay=60 + 25,
),
UpdatedStopTime(
"stop_point:stopA",
tstamp("20120614T180400"),
tstamp("20120614T180400"),
message="bob's in the place",
),
],
)
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 1
_check_train_delay_disruption(pt_response['disruptions'][0])
# we should also have the disruption on vjB
assert (
len(
self.query_region('vehicle_journeys/vehicle_journey:vjB?_current_datetime=20120614T1337')[
'disruptions'
]
)
== 1
)
###################################
# We now send a partial delete on B
###################################
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB", arrival=tstamp("20120614T080100"), departure=tstamp("20120614T080100")
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
message='cow on tracks',
arrival_skipped=True,
),
],
disruption_id='vjA_skip_A',
)
# A new vj is created
vjs = self.query_region('vehicle_journeys?_current_datetime=20120614T1337')
assert len(vjs['vehicle_journeys']) == (initial_nb_vehicle_journeys + 2)
vjA = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
# we now have 2 disruption on vjA
assert len(vjA['disruptions']) == 2
all_dis = {d['id']: d for d in vjA['disruptions']}
assert 'vjA_skip_A' in all_dis
dis = all_dis['vjA_skip_A']
is_valid_disruption(dis, chaos_disrup=False)
assert dis['disruption_id'] == 'vjA_skip_A'
assert dis['severity']['effect'] == 'REDUCED_SERVICE'
assert len(dis['impacted_objects']) == 1
ptobj = dis['impacted_objects'][0]['pt_object']
assert ptobj['embedded_type'] == 'trip'
assert ptobj['id'] == 'vjA'
assert ptobj['name'] == 'vjA'
# for delay we should have detail on the impacted stops
impacted_objs = get_not_null(dis['impacted_objects'][0], 'impacted_stops')
assert len(impacted_objs) == 2
imp_obj1 = impacted_objs[0]
assert get_valid_time(get_not_null(imp_obj1, 'amended_arrival_time')) == _dt(8, 1, 0)
assert get_valid_time(get_not_null(imp_obj1, 'amended_departure_time')) == _dt(8, 1, 0)
assert get_not_null(imp_obj1, 'stop_time_effect') == 'unchanged'
assert get_not_null(imp_obj1, 'arrival_status') == 'unchanged'
assert get_not_null(imp_obj1, 'departure_status') == 'unchanged'
assert get_valid_time(get_not_null(imp_obj1, 'base_arrival_time')) == _dt(8, 1, 0)
assert get_valid_time(get_not_null(imp_obj1, 'base_departure_time')) == _dt(8, 1, 0)
imp_obj2 = impacted_objs[1]
assert 'amended_arrival_time' not in imp_obj2
assert get_not_null(imp_obj2, 'cause') == 'cow on tracks'
assert get_not_null(imp_obj2, 'stop_time_effect') == 'deleted' # the stoptime is marked as deleted
assert get_not_null(imp_obj2, 'arrival_status') == 'deleted'
assert get_not_null(imp_obj2, 'departure_status') == 'unchanged' # the departure is not changed
assert get_valid_time(get_not_null(imp_obj2, 'base_departure_time')) == _dt(8, 1, 2)
assert get_valid_time(get_not_null(imp_obj2, 'base_arrival_time')) == _dt(8, 1, 2)
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnVJDelayDayAfter(MockKirinDisruptionsFixture):
def test_vj_delay_day_after(self):
"""
        send a mock kirin disruption delaying the vj to the day after and test that the vj is not taken
"""
response = self.query_region(journey_basic_query + "&data_freshness=realtime")
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
# with no cancellation, we have 2 journeys, one direct and one with the vj:A:0
assert get_arrivals(response) == ['20120614T080222', '20120614T080436'] # pt_walk + vj 08:01
assert get_used_vj(response), [['vjA'] == []]
pt_response = self.query_region('vehicle_journeys')
initial_nb_vehicle_journeys = len(pt_response['vehicle_journeys'])
assert initial_nb_vehicle_journeys == 9
# check that we have the next vj
s_coord = "0.0000898312;0.0000898312" # coordinate of S in the dataset
r_coord = "0.00188646;0.00071865" # coordinate of R in the dataset
journey_later_query = "journeys?from={from_coord}&to={to_coord}&datetime={datetime}".format(
from_coord=s_coord, to_coord=r_coord, datetime="20120614T080500"
)
later_response = self.query_region(journey_later_query + "&data_freshness=realtime")
assert get_arrivals(later_response) == ['20120614T080936', '20120614T180222'] # pt_walk + vj 18:01
        assert get_used_vj(later_response) == [[], ['vehicle_journey:vjB']]
# no disruption yet
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 0
# sending disruption delaying VJ to the next day
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime("stop_point:stopB", tstamp("20120615T070224"), tstamp("20120615T070224")),
UpdatedStopTime("stop_point:stopA", tstamp("20120615T070400"), tstamp("20120615T070400")),
],
disruption_id='96231_2015-07-28_0',
effect='unknown',
)
# A new vj is created
pt_response = self.query_region('vehicle_journeys')
assert len(pt_response['vehicle_journeys']) == (initial_nb_vehicle_journeys + 1)
vj_ids = [vj['id'] for vj in pt_response['vehicle_journeys']]
assert 'vehicle_journey:vjA:modified:0:96231_2015-07-28_0' in vj_ids
# we should see the disruption
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['disruptions']) == 1
is_valid_disruption(pt_response['disruptions'][0], chaos_disrup=False)
assert pt_response['disruptions'][0]['disruption_id'] == '96231_2015-07-28_0'
# In order to not disturb the test, line M which was added afterwards for shared section tests, is forbidden here
new_response = self.query_region(journey_basic_query + "&data_freshness=realtime&forbidden_uris[]=M&")
assert get_arrivals(new_response) == ['20120614T080436', '20120614T180222'] # pt_walk + vj 18:01
assert get_used_vj(new_response), [[] == ['vjB']]
# it should not have changed anything for the base-schedule
new_base = self.query_region(journey_basic_query + "&data_freshness=base_schedule")
assert get_arrivals(new_base) == ['20120614T080222', '20120614T080436']
assert get_used_vj(new_base), [['vjA'] == []]
# the day after, we can use the delayed vj
journey_day_after_query = "journeys?from={from_coord}&to={to_coord}&datetime={datetime}".format(
from_coord=s_coord, to_coord=r_coord, datetime="20120615T070000"
)
day_after_response = self.query_region(journey_day_after_query + "&data_freshness=realtime")
assert get_arrivals(day_after_response) == [
'20120615T070436',
'20120615T070520',
] # pt_walk + rt 07:02:24
        assert get_used_vj(day_after_response) == [[], ['vehicle_journey:vjA:modified:0:96231_2015-07-28_0']]
        # it should not have changed anything for the base schedule the day after
day_after_base = self.query_region(journey_day_after_query + "&data_freshness=base_schedule")
assert get_arrivals(day_after_base) == ['20120615T070436', '20120615T080222']
assert get_used_vj(day_after_base), [[] == ['vjA']]
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnVJOnTime(MockKirinDisruptionsFixture):
def test_vj_on_time(self):
"""
        We don't want to output an on-time disruption on journeys,
        departures, arrivals and route_schedules (also on
        stop_schedules, but no vj disruption is output for the
        moment).
"""
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption same as base schedule
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='vjA_on_time',
effect='unknown',
)
        # We have a new disruption
disruptions_after = self.query_region('disruptions?_current_datetime=20120614T080000')
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
assert has_the_disruption(disruptions_after, 'vjA_on_time')
# it's not in journeys
journey_query = journey_basic_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(journey_query)
assert not has_the_disruption(response, 'vjA_on_time')
self.is_valid_journey_response(response, journey_query)
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'realtime'
# it's not in departures
response = self.query_region(
"stop_points/stop_point:stopB/departures?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert not has_the_disruption(response, 'vjA_on_time')
assert response['departures'][0]['stop_date_time']['data_freshness'] == 'realtime'
# it's not in arrivals
response = self.query_region(
"stop_points/stop_point:stopA/arrivals?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert not has_the_disruption(response, 'vjA_on_time')
assert response['arrivals'][0]['stop_date_time']['data_freshness'] == 'realtime'
# it's not in stop_schedules
response = self.query_region(
"stop_points/stop_point:stopB/lines/A/stop_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert not has_the_disruption(response, 'vjA_on_time')
assert response['stop_schedules'][0]['date_times'][0]['data_freshness'] == 'realtime'
assert response['stop_schedules'][0]['date_times'][0]['base_date_time'] == '20120614T080100'
assert response['stop_schedules'][0]['date_times'][0]['date_time'] == '20120614T080100'
# it's not in terminus_schedules
response = self.query_region(
"stop_points/stop_point:stopB/lines/A/terminus_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert not has_the_disruption(response, 'vjA_on_time')
assert response['terminus_schedules'][0]['date_times'][0]['data_freshness'] == 'realtime'
assert response['terminus_schedules'][0]['date_times'][0]['base_date_time'] == '20120614T080100'
assert response['terminus_schedules'][0]['date_times'][0]['date_time'] == '20120614T080100'
# it's not in route_schedules
response = self.query_region(
"stop_points/stop_point:stopB/lines/A/route_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert not has_the_disruption(response, 'vjA_on_time')
# no realtime flags on route_schedules yet
# New disruption one second late
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=1,
departure_delay=1,
arrival=tstamp("20120614T080101"),
departure=tstamp("20120614T080101"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=1,
departure_delay=1,
arrival=tstamp("20120614T080103"),
departure=tstamp("20120614T080103"),
),
],
disruption_id='vjA_late',
)
        # We have a new disruption
disruptions_after = self.query_region('disruptions?_current_datetime=20120614T080000')
assert nb_disruptions_before + 2 == len(disruptions_after['disruptions'])
assert has_the_disruption(disruptions_after, 'vjA_late')
# it's in journeys
response = self.query_region(journey_query)
assert has_the_disruption(response, 'vjA_late')
self.is_valid_journey_response(response, journey_query)
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'realtime'
# it's in departures
response = self.query_region(
"stop_points/stop_point:stopB/departures?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert has_the_disruption(response, 'vjA_late')
assert response['departures'][0]['stop_date_time']['departure_date_time'] == '20120614T080101'
assert response['departures'][0]['stop_date_time']['data_freshness'] == 'realtime'
# it's in arrivals
response = self.query_region(
"stop_points/stop_point:stopA/arrivals?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert has_the_disruption(response, 'vjA_late')
assert response['arrivals'][0]['stop_date_time']['arrival_date_time'] == '20120614T080103'
assert response['arrivals'][0]['stop_date_time']['data_freshness'] == 'realtime'
# it's in stop_schedules
response = self.query_region(
"stop_points/stop_point:stopB/lines/A/stop_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert has_the_disruption(response, 'vjA_late')
assert response['stop_schedules'][0]['date_times'][0]['links'][1]['type'] == 'disruption'
assert response['stop_schedules'][0]['date_times'][0]['date_time'] == '20120614T080101'
assert response['stop_schedules'][0]['date_times'][0]['base_date_time'] == '20120614T080100'
assert response['stop_schedules'][0]['date_times'][0]['data_freshness'] == 'realtime'
# it's in terminus_schedules
response = self.query_region(
"stop_points/stop_point:stopB/lines/A/terminus_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert has_the_disruption(response, 'vjA_late')
assert response['terminus_schedules'][0]['date_times'][0]['links'][1]['type'] == 'disruption'
assert response['terminus_schedules'][0]['date_times'][0]['date_time'] == '20120614T080101'
assert response['terminus_schedules'][0]['date_times'][0]['base_date_time'] == '20120614T080100'
assert response['terminus_schedules'][0]['date_times'][0]['data_freshness'] == 'realtime'
# it's in route_schedules
response = self.query_region(
"stop_points/stop_point:stopB/lines/A/route_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
assert has_the_disruption(response, 'vjA_late')
# no realtime flags on route_schedules yet
MAIN_ROUTING_TEST_SETTING_NO_ADD = {
'main_routing_test': {
'kraken_args': [
'--BROKER.rt_topics=' + rt_topic,
'spawn_maintenance_worker',
] # also check that by 'default is_realtime_add_enabled=0'
}
}
MAIN_ROUTING_TEST_SETTING = deepcopy(MAIN_ROUTING_TEST_SETTING_NO_ADD)
MAIN_ROUTING_TEST_SETTING['main_routing_test']['kraken_args'].append('--GENERAL.is_realtime_add_enabled=1')
MAIN_ROUTING_TEST_SETTING['main_routing_test']['kraken_args'].append('--GENERAL.is_realtime_add_trip_enabled=1')
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnNewStopTimeAtTheEnd(MockKirinDisruptionsFixture):
def test_add_and_delete_one_stop_time_at_the_end(self):
"""
        1. create a new_stop_time to add a final stop at C and
        test that a new journey is possible with a section of type public_transport from B to C
        2. delete the added stop_time and verify that the public_transport section is absent
        3. delete the stop_time again and verify that the public_transport section is still absent
"""
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption with two stop_times same as base schedule and
# a new stop_time on stop_point:stopC added at the end
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080104"),
departure=tstamp("20120614T080104"),
),
],
disruption_id='new_stop_time',
)
# We have a new disruption to add a new stop_time at stop_point:stopC in vehicle_journey 'VJA'
disruptions_after = self.query_region('disruptions?_current_datetime=20120614T080000')
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
assert has_the_disruption(disruptions_after, 'new_stop_time')
last_disrupt = disruptions_after['disruptions'][-1]
assert last_disrupt['severity']['effect'] == 'MODIFIED_SERVICE'
journey_query = journey_basic_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(journey_query)
assert has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, journey_query)
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][1]['display_informations']['physical_mode'] == 'Tramway'
B_C_query = "journeys?from={from_coord}&to={to_coord}&datetime={datetime}".format(
from_coord='stop_point:stopB', to_coord='stop_point:stopC', datetime='20120614T080000'
)
# The result with base_schedule should not have a journey with public_transport from B to C
base_journey_query = B_C_query + "&data_freshness=base_schedule&_current_datetime=20120614T080000"
response = self.query_region(base_journey_query)
assert not has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, base_journey_query)
assert len(response['journeys']) == 1 # check we only have one journey
assert len(response['journeys'][0]['sections']) == 1
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
assert 'data_freshness' not in response['journeys'][0]['sections'][0] # means it's base_schedule
# The result with realtime should have a journey with public_transport from B to C
rt_journey_query = B_C_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(rt_journey_query)
assert has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, rt_journey_query)
assert len(response['journeys']) == 2 # check there's a new journey possible
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][0]['type'] == 'public_transport'
assert response['journeys'][0]['sections'][0]['to']['id'] == 'stop_point:stopC'
assert response['journeys'][0]['sections'][0]['duration'] == 4
assert response['journeys'][0]['status'] == 'MODIFIED_SERVICE'
assert 'data_freshness' not in response['journeys'][1]['sections'][0] # means it's base_schedule
assert response['journeys'][1]['sections'][0]['type'] == 'street_network'
# New disruption with a deleted stop_time recently added at stop_point:stopC
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080104"),
departure=tstamp("20120614T080104"),
message='stop_time deleted',
arrival_skipped=True,
)
],
disruption_id='deleted_stop_time',
)
# We have a new disruption with a deleted stop_time at stop_point:stopC in vehicle_journey 'VJA'
disruptions_with_deleted = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disruptions_after['disruptions']) + 1 == len(disruptions_with_deleted['disruptions'])
assert has_the_disruption(disruptions_with_deleted, 'deleted_stop_time')
# The result with realtime should not have a journey with public_transport from B to C
# since the added stop_time has been deleted by the last disruption
rt_journey_query = B_C_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(rt_journey_query)
assert not has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, rt_journey_query)
assert len(response['journeys']) == 1
assert len(response['journeys'][0]['sections']) == 1
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
assert 'data_freshness' not in response['journeys'][0]['sections'][0]
# New disruption with a deleted stop_time already deleted at stop_point:stopC
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080104"),
departure=tstamp("20120614T080104"),
message='stop_time deleted',
arrival_skipped=True,
)
],
disruption_id='re_deleted_stop_time',
)
# We have a new disruption with a deleted stop_time at stop_point:stopC in vehicle_journey 'VJA'
disruptions_with_deleted = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disruptions_after['disruptions']) + 2 == len(disruptions_with_deleted['disruptions'])
assert has_the_disruption(disruptions_with_deleted, 're_deleted_stop_time')
# The result with realtime should not have a journey with public_transport from B to C
rt_journey_query = B_C_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(rt_journey_query)
assert not has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, rt_journey_query)
assert len(response['journeys']) == 1
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinReadTripEffectFromTripUpdate(MockKirinDisruptionsFixture):
def test_read_trip_effect_from_tripupdate(self):
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
nb_disruptions_before = len(disruptions_before['disruptions'])
assert nb_disruptions_before == 12
vjs_before = self.query_region('vehicle_journeys')
assert len(vjs_before['vehicle_journeys']) == 9
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080224"),
departure=tstamp("20120614T080225"),
arrival_delay=0,
departure_delay=0,
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
message='stop_time deleted',
arrival_skipped=True,
departure_skipped=True,
),
],
disruption_id='reduced_service_vjA',
effect='reduced_service',
)
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 13
assert has_the_disruption(disrupts, 'reduced_service_vjA')
last_disrupt = disrupts['disruptions'][-1]
assert last_disrupt['severity']['effect'] == 'REDUCED_SERVICE'
assert last_disrupt['severity']['name'] == 'reduced service'
vjs_after = self.query_region('vehicle_journeys')
# we got a new vj due to the disruption, which means the disruption is handled correctly
assert len(vjs_after['vehicle_journeys']) == 10
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinSchedulesNewStopTimeInBetween(MockKirinDisruptionsFixture):
def test_schedules_add_one_stop_time(self):
"""
Checking that when a stop is added on a trip, /departures and /stop_schedules are updated
"""
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
base_query = 'stop_areas/stopC/{api}?from_datetime={dt}&_current_datetime={dt}&data_freshness={df}'
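# For illustration, with api='departures', dt='20120614T080100' and df='realtime' this expands to:
# stop_areas/stopC/departures?from_datetime=20120614T080100&_current_datetime=20120614T080100&data_freshness=realtime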
departures = self.query_region(base_query.format(api='departures', dt='20120614T080100', df='realtime'))
assert len(departures['departures']) == 0
stop_schedules = self.query_region(
base_query.format(api='stop_schedules', dt='20120614T080100', df='realtime')
)
assert len(stop_schedules['stop_schedules']) == 1
assert stop_schedules['stop_schedules'][0]['display_informations']['label'] == '1D'
assert not stop_schedules['stop_schedules'][0]['date_times']
# New disruption with a new stop_time in between B and A of the VJ = vjA
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
arrival_delay=0,
departure_delay=0,
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080330"),
departure=tstamp("20120614T080331"),
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
arrival_delay=3 * 60 + 58,
departure_delay=3 * 60 + 58,
),
],
disruption_id='vjA_delayed_with_new_stop_time',
effect='modified',
)
disruptions_after = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disruptions_before['disruptions']) + 1 == len(disruptions_after['disruptions'])
assert has_the_disruption(disruptions_after, 'vjA_delayed_with_new_stop_time')
# still nothing for base_schedule
departures = self.query_region(
base_query.format(api='departures', dt='20120614T080100', df='base_schedule')
)
assert len(departures['departures']) == 0
stop_schedules = self.query_region(
base_query.format(api='stop_schedules', dt='20120614T080100', df='base_schedule')
)
assert len(stop_schedules['stop_schedules']) == 2 # a new route is linked (not used in base_schedule)
assert not stop_schedules['stop_schedules'][0]['date_times']
assert not stop_schedules['stop_schedules'][1]['date_times']
# departures updated in realtime
departures = self.query_region(base_query.format(api='departures', dt='20120614T080100', df='realtime'))
assert len(departures['departures']) == 1
assert departures['departures'][0]['stop_date_time']['data_freshness'] == 'realtime'
assert (
departures['departures'][0]['stop_date_time']['arrival_date_time'] == '20120614T080330'
) # new stop
assert departures['departures'][0]['stop_date_time']['departure_date_time'] == '20120614T080331'
assert 'vjA_delayed_with_new_stop_time' in [
l['id'] for l in departures['departures'][0]['display_informations']['links']
] # link to disruption
assert 'vjA_delayed_with_new_stop_time' in [
d['id'] for d in departures['disruptions']
] # disruption in collection
# stop_schedules updated in realtime
stop_schedules = self.query_region(
base_query.format(api='stop_schedules', dt='20120614T080100', df='realtime')
)
assert len(stop_schedules['stop_schedules']) == 2
assert stop_schedules['stop_schedules'][1]['display_informations']['label'] == '1D'
assert not stop_schedules['stop_schedules'][1]['date_times'] # still no departure on other route
assert stop_schedules['stop_schedules'][0]['display_informations']['label'] == '1A'
assert stop_schedules['stop_schedules'][0]['date_times'][0]['data_freshness'] == 'realtime'
assert (
stop_schedules['stop_schedules'][0]['date_times'][0]['date_time'] == '20120614T080331'
) # new departure
assert 'vjA_delayed_with_new_stop_time' in [
l['id'] for l in stop_schedules['stop_schedules'][0]['date_times'][0]['links']
] # link to disruption
assert 'vjA_delayed_with_new_stop_time' in [
d['id'] for d in departures['disruptions']
] # disruption in collection
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnNewStopTimeInBetween(MockKirinDisruptionsFixture):
def test_add_modify_and_delete_one_stop_time(self):
"""
1. Create a disruption with a delay on VJ = vjA (with stop_times at B and A) and verify the journey
for a query from S to R: S -> walk -> B -> public_transport -> A -> walk -> R
2. Add a new stop_time (stop_point C) between B and A in the VJ = vjA and verify the journey as above
3. Verify the journey for a query from S to C: S -> walk -> B -> public_transport -> C
4. Delete the added stop_time and verify the journey for the query in step 3
"""
# New disruption with a delay of VJ = vjA
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080224"),
departure=tstamp("20120614T080225"),
arrival_delay=60 + 24,
departure_delay=60 + 25,
message='cow on tracks',
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
arrival_delay=3 * 60 + 58,
departure_delay=3 * 60 + 58,
),
],
disruption_id='vjA_delayed',
)
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 13
assert has_the_disruption(disrupts, 'vjA_delayed')
# query from S to R: Journey without delay with departure from B at 20120614T080100
# and arrival to A at 20120614T080102 returned
response = self.query_region(journey_basic_query + "&data_freshness=realtime")
assert len(response['journeys']) == 2
assert len(response['journeys'][0]['sections']) == 3
assert len(response['journeys'][1]['sections']) == 1
assert response['journeys'][0]['sections'][1]['type'] == 'public_transport'
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'base_schedule'
assert response['journeys'][0]['sections'][1]['departure_date_time'] == '20120614T080101'
assert response['journeys'][0]['sections'][1]['arrival_date_time'] == '20120614T080103'
assert len(response['journeys'][0]['sections'][1]['stop_date_times']) == 2
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
# A new request with departure after 2 minutes gives us journey with delay
response = self.query_region(sub_query + "&data_freshness=realtime&datetime=20120614T080200")
assert len(response['journeys']) == 2
assert len(response['journeys'][0]['sections']) == 3
assert len(response['journeys'][1]['sections']) == 1
assert response['journeys'][0]['sections'][1]['type'] == 'public_transport'
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][1]['departure_date_time'] == '20120614T080225'
assert response['journeys'][0]['sections'][1]['arrival_date_time'] == '20120614T080400'
assert len(response['journeys'][0]['sections'][1]['stop_date_times']) == 2
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
# New disruption with a new stop_time in between B and A of the VJ = vjA
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080224"),
departure=tstamp("20120614T080225"),
arrival_delay=60 + 24,
departure_delay=60 + 25,
message='cow on tracks',
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080330"),
departure=tstamp("20120614T080330"),
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
arrival_delay=3 * 60 + 58,
departure_delay=3 * 60 + 58,
),
],
disruption_id='vjA_delayed_with_new_stop_time',
effect='modified',
)
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 14
assert has_the_disruption(disrupts, 'vjA_delayed_with_new_stop_time')
last_disrupt = disrupts['disruptions'][-1]
assert last_disrupt['severity']['effect'] == 'MODIFIED_SERVICE'
# the journey has the new stop_time in its section of public_transport
response = self.query_region(sub_query + "&data_freshness=realtime&datetime=20120614T080200")
assert len(response['journeys']) == 2
assert len(response['journeys'][0]['sections']) == 3
assert len(response['journeys'][1]['sections']) == 1
assert response['journeys'][0]['sections'][1]['type'] == 'public_transport'
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][1]['departure_date_time'] == '20120614T080225'
assert response['journeys'][0]['sections'][1]['arrival_date_time'] == '20120614T080400'
assert len(response['journeys'][0]['sections'][1]['stop_date_times']) == 3
assert (
response['journeys'][0]['sections'][1]['stop_date_times'][1]['stop_point']['name']
== 'stop_point:stopC'
)
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
# Query from S to C: Uses a public_transport from B to C
S_to_C_query = "journeys?from={from_coord}&to={to_coord}".format(
from_coord='0.0000898312;0.0000898312', to_coord='stop_point:stopC'
)
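# The 'from' coordinate appears to be the point S referred to in the docstring above
# (within walking distance of stopB), so the expected realtime journey is
# S -> walk -> B -> public_transport -> C, as checked below.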
base_journey_query = S_to_C_query + "&data_freshness=realtime&datetime=20120614T080200"
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 2
assert len(response['journeys'][0]['sections']) == 2
assert len(response['journeys'][1]['sections']) == 1
assert response['journeys'][0]['sections'][1]['type'] == 'public_transport'
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][1]['departure_date_time'] == '20120614T080225'
assert response['journeys'][0]['sections'][1]['arrival_date_time'] == '20120614T080330'
# New disruption with a deleted stop_time recently added at stop_point:stopC
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080330"),
departure=tstamp("20120614T080330"),
message='stop_time deleted',
arrival_skipped=True,
)
],
disruption_id='deleted_stop_time',
)
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 15
assert has_the_disruption(disrupts, 'deleted_stop_time')
# the journey doesn't have public_transport
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 1
assert len(response['journeys'][0]['sections']) == 1
assert response['journeys'][0]['type'] == 'best'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinOnNewStopTimeAtTheBeginning(MockKirinDisruptionsFixture):
def test_add_modify_and_delete_one_stop_time(self):
"""
1. create a new_stop_time to add an initial stop at C and
test that a new journey is possible with a section of type public_transport from C to A
2. delete the added stop_time and verify that the public_transport section is absent
"""
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 12
C_to_R_query = "journeys?from={from_coord}&to={to_coord}".format(
from_coord='stop_point:stopC', to_coord='stop_point:stopA'
)
# Query from C to R: the journey doesn't have any public_transport
base_journey_query = C_to_R_query + "&data_freshness=realtime&datetime=20120614T080000&walking_speed=0.7"
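# walking_speed=0.7 (presumably in m/s) slows the direct walk; the assertions below expect
# the resulting walk from C to A to last 127 seconds.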
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 1
assert len(response['journeys'][0]['sections']) == 1
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
assert 'data_freshness' not in response['journeys'][0]['sections'][0]
assert response['journeys'][0]['durations']['walking'] == 127
# New disruption with two stop_times same as base schedule and
# a new stop_time on stop_point:stopC added at the beginning
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080000"),
departure=tstamp("20120614T080000"),
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080001"),
departure=tstamp("20120614T080001"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080002"),
departure=tstamp("20120614T080002"),
),
],
disruption_id='new_stop_time',
effect='delayed',
)
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 13
assert has_the_disruption(disrupts, 'new_stop_time')
last_disruption = disrupts['disruptions'][-1]
assert last_disruption['impacted_objects'][0]['impacted_stops'][0]['arrival_status'] == 'added'
assert last_disruption['impacted_objects'][0]['impacted_stops'][0]['departure_status'] == 'added'
assert last_disruption['severity']['effect'] == 'SIGNIFICANT_DELAYS'
assert last_disruption['severity']['name'] == 'trip delayed'
# Query from C to R: the journey should have a public_transport from C to A
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 2
assert len(response['journeys'][1]['sections']) == 1
assert response['journeys'][0]['sections'][0]['type'] == 'public_transport'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][0]['departure_date_time'] == '20120614T080000'
assert response['journeys'][0]['sections'][0]['arrival_date_time'] == '20120614T080002'
assert response['journeys'][1]['sections'][0]['type'] == 'street_network'
# New disruption with a deleted stop_time recently added at stop_point:stopC
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080000"),
departure=tstamp("20120614T080000"),
message='stop_time deleted',
arrival_skipped=True,
)
],
disruption_id='deleted_stop_time',
)
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == 14
assert has_the_disruption(disrupts, 'deleted_stop_time')
last_disruption = disrupts['disruptions'][-1]
assert last_disruption['impacted_objects'][0]['impacted_stops'][0]['arrival_status'] == 'deleted'
assert (
last_disruption['impacted_objects'][0]['impacted_stops'][0]['departure_status'] == 'unchanged'
) # only the arrival was skipped in the feed, so the departure_status stays 'unchanged'
assert last_disruption['severity']['effect'] == 'REDUCED_SERVICE'
assert last_disruption['severity']['name'] == 'reduced service'
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 1
assert len(response['journeys'][0]['sections']) == 1
assert response['journeys'][0]['sections'][0]['type'] == 'street_network'
assert 'data_freshness' not in response['journeys'][0]['sections'][0]
assert response['journeys'][0]['durations']['walking'] == 127
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T080000')
assert len(pt_response['disruptions']) == 2
@dataset(MAIN_ROUTING_TEST_SETTING_NO_ADD)
class TestKrakenNoAdd(MockKirinDisruptionsFixture):
def test_no_rt_add_possible(self):
"""
trying to add a new_stop_time without allowing it in kraken:
test that it is ignored
(same scenario as test_add_and_delete_one_stop_time_at_the_end(), but a different result is expected)
"""
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption same as base schedule
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080104"),
departure=tstamp("20120614T080104"),
),
],
disruption_id='new_stop_time',
)
# No new disruption
disruptions_after = self.query_region('disruptions?_current_datetime=20120614T080000')
assert nb_disruptions_before == len(disruptions_after['disruptions'])
assert not has_the_disruption(disruptions_after, 'new_stop_time')
journey_query = journey_basic_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(journey_query)
assert not has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, journey_query)
assert response['journeys'][0]['sections'][1]['data_freshness'] == 'base_schedule'
B_C_query = "journeys?from={from_coord}&to={to_coord}&datetime={datetime}".format(
from_coord='stop_point:stopB', to_coord='stop_point:stopC', datetime='20120614T080000'
)
base_journey_query = B_C_query + "&data_freshness=base_schedule&_current_datetime=20120614T080000"
response = self.query_region(base_journey_query)
assert not has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, base_journey_query)
assert len(response['journeys']) == 1 # check we only have one journey
assert 'data_freshness' not in response['journeys'][0]['sections'][0] # means it's base_schedule
B_C_query = "journeys?from={from_coord}&to={to_coord}&datetime={datetime}".format(
from_coord='stop_point:stopB', to_coord='stop_point:stopC', datetime='20120614T080000'
)
rt_journey_query = B_C_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
response = self.query_region(rt_journey_query)
assert not has_the_disruption(response, 'new_stop_time')
self.is_valid_journey_response(response, rt_journey_query)
assert len(response['journeys']) == 1 # check there's no new journey possible
assert 'data_freshness' not in response['journeys'][0]['sections'][0] # means it's base_schedule
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinStopTimeOnDetourAtTheEnd(MockKirinDisruptionsFixture):
def test_stop_time_with_detour_at_the_end(self):
"""
1. create a new_stop_time at C to replace the existing one at A so that we have
A deleted_for_detour and C added_for_detour
2. test that a new journey is possible with a section of type public_transport from B to C
"""
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption with one stop_time same as base schedule, another one deleted and
# a new stop_time on stop_point:stopC added at the end
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
arrival_skipped=True,
is_detour=True,
message='deleted for detour',
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080104"),
departure=tstamp("20120614T080104"),
is_added=True,
is_detour=True,
message='added for detour',
),
],
disruption_id='stop_time_with_detour',
)
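# In this feed, stopA carries arrival_skipped + is_detour and stopC carries is_added + is_detour,
# which is expected to produce the DETOUR severity and the 'deleted'/'added' statuses checked below.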
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == nb_disruptions_before + 1
assert has_the_disruption(disrupts, 'stop_time_with_detour')
last_disrupt = disrupts['disruptions'][-1]
assert last_disrupt['severity']['effect'] == 'DETOUR'
# Verify impacted objects
assert len(last_disrupt['impacted_objects']) == 1
impacted_stops = last_disrupt['impacted_objects'][0]['impacted_stops']
assert len(impacted_stops) == 3
assert bool(impacted_stops[0]['is_detour']) is False
assert impacted_stops[0]['cause'] == 'on time'
assert bool(impacted_stops[1]['is_detour']) is True
assert impacted_stops[1]['cause'] == 'deleted for detour'
assert impacted_stops[1]['departure_status'] == 'unchanged'
assert impacted_stops[1]['arrival_status'] == 'deleted'
assert bool(impacted_stops[2]['is_detour']) is True
assert impacted_stops[2]['cause'] == 'added for detour'
assert impacted_stops[2]['departure_status'] == 'added'
assert impacted_stops[2]['arrival_status'] == 'added'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinStopTimeOnDetourAndArrivesBeforeDeletedAtTheEnd(MockKirinDisruptionsFixture):
def test_stop_time_with_detour_and_arrival_before_deleted_at_the_end(self):
"""
1. create a new_stop_time at C to replace the existing one at A so that we have A deleted_for_detour
and C added_for_detour, with an arrival time earlier than the arrival time of the deleted stop at A
2. Kraken accepts this disruption
"""
disruptions_before = self.query_region('disruptions?_current_datetime=20120614T080000')
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption with one stop_time same as base schedule, another one deleted and
# a new stop_time on stop_point:stopC added at the end
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopA",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
arrival_skipped=True,
departure_skipped=True,
is_detour=True,
message='deleted for detour',
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120614T080101"),
departure=tstamp("20120614T080101"),
is_added=True,
is_detour=True,
message='added for detour',
),
],
disruption_id='stop_time_with_detour',
)
# Verify disruptions
disrupts = self.query_region('disruptions?_current_datetime=20120614T080000')
assert len(disrupts['disruptions']) == nb_disruptions_before + 1
assert has_the_disruption(disrupts, 'stop_time_with_detour')
last_disrupt = disrupts['disruptions'][-1]
assert last_disrupt['severity']['effect'] == 'DETOUR'
# Verify impacted objects
assert len(last_disrupt['impacted_objects']) == 1
impacted_stops = last_disrupt['impacted_objects'][0]['impacted_stops']
assert len(impacted_stops) == 3
assert bool(impacted_stops[0]['is_detour']) is False
assert impacted_stops[0]['cause'] == 'on time'
assert bool(impacted_stops[1]['is_detour']) is True
assert impacted_stops[1]['cause'] == 'deleted for detour'
assert impacted_stops[1]['departure_status'] == 'deleted'
assert impacted_stops[1]['arrival_status'] == 'deleted'
assert bool(impacted_stops[2]['is_detour']) is True
assert impacted_stops[2]['cause'] == 'added for detour'
assert impacted_stops[2]['departure_status'] == 'added'
assert impacted_stops[2]['arrival_status'] == 'added'
B_C_query = "journeys?from={from_coord}&to={to_coord}&datetime={datetime}".format(
from_coord='stop_point:stopB', to_coord='stop_point:stopC', datetime='20120614T080000'
)
# Query with data_freshness=base_schedule
base_journey_query = B_C_query + "&data_freshness=base_schedule&_current_datetime=20120614T080000"
# There is no public transport from B to C
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['type'] == 'best'
assert 'data_freshness' not in response['journeys'][0]['sections'][0] # means it's base_schedule
# Query with data_freshness=realtime
base_journey_query = B_C_query + "&data_freshness=realtime&_current_datetime=20120614T080000"
# There is a public transport from B to C with realtime having only two stop_date_times
# as the deleted-for-detour stop should not be displayed
response = self.query_region(base_journey_query)
assert len(response['journeys']) == 2
assert response['journeys'][0]['status'] == 'DETOUR'
assert response['journeys'][0]['sections'][0]['type'] == 'public_transport'
assert len(response['journeys'][0]['sections'][0]['stop_date_times']) == 2
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
assert response['journeys'][0]['sections'][0]['display_informations']['physical_mode'] == 'Tramway'
assert has_the_disruption(response, 'stop_time_with_detour')
# Tramway is the first physical_mode in NTFS, but we might pick mode in a smarter way in the future
response = self.query_region('physical_modes')
assert response['physical_modes'][0]['name'] == 'Tramway'
# Check attributes of deleted stop_time in the concerned vehicle_journey
vj_query = 'vehicle_journeys/{vj}?_current_datetime={dt}'.format(
vj='vehicle_journey:vjA:modified:0:stop_time_with_detour', dt='20120614T080000'
)
response = self.query_region(vj_query)
assert has_the_disruption(response, 'stop_time_with_detour')
assert len(response['vehicle_journeys']) == 1
assert len(response['vehicle_journeys'][0]['stop_times']) == 3
assert response['vehicle_journeys'][0]['stop_times'][1]['drop_off_allowed'] is False
assert response['vehicle_journeys'][0]['stop_times'][1]['pickup_allowed'] is False
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinAddNewTrip(MockKirinDisruptionsFixture):
def test_add_new_trip(self):
"""
0. test that no PT-Ref object related to the new trip exists and that no PT journey exists
1. create a new trip
2. test that a journey is possible using this new trip
3. test that some PT-Ref objects were created
4. test that /pt_objects returns those objects
5. test that PT-Ref filters are working
6. test /departures and /stop_schedules
"""
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# /journeys before (only direct walk)
C_B_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&"
"datetime={dt}&_current_datetime={dt}".format(
f='stop_point:stopC', to='stop_point:stopB', dt='20120614T080000'
)
)
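# Before any realtime feed is applied, C and B are not linked by any trip,
# so this query can only return the direct walk (checked just below).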
response = self.query_region(C_B_query)
assert not has_the_disruption(response, 'new_trip')
self.is_valid_journey_response(response, C_B_query)
assert len(response['journeys']) == 1
assert 'non_pt_walking' in response['journeys'][0]['tags']
# /pt_objects before
ptobj_query = 'pt_objects?q={q}&_current_datetime={dt}'.format(q='adi', dt='20120614T080000') # 'adi' is a deliberate partial/typo query for the 'additional*' objects created later
response = self.query_region(ptobj_query)
assert 'pt_objects' not in response
# Check that no vehicle_journey exists on the future realtime-trip
vj_query = 'vehicle_journeys/{vj}?_current_datetime={dt}'.format(
vj='vehicle_journey:additional-trip:modified:0:new_trip', dt='20120614T080000'
)
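# The id queried here follows the pattern used for realtime vehicle_journeys in these tests:
# vehicle_journey:<trip_id>:modified:<index>:<disruption_id>
# (the same pattern as 'vehicle_journey:vjA:modified:0:vjA_delayed' seen in other tests).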
response, status = self.query_region(vj_query, check=False)
assert status == 404
assert 'vehicle_journeys' not in response
# Check that no additional line exists
line_query = 'lines/{l}?_current_datetime={dt}'.format(l='line:stopC_stopB', dt='20120614T080000')
response, status = self.query_region(line_query, check=False)
assert status == 404
assert 'lines' not in response
# Check that PT-Ref filter fails as no object exists
vj_filter_query = 'commercial_modes/{cm}/vehicle_journeys?_current_datetime={dt}'.format(
cm='commercial_mode:additional_service', dt='20120614T080000'
)
response, status = self.query_region(vj_filter_query, check=False)
assert status == 404
assert response['error']['message'] == 'ptref : Filters: Unable to find object'
network_filter_query = 'vehicle_journeys/{vj}/networks?_current_datetime={dt}'.format(
vj='vehicle_journey:additional-trip:modified:0:new_trip', dt='20120614T080000'
)
response, status = self.query_region(network_filter_query, check=False)
assert status == 404
assert response['error']['message'] == 'ptref : Filters: Unable to find object'
# Check that no departure exists on stop_point:stopC, neither in base_schedule nor in realtime
departure_query = "stop_points/stop_point:stopC/departures?_current_datetime=20120614T080000"
departures = self.query_region(departure_query + '&data_freshness=base_schedule')
assert len(departures['departures']) == 0
departures = self.query_region(departure_query + '&data_freshness=realtime')
assert len(departures['departures']) == 0
# Check stop_schedules on stop_point:stopC (realtime): the date_times list is empty
ss_on_sp_query = "stop_points/stop_point:stopC/stop_schedules?_current_datetime=20120614T080000"
stop_schedules = self.query_region(ss_on_sp_query + '&data_freshness=realtime')
assert len(stop_schedules['stop_schedules']) == 1
assert stop_schedules['stop_schedules'][0]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'D'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 0
# Check that no stop_schedule exists on line:stopC_stopB for stop_point:stopC
ss_on_line_query = (
"stop_points/stop_point:stopC/lines/line:stopC_stopB/"
"stop_schedules?_current_datetime=20120614T080000"
)
stop_schedules, status = self.query_region(ss_on_line_query + '&data_freshness=realtime', check=False)
assert status == 404
assert len(stop_schedules['stop_schedules']) == 0
# New disruption, a new trip without headsign with 2 stop_times in realtime
self.send_mock(
"additional-trip",
"20120614",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
physical_mode_id='physical_mode:Bus', # this physical mode exists in kraken
)
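# This 'added' feed with effect='additional_service' is expected to create the whole PT-Ref
# chain for the new trip (network/commercial_mode 'additional_service', line and route
# 'stopC_stopB', plus the vehicle_journey), which the assertions below go through one by one.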
# Check new disruption 'additional-trip' to add a new trip
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
new_trip_disruptions = get_disruptions_by_id(disruptions_after, 'new_trip')
assert len(new_trip_disruptions) == 1
new_trip_disrupt = new_trip_disruptions[0]
assert new_trip_disrupt['id'] == 'new_trip'
assert new_trip_disrupt['severity']['effect'] == 'ADDITIONAL_SERVICE'
assert len(new_trip_disrupt['impacted_objects'][0]['impacted_stops']) == 2
assert all(
[
(s['departure_status'] == 'added' and s['arrival_status'] == 'added')
for s in new_trip_disrupt['impacted_objects'][0]['impacted_stops']
]
)
assert new_trip_disrupt['application_periods'][0]['begin'] == '20120614T080100'
assert new_trip_disrupt['application_periods'][0]['end'] == '20120614T080102'
# Check that a PT journey now exists
response = self.query_region(C_B_query)
assert has_the_disruption(response, 'new_trip')
self.is_valid_journey_response(response, C_B_query)
assert len(response['journeys']) == 2
pt_journey = response['journeys'][0]
assert 'non_pt_walking' not in pt_journey['tags']
assert pt_journey['status'] == 'ADDITIONAL_SERVICE'
assert pt_journey['sections'][0]['data_freshness'] == 'realtime'
assert pt_journey['sections'][0]['display_informations']['commercial_mode'] == 'additional service'
assert pt_journey['sections'][0]['display_informations']['physical_mode'] == 'Bus'
# Check date_times
assert pt_journey['sections'][0]['departure_date_time'] == '20120614T080100'
assert pt_journey['sections'][0]['arrival_date_time'] == '20120614T080102'
assert pt_journey['sections'][0]['stop_date_times'][0]['arrival_date_time'] == '20120614T080100'
assert pt_journey['sections'][0]['stop_date_times'][-1]['arrival_date_time'] == '20120614T080102'
# Check /pt_objects after: new objects created
response = self.query_region(ptobj_query)
assert len(response['pt_objects']) == 4
assert len([o for o in response['pt_objects'] if o['id'] == 'network:additional_service']) == 1
assert len([o for o in response['pt_objects'] if o['id'] == 'commercial_mode:additional_service']) == 1
assert len([o for o in response['pt_objects'] if o['id'] == 'line:stopC_stopB']) == 1
assert len([o for o in response['pt_objects'] if o['id'] == 'route:stopC_stopB']) == 1
# Check that the vehicle_journey has been created
response = self.query_region(vj_query)
assert has_the_disruption(response, 'new_trip')
assert len(response['vehicle_journeys']) == 1
# Check that name and headsign are empty
assert response['vehicle_journeys'][0]['name'] == ''
assert response['vehicle_journeys'][0]['headsign'] == ''
assert response['vehicle_journeys'][0]['disruptions'][0]['id'] == 'new_trip'
assert len(response['vehicle_journeys'][0]['stop_times']) == 2
assert response['vehicle_journeys'][0]['stop_times'][0]['drop_off_allowed'] is True
assert response['vehicle_journeys'][0]['stop_times'][0]['pickup_allowed'] is True
# Check that the new line has been created with necessary information
response = self.query_region(line_query)
assert len(response['lines']) == 1
assert response['lines'][0]['name'] == 'stopC - stopB'
assert response['lines'][0]['network']['id'] == 'network:additional_service'
assert response['lines'][0]['commercial_mode']['id'] == 'commercial_mode:additional_service'
assert response['lines'][0]['routes'][0]['id'] == 'route:stopC_stopB'
assert response['lines'][0]['routes'][0]['name'] == 'stopC - stopB'
assert response['lines'][0]['routes'][0]['direction']['id'] == 'stopB'
assert response['lines'][0]['routes'][0]['direction_type'] == 'forward'
# Check that objects created are linked in PT-Ref filter
response = self.query_region(vj_filter_query)
assert has_the_disruption(response, 'new_trip')
assert len(response['vehicle_journeys']) == 1
# Check that the newly created vehicle journey is correctly filtered by &since and &until
# Note: for backward compatibility, the &data_freshness parameter (here base_schedule)
# is added and works together with &since and &until
vj_base_query = (
'commercial_modes/commercial_mode:additional_service/vehicle_journeys?'
'_current_datetime={dt}&since={sin}&until={un}&data_freshness={df}'
)
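# For illustration, with df='realtime' this template expands to:
# commercial_modes/commercial_mode:additional_service/vehicle_journeys?_current_datetime=20120614T080000&since=20120614T080100&until=20120614T080102&data_freshness=realtime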
response, status = self.query_region(
vj_base_query.format(
dt='20120614T080000', sin='20120614T080100', un='20120614T080102', df='base_schedule'
),
check=False,
)
assert status == 404
assert 'vehicle_journeys' not in response
response = self.query_region(
vj_base_query.format(
dt='20120614T080000', sin='20120614T080100', un='20120614T080102', df='realtime'
)
)
assert len(response['vehicle_journeys']) == 1
response, status = self.query_region(
vj_base_query.format(
dt='20120614T080000', sin='20120614T080101', un='20120614T080102', df='realtime'
),
check=False,
)
assert status == 404
assert 'vehicle_journeys' not in response
response = self.query_region(network_filter_query)
assert len(response['networks']) == 1
assert response['networks'][0]['name'] == 'additional service'
# Check that no departure exists on stop_point:stopC for base_schedule
departures = self.query_region(departure_query + '&data_freshness=base_schedule')
assert len(departures['departures']) == 0
# Check that departures exist on stop_point:stopC in realtime, with the disruption attached
departures = self.query_region(departure_query + '&data_freshness=realtime')
assert len(departures['disruptions']) == 1
assert departures['disruptions'][0]['disruption_uri'] == 'new_trip'
assert departures['departures'][0]['display_informations']['name'] == 'stopC - stopB'
# Check that the stop_schedule on line "line:stopC_stopB" at stop_point:stopC
# has an empty date_times list for base_schedule.
stop_schedules = self.query_region(ss_on_line_query + '&data_freshness=base_schedule')
assert len(stop_schedules['stop_schedules']) == 1
assert stop_schedules['stop_schedules'][0]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'line:stopC_stopB'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 0
# Check that the stop_schedule on line "line:stopC_stopB" at stop_point:stopC
# exists in realtime, with the disruption attached.
stop_schedules = self.query_region(ss_on_line_query + '&data_freshness=realtime')
assert len(stop_schedules['stop_schedules']) == 1
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'line:stopC_stopB'
assert len(stop_schedules['disruptions']) == 1
assert stop_schedules['disruptions'][0]['uri'] == 'new_trip'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 1
assert stop_schedules['stop_schedules'][0]['date_times'][0]['date_time'] == '20120614T080100'
assert stop_schedules['stop_schedules'][0]['date_times'][0]['data_freshness'] == 'realtime'
# Check stop_schedules on stop_point stop_point:stopC for base_schedule
# Date_times list is empty for both stop_schedules
stop_schedules = self.query_region(ss_on_sp_query + '&data_freshness=base_schedule')
assert len(stop_schedules['stop_schedules']) == 2
assert stop_schedules['stop_schedules'][0]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'D'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 0
assert stop_schedules['stop_schedules'][1]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][1]['links'][0]['id'] == 'line:stopC_stopB'
assert len(stop_schedules['stop_schedules'][1]['date_times']) == 0
# Check stop_schedules on stop_point stop_point:stopC for realtime
# Date_times list is empty for line 'D' but not for the new line added
stop_schedules = self.query_region(ss_on_sp_query + '&data_freshness=realtime')
assert len(stop_schedules['stop_schedules']) == 2
assert stop_schedules['stop_schedules'][0]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'D'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 0
assert stop_schedules['stop_schedules'][1]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][1]['links'][0]['id'] == 'line:stopC_stopB'
assert len(stop_schedules['stop_schedules'][1]['date_times']) == 1
assert stop_schedules['stop_schedules'][1]['date_times'][0]['date_time'] == '20120614T080100'
assert stop_schedules['stop_schedules'][1]['date_times'][0]['data_freshness'] == 'realtime'
# Check stop_schedules on stop_area stopC for base_schedule
# Date_times list is empty for both stop_schedules
ss_on_sa_query = "stop_areas/stopC/stop_schedules?_current_datetime=20120614T080000"
stop_schedules = self.query_region(ss_on_sa_query + '&data_freshness=base_schedule')
assert len(stop_schedules['stop_schedules']) == 2
assert stop_schedules['stop_schedules'][0]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'D'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 0
assert stop_schedules['stop_schedules'][1]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][1]['links'][0]['id'] == 'line:stopC_stopB'
assert len(stop_schedules['stop_schedules'][1]['date_times']) == 0
# Check stop_schedules on stop_area stopC for realtime
# Date_times list is empty for line 'D' but not for the new line added
ss_on_sa_query = "stop_areas/stopC/stop_schedules?_current_datetime=20120614T080000"
stop_schedules = self.query_region(ss_on_sa_query + '&data_freshness=realtime')
assert len(stop_schedules['stop_schedules']) == 2
assert stop_schedules['stop_schedules'][0]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][0]['links'][0]['id'] == 'D'
assert len(stop_schedules['stop_schedules'][0]['date_times']) == 0
assert stop_schedules['stop_schedules'][1]['links'][0]['type'] == 'line'
assert stop_schedules['stop_schedules'][1]['links'][0]['id'] == 'line:stopC_stopB'
assert len(stop_schedules['stop_schedules'][1]['date_times']) == 1
assert stop_schedules['stop_schedules'][1]['date_times'][0]['date_time'] == '20120614T080100'
assert stop_schedules['stop_schedules'][1]['date_times'][0]['data_freshness'] == 'realtime'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestPtRefOnAddedTrip(MockKirinDisruptionsFixture):
def test_ptref_on_added_trip(self):
"""
1. Test all possible ptref calls with/without filters before adding a new trip
2. Test all possible ptref calls with/without filters after adding a new trip
3. Test all possible ptref calls with/without filters after modifying the recently added trip
Note: the physical_mode is provided in the gtfs-rt feed, whereas default values are used for network and commercial_mode
"""
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# Verify that the network, line and commercial_mode of the trip to be added later are absent
resp, status = self.query_region("networks/network:additional_service", check=False)
assert status == 404
assert resp['error']['message'] == 'ptref : Filters: Unable to find object'
resp, status = self.query_region("lines/line:stopC_stopB", check=False)
assert status == 404
assert resp['error']['message'] == 'ptref : Filters: Unable to find object'
resp, status = self.query_region("commercial_modes/commercial_mode:additional_service", check=False)
assert status == 404
assert resp['error']['message'] == 'ptref : Filters: Unable to find object'
# The following ptref search should work with base-schedule data.
# network <-> datasets
resp = self.query_region("networks/base_network/datasets")
assert resp["datasets"][0]["id"] == "default:dataset"
resp = self.query_region("datasets/default:dataset/networks")
assert resp["networks"][0]["id"] == "base_network"
# line <-> company
resp = self.query_region("lines/A/companies")
assert resp["companies"][0]["id"] == "base_company"
resp = self.query_region("companies/base_company/lines")
assert resp["lines"][0]["id"] == "A"
# company <-> commercial_modes
resp = self.query_region("companies/base_company/commercial_modes")
assert resp['commercial_modes'][0]['id'] == '0x0'
resp = self.query_region("commercial_modes/0x0/companies")
assert resp["companies"][0]["id"] == "base_company"
# route <-> dataset
resp = self.query_region("routes/B:3/datasets")
assert resp["datasets"][0]["id"] == "default:dataset"
resp = self.query_region("datasets/default:dataset/routes")
routes = [rt["id"] for rt in resp["routes"]]
assert "B:3" in routes
# vehicle_journey <-> company
resp = self.query_region("vehicle_journeys/vehicle_journey:vjA/companies")
assert resp["companies"][0]["id"] == "base_company"
resp = self.query_region("companies/base_company/vehicle_journeys")
assert len(resp["vehicle_journeys"]) == 9
# network <-> contributor
resp = self.query_region("networks/base_network/contributors")
assert resp["contributors"][0]["id"] == "default:contributor"
resp = self.query_region("contributors/default:contributor/networks")
assert resp["networks"][0]["id"] == "base_network"
# New disruption, a new trip with 2 stop_times in realtime
self.send_mock(
"additional-trip",
"20120614",
"added",
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id="new_trip",
effect="additional_service",
physical_mode_id="physical_mode:Bus", # this physical mode exists in kraken
)
# Check new disruption 'additional-trip' to add a new trip
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
# Verify that network, line, commercial_mode of the new trip are present
resp = self.query_region("networks/network:additional_service")
assert "networks" in resp
resp = self.query_region("lines/line:stopC_stopB")
assert "lines" in resp
resp = self.query_region("commercial_modes/commercial_mode:additional_service")
assert "commercial_modes" in resp
resp = self.query_region("networks/network:additional_service/physical_modes")
assert resp["physical_modes"][0]["id"] == "physical_mode:Bus"
resp = self.query_region("physical_modes/physical_mode:Bus/networks")
networks = [nw["id"] for nw in resp["networks"]]
assert "network:additional_service" in networks
# network by line should work
resp = self.query_region("lines/line:stopC_stopB/networks")
assert resp["networks"][0]["id"] == "network:additional_service"
# The physical_mode sent in gtfs-rt should be present in the new line added
resp = self.query_region("lines/line:stopC_stopB/physical_modes")
assert resp["physical_modes"][0]["id"] == "physical_mode:Bus"
# The default commercial_mode used for a new line should be present
resp = self.query_region("lines/line:stopC_stopB/commercial_modes")
assert resp["commercial_modes"][0]["id"] == "commercial_mode:additional_service"
# Newly added lines should have a route and a vehicle_journey
resp = self.query_region("lines/line:stopC_stopB/routes")
assert resp["routes"][0]["id"] == "route:stopC_stopB"
resp = self.query_region("lines/line:stopC_stopB/vehicle_journeys")
assert resp["vehicle_journeys"][0]["id"] == "vehicle_journey:additional-trip:modified:0:new_trip"
# Name and headsign are empty
assert resp["vehicle_journeys"][0]["name"] == ""
assert resp["vehicle_journeys"][0]["headsign"] == ""
# We should be able to get the line from vehicle_journey recently added
resp = self.query_region("vehicle_journeys/vehicle_journey:additional-trip:modified:0:new_trip/lines")
assert resp["lines"][0]["id"] == "line:stopC_stopB"
# We should be able to get the physical_mode sent in gtfs-rt from vehicle_journey recently added
resp = self.query_region(
"vehicle_journeys/vehicle_journey:additional-trip:modified:0:new_trip/physical_modes"
)
assert resp["physical_modes"][0]["id"] == "physical_mode:Bus"
# The following ptref search should work with a trip added.
# network <-> datasets
resp = self.query_region("networks/network:additional_service/datasets")
assert resp["datasets"][0]["id"] == "default:dataset"
resp = self.query_region("datasets/default:dataset/networks")
networks = [nw["id"] for nw in resp["networks"]]
assert "network:additional_service" in networks
# route <-> dataset
resp = self.query_region("routes/route:stopC_stopB/datasets")
assert resp["datasets"][0]["id"] == "default:dataset"
resp = self.query_region("datasets/default:dataset/routes")
routes = [rt["id"] for rt in resp["routes"]]
assert "route:stopC_stopB" in routes
# route <-> physical_mode
resp = self.query_region("routes/route:stopC_stopB/physical_modes")
assert resp["physical_modes"][0]["id"] == "physical_mode:Bus"
resp = self.query_region("physical_modes/physical_mode:Bus/routes")
routes = [rt["id"] for rt in resp["routes"]]
assert "route:stopC_stopB" in routes
# route <-> stop_point
resp = self.query_region("routes/route:stopC_stopB/stop_points")
sps = [sp["id"] for sp in resp["stop_points"]]
assert "stop_point:stopC" in sps
assert "stop_point:stopB" in sps
resp = self.query_region("stop_points/stop_point:stopC/routes")
routes = [rt["id"] for rt in resp["routes"]]
assert "route:stopC_stopB" in routes
resp = self.query_region("stop_points/stop_point:stopB/routes")
routes = [rt["id"] for rt in resp["routes"]]
assert "route:stopC_stopB" in routes
# network <-> contributor
resp = self.query_region("networks/network:additional_service/contributors")
assert resp["contributors"][0]["id"] == "default:contributor"
resp = self.query_region("contributors/default:contributor/networks")
networks = [nw["id"] for nw in resp["networks"]]
assert "network:additional_service" in networks
# line <-> company
resp = self.query_region("lines/line:stopC_stopB/companies")
assert resp["companies"][0]["id"] == "base_company"
resp = self.query_region("companies/base_company/lines")
assert resp["lines"][7]["id"] == "line:stopC_stopB"
# vehicle_journey <-> company
resp = self.query_region(
"vehicle_journeys/vehicle_journey:additional-trip:modified:0:new_trip/companies"
)
assert resp["companies"][0]["id"] == "base_company"
resp = self.query_region("companies/base_company/vehicle_journeys")
vjs = [vj["id"] for vj in resp["vehicle_journeys"]]
assert "vehicle_journey:additional-trip:modified:0:new_trip" in vjs
# commercial_mode <-> company
resp = self.query_region("commercial_modes/commercial_mode:additional_service/companies")
assert resp["companies"][0]["id"] == "base_company"
resp = self.query_region("companies/base_company/commercial_modes")
commercial_modes = [cm["id"] for cm in resp["commercial_modes"]]
assert "commercial_mode:additional_service" in commercial_modes
# stop_point <-> dataset
resp = self.query_region("stop_points/stop_point:stopC/datasets")
assert resp["datasets"][0]["id"] == "default:dataset"
resp = self.query_region("stop_points/stop_point:stopB/datasets")
assert resp["datasets"][0]["id"] == "default:dataset"
resp = self.query_region("datasets/default:dataset/stop_points")
sps = [sp["id"] for sp in resp["stop_points"]]
assert "stop_point:stopC" in sps
assert "stop_point:stopB" in sps
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinAddNewTripWithWrongPhysicalMode(MockKirinDisruptionsFixture):
def test_add_new_trip_with_wrong_physical_mode(self):
"""
1. send a disruption to create a new trip with a physical_mode that is absent in kraken
2. check journeys, disruptions and PT-Ref objects to verify that no trip is added
"""
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption, a new trip with 2 stop_times in realtime
self.send_mock(
"additional-trip",
"20120614",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
physical_mode_id='physical_mode:Toto', # this physical mode doesn't exist in kraken
)
# Check there is no new disruption
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before == len(disruptions_after['disruptions'])
# /journeys: since no PT trip was added, only the direct walk is returned
C_B_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&"
"datetime={dt}&_current_datetime={dt}".format(
f='stop_point:stopC', to='stop_point:stopB', dt='20120614T080000'
)
)
response = self.query_region(C_B_query)
assert not has_the_disruption(response, 'new_trip')
self.is_valid_journey_response(response, C_B_query)
assert len(response['journeys']) == 1
assert 'non_pt_walking' in response['journeys'][0]['tags']
# Check that no vehicle_journey is added
vj_query = 'vehicle_journeys/{vj}?_current_datetime={dt}'.format(
vj='vehicle_journey:additional-trip:modified:0:new_trip', dt='20120614T080000'
)
response, status = self.query_region(vj_query, check=False)
assert status == 404
assert 'vehicle_journeys' not in response
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinAddNewTripWithoutPhysicalMode(MockKirinDisruptionsFixture):
def test_add_new_trip_without_physical_mode(self):
"""
1. send a disruption to create a new trip without any physical_mode
2. check the physical_mode of the resulting journey
"""
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption, a new trip with 2 stop_times in realtime
self.send_mock(
"additional-trip",
"20120614",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
)
# Check that a new disruption is added
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
C_B_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&"
"datetime={dt}&_current_datetime={dt}".format(
f='stop_point:stopC', to='stop_point:stopB', dt='20120614T080000'
)
)
        # Check that a PT journey exists, using the first physical_mode of the NTFS ('Tramway')
response = self.query_region(C_B_query)
assert has_the_disruption(response, 'new_trip')
self.is_valid_journey_response(response, C_B_query)
assert len(response['journeys']) == 2
pt_journey = response['journeys'][0]
assert 'non_pt_walking' not in pt_journey['tags']
assert pt_journey['status'] == 'ADDITIONAL_SERVICE'
assert pt_journey['sections'][0]['data_freshness'] == 'realtime'
assert pt_journey['sections'][0]['display_informations']['commercial_mode'] == 'additional service'
assert pt_journey['sections'][0]['display_informations']['physical_mode'] == 'Tramway'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinUpdateTripWithPhysicalMode(MockKirinDisruptionsFixture):
def test_update_trip_with_physical_mode(self):
"""
1. send a disruption with a physical_mode to update a trip
2. check physical_mode of journey
"""
        # we start with 9 vehicle_journeys
pt_response = self.query_region('vehicle_journeys')
initial_nb_vehicle_journeys = len(pt_response['vehicle_journeys'])
assert initial_nb_vehicle_journeys == 9
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# physical_mode of base vehicle_journey
pt_response = self.query_region(
'vehicle_journeys/vehicle_journey:vjA/physical_modes?_current_datetime=20120614T1337'
)
assert len(pt_response['physical_modes']) == 1
assert pt_response['physical_modes'][0]['name'] == 'Tramway'
self.send_mock(
"vjA",
"20120614",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120614T080224"),
departure=tstamp("20120614T080225"),
arrival_delay=60 + 24,
departure_delay=60 + 25,
message='cow on tracks',
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120614T080400"),
departure=tstamp("20120614T080400"),
arrival_delay=3 * 60 + 58,
departure_delay=3 * 60 + 58,
),
],
disruption_id='vjA_delayed',
physical_mode_id='physical_mode:Bus', # this physical mode exists in kraken
)
# Check that a new disruption is added
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
# A new vj is created
pt_response = self.query_region('vehicle_journeys')
assert len(pt_response['vehicle_journeys']) == (initial_nb_vehicle_journeys + 1)
# physical_mode of the newly created vehicle_journey is the base vehicle_journey physical mode (Tramway)
pt_response = self.query_region(
'vehicle_journeys/vehicle_journey:vjA:modified:0:vjA_delayed/physical_modes'
)
assert len(pt_response['physical_modes']) == 1
assert pt_response['physical_modes'][0]['name'] == 'Tramway'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinAddTripWithHeadSign(MockKirinDisruptionsFixture):
def test_add_trip_with_headsign(self):
"""
1. send a disruption with a headsign to add a trip
2. check that headsign is present in journey.section.display_informations
"""
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption, a new trip with 2 stop_times in realtime
self.send_mock(
"additional-trip",
"20120614",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
headsign='trip_headsign',
)
# Check that a new disruption is added
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
C_B_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&"
"datetime={dt}&_current_datetime={dt}".format(
f='stop_point:stopC', to='stop_point:stopB', dt='20120614T080000'
)
)
# Check that a PT journey exists with trip_headsign in display_informations
response = self.query_region(C_B_query)
assert has_the_disruption(response, 'new_trip')
self.is_valid_journey_response(response, C_B_query)
assert len(response['journeys']) == 2
pt_journey = response['journeys'][0]
assert pt_journey['status'] == 'ADDITIONAL_SERVICE'
assert pt_journey['sections'][0]['data_freshness'] == 'realtime'
assert pt_journey['sections'][0]['display_informations']['headsign'] == 'trip_headsign'
# Check the vehicle_journey created by real-time
new_vj = self.query_region('vehicle_journeys/vehicle_journey:additional-trip:modified:0:new_trip')
assert len(new_vj['vehicle_journeys']) == 1
assert (new_vj['vehicle_journeys'][0]['name']) == 'trip_headsign'
assert (new_vj['vehicle_journeys'][0]['headsign']) == 'trip_headsign'
@dataset(MAIN_ROUTING_TEST_SETTING_NO_ADD)
class TestKirinAddNewTripBlocked(MockKirinDisruptionsFixture):
def test_add_new_trip_blocked(self):
"""
Disable realtime trip-add in Kraken
1. send a disruption to create a new trip
2. test that no journey is possible using this new trip
3. test that no PT-Ref objects were created
4. test that /pt_objects doesn't return objects
5. test that PT-Ref filters find nothing
6. test /departures and stop_schedules
"""
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# New disruption, a new trip with 2 stop_times in realtime
self.send_mock(
"additional-trip",
"20120614",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
)
# Check there is no new disruption
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before == len(disruptions_after['disruptions'])
# /journeys before (only direct walk)
C_B_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&"
"datetime={dt}&_current_datetime={dt}".format(
f='stop_point:stopC', to='stop_point:stopB', dt='20120614T080000'
)
)
response = self.query_region(C_B_query)
assert not has_the_disruption(response, 'new_trip')
self.is_valid_journey_response(response, C_B_query)
assert len(response['journeys']) == 1
assert 'non_pt_walking' in response['journeys'][0]['tags']
# /pt_objects before
ptobj_query = 'pt_objects?q={q}&_current_datetime={dt}'.format(q='adi', dt='20120614T080000') # ++typo
response = self.query_region(ptobj_query)
assert 'pt_objects' not in response
# Check that no vehicle_journey exists on the future realtime-trip
vj_query = 'vehicle_journeys/{vj}?_current_datetime={dt}'.format(
vj='vehicle_journey:additional-trip:modified:0:new_trip', dt='20120614T080000'
)
response, status = self.query_region(vj_query, check=False)
assert status == 404
assert 'vehicle_journeys' not in response
# Check that no additional line exists
line_query = 'lines/{l}?_current_datetime={dt}'.format(l='line:stopC_stopB', dt='20120614T080000')
response, status = self.query_region(line_query, check=False)
assert status == 404
assert 'lines' not in response
# Check that PT-Ref filter fails as no object exists
vj_filter_query = 'commercial_modes/{cm}/vehicle_journeys?_current_datetime={dt}'.format(
cm='commercial_mode:additional_service', dt='20120614T080000'
)
response, status = self.query_region(vj_filter_query, check=False)
assert status == 404
assert response['error']['message'] == 'ptref : Filters: Unable to find object'
network_filter_query = 'vehicle_journeys/{vj}/networks?_current_datetime={dt}'.format(
vj='vehicle_journey:additional-trip:modified:0:new_trip', dt='20120614T080000'
)
response, status = self.query_region(network_filter_query, check=False)
assert status == 404
assert response['error']['message'] == 'ptref : Filters: Unable to find object'
# Check that no departure exist on stop_point stop_point:stopC
departure_query = "stop_points/stop_point:stopC/departures?_current_datetime=20120614T080000"
departures = self.query_region(departure_query)
assert len(departures['departures']) == 0
# Check that no stop_schedule exist on line:stopC_stopB and stop_point stop_point:stopC
ss_query = (
"stop_points/stop_point:stopC/lines/line:stopC_stopB/"
"stop_schedules?_current_datetime=20120614T080000&data_freshness=realtime"
)
stop_schedules, status = self.query_region(ss_query, check=False)
assert status == 404
assert len(stop_schedules['stop_schedules']) == 0
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinAddNewTripPresentInNavitiaTheSameDay(MockKirinDisruptionsFixture):
def test_add_new_trip_present_in_navitia_the_same_day(self):
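        """
        1. send a disruption to add a new trip whose id and date match an existing base vehicle_journey (vjA)
        2. check that the feed is rejected: no new disruption is added
        """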
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# The vehicle_journey vjA is present in navitia
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['vehicle_journeys']) == 1
assert len(pt_response['disruptions']) == 0
# New disruption, a new trip with vehicle_journey id = vjA and having 2 stop_times in realtime
self.send_mock(
"vjA",
"20120614",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080100"),
departure=tstamp("20120614T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120614T080102"),
departure=tstamp("20120614T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
)
        # Check that no new disruption is added
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before == len(disruptions_after['disruptions'])
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinAddNewTripPresentInNavitiaWithAShift(MockKirinDisruptionsFixture):
def test_add_new_trip_present_in_navitia_with_a_shift(self):
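        """
        1. send a disruption to add a new trip reusing the meta-vj id vjA, but on 20120620,
        a day on which the base vehicle_journey does not circulate
        2. check that the feed is accepted: a new disruption is added
        """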
disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
disruptions_before = self.query_region(disruption_query)
nb_disruptions_before = len(disruptions_before['disruptions'])
# The vehicle_journey vjA is present in navitia
pt_response = self.query_region('vehicle_journeys/vehicle_journey:vjA?_current_datetime=20120614T1337')
assert len(pt_response['vehicle_journeys']) == 1
assert len(pt_response['disruptions']) == 0
# New disruption, a new trip with meta vehicle journey id = vjA and having 2 stop_times in realtime
self.send_mock(
"vjA",
"20120620",
'added',
[
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120620T080100"),
departure=tstamp("20120620T080100"),
message='on time',
),
UpdatedStopTime(
"stop_point:stopB",
arrival_delay=0,
departure_delay=0,
is_added=True,
arrival=tstamp("20120620T080102"),
departure=tstamp("20120620T080102"),
),
],
disruption_id='new_trip',
effect='additional_service',
)
        # The new trip is accepted because it is not on the same day as the base vj,
        # so a disruption is added
disruptions_after = self.query_region(disruption_query)
assert nb_disruptions_before + 1 == len(disruptions_after['disruptions'])
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinDelayPassMidnightTowardsNextDay(MockKirinDisruptionsFixture):
def test_delay_pass_midnight_towards_next_day(self):
"""
Relates to "test_cots_update_trip_with_delay_pass_midnight_on_first_station" in kirin
1. Add a disruption with a delay in second station (stop_point:stopA) so that there is a pass midnight
2. Verify disruption count, vehicle_journeys count and journey
3. Update the disruption so that departure station stop_point:stopB is replaced by stop_point:stopC
with a delay so that there is no more pass midnight
4. Verify disruption count, vehicle_journeys count and journey
        Note: '&forbidden_uris[]=PM' is used to avoid line 'PM' and its vj=vjPB in /journeys
"""
        disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120614T080000')
initial_nb_disruptions = len(self.query_region(disruption_query)['disruptions'])
pt_response = self.query_region('vehicle_journeys')
initial_nb_vehicle_journeys = len(pt_response['vehicle_journeys'])
empty_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&max_duration_to_pt=0&"
"datetime={dt}&_current_datetime={dt}&forbidden_uris[]=PM"
)
# Check journeys in realtime for 20120615(the day of the future disruption) from B to A
        # vjB circulates every day with departure at 18:01:00 and arrival at 18:01:02
ba_15T18_journey_query = empty_query.format(
f='stop_point:stopB', to='stop_point:stopA', dt='20120615T180000'
)
response = self.query_region(ba_15T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120615T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120615T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# vjB circulates the day before at 18:01:00 and arrival at 18:01:02
ba_14T18_journey_query = empty_query.format(
f='stop_point:stopB', to='stop_point:stopA', dt='20120614T180000'
)
response = self.query_region(ba_14T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120614T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120614T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# vjB circulates the day after at 18:01:00 and arrival at 18:01:02
ba_16T18_journey_query = empty_query.format(
f='stop_point:stopB', to='stop_point:stopA', dt='20120616T180000'
)
response = self.query_region(ba_16T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120616T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120616T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# A new disruption with a delay on arrival station to have a pass midnight
self.send_mock(
"vjB",
"20120615",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120615T180100"),
departure=tstamp("20120615T180100"),
arrival_delay=0,
departure_delay=0,
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120616T010102"),
departure=tstamp("20120616T010102"),
arrival_delay=7 * 60 * 60,
message="Delayed to have pass midnight",
),
],
disruption_id='stop_time_with_detour',
effect='delayed',
)
# A new disruption is added
disruptions_after = self.query_region(disruption_query)
assert initial_nb_disruptions + 1 == len(disruptions_after['disruptions'])
# A new vehicle_journey is added
pt_response = self.query_region('vehicle_journeys')
assert initial_nb_vehicle_journeys + 1 == len(pt_response['vehicle_journeys'])
# Check journeys in realtime for 20120615, the day of the disruption from B to A
# vjB circulates with departure at 20120615T18:01:00 and arrival at 20120616T01:01:02
response = self.query_region(ba_15T18_journey_query + '&forbidden_uris[]=PM')
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120615T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120616T010102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['base_departure_date_time'] == '20120615T180100'
assert response['journeys'][0]['sections'][0]['departure_date_time'] == '20120615T180100'
assert response['journeys'][0]['sections'][0]['base_arrival_date_time'] == '20120615T180102'
assert response['journeys'][0]['sections'][0]['arrival_date_time'] == '20120616T010102'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
# vjB circulates the day before at 18:01:00 and arrival at 18:01:02
response = self.query_region(ba_14T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120614T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120614T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# vjB circulates the day after at 18:01:00 and arrival at 18:01:02
response = self.query_region(ba_16T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120616T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120616T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# Disruption is modified with first station on detour and delay so that there is no more pass midnight
self.send_mock(
"vjB",
"20120615",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120615T000100"),
departure=tstamp("20120615T180100"),
arrival_delay=0,
departure_delay=0,
arrival_skipped=True,
departure_skipped=True,
is_detour=True,
message='deleted for detour',
),
UpdatedStopTime(
"stop_point:stopC",
arrival_delay=0,
departure_delay=0,
arrival=tstamp("20120616T003000"),
departure=tstamp("20120616T003000"),
is_added=True,
is_detour=True,
message='added for detour',
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120616T010102"),
departure=tstamp("20120616T010102"),
arrival_delay=7 * 60 * 60,
message="No more pass midnight",
),
],
disruption_id='stop_time_with_detour',
effect='delayed',
)
# The disruption created above is modified so no disruption is added
disruptions_after = self.query_region(disruption_query)
assert initial_nb_disruptions + 1 == len(disruptions_after['disruptions'])
# The disruption created above is modified so no vehicle_journey is added
pt_response = self.query_region('vehicle_journeys')
assert initial_nb_vehicle_journeys + 1 == len(pt_response['vehicle_journeys'])
        # A query at 20120615T180000 now has to wait until 00:30:00 the next day
# vjB circulates on 20120616 with departure at 00:30:00 and arrival at 01:01:02
ca_15T18_journey_query = empty_query.format(
f='stop_point:stopC', to='stop_point:stopA', dt='20120615T180000'
)
response = self.query_region(ca_15T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120616T003000'
assert response['journeys'][0]['arrival_date_time'] == '20120616T010102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
assert len(response['journeys'][0]['sections'][0]['stop_date_times']) == 2
# vjB circulates the day before at 18:01:00 and arrival at 18:01:02
response = self.query_region(ba_14T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120614T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120614T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# vjB circulates the day after at 18:01:00 and arrival at 18:01:02
response = self.query_region(ba_16T18_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120616T180100'
assert response['journeys'][0]['arrival_date_time'] == '20120616T180102'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'B'
assert response['journeys'][0]['sections'][0]['base_departure_date_time'] == '20120616T180100'
assert response['journeys'][0]['sections'][0]['base_arrival_date_time'] == '20120616T180102'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
@dataset(MAIN_ROUTING_TEST_SETTING)
class TestKirinDelayOnBasePassMidnightTowardsNextDay(MockKirinDisruptionsFixture):
def test_delay_on_base_pass_midnight_towards_next_day(self):
"""
Relates to "test_cots_update_trip_with_delay_pass_midnight_on_first_station" in kirin
        Test a vehicle_journey departing from stop_point:stopB at 23:55:00 and arriving
        at stop_point:stopA at 00:01:00 the next day.
1. Verify disruption count, vehicle_journeys count and journey
        2. Add a disruption with a delay = 2 minutes at the first station (stop_point:stopB) so that
        there is still a pass midnight
3. Update the disruption with a delay = 6 minutes at first station and delay = 5 minutes
at second station so that there is no more pass midnight and the departure is the day after
4. Update the disruption with a smaller delay on first station and advance on arrival station
so that there is no pass midnight and the departure is the same day as original (base_schedule)
"""
def journey_base_schedule_for_day_before(resp):
assert resp['journeys'][0]['departure_date_time'] == '20120614T235500'
assert resp['journeys'][0]['arrival_date_time'] == '20120615T000100'
assert resp['journeys'][0]['sections'][0]['display_informations']['name'] == 'PM'
assert resp['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
def journey_base_schedule_for_next_day(resp):
assert resp['journeys'][0]['departure_date_time'] == '20120616T235500'
assert resp['journeys'][0]['arrival_date_time'] == '20120617T000100'
assert resp['journeys'][0]['sections'][0]['display_informations']['name'] == 'PM'
assert resp['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
        disruption_query = 'disruptions?_current_datetime={dt}'.format(dt='20120615T080000')
initial_nb_disruptions = len(self.query_region(disruption_query)['disruptions'])
pt_response = self.query_region('vehicle_journeys')
initial_nb_vehicle_journeys = len(pt_response['vehicle_journeys'])
empty_query = (
"journeys?from={f}&to={to}&data_freshness=realtime&max_duration_to_pt=0&"
"datetime={dt}&_current_datetime={dt}"
)
# Check journeys in realtime for 20120615(the day of the future disruption) from B to A
        # vjPM circulates every day with departure at 23:55:00 and arrival at 00:01:00 the day after
ba_15T23_journey_query = empty_query.format(
f='stop_point:stopB', to='stop_point:stopA', dt='20120615T235000'
)
response = self.query_region(ba_15T23_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120615T235500'
assert response['journeys'][0]['arrival_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'PM'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'base_schedule'
# vjPM circulates the day before at 23:55:00 and arrival at 00:01:00 the day after
ba_14T23_journey_query = empty_query.format(
f='stop_point:stopB', to='stop_point:stopA', dt='20120614T235000'
)
response = self.query_region(ba_14T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_day_before(response)
# vjPM circulates the day after at 23:55:00 and arrival at 00:01:00 the day after
ba_16T23_journey_query = empty_query.format(
f='stop_point:stopB', to='stop_point:stopA', dt='20120616T235000'
)
response = self.query_region(ba_16T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_next_day(response)
# A new disruption with a delay on departure station before midnight
self.send_mock(
"vjPM",
"20120615",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120615T235700"),
departure=tstamp("20120615T235700"),
arrival_delay=2 * 60,
departure_delay=2 * 60,
message="Delay before pass midnight",
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120616T000100"),
departure=tstamp("20120616T000100"),
arrival_delay=0,
),
],
disruption_id='delay_before_pm',
effect='delayed',
)
# A new disruption is added
disruptions_after = self.query_region(disruption_query)
assert initial_nb_disruptions + 1 == len(disruptions_after['disruptions'])
# Now we have 1 more vehicle_journey than before
pt_response = self.query_region('vehicle_journeys')
assert initial_nb_vehicle_journeys + 1 == len(pt_response['vehicle_journeys'])
# Check journeys in realtime for 20120615, the day of the disruption from B to A
        # vjPM circulates with departure at 23:57:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_15T23_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120615T235700'
assert response['journeys'][0]['arrival_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'PM'
assert response['journeys'][0]['sections'][0]['base_departure_date_time'] == '20120615T235500'
assert response['journeys'][0]['sections'][0]['departure_date_time'] == '20120615T235700'
assert response['journeys'][0]['sections'][0]['base_arrival_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['arrival_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
# vjPM circulates the day before at 23:55:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_14T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_day_before(response)
# vjPM circulates the day after at 23:55:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_16T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_next_day(response)
# Disruption is modified with a delay on first station so that there is no more pass midnight
# and the departure is the day after
self.send_mock(
"vjPM",
"20120615",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120616T000100"),
departure=tstamp("20120616T000100"),
arrival_delay=6 * 60,
departure_delay=6 * 60,
message="Departure the next day",
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120616T000600"),
departure=tstamp("20120616T000600"),
arrival_delay=5 * 60,
message="Arrival delayed",
),
],
disruption_id='delay_before_pm',
effect='delayed',
)
# The disruption created above is modified so no disruption is added
disruptions_after = self.query_region(disruption_query)
assert initial_nb_disruptions + 1 == len(disruptions_after['disruptions'])
# We have 1 more vehicle_journey than initial as realtime vj is deleted and a new one is added
pt_response = self.query_region('vehicle_journeys')
assert initial_nb_vehicle_journeys + 1 == len(pt_response['vehicle_journeys'])
# Check journeys in realtime for 20120615, the day of the disruption from B to A
        # vjPM now departs at 00:01:00 and arrives at 00:06:00 the day after (both stops delayed)
response = self.query_region(ba_15T23_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120616T000100'
assert response['journeys'][0]['arrival_date_time'] == '20120616T000600'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'PM'
assert response['journeys'][0]['sections'][0]['base_departure_date_time'] == '20120615T235500'
assert response['journeys'][0]['sections'][0]['departure_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['base_arrival_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['arrival_date_time'] == '20120616T000600'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
# vjPM circulates the day before at 23:55:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_14T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_day_before(response)
# vjPM circulates the day after at 23:55:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_16T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_next_day(response)
# Disruption is modified with a smaller delay on first station and advance on arrival station
# so that there is no pass midnight and the departure is the same day as original (base_schedule)
self.send_mock(
"vjPM",
"20120615",
'modified',
[
UpdatedStopTime(
"stop_point:stopB",
arrival=tstamp("20120615T235600"),
departure=tstamp("20120615T235600"),
arrival_delay=1 * 60,
departure_delay=1 * 60,
message="Departure the same day",
),
UpdatedStopTime(
"stop_point:stopA",
arrival=tstamp("20120615T235900"),
departure=tstamp("20120615T235900"),
arrival_delay=-2 * 60,
message="Arrival advanced",
),
],
disruption_id='delay_before_pm',
effect='delayed',
)
# The disruption created above is modified so no disruption is added
disruptions_after = self.query_region(disruption_query)
assert initial_nb_disruptions + 1 == len(disruptions_after['disruptions'])
# We have 1 more vehicle_journey than initial as realtime vj is deleted and a new one is added
pt_response = self.query_region('vehicle_journeys')
assert initial_nb_vehicle_journeys + 1 == len(pt_response['vehicle_journeys'])
# Check journeys in realtime for 20120615, the day of the disruption from B to A
        # vjPM circulates with departure at 23:56:00 and arrival at 23:59:00 the same day
response = self.query_region(ba_15T23_journey_query)
assert len(response['journeys']) == 1
assert response['journeys'][0]['departure_date_time'] == '20120615T235600'
assert response['journeys'][0]['arrival_date_time'] == '20120615T235900'
assert response['journeys'][0]['sections'][0]['display_informations']['name'] == 'PM'
assert response['journeys'][0]['sections'][0]['base_departure_date_time'] == '20120615T235500'
assert response['journeys'][0]['sections'][0]['departure_date_time'] == '20120615T235600'
assert response['journeys'][0]['sections'][0]['base_arrival_date_time'] == '20120616T000100'
assert response['journeys'][0]['sections'][0]['arrival_date_time'] == '20120615T235900'
assert response['journeys'][0]['sections'][0]['data_freshness'] == 'realtime'
# vjPM circulates the day before at 23:55:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_14T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_day_before(response)
# vjPM circulates the day after at 23:55:00 and arrival at 00:01:00 the day after
response = self.query_region(ba_16T23_journey_query)
assert len(response['journeys']) == 1
journey_base_schedule_for_next_day(response)
def make_mock_kirin_item(
vj_id,
date,
status='canceled',
new_stop_time_list=[],
disruption_id=None,
effect=None,
physical_mode_id=None,
headsign=None,
):
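    """
    Build a serialized GTFS-RT FeedMessage (with Kirin extensions) describing one trip update.

    'status' fills the deprecated trip schedule_relationship, 'effect' fills the Kirin effect
    extension, and every UpdatedStopTime in new_stop_time_list becomes a stop_time_update
    carrying its times, delays, stop-time statuses and optional message.
    """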
feed_message = gtfs_realtime_pb2.FeedMessage()
feed_message.header.gtfs_realtime_version = '1.0'
feed_message.header.incrementality = gtfs_realtime_pb2.FeedHeader.DIFFERENTIAL
feed_message.header.timestamp = 0
entity = feed_message.entity.add()
entity.id = disruption_id or "{}".format(uuid.uuid1())
trip_update = entity.trip_update
trip = trip_update.trip
trip.trip_id = vj_id
trip.start_date = date
trip.Extensions[kirin_pb2.contributor] = rt_topic
if headsign:
trip_update.Extensions[kirin_pb2.headsign] = headsign
if physical_mode_id:
trip_update.vehicle.Extensions[kirin_pb2.physical_mode_id] = physical_mode_id
if effect == 'unknown':
trip_update.Extensions[kirin_pb2.effect] = gtfs_realtime_pb2.Alert.UNKNOWN_EFFECT
elif effect == 'modified':
trip_update.Extensions[kirin_pb2.effect] = gtfs_realtime_pb2.Alert.MODIFIED_SERVICE
elif effect == 'delayed':
trip_update.Extensions[kirin_pb2.effect] = gtfs_realtime_pb2.Alert.SIGNIFICANT_DELAYS
elif effect == 'detour':
trip_update.Extensions[kirin_pb2.effect] = gtfs_realtime_pb2.Alert.DETOUR
elif effect == 'reduced_service':
trip_update.Extensions[kirin_pb2.effect] = gtfs_realtime_pb2.Alert.REDUCED_SERVICE
elif effect == 'additional_service':
trip_update.Extensions[kirin_pb2.effect] = gtfs_realtime_pb2.Alert.ADDITIONAL_SERVICE
if status == 'canceled':
# TODO: remove this deprecated code (for retrocompatibility with Kirin < 0.8.0 only)
trip.schedule_relationship = gtfs_realtime_pb2.TripDescriptor.CANCELED
elif status in ['modified', 'added']:
# TODO: remove this deprecated code (for retrocompatibility with Kirin < 0.8.0 only)
if status == 'modified':
trip.schedule_relationship = gtfs_realtime_pb2.TripDescriptor.SCHEDULED
elif status == 'added':
trip.schedule_relationship = gtfs_realtime_pb2.TripDescriptor.ADDED
for st in new_stop_time_list:
stop_time_update = trip_update.stop_time_update.add()
stop_time_update.stop_id = st.stop_id
stop_time_update.arrival.time = st.arrival
stop_time_update.arrival.delay = st.arrival_delay
stop_time_update.departure.time = st.departure
stop_time_update.departure.delay = st.departure_delay
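            # Translate the skipped/added/detour flags of a stop time into the Kirin stop_time_event_status enum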
def get_stop_time_status(is_skipped=False, is_added=False, is_detour=False):
if is_skipped:
if is_detour:
return kirin_pb2.DELETED_FOR_DETOUR
return kirin_pb2.DELETED
if is_added:
if is_detour:
return kirin_pb2.ADDED_FOR_DETOUR
return kirin_pb2.ADDED
return kirin_pb2.SCHEDULED
stop_time_update.arrival.Extensions[kirin_pb2.stop_time_event_status] = get_stop_time_status(
st.arrival_skipped, st.is_added, st.is_detour
)
stop_time_update.departure.Extensions[kirin_pb2.stop_time_event_status] = get_stop_time_status(
st.departure_skipped, st.is_added, st.is_detour
)
if st.message:
stop_time_update.Extensions[kirin_pb2.stoptime_message] = st.message
else:
# TODO
pass
return feed_message.SerializeToString()
| agpl-3.0 |
ximenesuk/openmicroscopy | components/tools/OmeroWeb/omeroweb/webclient/controller/history.py | 3 | 8170 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2011 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import calendar
import datetime
import time
from django.conf import settings
from django.core.urlresolvers import reverse
from webclient.controller import BaseController
class BaseCalendar(BaseController):
day = None
month = None
year = None
next_month = None
next_month_name = None
last_month = None
last_month_name = None
next_year = None
last_year = None
def __init__(self, conn, year=None, month=None, day=None, eid=None, **kw):
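        """Set up the calendar for the given year/month (and optional day), scoped to experimenter `eid` (defaults to the current user)."""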
BaseController.__init__(self, conn)
self.year = int(year)
self.month = int(month)
if eid is None:
self.eid = self.conn.getEventContext().userId
else:
self.eid = eid
if day:
self.day = int(day)
date = datetime.datetime.strptime(("%i-%i-%i" % (self.year, self.month, self.day)), "%Y-%m-%d")
self.displayDate = '%s %s' % (date.strftime("%A, %d"), date.strftime("%B %Y"))
self.nameday = date.strftime("%A")
else:
date = datetime.datetime.strptime(("%i-%i" % (self.year, self.month)), "%Y-%m")
def create_calendar(self):
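        """Build the month grid (self.cal_weeks), each day cell carrying counters of images, datasets and projects created that day."""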
calendar.setfirstweekday(settings.FIRST_DAY_OF_WEEK)
now = datetime.datetime(self.year, self.month, 1)
if self.month == 12:
self.next_month = now.replace(year=now.year+1, month=1)
self.next_year = self.year+1
else:
self.next_month = now.replace(month=now.month+1)
self.next_year = self.year
if self.month == 1:
self.last_month = now.replace(year=self.year-1, month=12)
self.last_year = self.year-1
else:
self.last_month = now.replace(month=now.month-1)
self.last_year = self.year
next_month_name = self.next_month.strftime('%B')
last_month_name = self.last_month.strftime('%B')
self.week_day_labels = [x for x in calendar.weekheader(5).split(' ') if x != '']
self.current_month = datetime.datetime(self.year, self.month, 1)
self.month_name = calendar.month_name[self.month]
if self.month == 12:
self.next_month = self.current_month.replace(year=self.year+1, month=1)
else:
self.next_month = self.current_month.replace(month=self.current_month.month+1)
self.next_month_name = self.next_month.strftime('%B')
if self.month == 1:
self.last_month = self.current_month.replace(year=self.year-1, month=12)
else:
self.last_month = self.current_month.replace(month=self.current_month.month-1)
self.last_month_name = self.last_month.strftime('%B')
self.cal_weeks = calendar.monthcalendar(self.year, self.month)
self.monthrange = calendar.monthrange(self.year, self.month)[1]
self.cal_days = []
items = self.calendar_items(self.month, self.monthrange)
for week,day in [(week,day) for week in xrange(0,len(self.cal_weeks)) for day in xrange(0,7)]:
            imgCounter = 0
            rdCounter = 0
            dsCounter = 0
            prCounter = 0
d = int(self.cal_weeks[week][day])
if d > 0:
t_items = {'image':[], 'dataset':[], 'project':[]}
for item in items.get(d):
if item.get('type') == 'ome.model.core.Image':
try:
t_items['image'].index(item.get('id'))
except:
imgCounter += 1
t_items['image'].append(item.get('id'))
elif item.get('type') == 'ome.model.containers.Dataset':
try:
t_items['dataset'].index(item.get('id'))
except:
dsCounter += 1
t_items['dataset'].append(item.get('id'))
elif item.get('type') == 'ome.model.containers.Project':
try:
t_items['project'].index(item.get('id'))
except:
prCounter += 1
t_items['project'].append(item.get('id'))
self.cal_days.append({'day':self.cal_weeks[week][day], 'counter': {'imgCounter':imgCounter, 'dsCounter':dsCounter, 'prCounter':prCounter }})
else:
self.cal_days.append({'day':self.cal_weeks[week][day], 'counter': {}})
self.cal_weeks[week][day] = {'cell': self.cal_days[-1]}
def calendar_items(self, month, monthrange):
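        """Return this month's event logs grouped by day of month, as {day: [{'id', 'type', 'action'}, ...]}."""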
if month < 10:
mn = '0%i' % month
else:
mn = month
d1 = datetime.datetime.strptime(("%i-%s-01 00:00:00" % (self.year, mn)), "%Y-%m-%d %H:%M:%S")
d2 = datetime.datetime.strptime(("%i-%s-%i 23:59:59" % (self.year, mn, monthrange)), "%Y-%m-%d %H:%M:%S")
start = long(time.mktime(d1.timetuple())+1e-6*d1.microsecond)*1000
end = long(time.mktime(d2.timetuple())+1e-6*d2.microsecond)*1000
all_logs = self.conn.getEventsByPeriod(start, end, self.eid)
items = dict()
for d in xrange(1,monthrange+1):
items[d] = list()
for i in all_logs:
for d in items:
if time.gmtime(i.event.time.val / 1000).tm_mday == d:
items[d].append({'id':i.entityId.val, 'type': i.entityType.val, 'action': i.action.val})
return items
def month_range(self, year, month):
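        """Return a (first day of the following month, last day of the given month) tuple of dates."""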
if month == 12:
year += 1
month = 1
else:
month += 1
return (datetime.date(year, month, 1), datetime.date(year, month, 1)-datetime.timedelta(days=1))
def get_items(self, page=None):
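        """Load the projects, datasets and images logged on the selected day into self.day_items, together with paging info."""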
if self.month < 10:
mn = '0%i' % self.month
else:
mn = self.month
if self.day < 10:
dy = '0%i' % self.day
else:
dy = self.day
d1 = datetime.datetime.strptime(('%i-%s-%s 00:00:00' % (self.year, mn, dy)), "%Y-%m-%d %H:%M:%S")
d2 = datetime.datetime.strptime(('%i-%s-%s 23:59:59' % (self.year, mn, dy)), "%Y-%m-%d %H:%M:%S")
start = long(time.mktime(d1.timetuple())+1e-6*d1.microsecond)*1000
end = long(time.mktime(d2.timetuple())+1e-6*d2.microsecond)*1000
self.day_items = list()
self.day_items_size = 0
self.total_items_size = self.conn.countDataByPeriod(start, end, self.eid)
obj_logs = self.conn.getDataByPeriod(start=start, end=end, eid=self.eid, page=page)
obj_logs_counter = self.conn.countDataByPeriod(start, end, self.eid)
if len(obj_logs['image']) > 0 or len(obj_logs['dataset']) > 0 or len(obj_logs['project']) > 0:
self.day_items.append({'project':obj_logs['project'], 'dataset':obj_logs['dataset'], 'image':obj_logs['image']})
self.day_items_size = len(obj_logs['project'])+len(obj_logs['dataset'])+len(obj_logs['image'])
self.paging = self.doPaging(page, self.day_items_size, obj_logs_counter)
| gpl-2.0 |
Shadow6363/Competitions | 2015/Advent of Code/Day23.py | 1 | 1537 | # -*- coding: utf-8 -*-
import sys
class ChristmasComputer(object):
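    """Minimal register machine from Advent of Code 2015, day 23.

    Registers 'a' and 'b' hold integers and 'pc' is the program counter; each
    instruction method updates a register and/or moves the program counter.
    """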
def __init__(self, a=0, b=0):
self.registers = {
'a': a,
'b': b,
'pc': 0
}
def hlf(self, r):
self.registers[r] /= 2
self.registers['pc'] += 1
def tpl(self, r):
self.registers[r] *= 3
self.registers['pc'] += 1
def inc(self, r):
self.registers[r] += 1
self.registers['pc'] += 1
def jmp(self, offset):
offset = int(offset)
self.registers['pc'] += offset
def jie(self, r, offset):
offset = int(offset)
if self.registers[r] % 2 == 0:
self.registers['pc'] += offset
else:
self.registers['pc'] += 1
def jio(self, r, offset):
offset = int(offset)
if self.registers[r] == 1:
self.registers['pc'] += offset
else:
self.registers['pc'] += 1
def main():
instructions = [instruction.strip().split(' ', 1) for instruction in sys.stdin]
computer = ChristmasComputer(1)
instruction_map = {
'hlf': computer.hlf,
'tpl': computer.tpl,
'inc': computer.inc,
'jmp': computer.jmp,
'jie': computer.jie,
'jio': computer.jio
}
while computer.registers['pc'] < len(instructions):
instruction, arg = instructions[computer.registers['pc']]
instruction_map[instruction](*arg.split(', '))
print computer.registers['b']
if __name__ == '__main__':
main()
| unlicense |
Nikoli/youtube-dl | youtube_dl/extractor/googlesearch.py | 168 | 1699 | from __future__ import unicode_literals
import itertools
import re
from .common import SearchInfoExtractor
from ..compat import (
compat_urllib_parse,
)
class GoogleSearchIE(SearchInfoExtractor):
IE_DESC = 'Google Video search'
_MAX_RESULTS = 1000
IE_NAME = 'video.google:search'
_SEARCH_KEY = 'gvsearch'
_TEST = {
'url': 'gvsearch15:python language',
'info_dict': {
'id': 'python language',
'title': 'python language',
},
'playlist_count': 15,
}
def _get_n_results(self, query, n):
"""Get a specified number of results for a query"""
entries = []
res = {
'_type': 'playlist',
'id': query,
'title': query,
}
for pagenum in itertools.count():
result_url = (
'http://www.google.com/search?tbm=vid&q=%s&start=%s&hl=en'
% (compat_urllib_parse.quote_plus(query), pagenum * 10))
webpage = self._download_webpage(
result_url, 'gvsearch:' + query,
note='Downloading result page ' + str(pagenum + 1))
for hit_idx, mobj in enumerate(re.finditer(
r'<h3 class="r"><a href="([^"]+)"', webpage)):
# Skip playlists
if not re.search(r'id="vidthumb%d"' % (hit_idx + 1), webpage):
continue
entries.append({
'_type': 'url',
'url': mobj.group(1)
})
if (len(entries) >= n) or not re.search(r'id="pnnext"', webpage):
res['entries'] = entries[:n]
return res
| unlicense |
HyperBaton/ansible | lib/ansible/modules/cloud/amazon/redshift_subnet_group.py | 10 | 5994 | #!/usr/bin/python
# Copyright 2014 Jens Carl, Hothead Games Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
author:
- "Jens Carl (@j-carl), Hothead Games Inc."
module: redshift_subnet_group
version_added: "2.2"
short_description: manage Redshift cluster subnet groups
description:
- Create, modifies, and deletes Redshift cluster subnet groups.
options:
state:
description:
- Specifies whether the subnet should be present or absent.
required: true
choices: ['present', 'absent' ]
type: str
group_name:
description:
- Cluster subnet group name.
required: true
aliases: ['name']
type: str
group_description:
description:
- Database subnet group description.
aliases: ['description']
type: str
group_subnets:
description:
- List of subnet IDs that make up the cluster subnet group.
aliases: ['subnets']
type: list
elements: str
requirements: [ 'boto' ]
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Create a Redshift subnet group
- local_action:
module: redshift_subnet_group
state: present
group_name: redshift-subnet
group_description: Redshift subnet
group_subnets:
- 'subnet-aaaaa'
- 'subnet-bbbbb'
# Remove subnet group
- redshift_subnet_group:
state: absent
group_name: redshift-subnet
'''
RETURN = '''
group:
description: dictionary containing all Redshift subnet group information
returned: success
type: complex
contains:
name:
description: name of the Redshift subnet group
returned: success
type: str
sample: "redshift_subnet_group_name"
vpc_id:
description: Id of the VPC where the subnet is located
returned: success
type: str
sample: "vpc-aabb1122"
'''
try:
import boto
import boto.redshift
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO, connect_to_aws, ec2_argument_spec, get_aws_connection_info
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
group_name=dict(required=True, aliases=['name']),
group_description=dict(required=False, aliases=['description']),
group_subnets=dict(required=False, aliases=['subnets'], type='list'),
))
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto v2.9.0+ required for this module')
state = module.params.get('state')
group_name = module.params.get('group_name')
group_description = module.params.get('group_description')
group_subnets = module.params.get('group_subnets')
if state == 'present':
for required in ('group_name', 'group_description', 'group_subnets'):
if not module.params.get(required):
module.fail_json(msg=str("parameter %s required for state='present'" % required))
else:
for not_allowed in ('group_description', 'group_subnets'):
if module.params.get(not_allowed):
module.fail_json(msg=str("parameter %s not allowed for state='absent'" % not_allowed))
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg=str("Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file"))
# Connect to the Redshift endpoint.
try:
conn = connect_to_aws(boto.redshift, region, **aws_connect_params)
except boto.exception.JSONResponseError as e:
module.fail_json(msg=str(e))
try:
changed = False
exists = False
group = None
try:
matching_groups = conn.describe_cluster_subnet_groups(group_name, max_records=100)
exists = len(matching_groups) > 0
except boto.exception.JSONResponseError as e:
if e.body['Error']['Code'] != 'ClusterSubnetGroupNotFoundFault':
# if e.code != 'ClusterSubnetGroupNotFoundFault':
module.fail_json(msg=str(e))
if state == 'absent':
if exists:
conn.delete_cluster_subnet_group(group_name)
changed = True
else:
if not exists:
new_group = conn.create_cluster_subnet_group(group_name, group_description, group_subnets)
group = {
'name': new_group['CreateClusterSubnetGroupResponse']['CreateClusterSubnetGroupResult']
['ClusterSubnetGroup']['ClusterSubnetGroupName'],
'vpc_id': new_group['CreateClusterSubnetGroupResponse']['CreateClusterSubnetGroupResult']
['ClusterSubnetGroup']['VpcId'],
}
else:
changed_group = conn.modify_cluster_subnet_group(group_name, group_subnets, description=group_description)
group = {
'name': changed_group['ModifyClusterSubnetGroupResponse']['ModifyClusterSubnetGroupResult']
['ClusterSubnetGroup']['ClusterSubnetGroupName'],
'vpc_id': changed_group['ModifyClusterSubnetGroupResponse']['ModifyClusterSubnetGroupResult']
['ClusterSubnetGroup']['VpcId'],
}
changed = True
except boto.exception.JSONResponseError as e:
module.fail_json(msg=str(e))
module.exit_json(changed=changed, group=group)
if __name__ == '__main__':
main()
| gpl-3.0 |
tinloaf/home-assistant | tests/components/test_mythicbeastsdns.py | 12 | 2026 | """Test the Mythic Beasts DNS component."""
import logging
import asynctest
from homeassistant.setup import async_setup_component
from homeassistant.components import mythicbeastsdns
_LOGGER = logging.getLogger(__name__)
async def mbddns_update_mock(domain, password, host, ttl=60, session=None):
"""Mock out mythic beasts updater."""
if password == 'incorrect':
_LOGGER.error("Updating Mythic Beasts failed: Not authenticated")
return False
if host[0] == '$':
_LOGGER.error("Updating Mythic Beasts failed: Invalid Character")
return False
return True
@asynctest.mock.patch('mbddns.update', new=mbddns_update_mock)
async def test_update(hass):
"""Run with correct values and check true is returned."""
result = await async_setup_component(
hass,
mythicbeastsdns.DOMAIN,
{
mythicbeastsdns.DOMAIN: {
'domain': 'example.org',
'password': 'correct',
'host': 'hass'
}
}
)
assert result
@asynctest.mock.patch('mbddns.update', new=mbddns_update_mock)
async def test_update_fails_if_wrong_token(hass):
"""Run with incorrect token and check false is returned."""
result = await async_setup_component(
hass,
mythicbeastsdns.DOMAIN,
{
mythicbeastsdns.DOMAIN: {
'domain': 'example.org',
'password': 'incorrect',
'host': 'hass'
}
}
)
assert not result
@asynctest.mock.patch('mbddns.update', new=mbddns_update_mock)
async def test_update_fails_if_invalid_host(hass):
"""Run with invalid characters in host and check false is returned."""
result = await async_setup_component(
hass,
mythicbeastsdns.DOMAIN,
{
mythicbeastsdns.DOMAIN: {
'domain': 'example.org',
'password': 'correct',
'host': '$hass'
}
}
)
assert not result
| apache-2.0 |
anryko/ansible | lib/ansible/modules/packaging/os/sorcery.py | 52 | 20201 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015-2016, Vlad Glagolev <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: sorcery
short_description: Package manager for Source Mage GNU/Linux
description:
- Manages "spells" on Source Mage GNU/Linux using I(sorcery) toolchain
author: "Vlad Glagolev (@vaygr)"
version_added: "2.3"
notes:
- When all three components are selected, the update goes by the sequence --
Sorcery -> Grimoire(s) -> Spell(s); you cannot override it.
- grimoire handling (i.e. add/remove, including SCM/rsync versions) is not
yet supported.
requirements:
- bash
options:
name:
description:
- Name of the spell
- multiple names can be given, separated by commas
- special value '*' in conjunction with states C(latest) or
C(rebuild) will update or rebuild the whole system respectively
aliases: ["spell"]
state:
description:
- Whether to cast, dispel or rebuild a package
- state C(cast) is an equivalent of C(present), not C(latest)
- state C(latest) always triggers C(update_cache=yes)
- state C(rebuild) implies cast of all specified spells, not only
those existed before
choices: ["present", "latest", "absent", "cast", "dispelled", "rebuild"]
default: "present"
depends:
description:
- Comma-separated list of _optional_ dependencies to build a spell
(or make sure it is built) with; use +/- in front of dependency
to turn it on/off ('+' is optional though)
- this option is ignored if C(name) parameter is equal to '*' or
contains more than one spell
- providers must be supplied in the form recognized by Sorcery, e.g.
'openssl(SSL)'
update:
description:
- Whether or not to update sorcery scripts at the very first stage
type: bool
default: 'no'
update_cache:
description:
- Whether or not to update grimoire collection before casting spells
type: bool
default: 'no'
aliases: ["update_codex"]
cache_valid_time:
description:
- Time in seconds to invalidate grimoire collection on update
- especially useful for SCM and rsync grimoires
- makes sense only in pair with C(update_cache)
'''
EXAMPLES = '''
# Make sure spell 'foo' is installed
- sorcery:
spell: foo
state: present
# Make sure spells 'foo', 'bar' and 'baz' are removed
- sorcery:
spell: foo,bar,baz
state: absent
# Make sure spell 'foo' with dependencies 'bar' and 'baz' is installed
- sorcery:
spell: foo
depends: bar,baz
state: present
# Make sure spell 'foo' with 'bar' and without 'baz' dependencies is installed
- sorcery:
spell: foo
depends: +bar,-baz
state: present
# Make sure spell 'foo' with libressl (providing SSL) dependency is installed
- sorcery:
spell: foo
depends: libressl(SSL)
state: present
# Playbook: make sure spells with/without required dependencies (if any) are installed
- sorcery:
name: "{{ item.spell }}"
depends: "{{ item.depends | default(None) }}"
state: present
loop:
- { spell: 'vifm', depends: '+file,-gtk+2' }
- { spell: 'fwknop', depends: 'gpgme' }
- { spell: 'pv,tnftp,tor' }
# Install the latest version of spell 'foo' using regular glossary
- sorcery:
name: foo
state: latest
# Rebuild spell 'foo'
- sorcery:
spell: foo
state: rebuild
# Rebuild the whole system, but update Sorcery and Codex first
- sorcery:
spell: '*'
state: rebuild
update: yes
update_cache: yes
# Refresh the grimoire collection if it's 1 day old using native sorcerous alias
- sorcery:
update_codex: yes
cache_valid_time: 86400
# Update only Sorcery itself
- sorcery:
update: yes
'''
RETURN = '''
'''
import datetime
import fileinput
import os
import re
import shutil
import sys
# auto-filled at module init
SORCERY = {
'sorcery': None,
'scribe': None,
'cast': None,
'dispel': None,
'gaze': None
}
SORCERY_LOG_DIR = "/var/log/sorcery"
SORCERY_STATE_DIR = "/var/state/sorcery"
def get_sorcery_ver(module):
""" Get Sorcery version. """
cmd_sorcery = "%s --version" % SORCERY['sorcery']
rc, stdout, stderr = module.run_command(cmd_sorcery)
if rc != 0 or not stdout:
module.fail_json(msg="unable to get Sorcery version")
return stdout.strip()
def codex_fresh(codex, module):
""" Check if grimoire collection is fresh enough. """
if not module.params['cache_valid_time']:
return False
timedelta = datetime.timedelta(seconds=module.params['cache_valid_time'])
for grimoire in codex:
lastupdate_path = os.path.join(SORCERY_STATE_DIR,
grimoire + ".lastupdate")
try:
mtime = os.stat(lastupdate_path).st_mtime
except Exception:
return False
lastupdate_ts = datetime.datetime.fromtimestamp(mtime)
# if any grimoire is not fresh, we invalidate the Codex
if lastupdate_ts + timedelta < datetime.datetime.now():
return False
return True
def codex_list(module):
""" List valid grimoire collection. """
codex = {}
cmd_scribe = "%s index" % SORCERY['scribe']
rc, stdout, stderr = module.run_command(cmd_scribe)
if rc != 0:
module.fail_json(msg="unable to list grimoire collection, fix your Codex")
rex = re.compile(r"^\s*\[\d+\] : (?P<grim>[\w\-+.]+) : [\w\-+./]+(?: : (?P<ver>[\w\-+.]+))?\s*$")
# drop 4-line header and empty trailing line
for line in stdout.splitlines()[4:-1]:
match = rex.match(line)
if match:
codex[match.group('grim')] = match.group('ver')
if not codex:
module.fail_json(msg="no grimoires to operate on; add at least one")
return codex
def update_sorcery(module):
""" Update sorcery scripts.
This runs 'sorcery update' ('sorcery -u'). Check mode always returns a
positive change value.
"""
changed = False
if module.check_mode:
if not module.params['name'] and not module.params['update_cache']:
module.exit_json(changed=True, msg="would have updated Sorcery")
else:
sorcery_ver = get_sorcery_ver(module)
cmd_sorcery = "%s update" % SORCERY['sorcery']
rc, stdout, stderr = module.run_command(cmd_sorcery)
if rc != 0:
module.fail_json(msg="unable to update Sorcery: " + stdout)
if sorcery_ver != get_sorcery_ver(module):
changed = True
if not module.params['name'] and not module.params['update_cache']:
module.exit_json(changed=changed,
msg="successfully updated Sorcery")
def update_codex(module):
""" Update grimoire collections.
This runs 'scribe update'. Check mode always returns a positive change
value when 'cache_valid_time' is used.
"""
params = module.params
changed = False
codex = codex_list(module)
fresh = codex_fresh(codex, module)
if module.check_mode:
if not params['name']:
if not fresh:
changed = True
module.exit_json(changed=changed, msg="would have updated Codex")
elif not fresh or params['name'] and params['state'] == 'latest':
# SILENT is required as a workaround for query() in libgpg
module.run_command_environ_update.update(dict(SILENT='1'))
cmd_scribe = "%s update" % SORCERY['scribe']
rc, stdout, stderr = module.run_command(cmd_scribe)
if rc != 0:
module.fail_json(msg="unable to update Codex: " + stdout)
if codex != codex_list(module):
changed = True
if not params['name']:
module.exit_json(changed=changed,
msg="successfully updated Codex")
def match_depends(module):
""" Check for matching dependencies.
    This inspects the spell's dependencies against the desired states and
    returns 'False' if a recast is needed to match them. It also adds the
    required lines to the system-wide depends file for the recast procedure.
"""
params = module.params
spells = params['name']
depends = {}
depends_ok = True
if len(spells) > 1 or not params['depends']:
return depends_ok
spell = spells[0]
if module.check_mode:
sorcery_depends_orig = os.path.join(SORCERY_STATE_DIR, "depends")
sorcery_depends = os.path.join(SORCERY_STATE_DIR, "depends.check")
try:
shutil.copy2(sorcery_depends_orig, sorcery_depends)
except IOError:
module.fail_json(msg="failed to copy depends.check file")
else:
sorcery_depends = os.path.join(SORCERY_STATE_DIR, "depends")
rex = re.compile(r"^(?P<status>\+?|\-){1}(?P<depend>[a-z0-9]+[a-z0-9_\-\+\.]*(\([A-Z0-9_\-\+\.]+\))*)$")
for d in params['depends'].split(','):
match = rex.match(d)
if not match:
module.fail_json(msg="wrong depends line for spell '%s'" % spell)
# normalize status
if not match.group('status') or match.group('status') == '+':
status = 'on'
else:
status = 'off'
depends[match.group('depend')] = status
# drop providers spec
depends_list = [s.split('(')[0] for s in depends]
cmd_gaze = "%s -q version %s" % (SORCERY['gaze'], ' '.join(depends_list))
rc, stdout, stderr = module.run_command(cmd_gaze)
if rc != 0:
module.fail_json(msg="wrong dependencies for spell '%s'" % spell)
fi = fileinput.input(sorcery_depends, inplace=True)
try:
try:
for line in fi:
if line.startswith(spell + ':'):
match = None
for d in depends:
# when local status is 'off' and dependency is provider,
# use only provider value
d_offset = d.find('(')
if d_offset == -1:
d_p = ''
else:
d_p = re.escape(d[d_offset:])
# .escape() is needed mostly for the spells like 'libsigc++'
rex = re.compile("%s:(?:%s|%s):(?P<lstatus>on|off):optional:" %
(re.escape(spell), re.escape(d), d_p))
match = rex.match(line)
# we matched the line "spell:dependency:on|off:optional:"
if match:
# if we also matched the local status, mark dependency
# as empty and put it back into depends file
if match.group('lstatus') == depends[d]:
depends[d] = None
sys.stdout.write(line)
# status is not that we need, so keep this dependency
# in the list for further reverse switching;
# stop and process the next line in both cases
break
if not match:
sys.stdout.write(line)
else:
sys.stdout.write(line)
except IOError:
module.fail_json(msg="I/O error on the depends file")
finally:
fi.close()
depends_new = [v for v in depends if depends[v]]
if depends_new:
        try:
            fl = open(sorcery_depends, 'a')
            try:
                for k in depends_new:
                    fl.write("%s:%s:%s:optional::\n" % (spell, k, depends[k]))
            finally:
                fl.close()
        except IOError:
            module.fail_json(msg="I/O error on the depends file")
depends_ok = False
if module.check_mode:
try:
os.remove(sorcery_depends)
except IOError:
module.fail_json(msg="failed to clean up depends.backup file")
return depends_ok
def manage_spells(module):
""" Cast or dispel spells.
    This manages the whole system ('*'), a list of spells or a single spell.
    The 'cast' command is used to install or rebuild spells, while 'dispel'
    takes care of their removal from the system.
"""
params = module.params
spells = params['name']
sorcery_queue = os.path.join(SORCERY_LOG_DIR, "queue/install")
if spells == '*':
if params['state'] == 'latest':
# back up original queue
try:
os.rename(sorcery_queue, sorcery_queue + ".backup")
except IOError:
module.fail_json(msg="failed to backup the update queue")
# see update_codex()
module.run_command_environ_update.update(dict(SILENT='1'))
cmd_sorcery = "%s queue"
rc, stdout, stderr = module.run_command(cmd_sorcery)
if rc != 0:
module.fail_json(msg="failed to generate the update queue")
try:
queue_size = os.stat(sorcery_queue).st_size
except Exception:
module.fail_json(msg="failed to read the update queue")
if queue_size != 0:
if module.check_mode:
try:
os.rename(sorcery_queue + ".backup", sorcery_queue)
except IOError:
module.fail_json(msg="failed to restore the update queue")
module.exit_json(changed=True, msg="would have updated the system")
cmd_cast = "%s --queue" % SORCERY['cast']
rc, stdout, stderr = module.run_command(cmd_cast)
if rc != 0:
module.fail_json(msg="failed to update the system")
module.exit_json(changed=True, msg="successfully updated the system")
else:
module.exit_json(changed=False, msg="the system is already up to date")
elif params['state'] == 'rebuild':
if module.check_mode:
module.exit_json(changed=True, msg="would have rebuilt the system")
cmd_sorcery = "%s rebuild" % SORCERY['sorcery']
rc, stdout, stderr = module.run_command(cmd_sorcery)
if rc != 0:
module.fail_json(msg="failed to rebuild the system: " + stdout)
module.exit_json(changed=True, msg="successfully rebuilt the system")
else:
module.fail_json(msg="unsupported operation on '*' name value")
else:
if params['state'] in ('present', 'latest', 'rebuild', 'absent'):
# extract versions from the 'gaze' command
cmd_gaze = "%s -q version %s" % (SORCERY['gaze'], ' '.join(spells))
rc, stdout, stderr = module.run_command(cmd_gaze)
# fail if any of spells cannot be found
if rc != 0:
module.fail_json(msg="failed to locate spell(s) in the list (%s)" %
', '.join(spells))
cast_queue = []
dispel_queue = []
rex = re.compile(r"[^|]+\|[^|]+\|(?P<spell>[^|]+)\|(?P<grim_ver>[^|]+)\|(?P<inst_ver>[^$]+)")
# drop 2-line header and empty trailing line
for line in stdout.splitlines()[2:-1]:
match = rex.match(line)
cast = False
if params['state'] == 'present':
# spell is not installed..
if match.group('inst_ver') == '-':
# ..so set up depends reqs for it
match_depends(module)
cast = True
# spell is installed..
else:
# ..but does not conform depends reqs
if not match_depends(module):
cast = True
elif params['state'] == 'latest':
# grimoire and installed versions do not match..
if match.group('grim_ver') != match.group('inst_ver'):
# ..so check for depends reqs first and set them up
match_depends(module)
cast = True
# grimoire and installed versions match..
else:
# ..but the spell does not conform depends reqs
if not match_depends(module):
cast = True
elif params['state'] == 'rebuild':
cast = True
# 'absent'
else:
if match.group('inst_ver') != '-':
dispel_queue.append(match.group('spell'))
if cast:
cast_queue.append(match.group('spell'))
if cast_queue:
if module.check_mode:
module.exit_json(changed=True, msg="would have cast spell(s)")
cmd_cast = "%s -c %s" % (SORCERY['cast'], ' '.join(cast_queue))
rc, stdout, stderr = module.run_command(cmd_cast)
if rc != 0:
module.fail_json(msg="failed to cast spell(s): %s" + stdout)
module.exit_json(changed=True, msg="successfully cast spell(s)")
elif params['state'] != 'absent':
module.exit_json(changed=False, msg="spell(s) are already cast")
if dispel_queue:
if module.check_mode:
module.exit_json(changed=True, msg="would have dispelled spell(s)")
cmd_dispel = "%s %s" % (SORCERY['dispel'], ' '.join(dispel_queue))
rc, stdout, stderr = module.run_command(cmd_dispel)
if rc != 0:
module.fail_json(msg="failed to dispel spell(s): %s" + stdout)
module.exit_json(changed=True, msg="successfully dispelled spell(s)")
else:
module.exit_json(changed=False, msg="spell(s) are already dispelled")
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(default=None, aliases=['spell'], type='list'),
state=dict(default='present', choices=['present', 'latest',
'absent', 'cast', 'dispelled', 'rebuild']),
depends=dict(default=None),
update=dict(default=False, type='bool'),
update_cache=dict(default=False, aliases=['update_codex'], type='bool'),
cache_valid_time=dict(default=0, type='int')
),
required_one_of=[['name', 'update', 'update_cache']],
supports_check_mode=True
)
if os.geteuid() != 0:
module.fail_json(msg="root privileges are required for this operation")
for c in SORCERY:
SORCERY[c] = module.get_bin_path(c, True)
# prepare environment: run sorcery commands without asking questions
module.run_command_environ_update = dict(PROMPT_DELAY='0', VOYEUR='0')
params = module.params
# normalize 'state' parameter
if params['state'] in ('present', 'cast'):
params['state'] = 'present'
elif params['state'] in ('absent', 'dispelled'):
params['state'] = 'absent'
if params['update']:
update_sorcery(module)
if params['update_cache'] or params['state'] == 'latest':
update_codex(module)
if params['name']:
manage_spells(module)
# import module snippets
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
| gpl-3.0 |
ashrith/dpkt | dpkt/ssl.py | 3 | 19686 | # $Id: ssl.py 90 2014-04-02 22:06:23Z [email protected] $
# Portion Copyright 2012 Google Inc. All rights reserved.
"""Secure Sockets Layer / Transport Layer Security."""
import dpkt
import ssl_ciphersuites
import struct
import binascii
import traceback
import datetime
#
# Note from April 2011: [email protected] added code that parses SSL3/TLS messages more in depth.
#
# Jul 2012: [email protected] modified and extended SSL support further.
#
class SSL2(dpkt.Packet):
__hdr__ = (
('len', 'H', 0),
('msg', 's', ''),
('pad', 's', ''),
)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
if self.len & 0x8000:
n = self.len = self.len & 0x7FFF
self.msg, self.data = self.data[:n], self.data[n:]
else:
n = self.len = self.len & 0x3FFF
padlen = ord(self.data[0])
self.msg = self.data[1:1+n]
self.pad = self.data[1+n:1+n+padlen]
self.data = self.data[1+n+padlen:]
# SSLv3/TLS versions
SSL3_V = 0x0300
TLS1_V = 0x0301
TLS11_V = 0x0302
TLS12_V = 0x0303
ssl3_versions_str = {
SSL3_V: 'SSL3',
TLS1_V: 'TLS 1.0',
TLS11_V: 'TLS 1.1',
TLS12_V: 'TLS 1.2'
}
SSL3_VERSION_BYTES = set(('\x03\x00', '\x03\x01', '\x03\x02', '\x03\x03'))
# Alert levels
SSL3_AD_WARNING = 1
SSL3_AD_FATAL = 2
alert_level_str = {
SSL3_AD_WARNING: 'SSL3_AD_WARNING',
SSL3_AD_FATAL: 'SSL3_AD_FATAL'
}
# SSL3 alert descriptions
SSL3_AD_CLOSE_NOTIFY = 0
SSL3_AD_UNEXPECTED_MESSAGE = 10 # fatal
SSL3_AD_BAD_RECORD_MAC = 20 # fatal
SSL3_AD_DECOMPRESSION_FAILURE = 30 # fatal
SSL3_AD_HANDSHAKE_FAILURE = 40 # fatal
SSL3_AD_NO_CERTIFICATE = 41
SSL3_AD_BAD_CERTIFICATE = 42
SSL3_AD_UNSUPPORTED_CERTIFICATE = 43
SSL3_AD_CERTIFICATE_REVOKED = 44
SSL3_AD_CERTIFICATE_EXPIRED = 45
SSL3_AD_CERTIFICATE_UNKNOWN = 46
SSL3_AD_ILLEGAL_PARAMETER = 47 # fatal
# TLS1 alert descriptions
TLS1_AD_DECRYPTION_FAILED = 21
TLS1_AD_RECORD_OVERFLOW = 22
TLS1_AD_UNKNOWN_CA = 48 # fatal
TLS1_AD_ACCESS_DENIED = 49 # fatal
TLS1_AD_DECODE_ERROR = 50 # fatal
TLS1_AD_DECRYPT_ERROR = 51
TLS1_AD_EXPORT_RESTRICTION = 60 # fatal
TLS1_AD_PROTOCOL_VERSION = 70 # fatal
TLS1_AD_INSUFFICIENT_SECURITY = 71 # fatal
TLS1_AD_INTERNAL_ERROR = 80 # fatal
TLS1_AD_USER_CANCELLED = 90
TLS1_AD_NO_RENEGOTIATION = 100
#/* codes 110-114 are from RFC3546 */
TLS1_AD_UNSUPPORTED_EXTENSION = 110
TLS1_AD_CERTIFICATE_UNOBTAINABLE = 111
TLS1_AD_UNRECOGNIZED_NAME = 112
TLS1_AD_BAD_CERTIFICATE_STATUS_RESPONSE = 113
TLS1_AD_BAD_CERTIFICATE_HASH_VALUE = 114
TLS1_AD_UNKNOWN_PSK_IDENTITY = 115 # fatal
# Mapping alert types to strings
alert_description_str = {
SSL3_AD_CLOSE_NOTIFY: 'SSL3_AD_CLOSE_NOTIFY',
SSL3_AD_UNEXPECTED_MESSAGE: 'SSL3_AD_UNEXPECTED_MESSAGE',
SSL3_AD_BAD_RECORD_MAC: 'SSL3_AD_BAD_RECORD_MAC',
SSL3_AD_DECOMPRESSION_FAILURE: 'SSL3_AD_DECOMPRESSION_FAILURE',
SSL3_AD_HANDSHAKE_FAILURE: 'SSL3_AD_HANDSHAKE_FAILURE',
SSL3_AD_NO_CERTIFICATE: 'SSL3_AD_NO_CERTIFICATE',
SSL3_AD_BAD_CERTIFICATE: 'SSL3_AD_BAD_CERTIFICATE',
SSL3_AD_UNSUPPORTED_CERTIFICATE: 'SSL3_AD_UNSUPPORTED_CERTIFICATE',
SSL3_AD_CERTIFICATE_REVOKED: 'SSL3_AD_CERTIFICATE_REVOKED',
SSL3_AD_CERTIFICATE_EXPIRED: 'SSL3_AD_CERTIFICATE_EXPIRED',
SSL3_AD_CERTIFICATE_UNKNOWN: 'SSL3_AD_CERTIFICATE_UNKNOWN',
SSL3_AD_ILLEGAL_PARAMETER: 'SSL3_AD_ILLEGAL_PARAMETER',
TLS1_AD_DECRYPTION_FAILED: 'TLS1_AD_DECRYPTION_FAILED',
TLS1_AD_RECORD_OVERFLOW: 'TLS1_AD_RECORD_OVERFLOW',
TLS1_AD_UNKNOWN_CA: 'TLS1_AD_UNKNOWN_CA',
TLS1_AD_ACCESS_DENIED: 'TLS1_AD_ACCESS_DENIED',
TLS1_AD_DECODE_ERROR: 'TLS1_AD_DECODE_ERROR',
TLS1_AD_DECRYPT_ERROR: 'TLS1_AD_DECRYPT_ERROR',
TLS1_AD_EXPORT_RESTRICTION: 'TLS1_AD_EXPORT_RESTRICTION',
TLS1_AD_PROTOCOL_VERSION: 'TLS1_AD_PROTOCOL_VERSION',
TLS1_AD_INSUFFICIENT_SECURITY: 'TLS1_AD_INSUFFICIENT_SECURITY',
TLS1_AD_INTERNAL_ERROR: 'TLS1_AD_INTERNAL_ERROR',
TLS1_AD_USER_CANCELLED: 'TLS1_AD_USER_CANCELLED',
TLS1_AD_NO_RENEGOTIATION: 'TLS1_AD_NO_RENEGOTIATION',
TLS1_AD_UNSUPPORTED_EXTENSION: 'TLS1_AD_UNSUPPORTED_EXTENSION',
TLS1_AD_CERTIFICATE_UNOBTAINABLE: 'TLS1_AD_CERTIFICATE_UNOBTAINABLE',
TLS1_AD_UNRECOGNIZED_NAME: 'TLS1_AD_UNRECOGNIZED_NAME',
TLS1_AD_BAD_CERTIFICATE_STATUS_RESPONSE: 'TLS1_AD_BAD_CERTIFICATE_STATUS_RESPONSE',
TLS1_AD_BAD_CERTIFICATE_HASH_VALUE: 'TLS1_AD_BAD_CERTIFICATE_HASH_VALUE',
TLS1_AD_UNKNOWN_PSK_IDENTITY: 'TLS1_AD_UNKNOWN_PSK_IDENTITY'
}
# struct format strings for parsing buffer lengths
# don't forget, you have to pad a 3-byte value with \x00
_SIZE_FORMATS = ['!B', '!H', '!I', '!I']
def parse_variable_array(buf, lenbytes):
"""
    Parse an array described using the 'Type name<x..y>' syntax from the spec.
    Reads a length at the start of buf and returns that many bytes after it,
    in a tuple with the TOTAL bytes consumed (including the size). This
does not check that the array is the right length for any given datatype.
"""
# first have to figure out how to parse length
assert lenbytes <= 4 # pretty sure 4 is impossible, too
size_format = _SIZE_FORMATS[lenbytes - 1]
padding = '\x00' if lenbytes == 3 else ''
# read off the length
size = struct.unpack(size_format, padding + buf[:lenbytes])[0]
# read the actual data
data = buf[lenbytes:lenbytes + size]
# if len(data) != size: insufficient data
return data, size + lenbytes
class SSL3Exception(Exception):
pass
class TLSRecord(dpkt.Packet):
"""
SSLv3 or TLSv1+ packet.
In addition to the fields specified in the header, there are
    compressed and encrypted fields, indicating whether, in the language
of the spec, this is a TLSPlaintext, TLSCompressed, or
TLSCiphertext. The application will have to figure out when it's
appropriate to change these values.
"""
__hdr__ = (
('type', 'B', 0),
('version', 'H', 0),
('length', 'H', 0),
)
def __init__(self, *args, **kwargs):
# assume plaintext unless specified otherwise in arguments
self.compressed = kwargs.pop('compressed', False)
self.encrypted = kwargs.pop('encrypted', False)
# parent constructor
dpkt.Packet.__init__(self, *args, **kwargs)
# make sure length and data are consistent
self.length = len(self.data)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
header_length = self.__hdr_len__
self.data = buf[header_length:header_length+self.length]
# make sure buffer was long enough
if len(self.data) != self.length:
raise dpkt.NeedData('TLSRecord data was too short.')
# assume compressed and encrypted when it's been parsed from
# raw data
self.compressed = True
self.encrypted = True
class TLSChangeCipherSpec(dpkt.Packet):
"""
ChangeCipherSpec message is just a single byte with value 1
"""
__hdr__ = (('type', 'B', 1),)
class TLSAppData(str):
"""
As far as TLSRecord is concerned, AppData is just an opaque blob.
"""
pass
class TLSAlert(dpkt.Packet):
__hdr__ = (
('level', 'B', 1),
('description', 'B', 0),
)
class TLSHelloRequest(dpkt.Packet):
__hdr__ = tuple()
class TLSClientHello(dpkt.Packet):
__hdr__ = (
('version', 'H', 0x0301),
('random', '32s', '\x00'*32),
) # the rest is variable-length and has to be done manually
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
# now session, cipher suites, extensions are in self.data
self.session_id, pointer = parse_variable_array(self.data, 1)
# print 'pointer',pointer
# handle ciphersuites
ciphersuites, parsed = parse_variable_array(self.data[pointer:], 2)
pointer += parsed
self.num_ciphersuites = len(ciphersuites) / 2
# check len(ciphersuites) % 2 == 0 ?
# compression methods
compression_methods, parsed = parse_variable_array(
self.data[pointer:], 1)
pointer += parsed
self.num_compression_methods = parsed - 1
self.compression_methods = map(ord, compression_methods)
# extensions
class TLSServerHello(dpkt.Packet):
__hdr__ = (
        ('version', 'H', 0x0301),
('random', '32s', '\x00'*32),
) # session is variable, forcing rest to be manual
def unpack(self, buf):
try:
dpkt.Packet.unpack(self, buf)
self.session_id, pointer = parse_variable_array(self.data, 1)
# single cipher suite
self.cipher_suite = struct.unpack('!H', self.data[pointer:pointer+2])[0]
pointer += 2
# single compression method
self.compression = struct.unpack('!B', self.data[pointer:pointer+1])[0]
pointer += 1
# ignore extensions for now
except struct.error:
# probably data too short
raise dpkt.NeedData
class TLSUnknownHandshake(dpkt.Packet):
__hdr__ = tuple()
TLSCertificate = TLSUnknownHandshake
TLSServerKeyExchange = TLSUnknownHandshake
TLSCertificateRequest = TLSUnknownHandshake
TLSServerHelloDone = TLSUnknownHandshake
TLSCertificateVerify = TLSUnknownHandshake
TLSClientKeyExchange = TLSUnknownHandshake
TLSFinished = TLSUnknownHandshake
# mapping of handshake type ids to their names
# and the classes that implement them
HANDSHAKE_TYPES = {
0: ('HelloRequest', TLSHelloRequest),
1: ('ClientHello', TLSClientHello),
2: ('ServerHello', TLSServerHello),
11: ('Certificate', TLSCertificate),
12: ('ServerKeyExchange', TLSServerKeyExchange),
13: ('CertificateRequest', TLSCertificateRequest),
14: ('ServerHelloDone', TLSServerHelloDone),
15: ('CertificateVerify', TLSCertificateVerify),
16: ('ClientKeyExchange', TLSClientKeyExchange),
20: ('Finished', TLSFinished),
}
class TLSHandshake(dpkt.Packet):
'''
A TLS Handshake message
This goes for all messages encapsulated in the Record layer, but especially
important for handshakes and app data: A message may be spread across a
number of TLSRecords, in addition to the possibility of there being more
than one in a given Record. You have to put together the contents of
    TLSRecords yourself.
'''
# struct.unpack can't handle the 3-byte int, so we parse it as bytes
# (and store it as bytes so dpkt doesn't get confused), and turn it into
# an int in a user-facing property
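    # Layout sketch (cf. TLSHandshakeTest below): 1-byte handshake type +
    # 3-byte big-endian length + body, e.g. '\x00\x00\x00\x01\xff' parses as
    # a HelloRequest with a 1-byte body.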
__hdr__ = (
('type', 'B', 0),
('length_bytes', '3s', 0),
)
def unpack(self, buf):
dpkt.Packet.unpack(self, buf)
# Wait, might there be more than one message of self.type?
embedded_type = HANDSHAKE_TYPES.get(self.type, None)
if embedded_type is None:
raise SSL3Exception('Unknown or invalid handshake type %d' %
self.type)
# only take the right number of bytes
self.data = self.data[:self.length]
if len(self.data) != self.length:
raise dpkt.NeedData
# get class out of embedded_type tuple
self.data = embedded_type[1](self.data)
@property
def length(self):
return struct.unpack('!I', '\x00' + self.length_bytes)[0]
RECORD_TYPES = {
20: TLSChangeCipherSpec,
21: TLSAlert,
22: TLSHandshake,
23: TLSAppData,
}
class SSLFactory(object):
def __new__(cls, buf):
v = buf[1:3]
if v in [ '\x03\x00', '\x03\x01', '\x03\x02' ]:
return SSL3(buf)
# SSL2 has no characteristic header or magic bytes, so we just assume
# that the msg is an SSL2 msg if it is not detected as SSL3+
return SSL2(buf)
def TLSMultiFactory(buf):
'''
    Attempt to parse one or more TLSRecords out of buf
Args:
buf: string containing SSL/TLS messages. May have an incomplete record
on the end
Returns:
[TLSRecord]
int, total bytes consumed, != len(buf) if an incomplete record was left at
the end.
Raises SSL3Exception.
'''
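    # Typical use (sketch): call on a reassembled TCP payload and keep the
    # unconsumed tail for the next call, e.g.
    #     records, parsed = TLSMultiFactory(buf)
    #     buf = buf[parsed:]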
i, n = 0, len(buf)
msgs = []
while i < n:
v = buf[i+1:i+3]
if v in SSL3_VERSION_BYTES:
try:
msg = TLSRecord(buf[i:])
msgs.append(msg)
except dpkt.NeedData:
break
else:
raise SSL3Exception('Bad TLS version in buf: %r' % buf[i:i+5])
i += len(msg)
return msgs, i
import unittest
_hexdecode = binascii.a2b_hex
class TLSRecordTest(unittest.TestCase):
"""
Test basic TLSRecord functionality
For this test, the contents of the record doesn't matter, since we're not
parsing the next layer.
"""
def setUp(self):
# add some extra data, to make sure length is parsed correctly
self.p = TLSRecord('\x17\x03\x01\x00\x08abcdefghzzzzzzzzzzz')
def testContentType(self):
self.assertEqual(self.p.type, 23)
def testVersion(self):
self.assertEqual(self.p.version, 0x0301)
def testLength(self):
self.assertEqual(self.p.length, 8)
def testData(self):
self.assertEqual(self.p.data, 'abcdefgh')
def testInitialFlags(self):
self.assertTrue(self.p.compressed)
self.assertTrue(self.p.encrypted)
def testRepack(self):
p2 = TLSRecord(type=23, version=0x0301, data='abcdefgh')
self.assertEqual(p2.type, 23)
self.assertEqual(p2.version, 0x0301)
self.assertEqual(p2.length, 8)
self.assertEqual(p2.data, 'abcdefgh')
self.assertEqual(p2.pack(), self.p.pack())
def testTotalLength(self):
# that len(p) includes header
self.assertEqual(len(self.p), 13)
def testRaisesNeedDataWhenBufIsShort(self):
self.assertRaises(
dpkt.NeedData,
TLSRecord,
'\x16\x03\x01\x00\x10abc')
class TLSChangeCipherSpecTest(unittest.TestCase):
"It's just a byte. This will be quick, I promise"
def setUp(self):
self.p = TLSChangeCipherSpec('\x01')
def testParses(self):
self.assertEqual(self.p.type, 1)
def testTotalLength(self):
self.assertEqual(len(self.p), 1)
class TLSAppDataTest(unittest.TestCase):
"AppData is basically just a string"
def testValue(self):
d = TLSAppData('abcdefgh')
self.assertEqual(d, 'abcdefgh')
class TLSHandshakeTest(unittest.TestCase):
def setUp(self):
self.h = TLSHandshake('\x00\x00\x00\x01\xff')
def testCreatedInsideMessage(self):
self.assertTrue(isinstance(self.h.data, TLSHelloRequest))
def testLength(self):
self.assertEqual(self.h.length, 0x01)
def testRaisesNeedData(self):
self.assertRaises(dpkt.NeedData, TLSHandshake, '\x00\x00\x01\x01')
class ClientHelloTest(unittest.TestCase):
'This data is extracted from and verified by Wireshark'
def setUp(self):
self.data = _hexdecode(
"01000199" # handshake header
"0301" # version
"5008220ce5e0e78b6891afe204498c9363feffbe03235a2d9e05b7d990eb708d" # rand
"2009bc0192e008e6fa8fe47998fca91311ba30ddde14a9587dc674b11c3d3e5ed1" # session id
# cipher suites
"005400ffc00ac0140088008700390038c00fc00500840035c007c009c011c0130045004400330032c00cc00ec002c0040096004100050004002fc008c01200160013c00dc003feff000ac006c010c00bc00100020001"
"0100" # compresssion methods
# extensions
"00fc0000000e000c0000096c6f63616c686f7374000a00080006001700180019000b00020100002300d0a50b2e9f618a9ea9bf493ef49b421835cd2f6b05bbe1179d8edf70d58c33d656e8696d36d7e7e0b9d3ecc0e4de339552fa06c64c0fcb550a334bc43944e2739ca342d15a9ebbe981ac87a0d38160507d47af09bdc16c5f0ee4cdceea551539382333226048a026d3a90a0535f4a64236467db8fee22b041af986ad0f253bc369137cd8d8cd061925461d7f4d7895ca9a4181ab554dad50360ac31860e971483877c9335ac1300c5e78f3e56f3b8e0fc16358fcaceefd5c8d8aaae7b35be116f8832856ca61144fcdd95e071b94d0cf7233740000"
"FFFFFFFFFFFFFFFF") # random garbage
self.p = TLSHandshake(self.data)
def testClientHelloConstructed(self):
'Make sure the correct class was constructed'
#print self.p
self.assertTrue(isinstance(self.p.data, TLSClientHello))
# def testClientDateCorrect(self):
# self.assertEqual(self.p.random_unixtime, 1342710284)
def testClientRandomCorrect(self):
self.assertEqual(self.p.data.random,
_hexdecode('5008220ce5e0e78b6891afe204498c9363feffbe03235a2d9e05b7d990eb708d'))
def testCipherSuiteLength(self):
# we won't bother testing the identity of each cipher suite in the list.
self.assertEqual(self.p.data.num_ciphersuites, 42)
#self.assertEqual(len(self.p.ciphersuites), 42)
def testSessionId(self):
self.assertEqual(self.p.data.session_id,
_hexdecode('09bc0192e008e6fa8fe47998fca91311ba30ddde14a9587dc674b11c3d3e5ed1'))
def testCompressionMethods(self):
self.assertEqual(self.p.data.num_compression_methods, 1)
def testTotalLength(self):
self.assertEqual(len(self.p), 413)
class ServerHelloTest(unittest.TestCase):
'Again, from Wireshark'
def setUp(self):
self.data = _hexdecode('0200004d03015008220c8ec43c5462315a7c99f5d5b6bff009ad285b51dc18485f352e9fdecd2009bc0192e008e6fa8fe47998fca91311ba30ddde14a9587dc674b11c3d3e5ed10002000005ff01000100')
self.p = TLSHandshake(self.data)
def testConstructed(self):
self.assertTrue(isinstance(self.p.data, TLSServerHello))
# def testDateCorrect(self):
# self.assertEqual(self.p.random_unixtime, 1342710284)
def testRandomCorrect(self):
self.assertEqual(self.p.data.random,
_hexdecode('5008220c8ec43c5462315a7c99f5d5b6bff009ad285b51dc18485f352e9fdecd'))
def testCipherSuite(self):
self.assertEqual(
ssl_ciphersuites.BY_CODE[self.p.data.cipher_suite].name,
'TLS_RSA_WITH_NULL_SHA')
def testTotalLength(self):
self.assertEqual(len(self.p), 81)
class TLSMultiFactoryTest(unittest.TestCase):
"Made up test data"
def setUp(self):
self.data = _hexdecode('1703010010' # header 1
'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' # data 1
'1703010010' # header 2
'BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB' # data 2
'1703010010' # header 3
'CCCCCCCC') # data 3 (incomplete)
self.msgs, self.bytes_parsed = TLSMultiFactory(self.data)
def testNumMessages(self):
# only complete messages should be parsed, incomplete ones left
# in buffer
self.assertEqual(len(self.msgs), 2)
def testBytesParsed(self):
self.assertEqual(self.bytes_parsed, (5 + 16) * 2)
def testFirstMsgData(self):
self.assertEqual(self.msgs[0].data, _hexdecode('AA' * 16))
def testSecondMsgData(self):
self.assertEqual(self.msgs[1].data, _hexdecode('BB' * 16))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
junhuac/MQUIC | src/build/rmdir_and_stamp.py | 11 | 1412 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wipes out a directory recursively and then touches a stamp file.
This odd pairing of operations is used to support build scripts which
slurp up entire directories (e.g. build/android/javac.py when handling
generated sources) as inputs.
The general pattern of use is:
- Add a target which generates |gen_sources| into |out_path| from |inputs|.
- Include |stamp_file| as an input for that target or any of its rules which
generate files in |out_path|.
- Add an action which depends on |inputs| and which outputs |stamp_file|;
the action should run this script and pass |out_path| and |stamp_file| as
its arguments.
The net result is that you will force |out_path| to be wiped and all
|gen_sources| to be regenerated any time any file in |inputs| changes.
See //mojo/mojom_bindings_generator.gypi for an example use case.
"""
import errno
import os
import shutil
import sys
def Main(dst_dir, stamp_file):
try:
shutil.rmtree(os.path.normpath(dst_dir))
except OSError as e:
# Ignore only "not found" errors.
if e.errno != errno.ENOENT:
raise e
with open(stamp_file, 'a'):
os.utime(stamp_file, None)
if __name__ == '__main__':
sys.exit(Main(sys.argv[1], sys.argv[2]))
| mit |
xarisd/honcho | honcho/test/unit/test_environ.py | 3 | 8050 | # coding=utf-8
import textwrap
from ..helpers import TestCase
from honcho import environ
from honcho import compat
ENVFILE_FIXTURES = [
[
"""
FOO=bar
""",
{'FOO': 'bar'}
],
[
"""
FOO=bar
BAZ=qux
""",
{'FOO': 'bar', 'BAZ': 'qux'}
],
[
# No newline at EOF
"""
FOO=bar""",
{'FOO': 'bar'}
],
[
# Comments
"""
#commented: command
""",
{}
],
[
# Invalid characters
"""
-foo=command
""",
{}
],
[
# Single quoted
"""
MYVAR='hello"world'
""",
{'MYVAR': 'hello"world'}
],
[
# Double quoted
"""
MYVAR="hello'world"
""",
{'MYVAR': "hello'world"}
],
[
# Quotation mark surrounded
r"""
MYVAR='"surrounded"'
""",
{'MYVAR': '"surrounded"'}
],
[
# Escaped quotation mark surrounded
r"""
MYVAR=\"escaped\"
""",
{'MYVAR': '"escaped"'}
],
[
# At-sign in value
r"""
[email protected]
""",
{'MYVAR': '[email protected]'}
],
[
# Much punctuation in value
r"""
MYVAR=~pun|u@|0n$=
""",
{'MYVAR': '~pun|u@|0n$='}
],
[
# Unicode values
r"""
MYVAR=⋃ñᴉ—☪ó∂ǝ
""",
{'MYVAR': '⋃ñᴉ—☪ó∂ǝ'}
],
[
# Unicode keys
r"""
ṀẎṾẠṚ=value
""",
{}
],
[
# Quoted space in value
r"""
MYVAR='sp ace'
""",
{'MYVAR': 'sp ace'}
],
[
# Escaped characters in value
r"""
TABS='foo\tbar'
NEWLINES='foo\nbar'
DOLLAR='foo\$bar'
""",
{'TABS': 'foo\tbar',
'NEWLINES': 'foo\nbar',
'DOLLAR': 'foo\\$bar'}
],
]
PROCFILE_FIXTURES = [
[
# Simple
"""
web: command
""",
{'web': 'command'}
],
[
# Simple 2
"""
foo: python foo.py
bar: python bar.py
""",
{'foo': 'python foo.py', 'bar': 'python bar.py'}
],
[
# No newline at EOF
"""
web: command""",
{'web': 'command'}
],
[
# Comments
"""
#commented: command
""",
{}
],
[
# Invalid characters
"""
-foo: command
""",
{}
],
[
# Shell metacharacters
"""
web: sh -c "echo $FOOBAR" >/dev/null 2>&1
""",
{'web': 'sh -c "echo $FOOBAR" >/dev/null 2>&1'}
],
]
class TestEnviron(TestCase):
def test_environ_parse(self):
for content, commands in ENVFILE_FIXTURES:
content = textwrap.dedent(content)
result = environ.parse(content)
self.assertEqual(result, commands)
class TestProcfileParse(TestCase):
def test_parse_procfiles(self):
for content, processes in PROCFILE_FIXTURES:
content = textwrap.dedent(content)
p = environ.parse_procfile(content)
self.assertEqual(p.processes, processes)
def test_procfile_ordered(self):
content = textwrap.dedent("""
one: onecommand
two: twocommand
three: twocommand
four: fourcommand
""")
p = environ.parse_procfile(content)
order = [k for k in p.processes]
self.assertEqual(['one', 'two', 'three', 'four'], order)
class TestProcfile(TestCase):
def test_init(self):
p = environ.Procfile()
self.assertEqual(0, len(p.processes))
def test_add_process(self):
p = environ.Procfile()
p.add_process('foo', 'echo 123')
self.assertEqual('echo 123', p.processes['foo'])
def test_add_process_ensures_unique_name(self):
p = environ.Procfile()
p.add_process('foo', 'echo 123')
self.assertRaises(AssertionError, p.add_process, 'foo', 'echo 123')
def ep(*args, **kwargs):
return environ.expand_processes(compat.OrderedDict(args), **kwargs)
class TestExpandProcesses(TestCase):
def test_name(self):
p = ep(("foo", "some command"))
self.assertEqual(1, len(p))
self.assertEqual("foo.1", p[0].name)
def test_name_multiple(self):
p = ep(("foo", "some command"), ("bar", "another command"))
self.assertEqual(2, len(p))
self.assertEqual("foo.1", p[0].name)
self.assertEqual("bar.1", p[1].name)
def test_name_concurrency(self):
p = ep(("foo", "some command"), concurrency={"foo": 3})
self.assertEqual(3, len(p))
self.assertEqual("foo.1", p[0].name)
self.assertEqual("foo.2", p[1].name)
self.assertEqual("foo.3", p[2].name)
def test_name_concurrency_multiple(self):
p = ep(("foo", "some command"), ("bar", "another command"),
concurrency={"foo": 3, "bar": 2})
self.assertEqual(5, len(p))
self.assertEqual("foo.1", p[0].name)
self.assertEqual("foo.2", p[1].name)
self.assertEqual("foo.3", p[2].name)
self.assertEqual("bar.1", p[3].name)
self.assertEqual("bar.2", p[4].name)
def test_command(self):
p = ep(("foo", "some command"))
self.assertEqual("some command", p[0].cmd)
def test_port_not_defaulted(self):
p = ep(("foo", "some command"))
self.assertEqual({}, p[0].env)
def test_port(self):
p = ep(("foo", "some command"), port=8000)
self.assertEqual({"PORT": "8000"}, p[0].env)
def test_port_multiple(self):
p = ep(("foo", "some command"),
("bar", "another command"),
port=8000)
self.assertEqual({"PORT": "8000"}, p[0].env)
self.assertEqual({"PORT": "8100"}, p[1].env)
def test_port_from_env(self):
p = ep(("foo", "some command"),
("bar", "another command"),
env={"PORT": 8000})
self.assertEqual({"PORT": "8000"}, p[0].env)
self.assertEqual({"PORT": "8100"}, p[1].env)
def test_port_from_env_coerced_to_number(self):
p = ep(("foo", "some command"), env={"PORT": "5000"})
self.assertEqual({"PORT": "5000"}, p[0].env)
def test_port_from_env_overrides(self):
p = ep(("foo", "some command"), env={"PORT": 5000}, port=8000)
self.assertEqual({"PORT": "5000"}, p[0].env)
def test_port_concurrency(self):
p = ep(("foo", "some command"),
("bar", "another command"),
concurrency={"foo": 3, "bar": 2},
port=4000)
self.assertEqual({"PORT": "4000"}, p[0].env)
self.assertEqual({"PORT": "4001"}, p[1].env)
self.assertEqual({"PORT": "4002"}, p[2].env)
self.assertEqual({"PORT": "4100"}, p[3].env)
self.assertEqual({"PORT": "4101"}, p[4].env)
def test_quiet(self):
p = ep(("foo", "some command"), quiet=["foo", "bar"])
self.assertEqual(True, p[0].quiet)
def test_quiet_multiple(self):
p = ep(("foo", "some command"),
("bar", "another command"),
quiet=["foo"])
self.assertEqual(True, p[0].quiet)
self.assertEqual(False, p[1].quiet)
def test_env(self):
p = ep(("foo", "some command"),
env={"ANIMAL": "giraffe", "DEBUG": "false"})
self.assertEqual("giraffe", p[0].env["ANIMAL"])
self.assertEqual("false", p[0].env["DEBUG"])
def test_env_multiple(self):
p = ep(("foo", "some command"),
("bar", "another command"),
env={"ANIMAL": "giraffe", "DEBUG": "false"})
self.assertEqual("giraffe", p[0].env["ANIMAL"])
self.assertEqual("false", p[0].env["DEBUG"])
self.assertEqual("giraffe", p[1].env["ANIMAL"])
self.assertEqual("false", p[1].env["DEBUG"])
| mit |
sebadiaz/rethinkdb | test/common/vcoptparse.py | 32 | 9768 | # Copyright 2010-2012 RethinkDB, all rights reserved.
"""
vcoptparse is short for Value-Centric Option Parser. It's a tiny argument parsing library. It has
less features than optparse or argparse, but it kicks more ass.
optparse and argparse allow the client to specify the flags that should be parsed, and as an
afterthought specify what keys should appear in the options dictionary when the parse is over.
vcoptparse works the other way around: you specify the keys and how to determine the keys from the
command line. That's why it's called "value-centric".
Here is a simple example:
>>> op = OptParser()
>>> op["verbose"] = BoolFlag("--verbose")
>>> op["count"] = IntFlag("--count", 5) # Default count is 5
>>> op["infiles"] = ManyPositionalArgs()
>>> op.parse(["foo.py", "--count", "5", "file1.txt", "file2.txt"])
{'count': 5, 'verbose': False, 'infiles': ['file1.txt', 'file2.txt']}
"""
class NoValueClass(object):
pass
NoValue = NoValueClass()
class Arg(object):
pass
class OptError(StandardError):
pass
class OptParser(object):
def __init__(self):
self.parsers_by_key = {}
self.parsers_in_order = []
def __setitem__(self, key, parser):
assert isinstance(parser, Arg)
if key in self.parsers_by_key: del self[key]
assert parser not in self.parsers_by_key.values()
self.parsers_by_key[key] = parser
self.parsers_in_order.append((key, parser))
def __getitem__(self, key):
return self.parsers_by_key[key]
def __delitem__(self, key):
self.parsers_in_order.remove((key, self.parsers_by_key[key]))
del self.parsers_by_key[key]
def parse(self, args):
args = args[1:] # Cut off name of program
values = dict((key, NoValue) for key in self.parsers_by_key.keys())
def name_for_key(key):
return getattr(self.parsers_by_key[key], "name", key)
def set_value(key, new_value):
combiner = getattr(self.parsers_by_key[key], "combiner", enforce_one_combiner)
try:
values[key] = combiner(values[key], new_value)
except OptError as e:
raise OptError(str(e) % {"name": name_for_key(key)})
# Build flag table
flags = {}
for key, parser in self.parsers_in_order:
if hasattr(parser, "flags"):
for flag in parser.flags:
assert flag.startswith("-")
if flag in flags:
raise ValueError("The flag %r has two different meanings." % flag)
flags[flag] = (key, parser)
# Handle flag arguments and store positional arguments
positionals = []
while args:
arg = args.pop(0)
if arg.startswith("-"):
if arg in flags:
key, parser = flags[arg]
set_value(key, parser.flag(arg, args))
else:
raise OptError("Don't know how to handle flag %r" % arg)
else:
positionals.append(arg)
# Handle positional arguments
for key, parser in self.parsers_in_order:
if hasattr(parser, "positional"):
set_value(key, parser.positional(positionals))
if positionals:
raise OptError("Unexpected extra positional argument(s): %s" % ", ".join(repr(x) for x in positionals))
# Apply defaults
for key, parser in self.parsers_by_key.iteritems():
if values[key] is NoValue:
if hasattr(parser, "default") and parser.default is not NoValue:
values[key] = parser.default
else:
raise OptError("You need to specify a value for %r" % name_for_key(key))
return values
# Combiners (indicate how to combine repeat specifications of the same flag)
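# For example (illustrative), ValueFlag("--file", combiner=append_combiner)
# collects every occurrence of --file into a list, instead of rejecting
# repeats as the default enforce_one_combiner does.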
def most_recent_combiner(old, new):
return new
def enforce_one_combiner(old, new):
if old is not NoValue:
raise OptError("%(name)r should only be specified once.")
return new
def append_combiner(old, new):
if old is NoValue: old = []
return old + [new]
# Converters (indicate how to convert from string arguments to values)
def bool_converter(x):
if x.lower() in ["yes", "true", "y", "t"]: return True
elif x.lower() in ["no", "false", "n", "f"]: return False
else: raise OptError("Expected a yes/no value. Got %r." % x)
def int_converter(x):
try: return int(x)
except ValueError: raise OptError("Expected an integer. Got %r." % x)
def float_converter(x):
try: return float(x)
except ValueError: raise OptError("Expected a float. Got %r." % x)
def choice_converter(choices):
def check(x):
if x in choices: return x
else: raise OptError("Expected one of %s. Got %r." % (", ".join(choices), x))
return check
# Standard argument parsers for common situations
class BoolFlag(Arg):
def __init__(self, arg, invert=False):
assert isinstance(invert, bool)
self.flags = [arg]
self.default = invert
def flag(self, flag, args):
return not self.default
class ChoiceFlags(Arg):
def __init__(self, choices, default = NoValue):
assert all(isinstance(x, str) for x in choices)
self.flags = choices
self.default = default
def flag(self, flag, args):
return flag.lstrip("-")
class ValueFlag(Arg):
def __init__(self, name, converter = str, default = NoValue, combiner = enforce_one_combiner):
assert isinstance(name, str)
assert callable(converter)
assert callable(combiner)
self.flags = [name]
self.converter = converter
self.combiner = combiner
self.default = default
def flag(self, flag, args):
try: value = args.pop(0)
except IndexError:
raise OptError("Flag %r expects an argument." % flag)
try: value2 = self.converter(value)
except OptError as e:
raise OptError("Problem in argument to flag %r: %s" % (flag, e))
return value2
class StringFlag(ValueFlag):
def __init__(self, name, default = NoValue):
ValueFlag.__init__(self, name, str, default = default)
class IntFlag(ValueFlag):
def __init__(self, name, default = NoValue):
ValueFlag.__init__(self, name, int_converter, default = default)
class FloatFlag(ValueFlag):
def __init__(self, name, default = NoValue):
ValueFlag.__init__(self, name, float_converter, default = default)
class ChoiceFlag(ValueFlag):
def __init__(self, name, choices, default = NoValue):
ValueFlag.__init__(self, name, choice_converter(choices), default = default)
class MultiValueFlag(Arg):
def __init__(self, name, converters = [str], default = NoValue, combiner = enforce_one_combiner):
assert isinstance(name, str)
assert all(callable(x) for x in converters)
assert callable(combiner)
self.flags = [name]
self.converters = converters
self.combiner = combiner
self.default = default
def flag(self, flag, args):
new_values = ()
args_gotten = 0
for converter in self.converters:
try: value = args.pop(0)
except IndexError:
raise OptError("Flag %r expects %d argument(s), but only got %d." % (flag, len(self.converters), args_gotten))
try: value2 = converter(value)
except OptError as e:
raise OptError("Problem in argument %d to flag %r: %s" % (args_gotten + 1, flag, e))
new_values += (value2, )
args_gotten += 1
return new_values
class AllArgsAfterFlag(Arg):
def __init__(self, name, converter = str, default = NoValue):
assert isinstance(name, str)
assert callable(converter)
self.flags = [name]
self.converter = converter
self.default = default
def flag(self, flag, args):
args2 = []
for arg in args:
try: args2.append(self.converter(arg))
except OptError as e: raise OptError("For %r: %s" % (flag, e))
del args[:] # We consume all arguments remaining
return args2
class PositionalArg(Arg):
def __init__(self, name = None, converter = str, default = NoValue):
assert callable(converter)
self.name = name
self.converter = converter
self.default = default
def positional(self, args):
try: value = args.pop(0)
except IndexError:
if self.default is NoValue:
if self.name is None:
raise OptError("Too few positional arguments.")
else:
raise OptError("Too few positional arguments; need a value for %r." % self.name)
else:
return NoValue
try: value2 = self.converter(value)
except OptError as e:
if self.name is None: raise
else: raise OptError("For %r: %s" % (self.name, e))
return value2
class ManyPositionalArgs(Arg):
def __init__(self, name = None, converter = str):
assert callable(converter)
self.name = name
self.converter = converter
def positional(self, args):
args2 = []
for arg in args:
try: args2.append(self.converter(arg))
except OptError as e:
if self.name is None: raise
else: raise OptError("For %r: %s" % (self.name, e))
del args[:] # We consume all arguments remaining
return args2
| agpl-3.0 |