file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
test_software_deployment.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import re
import uuid
import mock
import six
from heat.common import exception as exc
from heat.common.i18n import _
from heat.engine.clients.os import nova
from heat.engine.clients.os import swift
from heat.engine.clients.os import zaqar
from heat.engine.resources.openstack.heat import software_deployment as sd
from heat.engine import rsrc_defn
from heat.engine import stack as parser
from heat.engine import template
from heat.tests import common
from heat.tests import utils
class SoftwareDeploymentTest(common.HeatTestCase):
    """Unit tests for the OS::Heat::SoftwareDeployment resource."""

    # Minimal deployment targeting a server by ID; uses the default
    # (CFN_SIGNAL) signal transport.
    template = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
                    'config': '48e8ade1-9196-42d5-89a2-f709fde42632',
                    'input_values': {'foo': 'bar'},
                }
            }
        }
    }

    # Deployment together with the OS::Nova::Server it targets, so
    # validate() can inspect the server's user_data_format property.
    template_with_server = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': 'server',
                    'config': '48e8ade1-9196-42d5-89a2-f709fde42632',
                    'input_values': {'foo': 'bar'},
                }
            },
            'server': {
                'Type': 'OS::Nova::Server',
                'Properties': {
                    'image': 'fedora-amd64',
                    'flavor': 'm1.small',
                    'key_name': 'heat_key'
                }
            }
        }
    }

    # Deployment that never signals back (NO_SIGNAL transport).
    template_no_signal = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
                    'config': '48e8ade1-9196-42d5-89a2-f709fde42632',
                    'input_values': {'foo': 'bar', 'bink': 'bonk'},
                    'signal_transport': 'NO_SIGNAL',
                    'name': '00_run_me_first'
                }
            }
        }
    }

    # Deployment signalling via a Swift TempURL.
    template_temp_url_signal = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
                    'config': '48e8ade1-9196-42d5-89a2-f709fde42632',
                    'input_values': {'foo': 'bar', 'bink': 'bonk'},
                    'signal_transport': 'TEMP_URL_SIGNAL',
                    'name': '00_run_me_first'
                }
            }
        }
    }

    # Deployment signalling via a Zaqar queue.
    template_zaqar_signal = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
                    'config': '48e8ade1-9196-42d5-89a2-f709fde42632',
                    'input_values': {'foo': 'bar', 'bink': 'bonk'},
                    'signal_transport': 'ZAQAR_SIGNAL',
                    'name': '00_run_me_first'
                }
            }
        }
    }

    # Deployment that also triggers for DELETE/SUSPEND/RESUME actions
    # (the default is CREATE/UPDATE only).
    template_delete_suspend_resume = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
                    'config': '48e8ade1-9196-42d5-89a2-f709fde42632',
                    'input_values': {'foo': 'bar'},
                    'actions': ['DELETE', 'SUSPEND', 'RESUME'],
                }
            }
        }
    }

    # Deployment with no config property at all.
    template_no_config = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {
                    'server': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
                    'input_values': {'foo': 'bar', 'bink': 'bonk'},
                    'signal_transport': 'NO_SIGNAL',
                }
            }
        }
    }

    # Deployment missing the required server property.
    template_no_server = {
        'HeatTemplateFormatVersion': '2012-12-12',
        'Resources': {
            'deployment_mysql': {
                'Type': 'OS::Heat::SoftwareDeployment',
                'Properties': {}
            }
        }
    }
    def setUp(self):
        super(SoftwareDeploymentTest, self).setUp()
        self.ctx = utils.dummy_context()

    def _create_stack(self, tmpl):
        """Build a stack from *tmpl* with all external services stubbed.

        Patches the nova server lookup, the keystone user/keypair and
        EC2 signed-URL helpers, then replaces the deployment resource's
        RPC client with a MagicMock so each test can program engine
        responses and assert on the calls made.
        """
        self.stack = parser.Stack(
            self.ctx, 'software_deployment_test_stack',
            template.Template(tmpl),
            stack_id='42f6f66b-631a-44e7-8d01-e22fb54574a9',
            stack_user_project_id='65728b74-cfe7-4f17-9c15-11d4f686e591'
        )
        self.patchobject(nova.NovaClientPlugin, 'get_server',
                         return_value=mock.MagicMock())
        self.patchobject(sd.SoftwareDeployment, '_create_user')
        self.patchobject(sd.SoftwareDeployment, '_create_keypair')
        self.patchobject(sd.SoftwareDeployment, '_delete_user')
        self.patchobject(sd.SoftwareDeployment, '_delete_ec2_signed_url')
        get_ec2_signed_url = self.patchobject(
            sd.SoftwareDeployment, '_get_ec2_signed_url')
        get_ec2_signed_url.return_value = 'http://192.0.2.2/signed_url'
        self.deployment = self.stack['deployment_mysql']
        # Replace the RPC client wholesale; tests assert on its call_args.
        self.rpc_client = mock.MagicMock()
        self.deployment._rpc_client = self.rpc_client
def test_validate(self):
template = dict(self.template_with_server)
props = template['Resources']['server']['Properties']
props['user_data_format'] = 'SOFTWARE_CONFIG'
self._create_stack(self.template_with_server)
sd = self.deployment
self.assertEqual('CFN_SIGNAL', sd.properties.get('signal_transport'))
sd.validate()
def test_validate_without_server(self):
stack = utils.parse_stack(self.template_no_server)
snip = stack.t.resource_definitions(stack)['deployment_mysql']
deployment = sd.SoftwareDeployment('deployment_mysql', snip, stack)
err = self.assertRaises(exc.StackValidationFailed, deployment.validate)
self.assertEqual("Property error: "
"Resources.deployment_mysql.Properties: "
"Property server not assigned", six.text_type(err))
def test_validate_failed(self):
template = dict(self.template_with_server)
props = template['Resources']['server']['Properties']
props['user_data_format'] = 'RAW'
self._create_stack(template)
sd = self.deployment
err = self.assertRaises(exc.StackValidationFailed, sd.validate)
self.assertEqual("Resource server's property "
"user_data_format should be set to "
"SOFTWARE_CONFIG since there are "
"software deployments on it.", six.text_type(err))
    def test_resource_mapping(self):
        """The template type maps to the SoftwareDeployment class."""
        self._create_stack(self.template)
        self.assertIsInstance(self.deployment, sd.SoftwareDeployment)
def mock_software_config(self):
config = {
'id': '48e8ade1-9196-42d5-89a2-f709fde42632',
'group': 'Test::Group',
'name': 'myconfig',
'config': 'the config',
'options': {},
'inputs': [{
'name': 'foo',
'type': 'String',
'default': 'baa',
}, {
'name': 'bar',
'type': 'String',
'default': 'baz',
}],
'outputs': [],
}
self.rpc_client.show_software_config.return_value = config
return config
def mock_software_component(self):
config = {
'id': '48e8ade1-9196-42d5-89a2-f709fde42632',
'group': 'component',
'name': 'myconfig',
'config': {
'configs': [
{
'actions': ['CREATE'],
'config': 'the config',
'tool': 'a_tool'
},
{
'actions': ['DELETE'],
'config': 'the config',
'tool': 'a_tool'
},
{
'actions': ['UPDATE'],
'config': 'the config',
'tool': 'a_tool'
},
{
'actions': ['SUSPEND'],
'config': 'the config',
'tool': 'a_tool'
},
{
'actions': ['RESUME'],
'config': 'the config',
'tool': 'a_tool'
}
]
},
'options': {},
'inputs': [{
'name': 'foo',
'type': 'String',
'default': 'baa',
}, {
'name': 'bar',
'type': 'String',
'default': 'baz',
}],
'outputs': [],
}
self.rpc_client.show_software_config.return_value = config
return config
def mock_derived_software_config(self):
sc = {'id': '9966c8e7-bc9c-42de-aa7d-f2447a952cb2'}
self.rpc_client.create_software_config.return_value = sc
return sc
def mock_deployment(self):
sd = {
'id': 'c8a19429-7fde-47ea-a42f-40045488226c',
'config_id': '9966c8e7-bc9c-42de-aa7d-f2447a952cb2'
}
self.rpc_client.create_software_deployment.return_value = sd
return sd
    def test_handle_create(self):
        """NO_SIGNAL create derives a config and completes immediately."""
        self._create_stack(self.template_no_signal)
        self.mock_software_config()
        derived_sc = self.mock_derived_software_config()
        sd = self.mock_deployment()
        self.deployment.handle_create()
        self.assertEqual(sd['id'], self.deployment.resource_id)
        # The derived config merges the base config's inputs with the
        # resolved input_values plus the standard deploy_* inputs.
        self.assertEqual({
            'config': 'the config',
            'group': 'Test::Group',
            'name': '00_run_me_first',
            'inputs': [{
                'default': 'baa',
                'name': 'foo',
                'type': 'String',
                'value': 'bar'
            }, {
                'default': 'baz',
                'name': 'bar',
                'type': 'String',
                'value': 'baz'
            }, {
                'name': 'bink',
                'type': 'String',
                'value': 'bonk'
            }, {
                'description': 'ID of the server being deployed to',
                'name': 'deploy_server_id',
                'type': 'String',
                'value': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0'
            }, {
                'description': 'Name of the current action being deployed',
                'name': 'deploy_action',
                'type': 'String',
                'value': 'CREATE'
            }, {
                'description': 'ID of the stack this deployment belongs to',
                'name': 'deploy_stack_id',
                'type': 'String',
                'value': ('software_deployment_test_stack'
                          '/42f6f66b-631a-44e7-8d01-e22fb54574a9')
            }, {
                'description': 'Name of this deployment resource in the stack',
                'name': 'deploy_resource_name',
                'type': 'String',
                'value': 'deployment_mysql'
            }, {
                'description': ('How the server should signal to heat with '
                                'the deployment output values.'),
                'name': 'deploy_signal_transport',
                'type': 'String',
                'value': 'NO_SIGNAL'
            }],
            'options': {},
            'outputs': []
        }, self.rpc_client.create_software_config.call_args[1])
        # NO_SIGNAL deployments are created already COMPLETE.
        self.assertEqual(
            {'action': 'CREATE',
             'config_id': derived_sc['id'],
             'server_id': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
             'stack_user_project_id': '65728b74-cfe7-4f17-9c15-11d4f686e591',
             'status': 'COMPLETE',
             'status_reason': 'Not waiting for outputs signal'},
            self.rpc_client.create_software_deployment.call_args[1])

    def test_handle_create_without_config(self):
        """With no config property an empty ungrouped config is derived."""
        self._create_stack(self.template_no_config)
        sd = self.mock_deployment()
        derived_sc = self.mock_derived_software_config()
        self.deployment.handle_create()
        self.assertEqual(sd['id'], self.deployment.resource_id)
        call_arg = self.rpc_client.create_software_config.call_args[1]
        # Input ordering is not guaranteed without a base config, so sort
        # before comparing.
        call_arg['inputs'] = sorted(
            call_arg['inputs'], key=lambda k: k['name'])
        self.assertEqual({
            'config': '',
            'group': 'Heat::Ungrouped',
            'name': self.deployment.physical_resource_name(),
            'inputs': [{
                'name': 'bink',
                'type': 'String',
                'value': 'bonk'
            }, {
                'description': 'Name of the current action being deployed',
                'name': 'deploy_action',
                'type': 'String',
                'value': 'CREATE'
            }, {
                'description': 'Name of this deployment resource in the stack',
                'name': 'deploy_resource_name',
                'type': 'String',
                'value': 'deployment_mysql'
            }, {
                'description': 'ID of the server being deployed to',
                'name': 'deploy_server_id',
                'type': 'String',
                'value': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0'
            }, {
                'description': ('How the server should signal to heat with '
                                'the deployment output values.'),
                'name': 'deploy_signal_transport',
                'type': 'String',
                'value': 'NO_SIGNAL'
            }, {
                'description': 'ID of the stack this deployment belongs to',
                'name': 'deploy_stack_id',
                'type': 'String',
                'value': ('software_deployment_test_stack'
                          '/42f6f66b-631a-44e7-8d01-e22fb54574a9')
            }, {
                'name': 'foo',
                'type': 'String',
                'value': 'bar'
            }],
            'options': None,
            'outputs': None
        }, call_arg)
        self.assertEqual(
            {'action': 'CREATE',
             'config_id': derived_sc['id'],
             'server_id': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
             'stack_user_project_id': '65728b74-cfe7-4f17-9c15-11d4f686e591',
             'status': 'COMPLETE',
             'status_reason': 'Not waiting for outputs signal'},
            self.rpc_client.create_software_deployment.call_args[1])

    def test_handle_create_for_component(self):
        """Creating from a SoftwareComponent keeps the per-action configs."""
        self._create_stack(self.template_no_signal)
        self.mock_software_component()
        derived_sc = self.mock_derived_software_config()
        sd = self.mock_deployment()
        self.deployment.handle_create()
        self.assertEqual(sd['id'], self.deployment.resource_id)
        self.assertEqual({
            'config': {
                'configs': [
                    {
                        'actions': ['CREATE'],
                        'config': 'the config',
                        'tool': 'a_tool'
                    },
                    {
                        'actions': ['DELETE'],
                        'config': 'the config',
                        'tool': 'a_tool'
                    },
                    {
                        'actions': ['UPDATE'],
                        'config': 'the config',
                        'tool': 'a_tool'
                    },
                    {
                        'actions': ['SUSPEND'],
                        'config': 'the config',
                        'tool': 'a_tool'
                    },
                    {
                        'actions': ['RESUME'],
                        'config': 'the config',
                        'tool': 'a_tool'
                    }
                ]
            },
            'group': 'component',
            'name': '00_run_me_first',
            'inputs': [{
                'default': 'baa',
                'name': 'foo',
                'type': 'String',
                'value': 'bar'
            }, {
                'default': 'baz',
                'name': 'bar',
                'type': 'String',
                'value': 'baz'
            }, {
                'name': 'bink',
                'type': 'String',
                'value': 'bonk'
            }, {
                'description': 'ID of the server being deployed to',
                'name': 'deploy_server_id',
                'type': 'String',
                'value': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0'
            }, {
                'description': 'Name of the current action being deployed',
                'name': 'deploy_action',
                'type': 'String',
                'value': 'CREATE'
            }, {
                'description': 'ID of the stack this deployment belongs to',
                'name': 'deploy_stack_id',
                'type': 'String',
                'value': ('software_deployment_test_stack'
                          '/42f6f66b-631a-44e7-8d01-e22fb54574a9')
            }, {
                'description': 'Name of this deployment resource in the stack',
                'name': 'deploy_resource_name',
                'type': 'String',
                'value': 'deployment_mysql'
            }, {
                'description': ('How the server should signal to heat with '
                                'the deployment output values.'),
                'name': 'deploy_signal_transport',
                'type': 'String',
                'value': 'NO_SIGNAL'
            }],
            'options': {},
            'outputs': []
        }, self.rpc_client.create_software_config.call_args[1])
        self.assertEqual(
            {'action': 'CREATE',
             'config_id': derived_sc['id'],
             'server_id': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
             'stack_user_project_id': '65728b74-cfe7-4f17-9c15-11d4f686e591',
             'status': 'COMPLETE',
             'status_reason': 'Not waiting for outputs signal'},
            self.rpc_client.create_software_deployment.call_args[1])
def test_handle_create_do_not_wait(self):
self._create_stack(self.template)
self.mock_software_config()
derived_sc = self.mock_derived_software_config()
sd = self.mock_deployment()
self.deployment.handle_create()
self.assertEqual(sd['id'], self.deployment.resource_id)
self.assertEqual(
{'action': 'CREATE',
'config_id': derived_sc['id'],
'server_id': '9f1f0e00-05d2-4ca5-8602-95021f19c9d0',
'stack_user_project_id': '65728b74-cfe7-4f17-9c15-11d4f686e591',
'status': 'IN_PROGRESS',
'status_reason': 'Deploy data available'},
self.rpc_client.create_software_deployment.call_args[1])
def test_check_create_complete(self):
self._create_stack(self.template)
sd = self.mock_deployment()
self.rpc_client.show_software_deployment.return_value = sd
sd['status'] = self.deployment.COMPLETE
self.assertTrue(self.deployment.check_create_complete(sd))
sd['status'] = self.deployment.IN_PROGRESS
self.assertFalse(self.deployment.check_create_complete(sd))
def test_check_create_complete_none(self):
self._create_stack(self.template)
self.assertTrue(self.deployment.check_create_complete(sd=None))
def test_check_update_complete(self):
self._create_stack(self.template)
sd = self.mock_deployment()
self.rpc_client.show_software_deployment.return_value = sd
sd['status'] = self.deployment.COMPLETE
self.assertTrue(self.deployment.check_update_complete(sd))
sd['status'] = self.deployment.IN_PROGRESS
self.assertFalse(self.deployment.check_update_complete(sd))
def test_check_update_complete_none(self):
self._create_stack(self.template)
self.assertTrue(self.deployment.check_update_complete(sd=None))
def test_check_suspend_complete(self):
self._create_stack(self.template)
sd = self.mock_deployment()
self.rpc_client.show_software_deployment.return_value = sd
sd['status'] = self.deployment.COMPLETE
self.assertTrue(self.deployment.check_suspend_complete(sd))
sd['status'] = self.deployment.IN_PROGRESS
self.assertFalse(self.deployment.check_suspend_complete(sd))
def test_check_suspend_complete_none(self):
self._create_stack(self.template)
self.assertTrue(self.deployment.check_suspend_complete(sd=None))
def test_check_resume_complete(self):
self._create_stack(self.template)
sd = self.mock_deployment()
self.rpc_client.show_software_deployment.return_value = sd
sd['status'] = self.deployment.COMPLETE
self.assertTrue(self.deployment.check_resume_complete(sd))
sd['status'] = self.deployment.IN_PROGRESS
self.assertFalse(self.deployment.check_resume_complete(sd))
def test_check_resume_complete_none(self):
self._create_stack(self.template)
self.assertTrue(self.deployment.check_resume_complete(sd=None))
def test_check_create_complete_error(self):
self._create_stack(self.template)
sd = {
'status': self.deployment.FAILED,
'status_reason': 'something wrong'
}
self.rpc_client.show_software_deployment.return_value = sd
err = self.assertRaises(
exc.Error, self.deployment.check_create_complete, sd)
self.assertEqual(
'Deployment to server failed: something wrong', six.text_type(err))
    def test_handle_delete(self):
        """Delete of a default deployment removes the engine record."""
        self._create_stack(self.template)
        sd = self.mock_deployment()
        self.rpc_client.show_software_deployment.return_value = sd
        self.deployment.resource_id = sd['id']
        self.deployment.handle_delete()
        self.deployment.check_delete_complete()
        self.assertEqual(
            (self.ctx, sd['id']),
            self.rpc_client.delete_software_deployment.call_args[0])

    def test_handle_delete_resource_id_is_None(self):
        """With DELETE in actions, handle_delete returns deployment data."""
        self._create_stack(self.template_delete_suspend_resume)
        self.mock_software_config()
        sd = self.mock_deployment()
        self.assertEqual(sd, self.deployment.handle_delete())

    def test_delete_complete(self):
        """A DELETE-triggering deployment goes through IN_PROGRESS first."""
        self._create_stack(self.template_delete_suspend_resume)
        self.mock_software_config()
        derived_sc = self.mock_derived_software_config()
        sd = self.mock_deployment()
        self.deployment.resource_id = sd['id']
        self.rpc_client.show_software_deployment.return_value = sd
        self.rpc_client.update_software_deployment.return_value = sd
        self.assertEqual(sd, self.deployment.handle_delete())
        # The delete is performed as an update to the DELETE action.
        self.assertEqual({
            'deployment_id': 'c8a19429-7fde-47ea-a42f-40045488226c',
            'action': 'DELETE',
            'config_id': derived_sc['id'],
            'status': 'IN_PROGRESS',
            'status_reason': 'Deploy data available'},
            self.rpc_client.update_software_deployment.call_args[1])
        sd['status'] = self.deployment.IN_PROGRESS
        self.assertFalse(self.deployment.check_delete_complete(sd))
        sd['status'] = self.deployment.COMPLETE
        self.assertTrue(self.deployment.check_delete_complete(sd))
def test_handle_delete_notfound(self):
self._create_stack(self.template)
deployment_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
self.deployment.resource_id = deployment_id
self.mock_software_config()
derived_sc = self.mock_derived_software_config()
sd = self.mock_deployment()
sd['config_id'] = derived_sc['id']
self.rpc_client.show_software_deployment.return_value = sd
nf = exc.NotFound
self.rpc_client.delete_software_deployment.side_effect = nf
self.rpc_client.delete_software_config.side_effect = nf
self.assertIsNone(self.deployment.handle_delete())
self.assertTrue(self.deployment.check_delete_complete())
self.assertEqual(
(self.ctx, derived_sc['id']),
self.rpc_client.delete_software_config.call_args[0])
def test_handle_delete_none(self):
self._create_stack(self.template)
deployment_id = None
self.deployment.resource_id = deployment_id
self.assertIsNone(self.deployment.handle_delete())
def test_check_delete_complete_none(self):
self._create_stack(self.template)
self.assertTrue(self.deployment.check_delete_complete())
def test_check_delete_complete_delete_sd(self):
# handle_delete will return None if NO_SIGNAL,
# in this case also need to call the _delete_resource(),
# otherwise the sd data will residue in db
self._create_stack(self.template)
sd = self.mock_deployment()
self.deployment.resource_id = sd['id']
self.rpc_client.show_software_deployment.return_value = sd
self.assertTrue(self.deployment.check_delete_complete())
self.assertEqual(
(self.ctx, sd['id']),
self.rpc_client.delete_software_deployment.call_args[0])
    def test_handle_update(self):
        """A config change re-derives the config and updates the engine."""
        self._create_stack(self.template)
        self.mock_derived_software_config()
        sd = self.mock_deployment()
        rsrc = self.stack['deployment_mysql']
        self.rpc_client.show_software_deployment.return_value = sd
        self.deployment.resource_id = sd['id']
        config_id = '0ff2e903-78d7-4cca-829e-233af3dae705'
        prop_diff = {'config': config_id}
        props = copy.copy(rsrc.properties.data)
        props.update(prop_diff)
        snippet = rsrc_defn.ResourceDefinition(rsrc.name, rsrc.type(), props)
        self.deployment.handle_update(
            json_snippet=snippet, tmpl_diff=None, prop_diff=prop_diff)
        # The new config is looked up and the existing deployment updated.
        self.assertEqual(
            (self.ctx, config_id),
            self.rpc_client.show_software_config.call_args[0])
        self.assertEqual(
            (self.ctx, sd['id']),
            self.rpc_client.show_software_deployment.call_args[0])
        self.assertEqual({
            'deployment_id': 'c8a19429-7fde-47ea-a42f-40045488226c',
            'action': 'UPDATE',
            'config_id': '9966c8e7-bc9c-42de-aa7d-f2447a952cb2',
            'status': 'IN_PROGRESS',
            'status_reason': u'Deploy data available'},
            self.rpc_client.update_software_deployment.call_args[1])
    def test_handle_suspend_resume(self):
        """SUSPEND then RESUME each update the deployment IN_PROGRESS."""
        self._create_stack(self.template_delete_suspend_resume)
        self.mock_software_config()
        derived_sc = self.mock_derived_software_config()
        sd = self.mock_deployment()
        self.rpc_client.show_software_deployment.return_value = sd
        self.deployment.resource_id = sd['id']
        # first, handle the suspend
        self.deployment.handle_suspend()
        self.assertEqual({
            'deployment_id': 'c8a19429-7fde-47ea-a42f-40045488226c',
            'action': 'SUSPEND',
            'config_id': derived_sc['id'],
            'status': 'IN_PROGRESS',
            'status_reason': 'Deploy data available'},
            self.rpc_client.update_software_deployment.call_args[1])
        sd['status'] = 'IN_PROGRESS'
        self.assertFalse(self.deployment.check_suspend_complete(sd))
        sd['status'] = 'COMPLETE'
        self.assertTrue(self.deployment.check_suspend_complete(sd))
        # now, handle the resume
        self.deployment.handle_resume()
        self.assertEqual({
            'deployment_id': 'c8a19429-7fde-47ea-a42f-40045488226c',
            'action': 'RESUME',
            'config_id': derived_sc['id'],
            'status': 'IN_PROGRESS',
            'status_reason': 'Deploy data available'},
            self.rpc_client.update_software_deployment.call_args[1])
        sd['status'] = 'IN_PROGRESS'
        self.assertFalse(self.deployment.check_resume_complete(sd))
        sd['status'] = 'COMPLETE'
        self.assertTrue(self.deployment.check_resume_complete(sd))
def test_handle_signal_ok_zero(self):
self._create_stack(self.template)
self.deployment.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
rpcc = self.rpc_client
rpcc.signal_software_deployment.return_value = 'deployment succeeded'
details = {
'foo': 'bar',
'deploy_status_code': 0
}
ret = self.deployment.handle_signal(details)
self.assertEqual('deployment succeeded', ret)
ca = rpcc.signal_software_deployment.call_args[0]
self.assertEqual(self.ctx, ca[0])
self.assertEqual('c8a19429-7fde-47ea-a42f-40045488226c', ca[1])
self.assertEqual({'foo': 'bar', 'deploy_status_code': 0}, ca[2])
self.assertIsNotNone(ca[3])
    def test_no_signal_action(self):
        """Signals are still dispatched during SUSPEND/DELETE states."""
        self._create_stack(self.template)
        self.deployment.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
        rpcc = self.rpc_client
        rpcc.signal_software_deployment.return_value = 'deployment succeeded'
        details = {
            'foo': 'bar',
            'deploy_status_code': 0
        }
        actions = [self.deployment.SUSPEND, self.deployment.DELETE]
        # Patch handle_signal so we only verify that signal() delegates.
        ev = self.patchobject(self.deployment, 'handle_signal')
        for action in actions:
            for status in self.deployment.STATUSES:
                self.deployment.state_set(action, status)
                self.deployment.signal(details)
                ev.assert_called_with(details)
def test_handle_signal_ok_str_zero(self):
self._create_stack(self.template)
self.deployment.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
rpcc = self.rpc_client
rpcc.signal_software_deployment.return_value = 'deployment succeeded'
details = {
'foo': 'bar',
'deploy_status_code': '0'
}
ret = self.deployment.handle_signal(details)
self.assertEqual('deployment succeeded', ret)
ca = rpcc.signal_software_deployment.call_args[0]
self.assertEqual(self.ctx, ca[0])
self.assertEqual('c8a19429-7fde-47ea-a42f-40045488226c', ca[1])
self.assertEqual({'foo': 'bar', 'deploy_status_code': '0'}, ca[2])
self.assertIsNotNone(ca[3])
    def test_handle_signal_failed(self):
        """A 'failed' key in the signal details reports a failure."""
        self._create_stack(self.template)
        self.deployment.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
        rpcc = self.rpc_client
        rpcc.signal_software_deployment.return_value = 'deployment failed'
        details = {'failed': 'no enough memory found.'}
        ret = self.deployment.handle_signal(details)
        self.assertEqual('deployment failed', ret)
        ca = rpcc.signal_software_deployment.call_args[0]
        self.assertEqual(self.ctx, ca[0])
        self.assertEqual('c8a19429-7fde-47ea-a42f-40045488226c', ca[1])
        self.assertEqual(details, ca[2])
        self.assertIsNotNone(ca[3])
        # Test bug 1332355, where details contains a translateable message
        details = {'failed': _('need more memory.')}
        ret = self.deployment.handle_signal(details)
        self.assertEqual('deployment failed', ret)
        ca = rpcc.signal_software_deployment.call_args[0]
        self.assertEqual(self.ctx, ca[0])
        self.assertEqual('c8a19429-7fde-47ea-a42f-40045488226c', ca[1])
        self.assertEqual(details, ca[2])
        self.assertIsNotNone(ca[3])
def test_handle_status_code_failed(self):
self._create_stack(self.template)
self.deployment.resource_id = 'c8a19429-7fde-47ea-a42f-40045488226c'
rpcc = self.rpc_client
rpcc.signal_software_deployment.return_value = 'deployment failed'
details = {
'deploy_stdout': 'A thing happened',
'deploy_stderr': 'Then it broke',
'deploy_status_code': -1
}
self.deployment.handle_signal(details)
ca = rpcc.signal_software_deployment.call_args[0]
self.assertEqual(self.ctx, ca[0])
self.assertEqual('c8a19429-7fde-47ea-a42f-40045488226c', ca[1])
self.assertEqual(details, ca[2])
self.assertIsNotNone(ca[3])
def test_handle_signal_not_waiting(self):
|
    def test_fn_get_att(self):
        """Attributes resolve from the deployment's output_values."""
        self._create_stack(self.template)
        sd = {
            'outputs': [
                {'name': 'failed', 'error_output': True},
                {'name': 'foo'}
            ],
            'output_values': {
                'foo': 'bar',
                'deploy_stdout': 'A thing happened',
                'deploy_stderr': 'Extraneous logging',
                'deploy_status_code': 0
            },
            'status': self.deployment.COMPLETE
        }
        self.rpc_client.show_software_deployment.return_value = sd
        self.assertEqual('bar', self.deployment.FnGetAtt('foo'))
        self.assertEqual('A thing happened',
                         self.deployment.FnGetAtt('deploy_stdout'))
        self.assertEqual('Extraneous logging',
                         self.deployment.FnGetAtt('deploy_stderr'))
        self.assertEqual(0, self.deployment.FnGetAtt('deploy_status_code'))

    def test_fn_get_att_error(self):
        """An unknown attribute name raises InvalidTemplateAttribute."""
        self._create_stack(self.template)
        sd = {
            'outputs': [],
            'output_values': {'foo': 'bar'},
        }
        self.rpc_client.show_software_deployment.return_value = sd
        err = self.assertRaises(
            exc.InvalidTemplateAttribute,
            self.deployment.FnGetAtt, 'foo2')
        self.assertEqual(
            'The Referenced Attribute (deployment_mysql foo2) is incorrect.',
            six.text_type(err))
    def test_handle_action(self):
        """Without an 'actions' property only CREATE/UPDATE trigger."""
        self._create_stack(self.template)
        self.mock_software_config()
        sd = self.mock_deployment()
        rsrc = self.stack['deployment_mysql']
        self.rpc_client.show_software_deployment.return_value = sd
        self.deployment.resource_id = sd['id']
        config_id = '0ff2e903-78d7-4cca-829e-233af3dae705'
        prop_diff = {'config': config_id}
        props = copy.copy(rsrc.properties.data)
        props.update(prop_diff)
        snippet = rsrc_defn.ResourceDefinition(rsrc.name, rsrc.type(), props)
        # by default (no 'actions' property) SoftwareDeployment must only
        # trigger for CREATE and UPDATE
        self.assertIsNotNone(self.deployment.handle_create())
        self.assertIsNotNone(self.deployment.handle_update(
            json_snippet=snippet, tmpl_diff=None, prop_diff=prop_diff))
        # ... but it must not trigger for SUSPEND, RESUME and DELETE
        self.assertIsNone(self.deployment.handle_suspend())
        self.assertIsNone(self.deployment.handle_resume())
        self.assertIsNone(self.deployment.handle_delete())

    def test_handle_action_for_component(self):
        """A SoftwareComponent config triggers for every lifecycle action."""
        self._create_stack(self.template)
        self.mock_software_component()
        sd = self.mock_deployment()
        rsrc = self.stack['deployment_mysql']
        self.rpc_client.show_software_deployment.return_value = sd
        self.deployment.resource_id = sd['id']
        config_id = '0ff2e903-78d7-4cca-829e-233af3dae705'
        prop_diff = {'config': config_id}
        props = copy.copy(rsrc.properties.data)
        props.update(prop_diff)
        snippet = rsrc_defn.ResourceDefinition(rsrc.name, rsrc.type(), props)
        # for a SoftwareComponent, SoftwareDeployment must always trigger
        self.assertIsNotNone(self.deployment.handle_create())
        self.assertIsNotNone(self.deployment.handle_update(
            json_snippet=snippet, tmpl_diff=None, prop_diff=prop_diff))
        self.assertIsNotNone(self.deployment.handle_suspend())
        self.assertIsNotNone(self.deployment.handle_resume())
        self.assertIsNotNone(self.deployment.handle_delete())
    def test_get_temp_url(self):
        """The Swift signal URL is created once and cached in resource data.

        Verifies the TempURL shape, that the object name and URL are
        persisted via data_set, that a second call returns the cached URL,
        and that the container/object are created exactly once.
        """
        dep_data = {}
        sc = mock.MagicMock()
        scc = self.patch(
            'heat.engine.clients.os.swift.SwiftClientPlugin._create')
        scc.return_value = sc
        sc.head_account.return_value = {
            'x-account-meta-temp-url-key': 'secrit'
        }
        sc.url = 'http://192.0.2.1/v1/AUTH_test_tenant_id'
        self._create_stack(self.template_temp_url_signal)

        # Capture data_set writes into a local dict instead of the DB.
        def data_set(key, value, redact=False):
            dep_data[key] = value

        self.deployment.data_set = data_set
        self.deployment.data = mock.Mock(
            return_value=dep_data)
        self.deployment.id = 23
        self.deployment.uuid = str(uuid.uuid4())
        self.deployment.action = self.deployment.CREATE
        object_name = self.deployment.physical_resource_name()
        temp_url = self.deployment._get_swift_signal_url()
        temp_url_pattern = re.compile(
            '^http://192.0.2.1/v1/AUTH_test_tenant_id/'
            '(.*)/(software_deployment_test_stack-deployment_mysql-.*)'
            '\\?temp_url_sig=.*&temp_url_expires=\\d*$')
        self.assertRegex(temp_url, temp_url_pattern)
        m = temp_url_pattern.search(temp_url)
        container = m.group(1)
        self.assertEqual(object_name, m.group(2))
        self.assertEqual(dep_data['swift_signal_object_name'], object_name)
        self.assertEqual(dep_data['swift_signal_url'], temp_url)
        # Second call must return the cached URL, not create a new one.
        self.assertEqual(temp_url, self.deployment._get_swift_signal_url())
        sc.put_container.assert_called_once_with(container)
        sc.put_object.assert_called_once_with(container, object_name, '')
def test_delete_temp_url(self):
object_name = str(uuid.uuid4())
dep_data = {
'swift_signal_object_name': object_name
}
self._create_stack(self.template_temp_url_signal)
self.deployment.data_delete = mock.MagicMock()
self.deployment.data = mock.Mock(
return_value=dep_data)
sc = mock.MagicMock()
sc.get_container.return_value = ({}, [{'name': object_name}])
sc.head_container.return_value = {
'x-container-object-count': 0
}
scc = self.patch(
'heat.engine.clients.os.swift.SwiftClientPlugin._create')
scc.return_value = sc
self.deployment.id = 23
self.deployment.uuid = str(uuid.uuid4())
container = self.stack.id
self.deployment._delete_swift_signal_url()
sc.delete_object.assert_called_once_with(container, object_name)
self.assertEqual(
[mock.call('swift_signal_object_name'),
mock.call('swift_signal_url')],
self.deployment.data_delete.mock_calls)
swift_exc = swift.SwiftClientPlugin.exceptions_module
sc.delete_object.side_effect = swift_exc.ClientException(
'Not found', http_status=404)
self.deployment._delete_swift_signal_url()
self.assertEqual(
[mock.call('swift_signal_object_name'),
mock.call('swift_signal_url'),
mock.call('swift_signal_object_name'),
mock.call('swift_signal_url')],
self.deployment.data_delete.mock_calls)
del(dep_data['swift_signal_object_name'])
self.deployment.physical_resource_name = mock.Mock()
self.deployment._delete_swift_signal_url()
self.assertFalse(self.deployment.physical_resource_name.called)
def test_handle_action_temp_url(self):
self._create_stack(self.template_temp_url_signal)
dep_data = {
'swift_signal_url': (
'http://192.0.2.1/v1/AUTH_a/b/c'
'?temp_url_sig=ctemp_url_expires=1234')
}
self.deployment.data = mock.Mock(
return_value=dep_data)
self.mock_software_config()
for action in ('DELETE', 'SUSPEND', 'RESUME'):
self.assertIsNone(self.deployment._handle_action(action))
for action in ('CREATE', 'UPDATE'):
self.assertIsNotNone(self.deployment._handle_action(action))
    def test_get_zaqar_queue(self):
        """The Zaqar signal queue is created once and cached in data."""
        dep_data = {}
        zc = mock.MagicMock()
        zcc = self.patch(
            'heat.engine.clients.os.zaqar.ZaqarClientPlugin._create')
        zcc.return_value = zc
        self._create_stack(self.template_zaqar_signal)

        # Capture data_set writes into a local dict instead of the DB.
        def data_set(key, value, redact=False):
            dep_data[key] = value

        self.deployment.data_set = data_set
        self.deployment.data = mock.Mock(return_value=dep_data)
        self.deployment.id = 23
        self.deployment.uuid = str(uuid.uuid4())
        self.deployment.action = self.deployment.CREATE
        queue_id = self.deployment._get_zaqar_signal_queue_id()
        self.assertEqual(2, len(zc.queue.mock_calls))
        self.assertEqual(queue_id, zc.queue.mock_calls[0][1][0])
        self.assertEqual(queue_id, dep_data['zaqar_signal_queue_id'])
        # Second call must return the cached queue id.
        self.assertEqual(queue_id,
                         self.deployment._get_zaqar_signal_queue_id())

    def test_delete_zaqar_queue(self):
        """Deleting the Zaqar queue also removes the cached queue id."""
        queue_id = str(uuid.uuid4())
        dep_data = {
            'zaqar_signal_queue_id': queue_id
        }
        self._create_stack(self.template_zaqar_signal)
        self.deployment.data_delete = mock.MagicMock()
        self.deployment.data = mock.Mock(return_value=dep_data)
        zc = mock.MagicMock()
        zcc = self.patch(
            'heat.engine.clients.os.zaqar.ZaqarClientPlugin._create')
        zcc.return_value = zc
        self.deployment.id = 23
        self.deployment.uuid = str(uuid.uuid4())
        self.deployment._delete_zaqar_signal_queue()
        zc.queue.assert_called_once_with(queue_id)
        self.assertTrue(zc.queue(self.deployment.uuid).delete.called)
        self.assertEqual(
            [mock.call('zaqar_signal_queue_id')],
            self.deployment.data_delete.mock_calls)
        zaqar_exc = zaqar.ZaqarClientPlugin.exceptions_module
        # NOTE(review): this sets side_effect on zc.queue.delete, but the
        # code under test appears to call zc.queue(...).delete(), i.e.
        # zc.queue.return_value.delete — confirm the ResourceNotFound
        # branch is actually exercised here.
        zc.queue.delete.side_effect = zaqar_exc.ResourceNotFound()
        self.deployment._delete_zaqar_signal_queue()
        self.assertEqual(
            [mock.call('zaqar_signal_queue_id'),
             mock.call('zaqar_signal_queue_id')],
            self.deployment.data_delete.mock_calls)
        dep_data.pop('zaqar_signal_queue_id')
        self.deployment.physical_resource_name = mock.Mock()
        self.deployment._delete_zaqar_signal_queue()
        # No cached queue id left, so no further data_delete calls.
        self.assertEqual(2, len(self.deployment.data_delete.mock_calls))
class SoftwareDeploymentGroupTest(common.HeatTestCase):
    """Tests for the OS::Heat::SoftwareDeploymentGroup resource."""

    # Minimal template: one deployment group targeting two servers.
    template = {
        'heat_template_version': '2013-05-23',
        'resources': {
            'deploy_mysql': {
                'type': 'OS::Heat::SoftwareDeploymentGroup',
                'properties': {
                    'config': 'config_uuid',
                    'servers': {'server1': 'uuid1', 'server2': 'uuid2'},
                    'input_values': {'foo': 'bar'},
                    'name': '10_config'
                }
            }
        }
    }
def setUp(self):
common.HeatTestCase.setUp(self)
self.rpc_client = mock.MagicMock()
    def test_build_resource_definition(self):
        """The group builds one shared member definition.

        The per-member 'server' property is not part of the shared
        definition, so include_all makes no difference here.
        """
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('test', snip, stack)
        expect = {
            'type': 'OS::Heat::SoftwareDeployment',
            'properties': {
                'actions': ['CREATE', 'UPDATE'],
                'config': 'config_uuid',
                'input_values': {'foo': 'bar'},
                'name': '10_config',
                'signal_transport': 'CFN_SIGNAL'
            }
        }
        self.assertEqual(
            expect, resg._build_resource_definition())
        self.assertEqual(
            expect, resg._build_resource_definition(include_all=True))
    def test_resource_names(self):
        """Member names are the keys of the 'servers' property."""
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('test', snip, stack)
        self.assertEqual(
            set(('server1', 'server2')),
            set(resg._resource_names())
        )
        # Replacing the properties changes the reported names.
        resg.properties = {'servers': {'s1': 'u1', 's2': 'u2', 's3': 'u3'}}
        self.assertEqual(
            set(('s1', 's2', 's3')),
            set(resg._resource_names()))
    def test_assemble_nested(self):
        """Tests nested stack implements group creation based on properties.

        Tests that the nested stack that implements the group is created
        appropriately based on properties: one SoftwareDeployment per
        member, each carrying the shared definition plus its server uuid.
        """
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('test', snip, stack)
        templ = {
            "heat_template_version": "2015-04-30",
            "resources": {
                "server1": {
                    'type': 'OS::Heat::SoftwareDeployment',
                    'properties': {
                        'server': 'uuid1',
                        'actions': ['CREATE', 'UPDATE'],
                        'config': 'config_uuid',
                        'input_values': {'foo': 'bar'},
                        'name': '10_config',
                        'signal_transport': 'CFN_SIGNAL'
                    }
                },
                "server2": {
                    'type': 'OS::Heat::SoftwareDeployment',
                    'properties': {
                        'server': 'uuid2',
                        'actions': ['CREATE', 'UPDATE'],
                        'config': 'config_uuid',
                        'input_values': {'foo': 'bar'},
                        'name': '10_config',
                        'signal_transport': 'CFN_SIGNAL'
                    }
                }
            }
        }
        self.assertEqual(templ, resg._assemble_nested(['server1', 'server2']))
    def test_attributes(self):
        """Group attributes aggregate member attributes by member name.

        deploy_stdouts / deploy_stderrs / deploy_status_codes each map
        member name -> that member's singular attribute value.
        """
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('test', snip, stack)
        nested = self.patchobject(resg, 'nested')
        server1 = mock.MagicMock()
        server2 = mock.MagicMock()
        nested.return_value = {
            'server1': server1,
            'server2': server2
        }
        server1.FnGetAtt.return_value = 'Thing happened on server1'
        server2.FnGetAtt.return_value = 'ouch'
        self.assertEqual({
            'server1': 'Thing happened on server1',
            'server2': 'ouch'
        }, resg.FnGetAtt('deploy_stdouts'))
        server1.FnGetAtt.return_value = ''
        server2.FnGetAtt.return_value = 'Its gone Pete Tong'
        self.assertEqual({
            'server1': '',
            'server2': 'Its gone Pete Tong'
        }, resg.FnGetAtt('deploy_stderrs'))
        server1.FnGetAtt.return_value = 0
        server2.FnGetAtt.return_value = 1
        self.assertEqual({
            'server1': 0,
            'server2': 1
        }, resg.FnGetAtt('deploy_status_codes'))
        # Each plural group attribute queried the singular member attr.
        server1.FnGetAtt.assert_has_calls([
            mock.call('deploy_stdout'),
            mock.call('deploy_stderr'),
            mock.call('deploy_status_code'),
        ])
        server2.FnGetAtt.assert_has_calls([
            mock.call('deploy_stdout'),
            mock.call('deploy_stderr'),
            mock.call('deploy_status_code'),
        ])
    def test_attributes_path(self):
        """A path component selects one member's attribute value."""
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('test', snip, stack)
        nested = self.patchobject(resg, 'nested')
        server1 = mock.MagicMock()
        server2 = mock.MagicMock()
        nested.return_value = {
            'server1': server1,
            'server2': server2
        }
        server1.FnGetAtt.return_value = 'Thing happened on server1'
        server2.FnGetAtt.return_value = 'ouch'
        self.assertEqual('Thing happened on server1',
                         resg.FnGetAtt('deploy_stdouts', 'server1'))
        self.assertEqual('ouch',
                         resg.FnGetAtt('deploy_stdouts', 'server2'))
        server1.FnGetAtt.return_value = ''
        server2.FnGetAtt.return_value = 'Its gone Pete Tong'
        self.assertEqual('', resg.FnGetAtt('deploy_stderrs', 'server1'))
        self.assertEqual('Its gone Pete Tong',
                         resg.FnGetAtt('deploy_stderrs', 'server2'))
        server1.FnGetAtt.return_value = 0
        server2.FnGetAtt.return_value = 1
        self.assertEqual(0, resg.FnGetAtt('deploy_status_codes', 'server1'))
        self.assertEqual(1, resg.FnGetAtt('deploy_status_codes', 'server2'))
        # Both members were queried twice per attribute (one call per
        # FnGetAtt with a path above).
        server1.FnGetAtt.assert_has_calls([
            mock.call('deploy_stdout'),
            mock.call('deploy_stdout'),
            mock.call('deploy_stderr'),
            mock.call('deploy_stderr'),
            mock.call('deploy_status_code'),
            mock.call('deploy_status_code'),
        ])
        server2.FnGetAtt.assert_has_calls([
            mock.call('deploy_stdout'),
            mock.call('deploy_stdout'),
            mock.call('deploy_stderr'),
            mock.call('deploy_stderr'),
            mock.call('deploy_status_code'),
            mock.call('deploy_status_code'),
        ])
    def test_attributes_passthrough_key(self):
        """Prove attributes not in the schema pass through to members."""
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('test', snip, stack)
        nested = self.patchobject(resg, 'nested')
        server1 = mock.MagicMock()
        server2 = mock.MagicMock()
        nested.return_value = {
            'server1': server1,
            'server2': server2
        }
        server1.FnGetAtt.return_value = 'attr1'
        server2.FnGetAtt.return_value = 'attr2'
        # Unknown attribute names are forwarded verbatim to each member.
        self.assertEqual({
            'server1': 'attr1',
            'server2': 'attr2'
        }, resg.FnGetAtt('some_attr'))
        server1.FnGetAtt.assert_has_calls([
            mock.call('some_attr'),
        ])
        server2.FnGetAtt.assert_has_calls([
            mock.call('some_attr'),
        ])
    def test_validate(self):
        """A well-formed group template passes validation."""
        stack = utils.parse_stack(self.template)
        snip = stack.t.resource_definitions(stack)['deploy_mysql']
        resg = sd.SoftwareDeploymentGroup('deploy_mysql', snip, stack)
        self.assertIsNone(resg.validate())
|
self._create_stack(self.template)
rpcc = self.rpc_client
rpcc.signal_software_deployment.return_value = None
details = None
self.assertIsNone(self.deployment.handle_signal(details))
ca = rpcc.signal_software_deployment.call_args[0]
self.assertEqual(self.ctx, ca[0])
self.assertIsNone(ca[1])
self.assertIsNone(ca[2])
self.assertIsNotNone(ca[3])
|
label.rs
|
use std::ops::Deref;
use derivative::Derivative;
use gl_image::Image;
use gm::Color;
use rtools::{data_manager::DataManager, Rglica, ToRglica};
use crate::{
basic::label_layout::LabelLayout,
impl_view, view,
view::{ViewData, ViewFrame, ViewSubviews},
Font, ImageView, View, ViewBase, ViewCallbacks,
};
/// A text label view.
///
/// The text is rasterized into an `Image` which is displayed by a
/// child `ImageView` (see `set_letters`).
#[view]
#[derive(Default, Derivative)]
#[derivative(Debug)]
pub struct Label {
    // Glyph source for rendering; excluded from Debug output.
    #[derivative(Debug = "ignore")]
    font: Font,
    // Current label text.
    text: String,
    // Lays out glyph positions for the current font/text.
    #[derivative(Debug = "ignore")]
    layout: LabelLayout,
    // Child view that displays the rendered text image.
    image: Rglica<ImageView>,
}
impl_view!(Label);
impl Label {
pub fn text(&self) -> &str {
&self.text
}
pub fn set_text(&mut self, text: impl ToString) -> &mut Self {
let text = text.to_string();
if self.text == text {
return self;
}
self.text = text;
self.set_letters();
self
}
pub fn
|
(&mut self) -> &Self {
self.set_text("")
}
    /// Re-render `self.text` into the child image view.
    fn set_letters(&mut self) {
        if self.text.is_empty() {
            // Nothing to draw: show an empty/default image.
            self.image.set_image(Default::default());
            return;
        }
        // If an image asset is registered under the text itself, show
        // it directly instead of rasterizing glyphs.
        if let Some(image) = Image::handle_with_name(&self.text) {
            self.image.set_image(image);
            return;
        }
        self.layout.clear();
        self.layout.set_text(&self.font, &self.text);
        let size = self.layout.size();
        // Rglica handle lets the draw closure reach this view's font,
        // layout and drawer without borrowing `self`.
        let mut this = self.to_rglica();
        let image = Image::draw(&self.text, size, move |image| {
            let mut content = ViewBase::default();
            content.set_frame(size);
            let mut fe = this;
            let glyphs = this.layout.glyphs();
            for glyph in glyphs {
                let image = fe.font.glyph_for_char(glyph.parent).image;
                fe.drawer().draw_image(
                    image.deref(),
                    &(
                        glyph.x,
                        // Flip y into the drawer's coordinate system;
                        // the +10.0 looks like a baseline fudge —
                        // TODO(review): confirm where it comes from.
                        size.height - glyph.y - glyph.height as f32 + 10.0,
                        glyph.width,
                        glyph.height,
                    )
                        .into(),
                    Color::WHITE,
                    true,
                );
            }
            image.flipped_y = true;
            // Single-channel (alpha/greyscale) glyph image.
            image.channels = 1;
            fe.drawer().reset_viewport();
        });
        self.image.set_image(image);
    }
}
impl ViewCallbacks for Label {
    fn setup(&mut self) {
        // Create the child image view and render the initial text.
        self.image = self.add_view();
        self.set_letters();
    }
    fn layout(&mut self) {
        // The rendered text image fills the whole label frame.
        self.image.place().as_background();
    }
}
|
clear
|
content-type.js
|
function
|
(regex, contentType) {
return contentType ? regex.test(contentType) : false;
}
// Predicates classifying request/response content-type header values.
// Each returns false when the header is missing (see match above).
exports.isJson = function (contentType) {
    return match(/json/i, contentType);
};
exports.isMultipart = function (contentType) {
    return match(/multipart\/form-data/i, contentType);
};
exports.isFormUrlEncoded = function (contentType) {
    return match(/application\/x-www-form-urlencoded/i, contentType);
};
|
match
|
tag.service.ts
|
import { Injectable } from '@nestjs/common';
import { TagEntity } from './tag.entity';
import { Repository } from 'typeorm';
import { InjectRepository } from '@nestjs/typeorm';
@Injectable()
export class
|
{
constructor(
@InjectRepository(TagEntity)
private readonly tagRepository: Repository<TagEntity>,
) {}
async findAll(): Promise<TagEntity[]> {
return await this.tagRepository.find();
}
}
|
TagService
|
impls_syntax.rs
|
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains `HashStable` implementations for various data types
//! from libsyntax in no particular order.
use ich::StableHashingContext;
use std::hash as std_hash;
use std::mem;
use syntax::ast;
use syntax::parse::token;
use syntax::symbol::InternedString;
use syntax::tokenstream;
use syntax_pos::FileMap;
use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
StableHasher, StableHasherResult};
use rustc_data_structures::accumulate_vec::AccumulateVec;
impl<'gcx> HashStable<StableHashingContext<'gcx>> for InternedString {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        // Hash the character data, not the interner handle, so the
        // result is stable across compilation sessions.
        let s: &str = &**self;
        s.hash_stable(hcx, hasher);
    }
}
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for InternedString {
    type KeyType = InternedString;
    #[inline]
    fn to_stable_hash_key(&self,
                          _: &StableHashingContext<'gcx>)
                          -> InternedString {
        // The string itself is already a content-based stable key.
        self.clone()
    }
}
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Name {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        // Delegate to the underlying string contents.
        self.as_str().hash_stable(hcx, hasher);
    }
}
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for ast::Name {
    type KeyType = InternedString;
    #[inline]
    fn to_stable_hash_key(&self,
                          _: &StableHashingContext<'gcx>)
                          -> InternedString {
        self.as_str()
    }
}
// Mechanically-derived stable-hash impls for simple libsyntax types.
impl_stable_hash_for!(enum ::syntax::ast::AsmDialect {
    Att,
    Intel
});
impl_stable_hash_for!(enum ::syntax::ext::base::MacroKind {
    Bang,
    Attr,
    Derive
});
impl_stable_hash_for!(enum ::syntax::abi::Abi {
    Cdecl,
    Stdcall,
    Fastcall,
    Vectorcall,
    Thiscall,
    Aapcs,
    Win64,
    SysV64,
    PtxKernel,
    Msp430Interrupt,
    X86Interrupt,
    Rust,
    C,
    System,
    RustIntrinsic,
    RustCall,
    PlatformIntrinsic,
    Unadjusted
});
impl_stable_hash_for!(struct ::syntax::attr::Deprecation { since, note });
impl_stable_hash_for!(struct ::syntax::attr::Stability {
    level,
    feature,
    rustc_depr,
    rustc_const_unstable
});
// Struct-style variants: hashed manually as discriminant + fields.
impl<'gcx> HashStable<StableHashingContext<'gcx>>
for ::syntax::attr::StabilityLevel {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        mem::discriminant(self).hash_stable(hcx, hasher);
        match *self {
            ::syntax::attr::StabilityLevel::Unstable { ref reason, ref issue } => {
                reason.hash_stable(hcx, hasher);
                issue.hash_stable(hcx, hasher);
            }
            ::syntax::attr::StabilityLevel::Stable { ref since } => {
                since.hash_stable(hcx, hasher);
            }
        }
    }
}
impl_stable_hash_for!(struct ::syntax::attr::RustcDeprecation { since, reason });
impl_stable_hash_for!(struct ::syntax::attr::RustcConstUnstable { feature });
impl_stable_hash_for!(enum ::syntax::attr::IntType {
    SignedInt(int_ty),
    UnsignedInt(uint_ty)
});
impl_stable_hash_for!(enum ::syntax::ast::LitIntType {
    Signed(int_ty),
    Unsigned(int_ty),
    Unsuffixed
});
impl_stable_hash_for_spanned!(::syntax::ast::LitKind);
// All literal-kind payloads participate in the stable hash.
impl_stable_hash_for!(enum ::syntax::ast::LitKind {
    Str(value, style),
    ByteStr(value),
    Byte(value),
    Char(value),
    Int(value, lit_int_type),
    Float(value, float_ty),
    FloatUnsuffixed(value),
    Bool(value)
});
// Primitive-type and attribute-shape enums, hashed by variant.
impl_stable_hash_for!(enum ::syntax::ast::IntTy { Is, I8, I16, I32, I64, I128 });
impl_stable_hash_for!(enum ::syntax::ast::UintTy { Us, U8, U16, U32, U64, U128 });
impl_stable_hash_for!(enum ::syntax::ast::FloatTy { F32, F64 });
impl_stable_hash_for!(enum ::syntax::ast::Unsafety { Unsafe, Normal });
impl_stable_hash_for!(enum ::syntax::ast::Constness { Const, NotConst });
impl_stable_hash_for!(enum ::syntax::ast::Defaultness { Default, Final });
impl_stable_hash_for!(struct ::syntax::ast::Lifetime { id, span, ident });
impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) });
impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner });
impl<'gcx> HashStable<StableHashingContext<'gcx>> for [ast::Attribute] {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        if self.len() == 0 {
            self.len().hash_stable(hcx, hasher);
            return
        }
        // Some attributes are always ignored during hashing: sugared
        // doc comments and those the context marks as ignored.
        let filtered: AccumulateVec<[&ast::Attribute; 8]> = self
            .iter()
            .filter(|attr| {
                !attr.is_sugared_doc &&
                attr.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true)
            })
            .collect();
        filtered.len().hash_stable(hcx, hasher);
        for attr in filtered {
            attr.hash_stable(hcx, hasher);
        }
    }
}
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Attribute {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        // Make sure that these have been filtered out.
        debug_assert!(self.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true));
        debug_assert!(!self.is_sugared_doc);
        // `id` is deliberately skipped (`id: _`) — presumably it is
        // session-local and would make the hash unstable; confirm.
        let ast::Attribute {
            id: _,
            style,
            ref path,
            ref tokens,
            is_sugared_doc: _,
            span,
        } = *self;
        style.hash_stable(hcx, hasher);
        path.segments.len().hash_stable(hcx, hasher);
        for segment in &path.segments {
            segment.identifier.name.hash_stable(hcx, hasher);
        }
        for tt in tokens.trees() {
            tt.hash_stable(hcx, hasher);
        }
        span.hash_stable(hcx, hasher);
    }
}
impl<'gcx> HashStable<StableHashingContext<'gcx>>
for tokenstream::TokenTree {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        // Discriminant distinguishes Token from Delimited.
        mem::discriminant(self).hash_stable(hcx, hasher);
        match *self {
            tokenstream::TokenTree::Token(span, ref token) => {
                span.hash_stable(hcx, hasher);
                hash_token(token, hcx, hasher);
            }
            tokenstream::TokenTree::Delimited(span, ref delimited) => {
                span.hash_stable(hcx, hasher);
                std_hash::Hash::hash(&delimited.delim, hasher);
                for sub_tt in delimited.stream().trees() {
                    sub_tt.hash_stable(hcx, hasher);
                }
            }
        }
    }
}
impl<'gcx> HashStable<StableHashingContext<'gcx>>
for tokenstream::TokenStream {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        // NOTE(review): only the trees are hashed, not the length —
        // presumably acceptable for how streams are embedded; confirm.
        for sub_tt in self.trees() {
            sub_tt.hash_stable(hcx, hasher);
        }
    }
}
/// Feed a single token into the stable hasher: the discriminant plus
/// any variant payload (operator kind, delimiter, literal, name).
fn hash_token<'gcx, W: StableHasherResult>(token: &token::Token,
                                           hcx: &mut StableHashingContext<'gcx>,
                                           hasher: &mut StableHasher<W>) {
    mem::discriminant(token).hash_stable(hcx, hasher);
    match *token {
        // Payload-free tokens contribute only their discriminant.
        token::Token::Eq |
        token::Token::Lt |
        token::Token::Le |
        token::Token::EqEq |
        token::Token::Ne |
        token::Token::Ge |
        token::Token::Gt |
        token::Token::AndAnd |
        token::Token::OrOr |
        token::Token::Not |
        token::Token::Tilde |
        token::Token::At |
        token::Token::Dot |
        token::Token::DotDot |
        token::Token::DotDotDot |
        token::Token::DotDotEq |
        token::Token::DotEq |
        token::Token::Comma |
        token::Token::Semi |
        token::Token::Colon |
        token::Token::ModSep |
        token::Token::RArrow |
        token::Token::LArrow |
        token::Token::FatArrow |
        token::Token::Pound |
        token::Token::Dollar |
        token::Token::Question |
        token::Token::Underscore |
        token::Token::Whitespace |
        token::Token::Comment |
        token::Token::Eof => {}
        token::Token::BinOp(bin_op_token) |
        token::Token::BinOpEq(bin_op_token) => {
            std_hash::Hash::hash(&bin_op_token, hasher);
        }
        token::Token::OpenDelim(delim_token) |
        token::Token::CloseDelim(delim_token) => {
            std_hash::Hash::hash(&delim_token, hasher);
        }
        token::Token::Literal(ref lit, ref opt_name) => {
            // Literal kind, its symbol payload(s), then the suffix name.
            mem::discriminant(lit).hash_stable(hcx, hasher);
            match *lit {
                token::Lit::Byte(val) |
                token::Lit::Char(val) |
                token::Lit::Integer(val) |
                token::Lit::Float(val) |
                token::Lit::Str_(val) |
                token::Lit::ByteStr(val) => val.hash_stable(hcx, hasher),
                token::Lit::StrRaw(val, n) |
                token::Lit::ByteStrRaw(val, n) => {
                    val.hash_stable(hcx, hasher);
                    n.hash_stable(hcx, hasher);
                }
            };
            opt_name.hash_stable(hcx, hasher);
        }
        token::Token::Ident(ident) |
        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
        token::Token::Interpolated(_) => {
            bug!("interpolated tokens should not be present in the HIR")
        }
        token::Token::DocComment(val) |
        token::Token::Shebang(val) => val.hash_stable(hcx, hasher),
    }
}
// Attribute meta-item shapes, hashed structurally.
impl_stable_hash_for_spanned!(::syntax::ast::NestedMetaItemKind);
impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItemKind {
    MetaItem(meta_item),
    Literal(lit)
});
impl_stable_hash_for!(struct ::syntax::ast::MetaItem {
    name,
    node,
    span
});
impl_stable_hash_for!(enum ::syntax::ast::MetaItemKind {
    Word,
    List(nested_items),
    NameValue(lit)
});
impl<'gcx> HashStable<StableHashingContext<'gcx>> for FileMap {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        // Destructure exhaustively so a new field forces a decision
        // about whether it participates in the stable hash.
        let FileMap {
            ref name,
            name_was_remapped,
            unmapped_path: _,
            crate_of_origin,
            // Do not hash the source as it is not encoded
            src: _,
            src_hash,
            external_src: _,
            start_pos,
            end_pos: _,
            ref lines,
            ref multibyte_chars,
        } = *self;
        name.hash_stable(hcx, hasher);
        name_was_remapped.hash_stable(hcx, hasher);
        // Identify the originating crate by a stable DefId.
        DefId {
            krate: CrateNum::from_u32(crate_of_origin),
            index: CRATE_DEF_INDEX,
        }.hash_stable(hcx, hasher);
        src_hash.hash_stable(hcx, hasher);
        // We only hash the relative position within this filemap
        let lines = lines.borrow();
        lines.len().hash_stable(hcx, hasher);
        for &line in lines.iter() {
            stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
        }
        // We only hash the relative position within this filemap
        let multibyte_chars = multibyte_chars.borrow();
        multibyte_chars.len().hash_stable(hcx, hasher);
        for &char_pos in multibyte_chars.iter() {
            stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
        }
    }
}
/// Position of `pos` relative to the start of its `FileMap`, so the
/// hash does not depend on where the map sits in the global source map.
fn stable_byte_pos(pos: ::syntax_pos::BytePos,
                   filemap_start: ::syntax_pos::BytePos)
                   -> u32 {
    let ::syntax_pos::BytePos(absolute) = pos;
    let ::syntax_pos::BytePos(map_start) = filemap_start;
    absolute - map_start
}
/// Relative position and byte width of a multi-byte char in its map.
fn stable_multibyte_char(mbc: ::syntax_pos::MultiByteChar,
                         filemap_start: ::syntax_pos::BytePos)
                         -> (u32, u32) {
    let relative = mbc.pos.0 - filemap_start.0;
    (relative, mbc.bytes as u32)
}
|
ByteStr(value),
|
gpd.py
|
"""Zhang Gradient Projection Debiasing Baseline Model."""
from __future__ import annotations
from typing import NamedTuple, cast
import ethicml as em
from kit import implements
from kit.torch import CrossEntropyLoss, TrainingMode
import pandas as pd
import pytorch_lightning as pl
from pytorch_lightning.utilities.types import EPOCH_OUTPUT
import torch
from torch import Tensor, nn
from torch.optim.optimizer import Optimizer
from conduit.data.structures import TernarySample
from conduit.models.base import CdtModel
from conduit.models.utils import aggregate_over_epoch, prediction, prefix_keys
from conduit.types import LRScheduler, Stage
__all__ = ["GPD"]
def compute_proj_grads(*, model: nn.Module, loss_p: Tensor, loss_a: Tensor, alpha: float) -> None:
    """Computes the adversarial-gradient projection term.

    Sets ``param.grad`` for every parameter of ``model`` to
    ``g_p - proj_{g_a}(g_p) - alpha * g_a`` — the prediction gradient
    with its component along the adversarial gradient removed, minus a
    scaled adversarial gradient.

    :param model: Model whose parameters the gradients are to be computed w.r.t.
    :param loss_p: Prediction loss.
    :param loss_a: Adversarial loss.
    :param alpha: Pre-factor for adversarial loss.
    """
    grad_p = torch.autograd.grad(loss_p, tuple(model.parameters()), retain_graph=True)
    grad_a = torch.autograd.grad(loss_a, tuple(model.parameters()), retain_graph=True)
    def _proj(a: Tensor, b: Tensor) -> Tensor:
        # Projection of a onto b; eps-clamped denominator guards b == 0.
        return b * torch.sum(a * b) / torch.sum(b * b).clamp(min=torch.finfo(b.dtype).eps)
    grad_p = [p - _proj(p, a) - alpha * a for p, a in zip(grad_p, grad_a)]
    for param, grad in zip(model.parameters(), grad_p):
        param.grad = grad
def compute_grad(*, model: nn.Module, loss: Tensor) -> None:
    """Assign ``param.grad`` for every parameter of ``model``.

    :param model: Model whose parameters the gradients are to be computed w.r.t.
    :param loss: Loss to differentiate (graph is retained for reuse).
    """
    params = tuple(model.parameters())
    grads = torch.autograd.grad(loss, params, retain_graph=True)
    for param, grad in zip(params, grads):
        param.grad = grad
class ModelOut(NamedTuple):
    # s: adversary output (subgroup logits); y: classifier output
    # (target logits) — see GPD.forward.
    s: Tensor
    y: Tensor
class GPD(CdtModel):
"""Zhang Mitigating Unwanted Biases."""
def __init__(
self,
*,
adv: nn.Module,
enc: nn.Module,
clf: nn.Module,
lr: float = 3.0e-4,
weight_decay: float = 0.0,
lr_initial_restart: int = 10,
lr_restart_mult: int = 2,
lr_sched_interval: TrainingMode = TrainingMode.epoch,
lr_sched_freq: int = 1,
) -> None:
|
    @implements(CdtModel)
    @torch.no_grad()
    def inference_step(self, batch: TernarySample, *, stage: Stage) -> dict[str, Tensor]:
        """No-grad forward pass; logs losses and returns step outputs.

        Returns flattened targets and subgroup labels plus the
        classifier logits, for aggregation in ``inference_epoch_end``.
        """
        assert isinstance(batch.x, Tensor)
        model_out = self.forward(batch.x)
        loss_adv, loss_clf, loss = self._get_losses(model_out=model_out, batch=batch)
        logging_dict = {
            "loss": loss.item(),
            "loss_adv": loss_adv.item(),
            "loss_clf": loss_clf.item(),
        }
        # Namespace the metrics by stage, e.g. "validate/loss".
        logging_dict = prefix_keys(dict_=logging_dict, prefix=str(stage), sep="/")
        self.log_dict(logging_dict)
        return {
            "targets": batch.y.view(-1),
            "subgroup_inf": batch.s.view(-1),
            "logits_y": model_out.y,
        }
    @implements(CdtModel)
    def inference_epoch_end(self, outputs: EPOCH_OUTPUT, stage: Stage) -> dict[str, float]:
        """Aggregate step outputs and compute EthicML metrics.

        The feature column handed to EthicML is random noise — only the
        predictions, targets and subgroup labels enter the metrics.
        """
        targets_all = aggregate_over_epoch(outputs=outputs, metric="targets")
        subgroup_inf_all = aggregate_over_epoch(outputs=outputs, metric="subgroup_inf")
        logits_y_all = aggregate_over_epoch(outputs=outputs, metric="logits_y")
        preds_y_all = prediction(logits_y_all)
        dt = em.DataTuple(
            x=pd.DataFrame(
                torch.rand_like(subgroup_inf_all).detach().cpu().numpy(),
                columns=["x0"],
            ),
            s=pd.DataFrame(subgroup_inf_all.detach().cpu().numpy(), columns=["s"]),
            y=pd.DataFrame(targets_all.detach().cpu().numpy(), columns=["y"]),
        )
        return em.run_metrics(
            predictions=em.Prediction(hard=pd.Series(preds_y_all.detach().cpu().numpy())),
            actual=dt,
            metrics=[em.Accuracy(), em.RenyiCorrelation(), em.Yanovich()],
            per_sens_metrics=[em.Accuracy(), em.ProbPos(), em.TPR()],
        )
def _get_losses(
self, model_out: ModelOut, *, batch: TernarySample
) -> tuple[Tensor, Tensor, Tensor]:
loss_adv = self._loss_adv_fn(model_out.s, target=batch.s)
loss_clf = self._loss_clf_fn(model_out.y, target=batch.y)
return loss_adv, loss_clf, loss_adv + loss_clf
    @implements(pl.LightningModule)
    def training_step(self, batch: TernarySample, batch_idx: int) -> None:
        """One manual-optimization step with projected gradients.

        The encoder receives the projected combination of classifier
        and adversarial gradients; adversary and classifier receive
        their own plain gradients. The LR scheduler is stepped here
        according to the configured interval.
        """
        assert isinstance(batch.x, Tensor)
        opt = cast(Optimizer, self.optimizers())
        opt.zero_grad()
        model_out: ModelOut = self.forward(batch.x)
        loss_adv, loss_clf, loss = self._get_losses(model_out=model_out, batch=batch)
        logging_dict = {
            "adv_loss": loss_adv.item(),
            "clf_loss": loss_clf.item(),
            "loss": loss.item(),
        }
        logging_dict = prefix_keys(dict_=logging_dict, prefix="train", sep="/")
        self.log_dict(logging_dict)
        # Fill param.grad manually for each sub-network, then step.
        compute_proj_grads(model=self.enc, loss_p=loss_clf, loss_a=loss_adv, alpha=1.0)
        compute_grad(model=self.adv, loss=loss_adv)
        compute_grad(model=self.clf, loss=loss_clf)
        opt.step()
        if (self.lr_sched_interval is TrainingMode.step) and (
            self.global_step % self.lr_sched_freq == 0
        ):
            sch = cast(LRScheduler, self.lr_schedulers())
            sch.step()
        if (self.lr_sched_interval is TrainingMode.epoch) and self.trainer.is_last_batch:
            sch = cast(LRScheduler, self.lr_schedulers())
            sch.step()
    @implements(nn.Module)
    def forward(self, x: Tensor) -> ModelOut:
        # The shared encoding feeds both the classifier (y) and the
        # adversary (s).
        embedding = self.enc(x)
        y_pred = self.clf(embedding)
        s_pred = self.adv(embedding)
        return ModelOut(y=y_pred, s=s_pred)
|
super().__init__(
lr=lr,
weight_decay=weight_decay,
lr_initial_restart=lr_initial_restart,
lr_restart_mult=lr_restart_mult,
lr_sched_interval=lr_sched_interval,
lr_sched_freq=lr_sched_freq,
)
self.adv = adv
self.enc = enc
self.clf = clf
self._loss_adv_fn = CrossEntropyLoss()
self._loss_clf_fn = CrossEntropyLoss()
self.automatic_optimization = False # Mark for manual optimization
|
visit_each.rs
|
#![allow(clippy::cognitive_complexity)]
use std::{mem, path::PathBuf};
use syn::visit::Visit;
use yarte_helpers::config::Config;
use yarte_parser::{Helper, Node, Partial, PartialBlock, SNode};
use super::{is_super, Context, LoweringContext};
use crate::{
error::{GError, GResult},
Struct,
};
/// Whether any node in `nodes` references a loop variable
/// (`index`, `index0` or `first`).
pub(super) fn find_loop_var(g: &LoweringContext, nodes: &[SNode]) -> GResult<bool> {
    FindEach::from(g).find(nodes)
}
// Find {{ index }} {{ index0 }} {{ first }}
/// AST walker that detects loop-variable usage inside a template.
#[derive(Clone)]
pub struct FindEach<'a> {
    // Set to true once a loop variable is seen.
    loop_var: bool,
    s: &'a Struct<'a>,
    c: &'a Config<'a>,
    ctx: Context<'a>,
    // Template file currently being traversed (for partial resolution).
    on_path: PathBuf,
    // Pending partial-block bodies with the walker state to run them in.
    block: Vec<(&'a [SNode<'a>], FindEach<'a>)>,
    // Current scope nesting depth (with/each levels).
    on_: usize,
    // Partial recursion depth, checked against the recursion limit.
    recursion: usize,
    // First error encountered; reported at the end of `find`.
    on_error: Option<GError>,
}
impl<'a> From<&LoweringContext<'a>> for FindEach<'a> {
fn from(g: &LoweringContext<'a>) -> FindEach<'a> {
FindEach {
loop_var: false,
c: g.c,
s: g.s,
ctx: g.ctx,
on_path: g.on_path.clone(),
block: g.block.iter().map(|(_, x, g)| (*x, g.into())).collect(),
on_: 0,
recursion: g.recursion,
|
}
// Break out of the enclosing loop as soon as a loop variable has been
// found or an error has been recorded.
macro_rules! breaks {
    ($_self:ident) => {
        if $_self.loop_var || $_self.on_error.is_some() {
            break;
        }
    };
}
impl<'a> FindEach<'a> {
    // TODO: #39
    /// Walk `nodes`, returning `Ok(true)` if a loop variable is used,
    /// `Ok(false)` otherwise, or the first recorded error.
    pub fn find(&mut self, nodes: &'a [SNode]) -> GResult<bool> {
        // Resolve a partial's template, visit its argument expressions
        // (skipping a leading assignment expression), and yield the
        // previous path plus the partial's nodes. Enforces the
        // recursion limit and breaks early on hit/error.
        macro_rules! partial {
            ($path:ident, $expr:ident) => {{
                self.recursion += 1;
                if self.s.recursion_limit <= self.recursion {
                    self.on_error.replace(GError::RecursionLimit);
                    break;
                }
                let p = self.c.resolve_partial(&self.on_path, $path.t());
                let nodes = self.ctx.get(&p).unwrap();
                let expr = $expr.t();
                if !expr.is_empty() {
                    let at = if let syn::Expr::Assign(_) = *expr[0] {
                        0
                    } else {
                        1
                    };
                    for e in &expr[at..] {
                        self.visit_expr(e);
                        breaks!(self);
                    }
                    if at == 1 {
                        break;
                    }
                }
                (mem::replace(&mut self.on_path, p), nodes)
            }};
        }
        for n in nodes {
            match n.t() {
                Node::Local(expr) => self.visit_local(expr.t()),
                Node::Expr(_, expr) | Node::Safe(_, expr) => self.visit_expr(expr.t()),
                #[cfg(feature = "wasm-app")]
                Node::RExpr(_, expr) => self.visit_expr(expr.t()),
                Node::Helper(h) => {
                    let h: &Helper = &*h;
                    match h {
                        Helper::If((_, first, block), else_if, els) => {
                            self.visit_expr(first.t());
                            breaks!(self);
                            self.find(block)?;
                            for (_, e, b) in else_if {
                                breaks!(self);
                                self.visit_expr(e.t());
                                breaks!(self);
                                self.find(b)?;
                            }
                            breaks!(self);
                            if let Some((_, els)) = els {
                                self.find(els)?;
                            }
                        }
                        // `with`/`each` introduce a new scope level, so
                        // `super::`-qualified accesses resolve correctly.
                        Helper::With(_, e, b) => {
                            self.visit_expr(e.t());
                            breaks!(self);
                            self.on_ += 1;
                            self.find(b)?;
                            self.on_ -= 1;
                        }
                        Helper::Unless(_, expr, block) => {
                            self.visit_expr(expr.t());
                            breaks!(self);
                            self.find(block)?;
                        }
                        Helper::Each(_, expr, block) => {
                            self.visit_expr(expr.t());
                            breaks!(self);
                            self.on_ += 1;
                            self.find(block)?;
                            self.on_ -= 1;
                        }
                        Helper::Defined(..) => {
                            // TODO: #39
                            self.on_error.replace(GError::Unimplemented);
                        }
                    }
                }
                Node::Partial(Partial(_, path, expr)) => {
                    let (parent, nodes) = partial!(path, expr);
                    self.find(nodes)?;
                    self.on_path = parent;
                    self.recursion -= 1;
                }
                Node::PartialBlock(PartialBlock(_, path, expr, block)) => {
                    let (parent, nodes) = partial!(path, expr);
                    // Make the block body available to nested
                    // Node::Block markers inside the partial.
                    self.block.push((block, self.clone()));
                    self.find(nodes)?;
                    self.on_path = parent;
                    self.block.pop();
                    self.recursion -= 1;
                }
                Node::Block(_) => {
                    if let Some((block, mut old)) = self.block.pop() {
                        // Run the parent's block body in its own state,
                        // then merge any loop-variable hit.
                        old.find(block)?;
                        self.loop_var |= old.loop_var;
                        self.block.push((block, old));
                    } else {
                        // TODO: #39
                        self.on_error.replace(GError::PartialBlockNoParent);
                    }
                }
                Node::Raw(..) | Node::Lit(..) | Node::Comment(_) => (),
                #[allow(unreachable_patterns)]
                _ => (),
            }
            breaks!(self);
        }
        if let Some(err) = self.on_error.take() {
            Err(err)
        } else {
            Ok(self.loop_var)
        }
    }
}
impl<'a> Visit<'a> for FindEach<'a> {
    fn visit_expr_path(&mut self, i: &'a syn::ExprPath) {
        // Mark the walker when the path names a loop variable.
        macro_rules! search {
            ($ident:expr) => {
                match $ident.as_ref() {
                    "index" | "index0" | "first" => self.loop_var = true,
                    _ => (),
                }
            };
        }
        if !self.loop_var {
            if i.path.segments.len() == 1 {
                // Bare identifier, e.g. `{{ index }}`.
                search!(i.path.segments[0].ident.to_string());
            } else if 0 < self.on_ {
                // `super::...` access from a nested scope: only counts
                // when it targets this walker's current level.
                if let Some((j, ident)) = is_super(&i.path.segments) {
                    if j == self.on_ {
                        search!(ident);
                    }
                }
            }
        }
    }
}
|
on_error: None,
}
}
|
redis.py
|
"""Redis transport."""
from __future__ import absolute_import, unicode_literals
import numbers
import socket
from bisect import bisect
from collections import namedtuple
from contextlib import contextmanager
from time import time
from vine import promise
from kombu.exceptions import InconsistencyError, VersionMismatch
from kombu.five import Empty, values, string_t
from kombu.log import get_logger
from kombu.utils.compat import register_after_fork
from kombu.utils.eventio import poll, READ, ERR
from kombu.utils.encoding import bytes_to_str
from kombu.utils.json import loads, dumps
from kombu.utils.objects import cached_property
from kombu.utils.scheduling import cycle_by_name
from kombu.utils.url import _parse_url
from kombu.utils.uuid import uuid
from kombu.utils.compat import _detect_environment
from . import virtual
try:
import redis
except ImportError: # pragma: no cover
redis = None # noqa
try:
from redis import sentinel
except ImportError: # pragma: no cover
sentinel = None # noqa
logger = get_logger('kombu.transport.redis')
crit, warn = logger.critical, logger.warn
# Standard Redis connection defaults.
DEFAULT_PORT = 6379
DEFAULT_DB = 0
# Discrete priority buckets (Redis has no native message priorities);
# presumably incoming priorities are mapped onto these via bisect.
PRIORITY_STEPS = [0, 3, 6, 9]
# (connection_errors, channel_errors) pair returned by
# get_redis_error_classes below.
error_classes_t = namedtuple('error_classes_t', (
    'connection_errors', 'channel_errors',
))
NO_ROUTE_ERROR = """
Cannot route message for exchange {0!r}: Table empty or key no longer exists.
Probably the key ({1!r}) has been removed from the Redis database.
"""
# This implementation may seem overly complex, but I assure you there is
# a good reason for doing it this way.
#
# Consuming from several connections enables us to emulate channels,
# which means we can have different service guarantees for individual
# channels.
#
# So we need to consume messages from multiple connections simultaneously,
# and using epoll means we don't have to do so using multiple threads.
#
# Also it means we can easily use PUBLISH/SUBSCRIBE to do fanout
# exchanges (broadcast), as an alternative to pushing messages to fanout-bound
# queues manually.
def get_redis_error_classes():
    """Return tuple of redis error classes.

    The result is an ``error_classes_t(connection_errors,
    channel_errors)`` namedtuple combining the virtual transport's
    error classes with redis-py's.
    """
    from redis import exceptions
    # This exception suddenly changed name between redis-py versions
    if hasattr(exceptions, 'InvalidData'):
        DataError = exceptions.InvalidData
    else:
        DataError = exceptions.DataError
    return error_classes_t(
        (virtual.Transport.connection_errors + (
            InconsistencyError,
            socket.error,
            IOError,
            OSError,
            exceptions.ConnectionError,
            exceptions.AuthenticationError,
            exceptions.TimeoutError)),
        (virtual.Transport.channel_errors + (
            DataError,
            exceptions.InvalidResponse,
            exceptions.ResponseError)),
    )
def get_redis_ConnectionError():
    """Return the redis ConnectionError exception class.

    Imported lazily so that merely importing this module does not
    require redis-py to be installed.
    """
    from redis import exceptions
    return exceptions.ConnectionError
class MutexHeld(Exception):
    """Raised when another party holds the lock (see :func:`Mutex`)."""
@contextmanager
def Mutex(client, name, expire):
    """The Redis lock implementation (probably shaky).

    Acquires the lock with SETNX of a unique id under key *name*, giving
    it a TTL of *expire* seconds.  Raises :exc:`MutexHeld` if somebody
    else already holds it.  On release the key is deleted only if it
    still contains our id, guarded by WATCH so we never delete a lock
    that expired and was re-acquired by someone else in the meantime.
    """
    lock_id = uuid()
    i_won = client.setnx(name, lock_id)
    try:
        if i_won:
            client.expire(name, expire)
            yield
        else:
            # make sure a lock left behind without a TTL eventually expires.
            if not client.ttl(name):
                client.expire(name, expire)
            raise MutexHeld()
    finally:
        if i_won:
            try:
                with client.pipeline(True) as pipe:
                    pipe.watch(name)
                    # only delete if it is still *our* lock.
                    if pipe.get(name) == lock_id:
                        pipe.multi()
                        pipe.delete(name)
                    pipe.execute()
                    pipe.unwatch()
            except redis.WatchError:
                # somebody modified the key concurrently; leave it alone.
                pass
def _after_fork_cleanup_channel(channel):
    # Module-level callback registered with register_after_fork (see
    # Channel.__init__): drops connection pools inherited from the parent
    # process so the child opens fresh sockets.
    channel._after_fork()
class QoS(virtual.QoS):
    """Redis Ack Emulation.

    Unacknowledged messages are mirrored into two Redis structures so
    they can be restored if the client dies before acking:

    * ``unacked_key`` -- hash of ``delivery_tag -> [message, exchange,
      routing_key]``.
    * ``unacked_index_key`` -- sorted set of delivery tags scored by
      delivery time, used to find messages whose visibility timeout
      has passed.
    """

    restore_at_shutdown = True

    def __init__(self, *args, **kwargs):
        super(QoS, self).__init__(*args, **kwargs)
        # counts calls to restore_visible() so the expensive restore
        # only runs once every `interval` calls.
        self._vrestore_count = 0

    def append(self, message, delivery_tag):
        """Record *message* as delivered but not yet acknowledged."""
        delivery = message.delivery_info
        EX, RK = delivery['exchange'], delivery['routing_key']
        # TODO: Remove this once we rely solely on Redis-py 3.0.0+
        if redis.VERSION[0] >= 3:
            # Redis-py changed the format of zadd args in v3.0.0
            zadd_args = [{delivery_tag: time()}]
        else:
            zadd_args = [time(), delivery_tag]
        with self.pipe_or_acquire() as pipe:
            pipe.zadd(self.unacked_index_key, *zadd_args) \
                .hset(self.unacked_key, delivery_tag,
                      dumps([message._raw, EX, RK])) \
                .execute()
            super(QoS, self).append(message, delivery_tag)

    def restore_unacked(self, client=None):
        """Restore all unacknowledged messages back to their queues."""
        with self.channel.conn_or_acquire(client) as client:
            for tag in self._delivered:
                self.restore_by_tag(tag, client=client)
        self._delivered.clear()

    def ack(self, delivery_tag):
        """Acknowledge message, removing it from the unacked indices."""
        self._remove_from_indices(delivery_tag).execute()
        super(QoS, self).ack(delivery_tag)

    def reject(self, delivery_tag, requeue=False):
        """Reject message, optionally requeueing it at the head."""
        if requeue:
            self.restore_by_tag(delivery_tag, leftmost=True)
        self.ack(delivery_tag)

    @contextmanager
    def pipe_or_acquire(self, pipe=None, client=None):
        # reuse an existing pipeline if given, else acquire a client
        # and open a fresh one.
        if pipe:
            yield pipe
        else:
            with self.channel.conn_or_acquire(client) as client:
                yield client.pipeline()

    def _remove_from_indices(self, delivery_tag, pipe=None):
        # queue removal of the tag from both unacked structures;
        # caller is responsible for calling .execute().
        with self.pipe_or_acquire(pipe) as pipe:
            return pipe.zrem(self.unacked_index_key, delivery_tag) \
                       .hdel(self.unacked_key, delivery_tag)

    def restore_visible(self, start=0, num=10, interval=10):
        """Restore messages whose visibility timeout has expired.

        Only actually runs once every *interval* calls, and only while
        holding the cluster-wide mutex, so that multiple consumers do
        not restore the same messages.
        """
        self._vrestore_count += 1
        if (self._vrestore_count - 1) % interval:
            return
        with self.channel.conn_or_acquire() as client:
            ceil = time() - self.visibility_timeout
            try:
                with Mutex(client, self.unacked_mutex_key,
                           self.unacked_mutex_expire):
                    env = _detect_environment()
                    if env == 'gevent':
                        ceil = time()
                    visible = client.zrevrangebyscore(
                        self.unacked_index_key, ceil, 0,
                        start=num and start, num=num, withscores=True)
                    for tag, score in visible or []:
                        self.restore_by_tag(tag, client)
            except MutexHeld:
                # somebody else is already restoring.
                pass

    def restore_by_tag(self, tag, client=None, leftmost=False):
        """Re-deliver the message stored under *tag* to its queue(s)."""
        with self.channel.conn_or_acquire(client) as client:
            with client.pipeline() as pipe:
                # HGET is queued first so its result is returned first,
                # followed by the ZREM/HDEL results from the indices.
                p, _, _ = self._remove_from_indices(
                    tag, pipe.hget(self.unacked_key, tag)).execute()
            if p:
                M, EX, RK = loads(bytes_to_str(p))  # json is unicode
                self.channel._do_restore_message(M, EX, RK, client, leftmost)

    @cached_property
    def unacked_key(self):
        return self.channel.unacked_key

    @cached_property
    def unacked_index_key(self):
        return self.channel.unacked_index_key

    @cached_property
    def unacked_mutex_key(self):
        return self.channel.unacked_mutex_key

    @cached_property
    def unacked_mutex_expire(self):
        return self.channel.unacked_mutex_expire

    @cached_property
    def visibility_timeout(self):
        return self.channel.visibility_timeout
class MultiChannelPoller(object):
    """Async I/O poller for Redis transport.

    A single instance is shared by every channel of a Transport; it
    multiplexes the BRPOP (queue) and LISTEN (pub/sub fanout) sockets of
    all registered channels through one poll loop.
    """

    eventflags = READ | ERR

    #: Set by :meth:`get` while reading from the socket.
    _in_protected_read = False

    #: Set of one-shot callbacks to call after reading from socket.
    after_read = None

    def __init__(self):
        # active channels
        self._channels = set()
        # file descriptor -> channel map.
        self._fd_to_chan = {}
        # channel -> socket map
        self._chan_to_sock = {}
        # poll implementation (epoll/kqueue/select)
        self.poller = poll()
        # one-shot callbacks called after reading from socket.
        self.after_read = set()

    def close(self):
        """Unregister all sockets and forget all channels."""
        for fd in values(self._chan_to_sock):
            try:
                self.poller.unregister(fd)
            except (KeyError, ValueError):
                pass
        self._channels.clear()
        self._fd_to_chan.clear()
        self._chan_to_sock.clear()

    def add(self, channel):
        """Start polling for *channel*."""
        self._channels.add(channel)

    def discard(self, channel):
        """Stop polling for *channel*."""
        self._channels.discard(channel)

    def _on_connection_disconnect(self, connection):
        try:
            self.poller.unregister(connection._sock)
        except (AttributeError, TypeError):
            pass

    def _register(self, channel, client, type):
        # re-register to pick up a fresh socket after reconnect.
        if (channel, client, type) in self._chan_to_sock:
            self._unregister(channel, client, type)
        if client.connection._sock is None:  # not connected yet.
            client.connection.connect()
        sock = client.connection._sock
        self._fd_to_chan[sock.fileno()] = (channel, type)
        self._chan_to_sock[(channel, client, type)] = sock
        self.poller.register(sock, self.eventflags)

    def _unregister(self, channel, client, type):
        self.poller.unregister(self._chan_to_sock[(channel, client, type)])

    def _client_registered(self, channel, client, cmd):
        # True when the client is connected and its socket is known to us.
        if getattr(client, 'connection', None) is None:
            client.connection = client.connection_pool.get_connection('_')
        return (client.connection._sock is not None and
                (channel, client, cmd) in self._chan_to_sock)

    def _register_BRPOP(self, channel):
        """Enable BRPOP mode for channel."""
        ident = channel, channel.client, 'BRPOP'
        if not self._client_registered(channel, channel.client, 'BRPOP'):
            channel._in_poll = False
            self._register(*ident)
        if not channel._in_poll:  # send BRPOP
            channel._brpop_start()

    def _register_LISTEN(self, channel):
        """Enable LISTEN mode for channel."""
        if not self._client_registered(channel, channel.subclient, 'LISTEN'):
            channel._in_listen = False
            self._register(channel, channel.subclient, 'LISTEN')
        if not channel._in_listen:
            channel._subscribe()  # send SUBSCRIBE

    def on_poll_start(self):
        # (re-)issue the pending commands for every active channel.
        for channel in self._channels:
            if channel.active_queues:  # BRPOP mode?
                if channel.qos.can_consume():
                    self._register_BRPOP(channel)
            if channel.active_fanout_queues:  # LISTEN mode?
                self._register_LISTEN(channel)

    def on_poll_init(self, poller):
        """Called when attached to an event loop; adopts its poller."""
        self.poller = poller
        for channel in self._channels:
            return channel.qos.restore_visible(
                num=channel.unacked_restore_limit,
            )

    def maybe_restore_messages(self):
        """Restore timed-out unacked messages (at most once per call)."""
        for channel in self._channels:
            if channel.active_queues:
                # only need to do this once, as they are not local to channel.
                return channel.qos.restore_visible(
                    num=channel.unacked_restore_limit,
                )

    def on_readable(self, fileno):
        # dispatch to the channel's BRPOP/LISTEN read handler.
        chan, type = self._fd_to_chan[fileno]
        if chan.qos.can_consume():
            chan.handlers[type]()

    def handle_event(self, fileno, event):
        if event & READ:
            return self.on_readable(fileno), self
        elif event & ERR:
            chan, type = self._fd_to_chan[fileno]
            chan._poll_error(type)

    def get(self, callback, timeout=None):
        """Blocking poll for the next message; raises Empty on timeout."""
        self._in_protected_read = True
        try:
            for channel in self._channels:
                if channel.active_queues:  # BRPOP mode?
                    if channel.qos.can_consume():
                        self._register_BRPOP(channel)
                if channel.active_fanout_queues:  # LISTEN mode?
                    self._register_LISTEN(channel)
            events = self.poller.poll(timeout)
            if events:
                for fileno, event in events:
                    ret = self.handle_event(fileno, event)
                    if ret:
                        return
            # - no new data, so try to restore messages.
            # - reset active redis commands.
            self.maybe_restore_messages()
            raise Empty()
        finally:
            self._in_protected_read = False
            # run (and drain) the one-shot callbacks deferred while reading,
            # e.g. delayed basic_cancel (see Channel.basic_cancel).
            while self.after_read:
                try:
                    fun = self.after_read.pop()
                except KeyError:
                    break
                else:
                    fun()

    @property
    def fds(self):
        return self._fd_to_chan
class Channel(virtual.Channel):
"""Redis Channel."""
QoS = QoS
_client = None
_subclient = None
_closing = False
supports_fanout = True
keyprefix_queue = '_kombu.binding.%s'
keyprefix_fanout = '/{db}.'
sep = '\x06\x16'
_in_poll = False
_in_listen = False
_fanout_queues = {}
ack_emulation = True
unacked_key = 'unacked'
unacked_index_key = 'unacked_index'
unacked_mutex_key = 'unacked_mutex'
unacked_mutex_expire = 300 # 5 minutes
unacked_restore_limit = None
visibility_timeout = 3600 # 1 hour
priority_steps = PRIORITY_STEPS
socket_timeout = None
socket_connect_timeout = None
socket_keepalive = None
socket_keepalive_options = None
max_connections = 10
#: Transport option to disable fanout keyprefix.
#: Can also be string, in which case it changes the default
#: prefix ('/{db}.') into to something else. The prefix must
#: include a leading slash and a trailing dot.
#:
#: Enabled by default since Kombu 4.x.
#: Disable for backwards compatibility with Kombu 3.x.
fanout_prefix = True
#: If enabled the fanout exchange will support patterns in routing
#: and binding keys (like a topic exchange but using PUB/SUB).
#:
#: Enabled by default since Kombu 4.x.
#: Disable for backwards compatibility with Kombu 3.x.
fanout_patterns = True
#: Order in which we consume from queues.
#:
#: Can be either string alias, or a cycle strategy class
#:
#: - ``round_robin``
#: (:class:`~kombu.utils.scheduling.round_robin_cycle`).
#:
#: Make sure each queue has an equal opportunity to be consumed from.
#:
#: - ``sorted``
#: (:class:`~kombu.utils.scheduling.sorted_cycle`).
#:
#: Consume from queues in alphabetical order.
#: If the first queue in the sorted list always contains messages,
#: then the rest of the queues will never be consumed from.
#:
#: - ``priority``
#: (:class:`~kombu.utils.scheduling.priority_cycle`).
#:
#: Consume from queues in original order, so that if the first
#: queue always contains messages, the rest of the queues
#: in the list will never be consumed from.
#:
#: The default is to consume from queues in round robin.
queue_order_strategy = 'round_robin'
_async_pool = None
_pool = None
from_transport_options = (
virtual.Channel.from_transport_options +
('ack_emulation',
'unacked_key',
'unacked_index_key',
'unacked_mutex_key',
'unacked_mutex_expire',
'visibility_timeout',
'unacked_restore_limit',
'fanout_prefix',
'fanout_patterns',
'socket_timeout',
'socket_connect_timeout',
'socket_keepalive',
'socket_keepalive_options',
'queue_order_strategy',
'max_connections',
'priority_steps') # <-- do not add comma here!
)
connection_class = redis.Connection if redis else None
def __init__(self, *args, **kwargs):
super_ = super(Channel, self)
super_.__init__(*args, **kwargs)
if not self.ack_emulation: # disable visibility timeout
self.QoS = virtual.QoS
self._queue_cycle = cycle_by_name(self.queue_order_strategy)()
self.Client = self._get_client()
self.ResponseError = self._get_response_error()
self.active_fanout_queues = set()
self.auto_delete_queues = set()
self._fanout_to_queue = {}
self.handlers = {'BRPOP': self._brpop_read, 'LISTEN': self._receive}
if self.fanout_prefix:
if isinstance(self.fanout_prefix, string_t):
self.keyprefix_fanout = self.fanout_prefix
else:
# previous versions did not set a fanout, so cannot enable
# by default.
self.keyprefix_fanout = ''
# Evaluate connection.
try:
self.client.ping()
except Exception:
self._disconnect_pools()
raise
self.connection.cycle.add(self) # add to channel poller.
# copy errors, in case channel closed but threads still
# are still waiting for data.
self.connection_errors = self.connection.connection_errors
if register_after_fork is not None:
register_after_fork(self, _after_fork_cleanup_channel)
def _after_fork(self):
self._disconnect_pools()
def _disconnect_pools(self):
pool = self._pool
async_pool = self._async_pool
self._async_pool = self._pool = None
if pool is not None:
pool.disconnect()
if async_pool is not None:
async_pool.disconnect()
def _on_connection_disconnect(self, connection):
if self._in_poll is connection:
self._in_poll = None
if self._in_listen is connection:
self._in_listen = None
if self.connection and self.connection.cycle:
self.connection.cycle._on_connection_disconnect(connection)
def _do_restore_message(self, payload, exchange, routing_key,
client=None, leftmost=False):
with self.conn_or_acquire(client) as client:
try:
try:
payload['headers']['redelivered'] = True
except KeyError:
pass
for queue in self._lookup(exchange, routing_key):
(client.lpush if leftmost else client.rpush)(
queue, dumps(payload),
)
except Exception:
crit('Could not restore message: %r', payload, exc_info=True)
def _restore(self, message, leftmost=False):
if not self.ack_emulation:
return super(Channel, self)._restore(message)
tag = message.delivery_tag
with self.conn_or_acquire() as client:
with client.pipeline() as pipe:
P, _ = pipe.hget(self.unacked_key, tag) \
.hdel(self.unacked_key, tag) \
.execute()
if P:
M, EX, RK = loads(bytes_to_str(P)) # json is unicode
self._do_restore_message(M, EX, RK, client, leftmost)
def _restore_at_beginning(self, message):
return self._restore(message, leftmost=True)
    def basic_consume(self, queue, *args, **kwargs):
        """Start consuming from *queue*, registering fanout-bound queues
        with the pub/sub (LISTEN) machinery when appropriate."""
        if queue in self._fanout_queues:
            exchange, _ = self._fanout_queues[queue]
            self.active_fanout_queues.add(queue)
            self._fanout_to_queue[exchange] = queue
        ret = super(Channel, self).basic_consume(queue, *args, **kwargs)

        # Update fair cycle between queues.
        #
        # We cycle between queues fairly to make sure that
        # each queue is equally likely to be consumed from,
        # so that a very busy queue will not block others.
        #
        # This works by using Redis's `BRPOP` command and
        # by rotating the most recently used queue to the
        # end of the list.  See Kombu github issue #166 for
        # more discussion of this method.
        self._update_queue_cycle()
        return ret
def basic_cancel(self, consumer_tag):
# If we are busy reading messages we may experience
# a race condition where a message is consumed after
# canceling, so we must delay this operation until reading
# is complete (Issue celery/celery#1773).
connection = self.connection
if connection:
if connection.cycle._in_protected_read:
return connection.cycle.after_read.add(
promise(self._basic_cancel, (consumer_tag,)),
)
return self._basic_cancel(consumer_tag)
def _basic_cancel(self, consumer_tag):
try:
queue = self._tag_to_queue[consumer_tag]
except KeyError:
return
try:
self.active_fanout_queues.remove(queue)
except KeyError:
pass
else:
self._unsubscribe_from(queue)
try:
exchange, _ = self._fanout_queues[queue]
self._fanout_to_queue.pop(exchange)
except KeyError:
pass
ret = super(Channel, self).basic_cancel(consumer_tag)
self._update_queue_cycle()
return ret
def _get_publish_topic(self, exchange, routing_key):
if routing_key and self.fanout_patterns:
return ''.join([self.keyprefix_fanout, exchange, '/', routing_key])
return ''.join([self.keyprefix_fanout, exchange])
def _get_subscribe_topic(self, queue):
exchange, routing_key = self._fanout_queues[queue]
return self._get_publish_topic(exchange, routing_key)
def _subscribe(self):
keys = [self._get_subscribe_topic(queue)
for queue in self.active_fanout_queues]
if not keys:
return
c = self.subclient
if c.connection._sock is None:
c.connection.connect()
self._in_listen = c.connection
c.psubscribe(keys)
def _unsubscribe_from(self, queue):
topic = self._get_subscribe_topic(queue)
c = self.subclient
if c.connection and c.connection._sock:
c.unsubscribe([topic])
def _handle_message(self, client, r):
if bytes_to_str(r[0]) == 'unsubscribe' and r[2] == 0:
client.subscribed = False
return
if bytes_to_str(r[0]) == 'pmessage':
type, pattern, channel, data = r[0], r[1], r[2], r[3]
else:
type, pattern, channel, data = r[0], None, r[1], r[2]
return {
'type': type,
'pattern': pattern,
'channel': channel,
'data': data,
}
def _receive(self):
c = self.subclient
ret = []
try:
ret.append(self._receive_one(c))
except Empty:
pass
if c.connection is not None:
while c.connection.can_read(timeout=0):
ret.append(self._receive_one(c))
return any(ret)
def _receive_one(self, c):
response = None
try:
response = c.parse_response()
except self.connection_errors:
self._in_listen = None
raise
if response is not None:
payload = self._handle_message(c, response)
if bytes_to_str(payload['type']).endswith('message'):
channel = bytes_to_str(payload['channel'])
if payload['data']:
if channel[0] == '/':
_, _, channel = channel.partition('.')
try:
message = loads(bytes_to_str(payload['data']))
except (TypeError, ValueError):
warn('Cannot process event on channel %r: %s',
channel, repr(payload)[:4096], exc_info=1)
raise Empty()
exchange = channel.split('/', 1)[0]
self.connection._deliver(
message, self._fanout_to_queue[exchange])
return True
def _brpop_start(self, timeout=1):
queues = self._queue_cycle.consume(len(self.active_queues))
if not queues:
return
keys = [self._q_for_pri(queue, pri) for pri in self.priority_steps
for queue in queues] + [timeout or 0]
self._in_poll = self.client.connection
self.client.connection.send_command('BRPOP', *keys)
def _brpop_read(self, **options):
try:
try:
dest__item = self.client.parse_response(self.client.connection,
'BRPOP',
**options)
except self.connection_errors:
# if there's a ConnectionError, disconnect so the next
# iteration will reconnect automatically.
self.client.connection.disconnect()
raise
if dest__item:
dest, item = dest__item
dest = bytes_to_str(dest).rsplit(self.sep, 1)[0]
self._queue_cycle.rotate(dest)
self.connection._deliver(loads(bytes_to_str(item)), dest)
return True
else:
raise Empty()
finally:
self._in_poll = None
def _poll_error(self, type, **options):
if type == 'LISTEN':
self.subclient.parse_response()
else:
self.client.parse_response(self.client.connection, type)
def _get(self, queue):
with self.conn_or_acquire() as client:
for pri in self.priority_steps:
item = client.rpop(self._q_for_pri(queue, pri))
if item:
return loads(bytes_to_str(item))
raise Empty()
def _size(self, queue):
with self.conn_or_acquire() as client:
with client.pipeline() as pipe:
for pri in self.priority_steps:
pipe = pipe.llen(self._q_for_pri(queue, pri))
sizes = pipe.execute()
return sum(size for size in sizes
if isinstance(size, numbers.Integral))
def _q_for_pri(self, queue, pri):
pri = self.priority(pri)
return '%s%s%s' % ((queue, self.sep, pri) if pri else (queue, '', ''))
def priority(self, n):
steps = self.priority_steps
return steps[bisect(steps, n) - 1]
def _put(self, queue, message, **kwargs):
"""Deliver message."""
pri = self._get_message_priority(message, reverse=False)
with self.conn_or_acquire() as client:
client.lpush(self._q_for_pri(queue, pri), dumps(message))
def _put_fanout(self, exchange, message, routing_key, **kwargs):
"""Deliver fanout message."""
with self.conn_or_acquire() as client:
client.publish(
self._get_publish_topic(exchange, routing_key),
dumps(message),
)
def _new_queue(self, queue, auto_delete=False, **kwargs):
if auto_delete:
self.auto_delete_queues.add(queue)
def _queue_bind(self, exchange, routing_key, pattern, queue):
if self.typeof(exchange).type == 'fanout':
# Mark exchange as fanout.
self._fanout_queues[queue] = (
exchange, routing_key.replace('#', '*'),
)
with self.conn_or_acquire() as client:
client.sadd(self.keyprefix_queue % (exchange,),
self.sep.join([routing_key or '',
pattern or '',
queue or '']))
def _delete(self, queue, exchange, routing_key, pattern, *args, **kwargs):
self.auto_delete_queues.discard(queue)
with self.conn_or_acquire(client=kwargs.get('client')) as client:
client.srem(self.keyprefix_queue % (exchange,),
self.sep.join([routing_key or '',
pattern or '',
queue or '']))
with client.pipeline() as pipe:
for pri in self.priority_steps:
pipe = pipe.delete(self._q_for_pri(queue, pri))
pipe.execute()
def _has_queue(self, queue, **kwargs):
with self.conn_or_acquire() as client:
with client.pipeline() as pipe:
for pri in self.priority_steps:
pipe = pipe.exists(self._q_for_pri(queue, pri))
return any(pipe.execute())
def get_table(self, exchange):
key = self.keyprefix_queue % exchange
with self.conn_or_acquire() as client:
values = client.smembers(key)
if not values:
raise InconsistencyError(NO_ROUTE_ERROR.format(exchange, key))
return [tuple(bytes_to_str(val).split(self.sep)) for val in values]
def _purge(self, queue):
with self.conn_or_acquire() as client:
with client.pipeline() as pipe:
for pri in self.priority_steps:
priq = self._q_for_pri(queue, pri)
pipe = pipe.llen(priq).delete(priq)
sizes = pipe.execute()
return sum(sizes[::2])
def close(self):
self._closing = True
if not self.closed:
# remove from channel poller.
self.connection.cycle.discard(self)
# delete fanout bindings
client = self.__dict__.get('client') # only if property cached
if client is not None:
for queue in self._fanout_queues:
if queue in self.auto_delete_queues:
self.queue_delete(queue, client=client)
self._disconnect_pools()
self._close_clients()
super(Channel, self).close()
def _close_clients(self):
# Close connections
for attr in 'client', 'subclient':
try:
client = self.__dict__[attr]
connection, client.connection = client.connection, None
connection.disconnect()
except (KeyError, AttributeError, self.ResponseError):
pass
    def _prepare_virtual_host(self, vhost):
        """Coerce the connection virtual host into a Redis database number.

        Accepts an integer, an empty value or ``'/'`` (both meaning
        :data:`DEFAULT_DB`), or a string such as ``'3'`` / ``'/3'``;
        raises :exc:`ValueError` for anything else.
        """
        if not isinstance(vhost, numbers.Integral):
            if not vhost or vhost == '/':
                vhost = DEFAULT_DB
            elif vhost.startswith('/'):
                vhost = vhost[1:]
            try:
                vhost = int(vhost)
            except ValueError:
                raise ValueError(
                    'Database is int between 0 and limit - 1, not {0}'.format(
                        vhost,
                    ))
        return vhost
def _filter_tcp_connparams(self, socket_keepalive=None,
socket_keepalive_options=None, **params):
return params
def _connparams(self, asynchronous=False):
conninfo = self.connection.client
connparams = {
'host': conninfo.hostname or '127.0.0.1',
'port': conninfo.port or self.connection.default_port,
'virtual_host': conninfo.virtual_host,
'password': conninfo.password,
'max_connections': self.max_connections,
'socket_timeout': self.socket_timeout,
'socket_connect_timeout': self.socket_connect_timeout,
'socket_keepalive': self.socket_keepalive,
'socket_keepalive_options': self.socket_keepalive_options,
}
if conninfo.ssl:
# Connection(ssl={}) must be a dict containing the keys:
# 'ssl_cert_reqs', 'ssl_ca_certs', 'ssl_certfile', 'ssl_keyfile'
try:
connparams.update(conninfo.ssl)
connparams['connection_class'] = redis.SSLConnection
except TypeError:
pass
host = connparams['host']
if '://' in host:
scheme, _, _, _, password, path, query = _parse_url(host)
if scheme == 'socket':
connparams = self._filter_tcp_connparams(**connparams)
connparams.update({
'connection_class': redis.UnixDomainSocketConnection,
'path': '/' + path}, **query)
connparams.pop('socket_connect_timeout', None)
connparams.pop('socket_keepalive', None)
connparams.pop('socket_keepalive_options', None)
connparams['password'] = password
connparams.pop('host', None)
connparams.pop('port', None)
connparams['db'] = self._prepare_virtual_host(
connparams.pop('virtual_host', None))
channel = self
connection_cls = (
connparams.get('connection_class') or
self.connection_class
)
if asynchronous:
class Connection(connection_cls):
def disconnect(self):
# NOTE: see celery issue #3898
# redis-py Connection shutdown()s the socket
# which causes all copies of file descriptor
# to become unusable, however close() only
# affect process-local copies of fds.
# So we just override Connection's disconnect method.
self._parser.on_disconnect()
channel._on_connection_disconnect(self)
if self._sock is None:
return
try:
# self._sock.shutdown(socket.SHUT_RDWR)
self._sock.close()
except socket.error:
pass
self._sock = None
connection_cls = Connection
connparams['connection_class'] = connection_cls
return connparams
def _create_client(self, asynchronous=False):
if asynchronous:
return self.Client(connection_pool=self.async_pool)
return self.Client(connection_pool=self.pool)
def _get_pool(self, asynchronous=False):
params = self._connparams(asynchronous=asynchronous)
self.keyprefix_fanout = self.keyprefix_fanout.format(db=params['db'])
return redis.ConnectionPool(**params)
def _get_client(self):
if redis.VERSION < (2, 10, 5):
raise VersionMismatch(
'Redis transport requires redis-py versions 2.10.5 or later. '
'You have {0.__version__}'.format(redis))
return redis.StrictRedis
@contextmanager
def conn_or_acquire(self, client=None):
if client:
yield client
else:
yield self._create_client()
@property
def pool(self):
if self._pool is None:
self._pool = self._get_pool()
return self._pool
@property
def async_pool(self):
if self._async_pool is None:
self._async_pool = self._get_pool(asynchronous=True)
return self._async_pool
@cached_property
def client(self):
"""Client used to publish messages, BRPOP etc."""
return self._create_client(asynchronous=True)
@cached_property
def subclient(self):
"""Pub/Sub connection used to consume fanout queues."""
client = self._create_client(asynchronous=True)
return client.pubsub()
def _update_queue_cycle(self):
self._queue_cycle.update(self.active_queues)
def _get_response_error(self):
from redis import exceptions
return exceptions.ResponseError
@property
def active_queues(self):
"""Set of queues being consumed from (excluding fanout queues)."""
return {queue for queue in self._active_queues
if queue not in self.active_fanout_queues}
class Transport(virtual.Transport):
    """Redis Transport."""

    Channel = Channel

    polling_interval = None  # disable sleep between unsuccessful polls.
    default_port = DEFAULT_PORT
    driver_type = 'redis'
    driver_name = 'redis'

    implements = virtual.Transport.implements.extend(
        asynchronous=True,
        exchange_type=frozenset(['direct', 'topic', 'fanout'])
    )

    def __init__(self, *args, **kwargs):
        if redis is None:
            raise ImportError('Missing redis library (pip install redis)')
        super(Transport, self).__init__(*args, **kwargs)

        # Get redis-py exceptions.
        self.connection_errors, self.channel_errors = self._get_errors()

        # All channels share the same poller.
        self.cycle = MultiChannelPoller()

    def driver_version(self):
        """Return the version string of the underlying redis-py library."""
        return redis.__version__

    def register_with_event_loop(self, connection, loop):
        """Attach the shared poller to the async event loop (hub)."""
        cycle = self.cycle
        cycle.on_poll_init(loop.poller)
        cycle_poll_start = cycle.on_poll_start
        add_reader = loop.add_reader
        on_readable = self.on_readable

        def _on_disconnect(connection):
            # stop watching the fd once its connection goes away.
            if connection._sock:
                loop.remove(connection._sock)
        cycle._on_connection_disconnect = _on_disconnect

        def on_poll_start():
            # each tick: re-issue pending commands and watch all fds.
            cycle_poll_start()
            [add_reader(fd, on_readable, fd) for fd in cycle.fds]
        loop.on_tick.add(on_poll_start)
        loop.call_repeatedly(10, cycle.maybe_restore_messages)

    def on_readable(self, fileno):
        """Handle AIO event for one of our file descriptors."""
        self.cycle.on_readable(fileno)

    def _get_errors(self):
        """Utility to import redis-py's exceptions at runtime."""
        return get_redis_error_classes()
class SentinelChannel(Channel):
    """Channel with explicit Redis Sentinel knowledge.

    Broker url is supposed to look like:

        sentinel://0.0.0.0:26379;sentinel://0.0.0.0:26380/...

    where each sentinel is separated by a `;`. Multiple sentinels are handled
    by :class:`kombu.Connection` constructor, and placed in the alternative
    list of servers to connect to in case of connection failure.

    Other arguments for the sentinel should come from the transport options
    (see :meth:`Celery.connection` which is in charge of creating the
    `Connection` object).

    You must provide at least one option in Transport options:
     * `master_name` - name of the redis group to poll
    """

    from_transport_options = Channel.from_transport_options + (
        'master_name',
        'min_other_sentinels',
        'sentinel_kwargs')

    connection_class = sentinel.SentinelManagedConnection if sentinel else None

    def _sentinel_managed_pool(self, asynchronous=False):
        """Return a connection pool whose master is resolved by Sentinel."""
        connparams = self._connparams(asynchronous)

        additional_params = connparams.copy()
        # host/port of the actual master are discovered via Sentinel.
        additional_params.pop('host', None)
        additional_params.pop('port', None)

        sentinel_inst = sentinel.Sentinel(
            [(connparams['host'], connparams['port'])],
            min_other_sentinels=getattr(self, 'min_other_sentinels', 0),
            sentinel_kwargs=getattr(self, 'sentinel_kwargs', {}),
            **additional_params)

        master_name = getattr(self, 'master_name', None)

        return sentinel_inst.master_for(
            master_name,
            self.Client,
        ).connection_pool

    def _get_pool(self, asynchronous=False):
        return self._sentinel_managed_pool(asynchronous)
class SentinelTransport(Transport):
    """Redis Sentinel Transport."""

    # Sentinel servers listen on 26379 by default (plain redis uses 6379).
    default_port = 26379
    Channel = SentinelChannel
|
append
|
product.service.ts
|
import { Injectable, NotFoundException } from '@nestjs/common';
import { Product } from './product.model';
@Injectable()
export class ProductsService {
private products: Product[] = [];
insertProduct(title: string, desc: string, price: number): string {
const prodId = Math.random().toString();
const newProduct = new Product(prodId, title, desc, price);
this.products.push(newProduct);
return prodId;
}
getProducts() {
return [...this.products];
}
getSingleProduct(productId: string) {
const product = this.findProduct(productId);
return { ...product };
}
updateProduct(
productId: string,
title: string,
description: string,
price: number,
) {
const [product, index] = this.findProduct(productId);
const updateProduct = { ...product };
if (title) {
updateProduct.title = title;
}
if (description) {
updateProduct.description = description;
}
if (price) {
updateProduct.price = price;
}
this.products[index] = updateProduct;
}
deleteProduct(prodId: string) {
const index = this.findProduct(prodId)[1];
this.products.splice(index, 1);
|
private findProduct(id: string): [Product, number] {
const productIndex = this.products.findIndex((prod) => prod.id == id);
const product = this.products[productIndex];
if (!product) {
throw new NotFoundException('Could not find Product');
}
return [product, productIndex];
}
}
|
}
|
index.d.ts
|
import { Observable } from "rxjs";
|
private pool;
constructor(config?: string | ConnectionConfig);
query(sql: string, values?: any): Observable<any>;
transaction(queries: ((rxpg: RxPg, prevResult: any) => Observable<any>)[]): Observable<any>;
}
|
import { ConnectionConfig } from "pg";
export class RxPg {
|
list_tasks.go
|
package port
import (
"context"
"github.com/kzmake/micro-kit/service/task-service/domain/aggregate"
)
// ListTasksInputData はタスク一覧取得のための InputData です。
// DTO (Data Transfer Object) として InputData を生成します。
type ListTasksInputData struct {
}
// ListTasksOutputData はタスク一覧取得のための OutputData です。
|
}
// ListTasks はタスク一覧取得のための Port です。
type ListTasks interface {
Handle(ctx context.Context, in *ListTasksInputData) *ListTasksOutputData
}
|
// DPO (Data Payload Object) として OutputData を生成します。
type ListTasksOutputData struct {
Tasks []*aggregate.Task
Error error
|
test_v1beta1_self_subject_rules_review.py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta1_self_subject_rules_review import V1beta1SelfSubjectRulesReview
class TestV1beta1SelfSubjectRulesReview(unittest.TestCase):
    """V1beta1SelfSubjectRulesReview unit test stubs (swagger-generated)."""

    def setUp(self):
        # no fixtures required for this generated stub.
        pass

    def tearDown(self):
        pass

    def testV1beta1SelfSubjectRulesReview(self):
        """Test V1beta1SelfSubjectRulesReview."""
        # FIXME: construct object with mandatory attributes with example values
        #model = kubernetes.client.models.v1beta1_self_subject_rules_review.V1beta1SelfSubjectRulesReview()
        pass
if __name__ == '__main__':
unittest.main()
| |
puller.go
|
package dblog
import (
"context"
"errors"
"github.com/rueian/pgcapture/pkg/pb"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
type DumpInfoPuller interface {
Pull(ctx context.Context, uri string) chan DumpInfo
}
type DumpInfo struct {
Resp *pb.DumpInfoResponse
client pb.DBLogController_PullDumpInfoClient
}
// Ack reports the dump back on its originating stream. A non-empty
// requeueReason is forwarded in the request. Only the first call sends;
// the stream reference is cleared so later calls are no-ops returning nil.
func (i *DumpInfo) Ack(requeueReason string) error {
	if i.client == nil {
		return nil
	}
	err := i.client.Send(&pb.DumpInfoRequest{RequeueReason: requeueReason})
	i.client = nil
	return err
}
// GRPCDumpInfoPuller implements DumpInfoPuller on top of a gRPC
// DBLogControllerClient.
type GRPCDumpInfoPuller struct {
	Client pb.DBLogControllerClient
}
// Pull returns a buffered channel of DumpInfo for uri. A goroutine keeps
// re-establishing the underlying stream after non-cancellation errors and
// closes the channel once the context is canceled.
func (p *GRPCDumpInfoPuller) Pull(ctx context.Context, uri string) chan DumpInfo {
	resp := make(chan DumpInfo, 1)
	go func() {
		defer close(resp)
		for {
			err := p.pulling(ctx, uri, resp)
			// Exit only on cancellation; any other error falls through and retries.
			if e, ok := status.FromError(err); (ok && e.Code() == codes.Canceled) || errors.Is(err, context.Canceled) {
				return
			}
		}
	}()
	return resp
}
func (p *GRPCDumpInfoPuller) pulling(ctx context.Context, uri string, resp chan DumpInfo) error {
client, err := p.Client.PullDumpInfo(ctx)
if err != nil {
return err
}
if err = client.Send(&pb.DumpInfoRequest{Uri: uri}); err != nil {
return err
}
for {
msg, err := client.Recv()
if err != nil {
return err
}
|
Resp: msg,
client: client,
}
}
}
|
resp <- DumpInfo{
|
syntaxAnalysis.conditionals.test.ts
|
import { symbols } from '../../symbols';
import { syntaxAnalysis } from '../syntaxAnalysis';
describe('syntaxAnalysis', () => {
it('should identify if, else and conditions', () => {
const tokens = [
symbols.program,
symbols.identificador,
symbols[';'],
symbols.var,
symbols.identificador,
symbols[':'],
symbols.integer,
symbols[';'],
symbols.identificador,
symbols[':'],
symbols.integer,
symbols[';'],
symbols.identificador,
symbols[':'],
symbols.integer,
symbols[';'],
symbols.begin,
symbols.identificador,
symbols[':='],
symbols.inteiro,
symbols[';'],
symbols.identificador,
symbols[':='],
symbols.inteiro,
symbols[';'],
symbols.identificador,
symbols[':='],
symbols.inteiro,
symbols[';'],
symbols.if,
symbols.identificador,
symbols['>'],
symbols.inteiro,
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['-'],
symbols.inteiro,
symbols.else,
symbols.if,
symbols.identificador,
symbols['>='],
symbols.inteiro,
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['-'],
symbols.inteiro,
symbols.else,
symbols.if,
symbols.identificador,
symbols['<'],
symbols.inteiro,
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['-'],
symbols.inteiro,
symbols.else,
symbols.if,
symbols.identificador,
symbols['<='],
symbols.inteiro,
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['-'],
symbols.inteiro,
symbols[';'],
symbols.if,
symbols['('],
symbols.identificador,
symbols['>'],
symbols.inteiro,
symbols[')'],
symbols.and,
symbols['('],
symbols.identificador,
symbols['<'],
symbols.inteiro,
symbols[')'],
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['+'],
symbols.inteiro,
symbols[';'],
symbols.if,
symbols['('],
symbols.identificador,
symbols['>='],
symbols.inteiro,
symbols[')'],
symbols.and,
symbols['('],
symbols.identificador,
symbols['<='],
symbols.inteiro,
symbols[')'],
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['+'],
symbols.inteiro,
symbols[';'],
symbols.if,
symbols['('],
symbols.identificador,
symbols['>'],
symbols.inteiro,
symbols[')'],
symbols.or,
symbols['('],
symbols.identificador,
symbols['<'],
symbols.inteiro,
symbols[')'],
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['+'],
symbols.inteiro,
symbols[';'],
symbols.if,
symbols['('],
symbols.identificador,
symbols['>='],
symbols.inteiro,
symbols[')'],
symbols.or,
symbols.identificador,
symbols['<='],
symbols.inteiro,
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
|
symbols.inteiro,
symbols[';'],
symbols.if,
symbols['('],
symbols.identificador,
symbols['='],
symbols.identificador,
symbols[')'],
symbols.or,
symbols.identificador,
symbols['='],
symbols.identificador,
symbols.then,
symbols.identificador,
symbols[':='],
symbols['-'],
symbols.identificador,
symbols['+'],
symbols.identificador,
symbols[';'],
symbols.if,
symbols.identificador,
symbols['='],
symbols.identificador,
symbols.and,
symbols['('],
symbols.not,
symbols['('],
symbols.identificador,
symbols['='],
symbols.identificador,
symbols[')'],
symbols[')'],
symbols.then,
symbols.identificador,
symbols[':='],
symbols.identificador,
symbols['-'],
symbols.identificador,
symbols[';'],
symbols.if,
symbols['('],
symbols.not,
symbols['('],
symbols.identificador,
symbols['<>'],
symbols.identificador,
symbols[')'],
symbols[')'],
symbols.or,
symbols.identificador,
symbols['<>'],
symbols.identificador,
symbols.then,
symbols.identificador,
symbols[':='],
symbols['-'],
symbols.identificador,
symbols['-'],
symbols.identificador,
symbols[';'],
symbols.end,
symbols['.']
].reverse();
expect(() => syntaxAnalysis(tokens)).not.toThrow();
expect(tokens).toEqual([]);
});
});
|
symbols['+'],
|
entity_type_lib.py
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes and methods for working with entity types in the ontology."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
import typing
from typing import Optional, Tuple
from yamlformat.validator import base_lib
from yamlformat.validator import config_folder_lib
from yamlformat.validator import field_lib
from yamlformat.validator import findings_lib
# Legal entity type names: a leading letter, then alphanumeric runs joined by
# single underscores (no leading/trailing or doubled underscores).
ENTITY_TYPE_NAME_REGEX = re.compile(r'^[a-zA-Z][a-zA-Z0-9]*(?:_[a-zA-Z0-9]+)*$')
# Splits a lowercase field name into its base name (group 1) and its numeric
# increment suffix including the underscore(s) (group 2),
# e.g. 'status_1' -> ('status', '_1').
FIELD_INCREMENT_STRIPPER_REGEX = re.compile(
    r'(^[a-z][a-z0-9]*(?:_[a-z][a-z0-9]*)*)((?:_[0-9]+)+)$')
# The (namespace, field, increment) parts of a qualified field reference.
FieldParts = typing.NamedTuple('FieldParts',
                               [('namespace', str), ('field', str),
                                ('increment', str)])
# A FieldParts plus whether the field is optional on its type.
OptWrapper = typing.NamedTuple('OptWrapper', [('field', FieldParts),
                                              ('optional', bool)])
# The (namespace, typename) parts of a qualified entity type name.
TypeParts = typing.NamedTuple('TypeParts', [('namespace', str),
                                            ('typename', str)])
# Locates the entity type registered for a database uid.
EntityIdByEntry = typing.NamedTuple('EntityIdByEntry', [('namespace', str),
                                                        ('typename', str)])
def SeparateFieldNamespace(qualified_field_name: str) -> Tuple[str, str]:
  """Returns the namespace and field name of a qualified field as a tuple.

  Args:
    qualified_field_name: a qualified field string like `HVAC/run_status`

  Returns:
    A (namespace, field name) tuple, e.g. `('HVAC', 'run_status')`. A
    leading '/' yields the global namespace `''`.

  Raises:
    TypeError: if the field has no namespace separator at all.
    ValueError: if the field contains more than one separator.
  """
  fqf_parsed = qualified_field_name.split('/')
  if len(fqf_parsed) == 1:
    raise TypeError('Type improperly formatted, a namespace is missing: ',
                    fqf_parsed)
  if len(fqf_parsed) > 2:
    raise ValueError('Type improperly formatted, too many separators: ',
                     fqf_parsed)
  return fqf_parsed[0], fqf_parsed[1]
def SeparateFieldIncrement(field_name) -> Tuple[str, str]:
  """Splits a field name into its base name and trailing increment.

  For example: zone_occupancy_status_1 -> ('zone_occupancy_status', '_1')

  Args:
    field_name: the field name to parse.

  Returns:
    A tuple of strings: the standard field name and its increment suffix
    (the empty string when the field carries no increment).
  """
  match = FIELD_INCREMENT_STRIPPER_REGEX.match(field_name)
  if match:
    return match.group(1), match.group(2)
  return field_name, ''
class EntityTypeUniverse(findings_lib.Findings):
  """Helper class to represent the defined universe of EntityTypes.

  Only contains valid EntityTypes.

  Attributes:
    namespace_folder_map: a map of namespace names to EntityTypeFolders.
    type_namespaces_map: a map of type names to TypeNamespaces.
    type_ids_map: maps type IDs to entity types. Contains all valid types w/IDs.
  """

  def __init__(self, entity_type_folders):
    """Init.

    Args:
      entity_type_folders: list of EntityTypeFolder objects parsed from files.
    """
    super(EntityTypeUniverse, self).__init__()
    self.namespace_folder_map = {}
    self.type_namespaces_map = {}
    self.type_ids_map = {}
    self._BuildNamespaceFolderMap(entity_type_folders)
    self._BuildTypeMaps(
        [folder.local_namespace for folder in entity_type_folders])

  def GetEntityType(self, namespace_name, typename):
    """Finds entity_type by namespace and typename and returns it or None."""
    if namespace_name not in self.type_namespaces_map:
      return None
    return self.type_namespaces_map[namespace_name].GetType(typename)

  def GetNamespace(self, namespace_name):
    """Finds namespace in the universe by name and returns it or None."""
    return self.type_namespaces_map.get(namespace_name, None)

  def GetNamespaces(self):
    """Get the entity type namespace objects in this universe.

    Returns:
      A list of EntityTypeNamespace objects
    """
    return list(self.type_namespaces_map.values())

  def _GetDynamicFindings(self, filter_old_warnings):
    # Aggregates findings from every namespace folder in the universe.
    findings = []
    for folder in self.namespace_folder_map.values():
      findings += folder.GetFindings(filter_old_warnings)
    return findings

  def _BuildTypeMaps(self, type_namespaces):
    """Creates a dict mapping namespace strings to TypeNamespace objects.

    Sets the self.type_namespaces_map attribute of the class.

    Args:
      type_namespaces: a list of TypeNamespace objects.

    Raises:
      RuntimeError: if assumptions about internal data structures are violated.
    """
    for type_namespace in type_namespaces:
      self.type_namespaces_map[type_namespace.namespace] = type_namespace
      for entity_type in type_namespace.valid_types_map.values():
        if entity_type.uid:
          if entity_type.uid in self.type_ids_map:
            # Another type already claimed this uid; flag BOTH types with a
            # DuplicateIdsError so either one can be corrected.
            dup_id_entry = self.type_ids_map[entity_type.uid]
            dup_id_type = self.GetEntityType(dup_id_entry.namespace,
                                             dup_id_entry.typename)
            if dup_id_type is None:
              raise RuntimeError('Duplicate type with uid ' + entity_type.uid +
                                 ' should always be mapped')
            entity_type.AddFinding(
                findings_lib.DuplicateIdsError(type_namespace.namespace,
                                               entity_type, dup_id_type))
            dup_id_type.AddFinding(
                findings_lib.DuplicateIdsError(dup_id_entry.namespace,
                                               dup_id_type, entity_type))
          # Last writer wins: the map always points at the most recently
          # processed type for a uid.
          self.type_ids_map[entity_type.uid] = EntityIdByEntry(
              namespace=type_namespace.namespace, typename=entity_type.typename)

  def _BuildNamespaceFolderMap(self, type_folders):
    """Creates a dict mapping namespace strings to EntityTypeFolder objects.

    Sets the self.namespace_folder_map attribute of the class.

    Args:
      type_folders: a list of EntityTypeFolder objects.
    """
    for folder in type_folders:
      self.namespace_folder_map[folder.local_namespace.namespace] = folder
class EntityTypeFolder(config_folder_lib.ConfigFolder):
  """Class representing a namespace folder of entity types.

  Class fully validates all entity types defined within the namespace folder,
  collects issues found, and stores all valid entity types.

  Attributes:
    local_namespace: TypeNamespace object representing this namespace.
  """

  def __init__(self, folderpath, field_universe=None):
    """Init.

    Args:
      folderpath: required string with full path to the folder containing entity
        type files. Path should be relative to google3/ and have no leading or
        trailing /.
      field_universe: optional FieldsUniverse object.
    """
    super(EntityTypeFolder, self).__init__(folderpath,
                                           base_lib.ComponentType.ENTITY_TYPE)
    self.local_namespace = TypeNamespace(self._namespace_name, field_universe)

  def Finalize(self):
    """Call to complete entity creation after all types are added."""
    self.local_namespace.QualifyParentNames()

  def _AddFromConfigHelper(self, document, context):
    # Each top-level key in the parsed document is one entity type definition.
    for type_name in document:
      new_type = self._ConstructType(type_name, document[type_name],
                                     context.filepath)
      self._AddType(new_type)

  def _ConstructField(self, local_field_names, optional, output_array):
    # Parses each qualified field string into an OptWrapper appended to
    # output_array; `optional` marks whether these came from 'opt_uses'.
    for qualified_field_name in local_field_names:
      field_ns, raw_field_name = field_lib.SplitFieldName(qualified_field_name)
      std_field_name, increment = SeparateFieldIncrement(raw_field_name)
      # Field will look local if undefined, but we'll catch the error later
      # Because we do explict existence checks and it will fail
      # TODO(berkoben) refactor so validation happens in an order that
      # prevents this logic lint
      field_ns = self.local_namespace.GetQualifiedNamespace(
          field_ns, std_field_name)
      output_array.append(
          OptWrapper(
              field=FieldParts(
                  namespace=field_ns, field=std_field_name,
                  increment=increment),
              optional=optional))

  def _ConstructType(self, type_name, type_contents, filepath):
    """Reads a entity type config block and generates an EntityType object."""
    description = ''
    parents = None
    local_field_names = None
    opt_local_field_names = None
    is_abstract = False
    is_canonical = False
    uid = None
    # Keys this parser understands; anything else is flagged below.
    expected_keys = set([
        'description', 'implements', 'uses', 'opt_uses', 'is_abstract', 'id',
        'is_canonical'
    ])
    if 'description' in type_contents:
      description = type_contents['description']
    if 'implements' in type_contents:
      parents = type_contents['implements']
    if 'uses' in type_contents:
      local_field_names = type_contents['uses']
    if 'opt_uses' in type_contents:
      opt_local_field_names = type_contents['opt_uses']
    if 'is_abstract' in type_contents:
      is_abstract = type_contents['is_abstract']
    if 'is_canonical' in type_contents:
      is_canonical = type_contents['is_canonical']
    if 'id' in type_contents:
      uid = type_contents['id']
    # Generate tuples to represent each field
    fq_lfn = []
    if local_field_names:
      self._ConstructField(local_field_names, False, fq_lfn)
    if opt_local_field_names:
      self._ConstructField(opt_local_field_names, True, fq_lfn)
    entity_type = EntityType(
        filepath=filepath,
        typename=type_name,
        description=description,
        parents=parents,
        local_field_tuples=fq_lfn,
        is_abstract=is_abstract,
        inherited_fields_expanded=False,
        is_canonical=is_canonical,
        uid=uid,
        namespace=self.local_namespace)
    # Add errors to type if there's anything extra in the block. We add to the
    # entity type because an extra key here is likely a typo in a real key name
    # that would result in information being lost from the type.
    for key in type_contents:
      if key not in expected_keys:
        entity_type.AddFinding(
            findings_lib.UnrecognizedKeyError(key, entity_type.file_context))
    return entity_type

  def _AddType(self, entity_type):
    """Adds entity_type if it is fully valid.

    If formatting is correct, continues on to field validation.
    Records all findings in object.

    Args:
      entity_type: EntityType object.

    Returns:
      True if the entity type was successfully validated and added. False
      otherwise.
    """
    if not entity_type.IsValid():
      self.AddFindings(entity_type.GetFindings())
      return False
    return self.local_namespace.InsertType(entity_type)
class TypeNamespace(findings_lib.Findings):
"""Class representing a namespace of entity types.
Attributes:
namespace: string
valid_types_map: Dict mapping typename strings to EntityType objects.
"""
  def __init__(self, namespace, field_universe=None):
    """Init.

    Args:
      namespace: required string name of this namespace.
      field_universe: optional universe used to validate declared fields.
    """
    super(TypeNamespace, self).__init__()
    self.namespace = namespace
    self._field_universe = field_universe
    # Maps typename strings to validated EntityType objects.
    self.valid_types_map = {}
    # Becomes True after QualifyParentNames(); no types may be added after.
    self._parents_qualified = False
  def _GetDynamicFindings(self, filter_old_warnings):
    """Aggregates findings from every valid type in this namespace."""
    findings = []
    for entity_type in self.valid_types_map.values():
      findings += entity_type.GetFindings(filter_old_warnings)
    return findings
  def GetType(self, typename):
    """Returns the valid EntityType registered for typename, or None."""
    return self.valid_types_map.get(typename, None)
  def InsertType(self, entity_type):
    """Validate that declared fields are defined.

    Adds type if valid and unique.

    Findings for non-validated fields are applied to this TypeNamespace.

    Args:
      entity_type: entity to attempt to add.

    Returns:
      True if entity was added successfully.

    Raises:
      RuntimeError: if this is called after qualifying parent names
    """
    if self._parents_qualified:
      raise RuntimeError('Cannot add types after Qualifying parents')
    if self._ValidateFields(entity_type):
      typename = entity_type.typename
      mapped_entity_type = self.valid_types_map.get(typename)
      if mapped_entity_type is None:
        self.valid_types_map[typename] = entity_type
        return True
      # entity_type is a duplicate type
      self.AddFinding(
          findings_lib.DuplicateEntityTypeDefinitionError(
              self, entity_type, mapped_entity_type.file_context))
      return False
    # Field validation failed; findings were recorded by _ValidateField.
    return False
def GetQualifiedNamespace(self, field_ns, field_name):
|
  def _BuildQualifiedParentTuple(self, parent_name):
    """Creates the two-part parent tuple with a fully-qualified namespace.

    An unqualified parent resolves to this namespace when a type of that
    name exists locally; otherwise it is assumed to be global.

    Args:
      parent_name: string as specified in the config file.

    Returns:
      A TypeParts tuple representing this parent.
    """
    namespace_name = self.namespace
    split = parent_name.split('/')
    if len(split) != 2:
      if not self.GetType(parent_name):
        # parent is in the global namespace
        namespace_name = ''
    else:
      namespace_name = split[0]
      parent_name = split[1]
    return TypeParts(namespace=namespace_name, typename=parent_name)
  def QualifyParentNames(self):
    """Sets parents attribute of this namespace with fully qualified names."""
    # Idempotent: qualifying is performed at most once per namespace.
    if self._parents_qualified:
      return
    for entity_type in self.valid_types_map.values():
      fq_tuplemap = {}
      for parent in entity_type.unqualified_parent_names:
        fq_tuple = self._BuildQualifiedParentTuple(parent)
        fq_name = '{0}/{1}'.format(fq_tuple.namespace, fq_tuple.typename)
        fq_tuplemap[fq_name] = fq_tuple
      entity_type.parent_names = fq_tuplemap
    self._parents_qualified = True
def IsLocalField(self, field_name):
"""Returns true if this unqualified field is defined in the namespace.
Args:
field_name: an unqualified field name with no leading '/'
"""
if not self._field_universe:
return False
return self._field_universe.IsFieldDefined(field_name, self.namespace)
def _ValidateFields(self, entity):
"""Validates that all fields declared by entity are defined."""
# if field_universe is not defined just return true
if not self._field_universe:
return True
valid = True
for field_tuple in entity.local_field_names.values():
if not self._ValidateField(field_tuple.field, entity):
valid = False
return valid
  def _ValidateField(self, field_tuple, entity):
    """Validates that field declared by entity is defined.

    Field formatting has already been validated.
    Findings are saved on the TypeNamespace.

    Args:
      field_tuple: tuple representing a fully qualified field
      entity: EntityType

    Returns:
      True if field is defined.
    """
    if not self._field_universe.IsFieldDefined(field_tuple.field,
                                               field_tuple.namespace):
      self.AddFinding(
          findings_lib.UndefinedFieldError(entity, field_tuple.field))
      return False
    return True
def BuildQualifiedField(opt_tuple):
  """Renders an OptWrapper's field as a fully qualified field string.

  Args:
    opt_tuple: OptWrapper whose field parts are joined.

  Returns:
    A string of the form '<namespace>/<field><increment>'.
  """
  parts = opt_tuple.field
  return '{0}/{1}{2}'.format(parts.namespace, parts.field, parts.increment)
class EntityType(findings_lib.Findings):
  """Creates an EntityType object from a set of values describing the type.

  Attributes:
    file_context: FileContext object containing file info.
    typename: string.
    description: string.
    parent_names: a list of parent typename strings.
    local_field_names: the local set of standard field names
    inherited_field_names: the set of inherited field names. Is always assigned
      to an empty set at init, to be expanded later.
    inherited_fields_expanded: boolean.
    is_canonical: boolean indicating if this is a curated canonical type.
    uid: the database ID string of this type if uploaded
    namespace: a reference to the namespace object the entity belongs to
  """

  def __init__(self,
               begin_line_number=0,
               filepath='',
               typename='',
               description='',
               parents=None,
               local_field_tuples=None,
               is_abstract=False,
               inherited_fields_expanded=False,
               is_canonical=False,
               uid=None,
               namespace=None):
    """Init.

    Args:
      begin_line_number: int. Starting line number for the entity type
        definition.
      filepath: string. google3 path to the file defining the type.
      typename: required string.
      description: required string.
      parents: list of parent typename strings.
      local_field_tuples: list of OptWrapper tuples
      is_abstract: boolean indicating if this is an abstract type.
      inherited_fields_expanded: boolean. Should be false at init.
      is_canonical: boolean indicating if this is a curated canonical type.
      uid: the database ID string of this type if uploaded
      namespace: a reference to the namespace object the entity belongs to
    """
    super(EntityType, self).__init__()
    self.file_context = findings_lib.FileContext(
        begin_line_number=begin_line_number, filepath=filepath)
    self.typename = typename
    self.description = description
    self.namespace = namespace
    # Maps fully qualified field strings to their OptWrapper tuples.
    self.local_field_names = {}
    local_field_names = []
    if local_field_tuples:
      local_field_names = [
          BuildQualifiedField(opt_parts) for opt_parts in local_field_tuples
      ]
      for i, lfn in enumerate(local_field_names):
        self.local_field_names[lfn] = local_field_tuples[i]
    self.inherited_field_names = {}
    self.inherited_fields_expanded = inherited_fields_expanded
    if parents is None:
      parents = []
    self.parent_names = None
    self.parent_name_tuples = None
    self.unqualified_parent_names = parents
    # Memoization slots, filled lazily after field expansion.
    self._all_fields = None
    self._has_optional_fields = None
    self.is_abstract = is_abstract
    self.is_canonical = is_canonical
    self.uid = uid
    # TODO(berkoben) update this method to use tuples if possible
    self._ValidateType(local_field_names)

  def HasOptionalFields(self, run_unsafe=False):
    """Returns True if any field on the expanded type is optional.

    Args:
      run_unsafe: set true to run against a type before fields are fully
        expanded. Running in this mode does not memoize the result.

    Raises:
      RuntimeError: if fields have not been expanded and run_unsafe is False.
    """
    if not (self.inherited_fields_expanded or run_unsafe):
      raise RuntimeError('Type has not been expanded')
    if self._has_optional_fields is not None:
      return self._has_optional_fields
    # BUG FIX: run_unsafe was previously not forwarded, so calling with
    # run_unsafe=True on an unexpanded type raised the RuntimeError the
    # flag is meant to bypass.
    fields = self.GetAllFields(run_unsafe)
    has_optional = False
    for field in fields.values():
      if field.optional:
        has_optional = True
        break
    if run_unsafe and not self.inherited_fields_expanded:
      # Mirror GetAllFields: never memoize a pre-expansion answer.
      return has_optional
    self._has_optional_fields = has_optional
    return self._has_optional_fields

  def GetAllFields(self, run_unsafe=False):
    """Returns the expanded set of fields for this type.

    Args:
      run_unsafe: set true to run against a type before fields are fully
        expanded. Running in this mode does not memoize the result.

    Returns:
      A dictionary of fully qualified strings representing fields in the type to
      OptWrapper tuples representing the contents of the field.

    Raises:
      RuntimeError: if fields have not yet been expanded.
    """
    if not (self.inherited_fields_expanded or run_unsafe):
      raise RuntimeError('Type {0} has not been expanded'.format(self.typename))
    if self._all_fields is None:
      tmp = self.local_field_names.copy()
      tmp.update(self.inherited_field_names)
      if run_unsafe:
        return tmp
      self._all_fields = tmp
    return self._all_fields

  def HasFieldAsWritten(self,
                        fieldname_as_written: str,
                        run_unsafe: bool = False) -> bool:
    """Returns true if a valid config file value maps to a field in the type.

    Accepts a field name as written in a configuration file
    referencing this type. The method applies context-aware namespace
    omission (i.e. referencing a field without its namespace) to identify the
    field regardless of the namespace and syntax variation.

    Note: to minimize redundancy, this method simply wraps
    `GetFieldFromConfigText()`. If your application also needs the `Field` use
    that method instead to eliminate redundant processing.

    Args:
      fieldname_as_written: string verbatim from a building or ontology config
      run_unsafe: set true to allow calls before parent type fields are expanded

    Returns:
      True if the Field is defined on the type. False otherwise.
    """
    return self.GetFieldFromConfigText(fieldname_as_written,
                                       run_unsafe) is not None

  def GetFieldFromConfigText(self,
                             fieldname_as_written: str,
                             run_unsafe: bool = False) -> Optional[OptWrapper]:
    """Returns `OptWrapper` provided string validates against the entity.

    Accepts a field name as written in a configuration file
    referencing this type. The method applies all shorthanding rules to identify
    the field regardless of the namespace and syntax variation.

    Args:
      fieldname_as_written: string verbatim from a building or ontology config
      run_unsafe: set true to allow calls before parent type fields are expanded

    Returns:
      `OptWrapper` if field is present, None otherwise
    """
    try:
      # Check the field as if it's fully qualified.
      return self.GetField(fieldname_as_written, run_unsafe)
    except TypeError:
      pass
    # Field is unqualified so it is either global or type-namespace-local
    # Check for a locally defined field first using type's namespace
    field = self._GetField(
        self.namespace.namespace + '/' + fieldname_as_written, run_unsafe)
    if not field:
      # Check field as if it's in the global namespace
      field = self._GetField('/' + fieldname_as_written, run_unsafe)
    return field

  def HasField(self,
               fully_qualified_fieldname: str,
               run_unsafe: bool = False) -> bool:
    """Returns True if field string validates against the entity's fields.

    Args:
      fully_qualified_fieldname: a fully qualified names for example:
        "HVAC/run_status_1".
      run_unsafe: set true to run against a type before fields are fully
        expanded. Running in this mode does not memoize the result.

    Throws:
      TypeError: if the field is not fully qualified
    """
    return self.GetField(fully_qualified_fieldname, run_unsafe) is not None

  def GetField(self,
               fully_qualified_fieldname: str,
               run_unsafe: bool = False) -> Optional[OptWrapper]:
    """Returns `OptWrapper` if field string validates against the entity.

    Args:
      fully_qualified_fieldname: a fully qualified names for example:
        "HVAC/run_status_1".
      run_unsafe: set true to run against a type before fields are fully
        expanded. Running in this mode does not memoize the result.

    Returns:
      `OptWrapper` if field is present, None otherwise

    Throws:
      TypeError: if the field is not fully qualified
    """
    # Throws an error in the case that this isn't a fully qualified field
    _, _ = SeparateFieldNamespace(fully_qualified_fieldname)
    return self._GetField(fully_qualified_fieldname, run_unsafe)

  def _GetField(self,
                fully_qualified_fieldname: str,
                run_unsafe: bool = False) -> Optional[OptWrapper]:
    # Direct lookup with no qualification checks; see GetField.
    return self.GetAllFields(run_unsafe).get(fully_qualified_fieldname)

  def _ValidateType(self, local_field_names):
    """Validates that the entity type is formatted correctly.

    Checks for formatting and duplicate fields and parents.
    Records any errors found.

    Args:
      local_field_names: list of local field names for the type.
    """
    # Make sure the typename is non-empty.
    if not self.typename:
      self.AddFinding(findings_lib.MissingTypenameError(self))
    elif not isinstance(self.typename, str):
      self.AddFinding(
          findings_lib.IllegalKeyTypeError(self.typename, self.file_context))
    elif not ENTITY_TYPE_NAME_REGEX.match(self.typename):
      self.AddFinding(
          findings_lib.InvalidTypenameError(self.typename, self.file_context))
    # Make sure the type description is non-empty.
    if not self.description:
      self.AddFinding(findings_lib.MissingEntityTypeDescriptionWarning(self))
    # Check for duplicate local fields.
    # this check is case insensitive to catch dupes earlier in the event that
    # we stop explicitly rejecting upper case characters
    check_fields = set()
    for field in local_field_names:
      field_lower = field.lower()
      if field_lower in check_fields:
        self.AddFinding(findings_lib.DuplicateFieldError(self, field))
        continue
      check_fields.add(field_lower)
      # TODO(berkoben): Add more checks to validate fields in isolation
      # (in case we don't have a field set to check against)
      # (i.e. check for chirality, formatting. Could use actual Field objects)
      # Check formatting of field name
      if len(field.split('/')) > 2:
        self.AddFinding(findings_lib.UnrecognizedFieldFormatError(self, field))
    # Check for duplicate parent names.
    parent_names_check = set()
    for parent_name in self.unqualified_parent_names:
      if parent_name in parent_names_check:
        self.AddFinding(findings_lib.DuplicateParentError(self, parent_name))
        continue
      parent_names_check.add(parent_name)
      # Check formatting of parent name
      if len(parent_name.split('/')) > 2:
        self.AddFinding(
            findings_lib.UnrecognizedParentFormatError(self, parent_name))
    # Enforce that the inherited_fields_expanded field is not set
    if self.inherited_fields_expanded:
      self.AddFinding(findings_lib.InheritedFieldsSetError(self))
|
"""Returns the namespace name for this field.
Args:
field_ns: namespace of field as parsed from the config
field_name: unqualified field name string
Returns:
The fully qualified field string.
"""
if not field_ns and self.IsLocalField(field_name):
return self.namespace
return field_ns
|
SimSpace_Occupied_Default.py
|
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        # Look for the native _SimSpace_Occupied_Default extension next to
        # this module; fall back to a plain import if find_module fails.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimSpace_Occupied_Default', [dirname(__file__)])
        except ImportError:
            import _SimSpace_Occupied_Default
            return _SimSpace_Occupied_Default
        if fp is not None:
            try:
                _mod = imp.load_module('_SimSpace_Occupied_Default', fp, pathname, description)
            finally:
                # Always release the file handle opened by find_module.
                fp.close()
            return _mod
    _SimSpace_Occupied_Default = swig_import_helper()
    del swig_import_helper
else:
    import _SimSpace_Occupied_Default
del version_info
# Alias the builtin 'property' when the interpreter provides it.
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """Set attribute *name* on a SWIG proxy instance.

    'thisown' and 'this' are routed to the wrapped SWIG pointer object;
    otherwise registered SWIG setter methods are tried before falling back
    to normal attribute assignment (non-static) or an AttributeError
    (static mode disallows adding new attributes).
    """
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Dynamic-attribute variant of _swig_setattr_nondynamic (static=0)."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
if (name == "thisown"):
return self.this.own()
method = class_type.__swig_getmethods__.get(name, None)
if method:
return method(self)
if (not static):
return object.__getattr__(self, name)
else:
raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    """Dynamic-attribute variant of _swig_getattr_nondynamic (static=0)."""
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Detect new-style class support (the 'object' builtin, Python >= 2.2).
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
# Use weakref proxies when available; fall back to the identity function.
try:
    import weakref
    weakref_proxy = weakref.proxy
except ImportError:
    # Was a bare 'except:'; only a missing module should trigger the
    # fallback.
    weakref_proxy = lambda x: x
import base
class SimSpace(base.SimSpatialStructureElement):
__swig_setmethods__ = {}
for _s in [base.SimSpatialStructureElement]:
__swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
__setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace, name, value)
__swig_getmethods__ = {}
for _s in [base.SimSpatialStructureElement]:
__swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
__getattr__ = lambda self, name: _swig_getattr(self, SimSpace, name)
__repr__ = _swig_repr
def SpaceZoneAssignments(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceZoneAssignments(self, *args)
def SpaceNumber(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceNumber(self, *args)
def SpaceName(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceName(self, *args)
def SpaceInteriorOrExterior(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceInteriorOrExterior(self, *args)
def SpaceDatumElevation(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceDatumElevation(self, *args)
def SpaceThermalSimulationType(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceThermalSimulationType(self, *args)
def SpaceConditioningRequirement(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceConditioningRequirement(self, *args)
def
|
(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantDensity(self, *args)
def SpaceOccupantHeatRateLatent(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantHeatRateLatent(self, *args)
def SpaceOccupantHeatRateSensible(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantHeatRateSensible(self, *args)
def SpaceOccupantLoad(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceOccupantLoad(self, *args)
def SpaceEquipmentLoad(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceEquipmentLoad(self, *args)
def SpaceLightingLoad(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceLightingLoad(self, *args)
def InsideDryBulbTempHeating(self, *args):
return _SimSpace_Occupied_Default.SimSpace_InsideDryBulbTempHeating(self, *args)
def InsideRelHumidityHeating(self, *args):
return _SimSpace_Occupied_Default.SimSpace_InsideRelHumidityHeating(self, *args)
def InsideDryBulbTempCooling(self, *args):
return _SimSpace_Occupied_Default.SimSpace_InsideDryBulbTempCooling(self, *args)
def InsideRelHumidityCooling(self, *args):
return _SimSpace_Occupied_Default.SimSpace_InsideRelHumidityCooling(self, *args)
def IncludesReturnAirPlenum(self, *args):
return _SimSpace_Occupied_Default.SimSpace_IncludesReturnAirPlenum(self, *args)
def PeakAirFlowCooling(self, *args):
return _SimSpace_Occupied_Default.SimSpace_PeakAirFlowCooling(self, *args)
def PeakAirFlowHeating(self, *args):
return _SimSpace_Occupied_Default.SimSpace_PeakAirFlowHeating(self, *args)
def ExhaustAirFlowRate(self, *args):
return _SimSpace_Occupied_Default.SimSpace_ExhaustAirFlowRate(self, *args)
def NaturalAirChangeRate(self, *args):
return _SimSpace_Occupied_Default.SimSpace_NaturalAirChangeRate(self, *args)
def MechanicalAirChangeRate(self, *args):
return _SimSpace_Occupied_Default.SimSpace_MechanicalAirChangeRate(self, *args)
def VentilationType(self, *args):
return _SimSpace_Occupied_Default.SimSpace_VentilationType(self, *args)
def OutsideAirPerPerson(self, *args):
return _SimSpace_Occupied_Default.SimSpace_OutsideAirPerPerson(self, *args)
def SpaceHeight(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceHeight(self, *args)
def SpaceGrossPerimeter(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceGrossPerimeter(self, *args)
def SpaceGrossFloorArea(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceGrossFloorArea(self, *args)
def SpaceNetFloorArea(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceNetFloorArea(self, *args)
def SpaceGrossVolume(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceGrossVolume(self, *args)
def SpaceNetVolume(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceNetVolume(self, *args)
def SpaceNetFloorAreaBOMA(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceNetFloorAreaBOMA(self, *args)
def SpaceUsableFloorAreaBOMA(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceUsableFloorAreaBOMA(self, *args)
def ClassRef_SpaceByFunction(self, *args):
return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceByFunction(self, *args)
def ClassRef_SpaceTypeOwner(self, *args):
return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceTypeOwner(self, *args)
def ClassRef_SpaceCategoryOwner(self, *args):
return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceCategoryOwner(self, *args)
def ClassRef_SpaceCategoryBOMA(self, *args):
return _SimSpace_Occupied_Default.SimSpace_ClassRef_SpaceCategoryBOMA(self, *args)
def SpaceOccupants(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceOccupants(self, *args)
def OccupancyScheduleAssignment(self, *args):
return _SimSpace_Occupied_Default.SimSpace_OccupancyScheduleAssignment(self, *args)
def LightingScheduleAssignment(self, *args):
return _SimSpace_Occupied_Default.SimSpace_LightingScheduleAssignment(self, *args)
def EquipmentScheduleAssignment(self, *args):
return _SimSpace_Occupied_Default.SimSpace_EquipmentScheduleAssignment(self, *args)
def GeometricRepresentations(self, *args):
return _SimSpace_Occupied_Default.SimSpace_GeometricRepresentations(self, *args)
def SpaceInSpatialContainer(self, *args):
return _SimSpace_Occupied_Default.SimSpace_SpaceInSpatialContainer(self, *args)
def AssociatedPlena(self, *args):
return _SimSpace_Occupied_Default.SimSpace_AssociatedPlena(self, *args)
def AssociatedElements(self, *args):
return _SimSpace_Occupied_Default.SimSpace_AssociatedElements(self, *args)
def __init__(self, *args):
this = _SimSpace_Occupied_Default.new_SimSpace(*args)
try:
self.this.append(this)
except:
self.this = this
def _clone(self, f=0, c=None):
return _SimSpace_Occupied_Default.SimSpace__clone(self, f, c)
__swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace
__del__ = lambda self: None
SimSpace_swigregister = _SimSpace_Occupied_Default.SimSpace_swigregister
SimSpace_swigregister(SimSpace)
class SimSpace_Occupied(SimSpace):
    """SWIG proxy for an occupied SimSpace.

    Auto-generated wrapper class: every accessor below simply forwards to
    the corresponding C function in the _SimSpace_Occupied_Default
    extension module. The T24* accessors expose attributes that appear to
    relate to California Title 24 energy-code reporting -- TODO confirm
    against the schema documentation.
    """
    # Merge the SWIG setter/getter dispatch tables from the base class so
    # attribute access resolves through the generated C-level accessors.
    __swig_setmethods__ = {}
    for _s in [SimSpace]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace_Occupied, name, value)
    __swig_getmethods__ = {}
    for _s in [SimSpace]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace_Occupied, name)
    __repr__ = _swig_repr
    def T24CommRefrigEPD(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigEPD(self, *args)
    def T24CommRefrigEqmtSchedRef(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigEqmtSchedRef(self, *args)
    def T24CommRefrigLatentFraction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigLatentFraction(self, *args)
    def T24CommRefrigLostFraction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigLostFraction(self, *args)
    def T24CommRefrigRadFraction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24CommRefrigRadFraction(self, *args)
    def T24EnvelopeStatus(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24EnvelopeStatus(self, *args)
    def T24ExhaustAirChangesPerHour(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ExhaustAirChangesPerHour(self, *args)
    def T24ExhaustPerArea(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ExhaustPerArea(self, *args)
    def T24ExhaustPerSpace(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ExhaustPerSpace(self, *args)
    def T24HasProcessExhaust(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24HasProcessExhaust(self, *args)
    def T24IntLightingSpecMethod(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24IntLightingSpecMethod(self, *args)
    def T24KitchExhHoodDutyList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodDutyList(self, *args)
    def T24KitchExhHoodFlowList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodFlowList(self, *args)
    def T24KitchExhHoodLengthList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodLengthList(self, *args)
    def T24KitchExhHoodStyleList(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24KitchExhHoodStyleList(self, *args)
    def T24LabExhRateType(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24LabExhRateType(self, *args)
    def T24LightingStatus(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24LightingStatus(self, *args)
    def T24MandLightCntrlCntRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlCntRpt(self, *args)
    def T24MandLightCntrlDescRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlDescRpt(self, *args)
    def T24MandLightCntrlAccepReqRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlAccepReqRpt(self, *args)
    def T24MandLightCntrlIsAutoShOffCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsAutoShOffCntrlRpt(self, *args)
    def T24MandLightCntrlIsDayltngCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsDayltngCntrlRpt(self, *args)
    def T24MandLightCntrlIsDmndRespCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsDmndRespCntrlRpt(self, *args)
    def T24MandLightCntrlIsManAreaCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsManAreaCntrlRpt(self, *args)
    def T24MandLightCntrlIsMultLvlCntrlRpt(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24MandLightCntrlIsMultLvlCntrlRpt(self, *args)
    def T24SkylightRqmtExcep(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24SkylightRqmtExcep(self, *args)
    def T24SpaceFunction(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24SpaceFunction(self, *args)
    def T24ConstructStatus3(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_T24ConstructStatus3(self, *args)
    def __init__(self, *args):
        # Construct the underlying C++ object and attach its handle; the
        # try/except handles both list-based and plain 'this' storage.
        this = _SimSpace_Occupied_Default.new_SimSpace_Occupied(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        return _SimSpace_Occupied_Default.SimSpace_Occupied__clone(self, f, c)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace_Occupied
    __del__ = lambda self: None
# Register the proxy class with the generated extension module.
SimSpace_Occupied_swigregister = _SimSpace_Occupied_Default.SimSpace_Occupied_swigregister
SimSpace_Occupied_swigregister(SimSpace_Occupied)
class SimSpace_Occupied_Default(SimSpace_Occupied):
    """SWIG proxy for the default/leaf variant of an occupied SimSpace.

    Auto-generated wrapper: adds no accessors of its own, only the
    constructor, clone, and destructor for this concrete schema type.
    """
    # Merge the SWIG setter/getter dispatch tables from the base class.
    __swig_setmethods__ = {}
    for _s in [SimSpace_Occupied]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace_Occupied_Default, name, value)
    __swig_getmethods__ = {}
    for _s in [SimSpace_Occupied]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace_Occupied_Default, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Construct the underlying C++ object and attach its handle.
        this = _SimSpace_Occupied_Default.new_SimSpace_Occupied_Default(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def _clone(self, f=0, c=None):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default__clone(self, f, c)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace_Occupied_Default
    __del__ = lambda self: None
# Register the proxy class with the generated extension module.
SimSpace_Occupied_Default_swigregister = _SimSpace_Occupied_Default.SimSpace_Occupied_Default_swigregister
SimSpace_Occupied_Default_swigregister(SimSpace_Occupied_Default)
class SimSpace_Occupied_Default_sequence(base.sequence_common):
    """SWIG proxy for a C++ sequence (vector-like container) of
    SimSpace_Occupied_Default objects.

    Auto-generated wrapper exposing the usual STL-style container
    interface (begin/end, at/front/back, push/pop, insert/erase, swap).
    """
    # Merge the SWIG setter/getter dispatch tables from the base class.
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimSpace_Occupied_Default_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimSpace_Occupied_Default_sequence, name)
    __repr__ = _swig_repr
    def __init__(self, *args):
        # Construct the underlying C++ sequence and attach its handle.
        this = _SimSpace_Occupied_Default.new_SimSpace_Occupied_Default_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this
    def assign(self, n, x):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_assign(self, n, x)
    def begin(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_begin(self, *args)
    def end(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_end(self, *args)
    def rbegin(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_rbegin(self, *args)
    def rend(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_rend(self, *args)
    def at(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_at(self, *args)
    def front(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_front(self, *args)
    def back(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_back(self, *args)
    def push_back(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_push_back(self, *args)
    def pop_back(self):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_pop_back(self)
    def detach_back(self, pop=True):
        # Detach ownership of the last element, optionally popping it.
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_detach_back(self, pop)
    def insert(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_insert(self, *args)
    def erase(self, *args):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_erase(self, *args)
    def detach(self, position, r, erase=True):
        # Detach ownership of the element at 'position', optionally erasing it.
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_detach(self, position, r, erase)
    def swap(self, x):
        return _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_swap(self, x)
    __swig_destroy__ = _SimSpace_Occupied_Default.delete_SimSpace_Occupied_Default_sequence
    __del__ = lambda self: None
# Register the proxy class with the generated extension module.
SimSpace_Occupied_Default_sequence_swigregister = _SimSpace_Occupied_Default.SimSpace_Occupied_Default_sequence_swigregister
SimSpace_Occupied_Default_sequence_swigregister(SimSpace_Occupied_Default_sequence)
# This file is compatible with both classic and new-style classes.
|
SpaceOccupantDensity
|
bundle_manager.py
|
import datetime
import logging
import os
import random
import re
import sys
import threading
import time
import traceback
from codalab.objects.permission import check_bundles_have_read_permission
from codalab.common import PermissionError
from codalab.lib import bundle_util, formatting, path_util
from codalabworker.file_util import remove_path
from codalabworker.bundle_state import State
logger = logging.getLogger(__name__)
WORKER_TIMEOUT_SECONDS = 60
class BundleManager(object):
"""
Assigns run bundles to workers and makes make bundles.
"""
    @staticmethod
    def create(codalab_manager):
        # Factory: build a DefaultBundleManager wired up with the models,
        # stores, and scheduling limits taken from the 'workers' section of
        # the CodaLab config. Exits the process if that section is missing.
        # (Python 2 file: note the print statement below.)
        config = codalab_manager.config.get('workers')
        if not config:
            print >>sys.stderr, 'config.json file missing a workers section.'
            exit(1)
        from codalab.worker.default_bundle_manager import DefaultBundleManager
        self = DefaultBundleManager()
        self._model = codalab_manager.model()
        self._worker_model = codalab_manager.worker_model()
        self._bundle_store = codalab_manager.bundle_store()
        self._upload_manager = codalab_manager.upload_manager()
        # Shutdown flag and the set of make-bundle uuids currently being
        # processed on background threads, each guarded by its own lock.
        self._exiting_lock = threading.Lock()
        self._exiting = False
        self._make_uuids_lock = threading.Lock()
        self._make_uuids = set()
        def parse(to_value, field):
            # Optional config field -> parsed value, or None when absent.
            return to_value(config[field]) if field in config else None
        self._max_request_time = parse(formatting.parse_duration, 'max_request_time')
        self._max_request_memory = parse(formatting.parse_size, 'max_request_memory')
        self._max_request_disk = parse(formatting.parse_size, 'max_request_disk')
        self._default_cpu_image = config.get('default_cpu_image')
        self._default_gpu_image = config.get('default_gpu_image')
        logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
        return self
    def run(self, sleep_time):
        """Main loop: run iterations until signal() is called, sleeping
        sleep_time seconds between iterations, then wait for any in-flight
        make-bundle threads to finish before returning."""
        logger.info('Bundle manager running.')
        while not self._is_exiting():
            try:
                self._run_iteration()
            except Exception:
                # Keep the manager alive on unexpected errors; just log them.
                traceback.print_exc()
            time.sleep(sleep_time)
        while self._is_making_bundles():
            time.sleep(sleep_time)
def signal(self):
with self._exiting_lock:
self._exiting = True
def _is_exiting(self):
with self._exiting_lock:
return self._exiting
def _run_iteration(self):
|
def _schedule_run_bundles(self):
"""
Sub classes should implement this. See DefaultBundleManager
"""
raise NotImplementedError
def _stage_bundles(self):
"""
Stages bundles by:
1) Failing any bundles that have any missing or failed dependencies.
2) Staging any bundles that have all ready dependencies.
"""
bundles = self._model.batch_get_bundles(state=State.CREATED)
parent_uuids = set(dep.parent_uuid for bundle in bundles for dep in bundle.dependencies)
parents = self._model.batch_get_bundles(uuid=parent_uuids)
all_parent_states = {parent.uuid: parent.state for parent in parents}
all_parent_uuids = set(all_parent_states)
bundles_to_fail = []
bundles_to_stage = []
for bundle in bundles:
parent_uuids = set(dep.parent_uuid for dep in bundle.dependencies)
try:
check_bundles_have_read_permission(
self._model, self._model.get_user(bundle.owner_id), parent_uuids
)
except PermissionError as e:
bundles_to_fail.append((bundle, str(e)))
continue
missing_uuids = parent_uuids - all_parent_uuids
if missing_uuids:
bundles_to_fail.append(
(bundle, 'Missing parent bundles: %s' % ', '.join(missing_uuids))
)
continue
parent_states = {uuid: all_parent_states[uuid] for uuid in parent_uuids}
acceptable_states = [State.READY]
if bundle.metadata.allow_failed_dependencies:
acceptable_states.append(State.FAILED)
acceptable_states.append(State.KILLED)
else:
failed_uuids = [
uuid for uuid, state in parent_states.iteritems() if state == State.FAILED
]
killed_uuids = [
uuid for uuid, state in parent_states.iteritems() if state == State.KILLED
]
failure_message = ''
if failed_uuids:
failure_message += ' Parent bundles failed: %s' % ', '.join(failed_uuids)
if killed_uuids:
failure_message += ' Parent bundles were killed: %s' % ', '.join(killed_uuids)
if failure_message:
failure_message += ' (Please use the --allow-failed-dependencies flag to depend on results fo failed or killed bundles)'
bundles_to_fail.append((bundle, failure_message))
continue
if all(state in acceptable_states for state in parent_states.itervalues()):
bundles_to_stage.append(bundle)
for bundle, failure_message in bundles_to_fail:
logger.info('Failing bundle %s: %s', bundle.uuid, failure_message)
self._model.update_bundle(
bundle, {'state': State.FAILED, 'metadata': {'failure_message': failure_message}}
)
for bundle in bundles_to_stage:
logger.info('Staging %s', bundle.uuid)
self._model.update_bundle(bundle, {'state': State.STAGED})
    def _make_bundles(self):
        """Find make bundles to process and kick off a worker thread for
        each; tracks in-flight uuids in self._make_uuids."""
        # Re-stage any stuck bundles. This would happen if the bundle manager
        # died.
        for bundle in self._model.batch_get_bundles(state=State.MAKING, bundle_type='make'):
            if not self._is_making_bundle(bundle.uuid):
                logger.info('Re-staging make bundle %s', bundle.uuid)
                self._model.update_bundle(bundle, {'state': State.STAGED})
        for bundle in self._model.batch_get_bundles(state=State.STAGED, bundle_type='make'):
            logger.info('Making bundle %s', bundle.uuid)
            self._model.update_bundle(bundle, {'state': State.MAKING})
            with self._make_uuids_lock:
                self._make_uuids.add(bundle.uuid)
            # Making a bundle could take time, so do the work in a separate
            # thread to ensure quick scheduling.
            threading.Thread(target=BundleManager._make_bundle, args=[self, bundle]).start()
def _is_making_bundles(self):
with self._make_uuids_lock:
return bool(self._make_uuids)
def _is_making_bundle(self, uuid):
with self._make_uuids_lock:
return uuid in self._make_uuids
    def _make_bundle(self, bundle):
        """Materialize a make bundle on disk by copying its dependencies
        into place, then mark it READY (or FAILED on any error).

        Runs on a background thread started by _make_bundles; always
        removes the uuid from _make_uuids when done.
        """
        try:
            path = os.path.normpath(self._bundle_store.get_bundle_location(bundle.uuid))
            deps = []
            for dep in bundle.dependencies:
                parent_bundle_path = os.path.normpath(
                    self._bundle_store.get_bundle_location(dep.parent_uuid)
                )
                dependency_path = os.path.normpath(
                    os.path.join(parent_bundle_path, dep.parent_path)
                )
                # Reject dependency paths that escape the parent bundle's
                # directory or that don't exist at all.
                if not dependency_path.startswith(parent_bundle_path) or (
                    not os.path.islink(dependency_path) and not os.path.exists(dependency_path)
                ):
                    raise Exception(
                        'Invalid dependency %s'
                        % (path_util.safe_join(dep.parent_uuid, dep.parent_path))
                    )
                child_path = os.path.normpath(os.path.join(path, dep.child_path))
                # The child key must also resolve inside this bundle's directory.
                if not child_path.startswith(path):
                    raise Exception('Invalid key for dependency: %s' % (dep.child_path))
                deps.append((dependency_path, child_path))
            remove_path(path)
            if len(deps) == 1 and deps[0][1] == path:
                # A single dependency mapped to the bundle root: copy directly.
                path_util.copy(deps[0][0], path, follow_symlinks=False)
            else:
                os.mkdir(path)
                for dependency_path, child_path in deps:
                    path_util.copy(dependency_path, child_path, follow_symlinks=False)
            self._upload_manager.update_metadata_and_save(bundle, enforce_disk_quota=True)
            logger.info('Finished making bundle %s', bundle.uuid)
            self._model.update_bundle(bundle, {'state': State.READY})
        except Exception as e:
            logger.info('Failing bundle %s: %s', bundle.uuid, str(e))
            self._model.update_bundle(
                bundle, {'state': State.FAILED, 'metadata': {'failure_message': str(e)}}
            )
        finally:
            with self._make_uuids_lock:
                self._make_uuids.remove(bundle.uuid)
    def _cleanup_dead_workers(self, workers, callback=None):
        """
        Clean-up workers that we haven't heard from for more than WORKER_TIMEOUT_SECONDS seconds.
        Such workers probably died without checking out properly.
        The optional callback is invoked with each worker that was removed.
        """
        for worker in workers.workers():
            if datetime.datetime.now() - worker['checkin_time'] > datetime.timedelta(
                seconds=WORKER_TIMEOUT_SECONDS
            ):
                logger.info(
                    'Cleaning up dead worker (%s, %s)', worker['user_id'], worker['worker_id']
                )
                self._worker_model.worker_cleanup(worker['user_id'], worker['worker_id'])
                workers.remove(worker)
                if callback is not None:
                    callback(worker)
    def _restage_stuck_starting_bundles(self, workers):
        """
        Moves bundles that got stuck in the STARTING state back to the STAGED
        state so that they can be scheduled to run again.
        """
        for bundle in self._model.batch_get_bundles(state=State.STARTING, bundle_type='run'):
            # Restage when no worker claims the run, or when it hasn't been
            # updated for more than five minutes.
            if (
                not workers.is_running(bundle.uuid)
                or time.time() - bundle.metadata.last_updated > 5 * 60
            ): # Run message went missing.
                logger.info('Re-staging run bundle %s', bundle.uuid)
                if self._model.restage_bundle(bundle):
                    workers.restage(bundle.uuid)
def _acknowledge_recently_finished_bundles(self, workers):
"""
Acknowledges recently finished bundles to workers so they can discard run information
"""
for bundle in self._model.batch_get_bundles(state=State.FINALIZING, bundle_type='run'):
worker = workers.get_bundle_worker(bundle.uuid)
if worker is None:
logger.info(
'Bringing bundle offline %s: %s', bundle.uuid, 'No worker claims bundle'
)
self._model.set_offline_bundle(bundle)
elif self._worker_model.send_json_message(
worker['socket_id'], {'type': 'mark_finalized', 'uuid': bundle.uuid}, 0.2
):
logger.info('Acknowleded finalization of run bundle %s', bundle.uuid)
self._model.finish_bundle(bundle)
    def _bring_offline_stuck_running_bundles(self, workers):
        """
        Make bundles that got stuck in the RUNNING or PREPARING state into WORKER_OFFLINE state.
        Bundles in WORKER_OFFLINE state can be moved back to the RUNNING or PREPARING state if a
        worker resumes the bundle indicating that it's still in one of those states.
        """
        active_bundles = self._model.batch_get_bundles(
            state=State.RUNNING, bundle_type='run'
        ) + self._model.batch_get_bundles(state=State.PREPARING, bundle_type='run')
        now = time.time()
        for bundle in active_bundles:
            # A missing worker claim or a stale check-in both count as stuck;
            # the stale check-in message takes precedence.
            failure_message = None
            if not workers.is_running(bundle.uuid):
                failure_message = 'No worker claims bundle'
            if now - bundle.metadata.last_updated > WORKER_TIMEOUT_SECONDS:
                failure_message = 'Worker offline'
            if failure_message is not None:
                logger.info('Bringing bundle offline %s: %s', bundle.uuid, failure_message)
                self._model.set_offline_bundle(bundle)
    def _schedule_run_bundles_on_workers(self, workers, user_owned):
        """
        Schedules STAGED bundles to run on the given workers. If user_owned is
        True, then schedules on workers run by the owner of each bundle.
        Otherwise, uses CodaLab-owned workers, which have user ID root_user_id.
        """
        for bundle in self._model.batch_get_bundles(state=State.STAGED, bundle_type='run'):
            if user_owned:
                workers_list = workers.user_owned_workers(bundle.owner_id)
            else:
                workers_list = workers.user_owned_workers(self._model.root_user_id)
            workers_list = self._filter_and_sort_workers(workers_list, bundle)
            # Try workers in preference order until one accepts the bundle.
            # (The else/continue is redundant but harmless.)
            for worker in workers_list:
                if self._try_start_bundle(workers, worker, bundle):
                    break
                else:
                    continue  # Try the next worker.
    def _deduct_worker_resources(self, workers_list):
        """
        From each worker, subtract resources used by running bundles. Modifies the list.
        Leaves each worker dict's 'cpus', 'gpus' and 'memory_bytes' holding
        the resources still free on that worker.
        """
        for worker in workers_list:
            for uuid in worker['run_uuids']:
                bundle = self._model.get_bundle(uuid)
                worker['cpus'] -= self._compute_request_cpus(bundle)
                worker['gpus'] -= self._compute_request_gpus(bundle)
                worker['memory_bytes'] -= self._compute_request_memory(bundle)
    def _filter_and_sort_workers(self, workers_list, bundle):
        """
        Filters the workers to those that can run the given bundle and returns
        the list sorted in order of preference for running the bundle.
        (Python 2 code: filter() returns a list here, so .sort() below is valid.)
        """
        # keep track of which workers have GPUs
        has_gpu = {}
        for worker in workers_list:
            worker_id = worker['worker_id']
            has_gpu[worker_id] = worker['gpus'] > 0
        # deduct worker resources based on running bundles
        self._deduct_worker_resources(workers_list)
        # Filter by CPUs.
        request_cpus = self._compute_request_cpus(bundle)
        if request_cpus:
            workers_list = filter(lambda worker: worker['cpus'] >= request_cpus, workers_list)
        # Filter by GPUs.
        request_gpus = self._compute_request_gpus(bundle)
        if request_gpus:
            workers_list = filter(lambda worker: worker['gpus'] >= request_gpus, workers_list)
        # Filter by memory.
        request_memory = self._compute_request_memory(bundle)
        if request_memory:
            workers_list = filter(
                lambda worker: worker['memory_bytes'] >= request_memory, workers_list
            )
        # Filter by tag.
        request_queue = bundle.metadata.request_queue
        if request_queue:
            tagm = re.match('tag=(.+)', request_queue)
            if tagm:
                workers_list = filter(lambda worker: worker['tag'] == tagm.group(1), workers_list)
            else:
                # We don't know how to handle this type of request queue
                # argument.
                return []
        # Sort workers list according to these keys in the following succession:
        #  - whether the worker is a CPU-only worker, if the bundle doesn't request GPUs
        #  - number of dependencies available, descending
        #  - number of free cpus, descending
        #  - random key
        #
        # Breaking ties randomly is important, since multiple workers frequently
        # have the same number of dependencies and free CPUs for a given bundle
        # (in particular, bundles with no dependencies) and we may end up
        # selecting the same worker over and over again for new jobs. While this
        # is not a problem for the performance of the jobs themselves, this can
        # cause one worker to collect a disproportionate number of dependencies
        # in its cache.
        needed_deps = set(map(lambda dep: (dep.parent_uuid, dep.parent_path), bundle.dependencies))
        def get_sort_key(worker):
            deps = set(worker['dependencies'])
            worker_id = worker['worker_id']
            # if the bundle doesn't request GPUs (only request CPUs), prioritize workers that don't have GPUs
            gpu_priority = self._compute_request_gpus(bundle) or not has_gpu[worker_id]
            return (gpu_priority, len(needed_deps & deps), worker['cpus'], random.random())
        workers_list.sort(key=get_sort_key, reverse=True)
        return workers_list
    def _try_start_bundle(self, workers, worker, bundle):
        """
        Tries to start running the bundle on the given worker, returning False
        if that failed. On success the bundle has been moved to STARTING and
        the run message delivered to the worker.
        """
        if self._model.set_starting_bundle(bundle, worker['user_id'], worker['worker_id']):
            workers.set_starting(bundle.uuid, worker)
            if (
                self._worker_model.shared_file_system
                and worker['user_id'] == self._model.root_user_id
            ):
                # On a shared file system we create the path here to avoid NFS
                # directory cache issues.
                path = self._bundle_store.get_bundle_location(bundle.uuid)
                remove_path(path)
                os.mkdir(path)
            if self._worker_model.send_json_message(
                worker['socket_id'], self._construct_run_message(worker, bundle), 0.2
            ):
                logger.info('Starting run bundle %s', bundle.uuid)
                return True
            else:
                # Could not reach the worker: undo the STARTING transition.
                self._model.restage_bundle(bundle)
                workers.restage(bundle.uuid)
                return False
        else:
            return False
def _compute_request_cpus(self, bundle):
"""
Compute the CPU limit used for scheduling the run.
The default of 1 is for backwards compatibilty for
runs from before when we added client-side defaults
"""
if not bundle.metadata.request_cpus:
return 1
return bundle.metadata.request_cpus
def _compute_request_gpus(self, bundle):
"""
Compute the GPU limit used for scheduling the run.
The default of 0 is for backwards compatibilty for
runs from before when we added client-side defaults
"""
if bundle.metadata.request_gpus is None:
return 0
return bundle.metadata.request_gpus
    def _compute_request_memory(self, bundle):
        """
        Compute the memory limit (in bytes) used for scheduling the run.
        The default of 2g is for backwards compatibility for
        runs from before when we added client-side defaults.
        """
        if not bundle.metadata.request_memory:
            return formatting.parse_size('2g')
        return formatting.parse_size(bundle.metadata.request_memory)
    def _compute_request_disk(self, bundle):
        """
        Compute the disk limit used for scheduling the run.
        The default is min(disk quota the user has left, global max)
        """
        if not bundle.metadata.request_disk:
            # NOTE(review): the -1 appears to keep the default strictly below
            # the user's remaining quota -- confirm this is intentional.
            return min(
                self._model.get_user_disk_quota_left(bundle.owner_id) - 1, self._max_request_disk
            )
        return formatting.parse_size(bundle.metadata.request_disk)
    def _compute_request_time(self, bundle):
        """
        Compute the time limit used for scheduling the run.
        The default is min(time quota the user has left, global max)
        """
        if not bundle.metadata.request_time:
            # NOTE(review): the -1 appears to keep the default strictly below
            # the user's remaining quota -- confirm this is intentional.
            return min(
                self._model.get_user_time_quota_left(bundle.owner_id) - 1, self._max_request_time
            )
        return formatting.parse_duration(bundle.metadata.request_time)
def _get_docker_image(self, bundle):
"""
Set docker image to be the default if not specified
Unlike other metadata fields this can actually be None
from client
"""
if not bundle.metadata.request_docker_image:
if bundle.metadata.request_gpus:
return self._default_gpu_image
else:
return self._default_cpu_image
return bundle.metadata.request_docker_image
    def _construct_run_message(self, worker, bundle):
        """
        Constructs the run message that is sent to the given worker to tell it
        to run the given bundle. The message carries the bundle info plus the
        computed resource limits.
        """
        message = {}
        message['type'] = 'run'
        message['bundle'] = bundle_util.bundle_to_bundle_info(self._model, bundle)
        if self._worker_model.shared_file_system and worker['user_id'] == self._model.root_user_id:
            # Shared filesystem: tell the worker where the bundle and each
            # dependency live on disk so it can skip downloading them.
            message['bundle']['location'] = self._bundle_store.get_bundle_location(bundle.uuid)
            for dependency in message['bundle']['dependencies']:
                dependency['location'] = self._bundle_store.get_bundle_location(
                    dependency['parent_uuid']
                )
        # Figure out the resource requirements.
        resources = message['resources'] = {}
        resources['request_cpus'] = self._compute_request_cpus(bundle)
        resources['request_gpus'] = self._compute_request_gpus(bundle)
        resources['docker_image'] = self._get_docker_image(bundle)
        resources['request_time'] = self._compute_request_time(bundle)
        resources['request_memory'] = self._compute_request_memory(bundle)
        resources['request_disk'] = self._compute_request_disk(bundle)
        resources['request_network'] = bundle.metadata.request_network
        return message
|
self._stage_bundles()
self._make_bundles()
self._schedule_run_bundles()
|
login.component.ts
|
import { Component, OnInit } from '@angular/core';
import { ToastrService } from 'ngx-toastr';
import { Router } from '@angular/router';
@Component({
selector: 'app-login',
templateUrl: './login.component.html',
styleUrls: ['./login.component.css']
})
export class LoginComponent implements OnInit {
username: any;
password: any;
constructor(private toastr: ToastrService, private router: Router) { }
ngOnInit(): void {
}
|
if(!this.username){
this.toastr.error("Please enter username");
}
else if(!this.password){
this.toastr.error("Please enter password");
}
else if(this.username.toLowerCase() != 'admin'){
this.toastr.error("Incorrect Username");
}
else if(this.password != '123456'){
this.toastr.error("Incorrect password");
}
else{
this.router.navigate(['/dashboard']);
}
}
}
|
login(){
|
builtin_time.go
|
// Copyright 2013 The ql Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSES/QL-LICENSE file.
// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package expression
import (
"fmt"
"math"
"regexp"
"strings"
"time"
"github.com/juju/errors"
"github.com/ngaut/log"
"github.com/pingcap/tidb/ast"
"github.com/pingcap/tidb/context"
"github.com/pingcap/tidb/mysql"
"github.com/pingcap/tidb/sessionctx/variable"
"github.com/pingcap/tidb/terror"
"github.com/pingcap/tidb/util/types"
)
const ( // GET_FORMAT first argument.
dateFormat = "DATE"
datetimeFormat = "DATETIME"
timestampFormat = "TIMESTAMP"
timeFormat = "TIME"
)
const ( // GET_FORMAT location.
usaLocation = "USA"
jisLocation = "JIS"
isoLocation = "ISO"
eurLocation = "EUR"
internalLocation = "INTERNAL"
)
// DurationPattern determine whether to match the format of duration.
// The groups are: optional leading '-', optional 1-2 digit day part followed
// by a whitespace, the time body (HH(H):MM:SS, H(H):MM, or up to 6 bare
// digits), and an optional fractional part after '.'.
var DurationPattern = regexp.MustCompile(`^(|[-]?)(|\d{1,2}\s)(\d{2,3}:\d{2}:\d{2}|\d{1,2}:\d{2}|\d{1,6})(|\.\d*)$`)
var (
_ functionClass = &dateFunctionClass{}
_ functionClass = &dateDiffFunctionClass{}
_ functionClass = &timeDiffFunctionClass{}
_ functionClass = &dateFormatFunctionClass{}
_ functionClass = &hourFunctionClass{}
_ functionClass = &minuteFunctionClass{}
_ functionClass = &secondFunctionClass{}
_ functionClass = µSecondFunctionClass{}
_ functionClass = &monthFunctionClass{}
_ functionClass = &monthNameFunctionClass{}
_ functionClass = &nowFunctionClass{}
_ functionClass = &dayNameFunctionClass{}
_ functionClass = &dayOfMonthFunctionClass{}
_ functionClass = &dayOfWeekFunctionClass{}
_ functionClass = &dayOfYearFunctionClass{}
_ functionClass = &weekFunctionClass{}
_ functionClass = &weekDayFunctionClass{}
_ functionClass = &weekOfYearFunctionClass{}
_ functionClass = &yearFunctionClass{}
_ functionClass = &yearWeekFunctionClass{}
_ functionClass = &fromUnixTimeFunctionClass{}
_ functionClass = &getFormatFunctionClass{}
_ functionClass = &strToDateFunctionClass{}
_ functionClass = &sysDateFunctionClass{}
_ functionClass = ¤tDateFunctionClass{}
_ functionClass = ¤tTimeFunctionClass{}
_ functionClass = &timeFunctionClass{}
_ functionClass = &utcDateFunctionClass{}
_ functionClass = &utcTimestampFunctionClass{}
_ functionClass = &extractFunctionClass{}
_ functionClass = &arithmeticFunctionClass{}
_ functionClass = &unixTimestampFunctionClass{}
_ functionClass = &addTimeFunctionClass{}
_ functionClass = &convertTzFunctionClass{}
_ functionClass = &makeDateFunctionClass{}
_ functionClass = &makeTimeFunctionClass{}
_ functionClass = &periodAddFunctionClass{}
_ functionClass = &periodDiffFunctionClass{}
_ functionClass = &quarterFunctionClass{}
_ functionClass = &secToTimeFunctionClass{}
_ functionClass = &subTimeFunctionClass{}
_ functionClass = &timeFormatFunctionClass{}
_ functionClass = &timeToSecFunctionClass{}
_ functionClass = ×tampAddFunctionClass{}
_ functionClass = &toDaysFunctionClass{}
_ functionClass = &toSecondsFunctionClass{}
_ functionClass = &utcTimeFunctionClass{}
_ functionClass = ×tampFunctionClass{}
_ functionClass = &lastDayFunctionClass{}
)
var (
_ builtinFunc = &builtinDateSig{}
_ builtinFunc = &builtinDateDiffSig{}
_ builtinFunc = &builtinTimeDiffSig{}
_ builtinFunc = &builtinDateFormatSig{}
_ builtinFunc = &builtinHourSig{}
_ builtinFunc = &builtinMinuteSig{}
_ builtinFunc = &builtinSecondSig{}
_ builtinFunc = &builtinMicroSecondSig{}
_ builtinFunc = &builtinMonthSig{}
_ builtinFunc = &builtinMonthNameSig{}
_ builtinFunc = &builtinNowWithArgSig{}
_ builtinFunc = &builtinNowWithoutArgSig{}
_ builtinFunc = &builtinDayNameSig{}
_ builtinFunc = &builtinDayOfMonthSig{}
_ builtinFunc = &builtinDayOfWeekSig{}
_ builtinFunc = &builtinDayOfYearSig{}
_ builtinFunc = &builtinWeekWithModeSig{}
_ builtinFunc = &builtinWeekWithoutModeSig{}
_ builtinFunc = &builtinWeekDaySig{}
_ builtinFunc = &builtinWeekOfYearSig{}
_ builtinFunc = &builtinYearSig{}
_ builtinFunc = &builtinYearWeekWithModeSig{}
_ builtinFunc = &builtinYearWeekWithoutModeSig{}
_ builtinFunc = &builtinFromUnixTimeSig{}
_ builtinFunc = &builtinGetFormatSig{}
_ builtinFunc = &builtinStrToDateSig{}
_ builtinFunc = &builtinSysDateWithFspSig{}
_ builtinFunc = &builtinSysDateWithoutFspSig{}
_ builtinFunc = &builtinCurrentDateSig{}
_ builtinFunc = &builtinCurrentTimeSig{}
_ builtinFunc = &builtinTimeSig{}
|
_ builtinFunc = &builtinArithmeticSig{}
_ builtinFunc = &builtinUnixTimestampSig{}
_ builtinFunc = &builtinAddTimeSig{}
_ builtinFunc = &builtinConvertTzSig{}
_ builtinFunc = &builtinMakeDateSig{}
_ builtinFunc = &builtinMakeTimeSig{}
_ builtinFunc = &builtinPeriodAddSig{}
_ builtinFunc = &builtinPeriodDiffSig{}
_ builtinFunc = &builtinQuarterSig{}
_ builtinFunc = &builtinSecToTimeSig{}
_ builtinFunc = &builtinSubTimeSig{}
_ builtinFunc = &builtinTimeToSecSig{}
_ builtinFunc = &builtinTimestampAddSig{}
_ builtinFunc = &builtinToDaysSig{}
_ builtinFunc = &builtinToSecondsSig{}
_ builtinFunc = &builtinUTCTimeWithArgSig{}
_ builtinFunc = &builtinUTCTimeWithoutArgSig{}
_ builtinFunc = &builtinTimestampSig{}
_ builtinFunc = &builtinLastDaySig{}
)
// handleInvalidTimeError reports error or warning depend on the context.
// An ErrInvalidTimeFormat is kept as a hard error only in strict SQL mode
// inside an INSERT/UPDATE/DELETE; otherwise it is demoted to a warning
// and nil is returned. Any other error passes through unchanged.
func handleInvalidTimeError(ctx context.Context, err error) error {
	if err == nil {
		return nil
	}
	if !terror.ErrorEqual(err, types.ErrInvalidTimeFormat) {
		return err
	}
	sessVars := ctx.GetSessionVars()
	sc := sessVars.StmtCtx
	if sessVars.StrictSQLMode && (sc.InInsertStmt || sc.InUpdateOrDeleteStmt) {
		return err
	}
	sc.AppendWarning(err)
	return nil
}
// convertTimeToMysqlTime rounds a Go time.Time to fsp fractional digits
// and wraps it in a types.Time of DATETIME type.
func convertTimeToMysqlTime(t time.Time, fsp int) (types.Time, error) {
	rounded, err := types.RoundFrac(t, int(fsp))
	if err != nil {
		return types.Time{}, errors.Trace(err)
	}
	result := types.Time{
		Time: types.FromGoTime(rounded),
		Type: mysql.TypeDatetime,
		Fsp:  fsp,
	}
	return result, nil
}
// convertToTimeWithFsp converts arg to a time datum of type tp with the
// given fractional-second precision (capped at types.MaxFsp). On failure
// the returned datum is set to NULL.
func convertToTimeWithFsp(sc *variable.StatementContext, arg types.Datum, tp byte, fsp int) (d types.Datum, err error) {
	if fsp > types.MaxFsp {
		fsp = types.MaxFsp
	}
	ft := types.NewFieldType(tp)
	ft.Decimal = fsp
	d, err = arg.ConvertTo(sc, ft)
	if err != nil {
		d.SetNull()
		return d, errors.Trace(err)
	}
	if d.IsNull() {
		return d, nil
	}
	if d.Kind() != types.KindMysqlTime {
		d.SetNull()
		return d, errors.Errorf("need time type, but got %T", d.GetValue())
	}
	return d, nil
}
// convertToTime converts arg to a time datum of type tp using the maximum
// fractional-second precision.
func convertToTime(sc *variable.StatementContext, arg types.Datum, tp byte) (d types.Datum, err error) {
	return convertToTimeWithFsp(sc, arg, tp, types.MaxFsp)
}
// convertToDuration converts arg to a Duration datum with the given fsp.
// On failure the returned datum is set to NULL.
func convertToDuration(sc *variable.StatementContext, arg types.Datum, fsp int) (d types.Datum, err error) {
	ft := types.NewFieldType(mysql.TypeDuration)
	ft.Decimal = fsp
	if d, err = arg.ConvertTo(sc, ft); err != nil {
		d.SetNull()
		return d, errors.Trace(err)
	}
	if d.IsNull() {
		return d, nil
	}
	if d.Kind() != types.KindMysqlDuration {
		d.SetNull()
		return d, errors.Errorf("need duration type, but got %T", d.GetValue())
	}
	return d, nil
}
// dateFunctionClass builds the DATE() builtin.
type dateFunctionClass struct {
	baseFunctionClass
}

func (c *dateFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// Result is a DATE: display width 10 ("YYYY-MM-DD"), no fractional part.
	bf.tp.Tp, bf.tp.Flen, bf.tp.Decimal = mysql.TypeDate, 10, 0
	sig := &builtinDateSig{baseTimeBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
type builtinDateSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals DATE(expr).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_date
func (b *builtinDateSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	expr, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return types.Time{}, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if expr.IsZero() {
		// Zero date: NULL result, with warning or error depending on SQL mode.
		return types.Time{}, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// Keep only the date part: zero out the time-of-day fields.
	expr.Time = types.FromDate(expr.Time.Year(), expr.Time.Month(), expr.Time.Day(), 0, 0, 0, 0)
	expr.Type = mysql.TypeDate
	return expr, false, nil
}
// convertDatumToTime coerces d to a types.Time, converting non-time datums
// through a DATETIME conversion first.
func convertDatumToTime(sc *variable.StatementContext, d types.Datum) (t types.Time, err error) {
	if d.Kind() == types.KindMysqlTime {
		return d.GetMysqlTime(), nil
	}
	if d, err = convertToTime(sc, d, mysql.TypeDatetime); err != nil {
		return t, errors.Trace(err)
	}
	return d.GetMysqlTime(), nil
}
// dateDiffFunctionClass builds the DATEDIFF() builtin.
type dateDiffFunctionClass struct {
	baseFunctionClass
}

func (c *dateDiffFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinDateDiffSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
type builtinDateDiffSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinDateDiffSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_datediff
func (b *builtinDateDiffSig) evalInt(row []types.Datum) (int64, bool, error) {
	ctx := b.ctx.GetSessionVars().StmtCtx
	t1, isNull, err := b.args[0].EvalTime(row, ctx)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	t2, isNull, err := b.args[1].EvalTime(row, ctx)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	// A zero month or day makes either operand an invalid date: result is NULL.
	if t1.Time.Month() == 0 || t1.Time.Day() == 0 || t2.Time.Month() == 0 || t2.Time.Day() == 0 {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return int64(types.DateDiff(t1.Time, t2.Time)), false, nil
}
// timeDiffFunctionClass builds the TIMEDIFF() builtin.
type timeDiffFunctionClass struct {
	baseFunctionClass
}

func (c *timeDiffFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinTimeDiffSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
type builtinTimeDiffSig struct {
	baseBuiltinFunc
}

// getStrFsp widens fsp to the length of the fractional part found in strArg
// (the text after the first '.'), ignoring fractional parts longer than
// types.MaxFsp.
func (b *builtinTimeDiffSig) getStrFsp(strArg string, fsp int) int {
	dotIdx := strings.IndexByte(strArg, '.')
	if dotIdx < 0 {
		return fsp
	}
	fracLen := len(strArg) - dotIdx - 1
	if fracLen <= types.MaxFsp && fracLen > fsp {
		return fracLen
	}
	return fsp
}
// convertArgToTime converts one TIMEDIFF operand to a types.Time.
// Strings are first tried as Durations (with fsp widened to the string's
// fractional length), Durations are converted directly, and everything
// else goes through a generic datum-to-time conversion.
func (b *builtinTimeDiffSig) convertArgToTime(sc *variable.StatementContext, arg types.Datum, fsp int) (t types.Time, err error) {
	// Fix issue #3923, see https://github.com/pingcap/tidb/issues/3923,
	// TIMEDIFF() returns expr1 − expr2 expressed as a Duration value. expr1 and expr2 are Duration or date-and-time expressions,
	// but both must be of the same type. if expr is a string, we first try to convert it to Duration, if it failed,
	// we then try to convert it to Datetime
	switch arg.Kind() {
	case types.KindString, types.KindBytes:
		strArg := arg.GetString()
		fsp = b.getStrFsp(strArg, fsp)
		t, err = types.StrToDuration(sc, strArg, fsp)
	case types.KindMysqlDuration:
		t, err = arg.GetMysqlDuration().ConvertToTime(mysql.TypeDuration)
	default:
		t, err = convertDatumToTime(sc, arg)
	}
	return t, errors.Trace(err)
}
// eval evals a builtinTimeDiffSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_timediff
func (b *builtinTimeDiffSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	// NULL in either operand yields NULL.
	if args[0].IsNull() || args[1].IsNull() {
		return
	}
	sc := b.ctx.GetSessionVars().StmtCtx
	// Result precision is the wider of the two operands' fractional parts.
	fsp := int(math.Max(float64(args[0].Frac()), float64(args[1].Frac())))
	t0, err := b.convertArgToTime(sc, args[0], fsp)
	if err != nil {
		return d, errors.Trace(err)
	}
	t1, err := b.convertArgToTime(sc, args[1], fsp)
	if err != nil {
		return d, errors.Trace(err)
	}
	// Mixing a date-and-time value with a pure Duration is incompatible
	// per MySQL: the result is NULL.
	if (types.IsTemporalWithDate(t0.Type) &&
		t1.Type == mysql.TypeDuration) ||
		(types.IsTemporalWithDate(t1.Type) &&
			t0.Type == mysql.TypeDuration) {
		return d, nil // Incompatible types, return NULL
	}
	t := t0.Sub(&t1)
	// Clamp to the TIME range; out-of-range differences become a
	// truncation warning (or error, per the statement context).
	ret, truncated := types.TruncateOverflowMySQLTime(t.Duration)
	if truncated {
		err = types.ErrTruncatedWrongVal.GenByArgs("time", t.String())
		err = sc.HandleTruncate(err)
	}
	t.Duration = ret
	d.SetMysqlDuration(t)
	return
}
// dateFormatFunctionClass builds the DATE_FORMAT() builtin.
type dateFormatFunctionClass struct {
	baseFunctionClass
}

func (c *dateFormatFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinDateFormatSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

type builtinDateFormatSig struct {
	baseBuiltinFunc
}

// eval evals a builtinDateFormatSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_date-format
func (b *builtinDateFormatSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	// Delegate to the shared helper (also reused by FROM_UNIXTIME below).
	return builtinDateFormat(args, b.ctx)
}
// builtinDateFormat formats args[0] (coerced to DATETIME) with the MySQL
// format string in args[1]. A NULL date yields a NULL result.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_date-format
func builtinDateFormat(args []types.Datum, ctx context.Context) (d types.Datum, err error) {
	sc := ctx.GetSessionVars().StmtCtx
	date, convErr := convertToTime(sc, args[0], mysql.TypeDatetime)
	if convErr != nil {
		return d, errors.Trace(convErr)
	}
	if date.IsNull() {
		return d, nil
	}
	formatted, fmtErr := date.GetMysqlTime().DateFormat(args[1].GetString())
	if fmtErr != nil {
		return d, errors.Trace(fmtErr)
	}
	d.SetString(formatted)
	return d, nil
}
// fromDaysFunctionClass builds the FROM_DAYS() builtin.
type fromDaysFunctionClass struct {
	baseFunctionClass
}

func (c *fromDaysFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, tpInt)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// Result is a date: display width 10, no fractional part.
	bf.tp.Flen, bf.tp.Decimal = 10, 0
	sig := &builtinFromDaysSig{baseTimeBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinFromDaysSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals FROM_DAYS(N).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_from-days
func (b *builtinFromDaysSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	sc := b.getCtx().GetSessionVars().StmtCtx
	n, isNull, err := b.args[0].EvalInt(row, sc)
	if isNull || err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	return types.TimeFromDays(n), false, nil
}
// hourFunctionClass builds the HOUR() builtin.
type hourFunctionClass struct {
	baseFunctionClass
}

func (c *hourFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDuration)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 3, 0
	sig := &builtinHourSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinHourSig struct {
	baseIntBuiltinFunc
}

// evalInt evals HOUR(time).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_hour
func (b *builtinHourSig) evalInt(row []types.Datum) (int64, bool, error) {
	dur, isNull, err := b.args[0].EvalDuration(row, b.ctx.GetSessionVars().StmtCtx)
	// ignore error and return NULL
	if isNull || err != nil {
		return 0, true, nil
	}
	return int64(dur.Hour()), false, nil
}
// minuteFunctionClass builds the MINUTE() builtin.
type minuteFunctionClass struct {
	baseFunctionClass
}

func (c *minuteFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDuration)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 2, 0
	sig := &builtinMinuteSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinMinuteSig struct {
	baseIntBuiltinFunc
}

// evalInt evals MINUTE(time).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_minute
func (b *builtinMinuteSig) evalInt(row []types.Datum) (int64, bool, error) {
	dur, isNull, err := b.args[0].EvalDuration(row, b.ctx.GetSessionVars().StmtCtx)
	// ignore error and return NULL
	if isNull || err != nil {
		return 0, true, nil
	}
	return int64(dur.Minute()), false, nil
}
// secondFunctionClass builds the SECOND() builtin.
type secondFunctionClass struct {
	baseFunctionClass
}

func (c *secondFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDuration)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 2, 0
	sig := &builtinSecondSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinSecondSig struct {
	baseIntBuiltinFunc
}

// evalInt evals SECOND(time).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_second
func (b *builtinSecondSig) evalInt(row []types.Datum) (int64, bool, error) {
	dur, isNull, err := b.args[0].EvalDuration(row, b.ctx.GetSessionVars().StmtCtx)
	// ignore error and return NULL
	if isNull || err != nil {
		return 0, true, nil
	}
	return int64(dur.Second()), false, nil
}
// microSecondFunctionClass builds the MICROSECOND() builtin.
type microSecondFunctionClass struct {
	baseFunctionClass
}

func (c *microSecondFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDuration)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 6, 0
	sig := &builtinMicroSecondSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinMicroSecondSig struct {
	baseIntBuiltinFunc
}

// evalInt evals MICROSECOND(expr).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_microsecond
func (b *builtinMicroSecondSig) evalInt(row []types.Datum) (int64, bool, error) {
	dur, isNull, err := b.args[0].EvalDuration(row, b.ctx.GetSessionVars().StmtCtx)
	// ignore error and return NULL
	if isNull || err != nil {
		return 0, true, nil
	}
	return int64(dur.MicroSecond()), false, nil
}
// monthFunctionClass builds the MONTH() builtin.
type monthFunctionClass struct {
	baseFunctionClass
}

func (c *monthFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 2, 0
	sig := &builtinMonthSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinMonthSig struct {
	baseIntBuiltinFunc
}

// evalInt evals MONTH(date).
// see: https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_month
func (b *builtinMonthSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.getCtx().GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		// Zero date: NULL result, warning or error per SQL mode.
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return int64(date.Time.Month()), false, nil
}
// builtinMonth returns the month number of args[0] (coerced to DATE);
// a zero date yields 0, a NULL input yields NULL.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_month
func builtinMonth(args []types.Datum, ctx context.Context) (d types.Datum, err error) {
	d, err = convertToTime(ctx.GetSessionVars().StmtCtx, args[0], mysql.TypeDate)
	if err != nil || d.IsNull() {
		return d, errors.Trace(err)
	}
	// No need to check type here.
	t := d.GetMysqlTime()
	if t.IsZero() {
		d.SetInt64(0)
		return d, nil
	}
	d.SetInt64(int64(t.Time.Month()))
	return d, nil
}
// monthNameFunctionClass builds the MONTHNAME() builtin.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_monthname
type monthNameFunctionClass struct {
	baseFunctionClass
}

func (c *monthNameFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpString, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen = 10
	sig := &builtinMonthNameSig{baseStringBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinMonthNameSig struct {
	baseStringBuiltinFunc
}

func (b *builtinMonthNameSig) evalString(row []types.Datum) (string, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	arg, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return "", true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	mon := arg.Time.Month()
	if arg.IsZero() || mon < 0 || mon > len(types.MonthNames) {
		return "", true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	} else if mon == 0 {
		// Month 0 in a non-zero date: NULL without an invalid-time warning.
		return "", true, nil
	}
	// MonthNames is 0-based; mon is 1-based.
	return types.MonthNames[mon-1], false, nil
}
// dayNameFunctionClass builds the DAYNAME() builtin.
type dayNameFunctionClass struct {
	baseFunctionClass
}

func (c *dayNameFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpString, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen = 10
	sig := &builtinDayNameSig{baseStringBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinDayNameSig struct {
	baseStringBuiltinFunc
}

// evalString evals a builtinDayNameSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_dayname
func (b *builtinDayNameSig) evalString(row []types.Datum) (string, bool, error) {
	arg, isNull, err := b.args[0].EvalTime(row, b.ctx.GetSessionVars().StmtCtx)
	if isNull || err != nil {
		return "", isNull, errors.Trace(err)
	}
	if arg.InvalidZero() {
		return "", true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// Monday is 0, ... Sunday = 6 in MySQL
	// but in go, Sunday is 0, ... Saturday is 6
	// w will do a conversion.
	res := (int64(arg.Time.Weekday()) + 6) % 7
	return types.WeekdayNames[res], false, nil
}
// dayOfMonthFunctionClass builds the DAYOFMONTH() builtin.
type dayOfMonthFunctionClass struct {
	baseFunctionClass
}

func (c *dayOfMonthFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen = 2
	sig := &builtinDayOfMonthSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinDayOfMonthSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinDayOfMonthSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_dayofmonth
func (b *builtinDayOfMonthSig) evalInt(row []types.Datum) (int64, bool, error) {
	arg, isNull, err := b.args[0].EvalTime(row, b.ctx.GetSessionVars().StmtCtx)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if arg.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return int64(arg.Time.Day()), false, nil
}
// dayOfWeekFunctionClass builds the DAYOFWEEK() builtin.
type dayOfWeekFunctionClass struct {
	baseFunctionClass
}

func (c *dayOfWeekFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen = 1
	sig := &builtinDayOfWeekSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinDayOfWeekSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinDayOfWeekSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_dayofweek
func (b *builtinDayOfWeekSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	arg, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if arg.InvalidZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// 1 is Sunday, 2 is Monday, .... 7 is Saturday
	return int64(arg.Time.Weekday() + 1), false, nil
}
// dayOfYearFunctionClass builds the DAYOFYEAR() builtin.
type dayOfYearFunctionClass struct {
	baseFunctionClass
}

func (c *dayOfYearFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen = 3
	sig := &builtinDayOfYearSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinDayOfYearSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinDayOfYearSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_dayofyear
func (b *builtinDayOfYearSig) evalInt(row []types.Datum) (int64, bool, error) {
	arg, isNull, err := b.args[0].EvalTime(row, b.ctx.GetSessionVars().StmtCtx)
	if isNull || err != nil {
		return 0, isNull, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if arg.InvalidZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return int64(arg.Time.YearDay()), false, nil
}
// weekFunctionClass builds the WEEK() builtin, which takes an optional
// second "mode" argument selecting the week-numbering convention.
type weekFunctionClass struct {
	baseFunctionClass
}

func (c *weekFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	argTps := []evalTp{tpDatetime}
	if len(args) == 2 {
		argTps = append(argTps, tpInt)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, argTps...)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 2, 0
	// Pick the signature matching the arity (with / without explicit mode).
	var sig builtinFunc
	if len(args) == 2 {
		sig = &builtinWeekWithModeSig{baseIntBuiltinFunc{bf}}
	} else {
		sig = &builtinWeekWithoutModeSig{baseIntBuiltinFunc{bf}}
	}
	return sig.setSelf(sig), nil
}
type builtinWeekWithModeSig struct {
	baseIntBuiltinFunc
}

// evalInt evals WEEK(date, mode).
// see: https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_week
func (b *builtinWeekWithModeSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	mode, isNull, err := b.args[1].EvalInt(row, sc)
	if isNull || err != nil {
		return 0, isNull, errors.Trace(err)
	}
	week := date.Time.Week(int(mode))
	return int64(week), false, nil
}

type builtinWeekWithoutModeSig struct {
	baseIntBuiltinFunc
}

// evalInt evals WEEK(date).
// see: https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_week
func (b *builtinWeekWithoutModeSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// No explicit mode: use mode 0.
	week := date.Time.Week(0)
	return int64(week), false, nil
}
// weekDayFunctionClass builds the WEEKDAY() builtin.
type weekDayFunctionClass struct {
	baseFunctionClass
}

func (c *weekDayFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen = 1
	sig := &builtinWeekDaySig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinWeekDaySig struct {
	baseIntBuiltinFunc
}

// evalInt evals WEEKDAY(date).
func (b *builtinWeekDaySig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// Shift Go's Sunday-based weekday to MySQL's Monday-based (0 = Monday).
	return int64(date.Time.Weekday()+6) % 7, false, nil
}
// weekOfYearFunctionClass builds the WEEKOFYEAR() builtin.
type weekOfYearFunctionClass struct {
	baseFunctionClass
}

func (c *weekOfYearFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 2, 0
	sig := &builtinWeekOfYearSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinWeekOfYearSig struct {
	baseIntBuiltinFunc
}

// evalInt evals WEEKOFYEAR(date).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_weekofyear
func (b *builtinWeekOfYearSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// WEEKOFYEAR(d) is equivalent to WEEK(d, 3) in MySQL.
	week := date.Time.Week(3)
	return int64(week), false, nil
}
// yearFunctionClass builds the YEAR() builtin.
type yearFunctionClass struct {
	baseFunctionClass
}

func (c *yearFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 4, 0
	sig := &builtinYearSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

type builtinYearSig struct {
	baseIntBuiltinFunc
}

// evalInt evals YEAR(date).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_year
func (b *builtinYearSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.getCtx().GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return int64(date.Time.Year()), false, nil
}
// yearWeekFunctionClass builds the YEARWEEK() builtin, which takes an
// optional second "mode" argument.
type yearWeekFunctionClass struct {
	baseFunctionClass
}

func (c *yearWeekFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	argTps := []evalTp{tpDatetime}
	if len(args) == 2 {
		argTps = append(argTps, tpInt)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, argTps...)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// Result is YYYYWW: up to 6 digits.
	bf.tp.Flen, bf.tp.Decimal = 6, 0
	var sig builtinFunc
	if len(args) == 2 {
		sig = &builtinYearWeekWithModeSig{baseIntBuiltinFunc{bf}}
	} else {
		sig = &builtinYearWeekWithoutModeSig{baseIntBuiltinFunc{bf}}
	}
	return sig.setSelf(sig), nil
}
type builtinYearWeekWithModeSig struct {
	baseIntBuiltinFunc
}

// evalInt evals YEARWEEK(date,mode).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_yearweek
func (b *builtinYearWeekWithModeSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, isNull, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	mode, isNull, err := b.args[1].EvalInt(row, sc)
	if err != nil {
		return 0, true, errors.Trace(err)
	}
	// A NULL mode is treated as mode 0 rather than propagating NULL.
	if isNull {
		mode = 0
	}
	year, week := date.Time.YearWeek(int(mode))
	result := int64(week + year*100)
	// Negative YYYYWW (week belongs to the previous year at year 0) is
	// clamped to the unsigned 32-bit maximum.
	if result < 0 {
		return int64(math.MaxUint32), false, nil
	}
	return result, false, nil
}
type builtinYearWeekWithoutModeSig struct {
	baseIntBuiltinFunc
}

// evalInt evals YEARWEEK(date).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_yearweek
func (b *builtinYearWeekWithoutModeSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	if date.InvalidZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// No explicit mode: use mode 0.
	year, week := date.Time.YearWeek(0)
	result := int64(week + year*100)
	// Clamp a negative YYYYWW to the unsigned 32-bit maximum.
	if result < 0 {
		return int64(math.MaxUint32), false, nil
	}
	return result, false, nil
}
// fromUnixTimeFunctionClass builds the FROM_UNIXTIME() builtin.
type fromUnixTimeFunctionClass struct {
	baseFunctionClass
}

func (c *fromUnixTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinFromUnixTimeSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

type builtinFromUnixTimeSig struct {
	baseBuiltinFunc
}
// eval evals a builtinFromUnixTimeSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_from-unixtime
func (b *builtinFromUnixTimeSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	sc := b.ctx.GetSessionVars().StmtCtx
	unixTimeStamp, err := args[0].ToDecimal(sc)
	if err != nil {
		return d, errors.Trace(err)
	}
	// 0 <= unixTimeStamp <= INT32_MAX; out-of-range values yield NULL.
	if unixTimeStamp.IsNegative() {
		return
	}
	integralPart, err := unixTimeStamp.ToInt()
	// Truncation of the fractional part is expected here, not an error.
	if err == types.ErrTruncated {
		err = nil
	}
	if err != nil {
		return d, errors.Trace(err)
	}
	if integralPart > int64(math.MaxInt32) {
		return
	}
	// Split the integral part and fractional part of a decimal timestamp.
	// e.g. for timestamp 12345.678,
	// first get the integral part 12345,
	// then (12345.678 - 12345) * (10^9) to get the decimal part and convert it to nanosecond precision.
	integerDecimalTp := new(types.MyDecimal).FromInt(integralPart)
	fracDecimalTp := new(types.MyDecimal)
	err = types.DecimalSub(unixTimeStamp, integerDecimalTp, fracDecimalTp)
	if err != nil {
		return d, errors.Trace(err)
	}
	nano := new(types.MyDecimal).FromInt(int64(time.Second))
	x := new(types.MyDecimal)
	err = types.DecimalMul(fracDecimalTp, nano, x)
	if err != nil {
		return d, errors.Trace(err)
	}
	fractionalPart, err := x.ToInt() // here fractionalPart is result multiplying the original fractional part by 10^9.
	if err == types.ErrTruncated {
		err = nil
	}
	if err != nil {
		return d, errors.Trace(err)
	}
	// Result fsp follows the input's fractional digit count, capped at MaxFsp.
	_, fracDigitsNumber := unixTimeStamp.PrecisionAndFrac()
	fsp := fracDigitsNumber
	if fracDigitsNumber > types.MaxFsp {
		fsp = types.MaxFsp
	}
	t, err := convertTimeToMysqlTime(time.Unix(integralPart, fractionalPart), fsp)
	if err != nil {
		return d, errors.Trace(err)
	}
	if args[0].Kind() == types.KindString { // Keep consistent with MySQL.
		t.Fsp = types.MaxFsp
	}
	d.SetMysqlTime(t)
	if len(args) == 1 {
		return
	}
	// Two-argument form: format the result with DATE_FORMAT semantics.
	return builtinDateFormat([]types.Datum{d, args[1]}, b.ctx)
}
// getFormatFunctionClass builds GET_FORMAT.
type getFormatFunctionClass struct {
	baseFunctionClass
}

func (c *getFormatFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinGetFormatSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinGetFormatSig implements GET_FORMAT(type, locale).
type builtinGetFormatSig struct {
	baseBuiltinFunc
}

// eval evals a builtinGetFormatSig.
// It returns the format string for the given date/time type and locale; an
// unknown type/locale combination leaves d unset (NULL).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_get-format
func (b *builtinGetFormatSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		// Bug fix: this error was previously ignored, which could lead to
		// reading uninitialized argument datums below.
		return d, errors.Trace(err)
	}
	t := args[0].GetString()
	l := args[1].GetString()
	switch t {
	case dateFormat:
		switch l {
		case usaLocation:
			d.SetString("%m.%d.%Y")
		case jisLocation:
			d.SetString("%Y-%m-%d")
		case isoLocation:
			d.SetString("%Y-%m-%d")
		case eurLocation:
			d.SetString("%d.%m.%Y")
		case internalLocation:
			d.SetString("%Y%m%d")
		}
	case datetimeFormat, timestampFormat:
		switch l {
		case usaLocation:
			d.SetString("%Y-%m-%d %H.%i.%s")
		case jisLocation:
			d.SetString("%Y-%m-%d %H:%i:%s")
		case isoLocation:
			d.SetString("%Y-%m-%d %H:%i:%s")
		case eurLocation:
			d.SetString("%Y-%m-%d %H.%i.%s")
		case internalLocation:
			d.SetString("%Y%m%d%H%i%s")
		}
	case timeFormat:
		switch l {
		case usaLocation:
			d.SetString("%h:%i:%s %p")
		case jisLocation:
			d.SetString("%H:%i:%s")
		case isoLocation:
			d.SetString("%H:%i:%s")
		case eurLocation:
			d.SetString("%H.%i.%s")
		case internalLocation:
			d.SetString("%H%i%s")
		}
	}
	return
}
// strToDateFunctionClass builds STR_TO_DATE(str, format).
type strToDateFunctionClass struct {
	baseFunctionClass
}

func (c *strToDateFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinStrToDateSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinStrToDateSig implements STR_TO_DATE(str, format).
type builtinStrToDateSig struct {
	baseBuiltinFunc
}

// eval evals a builtinStrToDateSig.
// See https://dev.mysql.com/doc/refman/5.5/en/date-and-time-functions.html#function_str-to-date
func (b *builtinStrToDateSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	dateStr, formatStr := args[0].GetString(), args[1].GetString()
	var t types.Time
	if ok := t.StrToDate(dateStr, formatStr); !ok {
		// The string does not match the format: result is NULL.
		d.SetNull()
		return d, nil
	}
	d.SetMysqlTime(t)
	return d, nil
}
// sysDateFunctionClass builds SYSDATE([fsp]).
type sysDateFunctionClass struct {
	baseFunctionClass
}

func (c *sysDateFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	hasFsp := len(args) == 1
	var argTps []evalTp
	if hasFsp {
		argTps = []evalTp{tpInt}
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, argTps...)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = 19, 0
	bf.deterministic = false // SYSDATE returns the current time.

	var sig builtinFunc
	if hasFsp {
		sig = &builtinSysDateWithFspSig{baseTimeBuiltinFunc{bf}}
	} else {
		sig = &builtinSysDateWithoutFspSig{baseTimeBuiltinFunc{bf}}
	}
	return sig.setSelf(sig), nil
}
// builtinSysDateWithFspSig implements SYSDATE(fsp).
type builtinSysDateWithFspSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals SYSDATE(fsp).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_sysdate
func (b *builtinSysDateWithFspSig) evalTime(row []types.Datum) (d types.Time, isNull bool, err error) {
	stmtCtx := b.ctx.GetSessionVars().StmtCtx
	fsp, isNull, err := b.args[0].EvalInt(row, stmtCtx)
	if isNull || err != nil {
		return types.Time{}, isNull, errors.Trace(err)
	}
	now, err := convertTimeToMysqlTime(time.Now(), int(fsp))
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	return now, false, nil
}
// builtinSysDateWithoutFspSig implements SYSDATE().
type builtinSysDateWithoutFspSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals SYSDATE().
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_sysdate
func (b *builtinSysDateWithoutFspSig) evalTime(row []types.Datum) (d types.Time, isNull bool, err error) {
	now, err := convertTimeToMysqlTime(time.Now(), 0) // no fsp argument means 0
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	return now, false, nil
}
// currentDateFunctionClass builds CURDATE()/CURRENT_DATE.
type currentDateFunctionClass struct {
	baseFunctionClass
}

func (c *currentDateFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// "YYYY-MM-DD" is 10 characters wide with no fractional part.
	bf.tp.Flen, bf.tp.Decimal = 10, 0
	bf.deterministic = false // depends on the current date
	sig := &builtinCurrentDateSig{baseTimeBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
// builtinCurrentDateSig implements CURDATE().
type builtinCurrentDateSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals CURDATE().
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_curdate
func (b *builtinCurrentDateSig) evalTime(row []types.Datum) (d types.Time, isNull bool, err error) {
	y, m, day := time.Now().Date()
	res := types.Time{
		Time: types.FromDate(y, int(m), day, 0, 0, 0, 0),
		Type: mysql.TypeDate,
		Fsp:  0,
	}
	return res, false, nil
}
// currentTimeFunctionClass builds CURTIME([fsp]).
type currentTimeFunctionClass struct {
	baseFunctionClass
}

func (c *currentTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinCurrentTimeSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinCurrentTimeSig implements CURTIME([fsp]).
type builtinCurrentTimeSig struct {
	baseBuiltinFunc
}

// eval evals a builtinCurrentTimeSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_curtime
func (b *builtinCurrentTimeSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	sc := b.ctx.GetSessionVars().StmtCtx
	fsp := 0
	if len(args) == 1 && !args[0].IsNull() {
		fsp, err = checkFsp(sc, args[0])
		if err != nil {
			d.SetNull()
			return d, errors.Trace(err)
		}
	}
	// Format with full microsecond precision, then let the duration
	// conversion truncate to the requested fsp.
	d.SetString(time.Now().Format("15:04:05.000000"))
	return convertToDuration(sc, d, fsp)
}
// timeFunctionClass builds TIME(expr).
type timeFunctionClass struct {
	baseFunctionClass
}

func (c *timeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDuration, tpString)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinTimeSig{baseDurationBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
// builtinTimeSig implements TIME(expr).
type builtinTimeSig struct {
	baseDurationBuiltinFunc
}

// evalDuration evals a builtinTimeSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_time.
func (b *builtinTimeSig) evalDuration(row []types.Datum) (res types.Duration, isNull bool, err error) {
	sc := b.getCtx().GetSessionVars().StmtCtx
	expr, isNull, err := b.args[0].EvalString(row, sc)
	if isNull || err != nil {
		return res, isNull, errors.Trace(err)
	}

	// Derive the fractional-second precision from the input text.
	fsp := 0
	if dotIdx := strings.IndexByte(expr, '.'); dotIdx >= 0 {
		fsp = len(expr) - dotIdx - 1
	}
	if fsp, err = types.CheckFsp(fsp); err != nil {
		return res, isNull, errors.Trace(err)
	}

	res, err = types.ParseDuration(expr, fsp)
	if types.ErrTruncatedWrongVal.Equal(err) {
		// Truncation is downgraded to a warning (or kept as an error)
		// according to the statement context.
		err = sc.HandleTruncate(err)
	}
	return res, isNull, errors.Trace(err)
}
// utcDateFunctionClass builds UTC_DATE.
type utcDateFunctionClass struct {
	baseFunctionClass
}

func (c *utcDateFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// "YYYY-MM-DD" is 10 characters wide with no fractional part.
	bf.tp.Flen, bf.tp.Decimal = 10, 0
	bf.deterministic = false // depends on the current UTC date
	sig := &builtinUTCDateSig{baseTimeBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
// builtinUTCDateSig implements UTC_DATE().
type builtinUTCDateSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals UTC_DATE, UTC_DATE().
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_utc-date
func (b *builtinUTCDateSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	y, m, d := time.Now().UTC().Date()
	midnight := time.Date(y, m, d, 0, 0, 0, 0, time.UTC)
	res := types.Time{
		Time: types.FromGoTime(midnight),
		Type: mysql.TypeDate,
		Fsp:  types.UnspecifiedFsp,
	}
	return res, false, nil
}
// utcTimestampFunctionClass builds UTC_TIMESTAMP([fsp]).
type utcTimestampFunctionClass struct {
	baseFunctionClass
}

// getFlenAndDecimal4UTCTimestampAndNow computes the display width and decimal
// count for UTC_TIMESTAMP/NOW from an optional constant fsp argument.
func getFlenAndDecimal4UTCTimestampAndNow(sc *variable.StatementContext, arg Expression) (flen, decimal int) {
	if constant, ok := arg.(*Constant); ok {
		fsp, isNull, err := constant.EvalInt(nil, sc)
		switch {
		case isNull || err != nil || fsp > int64(types.MaxFsp):
			// Unknown or too-large fsp: be generous and use the maximum.
			decimal = types.MaxFsp
		case fsp < int64(types.MinFsp):
			decimal = types.MinFsp
		default:
			decimal = int(fsp)
		}
	}
	// "YYYY-MM-DD HH:MM:SS" is 19 chars; fractional digits add a dot plus digits.
	flen = 19
	if decimal > 0 {
		flen += 1 + decimal
	}
	return flen, decimal
}
func (c *utcTimestampFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	hasArg := len(args) == 1
	argTps := make([]evalTp, 0, 1)
	if hasArg {
		argTps = append(argTps, tpInt)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, argTps...)
	if err != nil {
		return nil, errors.Trace(err)
	}
	if hasArg {
		bf.tp.Flen, bf.tp.Decimal = getFlenAndDecimal4UTCTimestampAndNow(bf.ctx.GetSessionVars().StmtCtx, args[0])
	} else {
		bf.tp.Flen, bf.tp.Decimal = 19, 0
	}
	bf.deterministic = false // returns the current UTC time

	var sig builtinFunc
	if hasArg {
		sig = &builtinUTCTimestampWithArgSig{baseTimeBuiltinFunc{bf}}
	} else {
		sig = &builtinUTCTimestampWithoutArgSig{baseTimeBuiltinFunc{bf}}
	}
	return sig.setSelf(sig), nil
}
// evalUTCTimestampWithFsp returns the current UTC time truncated to fsp.
func evalUTCTimestampWithFsp(fsp int) (types.Time, bool, error) {
	t, err := convertTimeToMysqlTime(time.Now().UTC(), fsp)
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	return t, false, nil
}
// builtinUTCTimestampWithArgSig implements UTC_TIMESTAMP(fsp).
type builtinUTCTimestampWithArgSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals UTC_TIMESTAMP(fsp).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_utc-timestamp
func (b *builtinUTCTimestampWithArgSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	num, isNull, err := b.args[0].EvalInt(row, b.ctx.GetSessionVars().StmtCtx)
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	if !isNull {
		// Validate the fsp range before evaluating.
		if num > int64(types.MaxFsp) {
			return types.Time{}, true, errors.Errorf("Too-big precision %v specified for 'utc_timestamp'. Maximum is %v.", num, types.MaxFsp)
		}
		if num < int64(types.MinFsp) {
			return types.Time{}, true, errors.Errorf("Invalid negative %d specified, must in [0, 6].", num)
		}
	}
	res, isNull, err := evalUTCTimestampWithFsp(int(num))
	return res, isNull, errors.Trace(err)
}
// builtinUTCTimestampWithoutArgSig implements UTC_TIMESTAMP().
type builtinUTCTimestampWithoutArgSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals UTC_TIMESTAMP().
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_utc-timestamp
func (b *builtinUTCTimestampWithoutArgSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	res, isNull, err := evalUTCTimestampWithFsp(0) // no argument means fsp 0
	return res, isNull, errors.Trace(err)
}
// nowFunctionClass builds NOW([fsp]).
type nowFunctionClass struct {
	baseFunctionClass
}

func (c *nowFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	hasArg := len(args) == 1
	argTps := make([]evalTp, 0, 1)
	if hasArg {
		argTps = append(argTps, tpInt)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, argTps...)
	if err != nil {
		return nil, errors.Trace(err)
	}
	if hasArg {
		bf.tp.Flen, bf.tp.Decimal = getFlenAndDecimal4UTCTimestampAndNow(bf.ctx.GetSessionVars().StmtCtx, args[0])
	} else {
		bf.tp.Flen, bf.tp.Decimal = 19, 0
	}
	bf.deterministic = false // returns the current time

	var sig builtinFunc
	if hasArg {
		sig = &builtinNowWithArgSig{baseTimeBuiltinFunc{bf}}
	} else {
		sig = &builtinNowWithoutArgSig{baseTimeBuiltinFunc{bf}}
	}
	return sig.setSelf(sig), nil
}
// evalNowWithFsp returns the current system time at the given fsp, converted
// from the system zone into the session time zone.
func evalNowWithFsp(ctx context.Context, fsp int) (types.Time, bool, error) {
	sysTs, err := getSystemTimestamp(ctx)
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	res, err := convertTimeToMysqlTime(sysTs, fsp)
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	if err = res.ConvertTimeZone(time.Local, ctx.GetSessionVars().GetTimeZone()); err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	return res, false, nil
}
// builtinNowWithArgSig implements NOW(fsp).
type builtinNowWithArgSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals NOW(fsp)
// see: https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_now
func (b *builtinNowWithArgSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	fsp, isNull, err := b.args[0].EvalInt(row, b.ctx.GetSessionVars().StmtCtx)
	if err != nil {
		return types.Time{}, true, errors.Trace(err)
	}
	switch {
	case isNull:
		fsp = 0 // a NULL fsp behaves like NOW()
	case fsp > int64(types.MaxFsp):
		return types.Time{}, true, errors.Errorf("Too-big precision %v specified for 'now'. Maximum is %v.", fsp, types.MaxFsp)
	case fsp < int64(types.MinFsp):
		return types.Time{}, true, errors.Errorf("Invalid negative %d specified, must in [0, 6].", fsp)
	}
	res, isNull, err := evalNowWithFsp(b.ctx, int(fsp))
	return res, isNull, errors.Trace(err)
}
// builtinNowWithoutArgSig implements NOW().
type builtinNowWithoutArgSig struct {
	baseTimeBuiltinFunc
}

// evalTime evals NOW()
// see: https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_now
func (b *builtinNowWithoutArgSig) evalTime(row []types.Datum) (types.Time, bool, error) {
	res, isNull, err := evalNowWithFsp(b.ctx, 0) // no argument means fsp 0
	return res, isNull, errors.Trace(err)
}
// extractFunctionClass builds EXTRACT(unit FROM date).
type extractFunctionClass struct {
	baseFunctionClass
}

func (c *extractFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinExtractSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinExtractSig implements EXTRACT(unit FROM date).
type builtinExtractSig struct {
	baseBuiltinFunc
}

// eval evals a builtinExtractSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_extract
func (b *builtinExtractSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	unit := args[0].GetString()
	dateDatum := args[1]
	if dateDatum.IsNull() {
		d.SetNull()
		return d, nil
	}

	// Convert the second argument to a full-precision datetime first.
	ft := types.NewFieldType(mysql.TypeDatetime)
	ft.Decimal = types.MaxFsp
	val, err := dateDatum.ConvertTo(b.ctx.GetSessionVars().StmtCtx, ft)
	if err != nil {
		d.SetNull()
		return d, errors.Trace(err)
	}
	if val.IsNull() {
		d.SetNull()
		return d, nil
	}
	if val.Kind() != types.KindMysqlTime {
		d.SetNull()
		return d, errors.Errorf("need time type, but got %T", val)
	}

	n, err := types.ExtractTimeNum(unit, val.GetMysqlTime())
	if err != nil {
		d.SetNull()
		return d, errors.Trace(err)
	}
	d.SetInt64(n)
	return d, nil
}
// TODO: duplicate with types.CheckFsp, better use types.CheckFsp.
// checkFsp validates that arg is a fractional-second precision in [0, 6].
func checkFsp(sc *variable.StatementContext, arg types.Datum) (int, error) {
	fsp, err := arg.ToInt64(sc)
	if err != nil {
		return 0, errors.Trace(err)
	}
	switch {
	case fsp < 0:
		return 0, errors.Errorf("Invalid negative %d specified, must in [0, 6].", fsp)
	case int(fsp) > types.MaxFsp:
		return 0, errors.Errorf("Too big precision %d specified. Maximum is 6.", fsp)
	}
	return int(fsp), nil
}
// dateArithFunctionClass builds DATE_ADD / DATE_SUB style date arithmetic;
// op records which direction the interval is applied.
type dateArithFunctionClass struct {
	baseFunctionClass
	op ast.DateArithType
}

func (c *dateArithFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinDateArithSig{newBaseBuiltinFunc(args, ctx), c.op}
	return sig.setSelf(sig), nil
}
// builtinDateArithSig implements DATE_ADD / DATE_SUB; op selects the direction.
type builtinDateArithSig struct {
	baseBuiltinFunc
	op ast.DateArithType
}
// eval applies an interval (args[1] of unit args[2]) to a date (args[0]).
// NULL date or interval yields NULL (d stays unset).
func (b *builtinDateArithSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	// args[0] -> Date
	// args[1] -> Interval Value
	// args[2] -> Interval Unit
	// health check for date and interval
	if args[0].IsNull() || args[1].IsNull() {
		return
	}
	nodeDate := args[0]
	nodeIntervalValue := args[1]
	nodeIntervalUnit := args[2].GetString()
	// NOTE(review): this null check is unreachable — args[1].IsNull() was
	// already tested above; kept as-is to preserve behavior.
	if nodeIntervalValue.IsNull() {
		return
	}
	// parse date
	// Decide whether the result carries a time component: datetime/timestamp
	// inputs, non-date-formatted strings, and clock-unit intervals all do.
	fieldType := mysql.TypeDate
	var resultField *types.FieldType
	switch nodeDate.Kind() {
	case types.KindMysqlTime:
		x := nodeDate.GetMysqlTime()
		if (x.Type == mysql.TypeDatetime) || (x.Type == mysql.TypeTimestamp) {
			fieldType = mysql.TypeDatetime
		}
	case types.KindString:
		x := nodeDate.GetString()
		if !types.IsDateFormat(x) {
			fieldType = mysql.TypeDatetime
		}
	case types.KindInt64:
		x := nodeDate.GetInt64()
		if t, err1 := types.ParseTimeFromInt64(x); err1 == nil {
			if (t.Type == mysql.TypeDatetime) || (t.Type == mysql.TypeTimestamp) {
				fieldType = mysql.TypeDatetime
			}
		}
	}
	sc := b.ctx.GetSessionVars().StmtCtx
	if types.IsClockUnit(nodeIntervalUnit) {
		fieldType = mysql.TypeDatetime
	}
	resultField = types.NewFieldType(fieldType)
	resultField.Decimal = types.MaxFsp
	value, err := nodeDate.ConvertTo(b.ctx.GetSessionVars().StmtCtx, resultField)
	if err != nil {
		return d, errInvalidOperation.Gen("DateArith invalid args, need date but get %T", nodeDate)
	}
	if value.IsNull() {
		return d, errInvalidOperation.Gen("DateArith invalid args, need date but get %v", value.GetValue())
	}
	if value.Kind() != types.KindMysqlTime {
		return d, errInvalidOperation.Gen("DateArith need time type, but got %T", value.GetValue())
	}
	result := value.GetMysqlTime()
	// parse interval
	// DAY intervals accept loose string input (booleans, leading digits);
	// other units are stringified verbatim for ExtractTimeValue.
	var interval string
	if strings.ToLower(nodeIntervalUnit) == "day" {
		day, err1 := parseDayInterval(sc, nodeIntervalValue)
		if err1 != nil {
			return d, errInvalidOperation.Gen("DateArith invalid day interval, need int but got %T", nodeIntervalValue.GetString())
		}
		interval = fmt.Sprintf("%d", day)
	} else {
		if nodeIntervalValue.Kind() == types.KindString {
			interval = fmt.Sprintf("%v", nodeIntervalValue.GetString())
		} else {
			ii, err1 := nodeIntervalValue.ToInt64(sc)
			if err1 != nil {
				return d, errors.Trace(err1)
			}
			interval = fmt.Sprintf("%v", ii)
		}
	}
	year, month, day, dur, err := types.ExtractTimeValue(nodeIntervalUnit, interval)
	if err != nil {
		return d, errors.Trace(err)
	}
	if b.op == ast.DateArithSub {
		// Subtraction is addition of the negated interval.
		year, month, day, dur = -year, -month, -day, -dur
	}
	// TODO: Consider time_zone variable.
	t, err := result.Time.GoTime(time.Local)
	if err != nil {
		return d, errors.Trace(err)
	}
	t = t.Add(dur)
	t = t.AddDate(int(year), int(month), int(day))
	if t.Nanosecond() == 0 {
		// Drop the fractional display when the result has no sub-second part.
		result.Fsp = 0
	}
	result.Time = types.FromGoTime(t)
	d.SetMysqlTime(result)
	return
}
// reg matches the leading run of digits used by parseDayInterval.
var reg = regexp.MustCompile(`[\d]+`)

// parseDayInterval extracts a day count from value: the strings "true"/"false"
// map to 1/0, other strings are reduced to their first digit run before the
// integer conversion.
func parseDayInterval(sc *variable.StatementContext, value types.Datum) (int64, error) {
	if value.Kind() == types.KindString {
		vs := value.GetString()
		switch strings.ToLower(vs) {
		case "false":
			return 0, nil
		case "true":
			return 1, nil
		}
		value.SetString(reg.FindString(vs))
	}
	return value.ToInt64(sc)
}
// timestampDiffFunctionClass builds TIMESTAMPDIFF(unit, t1, t2).
type timestampDiffFunctionClass struct {
	baseFunctionClass
}

func (c *timestampDiffFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpString, tpDatetime, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinTimestampDiffSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
// builtinTimestampDiffSig implements TIMESTAMPDIFF(unit, t1, t2).
type builtinTimestampDiffSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinTimestampDiffSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_timestampdiff
func (b *builtinTimestampDiffSig) evalInt(row []types.Datum) (int64, bool, error) {
	stmtCtx := b.getCtx().GetSessionVars().StmtCtx
	unit, isNull, err := b.args[0].EvalString(row, stmtCtx)
	if isNull || err != nil {
		return 0, isNull, errors.Trace(err)
	}
	lhs, isNull, err := b.args[1].EvalTime(row, stmtCtx)
	if isNull || err != nil {
		return 0, isNull, errors.Trace(handleInvalidTimeError(b.getCtx(), err))
	}
	rhs, isNull, err := b.args[2].EvalTime(row, stmtCtx)
	if isNull || err != nil {
		return 0, isNull, errors.Trace(handleInvalidTimeError(b.getCtx(), err))
	}
	if lhs.InvalidZero() || rhs.InvalidZero() {
		// Zero dates are not valid operands for the difference.
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return types.TimestampDiff(unit, lhs, rhs), false, nil
}
// unixTimestampFunctionClass builds UNIX_TIMESTAMP([date]).
type unixTimestampFunctionClass struct {
	baseFunctionClass
}

func (c *unixTimestampFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinUnixTimestampSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinUnixTimestampSig implements UNIX_TIMESTAMP([date]).
type builtinUnixTimestampSig struct {
	baseBuiltinFunc
}
// eval evals a builtinUnixTimestampSig.
// With no argument it returns the current unix time; with one argument it
// converts that value (string/int/time) to seconds since the epoch, returning
// a decimal when the input carries microseconds.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_unix-timestamp
func (b *builtinUnixTimestampSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	if len(args) == 0 {
		// Zero-argument form: current unix time in seconds.
		now := time.Now().Unix()
		d.SetInt64(now)
		return
	}

	var (
		t  types.Time
		t1 time.Time
	)
	// Parse the single argument into a types.Time according to its kind.
	switch args[0].Kind() {
	case types.KindString:
		t, err = types.ParseTime(args[0].GetString(), mysql.TypeDatetime, types.MaxFsp)
		if err != nil {
			return d, errors.Trace(err)
		}
	case types.KindInt64, types.KindUint64:
		t, err = types.ParseTimeFromInt64(args[0].GetInt64())
		if err != nil {
			return d, errors.Trace(err)
		}
	case types.KindMysqlTime:
		t = args[0].GetMysqlTime()
	case types.KindNull:
		// NULL input yields NULL output.
		return
	default:
		return d, errors.Errorf("Unknown args type for unix_timestamp %d", args[0].Kind())
	}

	t1, err = t.Time.GoTime(getTimeZone(b.ctx))
	if err != nil {
		// Unrepresentable times (e.g. zero date) return 0, not an error.
		d.SetInt64(0)
		return d, nil
	}

	if t.Time.Microsecond() > 0 {
		// Preserve sub-second precision by returning a decimal.
		var dec types.MyDecimal
		dec.FromFloat64(float64(t1.Unix()) + float64(t.Time.Microsecond())/1e6)
		d.SetMysqlDecimal(&dec)
	} else {
		d.SetInt64(t1.Unix())
	}
	return
}
// timestampFunctionClass builds TIMESTAMP(expr[, duration]).
type timestampFunctionClass struct {
	baseFunctionClass
}

func (c *timestampFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinTimestampSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinTimestampSig implements TIMESTAMP(expr[, duration]).
type builtinTimestampSig struct {
	baseBuiltinFunc
}
// eval evals a builtinTimestampSig.
// One argument: convert it to a datetime. Two arguments: convert the first to
// a datetime and add the second, parsed as a duration.
// See https://dev.mysql.com/doc/refman/5.5/en/date-and-time-functions.html#function_timestamp
func (b *builtinTimestampSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	if args[0].IsNull() {
		// NULL input yields NULL output.
		return
	}
	// Normalize the first argument into a types.Time based on its kind.
	var arg0 types.Time
	switch tp := args[0].Kind(); tp {
	case types.KindInt64, types.KindUint64:
		arg0, err = types.ParseDatetimeFromNum(args[0].GetInt64())
		if err != nil {
			return d, errors.Trace(err)
		}
	case types.KindString, types.KindBytes, types.KindMysqlDecimal, types.KindFloat32, types.KindFloat64:
		s, err1 := args[0].ToString()
		if err1 != nil {
			return d, errors.Trace(err1)
		}
		// The fsp is inferred from the textual form of the value.
		arg0, err = types.ParseTime(s, mysql.TypeDatetime, getFsp(s))
		if err != nil {
			return d, errors.Trace(err)
		}
	case types.KindMysqlTime:
		arg0 = args[0].GetMysqlTime()
	default:
		return d, errors.Errorf("Unknown args type for timestamp %d", tp)
	}
	if len(args) == 1 {
		d.SetMysqlTime(arg0)
		return
	}
	// If exists args[1].
	// The second argument is parsed as a duration and added to the datetime.
	s, err := args[1].ToString()
	if err != nil {
		return d, errors.Trace(err)
	}
	arg1, err := types.ParseDuration(s, getFsp(s))
	if err != nil {
		return d, errors.Trace(err)
	}
	tmpDuration := arg0.Add(arg1)
	result, err := tmpDuration.ConvertToTime(arg0.Type)
	if err != nil {
		return d, errors.Trace(err)
	}
	d.SetMysqlTime(result)
	return
}
func getFsp(s string) (fsp int) {
fsp = len(s) - strings.Index(s, ".") - 1
if fsp == len(s) {
fsp = 0
} else if fsp > 6 {
fsp = 6
}
return
}
// getTimeZone returns the session time zone, defaulting to the system local
// zone when the session has none configured.
func getTimeZone(ctx context.Context) *time.Location {
	if tz := ctx.GetSessionVars().TimeZone; tz != nil {
		return tz
	}
	return time.Local
}
// isDuration returns a boolean indicating whether the str matches the format of duration.
// See https://dev.mysql.com/doc/refman/5.7/en/time.html
func isDuration(str string) bool {
	matched := DurationPattern.MatchString(str)
	return matched
}
// strDatetimeAddDuration adds duration to datetime string, returns a datum value.
// The result is returned as a string datum formatted from the summed datetime.
func strDatetimeAddDuration(d string, arg1 types.Duration) (result types.Datum, err error) {
	arg0, err := types.ParseTime(d, mysql.TypeDatetime, getFsp(d))
	if err != nil {
		return result, errors.Trace(err)
	}
	tmpDuration := arg0.Add(arg1)
	resultDuration, err := tmpDuration.ConvertToTime(mysql.TypeDatetime)
	if err != nil {
		return result, errors.Trace(err)
	}
	// NOTE(review): this Fsp assignment mutates tmpDuration AFTER
	// resultDuration has already been derived from it, so it appears to have
	// no effect on the returned string. Possibly resultDuration.Fsp was
	// intended — confirm against strDurationAddDuration, where the Fsp is set
	// on the value that is actually stringified.
	if getFsp(d) != 0 {
		tmpDuration.Fsp = types.MaxFsp
	} else {
		tmpDuration.Fsp = types.MinFsp
	}
	result.SetString(resultDuration.String())
	return
}
// strDurationAddDuration adds duration to duration string, returns a datum value.
func strDurationAddDuration(d string, arg1 types.Duration) (result types.Datum, err error) {
	base, err := types.ParseDuration(d, getFsp(d))
	if err != nil {
		return result, errors.Trace(err)
	}
	sum, err := base.Add(arg1)
	if err != nil {
		return result, errors.Trace(err)
	}
	// Keep full fractional precision only when the input carried one.
	sum.Fsp = types.MinFsp
	if getFsp(d) != 0 {
		sum.Fsp = types.MaxFsp
	}
	result.SetString(sum.String())
	return result, nil
}
// strDatetimeSubDuration subtracts duration from datetime string, returns a datum value.
// The result is returned as a string datum formatted from the difference.
func strDatetimeSubDuration(d string, arg1 types.Duration) (result types.Datum, err error) {
	arg0, err := types.ParseTime(d, mysql.TypeDatetime, getFsp(d))
	if err != nil {
		return result, errors.Trace(err)
	}
	// The duration operand is converted to a time so it can be subtracted.
	arg1time, err := arg1.ConvertToTime(uint8(getFsp(arg1.String())))
	if err != nil {
		return result, errors.Trace(err)
	}
	tmpDuration := arg0.Sub(&arg1time)
	resultDuration, err := tmpDuration.ConvertToTime(mysql.TypeDatetime)
	if err != nil {
		return result, errors.Trace(err)
	}
	// NOTE(review): as in strDatetimeAddDuration, this Fsp assignment happens
	// after resultDuration was derived and tmpDuration is not used again, so
	// it appears ineffective — confirm whether resultDuration.Fsp was intended.
	if getFsp(d) != 0 {
		tmpDuration.Fsp = types.MaxFsp
	} else {
		tmpDuration.Fsp = types.MinFsp
	}
	result.SetString(resultDuration.String())
	return
}
// strDurationSubDuration subtracts duration from duration string, returns a datum value.
func strDurationSubDuration(d string, arg1 types.Duration) (result types.Datum, err error) {
	base, err := types.ParseDuration(d, getFsp(d))
	if err != nil {
		return result, errors.Trace(err)
	}
	diff, err := base.Sub(arg1)
	if err != nil {
		return result, errors.Trace(err)
	}
	// Keep full fractional precision only when the input carried one.
	diff.Fsp = types.MinFsp
	if getFsp(d) != 0 {
		diff.Fsp = types.MaxFsp
	}
	result.SetString(diff.String())
	return result, nil
}
// addTimeFunctionClass builds ADDTIME(expr1, expr2).
type addTimeFunctionClass struct {
	baseFunctionClass
}

func (c *addTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinAddTimeSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinAddTimeSig implements ADDTIME(expr1, expr2).
type builtinAddTimeSig struct {
	baseBuiltinFunc
}
// eval evals a builtinAddTimeSig.
// expr2 is coerced to a duration and added to expr1; the result kind follows
// expr1 (time+duration -> time, duration+duration -> duration, string ->
// string). NULL operands yield NULL.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_addtime
func (b *builtinAddTimeSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	if args[0].IsNull() || args[1].IsNull() {
		return
	}
	// Coerce the second argument into a duration.
	var arg1 types.Duration
	switch tp := args[1].Kind(); tp {
	case types.KindMysqlDuration:
		arg1 = args[1].GetMysqlDuration()
	default:
		s, err := args[1].ToString()
		if err != nil {
			return d, errors.Trace(err)
		}
		// Parse with full precision only when the text has a fractional part.
		if getFsp(s) == 0 {
			arg1, err = types.ParseDuration(s, 0)
		} else {
			arg1, err = types.ParseDuration(s, types.MaxFsp)
		}
		if err != nil {
			return d, errors.Trace(err)
		}
	}
	// Dispatch on the first argument's kind to pick the result type.
	switch tp := args[0].Kind(); tp {
	case types.KindMysqlTime:
		arg0 := args[0].GetMysqlTime()
		tmpDuration := arg0.Add(arg1)
		result, err := tmpDuration.ConvertToTime(arg0.Type)
		if err != nil {
			return d, errors.Trace(err)
		}
		d.SetMysqlTime(result)
	case types.KindMysqlDuration:
		arg0 := args[0].GetMysqlDuration()
		result, err := arg0.Add(arg1)
		if err != nil {
			return d, errors.Trace(err)
		}
		d.SetMysqlDuration(result)
	default:
		// Everything else is handled textually: a duration-shaped string
		// gets duration addition, anything else datetime addition.
		ss, err := args[0].ToString()
		if err != nil {
			return d, errors.Trace(err)
		}
		if isDuration(ss) {
			return strDurationAddDuration(ss, arg1)
		}
		return strDatetimeAddDuration(ss, arg1)
	}
	return
}
// convertTzFunctionClass builds CONVERT_TZ(dt, from_tz, to_tz).
type convertTzFunctionClass struct {
	baseFunctionClass
}

func (c *convertTzFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	err := c.verifyArgs(args)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinConvertTzSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}
// builtinConvertTzSig implements CONVERT_TZ(dt, from_tz, to_tz).
type builtinConvertTzSig struct {
	baseBuiltinFunc
}
// eval evals a builtinConvertTzSig.
// Time zones may be given either as named zones (both args) or as numeric
// offsets like "+08:00" (both args); a NULL operand, or a mix of one named
// and one offset zone, yields NULL.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_convert-tz
func (b *builtinConvertTzSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	if args[0].IsNull() || args[1].IsNull() || args[2].IsNull() {
		return
	}

	sc := b.ctx.GetSessionVars().StmtCtx

	// Infer the fractional precision from string input.
	fsp := 0
	if args[0].Kind() == types.KindString {
		fsp = types.DateFSP(args[0].GetString())
	}

	arg0, err := convertToTimeWithFsp(sc, args[0], mysql.TypeDatetime, fsp)
	if err != nil {
		return d, errors.Trace(err)
	}

	if arg0.IsNull() {
		return
	}

	dt := arg0.GetMysqlTime()

	fromTZ := args[1].GetString()
	toTZ := args[2].GetString()

	// Offsets range from -12:59 to +13:00 per MySQL's documented limits.
	// NOTE(review): the pattern is compiled on every call and the Compile
	// error is discarded; the pattern is a constant, so regexp.MustCompile in
	// a package-level var would be the usual form — confirm before changing.
	const tzArgReg = `(^(\+|-)(0?[0-9]|1[0-2]):[0-5]?\d$)|(^\+13:00$)`
	r, _ := regexp.Compile(tzArgReg)
	fmatch := r.MatchString(fromTZ)
	tmatch := r.MatchString(toTZ)

	if !fmatch && !tmatch {
		// Both zones are named: convert via the IANA database.
		ftz, err := time.LoadLocation(fromTZ)
		if err != nil {
			return d, errors.Trace(err)
		}

		ttz, err := time.LoadLocation(toTZ)
		if err != nil {
			return d, errors.Trace(err)
		}

		t, err := dt.Time.GoTime(ftz)
		if err != nil {
			return d, errors.Trace(err)
		}

		d.SetMysqlTime(types.Time{
			Time: types.FromGoTime(t.In(ttz)),
			Type: mysql.TypeDatetime,
			Fsp:  dt.Fsp,
		})
		return d, nil
	}

	if fmatch && tmatch {
		// Both zones are numeric offsets: shift by the offset difference.
		t, err := dt.Time.GoTime(time.Local)
		if err != nil {
			return d, errors.Trace(err)
		}

		d.SetMysqlTime(types.Time{
			Time: types.FromGoTime(t.Add(timeZone2Duration(toTZ) - timeZone2Duration(fromTZ))),
			Type: mysql.TypeDatetime,
			Fsp:  dt.Fsp,
		})
	}

	// Mixed named/offset zones fall through and return NULL.
	return
}
// makeDateFunctionClass builds MAKEDATE(year, dayofyear).
type makeDateFunctionClass struct {
	baseFunctionClass
}

func (c *makeDateFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, tpInt, tpInt)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// The result is a plain DATE with no fractional part.
	bf.tp.Tp = mysql.TypeDate
	bf.tp.Flen = mysql.MaxDateWidth
	bf.tp.Decimal = 0
	sig := &builtinMakeDateSig{baseTimeBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}
// builtinMakeDateSig is the evaluator for MAKEDATE().
type builtinMakeDateSig struct {
	baseTimeBuiltinFunc
}

// evalTime evaluates a builtinMakeDateSig.
// MAKEDATE(year, dayofyear) returns the date that is `dayofyear - 1` days
// after January 1st of `year`; day-of-year values past the year's end roll
// over into following years, matching MySQL.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_makedate
func (b *builtinMakeDateSig) evalTime(row []types.Datum) (d types.Time, isNull bool, err error) {
	args := b.getArgs()
	sc := b.ctx.GetSessionVars().StmtCtx
	var year, dayOfYear int64
	year, isNull, err = args[0].EvalInt(row, sc)
	if isNull || err != nil {
		return d, true, errors.Trace(err)
	}
	dayOfYear, isNull, err = args[1].EvalInt(row, sc)
	if isNull || err != nil {
		return d, true, errors.Trace(err)
	}
	// Out-of-range inputs produce NULL rather than an error.
	if dayOfYear <= 0 || year < 0 || year > 9999 {
		return d, true, nil
	}
	// Two-digit years follow the MySQL convention: 0-69 => 2000-2069,
	// 70-99 => 1970-1999.
	if year < 70 {
		year += 2000
	} else if year < 100 {
		year += 1900
	}
	startTime := types.Time{
		Time: types.FromDate(int(year), 1, 1, 0, 0, 0, 0),
		Type: mysql.TypeDate,
		Fsp:  0,
	}
	// Day count of Jan 1st relative to the zero date; 0 signals an invalid date.
	retTimestamp := types.TimestampDiff("DAY", types.ZeroDate, startTime)
	if retTimestamp == 0 {
		return d, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	ret := types.TimeFromDays(retTimestamp + dayOfYear - 1)
	// Rolling past year 9999 yields NULL, as in MySQL.
	if ret.IsZero() || ret.Time.Year() > 9999 {
		return d, true, nil
	}
	return ret, false, nil
}
// makeTimeFunctionClass implements the MAKETIME() builtin function.
type makeTimeFunctionClass struct {
	baseFunctionClass
}

// getFunction builds the (non-typed, legacy eval) MAKETIME signature.
func (c *makeTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinMakeTimeSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

// builtinMakeTimeSig is the evaluator for MAKETIME().
type builtinMakeTimeSig struct {
	baseBuiltinFunc
}
// eval evals a builtinMakeTimeSig.
// MAKETIME(hour, minute, second) builds a TIME value; the hour is clamped to
// the TIME range [-838, 838] and invalid minutes/seconds yield NULL.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_maketime
func (b *builtinMakeTimeSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	// MAKETIME(hour, minute, second)
	if args[0].IsNull() || args[1].IsNull() || args[2].IsNull() {
		return
	}
	var (
		hour     int64
		minute   int64
		second   float64
		overflow bool
	)
	sc := b.ctx.GetSessionVars().StmtCtx
	hour, _ = args[0].ToInt64(sc)
	// MySQL TIME datatype: https://dev.mysql.com/doc/refman/5.7/en/time.html
	// ranges from '-838:59:59.000000' to '838:59:59.000000'
	if hour < -838 {
		hour = -838
		overflow = true
	} else if hour > 838 {
		hour = 838
		overflow = true
	}
	minute, _ = args[1].ToInt64(sc)
	// Minutes/seconds outside [0, 60) make the whole result NULL.
	if minute < 0 || minute >= 60 {
		return
	}
	second, _ = args[2].ToFloat64(sc)
	if second < 0 || second >= 60 {
		return
	}
	if hour == -838 || hour == 838 {
		if second > 59 {
			second = 59
		}
	}
	// When the hour overflowed, saturate the whole value at the range limit.
	if overflow {
		minute = 59
		second = 59
	}
	var dur types.Duration
	fsp := types.MaxFsp
	// Derive the fractional precision from the textual form of the seconds
	// argument. NOTE(review): this branch runs only when the argument is NOT
	// a string, which looks inverted at first glance but mirrors the
	// equivalent logic in SEC_TO_TIME below — confirm intent before changing.
	if args[2].Kind() != types.KindString {
		sec, _ := args[2].ToString()
		secs := strings.Split(sec, ".")
		if len(secs) <= 1 {
			fsp = 0
		} else if len(secs[1]) < fsp {
			fsp = len(secs[1])
		}
	}
	dur, err = types.ParseDuration(fmt.Sprintf("%02d:%02d:%v", hour, minute, second), fsp)
	if err != nil {
		return d, errors.Trace(err)
	}
	d.SetMysqlDuration(dur)
	return
}
// periodAddFunctionClass implements the PERIOD_ADD() builtin function.
type periodAddFunctionClass struct {
	baseFunctionClass
}

func (c *periodAddFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinPeriodAddSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

// builtinPeriodAddSig is the evaluator for PERIOD_ADD().
type builtinPeriodAddSig struct {
	baseBuiltinFunc
}

// eval evals a builtinPeriodAddSig.
// PERIOD_ADD(P, N) adds N months to period P (format YYMM or YYYYMM) and
// returns the result as YYYYMM.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_period-add
func (b *builtinPeriodAddSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	// NULL in, NULL out (err is nil here, so this is a plain NULL return).
	if args[0].IsNull() || args[1].IsNull() {
		return d, errors.Trace(err)
	}
	sc := b.ctx.GetSessionVars().StmtCtx
	period, err := args[0].ToInt64(sc)
	if err != nil {
		return d, errors.Trace(err)
	}
	// Non-positive periods collapse to 0.
	if period <= 0 {
		d.SetInt64(0)
		return
	}
	y, m := getYearAndMonth(period)
	months, err := args[1].ToInt64(sc)
	if err != nil {
		return d, errors.Trace(err)
	}
	// Let time.Date normalize month overflow/underflow into the year.
	sum := time.Month(m + months)
	// TODO: Consider time_zone variable.
	t := time.Date(int(y), sum, 1, 0, 0, 0, 0, time.Local)
	ret := int64(t.Year())*100 + int64(t.Month())
	d.SetInt64(ret)
	return
}
// periodDiffFunctionClass implements the PERIOD_DIFF() builtin function.
type periodDiffFunctionClass struct {
	baseFunctionClass
}

func (c *periodDiffFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinPeriodDiffSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

// builtinPeriodDiffSig is the evaluator for PERIOD_DIFF().
type builtinPeriodDiffSig struct {
	baseBuiltinFunc
}

// eval evals a builtinPeriodDiffSig.
// PERIOD_DIFF(P1, P2) returns the number of months between two periods
// given as YYMM or YYYYMM values.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_period-diff
func (b *builtinPeriodDiffSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	// NULL in, NULL out.
	if args[0].IsNull() || args[1].IsNull() {
		return
	}
	sc := b.ctx.GetSessionVars().StmtCtx
	period, err := args[0].ToInt64(sc)
	if err != nil {
		return d, errors.Trace(err)
	}
	period2, err := args[1].ToInt64(sc)
	if err != nil {
		return d, errors.Trace(err)
	}
	// Difference of absolute month counts.
	d.SetInt64(getMonth(period) - getMonth(period2))
	return
}
// getYearAndMonth splits a period value in YYMM or YYYYMM form into its year
// and month components. Two-digit years are widened using the MySQL
// convention: 00-69 -> 2000-2069, 70-99 -> 1970-1999.
func getYearAndMonth(period int64) (int64, int64) {
	year, month := period/100, period%100
	switch {
	case year <= 69:
		year += 2000
	case year <= 99:
		year += 1900
	}
	return year, month
}
// getMonth converts a period (YYMM/YYYYMM) into an absolute month count used
// for period arithmetic; a zero period maps to zero.
func getMonth(period int64) int64 {
	if period == 0 {
		return 0
	}
	year, month := getYearAndMonth(period)
	return year*12 + month - 1
}
// quarterFunctionClass implements the QUARTER() builtin function.
type quarterFunctionClass struct {
	baseFunctionClass
}

func (c *quarterFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// The result is always a single digit 1-4.
	bf.tp.Flen = 1
	sig := &builtinQuarterSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

// builtinQuarterSig is the evaluator for QUARTER().
type builtinQuarterSig struct {
	baseIntBuiltinFunc
}

// evalInt evals QUARTER(date).
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_quarter
func (b *builtinQuarterSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	date, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	// The zero date is invalid input (NULL or error depending on SQL mode).
	if date.IsZero() {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	// Months 1-3 -> 1, 4-6 -> 2, 7-9 -> 3, 10-12 -> 4.
	return int64((date.Time.Month() + 2) / 3), false, nil
}
// secToTimeFunctionClass implements the SEC_TO_TIME() builtin function.
type secToTimeFunctionClass struct {
	baseFunctionClass
}

func (c *secToTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinSecToTimeSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

// builtinSecToTimeSig is the evaluator for SEC_TO_TIME().
type builtinSecToTimeSig struct {
	baseBuiltinFunc
}

// eval evals a builtinSecToTimeSig.
// SEC_TO_TIME(seconds) converts a (possibly fractional, possibly negative)
// second count into a TIME value, clamped to the TIME range at 838:59:59.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_sec-to-time
func (b *builtinSecToTimeSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	if args[0].IsNull() {
		return
	}
	var (
		hour          int64
		minute        int64
		second        int64
		demical       float64 // fractional part of the input seconds ("decimal")
		secondDemical float64 // whole seconds plus the fractional part
		negative      string  // "-" prefix for negative inputs
	)
	sc := b.ctx.GetSessionVars().StmtCtx
	secondsFloat, err := args[0].ToFloat64(sc)
	if err != nil {
		// An unparseable string is treated as 0 rather than an error,
		// matching MySQL's lenient string-to-number coercion.
		if args[0].Kind() == types.KindString && types.ErrTruncated.Equal(err) {
			secondsFloat = float64(0)
		} else {
			return d, errors.Trace(err)
		}
	}
	if secondsFloat < 0 {
		negative = "-"
		secondsFloat = math.Abs(secondsFloat)
	}
	seconds := int64(secondsFloat)
	demical = secondsFloat - float64(seconds)
	hour = seconds / 3600
	// Clamp at the upper bound of the TIME range.
	if hour > 838 {
		hour = 838
		minute = 59
		second = 59
	} else {
		minute = seconds % 3600 / 60
		second = seconds % 60
	}
	secondDemical = float64(second) + demical
	var dur types.Duration
	fsp := types.MaxFsp
	// Derive fractional precision from the textual form of the argument.
	// NOTE(review): like MAKETIME above, this runs only for non-string
	// arguments — confirm the intended handling of string inputs.
	if args[0].Kind() != types.KindString {
		sec, _ := args[0].ToString()
		secs := strings.Split(sec, ".")
		if len(secs) <= 1 {
			fsp = 0
		} else if len(secs[1]) < fsp {
			fsp = len(secs[1])
		}
	}
	dur, err = types.ParseDuration(fmt.Sprintf("%s%02d:%02d:%v", negative, hour, minute, secondDemical), fsp)
	if err != nil {
		return d, errors.Trace(err)
	}
	d.SetMysqlDuration(dur)
	return
}
// subTimeFunctionClass implements the SUBTIME() builtin function.
type subTimeFunctionClass struct {
	baseFunctionClass
}

func (c *subTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinSubTimeSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

// builtinSubTimeSig is the evaluator for SUBTIME().
type builtinSubTimeSig struct {
	baseBuiltinFunc
}

// eval evals a builtinSubTimeSig.
// SUBTIME(expr1, expr2) subtracts the time value expr2 from expr1; the result
// kind (datetime vs. duration) follows the kind of expr1.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_subtime
func (b *builtinSubTimeSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	if args[0].IsNull() || args[1].IsNull() {
		return
	}
	// Normalize the second argument into a Duration.
	var arg1 types.Duration
	switch args[1].Kind() {
	case types.KindMysqlDuration:
		arg1 = args[1].GetMysqlDuration()
	default:
		s, err := args[1].ToString()
		if err != nil {
			return d, errors.Trace(err)
		}
		// Parse with fsp 0 when the literal carries no fractional part.
		if getFsp(s) == 0 {
			arg1, err = types.ParseDuration(s, 0)
		} else {
			arg1, err = types.ParseDuration(s, types.MaxFsp)
		}
		if err != nil {
			return d, errors.Trace(err)
		}
	}
	// Dispatch on the kind of the first argument.
	switch args[0].Kind() {
	case types.KindMysqlTime:
		// datetime - duration => datetime of the same type.
		arg0 := args[0].GetMysqlTime()
		arg1time, err := arg1.ConvertToTime(uint8(getFsp(arg1.String())))
		if err != nil {
			return d, errors.Trace(err)
		}
		tmpDuration := arg0.Sub(&arg1time)
		result, err := tmpDuration.ConvertToTime(arg0.Type)
		if err != nil {
			return d, errors.Trace(err)
		}
		d.SetMysqlTime(result)
	case types.KindMysqlDuration:
		// duration - duration => duration.
		arg0 := args[0].GetMysqlDuration()
		result, err := arg0.Sub(arg1)
		if err != nil {
			return d, errors.Trace(err)
		}
		d.SetMysqlDuration(result)
	default:
		// Strings: decide by the literal's shape whether it is a time-of-day
		// or a full datetime, then subtract accordingly.
		ss, err := args[0].ToString()
		if err != nil {
			return d, errors.Trace(err)
		}
		if isDuration(ss) {
			return strDurationSubDuration(ss, arg1)
		}
		return strDatetimeSubDuration(ss, arg1)
	}
	return
}
// timeFormatFunctionClass implements the TIME_FORMAT() builtin function.
type timeFormatFunctionClass struct {
	baseFunctionClass
}

func (c *timeFormatFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpString, tpDuration, tpString)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// worst case: formatMask=%r%r%r...%r, each %r takes 11 characters
	bf.tp.Flen = (args[1].GetType().Flen + 1) / 2 * 11
	sig := &builtinTimeFormatSig{baseStringBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

// builtinTimeFormatSig is the evaluator for TIME_FORMAT().
type builtinTimeFormatSig struct {
	baseStringBuiltinFunc
}

// evalString evals a builtinTimeFormatSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_time-format
func (b *builtinTimeFormatSig) evalString(row []types.Datum) (string, bool, error) {
	dur, isNull, err := b.args[0].EvalDuration(row, b.ctx.GetSessionVars().StmtCtx)
	// if err != nil, then dur is ZeroDuration, outputs 00:00:00 in this case which follows the behavior of mysql.
	if err != nil {
		log.Warnf("Expression.EvalDuration() in time_format() failed, due to %s", err.Error())
	}
	if isNull {
		return "", isNull, errors.Trace(err)
	}
	formatMask, isNull, err := b.args[1].EvalString(row, b.ctx.GetSessionVars().StmtCtx)
	if err != nil || isNull {
		return "", isNull, errors.Trace(err)
	}
	res, err := b.formatTime(dur, formatMask, b.ctx)
	return res, isNull, errors.Trace(err)
}

// formatTime renders a Duration through the DATE_FORMAT machinery by wrapping
// it in a Time whose date part is zero, so only time specifiers are meaningful.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_time-format
func (b *builtinTimeFormatSig) formatTime(t types.Duration, formatMask string, ctx context.Context) (res string, err error) {
	t2 := types.Time{
		Time: types.FromDate(0, 0, 0, t.Hour(), t.Minute(), t.Second(), t.MicroSecond()),
		Type: mysql.TypeDate, Fsp: 0}
	str, err := t2.DateFormat(formatMask)
	return str, errors.Trace(err)
}
// timeToSecFunctionClass implements the TIME_TO_SEC() builtin function.
type timeToSecFunctionClass struct {
	baseFunctionClass
}

func (c *timeToSecFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinTimeToSecSig{newBaseBuiltinFunc(args, ctx)}
	return sig.setSelf(sig), nil
}

// builtinTimeToSecSig is the evaluator for TIME_TO_SEC().
type builtinTimeToSecSig struct {
	baseBuiltinFunc
}

// eval evals a builtinTimeToSecSig.
// TIME_TO_SEC(time) returns the argument converted to a signed second count.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_time-to-sec
func (b *builtinTimeToSecSig) eval(row []types.Datum) (d types.Datum, err error) {
	args, err := b.evalArgs(row)
	if err != nil {
		return d, errors.Trace(err)
	}
	d, err = convertToDuration(b.ctx.GetSessionVars().StmtCtx, args[0], 0)
	if err != nil || d.IsNull() {
		return d, errors.Trace(err)
	}
	t := d.GetMysqlDuration()
	// TODO: select TIME_TO_SEC('-2:-2:-2') not handle well.
	// Negative durations expose positive components, so negate the sum.
	if t.Compare(types.ZeroDuration) < 0 {
		d.SetInt64(int64(-1 * (t.Hour()*3600 + t.Minute()*60 + t.Second())))
	} else {
		d.SetInt64(int64(t.Hour()*3600 + t.Minute()*60 + t.Second()))
	}
	return
}
// timestampAddFunctionClass implements the TIMESTAMPADD() builtin function.
type timestampAddFunctionClass struct {
	baseFunctionClass
}

// getFunction builds the TIMESTAMPADD signature:
// (unit STRING, interval INT, datetime) -> STRING.
func (c *timestampAddFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpString, tpString, tpInt, tpDatetime)
	// Fix: check the construction error before touching bf. The previous code
	// assigned bf.tp first, silently operating on the zero value on the error
	// path.
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp = &types.FieldType{Tp: mysql.TypeString, Flen: mysql.MaxDatetimeWidthNoFsp, Decimal: types.UnspecifiedLength}
	sig := &builtinTimestampAddSig{baseStringBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

// builtinTimestampAddSig is the evaluator for TIMESTAMPADD().
type builtinTimestampAddSig struct {
	baseStringBuiltinFunc
}
// evalString evals a builtinTimestampAddSig.
// TIMESTAMPADD(unit, interval, datetime) adds `interval` units to the
// datetime and renders the result as a string. Calendar units go through
// AddDate (which normalizes overflow), clock units through Duration math.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_timestampadd
func (b *builtinTimestampAddSig) evalString(row []types.Datum) (string, bool, error) {
	ctx := b.getCtx().GetSessionVars().StmtCtx
	unit, isNull, err := b.args[0].EvalString(row, ctx)
	if isNull || err != nil {
		return "", isNull, errors.Trace(err)
	}
	v, isNull, err := b.args[1].EvalInt(row, ctx)
	if isNull || err != nil {
		return "", isNull, errors.Trace(err)
	}
	arg, isNull, err := b.args[2].EvalTime(row, ctx)
	if isNull || err != nil {
		return "", isNull, errors.Trace(err)
	}
	// NOTE(review): the conversion uses the session's local Go location —
	// confirm this matches the time_zone variable semantics.
	tm1, err := arg.Time.GoTime(time.Local)
	if err != nil {
		return "", isNull, errors.Trace(err)
	}
	var tb time.Time
	fsp := types.DefaultFsp
	switch unit {
	case "MICROSECOND":
		tb = tm1.Add(time.Duration(v) * time.Microsecond)
		// Sub-second arithmetic needs the full fractional precision.
		fsp = types.MaxFsp
	case "SECOND":
		tb = tm1.Add(time.Duration(v) * time.Second)
	case "MINUTE":
		tb = tm1.Add(time.Duration(v) * time.Minute)
	case "HOUR":
		tb = tm1.Add(time.Duration(v) * time.Hour)
	case "DAY":
		tb = tm1.AddDate(0, 0, int(v))
	case "WEEK":
		tb = tm1.AddDate(0, 0, 7*int(v))
	case "MONTH":
		tb = tm1.AddDate(0, int(v), 0)
	case "QUARTER":
		tb = tm1.AddDate(0, 3*int(v), 0)
	case "YEAR":
		tb = tm1.AddDate(int(v), 0, 0)
	default:
		return "", true, errors.Trace(types.ErrInvalidTimeFormat)
	}
	r := types.Time{Time: types.FromGoTime(tb), Type: mysql.TypeDatetime, Fsp: fsp}
	// Reject results that left the valid datetime range.
	if err = r.Check(); err != nil {
		return "", true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	return r.String(), false, nil
}
// toDaysFunctionClass implements the TO_DAYS() builtin function.
type toDaysFunctionClass struct {
	baseFunctionClass
}

func (c *toDaysFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinToDaysSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

// builtinToDaysSig is the evaluator for TO_DAYS().
type builtinToDaysSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinToDaysSig.
// TO_DAYS(date) returns the day number of `date` counted from year 0.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_to-days
func (b *builtinToDaysSig) evalInt(row []types.Datum) (int64, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	arg, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	// A diff of 0 means the argument was at/below the zero date: invalid.
	ret := types.TimestampDiff("DAY", types.ZeroDate, arg)
	if ret == 0 {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return ret, false, nil
}
// toSecondsFunctionClass implements the TO_SECONDS() builtin function.
type toSecondsFunctionClass struct {
	baseFunctionClass
}

func (c *toSecondsFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpInt, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	sig := &builtinToSecondsSig{baseIntBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

// builtinToSecondsSig is the evaluator for TO_SECONDS().
type builtinToSecondsSig struct {
	baseIntBuiltinFunc
}

// evalInt evals a builtinToSecondsSig.
// TO_SECONDS(expr) returns the number of seconds from year 0 to `expr`.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_to-seconds
func (b *builtinToSecondsSig) evalInt(row []types.Datum) (int64, bool, error) {
	arg, isNull, err := b.args[0].EvalTime(row, b.getCtx().GetSessionVars().StmtCtx)
	if isNull || err != nil {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	// A diff of 0 means the argument was at/below the zero date: invalid.
	ret := types.TimestampDiff("SECOND", types.ZeroDate, arg)
	if ret == 0 {
		return 0, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	return ret, false, nil
}
// utcTimeFunctionClass implements the UTC_TIME() builtin function.
type utcTimeFunctionClass struct {
	baseFunctionClass
}

// getFlenAndDecimal4UTCTime computes the display width and fractional
// precision of UTC_TIME([fsp]). With no argument the result is "HH:MM:SS"
// (flen 8, decimal 0). With a constant argument the decimal is the clamped
// fsp; a non-constant argument leaves decimal at its zero value.
func (c *utcTimeFunctionClass) getFlenAndDecimal4UTCTime(sc *variable.StatementContext, args []Expression) (flen, decimal int) {
	if len(args) == 0 {
		flen, decimal = 8, 0
		return
	}
	if constant, ok := args[0].(*Constant); ok {
		fsp, isNull, err := constant.EvalInt(nil, sc)
		// Out-of-range, NULL, or erroring constants fall back to the bounds.
		if isNull || err != nil || fsp > int64(types.MaxFsp) {
			decimal = types.MaxFsp
		} else if fsp < int64(types.MinFsp) {
			decimal = types.MinFsp
		} else {
			decimal = int(fsp)
		}
	}
	// One extra character for the '.' separating the fractional part.
	if decimal > 0 {
		flen = 8 + 1 + decimal
	} else {
		flen = 8
	}
	return flen, decimal
}
// getFunction picks the UTC_TIME signature based on whether an fsp argument
// was supplied.
func (c *utcTimeFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	argTps := make([]evalTp, 0, 1)
	if len(args) == 1 {
		argTps = append(argTps, tpInt)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDuration, argTps...)
	if err != nil {
		return nil, errors.Trace(err)
	}
	bf.tp.Flen, bf.tp.Decimal = c.getFlenAndDecimal4UTCTime(bf.ctx.GetSessionVars().StmtCtx, args)
	var sig builtinFunc
	if len(args) == 1 {
		sig = &builtinUTCTimeWithArgSig{baseDurationBuiltinFunc{bf}}
	} else {
		sig = &builtinUTCTimeWithoutArgSig{baseDurationBuiltinFunc{bf}}
	}
	return sig.setSelf(sig), nil
}

// builtinUTCTimeWithoutArgSig evaluates UTC_TIME() with no fsp argument.
type builtinUTCTimeWithoutArgSig struct {
	baseDurationBuiltinFunc
}
// evalDuration evals a builtinUTCTimeWithoutArgSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_utc-time
func (b *builtinUTCTimeWithoutArgSig) evalDuration(row []types.Datum) (types.Duration, bool, error) {
	// Fix: the layout passed to Format must be Go's reference time
	// ("15:04:05"). The previous literal "00:00:00" contains no reference
	// components, so UTC_TIME() always returned the constant 00:00:00.
	// the types.ParseDuration here would never fail, so the err returned can be ignored.
	v, _ := types.ParseDuration(time.Now().UTC().Format("15:04:05"), 0)
	return v, false, nil
}
// builtinUTCTimeWithArgSig evaluates UTC_TIME(fsp).
type builtinUTCTimeWithArgSig struct {
	baseDurationBuiltinFunc
}

// evalDuration evals a builtinUTCTimeWithArgSig.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_utc-time
func (b *builtinUTCTimeWithArgSig) evalDuration(row []types.Datum) (types.Duration, bool, error) {
	fsp, isNull, err := b.args[0].EvalInt(row, b.ctx.GetSessionVars().StmtCtx)
	if isNull || err != nil {
		return types.Duration{}, isNull, errors.Trace(err)
	}
	if fsp > int64(types.MaxFsp) {
		return types.Duration{}, true, errors.Errorf("Too-big precision %v specified for 'utc_time'. Maximum is %v.", fsp, types.MaxFsp)
	}
	if fsp < int64(types.MinFsp) {
		return types.Duration{}, true, errors.Errorf("Invalid negative %d specified, must in [0, 6].", fsp)
	}
	// Fix: use the Go reference layout "15:04:05.000000" instead of the
	// previous "00:00:00.000000", which contains no reference components and
	// therefore always produced the constant string 00:00:00.000000.
	// the types.ParseDuration here would never fail, so the err returned can be ignored.
	v, _ := types.ParseDuration(time.Now().UTC().Format("15:04:05.000000"), int(fsp))
	return v, false, nil
}
// lastDayFunctionClass implements the LAST_DAY() builtin function.
type lastDayFunctionClass struct {
	baseFunctionClass
}

func (c *lastDayFunctionClass) getFunction(args []Expression, ctx context.Context) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, errors.Trace(err)
	}
	bf, err := newBaseBuiltinFuncWithTp(args, ctx, tpDatetime, tpDatetime)
	if err != nil {
		return nil, errors.Trace(err)
	}
	// The result is a plain DATE.
	bf.tp.Tp, bf.tp.Flen, bf.tp.Decimal = mysql.TypeDate, mysql.MaxDateWidth, types.DefaultFsp
	sig := &builtinLastDaySig{baseTimeBuiltinFunc{bf}}
	return sig.setSelf(sig), nil
}

// builtinLastDaySig is the evaluator for LAST_DAY().
type builtinLastDaySig struct {
	baseTimeBuiltinFunc
}

// evalTime evals a builtinLastDaySig.
// LAST_DAY(date) returns the last day of the month that `date` falls in.
// See https://dev.mysql.com/doc/refman/5.7/en/date-and-time-functions.html#function_last-day
func (b *builtinLastDaySig) evalTime(row []types.Datum) (types.Time, bool, error) {
	sc := b.ctx.GetSessionVars().StmtCtx
	arg, isNull, err := b.args[0].EvalTime(row, sc)
	if isNull || err != nil {
		return types.Time{}, true, errors.Trace(handleInvalidTimeError(b.ctx, err))
	}
	tm := arg.Time
	// Default of 30 days covers April, June, September and November.
	year, month, day := tm.Year(), tm.Month(), 30
	if year == 0 && month == 0 && tm.Day() == 0 {
		return types.Time{}, true, errors.Trace(handleInvalidTimeError(b.ctx, types.ErrInvalidTimeFormat))
	}
	if month == 1 || month == 3 || month == 5 ||
		month == 7 || month == 8 || month == 10 || month == 12 {
		day = 31
	} else if month == 2 {
		day = 28
		if tm.IsLeapYear() {
			day = 29
		}
	}
	ret := types.Time{
		Time: types.FromDate(year, month, day, 0, 0, 0, 0),
		Type: mysql.TypeDate,
		Fsp:  types.DefaultFsp,
	}
	return ret, false, nil
}
|
_ builtinFunc = &builtinUTCDateSig{}
_ builtinFunc = &builtinUTCTimestampWithArgSig{}
_ builtinFunc = &builtinUTCTimestampWithoutArgSig{}
_ builtinFunc = &builtinExtractSig{}
|
TEMPONet_float.py
|
#*----------------------------------------------------------------------------*
#* Copyright (C) 2021 Politecnico di Torino, Italy *
#* SPDX-License-Identifier: Apache-2.0 *
#* *
#* Licensed under the Apache License, Version 2.0 (the "License"); *
#* you may not use this file except in compliance with the License. *
#* You may obtain a copy of the License at *
#* *
#* http://www.apache.org/licenses/LICENSE-2.0 *
#* *
#* Unless required by applicable law or agreed to in writing, software *
#* distributed under the License is distributed on an "AS IS" BASIS, *
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
#* See the License for the specific language governing permissions and *
#* limitations under the License. *
#* *
#* Author: Alessio Burrello *
#*----------------------------------------------------------------------------*
import torch.nn as nn
import torch.nn.functional as F
from base import BaseModel
from math import ceil
import sys
sys.path.append("..")
from models import quant_module_1d as qm
__all__ = ['TempoNetfloat']
def TempoNetfloat(**kwargs):
    """Factory returning a float (non-quantized) TEMPONet instance.

    All keyword arguments are forwarded to ``TEMPONet.__init__``.
    """
    return TEMPONet(**kwargs)
class TEMPONet(BaseModel):
"""
TEMPONet architecture:
Three repeated instances of TemporalConvBlock and ConvBlock organized as follows:
- TemporalConvBlock
- ConvBlock
Two instances of Regressor followed by a final Linear layer with a single neuron.
"""
def __init__(self, dataset_name='PPG_Dalia', dataset_args={}):
|
def forward(self, x):
x = self.cb0(
self.tcb01(
self.tcb00(
x
)
)
)
x = self.cb1(
self.tcb11(
self.tcb10(
x
)
)
)
x = self.cb2(
self.tcb21(
self.tcb20(
x
)
)
)
x = x.flatten(1)
x = self.regr0(
x
)
x = self.regr1(
x
)
x = self.out_neuron(
x
)
return x
class TempConvBlock(BaseModel):
    """Single dilated temporal-convolution block: Conv1d -> BatchNorm1d -> ReLU6.

    :param ch_in: number of input channels
    :param ch_out: number of output channels
    :param k_size: kernel size
    :param dil: dilation factor
    :param pad: amount of zero padding
    """

    def __init__(self, ch_in, ch_out, k_size, dil, pad):
        super(TempConvBlock, self).__init__()
        # Bias is omitted because BatchNorm immediately follows the conv.
        self.tcn0 = nn.Conv1d(in_channels=ch_in, out_channels=ch_out,
                              kernel_size=k_size, dilation=dil,
                              padding=pad, bias=False)
        self.relu0 = nn.ReLU6()
        self.bn0 = nn.BatchNorm1d(num_features=ch_out)

    def forward(self, x):
        out = self.tcn0(x)
        out = self.bn0(out)
        return self.relu0(out)
class ConvBlock(BaseModel):
    """Strided convolution block: Conv1d -> AvgPool1d(2) -> BatchNorm1d -> ReLU6.

    :param ch_in: number of input channels
    :param ch_out: number of output channels
    :param k_size: kernel size
    :param strd: convolution stride
    :param pad: amount of zero padding
    :param dilation: convolution dilation (defaults to 1)
    """

    def __init__(self, ch_in, ch_out, k_size, strd, pad, dilation=1):
        super(ConvBlock, self).__init__()
        # Bias is omitted because BatchNorm immediately follows.
        self.conv0 = nn.Conv1d(in_channels=ch_in, out_channels=ch_out,
                               kernel_size=k_size, stride=strd,
                               dilation=dilation, padding=pad, bias=False)
        # Halves the temporal resolution after the convolution.
        self.pool0 = nn.AvgPool1d(kernel_size=2, stride=2, padding=0)
        self.relu0 = nn.ReLU6()
        self.bn0 = nn.BatchNorm1d(ch_out)

    def forward(self, x):
        out = self.pool0(self.conv0(x))
        return self.relu0(self.bn0(out))
class Regressor(BaseModel):
    """Fully-connected regression block: Linear -> BatchNorm1d -> ReLU6.

    :param ft_in: number of input features
    :param ft_out: number of output features
    """

    def __init__(self, ft_in, ft_out):
        super(Regressor, self).__init__()
        self.ft_in = ft_in
        self.ft_out = ft_out
        # Bias is omitted because BatchNorm immediately follows.
        self.fc0 = nn.Linear(in_features=ft_in, out_features=ft_out,
                             bias=False)
        self.relu0 = nn.ReLU6()
        self.bn0 = nn.BatchNorm1d(num_features=ft_out)

    def forward(self, x):
        return self.relu0(self.bn0(self.fc0(x)))
class Chomp1d(BaseModel):
    """Remove ``chomp_size`` trailing time steps from a (N, C, T) tensor.

    Used to drop the right-side zero padding added for causal convolutions.

    :param chomp_size: number of trailing padding steps to remove
    """

    def __init__(self, chomp_size):
        super(Chomp1d, self).__init__()
        self.chomp_size = chomp_size

    def forward(self, x):
        # Bug fix: with chomp_size == 0 the original slice x[:, :, :-0]
        # evaluated to x[:, :, :0] and silently returned an empty tensor.
        if self.chomp_size == 0:
            return x.contiguous()
        return x[:, :, :-self.chomp_size].contiguous()
|
super(TEMPONet, self).__init__()
self.dil = [
2, 2, 1,
4, 4,
8, 8
]
self.rf = [
5, 5, 5,
9, 9,
17, 17
]
self.ch = [
32, 32, 64,
64, 64, 128,
128, 128, 128,
256, 128
]
# 1st instance of two TempConvBlocks and ConvBlock
k_tcb00 = ceil(self.rf[0] / self.dil[0])
self.tcb00 = TempConvBlock(
ch_in=4,
ch_out=self.ch[0],
k_size=k_tcb00,
dil=self.dil[0],
pad=((k_tcb00 - 1) * self.dil[0] + 1) // 2
)
k_tcb01 = ceil(self.rf[1] / self.dil[1])
self.tcb01 = TempConvBlock(
ch_in=self.ch[0],
ch_out=self.ch[1],
k_size=k_tcb01,
dil=self.dil[1],
pad=((k_tcb01 - 1) * self.dil[1] + 1) // 2
)
k_cb0 = ceil(self.rf[2] / self.dil[2])
self.cb0 = ConvBlock(
ch_in=self.ch[1],
ch_out=self.ch[2],
k_size=k_cb0,
strd=1,
pad=((k_cb0 - 1) * self.dil[2] + 1) // 2,
dilation=self.dil[2]
)
# 2nd instance of two TempConvBlocks and ConvBlock
k_tcb10 = ceil(self.rf[3] / self.dil[3])
self.tcb10 = TempConvBlock(
ch_in=self.ch[2],
ch_out=self.ch[3],
k_size=k_tcb10,
dil=self.dil[3],
pad=((k_tcb10 - 1) * self.dil[3] + 1) // 2
)
k_tcb11 = ceil(self.rf[4] / self.dil[4])
self.tcb11 = TempConvBlock(
ch_in=self.ch[3],
ch_out=self.ch[4],
k_size=k_tcb11,
dil=self.dil[4],
pad=((k_tcb11 - 1) * self.dil[4] + 1) // 2
)
self.cb1 = ConvBlock(
ch_in=self.ch[4],
ch_out=self.ch[5],
k_size=5,
strd=2,
pad=2
)
# 3td instance of TempConvBlock and ConvBlock
k_tcb20 = ceil(self.rf[5] / self.dil[5])
self.tcb20 = TempConvBlock(
ch_in=self.ch[5],
ch_out=self.ch[6],
k_size=k_tcb20,
dil=self.dil[5],
pad=((k_tcb20 - 1) * self.dil[5] + 1) // 2
)
k_tcb21 = ceil(self.rf[6] / self.dil[6])
self.tcb21 = TempConvBlock(
ch_in=self.ch[6],
ch_out=self.ch[7],
k_size=k_tcb21,
dil=self.dil[6],
pad=((k_tcb21 - 1) * self.dil[6] + 1) // 2
)
self.cb2 = ConvBlock(
ch_in=self.ch[7],
ch_out=self.ch[8],
k_size=5,
strd=4,
pad=4
)
# 1st instance of regressor
self.regr0 = Regressor(
ft_in=self.ch[8] * 4,
ft_out=self.ch[9]
)
# 2nd instance of regressor
self.regr1 = Regressor(
ft_in=self.ch[9],
ft_out=self.ch[10]
)
self.out_neuron = nn.Linear(
in_features=self.ch[10],
out_features=1
)
|
run_multiagent_rllib.py
|
# coding=utf-8
# Copyright 2019 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A simple example of setting up a multi-agent version of GFootball with rllib.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import gfootball.env as football_env
import gym
import ray
from ray import tune
from ray.rllib.env.multi_agent_env import MultiAgentEnv
from ray.tune.registry import register_env
# Command-line configuration for the multi-agent training run.
parser = argparse.ArgumentParser()
parser.add_argument('--num-agents', type=int, default=3)
parser.add_argument('--num-policies', type=int, default=3)
parser.add_argument('--num-iters', type=int, default=100000)
# Use rllib's simple optimizer instead of the default one.
parser.add_argument('--simple', action='store_true')
|
"""An example of a wrapper for GFootball to make it compatible with rllib."""
def __init__(self, num_agents):
self.env = football_env.create_environment(
env_name='test_example_multiagent', stacked=False,
logdir='/tmp/rllib_test',
write_goal_dumps=False, write_full_episode_dumps=False, render=True,
dump_frequency=0,
number_of_left_players_agent_controls=num_agents,
channel_dimensions=(42, 42))
self.action_space = gym.spaces.Discrete(self.env.action_space.nvec[1])
self.observation_space = gym.spaces.Box(
low=self.env.observation_space.low[0],
high=self.env.observation_space.high[0],
dtype=self.env.observation_space.dtype)
self.num_agents = num_agents
def reset(self):
original_obs = self.env.reset()
obs = {}
for x in range(self.num_agents):
if self.num_agents > 1:
obs['agent_%d' % x] = original_obs[x]
else:
obs['agent_%d' % x] = original_obs
return obs
def step(self, action_dict):
actions = []
for key, value in sorted(action_dict.items()):
actions.append(value)
o, r, d, i = self.env.step(actions)
rewards = {}
obs = {}
infos = {}
for pos, key in enumerate(sorted(action_dict.keys())):
infos[key] = i
if self.num_agents > 1:
rewards[key] = r[pos]
obs[key] = o[pos]
else:
rewards[key] = r
obs[key] = o
dones = {'__all__': d}
return obs, rewards, dones, infos
if __name__ == '__main__':
  args = parser.parse_args()
  # Reserve one GPU for the trainer process.
  ray.init(num_gpus=1)
  # Simple environment with `num_agents` independent players
  register_env('gfootball', lambda _: RllibGFootball(args.num_agents))
  # Probe instance used only to read the per-agent spaces.
  single_env = RllibGFootball(args.num_agents)
  obs_space = single_env.observation_space
  act_space = single_env.action_space
  def gen_policy(_):
    # Every policy shares the same observation/action spaces.
    return (None, obs_space, act_space, {})
  # Setup PPO with an ensemble of `num_policies` different policies
  policies = {
      'policy_{}'.format(i): gen_policy(i) for i in range(args.num_policies)
  }
  policy_ids = list(policies.keys())
  tune.run(
      'PPO',
      stop={'training_iteration': args.num_iters},
      checkpoint_freq=50,
      config={
          'env': 'gfootball',
          'lambda': 0.95,
          'kl_coeff': 0.2,
          'clip_rewards': False,
          'vf_clip_param': 10.0,
          'entropy_coeff': 0.01,
          'train_batch_size': 2000,
          'sample_batch_size': 100,
          'sgd_minibatch_size': 500,
          'num_sgd_iter': 10,
          'num_workers': 10,
          'num_envs_per_worker': 1,
          'batch_mode': 'truncate_episodes',
          'observation_filter': 'NoFilter',
          'vf_share_layers': 'true',
          'num_gpus': 1,
          'lr': 2.5e-4,
          'log_level': 'DEBUG',
          'simple_optimizer': args.simple,
          'multiagent': {
              'policies': policies,
              # Map 'agent_<i>' to 'policy_<i>' by parsing the numeric suffix.
              'policy_mapping_fn': tune.function(
                  lambda agent_id: policy_ids[int(agent_id[6:])]),
          },
      },
  )
|
class RllibGFootball(MultiAgentEnv):
|
bayesian_gplvm.py
|
#!/usr/bin/env python3
from ..approximate_gp import ApproximateGP
class BayesianGPLVM(ApproximateGP):
    """Gaussian Process Latent Variable Model (GPLVM) for unsupervised learning.

    The class supports
    1. Point estimates for latent X when prior_x = None
    2. MAP Inference for X when prior_x is not None and inference == 'map'
    3. Gaussian variational distribution q(X) when prior_x is not None and
       inference == 'variational'

    :param X: An instance of a sub-class of the LatentVariable class. One of,
        :class:`~gpytorch.models.gplvm.PointLatentVariable`,
        :class:`~gpytorch.models.gplvm.MAPLatentVariable`, or
        :class:`~gpytorch.models.gplvm.VariationalLatentVariable`, to
        facilitate inference with 1, 2, or 3 respectively.
    :type X: ~gpytorch.models.LatentVariable
    :param ~gpytorch.variational._VariationalStrategy variational_strategy:
        The strategy that determines how the model marginalizes over the
        variational distribution (over inducing points) to produce the
        approximate posterior distribution (over data)
    """

    def __init__(self, X, variational_strategy):
        super().__init__(variational_strategy)
        # The latent-variable module; calling it produces (a sample of) X.
        self.X = X

    def forward(self):
        # Concrete models must define the mapping from latents to outputs.
        raise NotImplementedError

    def sample_latent_variable(self):
        """Draw/return the current latent inputs by calling the X module."""
        return self.X()
|
.. seealso::
The `GPLVM tutorial
<examples/04_Variational_and_Approximate_GPs/Gaussian_Process_Latent_Variable_Models_with_Stochastic_Variational_Inference.ipynb>`_
for use instructions.
|
inject_hub_function.go
|
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT license.
*/
package pipeline
import (
"context"
"github.com/pkg/errors"
|
"github.com/Azure/azure-service-operator/hack/generator/pkg/astmodel"
"github.com/Azure/azure-service-operator/hack/generator/pkg/functions"
)
// InjectHubFunctionStageID is the unique identifier for this pipeline stage
const InjectHubFunctionStageID = "injectHubFunction"

// InjectHubFunction modifies the nominated storage version (aka hub version) of each resource by injecting a Hub()
// function so that it satisfies the required conversion interface.
func InjectHubFunction(idFactory astmodel.IdentifierFactory) Stage {
	stage := MakeLegacyStage(
		InjectHubFunctionStageID,
		"Inject the function Hub() into each hub resource",
		func(ctx context.Context, types astmodel.Types) (astmodel.Types, error) {
			injector := astmodel.NewFunctionInjector()
			// Work on a copy so non-resource definitions pass through unchanged.
			result := types.Copy()

			resources := astmodel.FindResourceTypes(types)
			for name, def := range resources {
				rt, ok := astmodel.AsResourceType(def.Type())
				if !ok {
					// FindResourceTypes only returns resources, so this is defensive.
					return nil, errors.Errorf("expected %s to be a resource type (should never happen)", name)
				}

				// Only the storage (hub) version of each resource gets Hub().
				if rt.IsStorageVersion() {
					fn := functions.NewHubFunction(idFactory)
					defWithFn, err := injector.Inject(def, fn)
					if err != nil {
						return nil, errors.Wrapf(err, "injecting Hub() into %s", name)
					}

					result[name] = defWithFn
				}
			}

			return result, nil
		})

	// Storage versions must already be marked before we can find them here.
	stage.RequiresPrerequisiteStages(MarkStorageVersionStageId)
	return stage
}
| |
plan.rs
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! This module contains the `LogicalPlan` enum that describes queries
//! via a logical query plan.
use std::{
fmt::{self, Display},
sync::Arc,
};
use arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use arrow::record_batch::RecordBatch;
use crate::datasource::TableProvider;
use crate::sql::parser::FileType;
use super::display::{GraphvizVisitor, IndentVisitor};
use super::expr::Expr;
use super::extension::UserDefinedLogicalNode;
use crate::logical_plan::dfschema::DFSchemaRef;
/// Describes the source of the table, either registered on the context
/// (looked up by name at execution time) or passed directly by reference.
#[derive(Clone)]
pub enum TableSource {
    /// The source provider is registered in the context with the corresponding name
    FromContext(String),
    /// The source provider is passed directly by reference
    FromProvider(Arc<dyn TableProvider + Send + Sync>),
}
/// Join type (which rows are kept when the equijoin predicate does not match)
#[derive(Debug, Clone)]
pub enum JoinType {
    /// Inner join: only rows with a match on both sides
    Inner,
    /// Left join: all rows from the left input, matched or not
    Left,
    /// Right join: all rows from the right input, matched or not
    Right,
}
/// A LogicalPlan represents the different types of relational
/// operators (such as Projection, Filter, etc) and can be created by
/// the SQL query planner and the DataFrame API.
///
/// A LogicalPlan represents transforming an input relation (table) to
/// an output relation (table) with a (potentially) different
/// schema. A plan represents a dataflow tree where data flows
/// from leaves up to the root to produce the query result. Variants
/// without an `input` field (the scans, `EmptyRelation`, etc.) are the
/// leaves of that tree.
#[derive(Clone)]
pub enum LogicalPlan {
    /// Evaluates an arbitrary list of expressions (essentially a
    /// SELECT with an expression list) on its input.
    Projection {
        /// The list of expressions
        expr: Vec<Expr>,
        /// The incoming logical plan
        input: Arc<LogicalPlan>,
        /// The schema description of the output
        schema: DFSchemaRef,
    },
    /// Filters rows from its input that do not match an
    /// expression (essentially a WHERE clause with a predicate
    /// expression).
    ///
    /// Semantically, `<predicate>` is evaluated for each row of the input;
    /// If the value of `<predicate>` is true, the input row is passed to
    /// the output. If the value of `<predicate>` is false, the row is
    /// discarded.
    Filter {
        /// The predicate expression, which must have Boolean type.
        predicate: Expr,
        /// The incoming logical plan
        input: Arc<LogicalPlan>,
    },
    /// Aggregates its input based on a set of grouping and aggregate
    /// expressions (e.g. SUM).
    Aggregate {
        /// The incoming logical plan
        input: Arc<LogicalPlan>,
        /// Grouping expressions
        group_expr: Vec<Expr>,
        /// Aggregate expressions
        aggr_expr: Vec<Expr>,
        /// The schema description of the aggregate output
        schema: DFSchemaRef,
    },
    /// Sorts its input according to a list of sort expressions.
    Sort {
        /// The sort expressions
        expr: Vec<Expr>,
        /// The incoming logical plan
        input: Arc<LogicalPlan>,
    },
    /// Join two logical plans on one or more join columns
    Join {
        /// Left input
        left: Arc<LogicalPlan>,
        /// Right input
        right: Arc<LogicalPlan>,
        /// Equijoin clause expressed as pairs of (left, right) join columns
        on: Vec<(String, String)>,
        /// Join type
        join_type: JoinType,
        /// The output schema, containing fields from the left and right inputs
        schema: DFSchemaRef,
    },
    /// Produces rows from a table provider by reference or from the context
    TableScan {
        /// The name of the schema
        schema_name: String,
        /// The source of the table
        source: TableSource,
        /// The schema of the source data
        table_schema: SchemaRef,
        /// Optional column indices to use as a projection
        projection: Option<Vec<usize>>,
        /// The schema description of the output
        projected_schema: DFSchemaRef,
    },
    /// Produces rows that come from a `Vec` of in memory `RecordBatch`es
    InMemoryScan {
        /// Record batch partitions
        data: Vec<Vec<RecordBatch>>,
        /// The schema of the record batches
        schema: SchemaRef,
        /// Optional column indices to use as a projection
        projection: Option<Vec<usize>>,
        /// The schema description of the output
        projected_schema: DFSchemaRef,
    },
    /// Produces rows by scanning Parquet file(s)
    ParquetScan {
        /// The path to the files
        path: String,
        /// The schema of the Parquet file(s)
        schema: SchemaRef,
        /// Optional column indices to use as a projection
        projection: Option<Vec<usize>>,
        /// The schema description of the output
        projected_schema: DFSchemaRef,
    },
    /// Produces rows by scanning a CSV file(s)
    CsvScan {
        /// The path to the files
        path: String,
        /// The underlying table schema
        schema: SchemaRef,
        /// Whether the CSV file(s) have a header containing column names
        has_header: bool,
        /// An optional column delimiter. Defaults to `b','`
        delimiter: Option<u8>,
        /// Optional column indices to use as a projection
        projection: Option<Vec<usize>>,
        /// The schema description of the output
        projected_schema: DFSchemaRef,
    },
    /// Produces no rows: An empty relation with an empty schema
    EmptyRelation {
        /// Whether to produce a placeholder row
        produce_one_row: bool,
        /// The schema description of the output
        schema: DFSchemaRef,
    },
    /// Produces the first `n` tuples from its input and discards the rest.
    Limit {
        /// The limit
        n: usize,
        /// The logical plan
        input: Arc<LogicalPlan>,
    },
    /// Creates an external table.
    CreateExternalTable {
        /// The table schema
        schema: DFSchemaRef,
        /// The table name
        name: String,
        /// The physical location
        location: String,
        /// The file type of physical file
        file_type: FileType,
        /// Whether the CSV file contains a header
        has_header: bool,
    },
    /// Produces a relation with string representations of
    /// various parts of the plan
    Explain {
        /// Should extra (detailed, intermediate plans) be included?
        verbose: bool,
        /// The logical plan that is being EXPLAIN'd
        plan: Arc<LogicalPlan>,
        /// Represent the various stages plans have gone through
        stringified_plans: Vec<StringifiedPlan>,
        /// The output schema of the explain (2 columns of text)
        schema: DFSchemaRef,
    },
    /// Extension operator defined outside of DataFusion
    Extension {
        /// The runtime extension operator
        node: Arc<dyn UserDefinedLogicalNode + Send + Sync>,
    },
}
impl LogicalPlan {
    /// Get a reference to the logical plan's schema.
    ///
    /// Nodes that do not record their own schema (`Filter`, `Sort`,
    /// `Limit`) delegate to their input, since they do not change it.
    pub fn schema(&self) -> &DFSchemaRef {
        // Match ergonomics already bind each field as a reference here;
        // the previous `&schema` bindings produced `&&DFSchemaRef` values
        // that only type-checked via deref coercion (clippy:
        // needless_borrow). Return the bound references directly.
        match self {
            LogicalPlan::EmptyRelation { schema, .. } => schema,
            LogicalPlan::InMemoryScan {
                projected_schema, ..
            } => projected_schema,
            LogicalPlan::CsvScan {
                projected_schema, ..
            } => projected_schema,
            LogicalPlan::ParquetScan {
                projected_schema, ..
            } => projected_schema,
            LogicalPlan::TableScan {
                projected_schema, ..
            } => projected_schema,
            LogicalPlan::Projection { schema, .. } => schema,
            LogicalPlan::Filter { input, .. } => input.schema(),
            LogicalPlan::Aggregate { schema, .. } => schema,
            LogicalPlan::Sort { input, .. } => input.schema(),
            LogicalPlan::Join { schema, .. } => schema,
            LogicalPlan::Limit { input, .. } => input.schema(),
            LogicalPlan::CreateExternalTable { schema, .. } => schema,
            LogicalPlan::Explain { schema, .. } => schema,
            LogicalPlan::Extension { node } => node.schema(),
        }
    }

    /// Returns the (fixed) output schema for explain plans:
    /// two non-nullable Utf8 columns, `plan_type` and `plan`.
    pub fn explain_schema() -> SchemaRef {
        SchemaRef::new(Schema::new(vec![
            Field::new("plan_type", DataType::Utf8, false),
            Field::new("plan", DataType::Utf8, false),
        ]))
    }
}
/// Trait that implements the [Visitor
/// pattern](https://en.wikipedia.org/wiki/Visitor_pattern) for a
/// depth first walk of `LogicalPlan` nodes. `pre_visit` is called
/// before any children are visited, and then `post_visit` is called
/// after all children have been visited.
///
/// To use, define a struct that implements this trait and then invoke
/// "LogicalPlan::accept".
///
/// For example, for a logical plan like:
///
/// Projection: #id
///    Filter: #state Eq Utf8("CO")
///       CsvScan: employee.csv projection=Some([0, 3])
///
/// The sequence of visit operations would be:
/// ```text
/// visitor.pre_visit(Projection)
/// visitor.pre_visit(Filter)
/// visitor.pre_visit(CsvScan)
/// visitor.post_visit(CsvScan)
/// visitor.post_visit(Filter)
/// visitor.post_visit(Projection)
/// ```
pub trait PlanVisitor {
    /// The type of error returned by this visitor
    type Error;

    /// Invoked on a logical plan before any of its child inputs have been
    /// visited. If Ok(true) is returned, the recursion continues. If
    /// Err(..) or Ok(false) are returned, the recursion stops
    /// immediately and the error, if any, is returned to `accept`
    fn pre_visit(&mut self, plan: &LogicalPlan)
        -> std::result::Result<bool, Self::Error>;

    /// Invoked on a logical plan after all of its child inputs have
    /// been visited. The return value is handled the same as the
    /// return value of `pre_visit`. The provided default implementation
    /// returns `Ok(true)`.
    fn post_visit(
        &mut self,
        _plan: &LogicalPlan,
    ) -> std::result::Result<bool, Self::Error> {
        Ok(true)
    }
}
impl LogicalPlan {
    /// Performs a depth-first walk of this plan, calling `visitor`'s
    /// `pre_visit` on each node before its inputs and `post_visit`
    /// afterwards. Returns Ok(true) if all nodes were visited, and
    /// Ok(false) if any call to `pre_visit` or `post_visit` returned
    /// Ok(false) and may have cut short the recursion
    pub fn accept<V>(&self, visitor: &mut V) -> std::result::Result<bool, V::Error>
    where
        V: PlanVisitor,
    {
        if !visitor.pre_visit(self)? {
            return Ok(false);
        }
        // Recurse into this node's inputs; false means a child's visit
        // asked to stop early.
        let recurse = match self {
            LogicalPlan::Projection { input, .. } => input.accept(visitor)?,
            LogicalPlan::Filter { input, .. } => input.accept(visitor)?,
            LogicalPlan::Aggregate { input, .. } => input.accept(visitor)?,
            LogicalPlan::Sort { input, .. } => input.accept(visitor)?,
            LogicalPlan::Join { left, right, .. } => {
                left.accept(visitor)? && right.accept(visitor)?
            }
            LogicalPlan::Limit { input, .. } => input.accept(visitor)?,
            LogicalPlan::Extension { node } => {
                for input in node.inputs() {
                    if !input.accept(visitor)? {
                        return Ok(false);
                    }
                }
                true
            }
            // plans without inputs
            // NOTE(review): `Explain` holds an inner plan but is treated
            // as a leaf here, so its child plan is not visited — confirm
            // this is intentional.
            LogicalPlan::TableScan { .. }
            | LogicalPlan::InMemoryScan { .. }
            | LogicalPlan::ParquetScan { .. }
            | LogicalPlan::CsvScan { .. }
            | LogicalPlan::EmptyRelation { .. }
            | LogicalPlan::CreateExternalTable { .. }
            | LogicalPlan::Explain { .. } => true,
        };
        if !recurse {
            return Ok(false);
        }
        if !visitor.post_visit(self)? {
            return Ok(false);
        }
        Ok(true)
    }
}
// Various implementations for printing out LogicalPlans
impl LogicalPlan {
    /// Return a `format`able structure that produces a single line
    /// per node. For example:
    ///
    /// ```text
    /// Projection: #id
    ///    Filter: #state Eq Utf8("CO")
    ///       CsvScan: employee.csv projection=Some([0, 3])
    /// ```
    ///
    /// ```
    /// use arrow::datatypes::{Field, Schema, DataType};
    /// use datafusion::logical_plan::{lit, col, LogicalPlanBuilder};
    /// let schema = Schema::new(vec![
    ///     Field::new("id", DataType::Int32, false),
    /// ]);
    /// let plan = LogicalPlanBuilder::scan("default", "foo.csv", &schema, None).unwrap()
    ///     .filter(col("id").eq(lit(5))).unwrap()
    ///     .build().unwrap();
    ///
    /// // Format using display_indent
    /// let display_string = format!("{}", plan.display_indent());
    ///
    /// assert_eq!("Filter: #id Eq Int32(5)\
    ///             \n  TableScan: foo.csv projection=None",
    ///             display_string);
    /// ```
    pub fn display_indent<'a>(&'a self) -> impl fmt::Display + 'a {
        // Boilerplate structure to wrap LogicalPlan with something
        // that can be formatted
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> fmt::Display for Wrapper<'a> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                let with_schema = false;
                let mut visitor = IndentVisitor::new(f, with_schema);
                self.0.accept(&mut visitor).unwrap();
                Ok(())
            }
        }
        Wrapper(self)
    }

    /// Return a `format`able structure that produces a single line
    /// per node that includes the output schema. For example:
    ///
    /// ```text
    /// Projection: #id [id:Int32]\
    ///    Filter: #state Eq Utf8("CO") [id:Int32, state:Utf8]\
    ///      TableScan: employee.csv projection=Some([0, 3]) [id:Int32, state:Utf8]
    /// ```
    ///
    /// ```
    /// use arrow::datatypes::{Field, Schema, DataType};
    /// use datafusion::logical_plan::{lit, col, LogicalPlanBuilder};
    /// let schema = Schema::new(vec![
    ///     Field::new("id", DataType::Int32, false),
    /// ]);
    /// let plan = LogicalPlanBuilder::scan("default", "foo.csv", &schema, None).unwrap()
    ///     .filter(col("id").eq(lit(5))).unwrap()
    ///     .build().unwrap();
    ///
    /// // Format using display_indent_schema
    /// let display_string = format!("{}", plan.display_indent_schema());
    ///
    /// assert_eq!("Filter: #id Eq Int32(5) [id:Int32]\
    ///             \n  TableScan: foo.csv projection=None [id:Int32]",
    ///             display_string);
    /// ```
    pub fn display_indent_schema<'a>(&'a self) -> impl fmt::Display + 'a {
        // Boilerplate structure to wrap LogicalPlan with something
        // that can be formatted
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> fmt::Display for Wrapper<'a> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                let with_schema = true;
                let mut visitor = IndentVisitor::new(f, with_schema);
                self.0.accept(&mut visitor).unwrap();
                Ok(())
            }
        }
        Wrapper(self)
    }

    /// Return a `format`able structure that produces lines meant for
    /// graphical display using the `DOT` language. This format can be
    /// visualized using software from
    /// [`graphviz`](https://graphviz.org/)
    ///
    /// This currently produces two graphs -- one with the basic
    /// structure, and one with additional details such as schema.
    ///
    /// ```
    /// use arrow::datatypes::{Field, Schema, DataType};
    /// use datafusion::logical_plan::{lit, col, LogicalPlanBuilder};
    /// let schema = Schema::new(vec![
    ///     Field::new("id", DataType::Int32, false),
    /// ]);
    /// let plan = LogicalPlanBuilder::scan("default", "foo.csv", &schema, None).unwrap()
    ///     .filter(col("id").eq(lit(5))).unwrap()
    ///     .build().unwrap();
    ///
    /// // Format using display_graphviz
    /// let graphviz_string = format!("{}", plan.display_graphviz());
    /// ```
    ///
    /// If graphviz string is saved to a file such as `/tmp/example.dot`, the following
    /// commands can be used to render it as a pdf:
    ///
    /// ```bash
    /// dot -Tpdf < /tmp/example.dot > /tmp/example.pdf
    /// ```
    ///
    pub fn display_graphviz<'a>(&'a self) -> impl fmt::Display + 'a {
        // Boilerplate structure to wrap LogicalPlan with something
        // that can be formatted
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> fmt::Display for Wrapper<'a> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                writeln!(
                    f,
                    "// Begin DataFusion GraphViz Plan (see https://graphviz.org)"
                )?;
                writeln!(f, "digraph {{")?;

                // Walk the plan twice: once for the bare structure...
                let mut visitor = GraphvizVisitor::new(f);
                visitor.pre_visit_plan("LogicalPlan")?;
                self.0.accept(&mut visitor).unwrap();
                visitor.post_visit_plan()?;

                // ...and once more with schema details attached.
                visitor.set_with_schema(true);
                visitor.pre_visit_plan("Detailed LogicalPlan")?;
                self.0.accept(&mut visitor).unwrap();
                visitor.post_visit_plan()?;

                writeln!(f, "}}")?;
                writeln!(f, "// End DataFusion GraphViz Plan")?;
                Ok(())
            }
        }
        Wrapper(self)
    }

    /// Return a `format`able structure with the a human readable
    /// description of this LogicalPlan node per node, not including
    /// children. For example:
    ///
    /// ```text
    /// Projection: #id
    /// ```
    /// ```
    /// use arrow::datatypes::{Field, Schema, DataType};
    /// use datafusion::logical_plan::{lit, col, LogicalPlanBuilder};
    /// let schema = Schema::new(vec![
    ///     Field::new("id", DataType::Int32, false),
    /// ]);
    /// let plan = LogicalPlanBuilder::scan("default", "foo.csv", &schema, None).unwrap()
    ///     .build().unwrap();
    ///
    /// // Format using display
    /// let display_string = format!("{}", plan.display());
    ///
    /// assert_eq!("TableScan: foo.csv projection=None", display_string);
    /// ```
    pub fn display<'a>(&'a self) -> impl fmt::Display + 'a {
        // Boilerplate structure to wrap LogicalPlan with something
        // that can be formatted
        struct Wrapper<'a>(&'a LogicalPlan);
        impl<'a> fmt::Display for Wrapper<'a> {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                match *self.0 {
                    LogicalPlan::EmptyRelation { .. } => write!(f, "EmptyRelation"),
                    LogicalPlan::TableScan {
                        ref source,
                        ref projection,
                        ..
                    } => match source {
                        TableSource::FromContext(table_name) => write!(
                            f,
                            "TableScan: {} projection={:?}",
                            table_name, projection
                        ),
                        TableSource::FromProvider(_) => {
                            write!(f, "TableScan: projection={:?}", projection)
                        }
                    },
                    LogicalPlan::InMemoryScan { ref projection, .. } => {
                        write!(f, "InMemoryScan: projection={:?}", projection)
                    }
                    LogicalPlan::CsvScan {
                        ref path,
                        ref projection,
                        ..
                    } => write!(f, "CsvScan: {} projection={:?}", path, projection),
                    LogicalPlan::ParquetScan {
                        ref path,
                        ref projection,
                        ..
                    } => write!(f, "ParquetScan: {} projection={:?}", path, projection),
                    LogicalPlan::Projection { ref expr, .. } => {
                        write!(f, "Projection: ")?;
                        for i in 0..expr.len() {
                            if i > 0 {
                                write!(f, ", ")?;
                            }
                            write!(f, "{:?}", expr[i])?;
                        }
                        Ok(())
                    }
                    LogicalPlan::Filter {
                        predicate: ref expr,
                        ..
                    } => write!(f, "Filter: {:?}", expr),
                    LogicalPlan::Aggregate {
                        ref group_expr,
                        ref aggr_expr,
                        ..
                    } => write!(
                        f,
                        "Aggregate: groupBy=[{:?}], aggr=[{:?}]",
                        group_expr, aggr_expr
                    ),
                    LogicalPlan::Sort { ref expr, .. } => {
                        write!(f, "Sort: ")?;
                        for i in 0..expr.len() {
                            if i > 0 {
                                write!(f, ", ")?;
                            }
                            write!(f, "{:?}", expr[i])?;
                        }
                        Ok(())
                    }
                    LogicalPlan::Join { on: ref keys, .. } => {
                        let join_expr: Vec<String> =
                            keys.iter().map(|(l, r)| format!("{} = {}", l, r)).collect();
                        write!(f, "Join: {}", join_expr.join(", "))
                    }
                    LogicalPlan::Limit { ref n, .. } => write!(f, "Limit: {}", n),
                    LogicalPlan::CreateExternalTable { ref name, .. } => {
                        write!(f, "CreateExternalTable: {:?}", name)
                    }
                    LogicalPlan::Explain { .. } => write!(f, "Explain"),
                    LogicalPlan::Extension { ref node } => node.fmt_for_explain(f),
                }
            }
        }
        Wrapper(self)
    }
}
impl fmt::Debug for LogicalPlan {
    /// Debug output reuses the one-line-per-node indented display.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.display_indent())
    }
}
/// Represents which type of plan a `StringifiedPlan` describes
#[derive(Debug, Clone, PartialEq)]
pub enum PlanType {
    /// The initial LogicalPlan provided to DataFusion
    LogicalPlan,
    /// The LogicalPlan which results from applying an optimizer pass
    OptimizedLogicalPlan {
        /// The name of the optimizer which produced this plan
        optimizer_name: String,
    },
    /// The physical plan, prepared for execution
    PhysicalPlan,
}
impl From<&PlanType> for String {
    /// Render the plan type as the label shown in EXPLAIN output.
    fn from(t: &PlanType) -> Self {
        match t {
            PlanType::LogicalPlan => String::from("logical_plan"),
            PlanType::OptimizedLogicalPlan { optimizer_name } => {
                format!("logical_plan after {}", optimizer_name)
            }
            PlanType::PhysicalPlan => String::from("physical_plan"),
        }
    }
}
/// Represents some sort of execution plan, in String form
/// (used by `LogicalPlan::Explain` to record each planning stage)
#[derive(Debug, Clone, PartialEq)]
pub struct StringifiedPlan {
    /// An identifier of what type of plan this string represents
    pub plan_type: PlanType,
    /// The string representation of the plan
    pub plan: Arc<String>,
}
impl StringifiedPlan {
    /// Create a new Stringified plan of `plan_type` with string
    /// representation `plan`
    pub fn new(plan_type: PlanType, plan: impl Into<String>) -> Self {
        let plan = Arc::new(plan.into());
        Self { plan_type, plan }
    }

    /// returns true if this plan should be displayed. Generally
    /// `verbose_mode = true` will display all available plans
    pub fn should_display(&self, verbose_mode: bool) -> bool {
        verbose_mode || self.plan_type == PlanType::LogicalPlan
    }
}
#[cfg(test)]
mod tests {
    use super::super::{col, lit, LogicalPlanBuilder};
    use super::*;

    /// Schema shared by the display tests below.
    fn employee_schema() -> Schema {
        Schema::new(vec![
            Field::new("id", DataType::Int32, false),
            Field::new("first_name", DataType::Utf8, false),
            Field::new("last_name", DataType::Utf8, false),
            Field::new("state", DataType::Utf8, false),
            Field::new("salary", DataType::Int32, false),
        ])
    }

    /// Scan -> Filter -> Projection plan used by the display tests.
    fn display_plan() -> LogicalPlan {
        LogicalPlanBuilder::scan(
            "default",
            "employee.csv",
            &employee_schema(),
            Some(vec![0, 3]),
        )
        .unwrap()
        .filter(col("state").eq(lit("CO")))
        .unwrap()
        .project(vec![col("id")])
        .unwrap()
        .build()
        .unwrap()
    }

    #[test]
    fn test_display_indent() {
        let plan = display_plan();

        let expected = "Projection: #id\
        \n  Filter: #state Eq Utf8(\"CO\")\
        \n    TableScan: employee.csv projection=Some([0, 3])";

        assert_eq!(expected, format!("{}", plan.display_indent()));
    }

    #[test]
    fn test_display_indent_schema() {
        let plan = display_plan();

        let expected = "Projection: #id [id:Int32]\
        \n  Filter: #state Eq Utf8(\"CO\") [id:Int32, state:Utf8]\
        \n    TableScan: employee.csv projection=Some([0, 3]) [id:Int32, state:Utf8]";

        assert_eq!(expected, format!("{}", plan.display_indent_schema()));
    }

    #[test]
    fn test_display_graphviz() {
        let plan = display_plan();

        // just test for a few key lines in the output rather than the
        // whole thing to make test maintenance easier.
        let graphviz = format!("{}", plan.display_graphviz());

        assert!(
            graphviz.contains(
                r#"// Begin DataFusion GraphViz Plan (see https://graphviz.org)"#
            ),
            "\n{}",
            plan.display_graphviz()
        );
        assert!(
            graphviz.contains(
                r#"[shape=box label="TableScan: employee.csv projection=Some([0, 3])"]"#
            ),
            "\n{}",
            plan.display_graphviz()
        );
        assert!(graphviz.contains(r#"[shape=box label="TableScan: employee.csv projection=Some([0, 3])\nSchema: [id:Int32, state:Utf8]"]"#),
                "\n{}", plan.display_graphviz());
        assert!(
            graphviz.contains(r#"// End DataFusion GraphViz Plan"#),
            "\n{}",
            plan.display_graphviz()
        );
    }

    /// Tests for the Visitor trait and walking logical plan nodes

    /// Visitor that records the visit order and always continues.
    #[derive(Debug, Default)]
    struct OkVisitor {
        strings: Vec<String>,
    }
    impl PlanVisitor for OkVisitor {
        type Error = String;

        fn pre_visit(
            &mut self,
            plan: &LogicalPlan,
        ) -> std::result::Result<bool, Self::Error> {
            let s = match plan {
                LogicalPlan::Projection { .. } => "pre_visit Projection",
                LogicalPlan::Filter { .. } => "pre_visit Filter",
                LogicalPlan::TableScan { .. } => "pre_visit TableScan",
                _ => unimplemented!("unknown plan type"),
            };

            self.strings.push(s.into());
            Ok(true)
        }

        fn post_visit(
            &mut self,
            plan: &LogicalPlan,
        ) -> std::result::Result<bool, Self::Error> {
            let s = match plan {
                LogicalPlan::Projection { .. } => "post_visit Projection",
                LogicalPlan::Filter { .. } => "post_visit Filter",
                LogicalPlan::TableScan { .. } => "post_visit TableScan",
                _ => unimplemented!("unknown plan type"),
            };

            self.strings.push(s.into());
            Ok(true)
        }
    }

    #[test]
    fn visit_order() {
        let mut visitor = OkVisitor::default();
        let plan = test_plan();
        let res = plan.accept(&mut visitor);
        assert!(res.is_ok());

        assert_eq!(
            visitor.strings,
            vec![
                "pre_visit Projection",
                "pre_visit Filter",
                "pre_visit TableScan",
                "post_visit TableScan",
                "post_visit Filter",
                "post_visit Projection"
            ]
        );
    }

    #[derive(Debug, Default)]
    /// Counter that counts down to zero and returns true when it gets there
    struct OptionalCounter {
        val: Option<usize>,
    }
    impl OptionalCounter {
        fn new(val: usize) -> Self {
            Self { val: Some(val) }
        }
        // Decrements the counter by 1, if any, returning true if it hits zero
        fn dec(&mut self) -> bool {
            if Some(0) == self.val {
                true
            } else {
                self.val = self.val.take().map(|i| i - 1);
                false
            }
        }
    }

    #[derive(Debug, Default)]
    /// Visitor that returns false after some number of visits
    struct StoppingVisitor {
        inner: OkVisitor,
        /// When Some(0) returns false from pre_visit
        return_false_from_pre_in: OptionalCounter,
        /// When Some(0) returns false from post_visit
        return_false_from_post_in: OptionalCounter,
    }

    impl PlanVisitor for StoppingVisitor {
        type Error = String;

        fn pre_visit(
            &mut self,
            plan: &LogicalPlan,
        ) -> std::result::Result<bool, Self::Error> {
            if self.return_false_from_pre_in.dec() {
                return Ok(false);
            }
            self.inner.pre_visit(plan)
        }

        fn post_visit(
            &mut self,
            plan: &LogicalPlan,
        ) -> std::result::Result<bool, Self::Error> {
            if self.return_false_from_post_in.dec() {
                return Ok(false);
            }
            self.inner.post_visit(plan)
        }
    }

    /// test early stopping in pre-visit
    #[test]
    fn early_stoping_pre_visit() {
        let mut visitor = StoppingVisitor::default();
        visitor.return_false_from_pre_in = OptionalCounter::new(2);
        let plan = test_plan();
        let res = plan.accept(&mut visitor);
        assert!(res.is_ok());

        assert_eq!(
            visitor.inner.strings,
            vec!["pre_visit Projection", "pre_visit Filter",]
        );
    }

    #[test]
    fn early_stoping_post_visit() {
        let mut visitor = StoppingVisitor::default();
        visitor.return_false_from_post_in = OptionalCounter::new(1);
        let plan = test_plan();
        let res = plan.accept(&mut visitor);
        assert!(res.is_ok());

        assert_eq!(
            visitor.inner.strings,
            vec![
                "pre_visit Projection",
                "pre_visit Filter",
                "pre_visit TableScan",
                "post_visit TableScan",
            ]
        );
    }

    #[derive(Debug, Default)]
    /// Visitor that returns an error after some number of visits
    struct ErrorVisitor {
        inner: OkVisitor,
        /// When Some(0) returns false from pre_visit
        return_error_from_pre_in: OptionalCounter,
        /// When Some(0) returns false from post_visit
        return_error_from_post_in: OptionalCounter,
    }

    impl PlanVisitor for ErrorVisitor {
        type Error = String;

        fn pre_visit(
            &mut self,
            plan: &LogicalPlan,
        ) -> std::result::Result<bool, Self::Error> {
            if self.return_error_from_pre_in.dec() {
                return Err("Error in pre_visit".into());
            }
            self.inner.pre_visit(plan)
        }

        fn post_visit(
            &mut self,
            plan: &LogicalPlan,
        ) -> std::result::Result<bool, Self::Error> {
            if self.return_error_from_post_in.dec() {
                return Err("Error in post_visit".into());
            }
            self.inner.post_visit(plan)
        }
    }

    #[test]
    fn error_pre_visit() {
        let mut visitor = ErrorVisitor::default();
        visitor.return_error_from_pre_in = OptionalCounter::new(2);
        let plan = test_plan();
        let res = plan.accept(&mut visitor);

        if let Err(e) = res {
            assert_eq!("Error in pre_visit", e);
        } else {
            panic!("Expected an error");
        }

        assert_eq!(
            visitor.inner.strings,
            vec!["pre_visit Projection", "pre_visit Filter",]
        );
    }

    #[test]
    fn error_post_visit() {
        let mut visitor = ErrorVisitor::default();
        visitor.return_error_from_post_in = OptionalCounter::new(1);
        let plan = test_plan();
        let res = plan.accept(&mut visitor);
        if let Err(e) = res {
            assert_eq!("Error in post_visit", e);
        } else {
            panic!("Expected an error");
        }

        assert_eq!(
            visitor.inner.strings,
            vec![
                "pre_visit Projection",
                "pre_visit Filter",
                "pre_visit TableScan",
                "post_visit TableScan",
            ]
        );
    }

    /// Minimal Scan -> Filter -> Projection plan used by the visitor tests.
    fn test_plan() -> LogicalPlan {
        let schema = Schema::new(vec![Field::new("id", DataType::Int32, false)]);
        LogicalPlanBuilder::scan("default", "employee.csv", &schema, Some(vec![0]))
            .unwrap()
            .filter(col("state").eq(lit("CO")))
            .unwrap()
            .project(vec![col("id")])
            .unwrap()
            .build()
            .unwrap()
    }
}
|
/// Return a `format`able structure that produces a single line
/// per node that includes the output schema. For example:
|
guessing-game.ts
|
export class GuessingGame {
/**
* The number between 1-10 to guess.
*/
number: number;
/**
* The number of lives the player has.
*/
lives: number;
/**
* The number of hints the player is allowed.
*/
hints: number;
/**
* The last number the player guessed.
*/
lastGuess: number;
/**
* Whether the game is over.
*/
isGameOver = false;
constructor() {
this.lives = 3;
this.hints = 1;
this.lastGuess = -1;
this.number = Math.floor(Math.random() * 10) + 1;
}
/**
* Guess a number;
* @param number The number to guess.
*/
guess(number: number): string {
const min = 1;
const max = 10;
if (number === this.number) {
this.isGameOver = true;
return "You guessed the correct number!";
}
if (number < min || number > max) {
return "You must guess a number between 1 and 10.";
}
this.lives--;
this.lastGuess = number;
if (this.lives < 1) {
this.isGameOver = true;
return `You lose! I was thinking of ${this.number}.`;
}
return `Incorrect! You have ${this.lives} attempt(s) and ${this.hints} hint(s) remaining.`;
}
|
hint(): string {
if (this.lastGuess === -1) {
return "You must guess a number before I can give you a hint.";
}
if (this.hints < 1) {
return "You have no more hints left.";
}
if (this.lastGuess < this.number) {
this.hints--;
return "The last number you guessed was too low.";
}
return "The last number you guessed was too high.";
}
}
|
/**
* Get a hint.
*/
|
cmd.go
|
package main
import (
"flag"
"fmt"
"os"
)
type Cmd struct {
helpFlag bool
versionFlag bool
cpOption string
XJreOption string
class string
args []string
}
func ParseCmd() *Cmd {
cmd := &Cmd{}
flag.Usage = PrintUsage
flag.BoolVar(&cmd.helpFlag, "help", false, "print help message")
flag.BoolVar(&cmd.helpFlag, "?", false, "print help message")
flag.BoolVar(&cmd.versionFlag, "version", false, "print version and exit")
flag.StringVar(&cmd.cpOption, "classpath", "", "classpath")
flag.StringVar(&cmd.cpOption, "cp", "", "classpath")
|
args := flag.Args()
if len(args) > 0 {
cmd.class = args[0]
cmd.args = args[1:]
}
return cmd
}
func PrintUsage() {
fmt.Printf("Usage: %s [-options] class [args...]\n", os.Args[0])
}
|
flag.StringVar(&cmd.XJreOption, "Xjre", "", "path to jre")
flag.Parse()
|
TemperatureString.js
|
import React from "react";
import { View, Text } from "react-native";
import TemperatureStringStyles from "./TemperatureString.style";
/**
 * Presentational component: renders a temperature value followed by a
 * raised degree mark and a unit label (e.g. 72 °F). Stateless — all
 * data arrives via props.
 *
 * @param temp         Temperature value to display (rendered as-is).
 * @param unit         Unit label, e.g. "F" or "C".
 * @param containStyle Optional style merged over the container default.
 * @param unitStyle    Optional style merged over the unit default.
 * @param tempStyle    Optional style merged over the temperature default.
 * @param degStyle     Optional style merged over the degree-mark default.
 */
const TemperatureString = ({
  temp,
  unit,
  containStyle = {},
  unitStyle = {},
  tempStyle = {},
  degStyle = {},
}) => (
  <View style={[TemperatureStringStyles.container, containStyle]}>
    <Text style={[TemperatureStringStyles.temperature, tempStyle]}>{temp}</Text>
    <View style={[TemperatureStringStyles.unitContainer]}>
      {/* NOTE(review): the degree mark is the letter "o" styled via the
          `degree` style (presumably superscript) — confirm this is
          intentional vs. using the "°" character. */}
      <Text style={[TemperatureStringStyles.degree, degStyle]}>{"o"}</Text>
      <Text style={[TemperatureStringStyles.unit, unitStyle]}>{unit}</Text>
    </View>
  </View>
);

export default TemperatureString;
|
|
sampler.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import abstractmethod
class Sampler(object):
    """Base class for all samplers.

    ``__iter__`` must be provided no matter whether you use an
    ``IterableSampler`` or a ``SequentialSampler``.  If you implement your own
    sampler, be clear about your dataset's type: for an IterableDataset
    (``__iter__`` implemented) use ``IterableSampler``; for an index dataset
    (``__getitem__`` implemented) the dataset should also implement
    ``__len__``.
    """

    def __init__(self, data_source):
        # data_source is accepted only for subclass signature compatibility;
        # the base class keeps no state.
        pass

    @abstractmethod
    def __iter__(self):
        # NOTE(review): without an ABCMeta metaclass @abstractmethod does not
        # block instantiation; calling __iter__ on the base class raises
        # instead — presumably intentional, confirm before changing.
        raise NotImplementedError
class IterableSampler(Sampler):
    """Sampler for iterable-style datasets.

    Iterable datasets produce their own elements, so this sampler just emits
    ``None`` forever as a per-iteration placeholder.

    Args:
        dataset (Dataset): set to None
    """

    def __init__(self):
        super(IterableSampler, self).__init__(None)

    def __iter__(self):
        # Endless placeholder stream; the dataset drives termination.
        placeholder = None
        while True:
            yield placeholder

    def __len__(self):
        # An iterable sampler has no meaningful length.
        return 0
class SequentialSampler(Sampler):
    """Sampler for map-style (indexable) datasets.

    Yields the indices ``0, 1, ..., len(dataset) - 1`` in order.

    Args:
        dataset (Dataset): index dataset (implements ``__len__``) to sample.
    """

    def __init__(self, dataset):
        self.dataset = dataset

    def __iter__(self):
        for index in range(len(self.dataset)):
            yield index

    def __len__(self):
        return len(self.dataset)
class BatchSampler(Sampler):
    """Batch the output of an underlying sampler.

    Yields mini-batches of indices for a ``SequentialSampler``, or
    ``batch_size``-long lists of ``None`` placeholders for an
    ``IterableSampler``.

    Args:
        sampler (Sampler): sampler used for generating batches.
        batch_size (int): size of each mini-batch.
        drop_last (bool): if True, drop the final batch when it is shorter
            than ``batch_size``; otherwise yield the short batch as well.

    Raises:
        ValueError: if ``drop_last`` is not a bool.
    """

    def __init__(self, sampler, batch_size, drop_last=True):
        # Fixes: drop_last was assigned twice (inside the validation branch
        # and again unconditionally), and the error message referred to a
        # nonexistent "last_batch" parameter.
        if not isinstance(drop_last, bool):
            raise ValueError("drop_last only support bool as input")
        self.sampler = sampler
        self.batch_size = batch_size
        self.drop_last = drop_last

    def __iter__(self):
        batch = []
        for idx in self.sampler:
            batch.append(idx)
            if len(batch) == self.batch_size:
                yield batch
                batch = []
        # Emit the trailing short batch unless drop_last was requested.
        if batch and not self.drop_last:
            yield batch

    def __len__(self):
        if self.drop_last:
            return len(self.sampler) // self.batch_size
        # Ceiling division when the last partial batch is kept.
        return (len(self.sampler) + self.batch_size - 1) // self.batch_size
|
|
setup.py
|
#-------------------------------------------------------------------------------
# This file is part of PyMad.
#
# Copyright (c) 2011, CERN. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
# Make sure setuptools is available. NOTE: the try/except hack is required to
# make installation work with pip: If an older version of setuptools is
# already imported, `use_setuptools()` will just exit the current process.
try:
import pkg_resources
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, Extension
from distutils.util import get_platform
import sys
from os import path
# Version of pymad (major,minor):
PYMADVERSION=(0, 9)
# setuptools.Extension automatically converts all '.pyx' extensions to '.c'
# extensions if detecting that neither Cython nor Pyrex is available. Early
# versions of setuptools don't know about Cython. Since we don't use Pyrex
# in this module, this leads to problems in the two cases where Cython is
# available and Pyrex is not or vice versa. Therefore, setuptools.Extension
# needs to be patched to match our needs:
try:
# Use Cython if available:
from Cython.Build import cythonize
except ImportError:
# Otherwise, always use the distributed .c instead of the .pyx file:
def cythonize(extensions):
def pyx_to_c(source):
return source[:-4]+'.c' if source.endswith('.pyx') else source
for ext in extensions:
ext.sources = list(map(pyx_to_c, ext.sources))
missing_sources = [s for s in ext.sources if not path.exists(s)]
if missing_sources:
raise OSError(('Missing source file: {0[0]!r}. '
'Install Cython to resolve this problem.')
.format(missing_sources))
return extensions
else:
orig_Extension = Extension
class Extension(orig_Extension):
"""Extension that *never* replaces '.pyx' by '.c' (using Cython)."""
def
|
(self, name, sources, *args, **kwargs):
orig_Extension.__init__(self, name, sources, *args, **kwargs)
self.sources = sources
# Let's just use the default system headers:
# Let's just use the default system headers:
include_dirs = []
library_dirs = []

# Parse command line option: --madxdir=/path/to/madxinstallation. We could
# use build_ext.user_options instead, but then the --madxdir argument can
# be passed only to the 'build_ext' command, not to 'build' or 'install',
# which is a minor nuisance.
for arg in sys.argv[:]:  # iterate over a copy — the loop mutates sys.argv
    if arg.startswith('--madxdir='):
        # Hide the custom flag from distutils, which would reject it.
        sys.argv.remove(arg)
        prefix = path.expanduser(arg.split('=', 1)[1])
        lib_path_candidates = [path.join(prefix, 'lib'),
                               path.join(prefix, 'lib64')]
        include_dirs += [path.join(prefix, 'include')]
        # Keep only the candidate lib dirs that actually exist:
        library_dirs += list(filter(path.isdir, lib_path_candidates))
# required libraries
# required libraries (link set differs between Windows and POSIX builds)
if get_platform() == "win32" or get_platform() == "win-amd64":
    libraries = ['madx', 'stdc++', 'ptc', 'gfortran', 'msvcrt']
else:
    libraries = ['madx', 'stdc++', 'c']

# Common arguments for the Cython extensions:
extension_args = dict(
    define_macros=[('MAJOR_VERSION', PYMADVERSION[0]),
                   ('MINOR_VERSION', PYMADVERSION[1])],
    libraries=libraries,
    include_dirs=include_dirs,
    library_dirs=library_dirs,
    runtime_library_dirs=library_dirs,
    extra_compile_args=['-std=c99'],
)

# Compose a long description for PyPI:
long_description = None
try:
    long_description = open('README.rst').read()
    long_description += '\n' + open('COPYING.rst').read()
    long_description += '\n' + open('CHANGES.rst').read()
except IOError:
    # Best effort: the doc files may be absent from some distributions.
    pass
# Package metadata and build configuration.
setup(
    name='cern-cpymad',
    version='.'.join(map(str, PYMADVERSION)),
    description='Cython binding to MAD-X',
    long_description=long_description,
    url='http://pymad.github.io/cpymad',
    package_dir={
        '': 'src'  # look for packages in src/ subfolder
    },
    # cythonize() is either the real Cython entry point or the .pyx -> .c
    # fallback defined above.
    ext_modules = cythonize([
        Extension('cern.cpymad.libmadx',
                  sources=["src/cern/cpymad/libmadx.pyx"],
                  **extension_args),
    ]),
    namespace_packages=[
        'cern'
    ],
    packages = [
        "cern",
        "cern.resource",
        "cern.cpymad",
    ],
    include_package_data=True, # include files matched by MANIFEST.in
    author='PyMAD developers',
    author_email='[email protected]',
    setup_requires=[
    ],
    install_requires=[
        'setuptools',
        'numpy',
        'PyYAML',
    ],
    license = 'CERN Standard Copyright License'
)
|
__init__
|
test_utils.rs
|
//! API testing helpers.
// Built-in uses
// External uses
use actix_web::{web, App, Scope};
use chrono::Utc;
use once_cell::sync::Lazy;
use tokio::sync::Mutex;
// Workspace uses
use zksync_config::ZkSyncConfig;
use zksync_crypto::rand::{SeedableRng, XorShiftRng};
use zksync_storage::{
chain::operations::records::NewExecutedPriorityOperation,
chain::operations::OperationsSchema,
prover::ProverSchema,
test_data::{
dummy_ethereum_tx_hash, gen_acc_random_updates, gen_sample_block,
gen_unique_aggregated_operation_with_txs, get_sample_aggregated_proof,
get_sample_single_proof, BLOCK_SIZE_CHUNKS,
},
ConnectionPool,
};
use zksync_test_account::ZkSyncAccount;
use zksync_types::{
aggregated_operations::AggregatedActionType,
helpers::{apply_updates, closest_packable_fee_amount, closest_packable_token_amount},
operations::{ChangePubKeyOp, TransferToNewOp},
prover::ProverJobType,
tx::ChangePubKeyType,
AccountId, AccountMap, Address, BlockNumber, Deposit, DepositOp, ExecutedOperations,
ExecutedPriorityOp, ExecutedTx, FullExit, FullExitOp, Nonce, PriorityOp, Token, TokenId,
Transfer, TransferOp, ZkSyncOp, ZkSyncTx, H256,
};
// Local uses
use super::Client;
use std::str::FromStr;
/// Serial ID of the verified priority operation.
pub const VERIFIED_OP_SERIAL_ID: u64 = 10;
/// Serial ID of the committed priority operation.
pub const COMMITTED_OP_SERIAL_ID: u64 = 243;
/// Number of committed blocks.
pub const COMMITTED_BLOCKS_COUNT: u32 = 8;
/// Number of verified blocks.
pub const VERIFIED_BLOCKS_COUNT: u32 = 5;
/// Number of executed blocks.
pub const EXECUTED_BLOCKS_COUNT: u32 = 3;
/// Configuration shared by the API test-server helpers: the zkSync config
/// plus a database connection pool.
#[derive(Debug, Clone)]
pub struct TestServerConfig {
    pub config: ZkSyncConfig,
    pub pool: ConnectionPool,
}
impl Default for TestServerConfig {
    fn default() -> Self {
        Self {
            // Config comes from environment variables; a single pooled
            // connection is enough for the tests.
            config: ZkSyncConfig::from_env(),
            pool: ConnectionPool::new(Some(1)),
        }
    }
}
/// A generated test account together with the transactions (and their
/// executed counterparts) created for it.
#[derive(Debug)]
pub struct TestTransactions {
    pub acc: ZkSyncAccount,
    pub txs: Vec<(ZkSyncTx, ExecutedOperations)>,
}
impl TestServerConfig {
    /// Starts an actix test server that mounts the scope produced by
    /// `scope_factory` under the given URL prefix, and returns a `Client`
    /// pointing at it together with the server handle.
    pub fn start_server_with_scope<F>(
        &self,
        scope: String,
        scope_factory: F,
    ) -> (Client, actix_web::test::TestServer)
    where
        F: Fn(&TestServerConfig) -> Scope + Clone + Send + 'static,
    {
        let this = self.clone();
        let server = actix_web::test::start(move || {
            App::new().service(web::scope(scope.as_ref()).service(scope_factory(&this)))
        });
        // Trim the trailing '/' so endpoint paths can be appended verbatim.
        let url = server.url("").trim_end_matches('/').to_owned();
        let client = Client::new(url);
        (client, server)
    }

    /// Starts the test server under the default `/api/v1` prefix.
    pub fn start_server<F>(&self, scope_factory: F) -> (Client, actix_web::test::TestServer)
    where
        F: Fn(&TestServerConfig) -> Scope + Clone + Send + 'static,
    {
        self.start_server_with_scope(String::from("/api/v1"), scope_factory)
    }
/// Creates several transactions and the corresponding executed operations.
pub fn
|
(fee: u64) -> TestTransactions {
Self::gen_zk_txs_for_account(AccountId(0xdead), ZkSyncAccount::rand().address, fee)
}
/// Creates several transactions and the corresponding executed operations for the
/// specified account.
pub fn gen_zk_txs_for_account(
account_id: AccountId,
address: Address,
fee: u64,
) -> TestTransactions {
let from = ZkSyncAccount::rand();
from.set_account_id(Some(AccountId(0xf00d)));
let mut to = ZkSyncAccount::rand();
to.set_account_id(Some(account_id));
to.address = address;
let mut txs = Vec::new();
// Sign change pubkey tx pair
{
let tx = from.sign_change_pubkey_tx(
None,
false,
TokenId(0),
fee.into(),
ChangePubKeyType::ECDSA,
Default::default(),
);
let zksync_op = ZkSyncOp::ChangePubKeyOffchain(Box::new(ChangePubKeyOp {
tx: tx.clone(),
account_id: from.get_account_id().unwrap(),
}));
let executed_tx = ExecutedTx {
signed_tx: zksync_op.try_get_tx().unwrap().into(),
success: true,
op: Some(zksync_op),
fail_reason: None,
block_index: Some(1),
created_at: chrono::Utc::now(),
batch_id: None,
};
txs.push((
ZkSyncTx::ChangePubKey(Box::new(tx)),
ExecutedOperations::Tx(Box::new(executed_tx)),
));
}
// Transfer tx pair
{
let tx = from
.sign_transfer(
TokenId(0),
"ETH",
closest_packable_token_amount(&10_u64.into()),
closest_packable_fee_amount(&fee.into()),
&to.address,
None,
false,
Default::default(),
)
.0;
let zksync_op = ZkSyncOp::TransferToNew(Box::new(TransferToNewOp {
tx: tx.clone(),
from: from.get_account_id().unwrap(),
to: to.get_account_id().unwrap(),
}));
let executed_tx = ExecutedTx {
signed_tx: zksync_op.try_get_tx().unwrap().into(),
success: true,
op: Some(zksync_op),
fail_reason: None,
block_index: Some(2),
created_at: chrono::Utc::now(),
batch_id: None,
};
txs.push((
ZkSyncTx::Transfer(Box::new(tx)),
ExecutedOperations::Tx(Box::new(executed_tx)),
));
}
// Failed transfer tx pair
{
let tx = from
.sign_transfer(
TokenId(0),
"GLM",
1_u64.into(),
fee.into(),
&to.address,
None,
false,
Default::default(),
)
.0;
let zksync_op = ZkSyncOp::TransferToNew(Box::new(TransferToNewOp {
tx: tx.clone(),
from: from.get_account_id().unwrap(),
to: to.get_account_id().unwrap(),
}));
let executed_tx = ExecutedTx {
signed_tx: zksync_op.try_get_tx().unwrap().into(),
success: false,
op: Some(zksync_op),
fail_reason: Some("Unknown token".to_string()),
block_index: None,
created_at: chrono::Utc::now(),
batch_id: None,
};
txs.push((
ZkSyncTx::Transfer(Box::new(tx)),
ExecutedOperations::Tx(Box::new(executed_tx)),
));
}
// Transfer back tx pair
{
let tx = Transfer::new(
to.get_account_id().unwrap(),
to.address,
from.address,
TokenId(0),
2_u64.into(),
fee.into(),
Nonce(0),
Default::default(),
None,
);
let zksync_op = ZkSyncOp::Transfer(Box::new(TransferOp {
tx: tx.clone(),
from: to.get_account_id().unwrap(),
to: from.get_account_id().unwrap(),
}));
let executed_tx = ExecutedTx {
signed_tx: zksync_op.try_get_tx().unwrap().into(),
success: true,
op: Some(zksync_op),
fail_reason: None,
block_index: Some(3),
created_at: chrono::Utc::now(),
batch_id: None,
};
txs.push((
ZkSyncTx::Transfer(Box::new(tx)),
ExecutedOperations::Tx(Box::new(executed_tx)),
));
}
TestTransactions { acc: from, txs }
}
pub async fn fill_database(&self) -> anyhow::Result<()> {
static INITED: Lazy<Mutex<bool>> = Lazy::new(|| Mutex::new(false));
// Hold this guard until transaction will be committed to avoid double init.
let mut inited_guard = INITED.lock().await;
if *inited_guard {
return Ok(());
}
*inited_guard = true;
let mut storage = self.pool.access_storage().await?;
// Check if database is been already inited.
if storage
.chain()
.block_schema()
.get_block(BlockNumber(1))
.await?
.is_some()
{
return Ok(());
}
// Make changes atomic.
let mut storage = storage.start_transaction().await?;
// Below lies the initialization of the data for the test.
let mut rng = XorShiftRng::from_seed([0, 1, 2, 3]);
// Required since we use `EthereumSchema` in this test.
storage.ethereum_schema().initialize_eth_data().await?;
// Insert PHNX token
storage
.tokens_schema()
.store_token(Token::new(
TokenId(1),
Address::from_str("38A2fDc11f526Ddd5a607C1F251C065f40fBF2f7").unwrap(),
"PHNX",
18,
))
.await?;
// Insert Golem token with old symbol (from rinkeby).
storage
.tokens_schema()
.store_token(Token::new(
TokenId(16),
Address::from_str("d94e3dc39d4cad1dad634e7eb585a57a19dc7efe").unwrap(),
"GNT",
18,
))
.await?;
let mut accounts = AccountMap::default();
// Create and apply several blocks to work with.
for block_number in 1..=COMMITTED_BLOCKS_COUNT {
let block_number = BlockNumber(block_number);
let updates = (0..3)
.flat_map(|_| gen_acc_random_updates(&mut rng))
.collect::<Vec<_>>();
apply_updates(&mut accounts, updates.clone());
// Add transactions to every odd block.
let txs = if *block_number % 2 == 1 {
let (&id, account) = accounts.iter().next().unwrap();
Self::gen_zk_txs_for_account(id, account.address, 1_000)
.txs
.into_iter()
.map(|(_tx, op)| op)
.collect()
} else {
vec![]
};
storage
.chain()
.block_schema()
.save_block(gen_sample_block(
block_number,
BLOCK_SIZE_CHUNKS,
txs.clone(),
))
.await?;
storage
.chain()
.state_schema()
.commit_state_update(block_number, &updates, 0)
.await?;
// Store & confirm the operation in the ethereum schema, as it's used for obtaining
// commit/verify/execute hashes.
let aggregated_operation = gen_unique_aggregated_operation_with_txs(
block_number,
AggregatedActionType::CommitBlocks,
BLOCK_SIZE_CHUNKS,
txs.clone(),
);
OperationsSchema(&mut storage)
.store_aggregated_action(aggregated_operation)
.await?;
let (id, op) = OperationsSchema(&mut storage)
.get_aggregated_op_that_affects_block(
AggregatedActionType::CommitBlocks,
block_number,
)
.await?
.unwrap();
// Store the Ethereum transaction.
let eth_tx_hash = dummy_ethereum_tx_hash(id);
let response = storage
.ethereum_schema()
.save_new_eth_tx(
AggregatedActionType::CommitBlocks,
Some((id, op)),
100,
100u32.into(),
Default::default(),
)
.await?;
storage
.ethereum_schema()
.add_hash_entry(response.id, ð_tx_hash)
.await?;
storage
.ethereum_schema()
.confirm_eth_tx(ð_tx_hash)
.await?;
// Add verification for the block if required.
if *block_number <= VERIFIED_BLOCKS_COUNT {
// Add jobs to `job_prover_queue`.
let job_data = serde_json::Value::default();
ProverSchema(&mut storage)
.add_prover_job_to_job_queue(
block_number,
block_number,
job_data.clone(),
0,
ProverJobType::SingleProof,
)
.await?;
ProverSchema(&mut storage)
.add_prover_job_to_job_queue(
block_number,
block_number,
job_data,
1,
ProverJobType::AggregatedProof,
)
.await?;
// Get job id.
let stored_job_id = ProverSchema(&mut storage)
.get_idle_prover_job_from_job_queue()
.await?
.unwrap()
.job_id;
let stored_aggregated_job_id = ProverSchema(&mut storage)
.get_idle_prover_job_from_job_queue()
.await?
.unwrap()
.job_id;
// Store proofs.
let proof = get_sample_single_proof();
let aggregated_proof = get_sample_aggregated_proof();
ProverSchema(&mut storage)
.store_proof(stored_job_id, block_number, &proof)
.await?;
ProverSchema(&mut storage)
.store_aggregated_proof(
stored_aggregated_job_id,
block_number,
block_number,
&aggregated_proof,
)
.await?;
let aggregated_operation = gen_unique_aggregated_operation_with_txs(
block_number,
AggregatedActionType::PublishProofBlocksOnchain,
BLOCK_SIZE_CHUNKS,
txs.clone(),
);
OperationsSchema(&mut storage)
.store_aggregated_action(aggregated_operation)
.await?;
let (id, op) = OperationsSchema(&mut storage)
.get_aggregated_op_that_affects_block(
AggregatedActionType::PublishProofBlocksOnchain,
block_number,
)
.await?
.unwrap();
let response = storage
.ethereum_schema()
.save_new_eth_tx(
AggregatedActionType::PublishProofBlocksOnchain,
Some((id, op)),
100,
100u32.into(),
Default::default(),
)
.await?;
let eth_tx_hash = dummy_ethereum_tx_hash(id);
storage
.ethereum_schema()
.add_hash_entry(response.id, ð_tx_hash)
.await?;
storage
.ethereum_schema()
.confirm_eth_tx(ð_tx_hash)
.await?;
}
if *block_number <= EXECUTED_BLOCKS_COUNT {
let aggregated_operation = gen_unique_aggregated_operation_with_txs(
block_number,
AggregatedActionType::ExecuteBlocks,
BLOCK_SIZE_CHUNKS,
txs.clone(),
);
OperationsSchema(&mut storage)
.store_aggregated_action(aggregated_operation)
.await?;
let (id, op) = OperationsSchema(&mut storage)
.get_aggregated_op_that_affects_block(
AggregatedActionType::ExecuteBlocks,
block_number,
)
.await?
.unwrap();
// Store the Ethereum transaction.
let eth_tx_hash = dummy_ethereum_tx_hash(id);
let response = storage
.ethereum_schema()
.save_new_eth_tx(
AggregatedActionType::ExecuteBlocks,
Some((id, op)),
100,
100u32.into(),
Default::default(),
)
.await?;
storage
.ethereum_schema()
.add_hash_entry(response.id, ð_tx_hash)
.await?;
storage
.ethereum_schema()
.confirm_eth_tx(ð_tx_hash)
.await?;
}
}
// Store priority operations for some tests.
let ops = vec![
// Verified priority operation.
NewExecutedPriorityOperation {
block_number: 2,
block_index: 2,
operation: serde_json::to_value(
dummy_deposit_op(Address::default(), AccountId(1), VERIFIED_OP_SERIAL_ID, 2).op,
)
.unwrap(),
from_account: Default::default(),
to_account: Default::default(),
priority_op_serialid: VERIFIED_OP_SERIAL_ID as i64,
deadline_block: 100,
eth_hash: dummy_ethereum_tx_hash(VERIFIED_OP_SERIAL_ID as i64)
.as_bytes()
.to_vec(),
eth_block: 10,
created_at: chrono::Utc::now(),
},
// Committed priority operation.
NewExecutedPriorityOperation {
block_number: EXECUTED_BLOCKS_COUNT as i64 + 1,
block_index: 1,
operation: serde_json::to_value(
dummy_full_exit_op(AccountId(1), Address::default(), COMMITTED_OP_SERIAL_ID, 3)
.op,
)
.unwrap(),
from_account: Default::default(),
to_account: Default::default(),
priority_op_serialid: COMMITTED_OP_SERIAL_ID as i64,
deadline_block: 200,
eth_hash: dummy_ethereum_tx_hash(COMMITTED_OP_SERIAL_ID as i64)
.as_bytes()
.to_vec(),
eth_block: 14,
created_at: chrono::Utc::now(),
},
];
for op in ops {
storage
.chain()
.operations_schema()
.store_executed_priority_op(op)
.await?;
}
// Get the accounts by their IDs.
for (account_id, _account) in accounts {
let account_state = storage
.chain()
.account_schema()
.account_state_by_id(account_id)
.await?;
// Check that committed state is available.
assert!(
account_state.committed.is_some(),
"No committed state for account"
);
}
storage.commit().await?;
// Storage has been inited, so we can safely drop this guard.
drop(inited_guard);
Ok(())
}
}
/// Creates dummy deposit priority operation.
///
/// Builds a `Deposit` of 1 unit of token 0 from and to `address`, and wraps
/// it into an `ExecutedPriorityOp` with the given `serial_id` and
/// `block_index` (deadline block 0, eth block 10, current timestamp).
pub fn dummy_deposit_op(
    address: Address,
    account_id: AccountId,
    serial_id: u64,
    block_index: u32,
) -> ExecutedPriorityOp {
    let deposit_op = ZkSyncOp::Deposit(Box::new(DepositOp {
        priority_op: Deposit {
            from: address,
            token: TokenId(0),
            amount: 1_u64.into(),
            to: address,
        },
        account_id,
    }));
    ExecutedPriorityOp {
        priority_op: PriorityOp {
            serial_id,
            data: deposit_op.try_get_priority_op().unwrap(),
            deadline_block: 0,
            eth_hash: H256::default(),
            eth_block: 10,
        },
        op: deposit_op,
        block_index,
        created_at: Utc::now(),
    }
}
/// Creates dummy full exit priority operation.
///
/// Builds a `FullExit` for `account_id`/`eth_address` on token 0 (no
/// withdraw amount) and wraps it into an `ExecutedPriorityOp` with the given
/// `serial_id` and `block_index` (deadline block 0, eth block 10, current
/// timestamp).
pub fn dummy_full_exit_op(
    account_id: AccountId,
    eth_address: Address,
    serial_id: u64,
    block_index: u32,
) -> ExecutedPriorityOp {
    // Renamed from `deposit_op`: this local holds a FullExit, not a Deposit
    // (copy-paste leftover from `dummy_deposit_op`).
    let full_exit_op = ZkSyncOp::FullExit(Box::new(FullExitOp {
        priority_op: FullExit {
            account_id,
            eth_address,
            token: TokenId(0),
        },
        withdraw_amount: None,
    }));
    ExecutedPriorityOp {
        priority_op: PriorityOp {
            serial_id,
            data: full_exit_op.try_get_priority_op().unwrap(),
            deadline_block: 0,
            eth_hash: H256::default(),
            eth_block: 10,
        },
        op: full_exit_op,
        block_index,
        created_at: Utc::now(),
    }
}
|
gen_zk_txs
|
index.ts
|
/**
* @name mode
* @summary Returns the mode.
*
* @description Returns the highest occuring number/s in a given numbers.
*
* @example
* // Normal usage
* let result = mode(2, 1, 8, 3, 2, 7, 4, 2, 6, 8);
* // => [2]
*
* // Using an array
* let result = mode(...[2, 1, 8, 3, 2, 7, 4, 2, 6, 8]);
* // => [2]
*
* @param {Array<number>} args given numbers.
* @returns {Array<number>} the resulting mode.
*
* @function pure
*/
export default function mode(...args: number[]): number[] {
let count: { [key: string]: number } = {};
let result: number[] = [];
|
if (!count[num]) {
count[num] = 1;
return;
}
count[num] += 1;
});
const highest = Math.max.apply(null, Object.values(count));
Object.entries(count).forEach(([key, value]) => {
if (value === highest) {
result.push(parseInt(key, 10));
}
});
return result;
}
|
args.forEach(num => {
|
sandbox.js
|
function Sandbox(url) {
  // Fix: frameHost/body/source/guid/callbacks/queue were assigned without
  // `var` (a comma-expression following a terminated statement), leaking
  // globals and throwing a ReferenceError under the file's 'use strict'.
  var iframe = document.createElement('iframe');
  var frameHost = location.origin;
  var body = document.body;
  var source = null;          // sandbox frame window, set on '__pong__'
  var guid = +new Date();     // monotonically increasing request id
  var callbacks = {};         // guid -> response callback
  var queue = new Queue(send); // buffers messages until the frame is ready

  // Deliver one queued message to the sandbox frame.
  function send(data) {
    source.postMessage(data, frameHost);
  }

  iframe.src = jsbin.root + '/sandbox.html?' + url;
  iframe.style.display = 'none';
  body.appendChild(iframe);

  window.addEventListener('message', function (event) {
    var result;
    if (event.origin === frameHost) {
      if (event.data === '__pong__') {
        // Handshake complete: remember the frame and flush the queue.
        source = event.source;
        queue.ready();
      } else {
        result = JSON.parse(event.data);
        if (callbacks[result.guid]) {
          callbacks[result.guid](result.data);
        }
      }
    }
  }, false);

  iframe.onload = function () {
    iframe.contentWindow.postMessage('__ping__', frameHost);
  };

  return {
    get: function (what, callback) {
      guid++;
      callbacks[guid] = callback;
      queue.push(JSON.stringify({ guid: guid, what: what }));
    }
  };
}
|
'use strict';
|
helpers.py
|
import logging
import datetime
import json
import requests
from bs4 import BeautifulSoup
from twilio.rest import TwilioRestClient
from .config import twilio
logger = logging.getLogger(__name__)
def translate_timestamp(ts):
    """Parse a timestamp with a Polish month name into a datetime.

    Fix: the function body had been displaced below send_sms as stray
    top-level code; it is restored here.

    Args:
        ts: string like ``'13:05, 7 lutego 2019'`` (``%H:%M, %d <month> %Y``).

    Returns:
        datetime.datetime parsed from *ts*.

    Raises:
        ValueError: if *ts* does not match the expected format.
    """
    # NOTE(review): only three month names are mapped in SOURCE; presumably
    # the full table exists elsewhere or the feed only emits these — confirm.
    translation = {
        'stycznia': 1,
        'lutego': 2,
        'grudnia': 12,
    }

    def multiple_replace(text, _dict):
        # Replace every month name with its numeric month.
        for key in _dict:
            text = text.replace(key, str(_dict[key]))
        return text

    ts = multiple_replace(ts, translation)
    ts = datetime.datetime.strptime(ts, '%H:%M, %d %m %Y')
    return ts


def get_url_soup(url):
    """Fetch *url* and return its body parsed with BeautifulSoup."""
    result = requests.get(url)
    soup = BeautifulSoup(result.content, 'html.parser')
    return soup


def get_url_json(url):
    """Fetch *url* and return its body decoded as JSON."""
    result = requests.get(url)
    _json = json.loads(result.text)
    return _json


def send_sms(number):
    """Normalize a Polish phone number and (when enabled) send the SMS.

    Raises:
        AssertionError: if the normalized number exceeds 9 digits.
    """
    # Drop separators: '-', '(', ')' and spaces.
    translation_table = dict.fromkeys(map(ord, '-() '), None)
    number = number.translate(translation_table)
    # Fix: str.lstrip('+48') strips any of the characters '+', '4', '8',
    # mangling numbers that legitimately start with 4 or 8; remove the
    # country-code prefix explicitly instead.
    if number.startswith('+48'):
        number = number[3:]
    assert len(number) <= 9, "Invalid phone number '%s'." % number
    logger.info('Sending sms to %s' % number)
    # client = TwilioRestClient(twilio['sid'], twilio['token'])
    # client.messages.create(to="+48%s" % number, from_=twilio['origin-number'],
    #                        body=twilio['message'])
|
studenttracking_gui.py
|
from tkinter import *
import tkinter as tk
import studenttracking_main
import studenttracking_fnct
def load_gui(self):
    """Build the Submission Form UI: labels, entry fields, the student
    listbox with scrollbar, and the Submit/Delete buttons; then create the
    database and populate the list.

    Fix: the btn_submit.grid / btn_delete lines had been displaced below the
    ``__main__`` guard as stray top-level code; widget creation order is
    restored here.
    """
    # --- labels ---
    self.lbl_subform = tk.Label(self.master,text='Submission Form')
    self.lbl_subform.grid(row=0,column=1,padx=(27,0),pady=(10,0),sticky=N+W)
    self.lbl_fname = tk.Label(self.master,text='First Name:')
    self.lbl_fname.grid(row=2,column=0,padx=(27,0),pady=(10,0),sticky=N+W)
    self.lbl_lname = tk.Label(self.master,text='Last Name:')
    self.lbl_lname.grid(row=3,column=0,padx=(27,0),pady=(10,0),sticky=N+W)
    self.lbl_phone = tk.Label(self.master,text='Phone:')
    self.lbl_phone.grid(row=4,column=0,padx=(27,0),pady=(10,0),sticky=N+W)
    self.lbl_email = tk.Label(self.master,text='Email:')
    self.lbl_email.grid(row=5,column=0,padx=(27,0),pady=(10,0),sticky=N+W)
    self.lbl_course = tk.Label(self.master,text='Course:')
    self.lbl_course.grid(row=7,column=0,padx=(27,0),pady=(10,0),sticky=N+W)
    self.lbl_info = tk.Label(self.master,text='Information')
    self.lbl_info.grid(row=0,column=4,padx=(27,0),pady=(10,0),sticky=N+W)
    # --- entry fields ---
    self.txt_fname = tk.Entry(self.master,text='')
    self.txt_fname.grid(row=2,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W)
    self.txt_lname = tk.Entry(self.master,text='')
    self.txt_lname.grid(row=3,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W)
    self.txt_phone = tk.Entry(self.master,text='')
    self.txt_phone.grid(row=4,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W)
    self.txt_email = tk.Entry(self.master,text='')
    self.txt_email.grid(row=5,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W)
    self.txt_course = tk.Entry(self.master,text='')
    self.txt_course.grid(row=7,column=1,rowspan=1,columnspan=2,padx=(30,40),pady=(10,0),sticky=N+E+W)
    # --- student list with scrollbar ---
    self.scrollbar1 = Scrollbar(self.master,orient=VERTICAL)
    self.lstList1 = Listbox(self.master,exportselection=0,yscrollcommand=self.scrollbar1.set)
    self.lstList1.bind('<<ListboxSelect>>',lambda event: studenttracking_fnct.onSelect(self,event))
    self.scrollbar1.config(command=self.lstList1.yview)
    self.scrollbar1.grid(row=1,column=7,rowspan=8,columnspan=1,padx=(0,0),pady=(0,0),sticky=N+E+S)
    self.lstList1.grid(row=2,column=3,rowspan=7,columnspan=4,padx=(0,0),pady=(0,0),sticky=N+E+S+W)
    # --- action buttons ---
    self.btn_submit = tk.Button(self.master,width=12,height=2,text='Submit',command=lambda: studenttracking_fnct.submit(self))
    self.btn_submit.grid(row=7,column=0,padx=(25,0),pady=(45,10),sticky=W)
    self.btn_delete = tk.Button(self.master,width=12,height=2,text='Delete',command=lambda: studenttracking_fnct.onDelete(self))
    self.btn_delete.grid(row=7,column=1,padx=(25,0),pady=(45,10),sticky=W)
    # Initialize the database and fill the list.
    studenttracking_fnct.create_db(self)
    studenttracking_fnct.onRefresh(self)


if __name__ == "__main__":
    # This module is loaded by studenttracking_main; nothing to do directly.
    pass
|
mainwin.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MtgBrowse(object):
def setupUi(self, MtgBrowse):
MtgBrowse.setObjectName("MtgBrowse")
MtgBrowse.resize(1120, 1073)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MtgBrowse.sizePolicy().hasHeightForWidth())
MtgBrowse.setSizePolicy(sizePolicy)
self.centralWidget = QtWidgets.QWidget(MtgBrowse)
self.centralWidget.setObjectName("centralWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.centralWidget)
self.verticalLayout.setContentsMargins(11, 11, 11, 11)
self.verticalLayout.setSpacing(6)
self.verticalLayout.setObjectName("verticalLayout")
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalLayout_3 = QtWidgets.QVBoxLayout()
self.verticalLayout_3.setSpacing(6)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setSpacing(6)
self.horizontalLayout.setObjectName("horizontalLayout")
self.lineEdit = QtWidgets.QLineEdit(self.centralWidget)
self.lineEdit.setObjectName("lineEdit")
self.horizontalLayout.addWidget(self.lineEdit)
self.saveButton = QtWidgets.QPushButton(self.centralWidget)
self.saveButton.setObjectName("saveButton")
self.horizontalLayout.addWidget(self.saveButton)
self.verticalLayout_3.addLayout(self.horizontalLayout)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setSpacing(6)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem)
self.loadButton_1 = QtWidgets.QPushButton(self.centralWidget)
self.loadButton_1.setObjectName("loadButton_1")
self.horizontalLayout_3.addWidget(self.loadButton_1)
self.loadButton_2 = QtWidgets.QPushButton(self.centralWidget)
self.loadButton_2.setObjectName("loadButton_2")
self.horizontalLayout_3.addWidget(self.loadButton_2)
self.loadButton_3 = QtWidgets.QPushButton(self.centralWidget)
self.loadButton_3.setObjectName("loadButton_3")
self.horizontalLayout_3.addWidget(self.loadButton_3)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem1)
self.check_standard = QtWidgets.QCheckBox(self.centralWidget)
self.check_standard.setObjectName("check_standard")
self.horizontalLayout_3.addWidget(self.check_standard)
self.zoomToggleButton = QtWidgets.QPushButton(self.centralWidget)
self.zoomToggleButton.setObjectName("zoomToggleButton")
self.horizontalLayout_3.addWidget(self.zoomToggleButton)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem2)
self.verticalLayout_3.addLayout(self.horizontalLayout_3)
self.verticalLayout_2.addLayout(self.verticalLayout_3)
self.verticalLayout.addLayout(self.verticalLayout_2)
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setHorizontalSpacing(6)
self.gridLayout.setVerticalSpacing(0)
self.gridLayout.setObjectName("gridLayout")
self.mainWidget = QtWidgets.QWidget(self.centralWidget)
self.mainWidget.setMinimumSize(QtCore.QSize(1100, 930))
self.mainWidget.setLayoutDirection(QtCore.Qt.RightToLeft)
self.mainWidget.setObjectName("mainWidget")
self.gridLayout.addWidget(self.mainWidget, 0, 0, 1, 1)
self.statusBarLabel = QtWidgets.QLabel(self.centralWidget)
self.statusBarLabel.setLayoutDirection(QtCore.Qt.LeftToRight)
self.statusBarLabel.setAutoFillBackground(False)
self.statusBarLabel.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.statusBarLabel.setObjectName("statusBarLabel")
self.gridLayout.addWidget(self.statusBarLabel, 2, 0, 1, 1)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setSpacing(6)
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.prevPage = QtWidgets.QPushButton(self.centralWidget)
self.prevPage.setEnabled(False)
self.prevPage.setObjectName("prevPage")
self.horizontalLayout_5.addWidget(self.prevPage)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_5.addItem(spacerItem3)
self.nextPage = QtWidgets.QPushButton(self.centralWidget)
self.nextPage.setEnabled(False)
self.nextPage.setObjectName("nextPage")
self.horizontalLayout_5.addWidget(self.nextPage)
self.gridLayout.addLayout(self.horizontalLayout_5, 1, 0, 1, 1)
self.verticalLayout.addLayout(self.gridLayout)
MtgBrowse.setCentralWidget(self.centralWidget)
self.mainToolBar = QtWidgets.QToolBar(MtgBrowse)
self.mainToolBar.setObjectName("mainToolBar")
MtgBrowse.addToolBar(QtCore.Qt.TopToolBarArea, self.mainToolBar)
self.retranslateUi(MtgBrowse)
QtCore.QMetaObject.connectSlotsByName(MtgBrowse)
def retranslateUi(self, MtgBrowse):
|
_translate = QtCore.QCoreApplication.translate
MtgBrowse.setWindowTitle(_translate("MtgBrowse", "MTGBrowser"))
self.saveButton.setText(_translate("MtgBrowse", "Save"))
self.loadButton_1.setText(_translate("MtgBrowse", "Load 1"))
self.loadButton_2.setText(_translate("MtgBrowse", "Load 2"))
self.loadButton_3.setText(_translate("MtgBrowse", "Load 3"))
self.check_standard.setText(_translate("MtgBrowse", "Standard"))
self.zoomToggleButton.setText(_translate("MtgBrowse", "Zoom"))
self.statusBarLabel.setText(_translate("MtgBrowse", "Initialized."))
self.prevPage.setText(_translate("MtgBrowse", "<"))
self.nextPage.setText(_translate("MtgBrowse", ">"))
|
|
authority_test.go
|
package smarthome_test
import (
"github.com/betom84/go-alexa/smarthome"
"testing"
"github.com/stretchr/testify/assert"
)
func
|
(t *testing.T) {
authority := smarthome.Authority{
ClientID: "clientID",
ClientSecret: "clientSecret",
RestrictedUsers: []string{"[email protected]"},
}
assert.Equal(t, "clientID", authority.GetClientID())
assert.Equal(t, "clientSecret", authority.GetClientSecret())
assert.Nil(t, authority.AcceptGrant("[email protected]", "", nil))
assert.Errorf(t, authority.AcceptGrant("[email protected]", "", nil), "Restricted users only")
}
|
TestAuthority
|
tcd10_daddr.rs
|
#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::TCD10_DADDR {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
|
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
// Read-side proxy holding a snapshot of the 32-bit DADDR field.
#[doc = r" Value of the field"]
pub struct DADDRR {
    bits: u32,
}
impl DADDRR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
}
// Write-side proxy: stages bits for the DADDR field into the pending W value.
#[doc = r" Proxy"]
pub struct _DADDRW<'a> {
    w: &'a mut W,
}
impl<'a> _DADDRW<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // Field occupies the full 32-bit register (mask 0xFFFF_FFFF, offset 0).
        const MASK: u32 = 4294967295;
        const OFFSET: u8 = 0;
        // Clear the field, then OR in the masked value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
// Accessors for a snapshot of the register's contents.
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bits 0:31 - Destination Address"]
    #[inline]
    pub fn daddr(&self) -> DADDRR {
        let bits = {
            // Full-width field: mask covers all 32 bits at offset 0.
            const MASK: u32 = 4294967295;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) as u32
        };
        DADDRR { bits }
    }
}
// Builders for the value to be written back to the register.
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bits 0:31 - Destination Address"]
    #[inline]
    pub fn daddr(&mut self) -> _DADDRW {
        _DADDRW { w: self }
    }
}
|
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R { bits: self.register.get() }
|
partition.go
|
package disk
import (
"fmt"
)
// Partition describes a single partition inside a partition table, optionally
// carrying a payload entity (filesystem, LUKS container, ...).
type Partition struct {
	Start uint64 // Start of the partition in bytes
	Size uint64 // Size of the partition in bytes
	Type string // Partition type, e.g. 0x83 for MBR or a UUID for gpt
	Bootable bool // `Legacy BIOS bootable` (GPT) or `active` (DOS) flag
	// ID of the partition, dos doesn't use traditional UUIDs, therefore this
	// is just a string.
	UUID string
	// If nil, the partition is raw; It doesn't contain a payload.
	Payload Entity
}
// IsContainer reports that a Partition can hold a child entity (its payload).
func (p *Partition) IsContainer() bool {
	return true
}
func (p *Partition) Clone() Entity {
if p == nil {
return nil
}
partition := &Partition{
Start: p.Start,
Size: p.Size,
Type: p.Type,
Bootable: p.Bootable,
UUID: p.UUID,
}
|
if p.Payload != nil {
partition.Payload = p.Payload.Clone()
}
return partition
}
// GetItemCount returns 1 if the partition carries a payload, 0 otherwise.
// (Fix: receiver renamed pt -> p for consistency with every other method.)
func (p *Partition) GetItemCount() uint {
	if p == nil || p.Payload == nil {
		return 0
	}
	return 1
}
// GetChild returns the payload entity; a Partition has at most one child, so
// any index other than 0 is a programming error and panics.
func (p *Partition) GetChild(n uint) Entity {
	if n == 0 {
		return p.Payload
	}
	panic(fmt.Sprintf("invalid child index for Partition: %d != 0", n))
}
// GetSize returns the partition size in bytes.
func (p *Partition) GetSize() uint64 {
	return p.Size
}
// EnsureSize grows the partition to at least s bytes; it never shrinks.
// The return value reports whether the size was changed.
func (p *Partition) EnsureSize(s uint64) bool {
	if s <= p.Size {
		return false
	}
	p.Size = s
	return true
}
// IsBIOSBoot reports whether this is a GPT BIOS-boot partition.
// Safe to call on a nil receiver.
func (p *Partition) IsBIOSBoot() bool {
	return p != nil && p.Type == BIOSBootPartitionGUID
}
// IsPReP reports whether this is a PowerPC Reference Platform boot partition,
// matching either the DOS type "41" or the GPT PReP GUID.
// Safe to call on a nil receiver.
func (p *Partition) IsPReP() bool {
	if p == nil {
		return false
	}
	switch p.Type {
	case "41", PRePartitionGUID:
		return true
	}
	return false
}
| |
mod.rs
|
use std::path::{Path, PathBuf};
use notedown_ast::Result;
use rsass::{compile_scss, compile_scss_path, Error, FileContext, FsFileContext, parse_scss_data, parse_scss_file, parse_scss_path, ScopeRef};
use rsass::output::{Format, Style};
// fn new() -> std::result::Result<Vec<u8>, Error> {
// let items = parse_scss_path(&mut input.as_bytes(), "input.scss")?;
// let format = Format {
// style: Style::Compressed,
// precision: 0,
// };
// format.write_root(
// &items,
// ScopeRef::new_global(format),
// &self.file_context,
// )
// }
/// Compile a SCSS source string to expanded (pretty-printed) CSS.
///
/// `imports` lists directories that are searched when the stylesheet
/// `@import`s other files.
pub fn render_sass(sass: &str, imports: &[PathBuf]) -> Result<String> {
    let format = Format {
        style: Style::Expanded,
        precision: 0,
    };
    let mut fs = FsFileContext::new();
    for file in imports {
        fs.push_path(file.as_path())
    }
    let items = parse_scss_data(sass.as_bytes())?;
    let css = format.write_root(&items, ScopeRef::new_global(format), &fs)?;
    // rsass emits UTF-8; a lossy conversion is identical for valid output and,
    // unlike the previous `String::from_utf8_unchecked`, is not undefined
    // behaviour if the bytes are ever invalid.
    Ok(String::from_utf8_lossy(&css).into_owned())
}
pub fn
|
(sass: &Path, imports: &[PathBuf]) -> Result<String> {
let format = Format {
style: Style::Compressed,
precision: 5,
};
let mut fs = FsFileContext::new();
for file in imports {
fs.push_path(file.as_path())
}
let (sub_context, path) = fs.file(sass);
let items = parse_scss_path(&path)?;
let css = format.write_root(&items, ScopeRef::new_global(format), &sub_context)?;
unsafe {
Ok(String::from_utf8_unchecked(css))
}
}
|
render_sass_path
|
index.ts
|
export * from './controller.decorator';
|
export * from './service.decorator';
export * from './http.decorator';
|
|
to_openmm_System.py
|
from molsysmt._private.exceptions import *
from molsysmt._private.digestion import *
from .is_openmm_Topology import is_openmm_Topology
def to_openmm_System(item, atom_indices='all', forcefield=None, parameters=None, check=True):
    """Convert an openmm.Topology into an openmm.System.

    Parameters
    ----------
    item : openmm.Topology
        Topology to convert.
    atom_indices : str or array-like, default 'all'
        Atoms to include in the conversion.
    forcefield : object, optional
        Force field specification used to build the system.
    parameters : dict, optional
        Extra keyword arguments intended for ``ForceField.createSystem``.
    check : bool, default True
        If True, validate the form and digest the arguments first.

    Raises
    ------
    WrongFormError, WrongAtomIndicesError, WrongForceFieldError
        If validation of the corresponding argument fails.
    NotImplementedMethodError
        Always, for now: the conversion itself is not implemented yet.
    """
    if check:
        # `except Exception` (not a bare `except:`) so Ctrl-C and SystemExit
        # still propagate; chain the original error for debuggability.
        try:
            is_openmm_Topology(item)
        except Exception as err:
            raise WrongFormError('openmm.Topology') from err
        try:
            atom_indices = digest_atom_indices(atom_indices)
        except Exception as err:
            raise WrongAtomIndicesError() from err
        try:
            forcefield = digest_forcefield(forcefield)
        except Exception as err:
            raise WrongForceFieldError() from err
    # Sketch of the eventual implementation, kept for reference:
    #forcefield = molecular_mechanics.to_openmm_ForceField()
    #system_parameters = molecular_mechanics.get_openmm_System_parameters()
    #tmp_item = forcefield.createSystem(item, **parameters)
    # forces['NonbondedForce'].setUseDispersionCorrection(True)
    #if molecular_mechanics.ewald_error_tolerance:
    #    forces['NonbondedForce'].setEwaldErrorTolerance(molecular_mechanics.ewald_error_tolerance)
    #return tmp_item
    # Removed the unreachable `pass` that followed this raise.
    raise NotImplementedMethodError
|
#if molecular_mechanics.use_dispersion_correction or molecular_mechanics.ewald_error_tolerance:
# forces = {ii.__class__.__name__ : ii for ii in tmp_item.getForces()}
#if molecular_mechanics.use_dispersion_correction:
|
params_en1_i.py
|
"""
Main inputs:
(Change for all fields)
"""
# Paths and catalogue description for the EN1 field run.
eazypath = '/data2/ken/photoz/eazy-photoz/src/eazy '
working_folder = '/data2/ken/EN1_pani'
photometry_catalog = 'en1_phot_with_zspec.fits'
photometry_format = 'fits'
filter_file = 'EN1_filters.res'
translate_file = 'EN1.translate'
zspec_col = 'z_spec'
flux_col = 'flux'
fluxerr_col ='fluxerr'
# Pipeline stage switches: only the hierarchical-Bayes and merge stages run.
do_zp = False
do_zp_tests = False
do_subcats = False
do_full = False
do_stellar = False
do_hb = True
do_merge = True
"""
Training parameters
"""
Ncrossval = 1
test_fraction = 0.2
process_outliers = True
correct_extinction = True
"""
Fitting Parameters
(Change only when needed)
"""
# Templates: Any combination of 'eazy', 'swire', 'atlas'
templates = ['eazy', 'atlas', 'cosmos']#, 'swire']#, 'cosmos', 'atlas'] #,'cosmos', 'atlas']
# One fitting mode / default file / error setting per template set, in order.
fitting_mode = ['a', '1', '1']
defaults = ['defaults/zphot.eazy',
            'defaults/zphot.atlas',
            'defaults/zphot.cosmos']
#'defaults/zphot.eazy',
#'defaults/zphot.atlas',
#'defaults/zphot.swire']
stellar_params = 'defaults/zphot.pickles'
additional_errors = [0.0, 0.0, 0.0]
template_error_norm = [1., 1., 1.]
template_error_file = ''
lambda_fit_max = [5., 30., 30.]
"""
Combination Parameters
"""
include_prior = True
|
# Magnitude column used for the alpha weighting in the combination step.
alpha_colname = 'pani_mag'
"""
System Parameters
(Specific system only - fixed after installation)
"""
# Objects processed per block, and worker processes to spawn.
block_size = 1e4
ncpus = 10
|
# Prior configuration: magnitude-based bad-fit prior on the PanSTARRS i band.
fbad_prior = 'mag' # 'flat', 'vol' or 'mag'
#prior_parameter_path = 'en1_i_prior_coeff.npz'
prior_fname = 'pani_mag'
prior_colname = 'pani_mag'
|
storage.go
|
package storage
import (
"gopkg.in/yaml.v2"
"os"
"path"
)
var AppDir string
var baseDir = ".db-backup"
var configurationFilename = "config.yaml"
// init resolves the application directory (~/.db-backup) once at startup.
// Failure to determine the user's home directory is fatal.
func init() {
	homeDir, err := os.UserHomeDir()
	if err != nil {
		panic(err)
	}
	AppDir = path.Join(homeDir, baseDir)
}
func CreateConfigurationFolderIfDoesntExist() error {
|
_, err := os.Stat(AppDir)
switch os.IsNotExist(err) {
case true:
err = os.Mkdir(AppDir, 0755)
if err != nil {
return err
}
break
}
return nil
}
// CreateInitialConfigurationFileIfDoesntExist writes config.yaml inside AppDir
// seeded with the YAML-marshalled `seed`, unless the file already exists.
func CreateInitialConfigurationFileIfDoesntExist(seed interface{}) error {
	configurationFilePath := path.Join(AppDir, configurationFilename)
	if _, err := os.Stat(configurationFilePath); !os.IsNotExist(err) {
		return nil
	}
	f, err := os.Create(configurationFilePath)
	if err != nil {
		return err
	}
	defer f.Close()
	data, err := yaml.Marshal(seed)
	if err != nil {
		return err
	}
	// BUG FIX: the original returned `err` here, which is always nil at this
	// point (Marshal succeeded), silently swallowing write failures.
	if _, writeErr := f.Write(data); writeErr != nil {
		return writeErr
	}
	return nil
}
| |
ops.py
|
"""
Classes to contextualize math operations in log vs linear space.
"""
from types import MethodType
import numpy as np
from ..exceptions import InvalidBase
__all__ = (
'get_ops',
'LinearOperations',
'LogOperations',
)
# For 2.x, these are ascii strings. For 3.x these are unicode strings.
# Non-numeric bases recognized by exp_func()/log_func() below.
acceptable_base_strings = {'linear', 'e'}
def get_ops(base):
    """
    Return an *Operations instance for `base`, creating and caching it on
    first use.

    Parameters
    ----------
    base : float, 'linear', 'e'
        The base for the Operations instance.
    """
    try:
        return cache[base]
    except KeyError:
        # 'linear' is always pre-cached, so a miss must be a log base.
        ops = cache[base] = LogOperations(base)
        return ops
def exp_func(b):
    """
    Returns a base-`b` exponential function.

    Parameters
    ----------
    b : positive float or 'e'
        The base of the desired exponential function.

    Returns
    -------
    exp : function
        The base-`b` exponential function. The returned function operates
        elementwise on NumPy arrays, but note, it is not a ufunc.

    Examples
    --------
    >>> exp2 = exp_func(2)
    >>> exp2(1)
    2.0
    >>> exp3 = exp_func(3)
    >>> exp3(1)
    3.0

    Raises
    ------
    InvalidBase
        If the base is less than zero or equal to one.
    """
    from dit.utils import is_string_like

    if is_string_like(b) and b not in acceptable_base_strings:
        raise InvalidBase(msg=b)

    # Early returns for the recognized fast paths.
    if b == 'linear':
        return lambda x: x  # pragma: no branch
    if b == 2:
        return np.exp2
    if b == 10:
        return lambda x: 10**x
    if b == 'e' or np.isclose(b, np.e):
        return np.exp

    if b <= 0 or b == 1:
        raise InvalidBase(b)

    def exp(x, base=b):
        """
        Return `base`**`x` for scalar or array-like `x`.
        """
        return base**np.asarray(x)

    return exp
def log_func(b):
    """
    Returns a base-`b` logarithm function.

    Parameters
    ----------
    b : positive float or 'e'
        The base of the desired logarithm function.

    Returns
    -------
    log : function
        The base-`b` logarithm function. The returned function will operate
        elementwise on NumPy arrays, but note, it is not a ufunc.

    Examples
    --------
    >>> log2 = log_func(2)
    >>> log2(2)
    1.0
    >>> log3 = log_func(3)
    >>> log3(3)
    1.0

    Raises
    ------
    InvalidBase
        If the base is less than zero or equal to one.
    """
    # BUG FIX: the docstring above was never closed (the terminating triple
    # quote was missing), which made the module a SyntaxError.
    from dit.utils import is_string_like
    if is_string_like(b) and b not in acceptable_base_strings:
        raise InvalidBase(msg=b)
    if b == 'linear':
        log = lambda x: x  # pragma: no branch
    elif b == 2:
        log = np.log2
    elif b == 10:
        log = np.log10
    elif b == 'e' or np.isclose(b, np.e):
        log = np.log
    else:
        if b <= 0 or b == 1:
            raise InvalidBase(b)
        Z = np.log(b)

        def log(x, func=np.log):
            """
            Return the log of `x`

            Parameters
            ----------
            x : float
                The value to take the log of
            func : function
                A logarithm function

            Returns
            -------
            log : float
                The logarithm of `x` in base `b` (from outer scope)
            """
            return func(x) / Z
    return log
class Operations(object):
    """
    Base class which implements certain math operations.
    For example, regular addition with log probabilities is handled specially.
    While we could implement many more operations, we do not. Their usage
    is uncommon and their implementation would be slower as well. For example,
    subtraction with log probabilities must go as:
    .. math::
        log_2(x-y) = log_2(x) + log_2(1 - 2^[ log_2(y) - log_2(x) ])
    Note that if :math:`y > x`, then :math:`log(y) > log(x)` and the inner term
    of the second logarithm will be less than 0, yielding NaN.
    """
    ### Do we allow base == 'e' or should we convert to its numerical value?
    ### Ans: We store whatever was specified but provide get_base() with an
    ###      option to return a numerical base.
    # Subclasses must populate these: identity elements, the base, and the
    # exp/log callables matching that base.
    one = None
    zero = None
    base = None
    exp = None
    log = None
    def get_base(self, numerical=False):
        """
        Returns the base in which operations take place.
        For linear-based operations, the result is 'linear'.
        Parameters
        ----------
        numerical : bool
            If `True`, then if the base is 'e', it is returned as a float.
        """
        if numerical and self.base == 'e':
            base = np.exp(1)
        else:
            base = self.base
        return base
    def is_null(self, p):
        """
        Returns `True` if `p` is a null probability.
        Parameters
        ----------
        p : float
            The probability to be tested.
        """
        # Tolerant comparison against the representation of zero probability.
        return np.isclose(self.zero, p)
    def is_null_exact(self, p):
        """
        Returns `True` if `p` is exactly a null probability.
        Parameters
        ----------
        p : float
            The probability to be tested.
        """
        return self.zero == p
    # The following operations are abstract; LinearOperations and
    # LogOperations provide the concrete semantics.
    def add(self, x, y):
        """ Abstract base class """
        raise NotImplementedError
    def add_inplace(self, x, y):
        """ Abstract base class """
        raise NotImplementedError
    def add_reduce(self, x):
        """ Abstract base class """
        raise NotImplementedError
    def mult(self, x, y):
        """ Abstract base class """
        raise NotImplementedError
    def mult_inplace(self, x, y):
        """ Abstract base class """
        raise NotImplementedError
    def mult_reduce(self, x):
        """ Abstract base class """
        raise NotImplementedError
    def invert(self, x):
        """ Abstract base class """
        raise NotImplementedError
    def normalize(self, x):
        """ Abstract base class """
        raise NotImplementedError
class LinearOperations(Operations):
    """
    The class of operations on linear (non-log) values.
    """
    one = 1
    zero = 0
    base = 'linear'
    # If the functions below are standard Python functions (as opposed to
    # NumPy ufuncs), then they will be treated as unbound methods for the class.
    # During instantiation, they are bound to the instance (since before
    # instantiation they are class methods) and thus, we are left with
    # bound methods (undesirably). If we had modified these attributes in the
    # __init__ function, then they would not be bound (or even unbound methods)
    # but functions instead (desirably). This is precisely what LogOperations
    # does, which is why it does not have this issue. An alternative approach
    # is to explicitly declare these functions to be static methods, as we
    # do below.
    #
    exp = staticmethod(exp_func(base))
    log = staticmethod(log_func(base))
    def add(self, x, y):
        """
        Add the arrays element-wise. Neither x nor y will be modified.
        Assumption: :math:`y >= 0`.
        Operation: :math:`z[i] = x[i] + y[i]`
        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to add.
        Returns
        -------
        z : NumPy array, shape (n,)
            The resultant array.
        """
        z = x + y
        return z
    def add_inplace(self, x, y):
        """
        Adds `y` to `x`, in-place. `x` will be modified, but `y` will not.
        Assumption: :math:`y >= 0`.
        Operation: :math:`x[i] += y[i]`
        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to add.
        Returns
        -------
        x : NumPy array, shape (n,)
            The resultant array.
        """
        x += y
        return x
    def add_reduce(self, x, axis=None):
        """
        Performs an `addition' reduction on `x`.
        Assumption: :math:`y >= 0`.
        Operation: :math:`z = \\sum_i x[i]`
        Returns
        -------
        z : float
            The summation of the elements in `x`.
        """
        z = x.sum(axis=axis)
        return z
    def mult(self, x, y):
        """
        Multiplies the arrays element-wise. Neither x nor y will be modified.
        Operation: :math:`z[i] = x[i] * y[i]`
        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to multiply.
        Returns
        -------
        z : NumPy array, shape (n,)
            The resultant array.
        """
        z = x * y
        return z
    def mult_inplace(self, x, y):
        """
        Multiplies `y` to `x`, in-place. `x` will be modified, but `y` will not.
        Operation: :math:`x[i] *= y[i]`
        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to multiply.
        Returns
        -------
        x : NumPy array, shape (n,)
            The resultant array.
        """
        x *= y
        return x
    def mult_reduce(self, x, axis=None):
        """
        Performs an `multiplication' reduction on `x`.
        Operation: :math:`z = \\prod_i x[i]`
        Returns
        -------
        z : float
            The product of the elements in `x`.
        """
        z = np.prod(x, axis=axis)
        return z
    def invert(self, x):
        """
        Returns the element-wise multiplicative inverse of x.
        Operation: :math:`z[i] = 1/x[i]`
        Parameters
        ----------
        x : NumPy array, shape (n,)
            The array to invert.
        Returns
        -------
        z : NumPy array, shape (n,)
            The inverted array.
        """
        z = 1 / x
        return z
    def normalize(self, x, axis=None):
        """
        Returns a normalized version of x.
        Operation: :math:`z[i] = x[i] / sum(x)`
        If x is 2D and axis is None, then normalization is over all elements.
        Use axis=-1 to normalize each row of x.
        Parameters
        ----------
        x : NumPy array, shape (n,)
            The array to normalize.
        Returns
        -------
        z : NumPy array, shape (n,)
            The normalized array.
        """
        # BUG FIX: `axis` was previously ignored (the sum was always taken
        # with axis=None), so axis=-1 did not normalize each row as the
        # docstring promises. keepdims=True keeps the sum broadcastable
        # against x for any axis, including axis=None.
        z = x / x.sum(axis=axis, keepdims=True)
        return z
def set_add(ops):
    """
    Attach a numerically stable `add` method to the given LogOperations
    instance.
    """
    # Accuracy requires a logaddexp-style primitive; NumPy only provides one
    # for base e and base 2. Any other base is handled by converting to
    # base 2, adding there, and converting back. Default arguments pin the
    # chosen ufunc locally in each closure.
    base = ops.base
    if base == 2:
        def add(self, x, y, func=np.logaddexp2):
            return func(x, y)
    elif base == 'e' or np.isclose(base, np.e):
        def add(self, x, y, func=np.logaddexp):
            return func(x, y)
    else:
        # No need to optimize this...
        def add(self, x, y):
            # log_b -> log_2, add in base 2, then back to log_b.
            z = np.logaddexp2(x * np.log2(base), y * np.log2(base))
            z *= self.log(2)
            return z
    add.__doc__ = """
        Add the arrays element-wise. Neither x nor y will be modified.
        Assumption: y <= 0.
        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to add.
        Returns
        -------
        z : NumPy array, shape (n,)
            The resultant array.
        """
    ops.add = MethodType(add, ops)
def set_add_inplace(ops):
    """
    Attach an in-place `add_inplace` method to the given LogOperations
    instance.
    """
    base = ops.base
    if base == 2:
        def add_inplace(self, x, y, func=np.logaddexp2):
            return func(x, y, out=x)
    elif base == 'e' or np.isclose(base, np.e):
        def add_inplace(self, x, y, func=np.logaddexp):
            return func(x, y, out=x)
    else:
        def add_inplace(self, x, y):
            # Convert x in place to base 2, add y's converted copy, convert back.
            scale = np.log2(base)
            x *= scale
            np.logaddexp2(x, y * scale, x)
            x *= self.log(2)
            return x
    add_inplace.__doc__ = """
        Adds `y` to `x`, in-place. `x` will be modified, but `y` will not.
        Assumption: :math:`y <= 0`.
        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to add.
        Returns
        -------
        x : NumPy array, shape (n,)
            The resultant array.
        """
    ops.add_inplace = MethodType(add_inplace, ops)
def set_add_reduce(ops):
    """
    Attach an `add_reduce` method (log-space summation) to the given
    LogOperations instance.
    """
    # https://github.com/numpy/numpy/issues/4599
    base = ops.base
    if base == 2 or base == 'e' or np.isclose(base, np.e):
        # Native ufunc available; pick it once and close over it.
        ufunc = np.logaddexp2 if base == 2 else np.logaddexp
        def add_reduce(self, x, axis=None, func=ufunc):
            if len(x) == 0:
                # logaddexp(2).identity is None, so empty input is special.
                return self.zero
            # Converts to a NumPy array if necessary.
            return func.reduce(x, axis=axis, dtype=float)
    else:
        def add_reduce(self, x, axis=None):
            if len(x) == 0:
                # logaddexp.identity is None, so empty input is special.
                return self.zero
            # Change to base 2, reduce there, then convert back.
            x2 = x * np.log2(base)
            z = np.logaddexp2.reduce(x2, axis=axis, dtype=float)
            return z / np.log2(base)
    add_reduce.__doc__ = """
        Performs an `addition' reduction on `x`.
        Assumption: :math:`y <= 0`.
        Returns
        -------
        z : float
            The summation of the elements in `x`.
        """
    ops.add_reduce = MethodType(add_reduce, ops)
class LogOperations(Operations):
    # Populated by set_base(); None until then.
    one = None
    zero = None
    base = None
    exp = None
    log = None

    def __init__(self, base):
        """
        Initialize the log operation manager.

        Parameters
        ----------
        base : float
            The base of the logarithm.
        """
        self.set_base(base)

    def set_base(self, base):
        """
        Change the base of the logarithm.

        Parameters
        ----------
        base : float
            The base of the logarithm.
        """
        self.base = base
        self.exp = exp_func(base)
        self.log = log_func(base)
        # Note: When base < 1, zero == +inf. When base > 1, zero == -inf.
        self.one = self.log(1)
        self.zero = self.log(0)
        # Rebind the base-dependent addition methods.
        set_add(self)
        set_add_inplace(self)
        set_add_reduce(self)

    def mult(self, x, y):
        """
        Multiply the probabilities element-wise (log-space addition).
        Neither `x` nor `y` is modified.

        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to multiply.

        Returns
        -------
        z : NumPy array, shape (n,)
            The resultant array.
        """
        return x + y

    def mult_inplace(self, x, y):
        """
        Multiply `x` by `y` in place (log-space +=). `x` is modified,
        `y` is not.

        Parameters
        ----------
        x, y : NumPy arrays, shape (n,)
            The arrays to multiply.

        Returns
        -------
        x : NumPy array, shape (n,)
            The resultant array.
        """
        x += y
        return x

    def mult_reduce(self, x, axis=None):
        """
        Reduce `x` by multiplication (log-space summation).

        Returns
        -------
        z : float
            The product of the elements in `x`.
        """
        # NumPy's additive identity (0) is exactly log(1), the multiplicative
        # identity for probabilities, so a plain sum is correct here.
        return x.sum(axis=axis)

    def invert(self, x):
        """
        Element-wise multiplicative inverse of `x`: :math:`1/x`
        (log-space negation).

        Parameters
        ----------
        x : NumPy array, shape (n,)
            The array to invert.

        Returns
        -------
        z : NumPy array, shape (n,)
            The inverted array.
        """
        return -x

    def normalize(self, x, axis=None):
        """
        Return a normalized version of `x`.

        Non-log equivalent operation: :math:`z[i] = x[i] / sum(x)`

        If `x` is 2D and axis is None, normalization is over all elements;
        use axis=-1 to normalize each row of `x`.

        Parameters
        ----------
        x : NumPy array, shape (n,)
            The array to normalize.

        Returns
        -------
        z : NumPy array, shape (n,)
            The normalized array.
        """
        # Equivalent to mult(x, invert(add_reduce(x))) with fewer calls.
        return x - self.add_reduce(x, axis=axis)
# Pre-built operation managers keyed by base. get_ops() consults this dict and
# lazily adds LogOperations instances for any other base.
cache = {
    'linear': LinearOperations(),
    2: LogOperations(2),
    'e': LogOperations('e')
}
|
"""
|
config_test.go
|
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"fmt"
"io/ioutil"
"os"
"strings"
"testing"
"path/filepath"
"github.com/ghodss/yaml"
prow_config "k8s.io/test-infra/prow/config"
config_pb "k8s.io/test-infra/testgrid/config"
)
type SQConfig struct {
Data map[string]string `yaml:"data,omitempty"`
}
var (
companies = []string{
"canonical",
"cri-o",
"istio",
"google",
"kopeio",
"tectonic",
"redhat",
"vmware",
}
orgs = []string{
"conformance",
"presubmits",
"sig",
"wg",
}
prefixes = [][]string{orgs, companies}
)
// Shared testgrid config, loaded at TestMain.
var cfg *config_pb.Configuration
// TestMain loads and validates ../../config.yaml exactly once, exposing the
// parsed proto through the package-level cfg before running all tests.
func TestMain(m *testing.M) {
	//make sure we can parse config.yaml
	yamlData, err := ioutil.ReadFile("../../config.yaml")
	if err != nil {
		fmt.Printf("IO Error : Cannot Open File config.yaml")
		os.Exit(1)
	}
	c := Config{}
	if err := c.Update(yamlData); err != nil {
		fmt.Printf("Yaml2Proto - Conversion Error %v", err)
		os.Exit(1)
	}
	cfg, err = c.Raw()
	if err != nil {
		fmt.Printf("Error validating config: %v", err)
		os.Exit(1)
	}
	os.Exit(m.Run())
}
// TestConfig cross-validates the testgrid configuration: testgroup naming and
// uniqueness, dashboard/tab structure, dashboard-group consistency, and the
// submit-queue configmap against the sq-blocking dashboard.
func TestConfig(t *testing.T) {
	// testgroup - occurrence map, validate testgroups
	testgroupMap := make(map[string]int32)
	for testgroupidx, testgroup := range cfg.TestGroups {
		// All testgroup must have a name and a query
		if testgroup.Name == "" || testgroup.GcsPrefix == "" {
			t.Errorf("Testgroup #%v (Name: '%v', Query: '%v'): - Must have a name and query",
				testgroupidx, testgroup.Name, testgroup.GcsPrefix)
		}
		// All testgroup must not have duplicated names
		if testgroupMap[testgroup.Name] > 0 {
			t.Errorf("Duplicated Testgroup: %v", testgroup.Name)
		} else {
			testgroupMap[testgroup.Name] = 1
		}
		if !testgroup.IsExternal {
			t.Errorf("Testgroup %v: IsExternal should always be true!", testgroup.Name)
		}
		if !testgroup.UseKubernetesClient {
			t.Errorf("Testgroup %v: UseKubernetesClient should always be true!", testgroup.Name)
		}
		if strings.HasPrefix(testgroup.GcsPrefix, "kubernetes-jenkins/logs/") {
			// The expectation is that testgroup.Name is the name of a Prow job and the GCSPrefix
			// follows the convention kubernetes-jenkins/logs/.../jobName
			// The final part of the prefix should be the job name.
			expected := filepath.Join(filepath.Dir(testgroup.GcsPrefix), testgroup.Name)
			if expected != testgroup.GcsPrefix {
				t.Errorf("Kubernetes Testgroup %v GcsPrefix; Got %v; Want %v", testgroup.Name, testgroup.GcsPrefix, expected)
			}
		}
		if testgroup.TestNameConfig != nil {
			if testgroup.TestNameConfig.NameFormat == "" {
				t.Errorf("Testgroup %v: NameFormat must not be empty!", testgroup.Name)
			}
			if len(testgroup.TestNameConfig.NameElements) != strings.Count(testgroup.TestNameConfig.NameFormat, "%") {
				t.Errorf("Testgroup %v: TestNameConfig must have number NameElement equal to format count in NameFormat!", testgroup.Name)
			}
		}
		// All PR testgroup has num_columns_recent equals 20
		if strings.HasPrefix(testgroup.GcsPrefix, "kubernetes-jenkins/pr-logs/directory/") {
			if testgroup.NumColumnsRecent < 20 {
				t.Errorf("Testgroup %v: num_columns_recent: must be greater than 20 for presubmit jobs!", testgroup.Name)
			}
		}
	}
	// dashboard name set
	dashboardmap := make(map[string]bool)
	for dashboardidx, dashboard := range cfg.Dashboards {
		// All dashboard must have a name
		if dashboard.Name == "" {
			t.Errorf("Dashboard %v: - Must have a name", dashboardidx)
		}
		// Dashboard names must carry one of the known org/company prefixes.
		found := false
		for _, kind := range prefixes {
			for _, prefix := range kind {
				if strings.HasPrefix(dashboard.Name, prefix+"-") || dashboard.Name == prefix {
					found = true
					break
				}
			}
			if found {
				break
			}
		}
		if !found {
			t.Errorf("Dashboard %v: must prefix with one of: %v", dashboard.Name, prefixes)
		}
		// All dashboard must not have duplicated names
		if dashboardmap[dashboard.Name] {
			t.Errorf("Duplicated dashboard: %v", dashboard.Name)
		} else {
			dashboardmap[dashboard.Name] = true
		}
		// All dashboard must have at least one tab
		if len(dashboard.DashboardTab) == 0 {
			t.Errorf("Dashboard %v: - Must have more than one dashboardtab", dashboard.Name)
		}
		// dashboardtab name set, to check duplicated tabs within each dashboard
		dashboardtabmap := make(map[string]bool)
		// All notifications in dashboard must have a summary
		if len(dashboard.Notifications) != 0 {
			for notificationindex, notification := range dashboard.Notifications {
				if notification.Summary == "" {
					t.Errorf("Notification %v in dashboard %v: - Must have a summary", notificationindex, dashboard.Name)
				}
			}
		}
		for tabindex, dashboardtab := range dashboard.DashboardTab {
			// All dashboardtab must have a name and a testgroup
			if dashboardtab.Name == "" || dashboardtab.TestGroupName == "" {
				t.Errorf("Dashboard %v, tab %v: - Must have a name and a testgroup name", dashboard.Name, tabindex)
			}
			// All dashboardtab within a dashboard must not have duplicated names
			if dashboardtabmap[dashboardtab.Name] {
				t.Errorf("Duplicated dashboardtab: %v", dashboardtab.Name)
			} else {
				dashboardtabmap[dashboardtab.Name] = true
			}
			// All testgroup in dashboard must be defined in testgroups
			if testgroupMap[dashboardtab.TestGroupName] == 0 {
				t.Errorf("Dashboard %v, tab %v: - Testgroup %v must be defined first",
					dashboard.Name, dashboardtab.Name, dashboardtab.TestGroupName)
			} else {
				testgroupMap[dashboardtab.TestGroupName]++
			}
			if dashboardtab.AlertOptions != nil && (dashboardtab.AlertOptions.AlertStaleResultsHours != 0 || dashboardtab.AlertOptions.NumFailuresToAlert != 0) {
				for _, testgroup := range cfg.TestGroups {
					// Disallow alert options in tab but not group.
					// Disallow different alert options in tab vs. group.
					if testgroup.Name == dashboardtab.TestGroupName {
						if testgroup.AlertStaleResultsHours == 0 {
							t.Errorf("Cannot define alert_stale_results_hours in DashboardTab %v and not TestGroup %v.", dashboardtab.Name, dashboardtab.TestGroupName)
						}
						if testgroup.NumFailuresToAlert == 0 {
							t.Errorf("Cannot define num_failures_to_alert in DashboardTab %v and not TestGroup %v.", dashboardtab.Name, dashboardtab.TestGroupName)
						}
						if testgroup.AlertStaleResultsHours != dashboardtab.AlertOptions.AlertStaleResultsHours {
							t.Errorf("alert_stale_results_hours for DashboardTab %v must match TestGroup %v.", dashboardtab.Name, dashboardtab.TestGroupName)
						}
						if testgroup.NumFailuresToAlert != dashboardtab.AlertOptions.NumFailuresToAlert {
							t.Errorf("num_failures_to_alert for DashboardTab %v must match TestGroup %v.", dashboardtab.Name, dashboardtab.TestGroupName)
						}
					}
				}
			}
		}
	}
	// No dup of dashboard groups, and no dup dashboard in a dashboard group
	groups := make(map[string]bool)
	tabs := make(map[string]string)
	for idx, dashboardGroup := range cfg.DashboardGroups {
		// All dashboard must have a name
		if dashboardGroup.Name == "" {
			t.Errorf("DashboardGroup %v: - DashboardGroup must have a name", idx)
		}
		// Group names must carry one of the known org/company prefixes too.
		found := false
		for _, kind := range prefixes {
			for _, prefix := range kind {
				if strings.HasPrefix(dashboardGroup.Name, prefix+"-") || prefix == dashboardGroup.Name {
					found = true
					break
				}
			}
			if found {
				break
			}
		}
		if !found {
			t.Errorf("Dashboard group %v: must prefix with one of: %v", dashboardGroup.Name, prefixes)
		}
		// All dashboardgroup must not have duplicated names
		if _, ok := groups[dashboardGroup.Name]; ok {
			t.Errorf("Duplicated dashboard: %v", dashboardGroup.Name)
		} else {
			groups[dashboardGroup.Name] = true
		}
		if _, ok := dashboardmap[dashboardGroup.Name]; ok {
			t.Errorf("%v is both a dashboard and dashboard group name.", dashboardGroup.Name)
		}
		for _, dashboard := range dashboardGroup.DashboardNames {
			// All dashboard must not have duplicated names
			if exist, ok := tabs[dashboard]; ok {
				t.Errorf("Duplicated dashboard %v in dashboard group %v and %v", dashboard, exist, dashboardGroup.Name)
			} else {
				tabs[dashboard] = dashboardGroup.Name
			}
			if _, ok := dashboardmap[dashboard]; !ok {
				t.Errorf("Dashboard %v needs to be defined before adding to a dashboard group!", dashboard)
			}
			if !strings.HasPrefix(dashboard, dashboardGroup.Name+"-") {
				t.Errorf("Dashboard %v in group %v must have the group name as a prefix", dashboard, dashboardGroup.Name)
			}
		}
	}
	// All Testgroup should be mapped to one or more tabs
	// (occurrence stayed at 1 means no dashboard tab ever referenced it).
	for testgroupname, occurrence := range testgroupMap {
		if occurrence == 1 {
			t.Errorf("Testgroup %v - defined but not used in any dashboards", testgroupname)
		}
	}
	// make sure items in sq-blocking dashboard matches sq configmap
	sqJobPool := []string{}
	for _, d := range cfg.Dashboards {
		if d.Name != "sq-blocking" {
			continue
		}
		for _, tab := range d.DashboardTab {
			for _, t := range cfg.TestGroups {
				if t.Name == tab.TestGroupName {
					job := strings.TrimPrefix(t.GcsPrefix, "kubernetes-jenkins/logs/")
					sqJobPool = append(sqJobPool, job)
					break
				}
			}
		}
	}
	sqConfigPath := "../../../mungegithub/submit-queue/deployment/kubernetes/configmap.yaml"
	configData, err := ioutil.ReadFile(sqConfigPath)
	if err != nil {
		t.Errorf("Read Buffer Error for SQ Data : %v", err)
	}
	sqData := &SQConfig{}
	err = yaml.Unmarshal([]byte(configData), &sqData)
	if err != nil {
		t.Errorf("Unmarshal Error for SQ Data : %v", err)
	}
	// NOTE(review): this loop errors unconditionally for every entry in
	// sqJobPool — it looks like a membership check against sqData.Data
	// (e.g. removing matched jobs from the pool) was lost. Confirm intent.
	for _, testgridJob := range sqJobPool {
		t.Errorf("Err : testgrid job %v not found in SQ config", testgridJob)
	}
	sqNonBlockingJobs := strings.Split(sqData.Data["nonblocking-jobs"], ",")
	for _, sqJob := range sqNonBlockingJobs {
		if sqJob == "" { // ignore empty list of jobs
			continue
		}
		found := false
		for _, testgroup := range cfg.TestGroups {
			if testgroup.Name == sqJob {
				found = true
				break
			}
		}
		if !found {
			t.Errorf("Err : %v not found in testgrid config", sqJob)
		}
	}
}
func TestJobsTestgridEntryMatch(t *testing.T)
|
{
prowPath := "../../../prow/config.yaml"
jobPath := "../../../config/jobs"
jobs := make(map[string]bool)
prowConfig, err := prow_config.Load(prowPath, jobPath)
if err != nil {
t.Fatalf("Could not load prow configs: %v\n", err)
}
// Also check k/k presubmit, prow postsubmit and periodic jobs
for _, job := range prowConfig.AllPresubmits([]string{
"bazelbuild/rules_k8s",
"google/cadvisor",
"helm/charts",
"kubeflow/caffe2-operator",
"kubeflow/examples",
"kubeflow/experimental-beagle",
"kubeflow/experimental-kvc",
"kubeflow/experimental-seldon",
"kubeflow/katib",
"kubeflow/kubebench",
"kubeflow/kubeflow",
"kubeflow/mpi-operator",
"kubeflow/pytorch-operator",
"kubeflow/reporting",
"kubeflow/testing",
"kubeflow/tf-operator",
"kubeflow/website",
"kubernetes-sigs/cluster-api",
"kubernetes-sigs/cluster-api-provider-aws",
"kubernetes-sigs/cluster-api-provider-gcp",
"kubernetes-sigs/cluster-api-provider-openstack",
"kubernetes-sigs/poseidon",
"kubernetes/cluster-registry",
"kubernetes/cloud-provider-vsphere",
"kubernetes/federation",
"kubernetes/heapster",
"kubernetes/kops",
"kubernetes/kubernetes",
"kubernetes/test-infra",
"tensorflow/minigo",
}) {
jobs[job.Name] = false
}
for _, job := range prowConfig.AllPostsubmits([]string{}) {
jobs[job.Name] = false
}
for _, job := range prowConfig.AllPeriodics() {
jobs[job.Name] = false
}
// For now anything outsite k8s-jenkins/(pr-)logs are considered to be fine
testgroups := make(map[string]bool)
for _, testgroup := range cfg.TestGroups {
if strings.Contains(testgroup.GcsPrefix, "kubernetes-jenkins/logs/") {
// The convention is that the job name is the final part of the GcsPrefix
job := filepath.Base(testgroup.GcsPrefix)
testgroups[job] = false
}
if strings.Contains(testgroup.GcsPrefix, "kubernetes-jenkins/pr-logs/directory/") {
job := strings.TrimPrefix(testgroup.GcsPrefix, "kubernetes-jenkins/pr-logs/directory/")
testgroups[job] = false
}
}
// Cross check
// -- Each job need to have a match testgrid group
for job := range jobs {
if _, ok := testgroups[job]; ok {
testgroups[job] = true
jobs[job] = true
}
}
// Conclusion
badjobs := []string{}
for job, valid := range jobs {
if !valid {
badjobs = append(badjobs, job)
fmt.Printf("Job %v does not have a matching testgrid testgroup\n", job)
}
}
badconfigs := []string{}
for testgroup, valid := range testgroups {
if !valid {
badconfigs = append(badconfigs, testgroup)
fmt.Printf("Testgrid group %v does not have a matching jenkins or prow job\n", testgroup)
}
}
if len(badconfigs) > 0 {
fmt.Printf("Total bad config(s) - %v\n", len(badconfigs))
}
if len(badjobs) > 0 {
fmt.Printf("Total bad job(s) - %v\n", len(badjobs))
}
if len(badconfigs) > 0 || len(badjobs) > 0 {
t.Fatal("Failed with invalid config or job entries")
}
}
|
|
app.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { app, BrowserWindow, contentTracing, dialog, ipcMain, protocol, session, Session, systemPreferences } from 'electron';
import { statSync } from 'fs';
import { hostname, release } from 'os';
import { VSBuffer } from 'vs/base/common/buffer';
import { onUnexpectedError, setUnexpectedErrorHandler } from 'vs/base/common/errors';
import { isEqualOrParent } from 'vs/base/common/extpath';
import { once } from 'vs/base/common/functional';
import { stripComments } from 'vs/base/common/json';
import { getPathLabel, mnemonicButtonLabel } from 'vs/base/common/labels';
import { Disposable } from 'vs/base/common/lifecycle';
import { Schemas } from 'vs/base/common/network';
import { isAbsolute, join, posix } from 'vs/base/common/path';
import { IProcessEnvironment, isLinux, isLinuxSnap, isMacintosh, isWindows } from 'vs/base/common/platform';
import { joinPath } from 'vs/base/common/resources';
import { withNullAsUndefined } from 'vs/base/common/types';
import { URI } from 'vs/base/common/uri';
import { generateUuid } from 'vs/base/common/uuid';
import { getMachineId } from 'vs/base/node/id';
import { registerContextMenuListener } from 'vs/base/parts/contextmenu/electron-main/contextmenu';
import { getDelayedChannel, ProxyChannel, StaticRouter } from 'vs/base/parts/ipc/common/ipc';
import { Server as ElectronIPCServer } from 'vs/base/parts/ipc/electron-main/ipc.electron';
import { Client as MessagePortClient } from 'vs/base/parts/ipc/electron-main/ipc.mp';
import { Server as NodeIPCServer } from 'vs/base/parts/ipc/node/ipc.net';
import { ProxyAuthHandler } from 'vs/code/electron-main/auth';
import { localize } from 'vs/nls';
import { IBackupMainService } from 'vs/platform/backup/electron-main/backup';
import { BackupMainService } from 'vs/platform/backup/electron-main/backupMainService';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { UserConfigurationFileService, UserConfigurationFileServiceId } from 'vs/platform/configuration/common/userConfigurationFileService';
import { ElectronExtensionHostDebugBroadcastChannel } from 'vs/platform/debug/electron-main/extensionHostDebugIpc';
import { IDiagnosticsService } from 'vs/platform/diagnostics/common/diagnostics';
import { DialogMainService, IDialogMainService } from 'vs/platform/dialogs/electron-main/dialogMainService';
import { serve as serveDriver } from 'vs/platform/driver/electron-main/driver';
import { EncryptionMainService, IEncryptionMainService } from 'vs/platform/encryption/electron-main/encryptionMainService';
import { NativeParsedArgs } from 'vs/platform/environment/common/argv';
import { IEnvironmentMainService } from 'vs/platform/environment/electron-main/environmentMainService';
import { isLaunchedFromCli } from 'vs/platform/environment/node/argvHelper';
import { resolveShellEnv } from 'vs/platform/environment/node/shellEnv';
import { IExtensionUrlTrustService } from 'vs/platform/extensionManagement/common/extensionUrlTrust';
import { ExtensionUrlTrustService } from 'vs/platform/extensionManagement/node/extensionUrlTrustService';
import { IExternalTerminalMainService } from 'vs/platform/externalTerminal/common/externalTerminal';
import { LinuxExternalTerminalService, MacExternalTerminalService, WindowsExternalTerminalService } from 'vs/platform/externalTerminal/node/externalTerminalService';
import { IFileService } from 'vs/platform/files/common/files';
import { SyncDescriptor } from 'vs/platform/instantiation/common/descriptors';
import { IInstantiationService, ServicesAccessor } from 'vs/platform/instantiation/common/instantiation';
import { ServiceCollection } from 'vs/platform/instantiation/common/serviceCollection';
import { IIssueMainService, IssueMainService } from 'vs/platform/issue/electron-main/issueMainService';
import { IKeyboardLayoutMainService, KeyboardLayoutMainService } from 'vs/platform/keyboardLayout/electron-main/keyboardLayoutMainService';
import { ILaunchMainService, LaunchMainService } from 'vs/platform/launch/electron-main/launchMainService';
import { ILifecycleMainService, LifecycleMainPhase } from 'vs/platform/lifecycle/electron-main/lifecycleMainService';
import { ILoggerService, ILogService } from 'vs/platform/log/common/log';
import { LoggerChannel, LogLevelChannel } from 'vs/platform/log/common/logIpc';
import { IMenubarMainService, MenubarMainService } from 'vs/platform/menubar/electron-main/menubarMainService';
import { INativeHostMainService, NativeHostMainService } from 'vs/platform/native/electron-main/nativeHostMainService';
import { IProductService } from 'vs/platform/product/common/productService';
import { getRemoteAuthority } from 'vs/platform/remote/common/remoteHosts';
import { SharedProcess } from 'vs/platform/sharedProcess/electron-main/sharedProcess';
import { ISignService } from 'vs/platform/sign/common/sign';
import { IStateMainService } from 'vs/platform/state/electron-main/state';
import { StorageDatabaseChannel } from 'vs/platform/storage/electron-main/storageIpc';
import { IStorageMainService, StorageMainService } from 'vs/platform/storage/electron-main/storageMainService';
import { resolveCommonProperties } from 'vs/platform/telemetry/common/commonProperties';
import { ITelemetryService, machineIdKey } from 'vs/platform/telemetry/common/telemetry';
import { TelemetryAppenderClient } from 'vs/platform/telemetry/common/telemetryIpc';
import { ITelemetryServiceConfig, TelemetryService } from 'vs/platform/telemetry/common/telemetryService';
import { NullTelemetryService } from 'vs/platform/telemetry/common/telemetryUtils';
import { IUpdateService } from 'vs/platform/update/common/update';
import { UpdateChannel } from 'vs/platform/update/common/updateIpc';
import { DarwinUpdateService } from 'vs/platform/update/electron-main/updateService.darwin';
import { LinuxUpdateService } from 'vs/platform/update/electron-main/updateService.linux';
import { SnapUpdateService } from 'vs/platform/update/electron-main/updateService.snap';
import { Win32UpdateService } from 'vs/platform/update/electron-main/updateService.win32';
import { IOpenURLOptions, IURLService } from 'vs/platform/url/common/url';
import { URLHandlerChannelClient, URLHandlerRouter } from 'vs/platform/url/common/urlIpc';
import { NativeURLService } from 'vs/platform/url/common/urlService';
import { ElectronURLListener } from 'vs/platform/url/electron-main/electronUrlListener';
import { IWebviewManagerService } from 'vs/platform/webview/common/webviewManagerService';
import { WebviewMainService } from 'vs/platform/webview/electron-main/webviewMainService';
import { IWindowOpenable } from 'vs/platform/windows/common/windows';
import { ICodeWindow, IWindowsMainService, OpenContext, WindowError } from 'vs/platform/windows/electron-main/windows';
import { WindowsMainService } from 'vs/platform/windows/electron-main/windowsMainService';
import { ActiveWindowManager } from 'vs/platform/windows/node/windowTracker';
import { hasWorkspaceFileExtension, IWorkspacesService } from 'vs/platform/workspaces/common/workspaces';
import { IWorkspacesHistoryMainService, WorkspacesHistoryMainService } from 'vs/platform/workspaces/electron-main/workspacesHistoryMainService';
import { WorkspacesMainService } from 'vs/platform/workspaces/electron-main/workspacesMainService';
import { IWorkspacesManagementMainService, WorkspacesManagementMainService } from 'vs/platform/workspaces/electron-main/workspacesManagementMainService';
/**
* The main VS Code application. There will only ever be one instance,
* even if the user starts many instances (e.g. from the command line).
*/
export class
|
extends Disposable {
private windowsMainService: IWindowsMainService | undefined;
private nativeHostMainService: INativeHostMainService | undefined;
/**
 * Creates the application instance.
 *
 * Dependencies are injected positionally by the instantiation service via
 * the parameter decorators, so the order of decorated parameters must stay
 * stable.
 */
constructor(
private readonly mainProcessNodeIpcServer: NodeIPCServer,
private readonly userEnv: IProcessEnvironment,
@IInstantiationService private readonly mainInstantiationService: IInstantiationService,
@ILogService private readonly logService: ILogService,
@IEnvironmentMainService private readonly environmentMainService: IEnvironmentMainService,
@ILifecycleMainService private readonly lifecycleMainService: ILifecycleMainService,
@IConfigurationService private readonly configurationService: IConfigurationService,
@IStateMainService private readonly stateMainService: IStateMainService,
@IFileService private readonly fileService: IFileService,
@IProductService private readonly productService: IProductService
) {
super();

// Configure the Electron session (security handlers, code cache) first,
// then wire up process- and app-level listeners.
this.configureSession();
this.registerListeners();
}
/**
 * Configures the default Electron session: locks down permission
 * request/check handlers (only specific clipboard permissions are granted,
 * and only to webview origins) and partitions Chrome's code cache folder.
 */
private configureSession(): void {

	//#region Security related measures (https://electronjs.org/docs/tutorial/security)
	//
	// !!! DO NOT CHANGE without consulting the documentation !!!
	//
	const webviewPermissions = new Set([
		'clipboard-read',
		'clipboard-sanitized-write',
	]);

	const requestComesFromWebview = (requestingUrl: string | undefined) => requestingUrl?.startsWith(`${Schemas.vscodeWebview}://`);

	session.defaultSession.setPermissionRequestHandler((_webContents, permission /* 'media' | 'geolocation' | 'notifications' | 'midiSysex' | 'pointerLock' | 'fullscreen' | 'openExternal' */, callback, details) => {
		if (requestComesFromWebview(details.requestingUrl)) {
			callback(webviewPermissions.has(permission));
		} else {
			callback(false); // deny everything that is not a webview request
		}
	});

	session.defaultSession.setPermissionCheckHandler((_webContents, permission /* 'media' */, _origin, details) => {
		if (!requestComesFromWebview(details.requestingUrl)) {
			return false;
		}

		return webviewPermissions.has(permission);
	});

	//#endregion

	//#region Code Cache

	type SessionWithCodeCachePathSupport = typeof Session & {
		/**
		 * Sets code cache directory. By default, the directory will be `Code Cache` under
		 * the respective user data folder.
		 */
		setCodeCachePath?(path: string): void;
	};

	const codeCachePath = this.environmentMainService.codeCachePath;
	const defaultSession = session.defaultSession as unknown as SessionWithCodeCachePathSupport;
	if (codeCachePath && typeof defaultSession.setCodeCachePath === 'function') {
		// Partition Chrome's code cache folder in the same way as our own
		// code cache path to help invalidate caches that we know are invalid
		// (https://github.com/microsoft/vscode/issues/120655)
		defaultSession.setCodeCachePath(join(codeCachePath, 'chrome'));
	}

	//#endregion
}
/**
 * Registers global error handlers plus the `app`/`ipcMain` listeners that
 * must be in place for the entire lifetime of the main process (macOS file
 * opening, security handlers, renderer bootstrap IPC, etc.).
 */
private registerListeners(): void {
// We handle uncaught exceptions here to prevent electron from opening a dialog to the user
setUnexpectedErrorHandler(error => this.onUnexpectedError(error));
process.on('uncaughtException', error => onUnexpectedError(error));
process.on('unhandledRejection', (reason: unknown) => onUnexpectedError(reason));
// Dispose on shutdown
this.lifecycleMainService.onWillShutdown(() => this.dispose());
// Contextmenu via IPC support
registerContextMenuListener();
// Accessibility change event: broadcast to all renderer windows
app.on('accessibility-support-changed', (event, accessibilitySupportEnabled) => {
this.windowsMainService?.sendToAll('vscode:accessibilitySupportChanged', accessibilitySupportEnabled);
});
// macOS dock activate
app.on('activate', (event, hasVisibleWindows) => {
this.logService.trace('app#activate');
// Mac only event: open new window when we get activated
if (!hasVisibleWindows) {
this.windowsMainService?.openEmptyWindow({ context: OpenContext.DOCK });
}
});
//#region Security related measures (https://electronjs.org/docs/tutorial/security)
//
// !!! DO NOT CHANGE without consulting the documentation !!!
//
app.on('web-contents-created', (event, contents) => {
// Block all in-page navigation; external URLs are handed to the OS instead
contents.on('will-navigate', event => {
this.logService.error('webContents#will-navigate: Prevented webcontent navigation');
event.preventDefault();
});
contents.setWindowOpenHandler(({ url }) => {
this.nativeHostMainService?.openExternal(undefined, url);
return { action: 'deny' };
});
});
//#endregion
let macOpenFileURIs: IWindowOpenable[] = [];
let runningTimeout: NodeJS.Timeout | undefined = undefined;
app.on('open-file', (event, path) => {
this.logService.trace('app#open-file: ', path);
event.preventDefault();
// Keep in array because more might come!
macOpenFileURIs.push(this.getWindowOpenableFromPathSync(path));
// Clear previous handler if any
if (runningTimeout !== undefined) {
clearTimeout(runningTimeout);
runningTimeout = undefined;
}
// Handle paths delayed in case more are coming!
// (debounced: each new open-file event within 100ms resets the timer)
runningTimeout = setTimeout(() => {
this.windowsMainService?.open({
context: OpenContext.DOCK /* can also be opening from finder while app is running */,
cli: this.environmentMainService.args,
urisToOpen: macOpenFileURIs,
gotoLineMode: false,
preferNewWindow: true /* dropping on the dock or opening from finder prefers to open in a new window */
});
macOpenFileURIs = [];
runningTimeout = undefined;
}, 100);
});
app.on('new-window-for-tab', () => {
this.windowsMainService?.openEmptyWindow({ context: OpenContext.DESKTOP }); //macOS native tab "+" button
});
//#region Bootstrap IPC Handlers
ipcMain.handle('vscode:fetchShellEnv', event => {
// Prefer to use the args and env from the target window
// when resolving the shell env. It is possible that
// a first window was opened from the UI but a second
// from the CLI and that has implications for whether to
// resolve the shell environment or not.
//
// Window can be undefined for e.g. the shared process
// that is not part of our windows registry!
const window = this.windowsMainService?.getWindowByWebContents(event.sender); // Note: this can be `undefined` for the shared process
let args: NativeParsedArgs;
let env: IProcessEnvironment;
if (window?.config) {
args = window.config;
env = { ...process.env, ...window.config.userEnv };
} else {
args = this.environmentMainService.args;
env = process.env;
}
// Resolve shell env
return resolveShellEnv(this.logService, args, env);
});
// NLS file access is restricted to the languages cache via validateNlsPath
ipcMain.handle('vscode:writeNlsFile', (event, path: unknown, data: unknown) => {
const uri = this.validateNlsPath([path]);
if (!uri || typeof data !== 'string') {
throw new Error('Invalid operation (vscode:writeNlsFile)');
}
return this.fileService.writeFile(uri, VSBuffer.fromString(data));
});
ipcMain.handle('vscode:readNlsFile', async (event, ...paths: unknown[]) => {
const uri = this.validateNlsPath(paths);
if (!uri) {
throw new Error('Invalid operation (vscode:readNlsFile)');
}
return (await this.fileService.readFile(uri)).value.toString();
});
// Dev tools & window reload helpers for the renderer
ipcMain.on('vscode:toggleDevTools', event => event.sender.toggleDevTools());
ipcMain.on('vscode:openDevTools', event => event.sender.openDevTools());
ipcMain.on('vscode:reloadWindow', event => event.sender.reload());
//#endregion
}
/**
 * Joins the given segments into a single path and validates it for NLS file
 * access: only absolute paths inside the languages cache folder are allowed
 * (case-insensitive comparison everywhere except Linux).
 *
 * @returns the validated path as a file URI, or `undefined` when rejected.
 */
private validateNlsPath(pathSegments: unknown[]): URI | undefined {

	// Stitch all string segments together; anything else is ignored.
	let assembled: string | undefined = undefined;
	for (const segment of pathSegments) {
		if (typeof segment !== 'string') {
			continue;
		}

		assembled = typeof assembled === 'string' ? join(assembled, segment) : segment;
	}

	// Reject anything that is not an absolute path within the languages cache
	if (typeof assembled !== 'string' || !isAbsolute(assembled) || !isEqualOrParent(assembled, this.environmentMainService.cachedLanguagesPath, !isLinux)) {
		return undefined;
	}

	return URI.file(assembled);
}
private onUnexpectedError(error: Error): void {
if (error) {
// take only the message and stack property
const friendlyError = {
message: `[uncaught exception in main]: ${error.message}`,
stack: error.stack
};
// handle on client side
this.windowsMainService?.sendToFocused('vscode:reportError', JSON.stringify(friendlyError));
}
this.logService.error(`[uncaught exception in main]: ${error}`);
if (error.stack) {
this.logService.error(error.stack);
}
}
/**
 * Boots the application: resolves the machine id, spins up the shared
 * process, creates all main-process services and channels, and finally
 * opens the first window(s).
 *
 * The steps below are strictly ordered — services depend on the shared
 * process handles, and windows depend on the services.
 */
async startup(): Promise<void> {
this.logService.debug('Starting VS Code');
this.logService.debug(`from: ${this.environmentMainService.appRoot}`);
this.logService.debug('args:', this.environmentMainService.args);
// Make sure we associate the program with the app user model id
// This will help Windows to associate the running program with
// any shortcut that is pinned to the taskbar and prevent showing
// two icons in the taskbar for the same app.
const win32AppUserModelId = this.productService.win32AppUserModelId;
if (isWindows && win32AppUserModelId) {
app.setAppUserModelId(win32AppUserModelId);
}
// Fix native tabs on macOS 10.13
// macOS enables a compatibility patch for any bundle ID beginning with
// "com.microsoft.", which breaks native tabs for VS Code when using this
// identifier (from the official build).
// Explicitly opt out of the patch here before creating any windows.
// See: https://github.com/microsoft/vscode/issues/35361#issuecomment-399794085
try {
if (isMacintosh && this.configurationService.getValue('window.nativeTabs') === true && !systemPreferences.getUserDefault('NSUseImprovedLayoutPass', 'boolean')) {
systemPreferences.setUserDefault('NSUseImprovedLayoutPass', 'boolean', true as any);
}
} catch (error) {
// best effort only: failing to set the default must not block startup
this.logService.error(error);
}
// Main process server (electron IPC based)
const mainProcessElectronServer = new ElectronIPCServer();
// Resolve unique machine ID
this.logService.trace('Resolving machine identifier...');
const machineId = await this.resolveMachineId();
this.logService.trace(`Resolved machine identifier: ${machineId}`);
// Shared process
const { sharedProcess, sharedProcessReady, sharedProcessClient } = this.setupSharedProcess(machineId);
// Services
const appInstantiationService = await this.initServices(machineId, sharedProcess, sharedProcessReady)
// Create driver (smoke-test automation support; only when a driver handle was passed)
if (this.environmentMainService.driverHandle) {
const server = await serveDriver(mainProcessElectronServer, this.environmentMainService.driverHandle, this.environmentMainService, appInstantiationService);
this.logService.info('Driver started at:', this.environmentMainService.driverHandle);
this._register(server);
}
// Setup Auth Handler
this._register(appInstantiationService.createInstance(ProxyAuthHandler));
// Init Channels
appInstantiationService.invokeFunction(accessor => this.initChannels(accessor, mainProcessElectronServer, sharedProcessClient));
// Open Windows
const windows = appInstantiationService.invokeFunction(accessor => this.openFirstWindow(accessor, mainProcessElectronServer));
// Post Open Windows Tasks
appInstantiationService.invokeFunction(accessor => this.afterWindowOpen(accessor, sharedProcess));
// Tracing: Stop tracing after windows are ready if enabled
if (this.environmentMainService.args.trace) {
appInstantiationService.invokeFunction(accessor => this.stopTracingEventually(accessor, windows));
}
}
private async resolveMachineId(): Promise<string> {
// We cache the machineId for faster lookups on startup
// and resolve it only once initially if not cached or we need to replace the macOS iBridge device
let machineId = this.stateMainService.getItem<string>(machineIdKey);
if (!machineId || (isMacintosh && machineId === '6c9d2bc8f91b89624add29c0abeae7fb42bf539fa1cdb2e3e57cd668fa9bcead')) {
machineId = await getMachineId();
this.stateMainService.setItem(machineIdKey, machineId);
}
return machineId;
}
/**
 * Creates the shared process and returns it together with two promises:
 * `sharedProcessClient` resolves once a message-port connection exists,
 * `sharedProcessReady` resolves with the same client only after the shared
 * process has signaled full readiness.
 */
private setupSharedProcess(machineId: string): { sharedProcess: SharedProcess, sharedProcessReady: Promise<MessagePortClient>, sharedProcessClient: Promise<MessagePortClient> } {
	const sharedProcess = this._register(this.mainInstantiationService.createInstance(SharedProcess, machineId, this.userEnv));

	const connect = async (): Promise<MessagePortClient> => {
		this.logService.trace('Main->SharedProcess#connect');
		const port = await sharedProcess.connect();
		this.logService.trace('Main->SharedProcess#connect: connection established');

		return new MessagePortClient(port, 'main');
	};
	const sharedProcessClient = connect();

	const awaitReadiness = async (): Promise<MessagePortClient> => {
		await sharedProcess.whenReady();

		return sharedProcessClient;
	};
	const sharedProcessReady = awaitReadiness();

	return { sharedProcess, sharedProcessReady, sharedProcessClient };
}
/**
 * Builds the main-process service collection and returns a child
 * instantiation service that can resolve all of them.
 *
 * Most services are registered lazily via `SyncDescriptor`; only the
 * backup service requires (and gets) explicit async initialization here.
 * Telemetry falls back to a null implementation when disabled.
 */
private async initServices(machineId: string, sharedProcess: SharedProcess, sharedProcessReady: Promise<MessagePortClient>): Promise<IInstantiationService> {
const services = new ServiceCollection();
// Update (platform specific implementations)
switch (process.platform) {
case 'win32':
services.set(IUpdateService, new SyncDescriptor(Win32UpdateService));
break;
case 'linux':
if (isLinuxSnap) {
services.set(IUpdateService, new SyncDescriptor(SnapUpdateService, [process.env['SNAP'], process.env['SNAP_REVISION']]));
} else {
services.set(IUpdateService, new SyncDescriptor(LinuxUpdateService));
}
break;
case 'darwin':
services.set(IUpdateService, new SyncDescriptor(DarwinUpdateService));
break;
}
// Windows
services.set(IWindowsMainService, new SyncDescriptor(WindowsMainService, [machineId, this.userEnv]));
// Dialogs
services.set(IDialogMainService, new SyncDescriptor(DialogMainService))
// Launch
services.set(ILaunchMainService, new SyncDescriptor(LaunchMainService));
// Diagnostics (lives in the shared process; proxied over the delayed channel)
services.set(IDiagnosticsService, ProxyChannel.toService(getDelayedChannel(sharedProcessReady.then(client => client.getChannel('diagnostics')))));
// Issues
services.set(IIssueMainService, new SyncDescriptor(IssueMainService, [this.userEnv]));
// Encryption
services.set(IEncryptionMainService, new SyncDescriptor(EncryptionMainService, [machineId]));
// Keyboard Layout
services.set(IKeyboardLayoutMainService, new SyncDescriptor(KeyboardLayoutMainService));
// Native Host
services.set(INativeHostMainService, new SyncDescriptor(NativeHostMainService, [sharedProcess]));
// Webview Manager
services.set(IWebviewManagerService, new SyncDescriptor(WebviewMainService));
// Workspaces
services.set(IWorkspacesService, new SyncDescriptor(WorkspacesMainService));
services.set(IWorkspacesManagementMainService, new SyncDescriptor(WorkspacesManagementMainService));
services.set(IWorkspacesHistoryMainService, new SyncDescriptor(WorkspacesHistoryMainService));
// Menubar
services.set(IMenubarMainService, new SyncDescriptor(MenubarMainService));
// Extension URL Trust
services.set(IExtensionUrlTrustService, new SyncDescriptor(ExtensionUrlTrustService));
// Storage
services.set(IStorageMainService, new SyncDescriptor(StorageMainService));
// External terminal (platform specific implementations)
if (isWindows) {
services.set(IExternalTerminalMainService, new SyncDescriptor(WindowsExternalTerminalService));
} else if (isMacintosh) {
services.set(IExternalTerminalMainService, new SyncDescriptor(MacExternalTerminalService));
} else if (isLinux) {
services.set(IExternalTerminalMainService, new SyncDescriptor(LinuxExternalTerminalService));
}
// Backups (created eagerly: needs async initialization below)
const backupMainService = new BackupMainService(this.environmentMainService, this.configurationService, this.logService);
services.set(IBackupMainService, backupMainService);
// URL handling
services.set(IURLService, new SyncDescriptor(NativeURLService));
// Telemetry (only when enabled by product and not opted out)
if (!this.environmentMainService.isExtensionDevelopment && !this.environmentMainService.args['disable-telemetry'] && !!this.productService.enableTelemetry) {
const channel = getDelayedChannel(sharedProcessReady.then(client => client.getChannel('telemetryAppender')));
const appender = new TelemetryAppenderClient(channel);
const commonProperties = resolveCommonProperties(this.fileService, release(), hostname(), process.arch, this.productService.commit, this.productService.version, machineId, this.productService.msftInternalDomains, this.environmentMainService.installSourcePath);
const piiPaths = [this.environmentMainService.appRoot, this.environmentMainService.extensionsPath];
const config: ITelemetryServiceConfig = { appender, commonProperties, piiPaths, sendErrorTelemetry: true };
services.set(ITelemetryService, new SyncDescriptor(TelemetryService, [config]));
} else {
services.set(ITelemetryService, NullTelemetryService);
}
// Init services that require it
await backupMainService.initialize();
return this.mainInstantiationService.createChild(services);
}
/**
 * Registers all main-process IPC channels. Most channels go onto the
 * Electron IPC server for renderers; a few (native host, storage, log
 * level, logger) are additionally registered with the shared process
 * client, and the launch channel lives on the node.js server.
 */
private initChannels(accessor: ServicesAccessor, mainProcessElectronServer: ElectronIPCServer, sharedProcessClient: Promise<MessagePortClient>): void {

	// Launch: this one is explicitly registered to the node.js
	// server because when a second instance starts up, that is
	// the only possible connection between the first and the
	// second instance. Electron IPC does not work across apps.
	this.mainProcessNodeIpcServer.registerChannel('launch', ProxyChannel.fromService(accessor.get(ILaunchMainService), { disableMarshalling: true }));

	// Configuration
	mainProcessElectronServer.registerChannel(UserConfigurationFileServiceId, ProxyChannel.fromService(new UserConfigurationFileService(this.environmentMainService, this.fileService, this.logService)));

	// Update
	mainProcessElectronServer.registerChannel('update', new UpdateChannel(accessor.get(IUpdateService)));

	// Issues
	mainProcessElectronServer.registerChannel('issue', ProxyChannel.fromService(accessor.get(IIssueMainService)));

	// Encryption
	mainProcessElectronServer.registerChannel('encryption', ProxyChannel.fromService(accessor.get(IEncryptionMainService)));

	// Signing
	mainProcessElectronServer.registerChannel('sign', ProxyChannel.fromService(accessor.get(ISignService)));

	// Keyboard Layout
	mainProcessElectronServer.registerChannel('keyboardLayout', ProxyChannel.fromService(accessor.get(IKeyboardLayoutMainService)));

	// Native host (main & shared process)
	this.nativeHostMainService = accessor.get(INativeHostMainService);
	const nativeHostChannel = ProxyChannel.fromService(this.nativeHostMainService);
	mainProcessElectronServer.registerChannel('nativeHost', nativeHostChannel);
	sharedProcessClient.then(client => client.registerChannel('nativeHost', nativeHostChannel));

	// Workspaces
	mainProcessElectronServer.registerChannel('workspaces', ProxyChannel.fromService(accessor.get(IWorkspacesService)));

	// Menubar
	mainProcessElectronServer.registerChannel('menubar', ProxyChannel.fromService(accessor.get(IMenubarMainService)));

	// URL handling
	mainProcessElectronServer.registerChannel('url', ProxyChannel.fromService(accessor.get(IURLService)));

	// Extension URL Trust
	mainProcessElectronServer.registerChannel('extensionUrlTrust', ProxyChannel.fromService(accessor.get(IExtensionUrlTrustService)));

	// Webview Manager
	mainProcessElectronServer.registerChannel('webview', ProxyChannel.fromService(accessor.get(IWebviewManagerService)));

	// Storage (main & shared process)
	const storageChannel = this._register(new StorageDatabaseChannel(this.logService, accessor.get(IStorageMainService)));
	mainProcessElectronServer.registerChannel('storage', storageChannel);
	sharedProcessClient.then(client => client.registerChannel('storage', storageChannel));

	// External Terminal
	mainProcessElectronServer.registerChannel('externalTerminal', ProxyChannel.fromService(accessor.get(IExternalTerminalMainService)));

	// Log Level (main & shared process)
	const logLevelChannel = new LogLevelChannel(accessor.get(ILogService));
	mainProcessElectronServer.registerChannel('logLevel', logLevelChannel);
	sharedProcessClient.then(client => client.registerChannel('logLevel', logLevelChannel));

	// Logger (main & shared process)
	const loggerChannel = new LoggerChannel(accessor.get(ILoggerService));
	mainProcessElectronServer.registerChannel('logger', loggerChannel);
	sharedProcessClient.then(client => client.registerChannel('logger', loggerChannel));

	// Extension Host Debug Broadcasting
	mainProcessElectronServer.registerChannel('extensionhostdebugservice', new ElectronExtensionHostDebugBroadcastChannel(accessor.get(IWindowsMainService)));
}
/**
 * Opens the initial set of windows and installs URL handlers.
 *
 * Decides what to open from, in priority order: pending protocol links,
 * the `--new-window` flag, macOS open-file events, and finally the CLI
 * paths. Also transitions the lifecycle to the `Ready` phase before any
 * window is opened. Returns the windows that were opened.
 */
private openFirstWindow(accessor: ServicesAccessor, mainProcessElectronServer: ElectronIPCServer): ICodeWindow[] {
	const windowsMainService = this.windowsMainService = accessor.get(IWindowsMainService);
	const urlService = accessor.get(IURLService);
	const nativeHostMainService = accessor.get(INativeHostMainService);

	// Signal phase: ready (services set)
	this.lifecycleMainService.phase = LifecycleMainPhase.Ready;

	// Check for initial URLs to handle from protocol link invocations
	const pendingWindowOpenablesFromProtocolLinks: IWindowOpenable[] = [];
	const pendingProtocolLinksToHandle = [
		// Windows/Linux: protocol handler invokes CLI with --open-url
		...this.environmentMainService.args['open-url'] ? this.environmentMainService.args._urls || [] : [],
		// macOS: open-url events
		...((<any>global).getOpenUrls() || []) as string[]
	].map(url => {
		try {
			return { uri: URI.parse(url), url };
		} catch {
			// malformed URL: drop it
			return undefined;
		}
	}).filter((obj): obj is { uri: URI, url: string } => {
		if (!obj) {
			return false;
		}

		// If URI should be blocked, filter it out
		if (this.shouldBlockURI(obj.uri)) {
			return false;
		}

		// Filter out any protocol link that wants to open as window so that
		// we open the right set of windows on startup and not restore the
		// previous workspace too.
		const windowOpenable = this.getWindowOpenableFromProtocolLink(obj.uri);
		if (windowOpenable) {
			pendingWindowOpenablesFromProtocolLinks.push(windowOpenable);
			return false;
		}

		return true;
	});

	// Create a URL handler to open file URIs in the active window
	// or open new windows. The URL handler will be invoked from
	// protocol invocations outside of VSCode.
	const app = this;
	const environmentService = this.environmentMainService;
	const productService = this.productService;
	urlService.registerHandler({
		async handleURL(uri: URI, options?: IOpenURLOptions): Promise<boolean> {
			// Rewrite "<protocol>://workspace?<file uri>" links into plain file links
			if (uri.scheme === productService.urlProtocol && uri.path === 'workspace') {
				uri = uri.with({
					authority: 'file',
					path: URI.parse(uri.query).path,
					query: ''
				});
			}

			// If URI should be blocked, behave as if it's handled
			if (app.shouldBlockURI(uri)) {
				return true;
			}

			// Check for URIs to open in window
			const windowOpenableFromProtocolLink = app.getWindowOpenableFromProtocolLink(uri);
			if (windowOpenableFromProtocolLink) {
				const [window] = windowsMainService.open({
					context: OpenContext.API,
					cli: { ...environmentService.args },
					urisToOpen: [windowOpenableFromProtocolLink],
					gotoLineMode: true
					// remoteAuthority: will be determined based on windowOpenableFromProtocolLink
				});

				window.focus(); // this should help ensuring that the right window gets focus when multiple are opened

				return true;
			}

			// If we have not yet handled the URI and we have no window opened (macOS only)
			// we first open a window and then try to open that URI within that window
			if (isMacintosh && windowsMainService.getWindowCount() === 0) {
				const [window] = windowsMainService.open({
					context: OpenContext.API,
					cli: { ...environmentService.args },
					forceEmpty: true,
					gotoLineMode: true,
					remoteAuthority: getRemoteAuthority(uri)
				});

				await window.ready();

				return urlService.open(uri, options);
			}

			return false;
		}
	});

	// Create a URL handler which forwards to the last active window
	const activeWindowManager = this._register(new ActiveWindowManager({
		onDidOpenWindow: nativeHostMainService.onDidOpenWindow,
		onDidFocusWindow: nativeHostMainService.onDidFocusWindow,
		getActiveWindowId: () => nativeHostMainService.getActiveWindowId(-1)
	}));
	const activeWindowRouter = new StaticRouter(ctx => activeWindowManager.getActiveClientId().then(id => ctx === id));
	const urlHandlerRouter = new URLHandlerRouter(activeWindowRouter);
	const urlHandlerChannel = mainProcessElectronServer.getChannel('urlHandler', urlHandlerRouter);
	urlService.registerHandler(new URLHandlerChannelClient(urlHandlerChannel));

	// Watch Electron URLs and forward them to the UrlService
	this._register(new ElectronURLListener(pendingProtocolLinksToHandle, urlService, windowsMainService, this.environmentMainService, this.productService));

	// Open our first window
	const args = this.environmentMainService.args;
	const macOpenFiles: string[] = (<any>global).macOpenFiles;
	const context = isLaunchedFromCli(process.env) ? OpenContext.CLI : OpenContext.DESKTOP;
	const hasCliArgs = args._.length;
	const hasFolderURIs = !!args['folder-uri'];
	const hasFileURIs = !!args['file-uri'];
	const noRecentEntry = args['skip-add-to-recently-opened'] === true;
	const waitMarkerFileURI = args.wait && args.waitMarkerFilePath ? URI.file(args.waitMarkerFilePath) : undefined;
	const remoteAuthority = args.remote || undefined;

	// check for a pending window to open from URI
	// e.g. when running code with --open-uri from
	// a protocol handler
	if (pendingWindowOpenablesFromProtocolLinks.length > 0) {
		return windowsMainService.open({
			context,
			cli: args,
			urisToOpen: pendingWindowOpenablesFromProtocolLinks,
			gotoLineMode: true,
			initialStartup: true
			// remoteAuthority: will be determined based on pendingWindowOpenablesFromProtocolLinks
		});
	}

	// new window if "-n"
	if (args['new-window'] && !hasCliArgs && !hasFolderURIs && !hasFileURIs) {
		return windowsMainService.open({
			context,
			cli: args,
			forceNewWindow: true,
			forceEmpty: true,
			noRecentEntry,
			waitMarkerFileURI,
			initialStartup: true,
			remoteAuthority
		});
	}

	// mac: open-file event received on startup
	if (macOpenFiles.length && !hasCliArgs && !hasFolderURIs && !hasFileURIs) {
		return windowsMainService.open({
			context: OpenContext.DOCK,
			cli: args,
			urisToOpen: macOpenFiles.map(file => this.getWindowOpenableFromPathSync(file)),
			noRecentEntry,
			waitMarkerFileURI,
			initialStartup: true,
			// remoteAuthority: will be determined based on macOpenFiles
		});
	}

	// default: read paths from cli
	return windowsMainService.open({
		context,
		cli: args,
		forceNewWindow: args['new-window'] || (!hasCliArgs && args['unity-launch']),
		diffMode: args.diff,
		noRecentEntry,
		waitMarkerFileURI,
		gotoLineMode: args.goto,
		initialStartup: true,
		remoteAuthority
	});
}
/**
 * Asks the user (Windows only) whether a file-authority protocol URI
 * coming from an external application should really be opened.
 *
 * @returns true when the open should be BLOCKED (user declined),
 * false to proceed. On non-Windows platforms this never blocks.
 */
private shouldBlockURI(uri: URI): boolean {
	if (uri.authority === Schemas.file && isWindows) {
		const res = dialog.showMessageBoxSync({
			title: this.productService.nameLong,
			type: 'question',
			buttons: [
				mnemonicButtonLabel(localize({ key: 'open', comment: ['&& denotes a mnemonic'] }, "&&Yes")),
				mnemonicButtonLabel(localize({ key: 'cancel', comment: ['&& denotes a mnemonic'] }, "&&No")),
			],
			defaultId: 0,
			cancelId: 1,
			message: localize('confirmOpenMessage', "An external application wants to open '{0}' in {1}. Do you want to open this file or folder?", getPathLabel(uri.fsPath, this.environmentMainService), this.productService.nameShort),
			detail: localize('confirmOpenDetail', "If you did not initiate this request, it may represent an attempted attack on your system. Unless you took an explicit action to initiate this request, you should press 'No'"),
			noLink: true
		});

		if (res === 1) { // index 1 is the "No" button
			return true;
		}
	}

	return false;
}
/**
 * Converts a protocol-link URI into a window openable (file, folder or
 * workspace), when possible.
 *
 * @returns undefined for URIs without a path or with an authority that
 * is neither `file` nor `vscode-remote`.
 */
private getWindowOpenableFromProtocolLink(uri: URI): IWindowOpenable | undefined {
	if (!uri.path) {
		return undefined;
	}

	// File path
	if (uri.authority === Schemas.file) {
		// we configure as fileUri, but later validation will
		// make sure to open as folder or workspace if possible
		return { fileUri: URI.file(uri.fsPath) };
	}

	// Remote path
	else if (uri.authority === Schemas.vscodeRemote) {
		// Example conversion:
		// From: vscode://vscode-remote/wsl+ubuntu/mnt/c/GitDevelopment/monaco
		// To: vscode-remote://wsl+ubuntu/mnt/c/GitDevelopment/monaco
		const secondSlash = uri.path.indexOf(posix.sep, 1 /* skip over the leading slash */);
		if (secondSlash !== -1) {
			// The first path segment is the remote authority (e.g. "wsl+ubuntu")
			const authority = uri.path.substring(1, secondSlash);
			const path = uri.path.substring(secondSlash);
			const remoteUri = URI.from({ scheme: Schemas.vscodeRemote, authority, path, query: uri.query, fragment: uri.fragment });

			if (hasWorkspaceFileExtension(path)) {
				return { workspaceUri: remoteUri };
			} else if (/:[\d]+$/.test(path)) { // path with :line:column syntax
				return { fileUri: remoteUri };
			} else {
				return { folderUri: remoteUri };
			}
		}
	}

	return undefined;
}
/**
 * Synchronously classifies a filesystem path as a folder, workspace
 * file or plain file openable. When the path cannot be stat'ed (missing
 * or inaccessible), it is treated as a plain file openable.
 */
private getWindowOpenableFromPathSync(path: string): IWindowOpenable {
	let isDirectory = false;
	try {
		isDirectory = statSync(path).isDirectory();
	} catch (error) {
		// stat failed: fall back to opening as a plain file
		return { fileUri: URI.file(path) };
	}

	if (isDirectory) {
		return { folderUri: URI.file(path) };
	}

	return hasWorkspaceFileExtension(path) ? { workspaceUri: URI.file(path) } : { fileUri: URI.file(path) };
}
/**
 * Runs after the first window has opened: transitions the lifecycle to
 * the `AfterWindowOpen` phase, observes the shared process for errors
 * (logging + telemetry), installs the Windows mutex, registers the
 * remote-resource protocol handler, initializes the update service,
 * warms up the shell environment resolution, and writes a
 * crash-reporter id into argv.json on first start.
 */
private async afterWindowOpen(accessor: ServicesAccessor, sharedProcess: SharedProcess): Promise<void> {

	// Signal phase: after window open
	this.lifecycleMainService.phase = LifecycleMainPhase.AfterWindowOpen;

	// Observe shared process for errors
	let willShutdown = false;
	once(this.lifecycleMainService.onWillShutdown)(() => willShutdown = true);
	const telemetryService = accessor.get(ITelemetryService);
	this._register(sharedProcess.onDidError(({ type, details }) => {

		// Logging
		let message: string;
		switch (type) {
			case WindowError.UNRESPONSIVE:
				message = 'SharedProcess: detected unresponsive window';
				break;
			case WindowError.CRASHED:
				message = `SharedProcess: crashed (detail: ${details?.reason ?? '<unknown>'}, code: ${details?.exitCode ?? '<unknown>'})`;
				break;
			case WindowError.LOAD:
				message = `SharedProcess: failed to load (detail: ${details?.reason ?? '<unknown>'}, code: ${details?.exitCode ?? '<unknown>'})`;
				break;
		}
		onUnexpectedError(new Error(message));

		// Telemetry
		type SharedProcessErrorClassification = {
			type: { classification: 'SystemMetaData', purpose: 'PerformanceAndHealth', isMeasurement: true };
			reason: { classification: 'SystemMetaData', purpose: 'PerformanceAndHealth', isMeasurement: true };
			code: { classification: 'SystemMetaData', purpose: 'PerformanceAndHealth', isMeasurement: true };
			visible: { classification: 'SystemMetaData', purpose: 'PerformanceAndHealth', isMeasurement: true };
			shuttingdown: { classification: 'SystemMetaData', purpose: 'PerformanceAndHealth', isMeasurement: true };
		};
		type SharedProcessErrorEvent = {
			type: WindowError;
			reason: string | undefined;
			code: number | undefined;
			visible: boolean;
			shuttingdown: boolean;
		};
		telemetryService.publicLog2<SharedProcessErrorEvent, SharedProcessErrorClassification>('sharedprocesserror', {
			type,
			reason: details?.reason,
			code: details?.exitCode,
			visible: sharedProcess.isVisible(),
			shuttingdown: willShutdown
		});
	}));

	// Windows: install mutex
	const win32MutexName = this.productService.win32MutexName;
	if (isWindows && win32MutexName) {
		try {
			const WindowsMutex = (require.__$__nodeRequire('windows-mutex') as typeof import('windows-mutex')).Mutex;
			const mutex = new WindowsMutex(win32MutexName);
			// Hold the mutex for the whole app lifetime; release on shutdown
			once(this.lifecycleMainService.onWillShutdown)(() => mutex.release());
		} catch (error) {
			this.logService.error(error);
		}
	}

	// Remote Authorities: rewrite vscode-remote-resource: requests to http:
	protocol.registerHttpProtocol(Schemas.vscodeRemoteResource, (request, callback) => {
		callback({
			url: request.url.replace(/^vscode-remote-resource:/, 'http:'),
			method: request.method
		});
	});

	// Initialize update service
	const updateService = accessor.get(IUpdateService);
	if (updateService instanceof Win32UpdateService || updateService instanceof LinuxUpdateService || updateService instanceof DarwinUpdateService) {
		updateService.initialize();
	}

	// Start to fetch shell environment (if needed) after window has opened
	// Since this operation can take a long time, we want to warm it up while
	// the window is opening.
	resolveShellEnv(this.logService, this.environmentMainService.args, process.env);

	// If enable-crash-reporter argv is undefined then this is a fresh start,
	// based on telemetry.enableCrashreporter settings, generate a UUID which
	// will be used as crash reporter id and also update the json file.
	try {
		const argvContent = await this.fileService.readFile(this.environmentMainService.argvResource);
		const argvString = argvContent.value.toString();
		const argvJSON = JSON.parse(stripComments(argvString));
		if (argvJSON['enable-crash-reporter'] === undefined) {
			const enableCrashReporterSetting = this.configurationService.getValue('telemetry.enableCrashReporter');
			const enableCrashReporter = typeof enableCrashReporterSetting === 'boolean' ? enableCrashReporterSetting : true;
			const additionalArgvContent = [
				'',
				' // Allows to disable crash reporting.',
				' // Should restart the app if the value is changed.',
				` "enable-crash-reporter": ${enableCrashReporter},`,
				'',
				' // Unique id used for correlating crash reports sent from this instance.',
				' // Do not edit this value.',
				` "crash-reporter-id": "${generateUuid()}"`,
				'}'
			];
			// Strip the trailing "\n}" of the existing JSON and splice in the new keys
			const newArgvString = argvString.substring(0, argvString.length - 2).concat(',\n', additionalArgvContent.join('\n'));
			await this.fileService.writeFile(this.environmentMainService.argvResource, VSBuffer.fromString(newArgvString));
		}
	} catch (error) {
		this.logService.error(error);
	}
}
/**
 * Stops content tracing once all given windows are ready, or after a
 * 30 second timeout — whichever comes first. When stopped before the
 * timeout, shows a dialog pointing the user at the trace file;
 * otherwise only logs the file location.
 */
private stopTracingEventually(accessor: ServicesAccessor, windows: ICodeWindow[]): void {
	this.logService.info(`Tracing: waiting for windows to get ready...`);

	const dialogMainService = accessor.get(IDialogMainService);

	let recordingStopped = false;
	const stopRecording = async (timeout: boolean) => {
		if (recordingStopped) {
			return;
		}
		recordingStopped = true; // only once

		const path = await contentTracing.stopRecording(joinPath(this.environmentMainService.userHome, `${this.productService.applicationName}-${Math.random().toString(16).slice(-4)}.trace.txt`).fsPath);

		if (!timeout) {
			dialogMainService.showMessageBox({
				title: this.productService.nameLong,
				type: 'info',
				message: localize('trace.message', "Successfully created trace."),
				detail: localize('trace.detail', "Please create an issue and manually attach the following file:\n{0}", path),
				buttons: [mnemonicButtonLabel(localize({ key: 'trace.ok', comment: ['&& denotes a mnemonic'] }, "&&OK"))],
				defaultId: 0,
				noLink: true
			}, withNullAsUndefined(BrowserWindow.getFocusedWindow()));
		} else {
			this.logService.info(`Tracing: data recorded (after 30s timeout) to ${path}`);
		}
	};

	// Wait up to 30s before creating the trace anyways
	const timeoutHandle = setTimeout(() => stopRecording(true), 30000);

	// Wait for all windows to get ready and stop tracing then
	Promise.all(windows.map(window => window.ready())).then(() => {
		clearTimeout(timeoutHandle);
		stopRecording(false);
	});
}
}
|
CodeApplication
|
product.rs
|
use crate::commands::math::reducers::{reducer_for, Reduce};
use crate::commands::math::utils::run_with_function;
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{
hir::{convert_number_to_u64, Number},
Primitive, Signature, UntaggedValue, Value,
};
/// The `math product` subcommand: multiplies together the numbers in
/// the input stream (or the numeric columns of a table).
pub struct SubCommand;

#[async_trait]
impl WholeStreamCommand for SubCommand {
    fn name(&self) -> &str {
        "math product"
    }

    fn signature(&self) -> Signature {
        Signature::build("math product")
    }

    fn usage(&self) -> &str {
        "Finds the product of a list of numbers or tables"
    }

    /// Delegates to the shared math-command driver, using the
    /// `product` aggregation function defined in this module.
    async fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        run_with_function(
            RunnableContext {
                input: args.input,
                registry: registry.clone(),
                shell_manager: args.shell_manager,
                host: args.host,
                ctrl_c: args.ctrl_c,
                current_errors: args.current_errors,
                name: args.call_info.name_tag,
                raw_input: args.raw_input,
            },
            product,
        )
        .await
    }

    fn examples(&self) -> Vec<Example> {
        vec![Example {
            description: "Get the product of a list of numbers",
            example: "echo [2 3 3 4] | math product",
            result: Some(vec![UntaggedValue::int(72).into()]),
        }]
    }
}
/// If `value` holds a primitive integer, reinterpret it as a filesize
/// (byte count) value; every other value maps to `None`.
fn to_byte(value: &Value) -> Option<Value> {
    if let UntaggedValue::Primitive(Primitive::Int(num)) = &value.value {
        let bytes = convert_number_to_u64(&Number::Int(num.clone()));
        Some(UntaggedValue::Primitive(Primitive::Filesize(bytes)).into_untagged_value())
    } else {
        None
    }
}
/// Calculate product of given values
pub fn product(values: &[Value], name: &Tag) -> Result<Value, ShellError> {
let prod = reducer_for(Reduce::Product);
|
})?;
match first {
v if v.is_filesize() => to_byte(&prod(
UntaggedValue::int(1).into_untagged_value(),
values
.iter()
.map(|v| match v {
Value {
value: UntaggedValue::Primitive(Primitive::Filesize(num)),
..
} => UntaggedValue::int(*num as usize).into_untagged_value(),
other => other.clone(),
})
.collect::<Vec<_>>(),
)?)
.ok_or_else(|| {
ShellError::labeled_error(
"could not convert to decimal",
"could not convert to decimal",
&name.span,
)
}),
v if v.is_none() => prod(
UntaggedValue::int(1).into_untagged_value(),
values
.iter()
.map(|v| match v {
Value {
value: UntaggedValue::Primitive(Primitive::Nothing),
..
} => UntaggedValue::int(1).into_untagged_value(),
other => other.clone(),
})
.collect::<Vec<_>>(),
),
_ => prod(UntaggedValue::int(1).into_untagged_value(), values.to_vec()),
}
}
#[cfg(test)]
mod tests {
    use super::SubCommand;

    /// Runs the command's documented examples through the shared
    /// example-testing harness and checks their expected results.
    #[test]
    fn examples_work_as_expected() {
        use crate::examples::test as test_examples;
        test_examples(SubCommand {})
    }
}
|
let first = values.get(0).ok_or_else(|| {
ShellError::unexpected("Cannot perform aggregate math operation on empty data")
|
agent_groups_request_builder.go
|
package agentgroups
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be "github.com/microsoftgraph/msgraph-beta-sdk-go/models"
i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459 "github.com/microsoftgraph/msgraph-beta-sdk-go/models/odataerrors"
i88e5d68b790ce6f35a00fd437c3d20c25c4a47bfa2b59e43d61a8d241b56dc43 "github.com/microsoftgraph/msgraph-beta-sdk-go/onpremisespublishingprofiles/item/agentgroups/item/publishedresources/item/agentgroups/count"
)
// AgentGroupsRequestBuilder provides operations to manage the agentGroups property of the microsoft.graph.publishedResource entity.
// Instances are cheap value holders; they carry no connection state beyond the request adapter.
type AgentGroupsRequestBuilder struct {
    // Path parameters for the request
    pathParameters map[string]string
    // The request adapter to use to execute the requests.
    requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
    // Url template to use to build the URL for the current request builder
    urlTemplate string
}
// AgentGroupsRequestBuilderGetQueryParameters list of onPremisesAgentGroups that a publishedResource is assigned to. Read-only. Nullable.
// The uriparametername tags map each field onto its OData system query option (e.g. %24count -> $count).
type AgentGroupsRequestBuilderGetQueryParameters struct {
    // Include count of items
    Count *bool `uriparametername:"%24count"`
    // Expand related entities
    Expand []string `uriparametername:"%24expand"`
    // Filter items by property values
    Filter *string `uriparametername:"%24filter"`
    // Order items by property values
    Orderby []string `uriparametername:"%24orderby"`
    // Search items by search phrases
    Search *string `uriparametername:"%24search"`
    // Select properties to be returned
    Select []string `uriparametername:"%24select"`
    // Skip the first n items
    Skip *int32 `uriparametername:"%24skip"`
    // Show only the first n items
    Top *int32 `uriparametername:"%24top"`
}
// AgentGroupsRequestBuilderGetRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
// All fields are optional; a nil configuration yields a plain GET request.
type AgentGroupsRequestBuilderGetRequestConfiguration struct {
    // Request headers
    Headers map[string]string
    // Request options
    Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
    // Request query parameters
    QueryParameters *AgentGroupsRequestBuilderGetQueryParameters
}
// AgentGroupsRequestBuilderPostRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
// POST requests take no query parameters, so only headers and middleware options are configurable.
type AgentGroupsRequestBuilderPostRequestConfiguration struct {
    // Request headers
    Headers map[string]string
    // Request options
    Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
}
// NewAgentGroupsRequestBuilderInternal instantiates a new AgentGroupsRequestBuilder and sets the default values.
func NewAgentGroupsRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*AgentGroupsRequestBuilder) {
    // Defensive copy: later mutation of the caller's map must not leak into this builder.
    params := make(map[string]string)
    for key, value := range pathParameters {
        params[key] = value
    }
    return &AgentGroupsRequestBuilder{
        urlTemplate: "{+baseurl}/onPremisesPublishingProfiles/{onPremisesPublishingProfile%2Did}/agentGroups/{onPremisesAgentGroup%2Did}/publishedResources/{publishedResource%2Did}/agentGroups{?%24top,%24skip,%24search,%24filter,%24count,%24orderby,%24select,%24expand}",
        pathParameters: params,
        requestAdapter: requestAdapter,
    }
}
// NewAgentGroupsRequestBuilder instantiates a new AgentGroupsRequestBuilder and sets the default values.
func NewAgentGroupsRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*AgentGroupsRequestBuilder) {
    // The raw URL is passed through the well-known "request-raw-url" path parameter.
    return NewAgentGroupsRequestBuilderInternal(map[string]string{
        "request-raw-url": rawUrl,
    }, requestAdapter)
}
// Count the count property
// Returns a builder for the $count endpoint of this collection, sharing this builder's path parameters and adapter.
func (m *AgentGroupsRequestBuilder) Count()(*i88e5d68b790ce6f35a00fd437c3d20c25c4a47bfa2b59e43d61a8d241b56dc43.CountRequestBuilder) {
    return i88e5d68b790ce6f35a00fd437c3d20c25c4a47bfa2b59e43d61a8d241b56dc43.NewCountRequestBuilderInternal(m.pathParameters, m.requestAdapter);
}
// CreateGetRequestInformation list of onPremisesAgentGroups that a publishedResource is assigned to. Read-only. Nullable.
// Convenience overload that applies no request configuration.
func (m *AgentGroupsRequestBuilder) CreateGetRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
    return m.CreateGetRequestInformationWithRequestConfiguration(nil);
}
// CreateGetRequestInformationWithRequestConfiguration list of onPremisesAgentGroups that a publishedResource is assigned to. Read-only. Nullable.
// Builds (but does not send) the GET request; requestConfiguration may be nil.
func (m *AgentGroupsRequestBuilder) CreateGetRequestInformationWithRequestConfiguration(requestConfiguration *AgentGroupsRequestBuilderGetRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
    requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.GET
    if requestConfiguration != nil {
        if requestConfiguration.QueryParameters != nil {
            requestInfo.AddQueryParameters(*(requestConfiguration.QueryParameters))
        }
        requestInfo.AddRequestHeaders(requestConfiguration.Headers)
        requestInfo.AddRequestOptions(requestConfiguration.Options)
    }
    return requestInfo, nil
}
// CreatePostRequestInformation create new navigation property to agentGroups for onPremisesPublishingProfiles
// Convenience overload that applies no request configuration.
func (m *AgentGroupsRequestBuilder) CreatePostRequestInformation(body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
    return m.CreatePostRequestInformationWithRequestConfiguration(body, nil);
}
// CreatePostRequestInformationWithRequestConfiguration create new navigation property to agentGroups for onPremisesPublishingProfiles
// Builds (but does not send) the POST request, serializing body as JSON; requestConfiguration may be nil.
func (m *AgentGroupsRequestBuilder) CreatePostRequestInformationWithRequestConfiguration(body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable, requestConfiguration *AgentGroupsRequestBuilderPostRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
    requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.POST
    requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", body)
    if requestConfiguration != nil {
        requestInfo.AddRequestHeaders(requestConfiguration.Headers)
        requestInfo.AddRequestOptions(requestConfiguration.Options)
    }
    return requestInfo, nil
}
// Get list of onPremisesAgentGroups that a publishedResource is assigned to. Read-only. Nullable.
// Convenience overload with no request configuration and the default response handler.
func (m *AgentGroupsRequestBuilder) Get()(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupCollectionResponseable, error) {
    return m.GetWithRequestConfigurationAndResponseHandler(nil, nil);
}
// GetWithRequestConfigurationAndResponseHandler list of onPremisesAgentGroups that a publishedResource is assigned to. Read-only. Nullable.
// Sends the GET request; 4XX/5XX responses are surfaced as OData errors via the error mapping.
func (m *AgentGroupsRequestBuilder) GetWithRequestConfigurationAndResponseHandler(requestConfiguration *AgentGroupsRequestBuilderGetRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupCollectionResponseable, error) {
    requestInfo, err := m.CreateGetRequestInformationWithRequestConfiguration(requestConfiguration);
    if err != nil {
        return nil, err
    }
    errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {
        "4XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,
        "5XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,
    }
    res, err := m.requestAdapter.SendAsync(requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateOnPremisesAgentGroupCollectionResponseFromDiscriminatorValue, responseHandler, errorMapping)
    if err != nil {
        return nil, err
    }
    return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupCollectionResponseable), nil
}
// Post create new navigation property to agentGroups for onPremisesPublishingProfiles
// Convenience overload with no request configuration and the default response handler.
func (m *AgentGroupsRequestBuilder) Post(body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable, error) {
    return m.PostWithRequestConfigurationAndResponseHandler(body, nil, nil);
}
// PostWithRequestConfigurationAndResponseHandler create new navigation property to agentGroups for onPremisesPublishingProfiles
func (m *AgentGroupsRequestBuilder) PostWithRequestConfigurationAndResponseHandler(body ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable, requestConfiguration *AgentGroupsRequestBuilderPostRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable, error) {
requestInfo, err := m.CreatePostRequestInformationWithRequestConfiguration(body, requestConfiguration);
if err != nil
|
errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {
"4XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,
"5XX": i20a3050780ee0b0cde0a884a4f35429a20d60067e3bcda382ec5400079147459.CreateODataErrorFromDiscriminatorValue,
}
res, err := m.requestAdapter.SendAsync(requestInfo, ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.CreateOnPremisesAgentGroupFromDiscriminatorValue, responseHandler, errorMapping)
if err != nil {
return nil, err
}
return res.(ie233ee762e29b4ba6970aa2a2efce4b7fde11697ca9ea81099d0f8269309c1be.OnPremisesAgentGroupable), nil
}
|
{
return nil, err
}
|
user.js
|
// Initial shape of the user slice: the user starts logged out.
const initialState = {
  isLoggedIn: false
}
// Redux action type that flips the login flag.
const TOGGLE_LOGIN = 'TOGGLE_LOGIN'
|
/**
 * Action creator: builds the action that toggles `isLoggedIn`.
 */
export function toggleLogin() {
  const action = { type: TOGGLE_LOGIN }
  return action
}
/**
 * Reducer for the user slice. TOGGLE_LOGIN inverts the `isLoggedIn`
 * flag; every other action leaves the state untouched.
 */
export function userReducer(state = initialState, action) {
  if (action.type === TOGGLE_LOGIN) {
    return { ...state, isLoggedIn: !state.isLoggedIn }
  }
  return state
}
| |
placement_group.py
|
from typing import Dict, Union, List, Optional
import ray
from ray._raylet import ObjectRef
from ray._raylet import PlacementGroupID
from ray._private.utils import hex_to_binary
from ray.util.annotations import PublicAPI, DeveloperAPI
from ray.ray_constants import to_memory_units
from ray._private.client_mode_hook import client_mode_should_convert
from ray._private.client_mode_hook import client_mode_wrap
# Lazily-created remote task used by PlacementGroup.ready(); see
# _export_bundle_reservation_check_method_if_needed for why it is cached.
bundle_reservation_check = None
# Resource label under which each placement-group bundle is exposed.
BUNDLE_RESOURCE_LABEL = "bundle"
# We need to import this method to use for ready API.
# But ray.remote is only available in runtime, and
# if we define this method inside ready method, this function is
# exported whenever ready is called, which can impact performance,
# https://github.com/ray-project/ray/issues/6240.
def _export_bundle_reservation_check_method_if_needed():
    """Define and cache the remote bundle-reservation probe task once.

    The ``@ray.remote`` task is stored in the module-level
    ``bundle_reservation_check`` so it is not re-exported on every
    ``PlacementGroup.ready()`` call, which would hurt performance
    (see ray issue #6240 referenced above).
    """
    global bundle_reservation_check
    if bundle_reservation_check:
        return

    @ray.remote(num_cpus=0)
    def bundle_reservation_check_func(placement_group):
        # Identity task: successfully scheduling it against the group's
        # bundle proves the placement group has been reserved.
        return placement_group

    bundle_reservation_check = bundle_reservation_check_func
@PublicAPI
class
|
:
"""A handle to a placement group."""
@staticmethod
def empty() -> "PlacementGroup":
    """Return a sentinel placement group carrying the nil ID."""
    return PlacementGroup(PlacementGroupID.nil())
def __init__(self, id: PlacementGroupID, bundle_cache: Optional[List[Dict]] = None):
    """Wrap an existing placement group.

    Args:
        id: ID of the placement group.
        bundle_cache: Optional pre-fetched bundle specs; when omitted
            they are fetched lazily on first access.
    """
    self.id = id
    self.bundle_cache = bundle_cache
@property
def is_empty(self):
    # True only for the sentinel returned by PlacementGroup.empty().
    return self.id.is_nil()
def ready(self) -> ObjectRef:
    """Returns an ObjectRef to check ready status.

    This API runs a small dummy task to wait for placement group creation.
    It is compatible to ray.get and ray.wait.

    Example:
        >>> import ray
        >>> from ray.util.placement_group import PlacementGroup
        >>> pg = PlacementGroup([{"CPU": 1}]) # doctest: +SKIP
        >>> ray.get(pg.ready()) # doctest: +SKIP
        >>> pg = PlacementGroup([{"CPU": 1}]) # doctest: +SKIP
        >>> ray.wait([pg.ready()], timeout=0) # doctest: +SKIP
    """
    self._fill_bundle_cache_if_needed()
    _export_bundle_reservation_check_method_if_needed()

    assert len(self.bundle_cache) != 0, (
        "ready() cannot be called on placement group object with a "
        "bundle length == 0, current bundle length: "
        f"{len(self.bundle_cache)}"
    )

    # Schedule a tiny probe task into this group; its ObjectRef resolves
    # only once the group's bundles have actually been reserved.
    return bundle_reservation_check.options(
        placement_group=self, resources={BUNDLE_RESOURCE_LABEL: 0.001}
    ).remote(self)
def wait(self, timeout_seconds: Union[float, int]) -> bool:
    """Wait for the placement group to be ready within the specified time.

    Args:
        timeout_seconds(float|int): Timeout in seconds.

    Return:
        True if the placement group is created. False otherwise.
    """
    return _call_placement_group_ready(self.id, timeout_seconds)
@property
def bundle_specs(self) -> List[Dict]:
    """List[Dict]: Return bundles belonging to this placement group."""
    # Fetched lazily from the GCS on first access, then cached.
    self._fill_bundle_cache_if_needed()
    return self.bundle_cache
@property
def bundle_count(self) -> int:
    """Number of bundles in this placement group."""
    self._fill_bundle_cache_if_needed()
    return len(self.bundle_cache)
def _fill_bundle_cache_if_needed(self) -> None:
    """Populate ``self.bundle_cache`` from the GCS if not yet cached."""
    if not self.bundle_cache:
        self.bundle_cache = _get_bundle_cache(self.id)
@client_mode_wrap
def _call_placement_group_ready(pg_id: PlacementGroupID, timeout_seconds: int) -> bool:
    """Block until placement group ``pg_id`` is ready or the timeout expires."""
    worker = ray.worker.global_worker
    worker.check_connected()

    return worker.core_worker.wait_placement_group_ready(pg_id, timeout_seconds)
@client_mode_wrap
def _get_bundle_cache(pg_id: PlacementGroupID) -> List[Dict]:
    """Fetch the bundle specs for ``pg_id`` from the GCS placement group table."""
    worker = ray.worker.global_worker
    worker.check_connected()

    return list(ray.state.state.placement_group_table(pg_id)["bundles"].values())
@PublicAPI
@client_mode_wrap
def placement_group(
    bundles: List[Dict[str, float]],
    strategy: str = "PACK",
    name: str = "",
    lifetime=None,
) -> PlacementGroup:
    """Asynchronously creates a PlacementGroup.

    Args:
        bundles(List[Dict]): A list of bundles which
            represent the resources requirements.
        strategy(str): The strategy to create the placement group.

            - "PACK": Packs Bundles into as few nodes as possible.
            - "SPREAD": Places Bundles across distinct nodes as even as possible.
            - "STRICT_PACK": Packs Bundles into one node. The group is
              not allowed to span multiple nodes.
            - "STRICT_SPREAD": Packs Bundles across distinct nodes.

        name(str): The name of the placement group.
        lifetime(str): Either `None`, which defaults to the placement group
            will fate share with its creator and will be deleted once its
            creator is dead, or "detached", which means the placement group
            will live as a global object independent of the creator.

    Raises:
        ValueError if bundle type is not a list.
        ValueError if empty bundle or empty resource bundles are given.
        ValueError if the wrong lifetime arguments are given.

    Return:
        PlacementGroup: Placement group object.
    """
    worker = ray.worker.global_worker
    worker.check_connected()

    if not isinstance(bundles, list):
        raise ValueError("The type of bundles must be list, got {}".format(bundles))

    # Validate bundles: each must request at least one nonzero resource.
    for bundle in bundles:
        if len(bundle) == 0 or all(
            resource_value == 0 for resource_value in bundle.values()
        ):
            raise ValueError(
                "Bundles cannot be an empty dictionary or "
                f"resources with only 0 values. Bundles: {bundles}"
            )

        # Idiom fix: membership test directly on the dict instead of .keys().
        if "memory" in bundle and bundle["memory"] > 0:
            # Make sure the memory resource can be
            # transformed to memory unit.
            to_memory_units(bundle["memory"], True)

    if lifetime is None:
        detached = False
    elif lifetime == "detached":
        detached = True
    else:
        raise ValueError(
            "placement group `lifetime` argument must be either `None` or 'detached'"
        )

    placement_group_id = worker.core_worker.create_placement_group(
        name, bundles, strategy, detached
    )

    return PlacementGroup(placement_group_id)
@PublicAPI
@client_mode_wrap
def remove_placement_group(placement_group: PlacementGroup) -> None:
    """Asynchronously remove placement group.

    Args:
        placement_group (PlacementGroup): The placement group to delete.
    """
    assert placement_group is not None
    current_worker = ray.worker.global_worker
    current_worker.check_connected()
    current_worker.core_worker.remove_placement_group(placement_group.id)
@PublicAPI
@client_mode_wrap
def get_placement_group(placement_group_name: str) -> PlacementGroup:
    """Get a placement group object with a global name.

    Args:
        placement_group_name: The global name of the placement group.

    Returns:
        The placement group object with the given name.

    Raises:
        ValueError: If the name is empty, or no placement group with the
            given name exists in the current namespace.
    """
    if not placement_group_name:
        raise ValueError("Please supply a non-empty value to get_placement_group")
    worker = ray.worker.global_worker
    worker.check_connected()
    placement_group_info = ray.state.state.get_placement_group_by_name(
        placement_group_name, worker.namespace
    )
    if placement_group_info is None:
        # Bug fix: the original message said "actor", but this API looks up
        # placement groups.
        raise ValueError(
            f"Failed to look up placement group with name: {placement_group_name}"
        )
    else:
        return PlacementGroup(
            PlacementGroupID(hex_to_binary(placement_group_info["placement_group_id"]))
        )
@DeveloperAPI
@client_mode_wrap
def placement_group_table(placement_group: PlacementGroup = None) -> dict:
    """Get the state of the placement group from GCS.

    Args:
        placement_group (PlacementGroup): placement group to see
            states. When None, the whole table is returned.
    """
    current_worker = ray.worker.global_worker
    current_worker.check_connected()
    if placement_group is None:
        pg_id = None
    else:
        pg_id = placement_group.id
    return ray.state.state.placement_group_table(pg_id)
@PublicAPI
def get_current_placement_group() -> Optional[PlacementGroup]:
    """Get the current placement group which a task or actor is using.

    It returns None if there's no current placement group for the worker.
    For example, if you call this method in your driver, it returns None
    (because drivers never belong to any placement group).

    Examples:
        >>> import ray
        >>> from ray.util.placement_group import PlacementGroup
        >>> from ray.util.placement_group import get_current_placement_group
        >>> @ray.remote # doctest: +SKIP
        ... def f(): # doctest: +SKIP
        ...     # This will return the placement group the task f belongs to.
        ...     # It means this pg will be identical to the pg created below.
        ...     pg = get_current_placement_group() # doctest: +SKIP
        >>> pg = PlacementGroup([{"CPU": 2}]) # doctest: +SKIP
        >>> f.options(placement_group=pg).remote() # doctest: +SKIP

        >>> # New script.
        >>> ray.init() # doctest: +SKIP
        >>> # New script doesn't belong to any placement group,
        >>> # so it returns None.
        >>> assert get_current_placement_group() is None # doctest: +SKIP

    Return:
        PlacementGroup: Placement group object.
            None if the current task or actor wasn't
            created with any placement group.
    """
    if client_mode_should_convert(auto_init=True):
        # Client mode is only a driver.
        return None
    current_worker = ray.worker.global_worker
    current_worker.check_connected()
    current_pg_id = current_worker.placement_group_id
    return None if current_pg_id.is_nil() else PlacementGroup(current_pg_id)
def check_placement_group_index(
    placement_group: PlacementGroup, bundle_index: int
) -> None:
    """Validate that ``bundle_index`` is legal for ``placement_group``.

    A nil (empty) placement group only accepts the sentinel index -1;
    otherwise the index must be -1 (no preference) or within
    ``[0, bundle_count - 1]``.

    Raises:
        ValueError: If the index is out of range for the group.
    """
    assert placement_group is not None
    if placement_group.id.is_nil():
        if bundle_index != -1:
            raise ValueError(
                "If placement group is not set, "
                "the value of bundle index must be -1."
            )
    elif bundle_index >= placement_group.bundle_count or bundle_index < -1:
        # Bug fix: valid indexes run 0..bundle_count-1; the previous
        # message overstated the upper bound by one.
        raise ValueError(
            f"placement group bundle index {bundle_index} "
            f"is invalid. Valid placement group indexes: "
            f"0-{placement_group.bundle_count - 1}"
        )
def _validate_resource_shape(
placement_group, resources, placement_resources, task_or_actor_repr
):
def valid_resource_shape(resources, bundle_specs):
"""
If the resource shape cannot fit into every
bundle spec, return False
"""
for bundle in bundle_specs:
fit_in_bundle = True
for resource, requested_val in resources.items():
# Skip "bundle" resource as it is automatically added
# to all nodes with bundles by the placement group.
if resource == BUNDLE_RESOURCE_LABEL:
continue
if bundle.get(resource, 0) < requested_val:
fit_in_bundle = False
break
if fit_in_bundle:
# If resource request fits in any bundle, it is valid.
return True
return False
bundles = placement_group.bundle_specs
resources_valid = valid_resource_shape(resources, bundles)
placement_resources_valid = valid_resource_shape(placement_resources, bundles)
if not resources_valid:
raise ValueError(
f"Cannot schedule {task_or_actor_repr} with "
"the placement group because the resource request "
f"{resources} cannot fit into any bundles for "
f"the placement group, {bundles}."
)
if not placement_resources_valid:
# Happens for the default actor case.
# placement_resources is not an exposed concept to users,
# so we should write more specialized error messages.
raise ValueError(
f"Cannot schedule {task_or_actor_repr} with "
"the placement group because the actor requires "
f"{placement_resources.get('CPU', 0)} CPU for "
"creation, but it cannot "
f"fit into any bundles for the placement group, "
f"{bundles}. Consider "
"creating a placement group with CPU resources."
)
def configure_placement_group_based_on_context(
    placement_group_capture_child_tasks: bool,
    bundle_index: int,
    resources: Dict,
    placement_resources: Dict,
    task_or_actor_repr: str,
    placement_group: Union[PlacementGroup, str, None] = "default",
) -> PlacementGroup:
    """Configure the placement group based on the given context.

    Based on the given context, this API returns the placement group instance
    for task/actor scheduling.

    Params:
        placement_group_capture_child_tasks: Whether or not the
            placement group needs to be captured from the global
            context.
        bundle_index: The bundle index for tasks/actor scheduling.
        resources: The scheduling resources.
        placement_resources: The scheduling placement resources for
            actors.
        task_or_actor_repr: The repr of task or actor
            function/class descriptor.
        placement_group: The placement group instance.
            - "default": Default placement group argument. Currently,
                the default behavior is to capture the parent task'
                placement group if placement_group_capture_child_tasks
                is set.
            - None: means placement group is explicitly not configured.
            - Placement group instance: In this case, do nothing.

    Returns:
        Placement group instance based on the given context.

    Raises:
        ValueError: If the bundle index is invalid for the placement group
            or the requested resources shape doesn't fit to any
            bundles.
    """
    # Validate inputs.
    assert placement_group_capture_child_tasks is not None
    assert resources is not None

    # Resolve the "default" sentinel / falsy values into a concrete
    # PlacementGroup instance.
    if placement_group == "default":
        if placement_group_capture_child_tasks:
            placement_group = get_current_placement_group()
        else:
            placement_group = PlacementGroup.empty()
    elif not placement_group:
        placement_group = PlacementGroup.empty()
    # get_current_placement_group() may have returned None (e.g. driver).
    if not placement_group:
        placement_group = PlacementGroup.empty()
    assert isinstance(placement_group, PlacementGroup)

    # Validate the index.
    check_placement_group_index(placement_group, bundle_index)
    # Validate the shape.
    if not placement_group.is_empty:
        _validate_resource_shape(
            placement_group, resources, placement_resources, task_or_actor_repr
        )
    return placement_group
|
PlacementGroup
|
proposal_handler.go
|
package client
import (
|
"github.com/osmosis-labs/osmosis/v7/x/pool-incentives/client/rest"
)
var UpdatePoolIncentivesHandler = govclient.NewProposalHandler(cli.NewCmdSubmitUpdatePoolIncentivesProposal, rest.ProposalUpdatePoolIncentivesRESTHandler)
|
govclient "github.com/cosmos/cosmos-sdk/x/gov/client"
"github.com/osmosis-labs/osmosis/v7/x/pool-incentives/client/cli"
|
delete.py
|
"""
delete.py
This script allows developers to delete files generated from redwood storage
system.
"""
import argparse
import os
import json
import boto3
import botocore
import defaults
import docker
import logging
import urllib2
import ssl
import datetime
from io import BytesIO
# Module-level logger for the admin-delete tool; INFO and above is echoed
# to the console through an attached stream handler.
logger = logging.getLogger('admin-delete')
logger.setLevel(level=logging.INFO)
strmhd = logging.StreamHandler()
strmhd.setLevel(level=logging.INFO)
logger.addHandler(strmhd)
class ICDCDException(Exception):
    """
    Base exception class for DCCOPS admin scripts
    """
    # Subclasses set a human-readable message in their __init__.
    message = None

    def __repr__(self):
        """
        Mirrors self.__str__() so repr() and str() render identically.

        Returns
        -------
        str
            output of self.__str__()
        """
        return str(self)

    def __str__(self):
        """
        Outputs a formatted error message

        Returns
        -------
        str
            A formatted error message
        """
        return "{}: {}".format(type(self).__name__, self.message)
class MetadataDeleteError(ICDCDException):
    """
    Thrown if a file metadata entry couldn't be deleted
    """
    def __init__(self, file_uuid=""):
        """
        Builds the error message around the offending file uuid.

        Parameters
        ----------
        file_uuid: str
            file_uuid of the file metadata
        """
        self.file_name = file_uuid
        self.message = ("Unable to remove file "
                        " {} from Metadata Server".format(self.file_name))
class ICDCDBadAWSKeys(ICDCDException):
    """
    Should be thrown the AWS given are not valid for accessing S3 buckets
    """
    def __init__(self):
        """
        Sets the canned error message about invalid AWS credentials.
        """
        self.message = ("AWS didn't receive the right access"
                        " and secret access keys.")
class RedwoodDeleteError(ICDCDException):
    """
    Should be thrown if file wasn't deleted properly
    """
    def __init__(self, file_name=""):
        """
        Builds the error message around the undeletable file.

        Parameters
        ----------
        file_name: str
            File uuid of the file that can't be deleted
        """
        self.file_name = file_name
        self.message = ("Unable to delete File {}."
                        " File still exists in bucket".format(self.file_name))
class ForbiddenDeleteError(ICDCDException):
    """
    Thrown if a file that shouldn't be deleted was about to be deleted.
    """
    def __init__(self, message=""):
        """
        Stores the caller-provided error message.

        Parameters
        ----------
        message: str
            Error Message
        """
        self.message = message
class RedwoodFileNotFoundError(ICDCDException):
    """
    Should be thrown if a file wasn't found
    """
    def __init__(self, file_uuid=""):
        """
        Builds the error message around the missing file uuid.

        Parameters
        ----------
        file_uuid: str
            File UUID that can't be found
        """
        self.file_uuid = file_uuid
        self.message = ("Cannot find the file named {}."
                        " The file uuid may be incorrect or the file is not"
                        " in the bucket.".format(self.file_uuid))
class RedwoodMissingDataError(ICDCDException):
    """
    Thrown if specific metadata wasn't in the file metadata database
    """
    def __init__(self, message=""):
        """
        Stores the caller-provided error message.

        Parameters
        ----------
        message: str
            Error Message
        """
        self.message = message
class RedwoodFileMetadataAPI:
"""
Retrieves and modifies data from the redwood metadata server by
accessing the https website or the MongoDB container directly
Attributes
-----------
endpoint : str
The base url of the https metadata website
mongodb_container_name : str
The name of the docker container where the metadata
database is located
table_url : str
The exposed url of the MongoDB dcc-metadata database
"""
FILE_NAME_KEY = 'fileName'
FILE_NAME_BUNDLE_ID = 'gnosId'
def __init__(self, endpoint, mongodb_container_name=None,
             table_url=None):
    """
    Stores connection settings, falling back to the defaults module for
    any value not supplied (or supplied as a falsy value).

    Parameters
    ----------
    endpoint : str
        The base url of the https metadata website
    mongodb_container_name : str
        The name of the docker container where the metadata
        database is located
    table_url : str
        The exposed url of the mongoDB dcc-metadata database
    """
    if mongodb_container_name:
        self.mongodb_container_name = mongodb_container_name
    else:
        self.mongodb_container_name = defaults.MONGODB_CONTAINER
    self.table_url = table_url if table_url else defaults.MONGODB_URL
    self.endpoint = endpoint
def get_file_uuids_from_bundle(self, bundle_id, context=None):
    """
    Returns the uuid of every file entity belonging to the given bundle,
    as reported by the https metadata website.
    """
    ctx = context or self._generate_fake_context()
    url = 'https://metadata.{}/entities?&gnosId={}'.format(
        self.endpoint, bundle_id)
    entities = json.load(urllib2.urlopen(url, context=ctx))
    return [entity['id'] for entity in entities['content']]
def _run_mongo_shell_script(self, js_command):
    """
    Access the redwood-metadata-db docker container. Then, runs a MongoDB
    shell command by using the given javascript command

    Parameters
    ----------
    js_command
        The javascript command that the MongoDB shell will execute

    Returns
    -------
    str
        The output from MongoDB shell script
    """
    docker_client = docker.APIClient()
    shell_cmd = ['mongo', self.table_url, '--quiet', '--eval', js_command]
    exec_handle = docker_client.exec_create(defaults.MONGODB_CONTAINER,
                                            shell_cmd)
    output = docker_client.exec_start(exec_handle['Id'])
    return output.strip()
def delete_entity(self, file_uuid):
    """
    Deletes the file metadata from the file metadata server by executing
    a MongoDB shell delete command in the metadata server's docker
    container.

    Parameters
    ----------
    file_uuid : str
        The file_uuid of the target deleted file to locate the database
        entry

    Raises
    -------
    MetadataDeleteError
        Either the file metadata database is unable to delete the file's
        entry or the database doesn't contain any entries with the given
        file_uuid
    """
    # str.replace (not str.format) is used because the mongo command
    # itself contains literal curly braces.
    delete_js = ("var result = db.Entity.deleteMany("
                 "{ _id: '{file_name}'});"
                 "printjson(result);").replace('{file_name}', file_uuid)
    shell_output = self._run_mongo_shell_script(delete_js)
    if json.loads(shell_output)['deletedCount'] < 1:
        raise MetadataDeleteError(file_uuid)
def get_file_metadata(self, file_uuid, context=None):
    """
    Gets the file metadata from the https metadata website

    Parameters
    ----------
    file_uuid : str
        The target file's uuid for locating its file metadata
    context : ssl.SSLContext, optional
        The custom context for accessing the metadata website. Will default
        to a context with a fake cert.

    Returns
    -------
    dict
        the file metadata of the target file

    Raises
    ------
    RedwoodMissingDataError
        Can't find the file metadata with the given file_uuid
    """
    ctx = context or self._generate_fake_context()
    url = 'https://metadata.{}/entities/{}'.format(
        self.endpoint, file_uuid)
    try:
        return json.load(urllib2.urlopen(url, context=ctx))
    except urllib2.HTTPError as http_err:
        # Only a 404 is translated; all other HTTP errors propagate.
        if http_err.code != 404:
            raise
        raise RedwoodMissingDataError(
            "Unable to find metadata entry "
            "at {} for File {}.".format(url, file_uuid))
def get_bundle_metadata_info(self, bundle_id, context=None):
    """
    Gets the file metadata of the
    bundle's metadata.json (bundle metadata file) from the https metadata
    website

    Parameters
    ----------
    bundle_id : str
        The metadata.json's bundle uuid
    context : ssl.SSLContext, optional
        The context for accessing the metadata website

    Returns
    --------
    dict
        The file metadata of the target bundle's
        metadata.json (bundle metadata file)
    """
    ctx = context or self._generate_fake_context()
    url = 'https://metadata.{}/entities?fileName={}&gnosId={}'.format(
        self.endpoint, defaults.BUNDLE_METADATA_FILENAME, bundle_id)
    return json.load(urllib2.urlopen(url, context=ctx))
@staticmethod
def
|
():
"""
Generates a fake ssl.SSLContext for retrieving json data by https
Returns
-------
An ssl.SSLContext containing a fake cert
"""
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
return ctx
class DCCOpsRepo:
    """
    An enumeration representing the repositories used by DCC-Ops
    """
    # String keys used to index per-repo environment-variable maps.
    BOARDWALK = 'boardwalk'
    ACTION_SERVICE = 'action_service'
    REDWOOD = 'redwood'
class DCCOpsEnv:
    """
    Contains all the settings from DCC-Ops.

    Attributes
    -----------
    dccops_dir: str
        The root directory of the DCC-Ops repository
    _env_vars: dict
        A dictionary of all repos in DCC-Ops. Each repo contains a list
        the environment variables used by that repo
    """
    def __init__(self, dcc_ops_directory):
        """
        Collects the environment variables from Boardwalk,
        Action Service, and Redwood from the DCC-Ops repository.
        Also, initializes the dcc ops directory attribute.

        Parameters
        ----------
        dcc_ops_directory: str
            The directory of the DCC-Ops directory
        """
        self.dccops_dir = dcc_ops_directory
        self._env_vars = {}
        repo_subdirs = [
            (DCCOpsRepo.BOARDWALK, defaults.DCCOPS_BOARDWALK_SUB_DIR),
            (DCCOpsRepo.ACTION_SERVICE, defaults.DCCOPS_ACTION_SERVICE_SUB_DIR),
            (DCCOpsRepo.REDWOOD, defaults.DCCOPS_REDWOOD_SUB_DIR),
        ]
        for repo, subdir in repo_subdirs:
            self._sync_settings(repo, subdir)

    def _sync_settings(self, repo, repo_subdir,
                       env_vars_filename=defaults.DCCOPS_ENV_FILENAME):
        """
        Reads the env file found in the given DCC-Ops sub-directory and
        caches each NAME=VALUE line under the repo's key in self._env_vars.

        Parameters
        ----------
        repo_subdir: str
            the repo's sub-directory containing the environment variable file
        env_vars_filename: str, optional
            the filename of the environment variable file
        repo: DCCOpsRepo, str
            The repo where the environment variable is located
        """
        env_path = os.path.join(self.dccops_dir, repo_subdir,
                                env_vars_filename)
        with open(env_path, 'r') as env_file:
            parsed_vars = {}
            for line in env_file.readlines():
                if '=' in line:
                    name, value = line.split('=')
                    parsed_vars[name] = value.strip()
            self._env_vars[repo] = parsed_vars

    def get_env_var(self, repo, var_name):
        """
        Gets the value of the environment variable from the given repo and var
        name

        Parameters
        ----------
        repo: DCCOpsRepo, str
            The repo where the environment variable is located
        var_name: str
            The name of the environment variable

        Returns
        -------
        str
            The value of the environment variable from the given repo and var
            name
        """
        return self._env_vars[repo][var_name]
class RedwoodAdminDeleter:
    """
    Deletes files from the AWS S3 buckets used by the Redwood Storage System.
    Also, handles any information related to the file deletion.

    Attributes
    ----------
    bucket_name : str
        The name of the AWS S3 bucket containing the files selected for
        deletion.
    base_endpoint : str
        the base url for the redwood metadata server
    data_root_folder : str
        The root folder of where all the bundle's files and metadata are saved.
    deleted_list_filename : str
        The location of the deleted_list file.
    redwood_metadata_api : RedwoodFileMetadataAPI
        For accessing and editing the file metadata in the redwood metadata
        server.
    ignore_errors : boolean
        If True, prevents errors (except ForbiddenDeleteError and
        RedwoodFileNotFoundError for the target deleted file) from
        interrupting the deletion process
    """
    def __init__(self, dcc_ops_env=None, ignore_errors=False):
        """
        Gets the all of the .env variables in DCC-Ops.
        Then, checks if the aws keys from the .env are valid. Afterwards, it
        initializes the Redwood File Metadata API, and other attributes

        Parameters
        ----------
        dcc_ops_env : DCCOpsEnv
            The collected DCC-Ops environment settings.
        ignore_errors : boolean, optional
            If True, most errors are logged instead of aborting the deletion.

        Raises
        ------
        ICDCDBadAWSKeys
            The AWS keys read from the environment are not valid.
        """
        self.env_settings = dcc_ops_env
        # boto3 reads credentials from the process environment, so export
        # the keys collected from the DCC-Ops .env files.
        os.environ['AWS_ACCESS_KEY_ID'] = self.env_settings.get_env_var(
            DCCOpsRepo.REDWOOD,
            defaults.DCCOPS_ENV_NAME_ACCESS_ID)
        os.environ['AWS_SECRET_ACCESS_KEY'] = self.env_settings.get_env_var(
            DCCOpsRepo.REDWOOD,
            defaults.DCCOPS_ENV_NAME_SECRET_KEY)
        self.bucket_name = self.env_settings.get_env_var(
            DCCOpsRepo.REDWOOD,
            defaults.DCCOPS_ENV_NAME_REDWOOD_BUCKET)
        self.base_endpoint = self.env_settings.get_env_var(
            DCCOpsRepo.REDWOOD,
            defaults.DCCOPS_ENV_NAME_REDWOOD_ENDPOINT)
        self.data_root_folder = defaults.METADATA_FILE_ROOT_FOLDER
        self.deleted_list_filename = defaults.DELETED_LIST_FILENAME
        self.validate_aws_credentials()
        self.redwood_metadata_api = RedwoodFileMetadataAPI(self.base_endpoint)
        self.ignore_errors = ignore_errors

    @staticmethod
    def validate_aws_credentials():
        """
        Checks if the AWS access key and AWS secret access key is valid.

        Uses the list_buckets method to check the aws keys' validity. If they
        aren't valid, InvalidAccessKeyId.ClientError is caught and
        ICDCDBadAWSKeys is thrown instead.

        Raises
        -------
        ICDCDBadAWSKeys
            If aws access keys are invalid
        """
        s3_client = boto3.client('s3')
        try:
            s3_client.list_buckets()
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == 'InvalidAccessKeyId':
                raise ICDCDBadAWSKeys
            else:
                raise

    def check_file_exists(self, file_name):
        """
        Checks if there's a file with the given filename in that bucket.

        Parameters
        ----------
        file_name
            the file's name that going to be checked

        Returns
        -------
        returns True if a file with the given filename exists
        in the bucket otherwise this method returns False
        """
        s3_client = boto3.client('s3')
        try:
            s3_client.head_object(Bucket=self.bucket_name, Key=file_name)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            else:
                raise
        else:
            return True

    def delete_file(self, file_uuid, skip_missing_files=False):
        """
        Removes the deleted files entry in the metadata database,
        Adds a deletion flag in the file's entry in the bundle metadata.
        Removes the file's the storage listing in the redwood storage bucket.
        Finally, it deletes the file in the redwood storage system.

        If the deletion was successful, any information about the deletion is
        recorded in the deletion_file_list file in the root folder of the
        redwood storage bucket.

        Parameters
        ----------
        file_uuid: str
            The file_name of the deleted file
        skip_missing_files: boolean, optional
            If True, a missing data file in the bucket is logged instead of
            raising RedwoodFileNotFoundError.

        Raises
        ------
        RedwoodMissingDataError
            (If ignore_errors is disabled)
            The deleted file has no file metadata in in the
            redwood metadata database
        ForbiddenDeleteError
            The deleted file contains the bundle metadata
        RedwoodFileNotFoundError
            (If ignore_errors is disabled)
            The bundle data doesn't exist in the redwood storage bucket
        """
        logger.info("Starting Deletion for {}...".format(file_uuid))
        file_metadata = {}
        try:
            file_metadata = self.redwood_metadata_api.get_file_metadata(file_uuid)
        except RedwoodMissingDataError as e:
            if self.ignore_errors:
                # Consistency fix: use the configured module logger (and the
                # non-deprecated warning method) instead of the root logger.
                logger.warning(str(e))
                logger.warning("Metadata doesn't exist for this file."
                               " Skipping metadata related steps.")
            else:
                raise

        if file_metadata:
            metadata_filename = defaults.BUNDLE_METADATA_FILENAME
            bundle_id = file_metadata['gnosId']
            bundle_metadata = self.redwood_metadata_api. \
                get_bundle_metadata_info(bundle_id)
            # Deleting metadata.json would orphan the rest of the bundle.
            if file_metadata['fileName'] == metadata_filename:
                raise ForbiddenDeleteError("{} is a bundle metadata file ({})"
                                           " and cannot be"
                                           " deleted".format(bundle_id,
                                                             metadata_filename)
                                           )
            bundle_metadata_json_uuid = bundle_metadata['content'][0]['id']

            logger.info("Found file metadata for {} ({}) from Bundle {}\n"
                        "Editing Bundle's metadata.json and .meta files"
                        " ({})...".format(file_metadata['fileName'],
                                          file_uuid,
                                          file_metadata['gnosId'],
                                          bundle_metadata_json_uuid))
            try:
                self._edit_bundle_metadata(
                    file_metadata['fileName'],
                    bundle_metadata_json_uuid)
            except RedwoodFileNotFoundError:
                if self.ignore_errors:
                    logger.warning("This bundle ({}) no longer has its metadata"
                                   " in the bucket. Please delete the other"
                                   " files from this"
                                   " bundle".format(file_metadata['gnosId']))
                else:
                    raise
            logger.info("Deleting entry in redwood-metadata-db...")
            self._clear_metadata_db_entry(file_uuid)

        logger.info("Deleting {} ({}) and"
                    " its endpoint"
                    " listing file...".format(file_metadata.get('fileName',
                                                                '[No Metadata'
                                                                ' Found]'),
                                              file_uuid))
        target_file_name = "{}/{}".format(self.data_root_folder, file_uuid)
        listing_info_file_name = "{}/{}.meta".format(self.data_root_folder,
                                                     file_uuid)
        self._safely_delete_file(target_file_name,
                                 always_throw_error=not skip_missing_files)
        self._safely_delete_file(listing_info_file_name)

        logger.info("Adding file entry in deleted file list for File"
                    " ({})...".format(file_uuid))
        self._record_deletion_data(
            file_uuid,
            file_metadata.get('fileName', '[No Metadata Found]'),
            file_metadata.get('gnosId', '[No Metadata Found]'))

    def delete_files_in_bundle(self, bundle_uuid):
        """
        Deletes every file in the given bundle, skipping the protected
        metadata.json file and tolerating files already missing from the
        bucket.
        """
        fls = self.redwood_metadata_api.get_file_uuids_from_bundle(bundle_uuid)
        for file_uuid in fls:
            try:
                self.delete_file(file_uuid, skip_missing_files=True)
            except ForbiddenDeleteError:
                # metadata.json is deliberately left in place.
                logger.error("Skipping Metadata.json file....")

    def _safely_delete_file(self, file_name, always_throw_error=False):
        """
        Deletes the file if the file exists in the bucket.

        Parameters
        ----------
        file_name: str
            The deleted file's file name
        always_throw_error: boolean, optional
            If True, a missing file raises even when ignore_errors is set.

        Raises
        ------
        RedwoodFileNotFoundError
            File is not in the redwood storage S3 bucket.
        """
        s3_client = boto3.client('s3')
        if self.check_file_exists(file_name):
            s3_client.delete_object(Bucket=self.bucket_name,
                                    Key=file_name)
        elif self.ignore_errors and not always_throw_error:
            logger.warning("Unable to delete {}".format(file_name))
        else:
            raise RedwoodFileNotFoundError(file_name)

    def _record_deletion_data(self, file_uuid, file_name, bundle_uuid):
        """
        Logs info about the file deletion in a file. The name of the file is
        the value of defaults.DELETED_LIST_FILENAME.

        The following info is recorded:
        -Deleted file's uuid
        -Deleted file's name
        -Date and time of Deletion

        Parameters
        ----------
        file_uuid: str
            The file_name of the deleted file
        file_name: str
            The deleted file's human-readable name
        bundle_uuid: str
            The uuid of the bundle the file belonged to
        """
        s3_client = boto3.client('s3')
        deleted_file_data = BytesIO()
        deletion_dict = {'deletedFiles': {'bundles': {}}}
        if self.check_file_exists(self.deleted_list_filename):
            s3_client.download_fileobj(self.bucket_name,
                                       self.deleted_list_filename,
                                       deleted_file_data)
            try:
                deletion_dict = json.loads(deleted_file_data.getvalue())
            except ValueError:
                # Keep going with a fresh log rather than losing the deletion.
                logger.warning("Deletion History Log "
                               "format's is incorrect.")
        bundle_list = deletion_dict['deletedFiles']['bundles']
        # Bug fix: minutes are %M -- %m is the month directive, so the old
        # format string recorded the month in the minutes position.
        date = datetime.datetime.now().strftime('%m-%d-%y %I:%M:%S %p')
        bundle_list.setdefault(bundle_uuid, []) \
            .append({'file_uuid': file_uuid,
                     'file_name': file_name,
                     'date_deleted': date})
        deletion_list_bytes = json.dumps(deletion_dict).encode()
        if self.check_file_exists(self.deleted_list_filename):
            s3_client.put_object(Bucket=self.bucket_name,
                                 Key=self.deleted_list_filename,
                                 Body=deletion_list_bytes)
        else:
            del_byte_io = BytesIO(deletion_list_bytes)
            s3_client.upload_fileobj(del_byte_io,
                                     self.bucket_name,
                                     self.deleted_list_filename)

    def _clear_metadata_db_entry(self, file_uuid):
        """
        Removes the deleted files entry in a mongo database in the
        redwood-metadata-db container

        Parameters
        ----------
        file_uuid
            The deleted file's file uuid

        Raises
        -------
        MetadataDeleteError
            Unable able to delete the deleted file's entry
            (if ignore_errors is disabled)
        """
        try:
            self.redwood_metadata_api.delete_entity(file_uuid)
        except MetadataDeleteError as e:
            if self.ignore_errors:
                logger.warning(str(e))
                logger.warning('Unable to delete metadata'
                               ' server entry for file {}'.format(file_uuid))
            else:
                raise

    def _edit_bundle_metadata(self, file_name,
                              metadata_file_uuid):
        """
        This method gets the bundle's metadata.json file in the redwood storage
        S3 bucket. Then, in the json file, it finds the deleted file's entry
        under "workflow_outputs" key. Afterwards, it adds the is_deleted
        flag in the entry. It should look like the following example...

        Example
        -------
        {
            ...
            "workflow_outputs": {
                "is_deleted": false
                "file_type": "fake",
                "file_sha": "fac54a",
                "file_path": "fake_file.fakse",
                "file_size": 8888
            }
        }

        Finally, the new metadata.json is uploaded to the S3 bucket and the old
        metadata is overwritten.

        Parameters
        ----------
        file_name: str
            the name of the deleted file
        metadata_file_uuid
            the file_uuid metadata.json of the deleted file's bundle

        Raises
        -------
        RedwoodFileNotFoundError
            The metadata.json is not in the S3 Bucket redwood storage
        """
        file_location = "{}/{}".format(self.data_root_folder,
                                       metadata_file_uuid)
        listing_file_location = "{}.meta".format(file_location)
        s3_client = boto3.client('s3')
        if self.check_file_exists(file_location):
            old_bundle_metadata_file = BytesIO()
            s3_client.download_fileobj(self.bucket_name, file_location,
                                       old_bundle_metadata_file)
            bundle_metadata_json = json.loads(old_bundle_metadata_file.getvalue())
            for wo in bundle_metadata_json["specimen"][0]["samples"][0] \
                    ["analysis"][0]["workflow_outputs"]:
                if file_name == wo['file_path']:
                    wo['is_deleted'] = True
            new_bundle_metadata_file = BytesIO()
            json.dump(bundle_metadata_json, new_bundle_metadata_file)
            s3_client.put_object(Body=new_bundle_metadata_file.getvalue(),
                                 Bucket=self.bucket_name,
                                 Key=file_location)
            # The .meta listing tracks size/checksums of metadata.json, so it
            # must be refreshed to match the rewritten file. MD5s are cleared
            # and recomputed out-of-band by the indexer container below.
            old_endpoint_info_file = BytesIO()
            s3_client.download_fileobj(self.bucket_name, listing_file_location,
                                       old_endpoint_info_file)
            listing_info_json = json.loads(old_endpoint_info_file.getvalue())
            listing_info_json["objectMd5"] = None
            listing_info_json["parts"][0]["sourceMd5"] = None
            bundle_metadata_filesize = len(new_bundle_metadata_file.getvalue())
            listing_info_json["parts"][0]["partSize"] = bundle_metadata_filesize
            listing_info_json["objectSize"] = bundle_metadata_filesize
            new_listing_metadata = json.dumps(listing_info_json)
            s3_client.put_object(Body=new_listing_metadata,
                                 Bucket=self.bucket_name,
                                 Key=listing_file_location)
            client = docker.APIClient()
            client.exec_create(defaults.INDEXER_CONTAINER,
                               ['bash', 'update_endpoint_metadata.sh',
                                metadata_file_uuid])
        else:
            raise RedwoodFileNotFoundError(metadata_file_uuid)
def run_delete_file_cli(deleter, object_uuid, skip_prompt,
                        will_delete_bundle=False):
    """
    The command interface for deleting a file in AWS S3 Buckets

    Parameters
    ----------
    deleter: RedwoodAdminDeleter
        The object that manages file deletion
    object_uuid
        The file_name of the file targeted for deletion
    skip_prompt
        If this value is True, then the user will not be asked to confirm
        the deletion
    will_delete_bundle
        If this value is True, the confirmation message with change and the
        deleter will delete all files in the bundle
    """
    answer = ""
    if not skip_prompt:
        target_desc = "EVERY FILE IN BUNDLE" if will_delete_bundle else "File"
        answer = raw_input("Are you sure you want to delete {} {}?"
                           " [Y]es/[N]o ".format(target_desc, object_uuid))

    if skip_prompt or answer.lower() in {'y', 'yes'}:
        try:
            if will_delete_bundle:
                deleter.delete_files_in_bundle(object_uuid)
            else:
                deleter.delete_file(object_uuid)
        except (RedwoodDeleteError, RedwoodFileNotFoundError) as e:
            logger.error(str(e))
            logger.error("Deletion Failed")
        else:
            if will_delete_bundle:
                logger.info("Successfully Deleted "
                            "All Files from Bundle {}.".format(object_uuid))
            else:
                logger.info("Successfully deleted File {}.".format(object_uuid))
    else:
        logger.info("DID NOT delete File {}.".format(object_uuid))
def run_cli():
    """
    Entry point for the admin-delete command line interface.

    Parses the CLI arguments, builds the deleter (root privileges
    required), and hands control to run_delete_file_cli.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', "--skip-prompt",
                        help='Skips Confirmation Prompt.',
                        action="store_true")
    parser.add_argument("--ignore-errors",
                        help='Prevents most errors from interrupting the'
                             'deletion process',
                        action="store_true")
    parser.add_argument('FILE_UUID',
                        help='The file uuid of the file that will be'
                             ' deleted.')
    parser.add_argument("--delete-bundle", action='store_true')
    args = parser.parse_args()
    dccops_env_vars = DCCOpsEnv(defaults.DCCOPS_DEFAULT_LOCATION)
    # Deletion touches shared storage; refuse to run unprivileged.
    if os.getuid() != 0:
        logger.error("Please run this script as root.")
        return
    try:
        deleter = RedwoodAdminDeleter(dccops_env_vars,
                                      ignore_errors=args.ignore_errors)
    except ICDCDBadAWSKeys as e:
        logger.error(str(e))
        logger.error("Please check if your AWS keys are correct.")
    else:
        run_delete_file_cli(deleter, args.FILE_UUID, args.skip_prompt,
                            args.delete_bundle)


if __name__ == '__main__':
    run_cli()
|
_generate_fake_context
|
playlist_details_model.rs
|
use std::cell::Ref;
use std::ops::Deref;
use std::rc::Rc;
use crate::app::components::{handle_error, PlaylistModel};
use crate::app::dispatch::ActionDispatcher;
use crate::app::models::*;
use crate::app::state::{BrowserAction, BrowserEvent, PlaybackAction, PlaylistSource};
use crate::app::{AppEvent, AppModel, AppState};
/// View model backing the playlist-details screen.
pub struct PlaylistDetailsModel {
    /// Id of the playlist this model displays.
    pub id: String,
    // Shared application model/state.
    app_model: Rc<AppModel>,
    // Dispatches sync and async actions to the app.
    dispatcher: Box<dyn ActionDispatcher>,
}
impl PlaylistDetailsModel {
pub fn new(id: String, app_model: Rc<AppModel>, dispatcher: Box<dyn ActionDispatcher>) -> Self {
Self {
id,
app_model,
dispatcher,
}
}
fn songs_ref(&self) -> Option<impl Deref<Target = Vec<SongDescription>> + '_> {
self.app_model.map_state_opt(|s| {
Some(
|
.playlist_details_state(&self.id)?
.content
.as_ref()?
.songs,
)
})
}
pub fn get_playlist_info(&self) -> Option<impl Deref<Target = PlaylistDescription> + '_> {
self.app_model
.map_state_opt(|s| s.browser.playlist_details_state(&self.id)?.content.as_ref())
}
pub fn load_playlist_info(&self) {
let api = self.app_model.get_spotify();
let id = self.id.clone();
self.dispatcher.dispatch_async(Box::pin(async move {
match api.get_playlist(&id).await {
Ok(playlist) => Some(BrowserAction::SetPlaylistDetails(playlist).into()),
Err(err) => handle_error(err),
}
}));
}
}
impl PlaylistDetailsModel {
    /// Borrow the current application state from the shared model.
    fn state(&self) -> Ref<'_, AppState> {
        self.app_model.get_state()
    }
}
impl PlaylistModel for PlaylistDetailsModel {
    // Id of the currently playing song, if any.
    fn current_song_id(&self) -> Option<String> {
        self.state().playback.current_song_id.clone()
    }
    // Songs of this playlist as view models; empty until details load.
    fn songs(&self) -> Vec<SongModel> {
        let songs = self.songs_ref();
        match songs {
            Some(songs) => songs
                .iter()
                .enumerate()
                .map(|(i, s)| s.to_song_model(i))
                .collect(),
            None => vec![],
        }
    }
    // Start playback of `id`. If this playlist is not the current playback
    // source, load its songs into the queue first, then load the song.
    fn play_song(&self, id: String) {
        let source = PlaylistSource::Playlist(self.id.clone());
        if self.app_model.get_state().playback.source != source {
            let songs = self.songs_ref();
            if let Some(songs) = songs {
                self.dispatcher
                    .dispatch(PlaybackAction::LoadPlaylist(source, songs.clone()).into());
            }
        }
        self.dispatcher.dispatch(PlaybackAction::Load(id).into());
    }
    // Refresh only when the details loaded are for *this* playlist.
    fn should_refresh_songs(&self, event: &AppEvent) -> bool {
        matches!(
            event,
            AppEvent::BrowserEvent(BrowserEvent::PlaylistDetailsLoaded(id)) if id == &self.id
        )
    }
    // No per-song action group on this screen.
    fn actions_for(&self, _: String) -> Option<gio::ActionGroup> {
        None
    }
    // No per-song context menu on this screen.
    fn menu_for(&self, _: String) -> Option<gio::MenuModel> {
        None
    }
}
|
&s.browser
|
gesture.rs
|
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::EventController;
use crate::EventSequenceState;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem;
use std::mem::transmute;
glib::wrapper! {
pub struct Gesture(Object<ffi::GtkGesture, ffi::GtkGestureClass>) @extends EventController;
match fn {
type_ => || ffi::gtk_gesture_get_type(),
}
}
pub const NONE_GESTURE: Option<&Gesture> = None;
// NOTE(review): this file is generated by gir (see file header, "DO NOT
// EDIT") — prefer regenerating over hand-editing these bindings.
/// Generated convenience trait exposing `GtkGesture` functionality
/// (state queries, grouping, and signal connectors); blanket-implemented
/// below for every `O: IsA<Gesture>`.
pub trait GestureExt: 'static {
    #[doc(alias = "gtk_gesture_get_bounding_box")]
    #[doc(alias = "get_bounding_box")]
    fn bounding_box(&self) -> Option<gdk::Rectangle>;
    #[doc(alias = "gtk_gesture_get_bounding_box_center")]
    #[doc(alias = "get_bounding_box_center")]
    fn bounding_box_center(&self) -> Option<(f64, f64)>;
    #[doc(alias = "gtk_gesture_get_device")]
    #[doc(alias = "get_device")]
    fn device(&self) -> Option<gdk::Device>;
    #[doc(alias = "gtk_gesture_get_group")]
    #[doc(alias = "get_group")]
    fn group(&self) -> Vec<Gesture>;
    #[doc(alias = "gtk_gesture_get_last_event")]
    #[doc(alias = "get_last_event")]
    fn last_event(&self, sequence: Option<&gdk::EventSequence>) -> Option<gdk::Event>;
    #[doc(alias = "gtk_gesture_get_last_updated_sequence")]
    #[doc(alias = "get_last_updated_sequence")]
    fn last_updated_sequence(&self) -> Option<gdk::EventSequence>;
    #[doc(alias = "gtk_gesture_get_point")]
    #[doc(alias = "get_point")]
    fn point(&self, sequence: Option<&gdk::EventSequence>) -> Option<(f64, f64)>;
    #[doc(alias = "gtk_gesture_get_sequence_state")]
    #[doc(alias = "get_sequence_state")]
    fn sequence_state(&self, sequence: &gdk::EventSequence) -> EventSequenceState;
    #[doc(alias = "gtk_gesture_get_sequences")]
    #[doc(alias = "get_sequences")]
    fn sequences(&self) -> Vec<gdk::EventSequence>;
    #[doc(alias = "gtk_gesture_group")]
    #[doc(alias = "group")]
    fn group_with<P: IsA<Gesture>>(&self, gesture: &P);
    #[doc(alias = "gtk_gesture_handles_sequence")]
    fn handles_sequence(&self, sequence: Option<&gdk::EventSequence>) -> bool;
    #[doc(alias = "gtk_gesture_is_active")]
    fn is_active(&self) -> bool;
    #[doc(alias = "gtk_gesture_is_grouped_with")]
    fn is_grouped_with<P: IsA<Gesture>>(&self, other: &P) -> bool;
    #[doc(alias = "gtk_gesture_is_recognized")]
    fn is_recognized(&self) -> bool;
    #[doc(alias = "gtk_gesture_set_sequence_state")]
    fn set_sequence_state(&self, sequence: &gdk::EventSequence, state: EventSequenceState) -> bool;
    #[doc(alias = "gtk_gesture_set_state")]
    fn set_state(&self, state: EventSequenceState) -> bool;
    #[doc(alias = "gtk_gesture_ungroup")]
    fn ungroup(&self);
    #[doc(alias = "n-points")]
    fn n_points(&self) -> u32;
    #[doc(alias = "begin")]
    fn connect_begin<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId;
    #[doc(alias = "cancel")]
    fn connect_cancel<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId;
    #[doc(alias = "end")]
    fn connect_end<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId;
    #[doc(alias = "sequence-state-changed")]
    fn connect_sequence_state_changed<
        F: Fn(&Self, Option<&gdk::EventSequence>, EventSequenceState) + 'static,
    >(
        &self,
        f: F,
    ) -> SignalHandlerId;
    #[doc(alias = "update")]
    fn connect_update<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
        &self,
        f: F,
    ) -> SignalHandlerId;
}
impl<O: IsA<Gesture>> GestureExt for O {
fn bounding_box(&self) -> Option<gdk::Rectangle> {
unsafe {
let mut rect = gdk::Rectangle::uninitialized();
let ret = from_glib(ffi::gtk_gesture_get_bounding_box(
self.as_ref().to_glib_none().0,
rect.to_glib_none_mut().0,
));
if ret {
Some(rect)
} else {
None
}
}
}
fn bounding_box_center(&self) -> Option<(f64, f64)>
|
fn device(&self) -> Option<gdk::Device> {
unsafe { from_glib_none(ffi::gtk_gesture_get_device(self.as_ref().to_glib_none().0)) }
}
fn group(&self) -> Vec<Gesture> {
unsafe {
FromGlibPtrContainer::from_glib_container(ffi::gtk_gesture_get_group(
self.as_ref().to_glib_none().0,
))
}
}
fn last_event(&self, sequence: Option<&gdk::EventSequence>) -> Option<gdk::Event> {
unsafe {
from_glib_none(ffi::gtk_gesture_get_last_event(
self.as_ref().to_glib_none().0,
mut_override(sequence.to_glib_none().0),
))
}
}
fn last_updated_sequence(&self) -> Option<gdk::EventSequence> {
unsafe {
from_glib_none(ffi::gtk_gesture_get_last_updated_sequence(
self.as_ref().to_glib_none().0,
))
}
}
fn point(&self, sequence: Option<&gdk::EventSequence>) -> Option<(f64, f64)> {
unsafe {
let mut x = mem::MaybeUninit::uninit();
let mut y = mem::MaybeUninit::uninit();
let ret = from_glib(ffi::gtk_gesture_get_point(
self.as_ref().to_glib_none().0,
mut_override(sequence.to_glib_none().0),
x.as_mut_ptr(),
y.as_mut_ptr(),
));
let x = x.assume_init();
let y = y.assume_init();
if ret {
Some((x, y))
} else {
None
}
}
}
fn sequence_state(&self, sequence: &gdk::EventSequence) -> EventSequenceState {
unsafe {
from_glib(ffi::gtk_gesture_get_sequence_state(
self.as_ref().to_glib_none().0,
mut_override(sequence.to_glib_none().0),
))
}
}
fn sequences(&self) -> Vec<gdk::EventSequence> {
unsafe {
FromGlibPtrContainer::from_glib_container(ffi::gtk_gesture_get_sequences(
self.as_ref().to_glib_none().0,
))
}
}
fn group_with<P: IsA<Gesture>>(&self, gesture: &P) {
unsafe {
ffi::gtk_gesture_group(
self.as_ref().to_glib_none().0,
gesture.as_ref().to_glib_none().0,
);
}
}
fn handles_sequence(&self, sequence: Option<&gdk::EventSequence>) -> bool {
unsafe {
from_glib(ffi::gtk_gesture_handles_sequence(
self.as_ref().to_glib_none().0,
mut_override(sequence.to_glib_none().0),
))
}
}
fn is_active(&self) -> bool {
unsafe { from_glib(ffi::gtk_gesture_is_active(self.as_ref().to_glib_none().0)) }
}
fn is_grouped_with<P: IsA<Gesture>>(&self, other: &P) -> bool {
unsafe {
from_glib(ffi::gtk_gesture_is_grouped_with(
self.as_ref().to_glib_none().0,
other.as_ref().to_glib_none().0,
))
}
}
fn is_recognized(&self) -> bool {
unsafe {
from_glib(ffi::gtk_gesture_is_recognized(
self.as_ref().to_glib_none().0,
))
}
}
fn set_sequence_state(&self, sequence: &gdk::EventSequence, state: EventSequenceState) -> bool {
unsafe {
from_glib(ffi::gtk_gesture_set_sequence_state(
self.as_ref().to_glib_none().0,
mut_override(sequence.to_glib_none().0),
state.into_glib(),
))
}
}
fn set_state(&self, state: EventSequenceState) -> bool {
unsafe {
from_glib(ffi::gtk_gesture_set_state(
self.as_ref().to_glib_none().0,
state.into_glib(),
))
}
}
fn ungroup(&self) {
unsafe {
ffi::gtk_gesture_ungroup(self.as_ref().to_glib_none().0);
}
}
fn n_points(&self) -> u32 {
unsafe {
let mut value = glib::Value::from_type(<u32 as StaticType>::static_type());
glib::gobject_ffi::g_object_get_property(
self.to_glib_none().0 as *mut glib::gobject_ffi::GObject,
b"n-points\0".as_ptr() as *const _,
value.to_glib_none_mut().0,
);
value
.get()
.expect("Return Value for property `n-points` getter")
}
}
#[doc(alias = "begin")]
fn connect_begin<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn begin_trampoline<P, F: Fn(&P, Option<&gdk::EventSequence>) + 'static>(
this: *mut ffi::GtkGesture,
sequence: *mut gdk::ffi::GdkEventSequence,
f: glib::ffi::gpointer,
) where
P: IsA<Gesture>,
{
let f: &F = &*(f as *const F);
f(
&Gesture::from_glib_borrow(this).unsafe_cast_ref(),
Option::<gdk::EventSequence>::from_glib_borrow(sequence)
.as_ref()
.as_ref(),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"begin\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
begin_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
#[doc(alias = "cancel")]
fn connect_cancel<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn cancel_trampoline<
P,
F: Fn(&P, Option<&gdk::EventSequence>) + 'static,
>(
this: *mut ffi::GtkGesture,
sequence: *mut gdk::ffi::GdkEventSequence,
f: glib::ffi::gpointer,
) where
P: IsA<Gesture>,
{
let f: &F = &*(f as *const F);
f(
&Gesture::from_glib_borrow(this).unsafe_cast_ref(),
Option::<gdk::EventSequence>::from_glib_borrow(sequence)
.as_ref()
.as_ref(),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"cancel\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
cancel_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
#[doc(alias = "end")]
fn connect_end<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn end_trampoline<P, F: Fn(&P, Option<&gdk::EventSequence>) + 'static>(
this: *mut ffi::GtkGesture,
sequence: *mut gdk::ffi::GdkEventSequence,
f: glib::ffi::gpointer,
) where
P: IsA<Gesture>,
{
let f: &F = &*(f as *const F);
f(
&Gesture::from_glib_borrow(this).unsafe_cast_ref(),
Option::<gdk::EventSequence>::from_glib_borrow(sequence)
.as_ref()
.as_ref(),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"end\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
end_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
#[doc(alias = "sequence-state-changed")]
fn connect_sequence_state_changed<
F: Fn(&Self, Option<&gdk::EventSequence>, EventSequenceState) + 'static,
>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn sequence_state_changed_trampoline<
P,
F: Fn(&P, Option<&gdk::EventSequence>, EventSequenceState) + 'static,
>(
this: *mut ffi::GtkGesture,
sequence: *mut gdk::ffi::GdkEventSequence,
state: ffi::GtkEventSequenceState,
f: glib::ffi::gpointer,
) where
P: IsA<Gesture>,
{
let f: &F = &*(f as *const F);
f(
&Gesture::from_glib_borrow(this).unsafe_cast_ref(),
Option::<gdk::EventSequence>::from_glib_borrow(sequence)
.as_ref()
.as_ref(),
from_glib(state),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"sequence-state-changed\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
sequence_state_changed_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
#[doc(alias = "update")]
fn connect_update<F: Fn(&Self, Option<&gdk::EventSequence>) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn update_trampoline<
P,
F: Fn(&P, Option<&gdk::EventSequence>) + 'static,
>(
this: *mut ffi::GtkGesture,
sequence: *mut gdk::ffi::GdkEventSequence,
f: glib::ffi::gpointer,
) where
P: IsA<Gesture>,
{
let f: &F = &*(f as *const F);
f(
&Gesture::from_glib_borrow(this).unsafe_cast_ref(),
Option::<gdk::EventSequence>::from_glib_borrow(sequence)
.as_ref()
.as_ref(),
)
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"update\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
update_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl fmt::Display for Gesture {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Generated impl: display is just the type name.
        f.write_str("Gesture")
    }
}
|
{
unsafe {
let mut x = mem::MaybeUninit::uninit();
let mut y = mem::MaybeUninit::uninit();
let ret = from_glib(ffi::gtk_gesture_get_bounding_box_center(
self.as_ref().to_glib_none().0,
x.as_mut_ptr(),
y.as_mut_ptr(),
));
let x = x.assume_init();
let y = y.assume_init();
if ret {
Some((x, y))
} else {
None
}
}
}
|
RemovePropertyAction.ts
|
import {
Action,
ActionParameters,
Entity,
ActionType,
BroadcastType,
ProcessEffectGenerator
} from '../../internal.js';
export class
|
extends Action<Entity> {
actionType: ActionType = ActionType.REMOVE_PROPERTY_ACTION;
broadcastType = BroadcastType.HAS_SENSE_OF_ENTITY;
target: Entity;
name: string;
constructor({ caster, target, name, using, metadata }: RemovePropertyAction.Params) {
super({ caster, using, metadata });
this.target = target;
this.name = name;
}
async *apply(): ProcessEffectGenerator {
return this.target._removeProperty(this.name);
}
}
export namespace RemovePropertyAction {
  // Parameters shared by entity-scoped property-removal actions:
  // the name of the property to remove.
  export interface EntityParams extends ActionParameters<Entity> {
    name: string;
  }
  // Full constructor parameters: additionally names the target entity.
  export interface Params extends EntityParams {
    target: Entity;
  }
}
|
RemovePropertyAction
|
accountDetail.js
|
// data-cy selectors used by the account-detail Cypress scenarios below.
const CY_SELECTOR = {
  title: '[data-cy=title]',
  addDepositButton: '[data-cy=addDepositButton]',
  completeNumberButton: '[data-cy=completeNumber]',
  numberOne: '[data-cy=number_1]',
  numberZero: '[data-cy=number_0]',
  icoDotHorizontal: '[data-cy=icoDotHorizontal]'
};
// Navigates to the account-detail page under test (account id 37).
export const moveDetailPage = {
  action() {
    cy.visit('/accounts/37');
  },
  actionTest() {
    it('버킷리스트 상세 페이지 진입', () => this.action());
  }
};
export const checkInitRender = {
action() {
// 제목 체크
cy
.get(CY_SELECTOR.title)
.its('length')
.should('be.gt', 0);
// 기본 렌더링 체크
cy.contains('계좌정보');
cy.contains('입금 내역');
cy.contains('입금 내역');
cy.contains('개설일');
|
// 테스트 대상은 자유 적금
cy.contains('자유적금');
},
actionTest() {
it('예적금 상세 init 데이터 체크', () => this.action());
}
};
// Scenario: enter a 10-won deposit through the on-screen number pad and
// verify it shows up in the deposit history.
export const addDeposit = {
  action() {
    // Click the add-deposit button
    cy
      .get(CY_SELECTOR.addDepositButton)
      .click({ force: true });
    cy.wait(300);
    cy.contains('입금하실 금액을 입력해주세요.');
    cy.contains('0원');
    cy.get(CY_SELECTOR.completeNumberButton)
      .should('have.css', 'opacity', '0.5');
    // Tap digits to enter 10 won
    cy.get(CY_SELECTOR.numberOne)
      .click({ force: true });
    cy.get(CY_SELECTOR.numberZero)
      .click({ force: true });
    // Verify the entered deposit amount is displayed correctly
    cy.contains('10원');
    cy.contains('총 적금액 : 10원');
    cy.get(CY_SELECTOR.completeNumberButton)
      .should('have.css', 'opacity', '1');
    cy.get(CY_SELECTOR.completeNumberButton)
      .click();
    // Wait for the completion to be processed
    cy.wait(500);
    // Confirm the deposit amount appears
    cy.contains('10원');
  },
  actionTest() {
    it('예금 입력', () => this.action());
  }
};
// Scenario: open the bottom (dot) menu and verify its entries render.
export const checkBottomMenu = {
  action() {
    cy.get(CY_SELECTOR.icoDotHorizontal)
      .click({ force: true });
    cy.contains('작성하실 예/적금 종류를 선택해주세요.');
    cy.contains('이전 입금내역 추가');
    cy.contains('만기');
    cy.contains('삭제');
  },
  actionTest() {
    it('하단 메뉴 렌더링 체크', () => this.action());
  }
}
|
cy.contains('만기일');
cy.contains('만기예상액');
|
alpm.rs
|
#![allow(clippy)]
#![allow(non_upper_case_globals)]
|
#![allow(non_snake_case)]
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
#![allow(non_camel_case_types)]
|
a.js
|
console.log('a');
var sum = 0;
console.time('add start');
for(var i = 0; i < 1000000000; i++) {
sum += i;
}
|
setTimeout(function() {
console.log('async a');
});
|
// Label must match the earlier console.time('add start') call;
// a mismatched label means no timer is found and no timing is reported.
console.timeEnd('add start');
|
index.d.ts
|
// Re-export the Iris implementation as the package's default export.
import Iris from './Iris';
export default Iris;
|
||
operations.rs
|
#![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::models;
/// Generated service client: endpoint + credential + pipeline.
#[derive(Clone)]
pub struct Client {
    // Base URL of the management endpoint requests are sent to.
    endpoint: String,
    // Credential used to obtain bearer tokens.
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    // OAuth scopes requested for the access token.
    scopes: Vec<String>,
    // Pipeline through which every request is sent.
    pipeline: azure_core::Pipeline,
}
/// Builder for `Client`; only the credential is mandatory.
#[derive(Clone)]
pub struct ClientBuilder {
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    // `None` means "use DEFAULT_ENDPOINT" at build time.
    endpoint: Option<String>,
    // `None` means "derive the scope from the endpoint" at build time.
    scopes: Option<Vec<String>>,
}
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
    /// Start a builder holding only the credential; endpoint and scopes
    /// are filled with defaults in `build`.
    pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
        Self {
            credential,
            endpoint: None,
            scopes: None,
        }
    }
    /// Override the service endpoint (default: `DEFAULT_ENDPOINT`).
    pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.endpoint = Some(endpoint.into());
        self
    }
    /// Override the token scopes (default: `"{endpoint}/"`).
    pub fn scopes(mut self, scopes: &[&str]) -> Self {
        self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
        self
    }
    /// Build the `Client`, applying defaults for any unset field.
    pub fn build(self) -> Client {
        let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
        let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
        Client::new(endpoint, self.credential, scopes)
    }
}
impl Client {
    // Accessors used by the generated operation builders.
    pub(crate) fn endpoint(&self) -> &str {
        self.endpoint.as_str()
    }
    pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
        self.credential.as_ref()
    }
    pub(crate) fn scopes(&self) -> Vec<&str> {
        self.scopes.iter().map(String::as_str).collect()
    }
    // Send a request through the client's pipeline.
    pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> azure_core::error::Result<azure_core::Response> {
        let mut context = azure_core::Context::default();
        let mut request = request.into();
        self.pipeline.send(&mut context, &mut request).await
    }
    /// Construct a client from an endpoint, credential and token scopes.
    pub fn new(
        endpoint: impl Into<String>,
        credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
        scopes: Vec<String>,
    ) -> Self {
        let endpoint = endpoint.into();
        let pipeline = azure_core::Pipeline::new(
            option_env!("CARGO_PKG_NAME"),
            option_env!("CARGO_PKG_VERSION"),
            azure_core::ClientOptions::default(),
            Vec::new(),
            Vec::new(),
        );
        Self {
            endpoint,
            credential,
            scopes,
            pipeline,
        }
    }
    // Entry points to the per-operation-group sub-clients; each clones
    // this client into the sub-client wrapper.
    pub fn agent_pools(&self) -> agent_pools::Client {
        agent_pools::Client(self.clone())
    }
    pub fn container_services(&self) -> container_services::Client {
        container_services::Client(self.clone())
    }
    pub fn managed_clusters(&self) -> managed_clusters::Client {
        managed_clusters::Client(self.clone())
    }
    pub fn open_shift_managed_clusters(&self) -> open_shift_managed_clusters::Client {
        open_shift_managed_clusters::Client(self.clone())
    }
    pub fn operations(&self) -> operations::Client {
        operations::Client(self.clone())
    }
}
// Aggregate error type: one transparent variant per generated operation,
// so `?` can unify errors from any client call in this module.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    OpenShiftManagedClusters_List(#[from] open_shift_managed_clusters::list::Error),
    #[error(transparent)]
    OpenShiftManagedClusters_ListByResourceGroup(#[from] open_shift_managed_clusters::list_by_resource_group::Error),
    #[error(transparent)]
    OpenShiftManagedClusters_Get(#[from] open_shift_managed_clusters::get::Error),
    #[error(transparent)]
    OpenShiftManagedClusters_CreateOrUpdate(#[from] open_shift_managed_clusters::create_or_update::Error),
    #[error(transparent)]
    OpenShiftManagedClusters_UpdateTags(#[from] open_shift_managed_clusters::update_tags::Error),
    #[error(transparent)]
    OpenShiftManagedClusters_Delete(#[from] open_shift_managed_clusters::delete::Error),
    #[error(transparent)]
    ContainerServices_List(#[from] container_services::list::Error),
    #[error(transparent)]
    ContainerServices_Get(#[from] container_services::get::Error),
    #[error(transparent)]
    ContainerServices_CreateOrUpdate(#[from] container_services::create_or_update::Error),
    #[error(transparent)]
    ContainerServices_Delete(#[from] container_services::delete::Error),
    #[error(transparent)]
    ContainerServices_ListByResourceGroup(#[from] container_services::list_by_resource_group::Error),
    #[error(transparent)]
    ContainerServices_ListOrchestrators(#[from] container_services::list_orchestrators::Error),
    #[error(transparent)]
    Operations_List(#[from] operations::list::Error),
    #[error(transparent)]
    ManagedClusters_List(#[from] managed_clusters::list::Error),
    #[error(transparent)]
    ManagedClusters_ListByResourceGroup(#[from] managed_clusters::list_by_resource_group::Error),
    #[error(transparent)]
    ManagedClusters_GetUpgradeProfile(#[from] managed_clusters::get_upgrade_profile::Error),
    #[error(transparent)]
    ManagedClusters_GetAccessProfile(#[from] managed_clusters::get_access_profile::Error),
    #[error(transparent)]
    ManagedClusters_ListClusterAdminCredentials(#[from] managed_clusters::list_cluster_admin_credentials::Error),
    #[error(transparent)]
    ManagedClusters_ListClusterUserCredentials(#[from] managed_clusters::list_cluster_user_credentials::Error),
    #[error(transparent)]
    ManagedClusters_Get(#[from] managed_clusters::get::Error),
    #[error(transparent)]
    ManagedClusters_CreateOrUpdate(#[from] managed_clusters::create_or_update::Error),
    #[error(transparent)]
    ManagedClusters_UpdateTags(#[from] managed_clusters::update_tags::Error),
    #[error(transparent)]
    ManagedClusters_Delete(#[from] managed_clusters::delete::Error),
    #[error(transparent)]
    AgentPools_List(#[from] agent_pools::list::Error),
    #[error(transparent)]
    AgentPools_Get(#[from] agent_pools::get::Error),
    #[error(transparent)]
    AgentPools_CreateOrUpdate(#[from] agent_pools::create_or_update::Error),
    #[error(transparent)]
    AgentPools_Delete(#[from] agent_pools::delete::Error),
    #[error(transparent)]
    AgentPools_GetUpgradeProfile(#[from] agent_pools::get_upgrade_profile::Error),
    #[error(transparent)]
    AgentPools_GetAvailableAgentPoolVersions(#[from] agent_pools::get_available_agent_pool_versions::Error),
    #[error(transparent)]
    ManagedClusters_ResetServicePrincipalProfile(#[from] managed_clusters::reset_service_principal_profile::Error),
    #[error(transparent)]
    ManagedClusters_ResetAadProfile(#[from] managed_clusters::reset_aad_profile::Error),
}
pub mod open_shift_managed_clusters {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
#[doc = "Gets a list of OpenShift managed clusters in the specified subscription."]
pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
list::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
}
}
#[doc = "Lists OpenShift managed clusters in the specified subscription and resource group."]
pub fn list_by_resource_group(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
) -> list_by_resource_group::Builder {
list_by_resource_group::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
}
}
#[doc = "Gets a OpenShift managed cluster."]
pub fn get(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
resource_name: impl Into<String>,
) -> get::Builder {
get::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
resource_name: resource_name.into(),
}
}
#[doc = "Creates or updates an OpenShift managed cluster."]
pub fn create_or_update(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
resource_name: impl Into<String>,
parameters: impl Into<models::OpenShiftManagedCluster>,
) -> create_or_update::Builder {
create_or_update::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
resource_name: resource_name.into(),
parameters: parameters.into(),
}
}
#[doc = "Updates tags on an OpenShift managed cluster."]
pub fn update_tags(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
resource_name: impl Into<String>,
parameters: impl Into<models::TagsObject>,
) -> update_tags::Builder {
update_tags::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
resource_name: resource_name.into(),
parameters: parameters.into(),
}
}
#[doc = "Deletes an OpenShift managed cluster."]
pub fn delete(
&self,
subscription_id: impl Into<String>,
resource_group_name: impl Into<String>,
resource_name: impl Into<String>,
) -> delete::Builder {
delete::Builder {
client: self.0.clone(),
subscription_id: subscription_id.into(),
resource_group_name: resource_group_name.into(),
resource_name: resource_name.into(),
}
}
}
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::OpenShiftManagedClusterListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.ContainerService/openShiftManagedClusters",
self.client.endpoint(),
&self.subscription_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-30");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OpenShiftManagedClusterListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
pub mod list_by_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::OpenShiftManagedClusterListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/openShiftManagedClusters",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-30");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OpenShiftManagedClusterListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
    /// `get` operation: fetch a single OpenShift managed cluster by name.
    pub mod get {
        use super::models;
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-200 status; the body parsed as a `CloudError`.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            // NOTE(review): part of the generated error surface even though this GET
            // sends an empty body and never serializes one.
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; populated by the owning client method, sent lazily
        /// when `into_future` is called.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
        }
        impl Builder {
            /// Issue the GET; a 200 body deserializes to `OpenShiftManagedCluster`,
            /// any other status becomes `Error::DefaultResponse`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OpenShiftManagedCluster, Error>> {
                Box::pin(async move {
                    // ARM resource-ID path of the target cluster.
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/openShiftManagedClusters/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-04-30");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::OpenShiftManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Failure payloads are documented to be `CloudError` JSON.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    /// `create_or_update` operation: PUT an OpenShift managed cluster definition.
    pub mod create_or_update {
        use super::models;
        /// Success statuses: 200 (updated) and 201 (created), each carrying the cluster.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::OpenShiftManagedCluster),
            Created201(models::OpenShiftManagedCluster),
        }
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-success status; body parsed as `CloudError`.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; `parameters` is the cluster definition sent as JSON.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
            pub(crate) parameters: models::OpenShiftManagedCluster,
        }
        impl Builder {
            /// Issue the PUT with a JSON body and map 200/201 onto [`Response`];
            /// any other status becomes `Error::DefaultResponse`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    // ARM resource-ID path of the target cluster.
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/openShiftManagedClusters/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PUT);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-04-30");
                    // JSON-encode the cluster definition as the request body.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::OpenShiftManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        http::StatusCode::CREATED => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::OpenShiftManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Created201(rsp_value))
                        }
                        status_code => {
                            // Failure payloads are documented to be `CloudError` JSON.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    /// `update_tags` operation: PATCH only the tags of an OpenShift managed cluster.
    pub mod update_tags {
        use super::models;
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// The service answered with a non-200 status; body parsed as `CloudError`.
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; `parameters` holds the replacement tag set.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
            pub(crate) parameters: models::TagsObject,
        }
        impl Builder {
            /// Issue the PATCH with a JSON `TagsObject` body; a 200 body deserializes
            /// to the updated `OpenShiftManagedCluster`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OpenShiftManagedCluster, Error>> {
                Box::pin(async move {
                    // ARM resource-ID path of the target cluster.
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/openShiftManagedClusters/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PATCH);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-04-30");
                    // JSON-encode the tags object as the request body.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::OpenShiftManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // Failure payloads are documented to be `CloudError` JSON.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/openShiftManagedClusters/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-04-30");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod container_services {
use super::models;
    /// Client for the `containerServices` resource operations. Each method only
    /// captures its parameters into a lazy builder; nothing is sent until the
    /// builder's `into_future` is invoked.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Gets a list of container services in the specified subscription."]
        pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Gets the properties of the specified container service."]
        pub fn get(
            &self,
            resource_group_name: impl Into<String>,
            container_service_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                container_service_name: container_service_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Creates or updates a container service."]
        pub fn create_or_update(
            &self,
            resource_group_name: impl Into<String>,
            container_service_name: impl Into<String>,
            parameters: impl Into<models::ContainerService>,
            subscription_id: impl Into<String>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                container_service_name: container_service_name.into(),
                parameters: parameters.into(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Deletes the specified container service."]
        pub fn delete(
            &self,
            resource_group_name: impl Into<String>,
            container_service_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                container_service_name: container_service_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Gets a list of container services in the specified resource group."]
        pub fn list_by_resource_group(
            &self,
            resource_group_name: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list_by_resource_group::Builder {
            list_by_resource_group::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Gets a list of supported orchestrators in the specified subscription."]
        pub fn list_orchestrators(&self, subscription_id: impl Into<String>, location: impl Into<String>) -> list_orchestrators::Builder {
            list_orchestrators::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                location: location.into(),
                // Optional filter; callers set it via `list_orchestrators::Builder::resource_type`.
                resource_type: None,
            }
        }
    }
    /// `list` operation: enumerate container services across a subscription.
    pub mod list {
        use super::models;
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-200 status; the raw body is returned unparsed.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; populated by `container_services::Client::list`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Issue the GET; a 200 body deserializes to `ContainerServiceListResult`.
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::ContainerServiceListResult, Error>> {
                Box::pin(async move {
                    // Subscription-scoped provider path (no resource group segment).
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.ContainerService/containerServices",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2017-07-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ContainerServiceListResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // No error model for this API version; hand back the raw bytes.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// `get` operation: fetch a single container service by name.
    pub mod get {
        use super::models;
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-200 status; the raw body is returned unparsed.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; populated by `container_services::Client::get`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) container_service_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Issue the GET; a 200 body deserializes to `ContainerService`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ContainerService, Error>> {
                Box::pin(async move {
                    // ARM resource-ID path of the target container service.
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/containerServices/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.container_service_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2017-07-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ContainerService =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // No error model for this API version; hand back the raw bytes.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// `create_or_update` operation: PUT a container service definition.
    pub mod create_or_update {
        use super::models;
        /// Success statuses: 200 (updated), 201 (created), 202 (accepted, async),
        /// each carrying the container service payload.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ContainerService),
            Created201(models::ContainerService),
            Accepted202(models::ContainerService),
        }
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-success status; the raw body is returned unparsed.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; `parameters` is the service definition sent as JSON.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) container_service_name: String,
            pub(crate) parameters: models::ContainerService,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Issue the PUT with a JSON body and map 200/201/202 onto [`Response`].
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    // ARM resource-ID path of the target container service.
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/containerServices/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.container_service_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PUT);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2017-07-01");
                    // JSON-encode the container service definition as the request body.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ContainerService =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        http::StatusCode::CREATED => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ContainerService =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Created201(rsp_value))
                        }
                        http::StatusCode::ACCEPTED => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ContainerService =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Accepted202(rsp_value))
                        }
                        status_code => {
                            // No error model for this API version; hand back the raw bytes.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) container_service_name: String,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/containerServices/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.container_service_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2017-07-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
    /// `list_by_resource_group` operation: enumerate container services in one resource group.
    pub mod list_by_resource_group {
        use super::models;
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-200 status; the raw body is returned unparsed.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; populated by `container_services::Client::list_by_resource_group`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_group_name: String,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Issue the GET; a 200 body deserializes to `ContainerServiceListResult`.
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::ContainerServiceListResult, Error>> {
                Box::pin(async move {
                    // Resource-group-scoped provider path (no service name segment).
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/containerServices",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2017-07-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ContainerServiceListResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // No error model for this API version; hand back the raw bytes.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
    /// `list_orchestrators` operation: enumerate supported orchestrators for a location.
    pub mod list_orchestrators {
        use super::models;
        /// Errors surfaced by [`Builder::into_future`].
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            /// Non-200 status; the raw body is returned unparsed.
            #[error("Unexpected HTTP status code {}", status_code)]
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Request parameters; `resource_type` is an optional filter set via the
        /// fluent [`Builder::resource_type`] method.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) location: String,
            pub(crate) resource_type: Option<String>,
        }
        impl Builder {
            /// Set the optional `resource-type` query filter.
            pub fn resource_type(mut self, resource_type: impl Into<String>) -> Self {
                self.resource_type = Some(resource_type.into());
                self
            }
            /// Issue the GET; a 200 body deserializes to `OrchestratorVersionProfileListResult`.
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::OrchestratorVersionProfileListResult, Error>> {
                Box::pin(async move {
                    // Location-scoped provider path.
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.ContainerService/locations/{}/orchestrators",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.location
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer token obtained from the client's credential for its configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    // Only sent when the caller supplied a filter.
                    if let Some(resource_type) = &self.resource_type {
                        url.query_pairs_mut().append_pair("resource-type", resource_type);
                    }
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    // Response headers are bound but not used by this generated code.
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::OrchestratorVersionProfileListResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            // No error model for this API version; hand back the raw bytes.
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            Err(Error::UnexpectedResponse {
                                status_code,
                                body: rsp_body,
                            })
                        }
                    }
                })
            }
        }
    }
}
pub mod operations {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::OperationListResult, Error>> {
Box::pin(async move {
let url_str = &format!("{}/providers/Microsoft.ContainerService/operations", self.client.endpoint(),);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::OperationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod managed_clusters {
use super::models;
pub struct Client(pub(crate) super::Client);
    impl Client {
        // Each method returns a lazily-evaluated builder; nothing is sent until the
        // caller awaits the builder's `into_future()`.
        #[doc = "Gets a list of managed clusters in the specified subscription."]
        pub fn list(&self, subscription_id: impl Into<String>) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Lists managed clusters in the specified subscription and resource group."]
        pub fn list_by_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_by_resource_group::Builder {
            list_by_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Gets upgrade profile for a managed cluster."]
        pub fn get_upgrade_profile(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> get_upgrade_profile::Builder {
            get_upgrade_profile::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
        #[doc = "Gets an access profile of a managed cluster."]
        pub fn get_access_profile(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            role_name: impl Into<String>,
        ) -> get_access_profile::Builder {
            get_access_profile::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                role_name: role_name.into(),
            }
        }
        #[doc = "Gets cluster admin credential of a managed cluster."]
        pub fn list_cluster_admin_credentials(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> list_cluster_admin_credentials::Builder {
            list_cluster_admin_credentials::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
        #[doc = "Gets cluster user credential of a managed cluster."]
        pub fn list_cluster_user_credentials(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> list_cluster_user_credentials::Builder {
            list_cluster_user_credentials::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
        #[doc = "Gets a managed cluster."]
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
        #[doc = "Creates or updates a managed cluster."]
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            parameters: impl Into<models::ManagedCluster>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Updates tags on a managed cluster."]
        pub fn update_tags(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            parameters: impl Into<models::TagsObject>,
        ) -> update_tags::Builder {
            update_tags::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Deletes a managed cluster."]
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
        #[doc = "Reset Service Principal Profile of a managed cluster."]
        pub fn reset_service_principal_profile(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            parameters: impl Into<models::ManagedClusterServicePrincipalProfile>,
        ) -> reset_service_principal_profile::Builder {
            reset_service_principal_profile::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Reset AAD Profile of a managed cluster."]
        pub fn reset_aad_profile(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            parameters: impl Into<models::ManagedClusterAadProfile>,
        ) -> reset_aad_profile::Builder {
            reset_aad_profile::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                parameters: parameters.into(),
            }
        }
    }
    pub mod list {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
        }
        impl Builder {
            /// Sends `GET .../subscriptions/{id}/providers/Microsoft.ContainerService/managedClusters`
            /// and deserializes the response into a `ManagedClusterListResult`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagedClusterListResult, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.ContainerService/managedClusters",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()))
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedClusterListResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod list_by_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagedClusterListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK =>
|
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
    pub mod get_upgrade_profile {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
        }
        impl Builder {
            /// Sends `GET .../managedClusters/{name}/upgradeProfiles/default` and
            /// deserializes the response into a `ManagedClusterUpgradeProfile`.
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagedClusterUpgradeProfile, Error>> {
                Box::pin(async move {
                    let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/upgradeProfiles/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name) ;
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedClusterUpgradeProfile =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod get_access_profile {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
            pub(crate) role_name: String,
        }
        impl Builder {
            /// Sends `POST .../managedClusters/{name}/accessProfiles/{role}/listCredential`
            /// and deserializes the response into a `ManagedClusterAccessProfile`.
            pub fn into_future(
                self,
            ) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagedClusterAccessProfile, Error>> {
                Box::pin(async move {
                    let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/accessProfiles/{}/listCredential" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name , & self . role_name) ;
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    let req_body = azure_core::EMPTY_BODY;
                    // POST with an empty body requires an explicit Content-Length of 0.
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedClusterAccessProfile =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod list_cluster_admin_credentials {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
        }
        impl Builder {
            /// Sends `POST .../managedClusters/{name}/listClusterAdminCredential`
            /// and deserializes the response into `CredentialResults`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CredentialResults, Error>> {
                Box::pin(async move {
                    let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/listClusterAdminCredential" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name) ;
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    let req_body = azure_core::EMPTY_BODY;
                    // POST with an empty body requires an explicit Content-Length of 0.
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CredentialResults =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod list_cluster_user_credentials {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
        }
        impl Builder {
            /// Sends `POST .../managedClusters/{name}/listClusterUserCredential`
            /// and deserializes the response into `CredentialResults`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CredentialResults, Error>> {
                Box::pin(async move {
                    let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/listClusterUserCredential" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name) ;
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    let req_body = azure_core::EMPTY_BODY;
                    // POST with an empty body requires an explicit Content-Length of 0.
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CredentialResults =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod get {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
        }
        impl Builder {
            /// Sends `GET .../managedClusters/{name}` and deserializes the response
            /// into a `ManagedCluster`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagedCluster, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod create_or_update {
        use super::models;
        // PUT may answer 200 (updated) or 201 (created); both carry a ManagedCluster body.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::ManagedCluster),
            Created201(models::ManagedCluster),
        }
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
            pub(crate) parameters: models::ManagedCluster,
        }
        impl Builder {
            /// Sends `PUT .../managedClusters/{name}` with the JSON-serialized
            /// `ManagedCluster` parameters.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PUT);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        http::StatusCode::CREATED => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Created201(rsp_value))
                        }
                        // Any other status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    pub mod update_tags {
        use super::models;
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::CloudError,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) resource_name: String,
            pub(crate) parameters: models::TagsObject,
        }
        impl Builder {
            /// Sends `PATCH .../managedClusters/{name}` with the JSON-serialized
            /// `TagsObject` and deserializes the updated `ManagedCluster`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::ManagedCluster, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name,
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::PATCH);
                    // Bearer-token authentication against the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-06-01");
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ManagedCluster =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        // Any non-200 status carries a CloudError payload.
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CloudError =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod reset_service_principal_profile {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
pub(crate) parameters: models::ManagedClusterServicePrincipalProfile,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/resetServicePrincipalProfile" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod reset_aad_profile {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
Accepted202,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
pub(crate) parameters: models::ManagedClusterAadProfile,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/resetAADProfile",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod agent_pools {
use super::models;
    // Operation-group client for the agentPools operations. Wraps the service
    // client; each method returns a request builder whose `into_future()` sends
    // the call (builder pattern, one submodule per operation below).
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Gets a list of agent pools in the specified managed cluster."]
        pub fn list(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
        #[doc = "Gets the agent pool."]
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            agent_pool_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                agent_pool_name: agent_pool_name.into(),
            }
        }
        #[doc = "Creates or updates an agent pool."]
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            agent_pool_name: impl Into<String>,
            parameters: impl Into<models::AgentPool>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                agent_pool_name: agent_pool_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Deletes an agent pool."]
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            agent_pool_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                agent_pool_name: agent_pool_name.into(),
            }
        }
        #[doc = "Gets upgrade profile for an agent pool."]
        pub fn get_upgrade_profile(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
            agent_pool_name: impl Into<String>,
        ) -> get_upgrade_profile::Builder {
            get_upgrade_profile::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
                agent_pool_name: agent_pool_name.into(),
            }
        }
        #[doc = "Gets a list of supported versions for the specified agent pool."]
        pub fn get_available_agent_pool_versions(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            resource_name: impl Into<String>,
        ) -> get_available_agent_pool_versions::Builder {
            get_available_agent_pool_versions::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                resource_name: resource_name.into(),
            }
        }
    }
pub mod list {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AgentPoolListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/agentPools",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AgentPoolListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
pub(crate) agent_pool_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AgentPool, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/agentPools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name,
&self.agent_pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AgentPool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::AgentPool),
Created201(models::AgentPool),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
pub(crate) agent_pool_name: String,
pub(crate) parameters: models::AgentPool,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/agentPools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name,
&self.agent_pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AgentPool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AgentPool =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete {
use super::models;
#[derive(Debug)]
pub enum Response {
Accepted202,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
pub(crate) agent_pool_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/agentPools/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.resource_name,
&self.agent_pool_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::ACCEPTED => Ok(Response::Accepted202),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_upgrade_profile {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::CloudError,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
pub(crate) agent_pool_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AgentPoolUpgradeProfile, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/agentPools/{}/upgradeProfiles/default" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name , & self . agent_pool_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AgentPoolUpgradeProfile =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::CloudError =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_available_agent_pool_versions {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("Unexpected HTTP status code {}", status_code)]
UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) resource_name: String,
}
impl Builder {
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::AgentPoolAvailableVersions, Error>> {
Box::pin(async move {
let url_str = & format ! ("{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.ContainerService/managedClusters/{}/availableAgentPoolVersions" , self . client . endpoint () , & self . subscription_id , & self . resource_group_name , & self . resource_name) ;
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2019-06-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AgentPoolAvailableVersions =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
Err(Error::UnexpectedResponse {
status_code,
body: rsp_body,
})
}
}
})
}
}
}
}
|
{
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ManagedClusterListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
|
key.go
|
// Copyright 2018 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
// Package private includes all internal routes. The package name internal is ideal but Golang is not allowed, so we use private as package name instead.
package private
import (
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/util"
macaron "gopkg.in/macaron.v1"
)
// UpdatePublicKeyInRepo update public key and deploy key updates
func
|
(ctx *macaron.Context) {
keyID := ctx.ParamsInt64(":id")
repoID := ctx.ParamsInt64(":repoid")
if err := models.UpdatePublicKeyUpdated(keyID); err != nil {
ctx.JSON(500, map[string]interface{}{
"err": err.Error(),
})
return
}
deployKey, err := models.GetDeployKeyByRepo(keyID, repoID)
if err != nil {
if models.IsErrDeployKeyNotExist(err) {
ctx.PlainText(200, []byte("success"))
return
}
ctx.JSON(500, map[string]interface{}{
"err": err.Error(),
})
return
}
deployKey.UpdatedUnix = util.TimeStampNow()
if err = models.UpdateDeployKeyCols(deployKey, "updated_unix"); err != nil {
ctx.JSON(500, map[string]interface{}{
"err": err.Error(),
})
return
}
ctx.PlainText(200, []byte("success"))
}
|
UpdatePublicKeyInRepo
|
validate_test.go
|
package config
import (
"testing"
"github.com/openshift/oc-mirror/pkg/api/v1alpha2"
"github.com/stretchr/testify/require"
)
|
type spec struct {
name string
config *v1alpha2.ImageSetConfiguration
expError string
}
cases := []spec{
{
name: "Valid/HeadsOnlyFalse",
config: &v1alpha2.ImageSetConfiguration{
ImageSetConfigurationSpec: v1alpha2.ImageSetConfigurationSpec{
Mirror: v1alpha2.Mirror{
Operators: []v1alpha2.Operator{
{
Catalog: "test-catalog",
IncludeConfig: v1alpha2.IncludeConfig{
Packages: []v1alpha2.IncludePackage{{Name: "foo"}},
},
Full: true,
},
},
},
},
},
expError: "",
},
{
name: "Valid/NoIncludePackages",
config: &v1alpha2.ImageSetConfiguration{
ImageSetConfigurationSpec: v1alpha2.ImageSetConfigurationSpec{
Mirror: v1alpha2.Mirror{
Operators: []v1alpha2.Operator{
{
Catalog: "test-catalog",
IncludeConfig: v1alpha2.IncludeConfig{},
Full: false,
},
},
},
},
},
},
{
name: "Valid/HeadsOnlyFalse",
config: &v1alpha2.ImageSetConfiguration{
ImageSetConfigurationSpec: v1alpha2.ImageSetConfigurationSpec{
Mirror: v1alpha2.Mirror{
Operators: []v1alpha2.Operator{
{
Catalog: "test-catalog",
IncludeConfig: v1alpha2.IncludeConfig{
Packages: []v1alpha2.IncludePackage{{Name: "foo"}},
},
Full: true,
},
},
},
},
},
},
{
name: "Valid/UniqueReleaseChannels",
config: &v1alpha2.ImageSetConfiguration{
ImageSetConfigurationSpec: v1alpha2.ImageSetConfigurationSpec{
Mirror: v1alpha2.Mirror{
OCP: v1alpha2.OCP{
Channels: []v1alpha2.ReleaseChannel{
{
Name: "channel1",
},
{
Name: "channel2",
},
},
},
},
},
},
},
{
name: "Invalid/HeadsOnlyTrue",
config: &v1alpha2.ImageSetConfiguration{
ImageSetConfigurationSpec: v1alpha2.ImageSetConfigurationSpec{
Mirror: v1alpha2.Mirror{
Operators: []v1alpha2.Operator{
{
Catalog: "test-catalog",
IncludeConfig: v1alpha2.IncludeConfig{
Packages: []v1alpha2.IncludePackage{{Name: "foo"}},
},
Full: false,
},
},
},
},
},
expError: "invalid configuration: catalog \"test-catalog\": cannot define packages with full key set to false",
},
{
name: "Invalid/DuplicateChannels",
config: &v1alpha2.ImageSetConfiguration{
ImageSetConfigurationSpec: v1alpha2.ImageSetConfigurationSpec{
Mirror: v1alpha2.Mirror{
OCP: v1alpha2.OCP{
Channels: []v1alpha2.ReleaseChannel{
{
Name: "channel",
},
{
Name: "channel",
},
},
},
},
},
},
expError: "invalid configuration: release channel \"channel\": duplicate found in configuration",
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
err := Validate(c.config)
if c.expError != "" {
require.EqualError(t, err, c.expError)
} else {
require.NoError(t, err)
}
})
}
}
|
func TestValidate(t *testing.T) {
|
test_ssl.py
|
# Test the support for SSL and sockets
import sys
import unittest
from test import support
import socket
import select
import time
import datetime
import gc
import os
import errno
import pprint
import tempfile
import urllib.request
import traceback
import asyncore
import weakref
import platform
import functools
from unittest import mock
ssl = support.import_module("ssl")
PROTOCOLS = sorted(ssl._PROTOCOL_NAMES)
HOST = support.HOST
def data_file(*name):
return os.path.join(os.path.dirname(__file__), *name)
# The custom key and certificate files used in test_ssl are generated
# using Lib/test/make_ssl_certs.py.
# Other certificates are simply fetched from the Internet servers they
# are meant to authenticate.
CERTFILE = data_file("keycert.pem")
BYTES_CERTFILE = os.fsencode(CERTFILE)
ONLYCERT = data_file("ssl_cert.pem")
ONLYKEY = data_file("ssl_key.pem")
BYTES_ONLYCERT = os.fsencode(ONLYCERT)
BYTES_ONLYKEY = os.fsencode(ONLYKEY)
CERTFILE_PROTECTED = data_file("keycert.passwd.pem")
ONLYKEY_PROTECTED = data_file("ssl_key.passwd.pem")
KEY_PASSWORD = "somepass"
CAPATH = data_file("capath")
BYTES_CAPATH = os.fsencode(CAPATH)
CAFILE_NEURONIO = data_file("capath", "4e1295a3.0")
CAFILE_CACERT = data_file("capath", "5ed36f99.0")
# empty CRL
CRLFILE = data_file("revocation.crl")
# Two keys and certs signed by the same CA (for SNI tests)
SIGNED_CERTFILE = data_file("keycert3.pem")
SIGNED_CERTFILE2 = data_file("keycert4.pem")
SIGNING_CA = data_file("pycacert.pem")
SVN_PYTHON_ORG_ROOT_CERT = data_file("https_svn_python_org_root.pem")
EMPTYCERT = data_file("nullcert.pem")
BADCERT = data_file("badcert.pem")
WRONGCERT = data_file("XXXnonexisting.pem")
BADKEY = data_file("badkey.pem")
NOKIACERT = data_file("nokia.pem")
NULLBYTECERT = data_file("nullbytecert.pem")
DHFILE = data_file("dh512.pem")
BYTES_DHFILE = os.fsencode(DHFILE)
def handle_error(prefix):
exc_format = ' '.join(traceback.format_exception(*sys.exc_info()))
if support.verbose:
sys.stdout.write(prefix + exc_format)
def can_clear_options():
# 0.9.8m or higher
return ssl._OPENSSL_API_VERSION >= (0, 9, 8, 13, 15)
def no_sslv2_implies_sslv3_hello():
# 0.9.7h or higher
return ssl.OPENSSL_VERSION_INFO >= (0, 9, 7, 8, 15)
def have_verify_flags():
# 0.9.8 or higher
return ssl.OPENSSL_VERSION_INFO >= (0, 9, 8, 0, 15)
def utc_offset(): #NOTE: ignore issues like #1647654
# local time = utc time + utc offset
if time.daylight and time.localtime().tm_isdst > 0:
return -time.altzone # seconds
return -time.timezone
def asn1time(cert_time):
# Some versions of OpenSSL ignore seconds, see #18207
# 0.9.8.i
if ssl._OPENSSL_API_VERSION == (0, 9, 8, 9, 15):
fmt = "%b %d %H:%M:%S %Y GMT"
dt = datetime.datetime.strptime(cert_time, fmt)
dt = dt.replace(second=0)
cert_time = dt.strftime(fmt)
# %d adds leading zero but ASN1_TIME_print() uses leading space
if cert_time[4] == "0":
cert_time = cert_time[:4] + " " + cert_time[5:]
return cert_time
# Issue #9415: Ubuntu hijacks their OpenSSL and forcefully disables SSLv2
def skip_if_broken_ubuntu_ssl(func):
if hasattr(ssl, 'PROTOCOL_SSLv2'):
@functools.wraps(func)
def f(*args, **kwargs):
try:
ssl.SSLContext(ssl.PROTOCOL_SSLv2)
except ssl.SSLError:
if (ssl.OPENSSL_VERSION_INFO == (0, 9, 8, 15, 15) and
platform.linux_distribution() == ('debian', 'squeeze/sid', '')):
raise unittest.SkipTest("Patched Ubuntu OpenSSL breaks behaviour")
return func(*args, **kwargs)
return f
else:
return func
needs_sni = unittest.skipUnless(ssl.HAS_SNI, "SNI support needed for this test")
class BasicSocketTests(unittest.TestCase):
def test_constants(self):
ssl.CERT_NONE
ssl.CERT_OPTIONAL
ssl.CERT_REQUIRED
ssl.OP_CIPHER_SERVER_PREFERENCE
ssl.OP_SINGLE_DH_USE
if ssl.HAS_ECDH:
ssl.OP_SINGLE_ECDH_USE
if ssl.OPENSSL_VERSION_INFO >= (1, 0):
ssl.OP_NO_COMPRESSION
self.assertIn(ssl.HAS_SNI, {True, False})
self.assertIn(ssl.HAS_ECDH, {True, False})
def test_str_for_enums(self):
# Make sure that the PROTOCOL_* constants have enum-like string
# reprs.
proto = ssl.PROTOCOL_SSLv3
self.assertEqual(str(proto), '_SSLMethod.PROTOCOL_SSLv3')
ctx = ssl.SSLContext(proto)
self.assertIs(ctx.protocol, proto)
def test_random(self):
v = ssl.RAND_status()
if support.verbose:
sys.stdout.write("\n RAND_status is %d (%s)\n"
% (v, (v and "sufficient randomness") or
"insufficient randomness"))
data, is_cryptographic = ssl.RAND_pseudo_bytes(16)
self.assertEqual(len(data), 16)
self.assertEqual(is_cryptographic, v == 1)
if v:
data = ssl.RAND_bytes(16)
self.assertEqual(len(data), 16)
else:
self.assertRaises(ssl.SSLError, ssl.RAND_bytes, 16)
# negative num is invalid
self.assertRaises(ValueError, ssl.RAND_bytes, -5)
self.assertRaises(ValueError, ssl.RAND_pseudo_bytes, -5)
self.assertRaises(TypeError, ssl.RAND_egd, 1)
self.assertRaises(TypeError, ssl.RAND_egd, 'foo', 1)
ssl.RAND_add("this is a random string", 75.0)
@unittest.skipUnless(os.name == 'posix', 'requires posix')
def test_random_fork(self):
status = ssl.RAND_status()
if not status:
self.fail("OpenSSL's PRNG has insufficient randomness")
rfd, wfd = os.pipe()
pid = os.fork()
if pid == 0:
try:
os.close(rfd)
child_random = ssl.RAND_pseudo_bytes(16)[0]
self.assertEqual(len(child_random), 16)
os.write(wfd, child_random)
os.close(wfd)
except BaseException:
os._exit(1)
else:
os._exit(0)
else:
os.close(wfd)
self.addCleanup(os.close, rfd)
_, status = os.waitpid(pid, 0)
self.assertEqual(status, 0)
child_random = os.read(rfd, 16)
self.assertEqual(len(child_random), 16)
parent_random = ssl.RAND_pseudo_bytes(16)[0]
self.assertEqual(len(parent_random), 16)
self.assertNotEqual(child_random, parent_random)
def test_parse_cert(self):
# note that this uses an 'unofficial' function in _ssl.c,
# provided solely for this test, to exercise the certificate
# parsing code
p = ssl._ssl._test_decode_cert(CERTFILE)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['issuer'],
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),))
)
# Note the next three asserts will fail if the keys are regenerated
self.assertEqual(p['notAfter'], asn1time('Oct 5 23:01:56 2020 GMT'))
self.assertEqual(p['notBefore'], asn1time('Oct 8 23:01:56 2010 GMT'))
self.assertEqual(p['serialNumber'], 'D7C7381919AFC24E')
self.assertEqual(p['subject'],
((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),))
)
self.assertEqual(p['subjectAltName'], (('DNS', 'localhost'),))
# Issue #13034: the subjectAltName in some certificates
# (notably projects.developer.nokia.com:443) wasn't parsed
p = ssl._ssl._test_decode_cert(NOKIACERT)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
self.assertEqual(p['subjectAltName'],
(('DNS', 'projects.developer.nokia.com'),
('DNS', 'projects.forum.nokia.com'))
)
# extra OCSP and AIA fields
self.assertEqual(p['OCSP'], ('http://ocsp.verisign.com',))
self.assertEqual(p['caIssuers'],
('http://SVRIntl-G3-aia.verisign.com/SVRIntlG3.cer',))
self.assertEqual(p['crlDistributionPoints'],
('http://SVRIntl-G3-crl.verisign.com/SVRIntlG3.crl',))
def test_parse_cert_CVE_2013_4238(self):
p = ssl._ssl._test_decode_cert(NULLBYTECERT)
if support.verbose:
sys.stdout.write("\n" + pprint.pformat(p) + "\n")
subject = ((('countryName', 'US'),),
(('stateOrProvinceName', 'Oregon'),),
(('localityName', 'Beaverton'),),
(('organizationName', 'Python Software Foundation'),),
(('organizationalUnitName', 'Python Core Development'),),
(('commonName', 'null.python.org\x00example.org'),),
(('emailAddress', '[email protected]'),))
self.assertEqual(p['subject'], subject)
self.assertEqual(p['issuer'], subject)
if ssl._OPENSSL_API_VERSION >= (0, 9, 8):
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', '[email protected]\[email protected]'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '2001:DB8:0:0:0:0:0:1\n'))
else:
# OpenSSL 0.9.7 doesn't support IPv6 addresses in subjectAltName
san = (('DNS', 'altnull.python.org\x00example.com'),
('email', '[email protected]\[email protected]'),
('URI', 'http://null.python.org\x00http://example.org'),
('IP Address', '192.0.2.1'),
('IP Address', '<invalid>'))
self.assertEqual(p['subjectAltName'], san)
def test_DER_to_PEM(self):
with open(SVN_PYTHON_ORG_ROOT_CERT, 'r') as f:
pem = f.read()
d1 = ssl.PEM_cert_to_DER_cert(pem)
p2 = ssl.DER_cert_to_PEM_cert(d1)
d2 = ssl.PEM_cert_to_DER_cert(p2)
self.assertEqual(d1, d2)
if not p2.startswith(ssl.PEM_HEADER + '\n'):
self.fail("DER-to-PEM didn't include correct header:\n%r\n" % p2)
if not p2.endswith('\n' + ssl.PEM_FOOTER + '\n'):
self.fail("DER-to-PEM didn't include correct footer:\n%r\n" % p2)
def test_openssl_version(self):
n = ssl.OPENSSL_VERSION_NUMBER
t = ssl.OPENSSL_VERSION_INFO
s = ssl.OPENSSL_VERSION
self.assertIsInstance(n, int)
self.assertIsInstance(t, tuple)
self.assertIsInstance(s, str)
# Some sanity checks follow
# >= 0.9
self.assertGreaterEqual(n, 0x900000)
# < 2.0
self.assertLess(n, 0x20000000)
major, minor, fix, patch, status = t
self.assertGreaterEqual(major, 0)
self.assertLess(major, 2)
self.assertGreaterEqual(minor, 0)
self.assertLess(minor, 256)
self.assertGreaterEqual(fix, 0)
self.assertLess(fix, 256)
self.assertGreaterEqual(patch, 0)
self.assertLessEqual(patch, 26)
self.assertGreaterEqual(status, 0)
self.assertLessEqual(status, 15)
# Version string as returned by OpenSSL, the format might change
self.assertTrue(s.startswith("OpenSSL {:d}.{:d}.{:d}".format(major, minor, fix)),
(s, t))
@support.cpython_only
def test_refcycle(self):
# Issue #7943: an SSL object doesn't create reference cycles with
# itself.
s = socket.socket(socket.AF_INET)
ss = ssl.wrap_socket(s)
wr = weakref.ref(ss)
with support.check_warnings(("", ResourceWarning)):
del ss
self.assertEqual(wr(), None)
def test_wrapped_unconnected(self):
# Methods on an unconnected SSLSocket propagate the original
# OSError raise by the underlying socket object.
s = socket.socket(socket.AF_INET)
with ssl.wrap_socket(s) as ss:
self.assertRaises(OSError, ss.recv, 1)
self.assertRaises(OSError, ss.recv_into, bytearray(b'x'))
self.assertRaises(OSError, ss.recvfrom, 1)
self.assertRaises(OSError, ss.recvfrom_into, bytearray(b'x'), 1)
self.assertRaises(OSError, ss.send, b'x')
self.assertRaises(OSError, ss.sendto, b'x', ('0.0.0.0', 0))
def test_timeout(self):
# Issue #8524: when creating an SSL socket, the timeout of the
# original socket should be retained.
for timeout in (None, 0.0, 5.0):
s = socket.socket(socket.AF_INET)
s.settimeout(timeout)
with ssl.wrap_socket(s) as ss:
self.assertEqual(timeout, ss.gettimeout())
def test_errors(self):
sock = socket.socket()
self.assertRaisesRegex(ValueError,
"certfile must be specified",
ssl.wrap_socket, sock, keyfile=CERTFILE)
self.assertRaisesRegex(ValueError,
"certfile must be specified for server-side operations",
ssl.wrap_socket, sock, server_side=True)
self.assertRaisesRegex(ValueError,
"certfile must be specified for server-side operations",
ssl.wrap_socket, sock, server_side=True, certfile="")
with ssl.wrap_socket(sock, server_side=True, certfile=CERTFILE) as s:
self.assertRaisesRegex(ValueError, "can't connect in server-side mode",
s.connect, (HOST, 8080))
with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=CERTFILE, keyfile=WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaises(OSError) as cm:
with socket.socket() as sock:
ssl.wrap_socket(sock, certfile=WRONGCERT, keyfile=WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
def test_match_hostname(self):
def ok(cert, hostname):
ssl.match_hostname(cert, hostname)
def fail(cert, hostname):
self.assertRaises(ssl.CertificateError,
ssl.match_hostname, cert, hostname)
cert = {'subject': ((('commonName', 'example.com'),),)}
ok(cert, 'example.com')
ok(cert, 'ExAmple.cOm')
fail(cert, 'www.example.com')
fail(cert, '.example.com')
fail(cert, 'example.org')
fail(cert, 'exampleXcom')
cert = {'subject': ((('commonName', '*.a.com'),),)}
ok(cert, 'foo.a.com')
fail(cert, 'bar.foo.a.com')
fail(cert, 'a.com')
fail(cert, 'Xa.com')
fail(cert, '.a.com')
# only match one left-most wildcard
cert = {'subject': ((('commonName', 'f*.com'),),)}
ok(cert, 'foo.com')
ok(cert, 'f.com')
fail(cert, 'bar.com')
fail(cert, 'foo.a.com')
fail(cert, 'bar.foo.com')
# NULL bytes are bad, CVE-2013-4073
cert = {'subject': ((('commonName',
'null.python.org\x00example.org'),),)}
ok(cert, 'null.python.org\x00example.org') # or raise an error?
fail(cert, 'example.org')
fail(cert, 'null.python.org')
# error cases with wildcards
cert = {'subject': ((('commonName', '*.*.a.com'),),)}
fail(cert, 'bar.foo.a.com')
fail(cert, 'a.com')
fail(cert, 'Xa.com')
fail(cert, '.a.com')
cert = {'subject': ((('commonName', 'a.*.com'),),)}
fail(cert, 'a.foo.com')
fail(cert, 'a..com')
fail(cert, 'a.com')
# wildcard doesn't match IDNA prefix 'xn--'
idna = 'püthon.python.org'.encode("idna").decode("ascii")
cert = {'subject': ((('commonName', idna),),)}
ok(cert, idna)
cert = {'subject': ((('commonName', 'x*.python.org'),),)}
fail(cert, idna)
cert = {'subject': ((('commonName', 'xn--p*.python.org'),),)}
fail(cert, idna)
# wildcard in first fragment and IDNA A-labels in sequent fragments
# are supported.
idna = 'www*.pythön.org'.encode("idna").decode("ascii")
cert = {'subject': ((('commonName', idna),),)}
ok(cert, 'www.pythön.org'.encode("idna").decode("ascii"))
ok(cert, 'www1.pythön.org'.encode("idna").decode("ascii"))
fail(cert, 'ftp.pythön.org'.encode("idna").decode("ascii"))
fail(cert, 'pythön.org'.encode("idna").decode("ascii"))
# Slightly fake real-world example
cert = {'notAfter': 'Jun 26 21:41:46 2011 GMT',
'subject': ((('commonName', 'linuxfrz.org'),),),
'subjectAltName': (('DNS', 'linuxfr.org'),
('DNS', 'linuxfr.com'),
('othername', '<unsupported>'))}
ok(cert, 'linuxfr.org')
ok(cert, 'linuxfr.com')
# Not a "DNS" entry
fail(cert, '<unsupported>')
# When there is a subjectAltName, commonName isn't used
fail(cert, 'linuxfrz.org')
# A pristine real-world example
cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),),
(('commonName', 'mail.google.com'),))}
ok(cert, 'mail.google.com')
fail(cert, 'gmail.com')
# Only commonName is considered
fail(cert, 'California')
# Neither commonName nor subjectAltName
cert = {'notAfter': 'Dec 18 23:59:59 2011 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),))}
fail(cert, 'mail.google.com')
# No DNS entry in subjectAltName but a commonName
cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('commonName', 'mail.google.com'),)),
'subjectAltName': (('othername', 'blabla'), )}
ok(cert, 'mail.google.com')
# No DNS entry subjectAltName and no commonName
cert = {'notAfter': 'Dec 18 23:59:59 2099 GMT',
'subject': ((('countryName', 'US'),),
(('stateOrProvinceName', 'California'),),
(('localityName', 'Mountain View'),),
(('organizationName', 'Google Inc'),)),
'subjectAltName': (('othername', 'blabla'),)}
fail(cert, 'google.com')
# Empty cert / no cert
self.assertRaises(ValueError, ssl.match_hostname, None, 'example.com')
self.assertRaises(ValueError, ssl.match_hostname, {}, 'example.com')
# Issue #17980: avoid denials of service by refusing more than one
# wildcard per fragment.
cert = {'subject': ((('commonName', 'a*b.com'),),)}
ok(cert, 'axxb.com')
cert = {'subject': ((('commonName', 'a*b.co*'),),)}
fail(cert, 'axxb.com')
cert = {'subject': ((('commonName', 'a*b*.com'),),)}
with self.assertRaises(ssl.CertificateError) as cm:
ssl.match_hostname(cert, 'axxbxxc.com')
self.assertIn("too many wildcards", str(cm.exception))
def test_server_side(self):
# server_hostname doesn't work for server sockets
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with socket.socket() as sock:
self.assertRaises(ValueError, ctx.wrap_socket, sock, True,
server_hostname="some.hostname")
def test_unknown_channel_binding(self):
# should raise ValueError for unknown type
s = socket.socket(socket.AF_INET)
with ssl.wrap_socket(s) as ss:
with self.assertRaises(ValueError):
ss.get_channel_binding("unknown-type")
@unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
"'tls-unique' channel binding not available")
def test_tls_unique_channel_binding(self):
# unconnected should return None for known type
s = socket.socket(socket.AF_INET)
with ssl.wrap_socket(s) as ss:
self.assertIsNone(ss.get_channel_binding("tls-unique"))
# the same for server-side
s = socket.socket(socket.AF_INET)
with ssl.wrap_socket(s, server_side=True, certfile=CERTFILE) as ss:
self.assertIsNone(ss.get_channel_binding("tls-unique"))
def test_dealloc_warn(self):
ss = ssl.wrap_socket(socket.socket(socket.AF_INET))
r = repr(ss)
with self.assertWarns(ResourceWarning) as cm:
ss = None
support.gc_collect()
self.assertIn(r, str(cm.warning.args[0]))
def test_get_default_verify_paths(self):
paths = ssl.get_default_verify_paths()
self.assertEqual(len(paths), 6)
self.assertIsInstance(paths, ssl.DefaultVerifyPaths)
with support.EnvironmentVarGuard() as env:
env["SSL_CERT_DIR"] = CAPATH
env["SSL_CERT_FILE"] = CERTFILE
paths = ssl.get_default_verify_paths()
self.assertEqual(paths.cafile, CERTFILE)
self.assertEqual(paths.capath, CAPATH)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_enum_certificates(self):
self.assertTrue(ssl.enum_certificates("CA"))
self.assertTrue(ssl.enum_certificates("ROOT"))
self.assertRaises(TypeError, ssl.enum_certificates)
self.assertRaises(WindowsError, ssl.enum_certificates, "")
trust_oids = set()
for storename in ("CA", "ROOT"):
store = ssl.enum_certificates(storename)
self.assertIsInstance(store, list)
for element in store:
self.assertIsInstance(element, tuple)
self.assertEqual(len(element), 3)
cert, enc, trust = element
self.assertIsInstance(cert, bytes)
self.assertIn(enc, {"x509_asn", "pkcs_7_asn"})
self.assertIsInstance(trust, (set, bool))
if isinstance(trust, set):
trust_oids.update(trust)
serverAuth = "1.3.6.1.5.5.7.3.1"
self.assertIn(serverAuth, trust_oids)
@unittest.skipUnless(sys.platform == "win32", "Windows specific")
def test_enum_crls(self):
self.assertTrue(ssl.enum_crls("CA"))
self.assertRaises(TypeError, ssl.enum_crls)
self.assertRaises(WindowsError, ssl.enum_crls, "")
crls = ssl.enum_crls("CA")
self.assertIsInstance(crls, list)
for element in crls:
self.assertIsInstance(element, tuple)
self.assertEqual(len(element), 2)
self.assertIsInstance(element[0], bytes)
self.assertIn(element[1], {"x509_asn", "pkcs_7_asn"})
def test_asn1object(self):
expected = (129, 'serverAuth', 'TLS Web Server Authentication',
'1.3.6.1.5.5.7.3.1')
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1')
self.assertEqual(val, expected)
self.assertEqual(val.nid, 129)
self.assertEqual(val.shortname, 'serverAuth')
self.assertEqual(val.longname, 'TLS Web Server Authentication')
self.assertEqual(val.oid, '1.3.6.1.5.5.7.3.1')
self.assertIsInstance(val, ssl._ASN1Object)
self.assertRaises(ValueError, ssl._ASN1Object, 'serverAuth')
val = ssl._ASN1Object.fromnid(129)
self.assertEqual(val, expected)
self.assertIsInstance(val, ssl._ASN1Object)
self.assertRaises(ValueError, ssl._ASN1Object.fromnid, -1)
with self.assertRaisesRegex(ValueError, "unknown NID 100000"):
ssl._ASN1Object.fromnid(100000)
for i in range(1000):
try:
obj = ssl._ASN1Object.fromnid(i)
except ValueError:
pass
else:
self.assertIsInstance(obj.nid, int)
self.assertIsInstance(obj.shortname, str)
self.assertIsInstance(obj.longname, str)
self.assertIsInstance(obj.oid, (str, type(None)))
val = ssl._ASN1Object.fromname('TLS Web Server Authentication')
self.assertEqual(val, expected)
self.assertIsInstance(val, ssl._ASN1Object)
self.assertEqual(ssl._ASN1Object.fromname('serverAuth'), expected)
self.assertEqual(ssl._ASN1Object.fromname('1.3.6.1.5.5.7.3.1'),
expected)
with self.assertRaisesRegex(ValueError, "unknown object 'serverauth'"):
ssl._ASN1Object.fromname('serverauth')
def test_purpose_enum(self):
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.1')
self.assertIsInstance(ssl.Purpose.SERVER_AUTH, ssl._ASN1Object)
self.assertEqual(ssl.Purpose.SERVER_AUTH, val)
self.assertEqual(ssl.Purpose.SERVER_AUTH.nid, 129)
self.assertEqual(ssl.Purpose.SERVER_AUTH.shortname, 'serverAuth')
self.assertEqual(ssl.Purpose.SERVER_AUTH.oid,
'1.3.6.1.5.5.7.3.1')
val = ssl._ASN1Object('1.3.6.1.5.5.7.3.2')
self.assertIsInstance(ssl.Purpose.CLIENT_AUTH, ssl._ASN1Object)
self.assertEqual(ssl.Purpose.CLIENT_AUTH, val)
self.assertEqual(ssl.Purpose.CLIENT_AUTH.nid, 130)
self.assertEqual(ssl.Purpose.CLIENT_AUTH.shortname, 'clientAuth')
self.assertEqual(ssl.Purpose.CLIENT_AUTH.oid,
'1.3.6.1.5.5.7.3.2')
def test_unsupported_dtls(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.addCleanup(s.close)
with self.assertRaises(NotImplementedError) as cx:
ssl.wrap_socket(s, cert_reqs=ssl.CERT_NONE)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with self.assertRaises(NotImplementedError) as cx:
ctx.wrap_socket(s)
self.assertEqual(str(cx.exception), "only stream sockets are supported")
def cert_time_ok(self, timestring, timestamp):
self.assertEqual(ssl.cert_time_to_seconds(timestring), timestamp)
def cert_time_fail(self, timestring):
with self.assertRaises(ValueError):
ssl.cert_time_to_seconds(timestring)
@unittest.skipUnless(utc_offset(),
'local time needs to be different from UTC')
def test_cert_time_to_seconds_timezone(self):
# Issue #19940: ssl.cert_time_to_seconds() returns wrong
# results if local timezone is not UTC
self.cert_time_ok("May 9 00:00:00 2007 GMT", 1178668800.0)
self.cert_time_ok("Jan 5 09:34:43 2018 GMT", 1515144883.0)
def test_cert_time_to_seconds(self):
timestring = "Jan 5 09:34:43 2018 GMT"
ts = 1515144883.0
self.cert_time_ok(timestring, ts)
# accept keyword parameter, assert its name
self.assertEqual(ssl.cert_time_to_seconds(cert_time=timestring), ts)
# accept both %e and %d (space or zero generated by strftime)
self.cert_time_ok("Jan 05 09:34:43 2018 GMT", ts)
# case-insensitive
self.cert_time_ok("JaN 5 09:34:43 2018 GmT", ts)
self.cert_time_fail("Jan 5 09:34 2018 GMT") # no seconds
self.cert_time_fail("Jan 5 09:34:43 2018") # no GMT
self.cert_time_fail("Jan 5 09:34:43 2018 UTC") # not GMT timezone
self.cert_time_fail("Jan 35 09:34:43 2018 GMT") # invalid day
self.cert_time_fail("Jon 5 09:34:43 2018 GMT") # invalid month
self.cert_time_fail("Jan 5 24:00:00 2018 GMT") # invalid hour
self.cert_time_fail("Jan 5 09:60:43 2018 GMT") # invalid minute
newyear_ts = 1230768000.0
# leap seconds
self.cert_time_ok("Dec 31 23:59:60 2008 GMT", newyear_ts)
# same timestamp
self.cert_time_ok("Jan 1 00:00:00 2009 GMT", newyear_ts)
self.cert_time_ok("Jan 5 09:34:59 2018 GMT", 1515144899)
# allow 60th second (even if it is not a leap second)
self.cert_time_ok("Jan 5 09:34:60 2018 GMT", 1515144900)
# allow 2nd leap second for compatibility with time.strptime()
self.cert_time_ok("Jan 5 09:34:61 2018 GMT", 1515144901)
self.cert_time_fail("Jan 5 09:34:62 2018 GMT") # invalid seconds
# no special treatement for the special value:
# 99991231235959Z (rfc 5280)
self.cert_time_ok("Dec 31 23:59:59 9999 GMT", 253402300799.0)
@support.run_with_locale('LC_ALL', '')
def test_cert_time_to_seconds_locale(self):
# `cert_time_to_seconds()` should be locale independent
def local_february_name():
return time.strftime('%b', (1, 2, 3, 4, 5, 6, 0, 0, 0))
if local_february_name().lower() == 'feb':
self.skipTest("locale-specific month name needs to be "
"different from C locale")
# locale-independent
self.cert_time_ok("Feb 9 00:00:00 2007 GMT", 1170979200.0)
self.cert_time_fail(local_february_name() + " 9 00:00:00 2007 GMT")
class ContextTests(unittest.TestCase):
@skip_if_broken_ubuntu_ssl
def test_constructor(self):
for protocol in PROTOCOLS:
ssl.SSLContext(protocol)
self.assertRaises(TypeError, ssl.SSLContext)
self.assertRaises(ValueError, ssl.SSLContext, -1)
self.assertRaises(ValueError, ssl.SSLContext, 42)
@skip_if_broken_ubuntu_ssl
def test_protocol(self):
for proto in PROTOCOLS:
ctx = ssl.SSLContext(proto)
self.assertEqual(ctx.protocol, proto)
def test_ciphers(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_ciphers("ALL")
ctx.set_ciphers("DEFAULT")
with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"):
ctx.set_ciphers("^$:,;?*'dorothyx")
@skip_if_broken_ubuntu_ssl
def test_options(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# OP_ALL | OP_NO_SSLv2 is the default value
self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2,
ctx.options)
ctx.options |= ssl.OP_NO_SSLv3
self.assertEqual(ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3,
ctx.options)
if can_clear_options():
ctx.options = (ctx.options & ~ssl.OP_NO_SSLv2) | ssl.OP_NO_TLSv1
self.assertEqual(ssl.OP_ALL | ssl.OP_NO_TLSv1 | ssl.OP_NO_SSLv3,
ctx.options)
ctx.options = 0
self.assertEqual(0, ctx.options)
else:
with self.assertRaises(ValueError):
ctx.options = 0
def test_verify_mode(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# Default value
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
ctx.verify_mode = ssl.CERT_OPTIONAL
self.assertEqual(ctx.verify_mode, ssl.CERT_OPTIONAL)
ctx.verify_mode = ssl.CERT_REQUIRED
self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
ctx.verify_mode = ssl.CERT_NONE
self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
with self.assertRaises(TypeError):
ctx.verify_mode = None
with self.assertRaises(ValueError):
ctx.verify_mode = 42
@unittest.skipUnless(have_verify_flags(),
"verify_flags need OpenSSL > 0.9.8")
def test_verify_flags(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# default value by OpenSSL
self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT)
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_LEAF)
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_CHAIN
self.assertEqual(ctx.verify_flags, ssl.VERIFY_CRL_CHECK_CHAIN)
ctx.verify_flags = ssl.VERIFY_DEFAULT
self.assertEqual(ctx.verify_flags, ssl.VERIFY_DEFAULT)
# supports any value
ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT
self.assertEqual(ctx.verify_flags,
ssl.VERIFY_CRL_CHECK_LEAF | ssl.VERIFY_X509_STRICT)
with self.assertRaises(TypeError):
ctx.verify_flags = None
def test_load_cert_chain(self):
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
# Combined key and cert in a single file
ctx.load_cert_chain(CERTFILE)
ctx.load_cert_chain(CERTFILE, keyfile=CERTFILE)
self.assertRaises(TypeError, ctx.load_cert_chain, keyfile=CERTFILE)
with self.assertRaises(OSError) as cm:
ctx.load_cert_chain(WRONGCERT)
self.assertEqual(cm.exception.errno, errno.ENOENT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(BADCERT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(EMPTYCERT)
# Separate key and cert
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.load_cert_chain(ONLYCERT, ONLYKEY)
ctx.load_cert_chain(certfile=ONLYCERT, keyfile=ONLYKEY)
ctx.load_cert_chain(certfile=BYTES_ONLYCERT, keyfile=BYTES_ONLYKEY)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(ONLYCERT)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(ONLYKEY)
with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
ctx.load_cert_chain(certfile=ONLYKEY, keyfile=ONLYCERT)
# Mismatching key and cert
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
with self.assertRaisesRegex(ssl.SSLError, "key values mismatch"):
ctx.load_cert_chain(SVN_PYTHON_ORG_ROOT_CERT, ONLYKEY)
# Password protected key and cert
ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=KEY_PASSWORD.encode())
ctx.load_cert_chain(CERTFILE_PROTECTED,
password=bytearray(KEY_PASSWORD.encode()))
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD)
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED, KEY_PASSWORD.encode())
ctx.load_cert_chain(ONLYCERT, ONLYKEY_PROTECTED,
bytearray(KEY_PASSWORD.encode()))
with self.assertRaisesRegex(TypeError, "should be a string"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=True)
with self.assertRaises(ssl.SSLError):
ctx.load_cert_chain(CERTFILE_PROTECTED, password="badpass")
with self.assertRaisesRegex(ValueError, "cannot be longer"):
# openssl has a fixed limit on the password buffer.
# PEM_BUFSIZE is generally set to 1kb.
# Return a string larger than this.
ctx.load_cert_chain(CERTFILE_PROTECTED, password=b'a' * 102400)
# Password callback
def getpass_unicode():
return KEY_PASSWORD
def getpass_bytes():
return KEY_PASSWORD.encode()
def getpass_bytearray():
return bytearray(KEY_PASSWORD.encode())
def getpass_badpass():
return "badpass"
def getpass_huge():
return b'a' * (1024 * 1024)
def getpass_bad_type():
return 9
def getpass_exception():
raise Exception('getpass error')
class GetPassCallable:
def __call__(self):
return KEY_PASSWORD
def getpass(self):
return KEY_PASSWORD
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_unicode)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytes)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bytearray)
ctx.load_cert_chain(CERTFILE_PROTECTED, password=GetPassCallable())
ctx.load_cert_chain(CERTFILE_PROTECTED,
password=GetPassCallable().getpass)
with self.assertRaises(ssl.SSLError):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_badpass)
with self.assertRaisesRegex(ValueError, "cannot be longer"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_huge)
with self.assertRaisesRegex(TypeError, "must return a string"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_bad_type)
with self.assertRaisesRegex(Exception, "getpass error"):
ctx.load_cert_chain(CERTFILE_PROTECTED, password=getpass_exception)
# Make sure the password function isn't called if it isn't needed
ctx.load_cert_chain(CERTFILE, password=getpass_exception)
def test_load_verify_locations(self):
    """Exercise SSLContext.load_verify_locations() argument handling."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    # str and bytes paths are both accepted, positionally or by keyword
    ctx.load_verify_locations(CERTFILE)
    ctx.load_verify_locations(cafile=CERTFILE, capath=None)
    ctx.load_verify_locations(BYTES_CERTFILE)
    ctx.load_verify_locations(cafile=BYTES_CERTFILE, capath=None)
    # at least one of cafile/capath/cadata must be given
    self.assertRaises(TypeError, ctx.load_verify_locations)
    self.assertRaises(TypeError, ctx.load_verify_locations, None, None, None)
    # a nonexistent file surfaces as OSError with ENOENT
    with self.assertRaises(OSError) as cm:
        ctx.load_verify_locations(WRONGCERT)
    self.assertEqual(cm.exception.errno, errno.ENOENT)
    # malformed PEM contents raise SSLError mentioning the PEM library
    with self.assertRaisesRegex(ssl.SSLError, "PEM lib"):
        ctx.load_verify_locations(BADCERT)
    ctx.load_verify_locations(CERTFILE, CAPATH)
    ctx.load_verify_locations(CERTFILE, capath=BYTES_CAPATH)

    # Issue #10989: crash if the second argument type is invalid
    self.assertRaises(TypeError, ctx.load_verify_locations, None, True)
def test_load_verify_cadata(self):
    """load_verify_locations(cadata=...) accepts PEM text and DER bytes.

    The x509_ca counter from cert_store_stats() is used to verify that
    certs are added exactly once (duplicates are deduplicated).
    """
    # test cadata
    with open(CAFILE_CACERT) as f:
        cacert_pem = f.read()
    cacert_der = ssl.PEM_cert_to_DER_cert(cacert_pem)
    with open(CAFILE_NEURONIO) as f:
        neuronio_pem = f.read()
    neuronio_der = ssl.PEM_cert_to_DER_cert(neuronio_pem)

    # test PEM
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 0)
    ctx.load_verify_locations(cadata=cacert_pem)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 1)
    ctx.load_verify_locations(cadata=neuronio_pem)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
    # cert already in hash table
    ctx.load_verify_locations(cadata=neuronio_pem)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)

    # combined
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    combined = "\n".join((cacert_pem, neuronio_pem))
    ctx.load_verify_locations(cadata=combined)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)

    # with junk around the certs
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    combined = ["head", cacert_pem, "other", neuronio_pem, "again",
                neuronio_pem, "tail"]
    ctx.load_verify_locations(cadata="\n".join(combined))
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)

    # test DER
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    ctx.load_verify_locations(cadata=cacert_der)
    ctx.load_verify_locations(cadata=neuronio_der)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)
    # cert already in hash table
    ctx.load_verify_locations(cadata=cacert_der)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)

    # combined
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    combined = b"".join((cacert_der, neuronio_der))
    ctx.load_verify_locations(cadata=combined)
    self.assertEqual(ctx.cert_store_stats()["x509_ca"], 2)

    # error cases
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    self.assertRaises(TypeError, ctx.load_verify_locations, cadata=object)

    # bad PEM text and bad DER bytes yield distinct SSL errors
    with self.assertRaisesRegex(ssl.SSLError, "no start line"):
        ctx.load_verify_locations(cadata="broken")
    with self.assertRaisesRegex(ssl.SSLError, "not enough data"):
        ctx.load_verify_locations(cadata=b"broken")
def test_load_dh_params(self):
    """DH parameters load from a file path; bad inputs must raise."""
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    context.load_dh_params(DHFILE)
    if os.name != 'nt':
        # bytes paths are only exercised off Windows
        context.load_dh_params(BYTES_DHFILE)
    # the path argument is mandatory and must be a string-like path
    self.assertRaises(TypeError, context.load_dh_params)
    self.assertRaises(TypeError, context.load_dh_params, None)
    # a missing file maps to FileNotFoundError / ENOENT
    with self.assertRaises(FileNotFoundError) as cm:
        context.load_dh_params(WRONGCERT)
    self.assertEqual(cm.exception.errno, errno.ENOENT)
    # a file that exists but holds no DH params raises SSLError
    with self.assertRaises(ssl.SSLError) as cm:
        context.load_dh_params(CERTFILE)
@skip_if_broken_ubuntu_ssl
def test_session_stats(self):
    """A freshly created context reports all-zero session statistics."""
    # NOTE(review): removed a stray "|" line between the decorator and
    # this def — it was a syntax error.
    for proto in PROTOCOLS:
        ctx = ssl.SSLContext(proto)
        self.assertEqual(ctx.session_stats(), {
            'number': 0,
            'connect': 0,
            'connect_good': 0,
            'connect_renegotiate': 0,
            'accept': 0,
            'accept_good': 0,
            'accept_renegotiate': 0,
            'hits': 0,
            'misses': 0,
            'timeouts': 0,
            'cache_full': 0,
        })
def test_set_default_verify_paths(self):
# There's not much we can do to test that it acts as expected,
# so just check it doesn't crash or raise an exception.
ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
ctx.set_default_verify_paths()
@unittest.skipUnless(ssl.HAS_ECDH, "ECDH disabled on this OpenSSL build")
def test_set_ecdh_curve(self):
    """set_ecdh_curve() accepts known curve names as str or bytes only."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    ctx.set_ecdh_curve("prime256v1")
    ctx.set_ecdh_curve(b"prime256v1")
    # the curve name argument is mandatory...
    self.assertRaises(TypeError, ctx.set_ecdh_curve)
    self.assertRaises(TypeError, ctx.set_ecdh_curve, None)
    # ...and must name a curve OpenSSL knows about
    self.assertRaises(ValueError, ctx.set_ecdh_curve, "foo")
    self.assertRaises(ValueError, ctx.set_ecdh_curve, b"foo")
@needs_sni
def test_sni_callback(self):
    """set_servername_callback() validates its argument type."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    # set_servername_callback expects a callable, or None
    self.assertRaises(TypeError, ctx.set_servername_callback)
    self.assertRaises(TypeError, ctx.set_servername_callback, 4)
    self.assertRaises(TypeError, ctx.set_servername_callback, "")
    self.assertRaises(TypeError, ctx.set_servername_callback, ctx)

    def dummycallback(sock, servername, ctx):
        pass
    # None clears the callback; a plain function installs it
    ctx.set_servername_callback(None)
    ctx.set_servername_callback(dummycallback)
@needs_sni
def test_sni_callback_refcycle(self):
    # Reference cycles through the servername callback are detected
    # and cleared.
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)

    # the default argument creates a cycle: ctx -> callback -> ctx
    def dummycallback(sock, servername, ctx, cycle=ctx):
        pass
    ctx.set_servername_callback(dummycallback)
    wr = weakref.ref(ctx)
    del ctx, dummycallback
    gc.collect()
    # once the cycle is collected, nothing should keep the context alive
    self.assertIs(wr(), None)
def test_cert_store_stats(self):
    """cert_store_stats() counts only verification-store certificates."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    self.assertEqual(ctx.cert_store_stats(),
                     {'x509_ca': 0, 'crl': 0, 'x509': 0})
    # loading our own cert chain does not populate the verify store
    ctx.load_cert_chain(CERTFILE)
    self.assertEqual(ctx.cert_store_stats(),
                     {'x509_ca': 0, 'crl': 0, 'x509': 0})
    # CERTFILE is an end-entity cert -> counted under 'x509' only
    ctx.load_verify_locations(CERTFILE)
    self.assertEqual(ctx.cert_store_stats(),
                     {'x509_ca': 0, 'crl': 0, 'x509': 1})
    # a CA cert is also counted under 'x509_ca'
    ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT)
    self.assertEqual(ctx.cert_store_stats(),
                     {'x509_ca': 1, 'crl': 0, 'x509': 2})
def test_get_ca_certs(self):
    """get_ca_certs() lists only loaded CA certs, decoded or as DER."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    self.assertEqual(ctx.get_ca_certs(), [])
    # CERTFILE is not flagged as X509v3 Basic Constraints: CA:TRUE
    ctx.load_verify_locations(CERTFILE)
    self.assertEqual(ctx.get_ca_certs(), [])
    # but SVN_PYTHON_ORG_ROOT_CERT is a CA cert
    ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT)
    self.assertEqual(ctx.get_ca_certs(),
        [{'issuer': ((('organizationName', 'Root CA'),),
                     (('organizationalUnitName', 'http://www.cacert.org'),),
                     (('commonName', 'CA Cert Signing Authority'),),
                     (('emailAddress', '[email protected]'),)),
          'notAfter': asn1time('Mar 29 12:29:49 2033 GMT'),
          'notBefore': asn1time('Mar 30 12:29:49 2003 GMT'),
          'serialNumber': '00',
          'crlDistributionPoints': ('https://www.cacert.org/revoke.crl',),
          'subject': ((('organizationName', 'Root CA'),),
                      (('organizationalUnitName', 'http://www.cacert.org'),),
                      (('commonName', 'CA Cert Signing Authority'),),
                      (('emailAddress', '[email protected]'),)),
          'version': 3}])

    # binary_form=True returns the raw DER bytes instead of a dict
    with open(SVN_PYTHON_ORG_ROOT_CERT) as f:
        pem = f.read()
    der = ssl.PEM_cert_to_DER_cert(pem)
    self.assertEqual(ctx.get_ca_certs(True), [der])
def test_load_default_certs(self):
    """load_default_certs() takes an optional Purpose and nothing else."""
    # no argument: defaults are loaded
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    context.load_default_certs()

    # explicit purpose, and calling twice, are both fine
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    context.load_default_certs(ssl.Purpose.SERVER_AUTH)
    context.load_default_certs()

    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    context.load_default_certs(ssl.Purpose.CLIENT_AUTH)

    # anything other than a Purpose value is rejected
    context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    self.assertRaises(TypeError, context.load_default_certs, None)
    self.assertRaises(TypeError, context.load_default_certs, 'SERVER_AUTH')
def test_create_default_context(self):
    """create_default_context() enables verification and safe options."""
    ctx = ssl.create_default_context()
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
    self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
    self.assertTrue(ctx.check_hostname)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
    # OP_NO_COMPRESSION may be absent on old OpenSSL; getattr defaults to 0
    self.assertEqual(
        ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0),
        getattr(ssl, "OP_NO_COMPRESSION", 0),
    )

    # cafile, capath and cadata may all be supplied together
    with open(SIGNING_CA) as f:
        cadata = f.read()
    ctx = ssl.create_default_context(cafile=SIGNING_CA, capath=CAPATH,
                                     cadata=cadata)
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
    self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
    self.assertEqual(
        ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0),
        getattr(ssl, "OP_NO_COMPRESSION", 0),
    )

    # server-side purpose: no client-cert verification by default,
    # and single-use (EC)DH keys are requested
    ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
    self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
    self.assertEqual(
        ctx.options & getattr(ssl, "OP_NO_COMPRESSION", 0),
        getattr(ssl, "OP_NO_COMPRESSION", 0),
    )
    self.assertEqual(
        ctx.options & getattr(ssl, "OP_SINGLE_DH_USE", 0),
        getattr(ssl, "OP_SINGLE_DH_USE", 0),
    )
    self.assertEqual(
        ctx.options & getattr(ssl, "OP_SINGLE_ECDH_USE", 0),
        getattr(ssl, "OP_SINGLE_ECDH_USE", 0),
    )
def test__create_stdlib_context(self):
    """_create_stdlib_context() is permissive by default, configurable."""
    ctx = ssl._create_stdlib_context()
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
    self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
    self.assertFalse(ctx.check_hostname)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)

    # explicit protocol selection
    ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1)
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1)
    self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)

    # verification and hostname checking can be opted into
    ctx = ssl._create_stdlib_context(ssl.PROTOCOL_TLSv1,
                                     cert_reqs=ssl.CERT_REQUIRED,
                                     check_hostname=True)
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_TLSv1)
    self.assertEqual(ctx.verify_mode, ssl.CERT_REQUIRED)
    self.assertTrue(ctx.check_hostname)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)

    # purpose keyword does not change the permissive defaults
    ctx = ssl._create_stdlib_context(purpose=ssl.Purpose.CLIENT_AUTH)
    self.assertEqual(ctx.protocol, ssl.PROTOCOL_SSLv23)
    self.assertEqual(ctx.verify_mode, ssl.CERT_NONE)
    self.assertEqual(ctx.options & ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv2)
def test_check_hostname(self):
    """check_hostname and verify_mode constrain each other."""
    ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
    self.assertFalse(ctx.check_hostname)

    # Requires CERT_REQUIRED or CERT_OPTIONAL
    with self.assertRaises(ValueError):
        ctx.check_hostname = True
    ctx.verify_mode = ssl.CERT_REQUIRED
    # changing verify_mode does not implicitly enable hostname checks
    self.assertFalse(ctx.check_hostname)
    ctx.check_hostname = True
    self.assertTrue(ctx.check_hostname)
    ctx.verify_mode = ssl.CERT_OPTIONAL
    ctx.check_hostname = True
    self.assertTrue(ctx.check_hostname)

    # Cannot set CERT_NONE with check_hostname enabled
    with self.assertRaises(ValueError):
        ctx.verify_mode = ssl.CERT_NONE
    ctx.check_hostname = False
    self.assertFalse(ctx.check_hostname)
class SSLErrorTests(unittest.TestCase):
    """Tests for the SSLError hierarchy and its attributes."""

    def test_str(self):
        # The str() of a SSLError doesn't include the errno
        e = ssl.SSLError(1, "foo")
        self.assertEqual(str(e), "foo")
        self.assertEqual(e.errno, 1)
        # Same for a subclass
        e = ssl.SSLZeroReturnError(1, "foo")
        self.assertEqual(str(e), "foo")
        self.assertEqual(e.errno, 1)

    def test_lib_reason(self):
        # Test the library and reason attributes
        ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        # feeding a certificate where DH params are expected fails in
        # OpenSSL's PEM library with NO_START_LINE
        with self.assertRaises(ssl.SSLError) as cm:
            ctx.load_dh_params(CERTFILE)
        self.assertEqual(cm.exception.library, 'PEM')
        self.assertEqual(cm.exception.reason, 'NO_START_LINE')
        s = str(cm.exception)
        self.assertTrue(s.startswith("[PEM: NO_START_LINE] no start line"), s)

    def test_subclass(self):
        # Check that the appropriate SSLError subclass is raised
        # (this only tests one of them)
        ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        with socket.socket() as s:
            s.bind(("127.0.0.1", 0))
            s.listen(5)
            c = socket.socket()
            c.connect(s.getsockname())
            # non-blocking: the handshake cannot complete immediately,
            # so do_handshake() must raise SSLWantReadError
            c.setblocking(False)
            with ctx.wrap_socket(c, False, do_handshake_on_connect=False) as c:
                with self.assertRaises(ssl.SSLWantReadError) as cm:
                    c.do_handshake()
                s = str(cm.exception)
                self.assertTrue(s.startswith("The operation did not complete (read)"), s)
                # For compatibility
                self.assertEqual(cm.exception.errno, ssl.SSL_ERROR_WANT_READ)
class NetworkedTests(unittest.TestCase):
    """Tests that contact external hosts (mainly svn.python.org).

    Every test is wrapped in support.transient_internet() so that
    transient network failures skip rather than fail the test run.
    """

    def test_connect(self):
        with support.transient_internet("svn.python.org"):
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_NONE)
            try:
                s.connect(("svn.python.org", 443))
                # CERT_NONE: no peer certificate data is returned
                self.assertEqual({}, s.getpeercert())
            finally:
                s.close()

            # this should fail because we have no verification certs
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_REQUIRED)
            self.assertRaisesRegex(ssl.SSLError, "certificate verify failed",
                                   s.connect, ("svn.python.org", 443))
            s.close()

            # this should succeed because we specify the root cert
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_REQUIRED,
                                ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
            try:
                s.connect(("svn.python.org", 443))
                self.assertTrue(s.getpeercert())
            finally:
                s.close()

    def test_connect_ex(self):
        # Issue #11326: check connect_ex() implementation
        with support.transient_internet("svn.python.org"):
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_REQUIRED,
                                ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
            try:
                self.assertEqual(0, s.connect_ex(("svn.python.org", 443)))
                self.assertTrue(s.getpeercert())
            finally:
                s.close()

    def test_non_blocking_connect_ex(self):
        # Issue #11326: non-blocking connect_ex() should allow handshake
        # to proceed after the socket gets ready.
        with support.transient_internet("svn.python.org"):
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_REQUIRED,
                                ca_certs=SVN_PYTHON_ORG_ROOT_CERT,
                                do_handshake_on_connect=False)
            try:
                s.setblocking(False)
                rc = s.connect_ex(('svn.python.org', 443))
                # EWOULDBLOCK under Windows, EINPROGRESS elsewhere
                self.assertIn(rc, (0, errno.EINPROGRESS, errno.EWOULDBLOCK))
                # Wait for connect to finish
                select.select([], [s], [], 5.0)
                # Non-blocking handshake
                while True:
                    try:
                        s.do_handshake()
                        break
                    except ssl.SSLWantReadError:
                        select.select([s], [], [], 5.0)
                    except ssl.SSLWantWriteError:
                        select.select([], [s], [], 5.0)
                # SSL established
                self.assertTrue(s.getpeercert())
            finally:
                s.close()

    def test_timeout_connect_ex(self):
        # Issue #12065: on a timeout, connect_ex() should return the original
        # errno (mimicking the behaviour of non-SSL sockets).
        with support.transient_internet("svn.python.org"):
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_REQUIRED,
                                ca_certs=SVN_PYTHON_ORG_ROOT_CERT,
                                do_handshake_on_connect=False)
            try:
                s.settimeout(0.0000001)
                rc = s.connect_ex(('svn.python.org', 443))
                if rc == 0:
                    self.skipTest("svn.python.org responded too quickly")
                self.assertIn(rc, (errno.EAGAIN, errno.EWOULDBLOCK))
            finally:
                s.close()

    def test_connect_ex_error(self):
        with support.transient_internet("svn.python.org"):
            s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                                cert_reqs=ssl.CERT_REQUIRED,
                                ca_certs=SVN_PYTHON_ORG_ROOT_CERT)
            try:
                # port 444 is not served; expect connection refusal
                rc = s.connect_ex(("svn.python.org", 444))
                # Issue #19919: Windows machines or VMs hosted on Windows
                # machines sometimes return EWOULDBLOCK.
                self.assertIn(rc, (errno.ECONNREFUSED, errno.EWOULDBLOCK))
            finally:
                s.close()

    def test_connect_with_context(self):
        with support.transient_internet("svn.python.org"):
            # Same as test_connect, but with a separately created context
            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            s = ctx.wrap_socket(socket.socket(socket.AF_INET))
            s.connect(("svn.python.org", 443))
            try:
                self.assertEqual({}, s.getpeercert())
            finally:
                s.close()
            # Same with a server hostname
            s = ctx.wrap_socket(socket.socket(socket.AF_INET),
                                server_hostname="svn.python.org")
            if ssl.HAS_SNI:
                s.connect(("svn.python.org", 443))
                s.close()
            else:
                # server_hostname is rejected when SNI is unavailable
                self.assertRaises(ValueError, s.connect, ("svn.python.org", 443))
            # This should fail because we have no verification certs
            ctx.verify_mode = ssl.CERT_REQUIRED
            s = ctx.wrap_socket(socket.socket(socket.AF_INET))
            self.assertRaisesRegex(ssl.SSLError, "certificate verify failed",
                                   s.connect, ("svn.python.org", 443))
            s.close()
            # This should succeed because we specify the root cert
            ctx.load_verify_locations(SVN_PYTHON_ORG_ROOT_CERT)
            s = ctx.wrap_socket(socket.socket(socket.AF_INET))
            s.connect(("svn.python.org", 443))
            try:
                cert = s.getpeercert()
                self.assertTrue(cert)
            finally:
                s.close()

    def test_connect_capath(self):
        # Verify server certificates using the `capath` argument
        # NOTE: the subject hashing algorithm has been changed between
        # OpenSSL 0.9.8n and 1.0.0, as a result the capath directory must
        # contain both versions of each certificate (same content, different
        # filename) for this test to be portable across OpenSSL releases.
        with support.transient_internet("svn.python.org"):
            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(capath=CAPATH)
            s = ctx.wrap_socket(socket.socket(socket.AF_INET))
            s.connect(("svn.python.org", 443))
            try:
                cert = s.getpeercert()
                self.assertTrue(cert)
            finally:
                s.close()

            # Same with a bytes `capath` argument
            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(capath=BYTES_CAPATH)
            s = ctx.wrap_socket(socket.socket(socket.AF_INET))
            s.connect(("svn.python.org", 443))
            try:
                cert = s.getpeercert()
                self.assertTrue(cert)
            finally:
                s.close()

    def test_connect_cadata(self):
        # Verify server certificates via in-memory cadata (PEM then DER)
        with open(CAFILE_CACERT) as f:
            pem = f.read()
        der = ssl.PEM_cert_to_DER_cert(pem)
        with support.transient_internet("svn.python.org"):
            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(cadata=pem)
            with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s:
                s.connect(("svn.python.org", 443))
                cert = s.getpeercert()
                self.assertTrue(cert)

            # same with DER
            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(cadata=der)
            with ctx.wrap_socket(socket.socket(socket.AF_INET)) as s:
                s.connect(("svn.python.org", 443))
                cert = s.getpeercert()
                self.assertTrue(cert)

    @unittest.skipIf(os.name == "nt", "Can't use a socket as a file under Windows")
    def test_makefile_close(self):
        # Issue #5238: creating a file-like object with makefile() shouldn't
        # delay closing the underlying "real socket" (here tested with its
        # file descriptor, hence skipping the test under Windows).
        with support.transient_internet("svn.python.org"):
            ss = ssl.wrap_socket(socket.socket(socket.AF_INET))
            ss.connect(("svn.python.org", 443))
            fd = ss.fileno()
            f = ss.makefile()
            f.close()
            # The fd is still open
            os.read(fd, 0)
            # Closing the SSL socket should close the fd too
            ss.close()
            gc.collect()
            with self.assertRaises(OSError) as e:
                os.read(fd, 0)
            self.assertEqual(e.exception.errno, errno.EBADF)

    def test_non_blocking_handshake(self):
        with support.transient_internet("svn.python.org"):
            s = socket.socket(socket.AF_INET)
            s.connect(("svn.python.org", 443))
            s.setblocking(False)
            s = ssl.wrap_socket(s,
                                cert_reqs=ssl.CERT_NONE,
                                do_handshake_on_connect=False)
            # drive the handshake by hand, selecting on the direction
            # each SSLWant*Error asks for
            count = 0
            while True:
                try:
                    count += 1
                    s.do_handshake()
                    break
                except ssl.SSLWantReadError:
                    select.select([s], [], [])
                except ssl.SSLWantWriteError:
                    select.select([], [s], [])
            s.close()
            if support.verbose:
                sys.stdout.write("\nNeeded %d calls to do_handshake() to establish session.\n" % count)

    def test_get_server_certificate(self):
        def _test_get_server_certificate(host, port, cert=None):
            with support.transient_internet(host):
                pem = ssl.get_server_certificate((host, port))
                if not pem:
                    self.fail("No server certificate on %s:%s!" % (host, port))

                try:
                    # verifying against an unrelated CA must fail
                    pem = ssl.get_server_certificate((host, port),
                                                     ca_certs=CERTFILE)
                except ssl.SSLError as x:
                    #should fail
                    if support.verbose:
                        sys.stdout.write("%s\n" % x)
                else:
                    self.fail("Got server certificate %s for %s:%s!" % (pem, host, port))

                pem = ssl.get_server_certificate((host, port),
                                                 ca_certs=cert)
                if not pem:
                    self.fail("No server certificate on %s:%s!" % (host, port))
                if support.verbose:
                    sys.stdout.write("\nVerified certificate for %s:%s is\n%s\n" % (host, port, pem))

        _test_get_server_certificate('svn.python.org', 443, SVN_PYTHON_ORG_ROOT_CERT)
        if support.IPV6_ENABLED:
            _test_get_server_certificate('ipv6.google.com', 443)

    def test_ciphers(self):
        remote = ("svn.python.org", 443)
        with support.transient_internet(remote[0]):
            with ssl.wrap_socket(socket.socket(socket.AF_INET),
                                 cert_reqs=ssl.CERT_NONE, ciphers="ALL") as s:
                s.connect(remote)
            with ssl.wrap_socket(socket.socket(socket.AF_INET),
                                 cert_reqs=ssl.CERT_NONE, ciphers="DEFAULT") as s:
                s.connect(remote)
            # Error checking can happen at instantiation or when connecting
            with self.assertRaisesRegex(ssl.SSLError, "No cipher can be selected"):
                with socket.socket(socket.AF_INET) as sock:
                    s = ssl.wrap_socket(sock,
                                        cert_reqs=ssl.CERT_NONE, ciphers="^$:,;?*'dorothyx")
                    s.connect(remote)

    def test_algorithms(self):
        # Issue #8484: all algorithms should be available when verifying a
        # certificate.
        # SHA256 was added in OpenSSL 0.9.8
        if ssl.OPENSSL_VERSION_INFO < (0, 9, 8, 0, 15):
            self.skipTest("SHA256 not available on %r" % ssl.OPENSSL_VERSION)
        # sha256.tbs-internet.com needs SNI to use the correct certificate
        if not ssl.HAS_SNI:
            self.skipTest("SNI needed for this test")
        # https://sha2.hboeck.de/ was used until 2011-01-08 (no route to host)
        remote = ("sha256.tbs-internet.com", 443)
        sha256_cert = os.path.join(os.path.dirname(__file__), "sha256.pem")
        with support.transient_internet("sha256.tbs-internet.com"):
            ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(sha256_cert)
            s = ctx.wrap_socket(socket.socket(socket.AF_INET),
                                server_hostname="sha256.tbs-internet.com")
            try:
                s.connect(remote)
                if support.verbose:
                    sys.stdout.write("\nCipher with %r is %r\n" %
                                     (remote, s.cipher()))
                    sys.stdout.write("Certificate is:\n%s\n" %
                                     pprint.pformat(s.getpeercert()))
            finally:
                s.close()

    def test_get_ca_certs_capath(self):
        # capath certs are loaded on request
        with support.transient_internet("svn.python.org"):
            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            ctx.verify_mode = ssl.CERT_REQUIRED
            ctx.load_verify_locations(capath=CAPATH)
            self.assertEqual(ctx.get_ca_certs(), [])
            s = ctx.wrap_socket(socket.socket(socket.AF_INET))
            s.connect(("svn.python.org", 443))
            try:
                cert = s.getpeercert()
                self.assertTrue(cert)
            finally:
                s.close()
            # the CA used during verification is now visible
            self.assertEqual(len(ctx.get_ca_certs()), 1)

    @needs_sni
    def test_context_setget(self):
        # Check that the context of a connected socket can be replaced.
        with support.transient_internet("svn.python.org"):
            ctx1 = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
            ctx2 = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            s = socket.socket(socket.AF_INET)
            with ctx1.wrap_socket(s) as ss:
                ss.connect(("svn.python.org", 443))
                self.assertIs(ss.context, ctx1)
                self.assertIs(ss._sslobj.context, ctx1)
                ss.context = ctx2
                self.assertIs(ss.context, ctx2)
                self.assertIs(ss._sslobj.context, ctx2)
try:
import threading
except ImportError:
_have_threads = False
else:
_have_threads = True
from test.ssl_servers import make_https_server
class ThreadedEchoServer(threading.Thread):
    """Threaded TCP echo server, optionally TLS-wrapped.

    Echoes each received message lowercased.  Understands a few magic
    commands: b'over' (close connection), b'STARTTLS' / b'ENDTLS'
    (upgrade/downgrade the connection when started as a STARTTLS server)
    and b'CB tls-unique' (report channel binding data).
    """

    class ConnectionHandler(threading.Thread):

        """A mildly complicated class, because we want it to work both
        with and without the SSL wrapper around the socket connection, so
        that we can test the STARTTLS functionality."""

        def __init__(self, server, connsock, addr):
            self.server = server
            self.running = False
            self.sock = connsock
            self.addr = addr
            self.sock.setblocking(1)
            # sslconn is None while the connection is unencrypted
            self.sslconn = None
            threading.Thread.__init__(self)
            self.daemon = True

        def wrap_conn(self):
            """Wrap self.sock in TLS; return True on success.

            On failure the error is recorded in server.conn_errors and
            the whole server is stopped.
            """
            try:
                self.sslconn = self.server.context.wrap_socket(
                    self.sock, server_side=True)
                self.server.selected_protocols.append(self.sslconn.selected_npn_protocol())
            except (ssl.SSLError, ConnectionResetError) as e:
                # We treat ConnectionResetError as though it were an
                # SSLError - OpenSSL on Ubuntu abruptly closes the
                # connection when asked to use an unsupported protocol.
                #
                # XXX Various errors can have happened here, for example
                # a mismatching protocol version, an invalid certificate,
                # or a low-level bug. This should be made more discriminating.
                self.server.conn_errors.append(e)
                if self.server.chatty:
                    handle_error("\n server: bad connection attempt from " + repr(self.addr) + ":\n")
                self.running = False
                self.server.stop()
                self.close()
                return False
            else:
                if self.server.context.verify_mode == ssl.CERT_REQUIRED:
                    cert = self.sslconn.getpeercert()
                    if support.verbose and self.server.chatty:
                        sys.stdout.write(" client cert is " + pprint.pformat(cert) + "\n")
                    cert_binary = self.sslconn.getpeercert(True)
                    if support.verbose and self.server.chatty:
                        sys.stdout.write(" cert binary is " + str(len(cert_binary)) + " bytes\n")
                cipher = self.sslconn.cipher()
                if support.verbose and self.server.chatty:
                    sys.stdout.write(" server: connection cipher is now " + str(cipher) + "\n")
                    sys.stdout.write(" server: selected protocol is now "
                                     + str(self.sslconn.selected_npn_protocol()) + "\n")
                return True

        def read(self):
            # read from whichever layer is currently active
            if self.sslconn:
                return self.sslconn.read()
            else:
                return self.sock.recv(1024)

        def write(self, bytes):
            if self.sslconn:
                return self.sslconn.write(bytes)
            else:
                return self.sock.send(bytes)

        def close(self):
            if self.sslconn:
                self.sslconn.close()
            else:
                self.sock.close()

        def run(self):
            self.running = True
            if not self.server.starttls_server:
                # plain servers wrap immediately; STARTTLS servers wait
                # for the client's STARTTLS command
                if not self.wrap_conn():
                    return
            while self.running:
                try:
                    msg = self.read()
                    stripped = msg.strip()
                    if not stripped:
                        # eof, so quit this handler
                        self.running = False
                        self.close()
                    elif stripped == b'over':
                        if support.verbose and self.server.connectionchatty:
                            sys.stdout.write(" server: client closed connection\n")
                        self.close()
                        return
                    elif (self.server.starttls_server and
                          stripped == b'STARTTLS'):
                        if support.verbose and self.server.connectionchatty:
                            sys.stdout.write(" server: read STARTTLS from client, sending OK...\n")
                        self.write(b"OK\n")
                        if not self.wrap_conn():
                            return
                    elif (self.server.starttls_server and self.sslconn
                          and stripped == b'ENDTLS'):
                        if support.verbose and self.server.connectionchatty:
                            sys.stdout.write(" server: read ENDTLS from client, sending OK...\n")
                        self.write(b"OK\n")
                        # drop back to the unencrypted socket
                        self.sock = self.sslconn.unwrap()
                        self.sslconn = None
                        if support.verbose and self.server.connectionchatty:
                            sys.stdout.write(" server: connection is now unencrypted...\n")
                    elif stripped == b'CB tls-unique':
                        if support.verbose and self.server.connectionchatty:
                            sys.stdout.write(" server: read CB tls-unique from client, sending our CB data...\n")
                        data = self.sslconn.get_channel_binding("tls-unique")
                        self.write(repr(data).encode("us-ascii") + b"\n")
                    else:
                        # default: echo the message lowercased
                        if (support.verbose and
                            self.server.connectionchatty):
                            ctype = (self.sslconn and "encrypted") or "unencrypted"
                            sys.stdout.write(" server: read %r (%s), sending back %r (%s)...\n"
                                             % (msg, ctype, msg.lower(), ctype))
                        self.write(msg.lower())
                except OSError:
                    if self.server.chatty:
                        handle_error("Test server failure:\n")
                    self.close()
                    self.running = False
                    # normally, we'd just stop here, but for the test
                    # harness, we want to stop the server
                    self.server.stop()

    def __init__(self, certificate=None, ssl_version=None,
                 certreqs=None, cacerts=None,
                 chatty=True, connectionchatty=False, starttls_server=False,
                 npn_protocols=None, ciphers=None, context=None):
        # either take a ready-made context, or build one from the
        # individual keyword arguments
        if context:
            self.context = context
        else:
            self.context = ssl.SSLContext(ssl_version
                                          if ssl_version is not None
                                          else ssl.PROTOCOL_TLSv1)
            self.context.verify_mode = (certreqs if certreqs is not None
                                        else ssl.CERT_NONE)
            if cacerts:
                self.context.load_verify_locations(cacerts)
            if certificate:
                self.context.load_cert_chain(certificate)
            if npn_protocols:
                self.context.set_npn_protocols(npn_protocols)
            if ciphers:
                self.context.set_ciphers(ciphers)
        self.chatty = chatty
        self.connectionchatty = connectionchatty
        self.starttls_server = starttls_server
        self.sock = socket.socket()
        self.port = support.bind_port(self.sock)
        self.flag = None
        self.active = False
        self.selected_protocols = []
        self.conn_errors = []
        threading.Thread.__init__(self)
        self.daemon = True

    def __enter__(self):
        # start the thread and wait until it is actually listening
        self.start(threading.Event())
        self.flag.wait()
        return self

    def __exit__(self, *args):
        self.stop()
        self.join()

    def start(self, flag=None):
        self.flag = flag
        threading.Thread.start(self)

    def run(self):
        # short accept timeout so stop() is noticed promptly
        self.sock.settimeout(0.05)
        self.sock.listen(5)
        self.active = True
        if self.flag:
            # signal an event
            self.flag.set()
        while self.active:
            try:
                newconn, connaddr = self.sock.accept()
                if support.verbose and self.chatty:
                    sys.stdout.write(' server: new connection from '
                                     + repr(connaddr) + '\n')
                handler = self.ConnectionHandler(self, newconn, connaddr)
                handler.start()
                handler.join()
            except socket.timeout:
                pass
            except KeyboardInterrupt:
                self.stop()
        self.sock.close()

    def stop(self):
        self.active = False
class AsyncoreEchoServer(threading.Thread):
    """TLS echo server built on asyncore, run in a background thread.

    Echoes received data lowercased; the TLS handshake is driven
    incrementally from the asyncore event loop.
    """

    # this one's based on asyncore.dispatcher

    class EchoServer (asyncore.dispatcher):

        class ConnectionHandler (asyncore.dispatcher_with_send):

            def __init__(self, conn, certfile):
                # wrap immediately but complete the handshake lazily,
                # from readable()/handle_read() events
                self.socket = ssl.wrap_socket(conn, server_side=True,
                                              certfile=certfile,
                                              do_handshake_on_connect=False)
                asyncore.dispatcher_with_send.__init__(self, self.socket)
                self._ssl_accepting = True
                self._do_ssl_handshake()

            def readable(self):
                if isinstance(self.socket, ssl.SSLSocket):
                    # drain any data buffered inside the SSL layer,
                    # since select() can't see it
                    while self.socket.pending() > 0:
                        self.handle_read_event()
                return True

            def _do_ssl_handshake(self):
                try:
                    self.socket.do_handshake()
                except (ssl.SSLWantReadError, ssl.SSLWantWriteError):
                    # not finished yet; try again on the next event
                    return
                except ssl.SSLEOFError:
                    return self.handle_close()
                except ssl.SSLError:
                    raise
                except OSError as err:
                    if err.args[0] == errno.ECONNABORTED:
                        return self.handle_close()
                else:
                    self._ssl_accepting = False

            def handle_read(self):
                if self._ssl_accepting:
                    self._do_ssl_handshake()
                else:
                    data = self.recv(1024)
                    if support.verbose:
                        sys.stdout.write(" server: read %s from client\n" % repr(data))
                    if not data:
                        self.close()
                    else:
                        self.send(data.lower())

            def handle_close(self):
                self.close()
                if support.verbose:
                    sys.stdout.write(" server: closed connection %s\n" % self.socket)

            def handle_error(self):
                raise

        def __init__(self, certfile):
            self.certfile = certfile
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.port = support.bind_port(sock, '')
            asyncore.dispatcher.__init__(self, sock)
            self.listen(5)

        def handle_accepted(self, sock_obj, addr):
            if support.verbose:
                sys.stdout.write(" server: new connection from %s:%s\n" % addr)
            self.ConnectionHandler(sock_obj, self.certfile)

        def handle_error(self):
            raise

    def __init__(self, certfile):
        self.flag = None
        self.active = False
        self.server = self.EchoServer(certfile)
        self.port = self.server.port
        threading.Thread.__init__(self)
        self.daemon = True

    def __str__(self):
        return "<%s %s>" % (self.__class__.__name__, self.server)

    def __enter__(self):
        # start the loop thread and wait until it is running
        self.start(threading.Event())
        self.flag.wait()
        return self

    def __exit__(self, *args):
        if support.verbose:
            sys.stdout.write(" cleanup: stopping server.\n")
        self.stop()
        if support.verbose:
            sys.stdout.write(" cleanup: joining server thread.\n")
        self.join()
        if support.verbose:
            sys.stdout.write(" cleanup: successfully joined.\n")

    def start (self, flag=None):
        self.flag = flag
        threading.Thread.start(self)

    def run(self):
        self.active = True
        if self.flag:
            self.flag.set()
        while self.active:
            try:
                asyncore.loop(1)
            except:
                pass

    def stop(self):
        self.active = False
        self.server.close()
def bad_cert_test(certfile):
    """
    Launch a server with CERT_REQUIRED, and check that trying to
    connect to it with the given client certificate fails.

    NOTE(review): the original had two identical ``except OSError``
    clauses — the second (which re-raised non-ENOENT errors) was
    unreachable because the first caught everything — and its message
    used the invalid escape sequence ``"\\O"``.  The unreachable clause
    is removed and the reachable handler kept, preserving the observed
    behavior (any OSError is reported and swallowed).
    """
    server = ThreadedEchoServer(CERTFILE,
                                certreqs=ssl.CERT_REQUIRED,
                                cacerts=CERTFILE, chatty=False,
                                connectionchatty=False)
    with server:
        try:
            with socket.socket() as sock:
                s = ssl.wrap_socket(sock,
                                    certfile=certfile,
                                    ssl_version=ssl.PROTOCOL_TLSv1)
                s.connect((HOST, server.port))
        except ssl.SSLError as x:
            if support.verbose:
                sys.stdout.write("\nSSLError is %s\n" % x.args[1])
        except OSError as x:
            # covers both a rejected handshake and a missing cert file
            if support.verbose:
                sys.stdout.write("\nOSError is %s\n" % x.args[1])
        else:
            raise AssertionError("Use of invalid cert should have failed!")
def server_params_test(client_context, server_context, indata=b"FOO\n",
                       chatty=True, connectionchatty=False, sni_name=None):
    """
    Launch a server, connect a client to it and try various reads
    and writes.

    Returns a ``stats`` dict with the negotiated compression, cipher,
    peer certificate and NPN protocol information for further checks.
    """
    stats = {}
    # NOTE(review): the server is always created with
    # connectionchatty=False, regardless of this function's
    # ``connectionchatty`` parameter, which only controls client-side
    # logging below — confirm this asymmetry is intentional.
    server = ThreadedEchoServer(context=server_context,
                                chatty=chatty,
                                connectionchatty=False)
    with server:
        with client_context.wrap_socket(socket.socket(),
                server_hostname=sni_name) as s:
            s.connect((HOST, server.port))
            # Exercise all bytes-like argument types accepted by write().
            for arg in [indata, bytearray(indata), memoryview(indata)]:
                if connectionchatty:
                    if support.verbose:
                        sys.stdout.write(
                            " client: sending %r...\n" % indata)
                s.write(arg)
                outdata = s.read()
                if connectionchatty:
                    if support.verbose:
                        sys.stdout.write(" client: read %r\n" % outdata)
                # The echo server lower-cases what it receives.
                if outdata != indata.lower():
                    raise AssertionError(
                        "bad data <<%r>> (%d) received; expected <<%r>> (%d)\n"
                        % (outdata[:20], len(outdata),
                           indata[:20].lower(), len(indata)))
            # "over\n" tells the echo server to end the session.
            s.write(b"over\n")
            if connectionchatty:
                if support.verbose:
                    sys.stdout.write(" client: closing connection.\n")
            stats.update({
                'compression': s.compression(),
                'cipher': s.cipher(),
                'peercert': s.getpeercert(),
                'client_npn_protocol': s.selected_npn_protocol()
            })
            s.close()
        stats['server_npn_protocols'] = server.selected_protocols
    return stats
def try_protocol_combo(server_protocol, client_protocol, expect_success,
                       certsreqs=None, server_options=0, client_options=0):
    """Run one client/server handshake with the given protocol pairing.

    Raises if the handshake succeeds when *expect_success* is false, or
    fails (with SSLError / ECONNRESET) when it is true; other errors
    always propagate.
    """
    if certsreqs is None:
        certsreqs = ssl.CERT_NONE
    # Human-readable name of the cert requirement, for verbose output.
    certtype = {
        ssl.CERT_NONE: "CERT_NONE",
        ssl.CERT_OPTIONAL: "CERT_OPTIONAL",
        ssl.CERT_REQUIRED: "CERT_REQUIRED",
    }[certsreqs]
    if support.verbose:
        # Braces mark combinations that are expected to fail.
        formatstr = (expect_success and " %s->%s %s\n") or " {%s->%s} %s\n"
        sys.stdout.write(formatstr %
                         (ssl.get_protocol_name(client_protocol),
                          ssl.get_protocol_name(server_protocol),
                          certtype))
    client_context = ssl.SSLContext(client_protocol)
    client_context.options |= client_options
    server_context = ssl.SSLContext(server_protocol)
    server_context.options |= server_options

    # NOTE: we must enable "ALL" ciphers on the client, otherwise an
    # SSLv23 client will send an SSLv3 hello (rather than SSLv2)
    # starting from OpenSSL 1.0.0 (see issue #8322).
    if client_context.protocol == ssl.PROTOCOL_SSLv23:
        client_context.set_ciphers("ALL")

    for ctx in (client_context, server_context):
        ctx.verify_mode = certsreqs
        ctx.load_cert_chain(CERTFILE)
        ctx.load_verify_locations(CERTFILE)
    try:
        server_params_test(client_context, server_context,
                           chatty=False, connectionchatty=False)
    # Protocol mismatch can result in either an SSLError, or a
    # "Connection reset by peer" error.
    except ssl.SSLError:
        if expect_success:
            raise
    except OSError as e:
        if expect_success or e.errno != errno.ECONNRESET:
            raise
    else:
        if not expect_success:
            raise AssertionError(
                "Client protocol %s succeeded with server protocol %s!"
                % (ssl.get_protocol_name(client_protocol),
                   ssl.get_protocol_name(server_protocol)))
class ThreadedTests(unittest.TestCase):
@skip_if_broken_ubuntu_ssl
def test_echo(self):
"""Basic test of an SSL client connecting to a server"""
if support.verbose:
sys.stdout.write("\n")
for protocol in PROTOCOLS:
with self.subTest(protocol=ssl._PROTOCOL_NAMES[protocol]):
context = ssl.SSLContext(protocol)
context.load_cert_chain(CERTFILE)
server_params_test(context, context,
chatty=True, connectionchatty=True)
def test_getpeercert(self):
if support.verbose:
sys.stdout.write("\n")
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERTFILE)
context.load_cert_chain(CERTFILE)
server = ThreadedEchoServer(context=context, chatty=False)
with server:
s = context.wrap_socket(socket.socket(),
do_handshake_on_connect=False)
s.connect((HOST, server.port))
# getpeercert() raise ValueError while the handshake isn't
# done.
with self.assertRaises(ValueError):
s.getpeercert()
s.do_handshake()
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
cipher = s.cipher()
if support.verbose:
sys.stdout.write(pprint.pformat(cert) + '\n')
sys.stdout.write("Connection cipher is " + str(cipher) + '.\n')
if 'subject' not in cert:
self.fail("No subject field in certificate: %s." %
pprint.pformat(cert))
if ((('organizationName', 'Python Software Foundation'),)
not in cert['subject']):
self.fail(
"Missing or invalid 'organizationName' field in certificate subject; "
"should be 'Python Software Foundation'.")
self.assertIn('notBefore', cert)
self.assertIn('notAfter', cert)
before = ssl.cert_time_to_seconds(cert['notBefore'])
after = ssl.cert_time_to_seconds(cert['notAfter'])
self.assertLess(before, after)
s.close()
@unittest.skipUnless(have_verify_flags(),
"verify_flags need OpenSSL > 0.9.8")
def test_crl_check(self):
if support.verbose:
sys.stdout.write("\n")
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(SIGNED_CERTFILE)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(SIGNING_CA)
self.assertEqual(context.verify_flags, ssl.VERIFY_DEFAULT)
# VERIFY_DEFAULT should pass
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket()) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
# VERIFY_CRL_CHECK_LEAF without a loaded CRL file fails
context.verify_flags |= ssl.VERIFY_CRL_CHECK_LEAF
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket()) as s:
with self.assertRaisesRegex(ssl.SSLError,
"certificate verify failed"):
s.connect((HOST, server.port))
# now load a CRL file. The CRL file is signed by the CA.
context.load_verify_locations(CRLFILE)
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket()) as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
@needs_sni
def test_check_hostname(self):
if support.verbose:
sys.stdout.write("\n")
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(SIGNED_CERTFILE)
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
context.load_verify_locations(SIGNING_CA)
# correct hostname should verify
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket(),
server_hostname="localhost") as s:
s.connect((HOST, server.port))
cert = s.getpeercert()
self.assertTrue(cert, "Can't get peer certificate.")
# incorrect hostname should raise an exception
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with context.wrap_socket(socket.socket(),
server_hostname="invalid") as s:
with self.assertRaisesRegex(ssl.CertificateError,
"hostname 'invalid' doesn't match 'localhost'"):
s.connect((HOST, server.port))
# missing server_hostname arg should cause an exception, too
server = ThreadedEchoServer(context=server_context, chatty=True)
with server:
with socket.socket() as s:
with self.assertRaisesRegex(ValueError,
"check_hostname requires server_hostname"):
context.wrap_socket(s)
def test_empty_cert(self):
"""Connecting with an empty cert file"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"nullcert.pem"))
def test_malformed_cert(self):
"""Connecting with a badly formatted certificate (syntax error)"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"badcert.pem"))
def test_nonexisting_cert(self):
"""Connecting with a non-existing cert file"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"wrongcert.pem"))
def test_malformed_key(self):
"""Connecting with a badly formatted key (syntax error)"""
bad_cert_test(os.path.join(os.path.dirname(__file__) or os.curdir,
"badkey.pem"))
def test_rude_shutdown(self):
"""A brutal shutdown of an SSL server should raise an OSError
in the client when attempting handshake.
"""
listener_ready = threading.Event()
listener_gone = threading.Event()
s = socket.socket()
port = support.bind_port(s, HOST)
# `listener` runs in a thread. It sits in an accept() until
# the main thread connects. Then it rudely closes the socket,
# and sets Event `listener_gone` to let the main thread know
# the socket is gone.
def listener():
s.listen(5)
listener_ready.set()
newsock, addr = s.accept()
newsock.close()
s.close()
listener_gone.set()
def connector():
listener_ready.wait()
with socket.socket() as c:
c.connect((HOST, port))
listener_gone.wait()
try:
ssl_sock = ssl.wrap_socket(c)
except OSError:
pass
else:
self.fail('connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
try:
connector()
finally:
t.join()
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, 'PROTOCOL_SSLv2'),
"OpenSSL is compiled without SSLv2 support")
def test_protocol_sslv2(self):
"""Connecting to an SSLv2 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv2, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_TLSv1, False)
# SSLv23 client with specific SSL options
if no_sslv2_implies_sslv3_hello():
# No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_SSLv2)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv23(self):
"""Connecting to an SSLv23 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try:
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv2, True)
except OSError as x:
# this fails on some older versions of OpenSSL (0.9.7l, for instance)
if support.verbose:
sys.stdout.write(
" SSL2 client to SSL23 server test unexpectedly failed:\n %s\n"
% str(x))
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True, ssl.CERT_REQUIRED)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
# Server with specific SSL options
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv3, False,
server_options=ssl.OP_NO_SSLv3)
# Will choose TLSv1
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23, True,
server_options=ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1, False,
server_options=ssl.OP_NO_TLSv1)
@skip_if_broken_ubuntu_ssl
def test_protocol_sslv3(self):
"""Connecting to an SSLv3 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv3, True, ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_SSLv3)
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_TLSv1, False)
if no_sslv2_implies_sslv3_hello():
# No SSLv2 => client will use an SSLv3 hello on recent OpenSSLs
try_protocol_combo(ssl.PROTOCOL_SSLv3, ssl.PROTOCOL_SSLv23, True,
client_options=ssl.OP_NO_SSLv2)
@skip_if_broken_ubuntu_ssl
def test_protocol_tlsv1(self):
"""Connecting to a TLSv1 server with various client options"""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_OPTIONAL)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1, True, ssl.CERT_REQUIRED)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1)
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_1"),
"TLS version 1.1 not supported.")
def test_protocol_tlsv1_1(self):
"""Connecting to a TLSv1.1 server with various client options.
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_1, True)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1_1)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_1, True)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_1, False)
@skip_if_broken_ubuntu_ssl
@unittest.skipUnless(hasattr(ssl, "PROTOCOL_TLSv1_2"),
"TLS version 1.2 not supported.")
def test_protocol_tlsv1_2(self):
"""Connecting to a TLSv1.2 server with various client options.
Testing against older TLS versions."""
if support.verbose:
sys.stdout.write("\n")
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_2, True,
server_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,
client_options=ssl.OP_NO_SSLv3|ssl.OP_NO_SSLv2,)
if hasattr(ssl, 'PROTOCOL_SSLv2'):
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv3, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_SSLv23, False,
client_options=ssl.OP_NO_TLSv1_2)
try_protocol_combo(ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_TLSv1_2, True)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1_2, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_2, ssl.PROTOCOL_TLSv1_1, False)
try_protocol_combo(ssl.PROTOCOL_TLSv1_1, ssl.PROTOCOL_TLSv1_2, False)
def test_starttls(self):
"""Switching from clear text to encrypted and back again."""
msgs = (b"msg 1", b"MSG 2", b"STARTTLS", b"MSG 3", b"msg 4", b"ENDTLS", b"msg 5", b"msg 6")
server = ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_TLSv1,
starttls_server=True,
chatty=True,
connectionchatty=True)
wrapped = False
with server:
s = socket.socket()
s.setblocking(1)
s.connect((HOST, server.port))
if support.verbose:
sys.stdout.write("\n")
for indata in msgs:
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
if wrapped:
conn.write(indata)
outdata = conn.read()
else:
s.send(indata)
outdata = s.recv(1024)
msg = outdata.strip().lower()
if indata == b"STARTTLS" and msg.startswith(b"ok"):
# STARTTLS ok, switch to secure mode
if support.verbose:
sys.stdout.write(
" client: read %r from server, starting TLS...\n"
% msg)
conn = ssl.wrap_socket(s, ssl_version=ssl.PROTOCOL_TLSv1)
wrapped = True
elif indata == b"ENDTLS" and msg.startswith(b"ok"):
# ENDTLS ok, switch back to clear text
if support.verbose:
sys.stdout.write(
" client: read %r from server, ending TLS...\n"
% msg)
s = conn.unwrap()
wrapped = False
else:
if support.verbose:
sys.stdout.write(
" client: read %r from server\n" % msg)
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
if wrapped:
conn.write(b"over\n")
else:
s.send(b"over\n")
if wrapped:
conn.close()
else:
s.close()
def test_socketserver(self):
"""Using a SocketServer to create and manage SSL connections."""
server = make_https_server(self, certfile=CERTFILE)
# try to connect
if support.verbose:
sys.stdout.write('\n')
with open(CERTFILE, 'rb') as f:
d1 = f.read()
d2 = ''
# now fetch the same data from the HTTPS server
url = 'https://%s:%d/%s' % (
HOST, server.port, os.path.split(CERTFILE)[1])
f = urllib.request.urlopen(url)
try:
dlen = f.info().get("content-length")
if dlen and (int(dlen) > 0):
d2 = f.read(int(dlen))
if support.verbose:
sys.stdout.write(
" client: read %d bytes from remote server '%s'\n"
% (len(d2), server))
finally:
f.close()
self.assertEqual(d1, d2)
def test_asyncore_server(self):
"""Check the example asyncore integration."""
indata = "TEST MESSAGE of mixed case\n"
if support.verbose:
sys.stdout.write("\n")
indata = b"FOO\n"
server = AsyncoreEchoServer(CERTFILE)
with server:
s = ssl.wrap_socket(socket.socket())
s.connect(('127.0.0.1', server.port))
if support.verbose:
sys.stdout.write(
" client: sending %r...\n" % indata)
s.write(indata)
outdata = s.read()
if support.verbose:
sys.stdout.write(" client: read %r\n" % outdata)
if outdata != indata.lower():
self.fail(
"bad data <<%r>> (%d) received; expected <<%r>> (%d)\n"
% (outdata[:20], len(outdata),
indata[:20].lower(), len(indata)))
s.write(b"over\n")
if support.verbose:
sys.stdout.write(" client: closing connection.\n")
s.close()
if support.verbose:
sys.stdout.write(" client: connection closed.\n")
def test_recv_send(self):
"""Test recv(), send() and friends."""
if support.verbose:
sys.stdout.write("\n")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
# helper methods for standardising recv* method signatures
def _recv_into():
b = bytearray(b"\0"*100)
count = s.recv_into(b)
return b[:count]
def _recvfrom_into():
b = bytearray(b"\0"*100)
count, addr = s.recvfrom_into(b)
return b[:count]
# (name, method, whether to expect success, *args)
send_methods = [
('send', s.send, True, []),
('sendto', s.sendto, False, ["some.address"]),
('sendall', s.sendall, True, []),
]
recv_methods = [
('recv', s.recv, True, []),
('recvfrom', s.recvfrom, False, ["some.address"]),
('recv_into', _recv_into, True, []),
('recvfrom_into', _recvfrom_into, False, []),
]
data_prefix = "PREFIX_"
for meth_name, send_meth, expect_success, args in send_methods:
indata = (data_prefix + meth_name).encode('ascii')
try:
send_meth(indata, *args)
outdata = s.read()
if outdata != indata.lower():
self.fail(
"While sending with <<{name:s}>> bad data "
"<<{outdata:r}>> ({nout:d}) received; "
"expected <<{indata:r}>> ({nin:d})\n".format(
name=meth_name, outdata=outdata[:20],
nout=len(outdata),
indata=indata[:20], nin=len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to send with method <<{name:s}>>; "
"expected to succeed.\n".format(name=meth_name)
)
if not str(e).startswith(meth_name):
self.fail(
"Method <<{name:s}>> failed with unexpected "
"exception message: {exp:s}\n".format(
name=meth_name, exp=e
)
)
for meth_name, recv_meth, expect_success, args in recv_methods:
indata = (data_prefix + meth_name).encode('ascii')
try:
s.send(indata)
outdata = recv_meth(*args)
if outdata != indata.lower():
self.fail(
"While receiving with <<{name:s}>> bad data "
"<<{outdata:r}>> ({nout:d}) received; "
"expected <<{indata:r}>> ({nin:d})\n".format(
name=meth_name, outdata=outdata[:20],
nout=len(outdata),
indata=indata[:20], nin=len(indata)
)
)
except ValueError as e:
if expect_success:
self.fail(
"Failed to receive with method <<{name:s}>>; "
"expected to succeed.\n".format(name=meth_name)
)
if not str(e).startswith(meth_name):
self.fail(
"Method <<{name:s}>> failed with unexpected "
"exception message: {exp:s}\n".format(
name=meth_name, exp=e
)
)
# consume data
s.read()
# Make sure sendmsg et al are disallowed to avoid
# inadvertent disclosure of data and/or corruption
# of the encrypted data stream
self.assertRaises(NotImplementedError, s.sendmsg, [b"data"])
self.assertRaises(NotImplementedError, s.recvmsg, 100)
self.assertRaises(NotImplementedError,
s.recvmsg_into, bytearray(100))
s.write(b"over\n")
s.close()
def test_nonblocking_send(self):
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
s.setblocking(False)
# If we keep sending data, at some point the buffers
# will be full and the call will block
buf = bytearray(8192)
def fill_buffer():
while True:
s.send(buf)
self.assertRaises((ssl.SSLWantWriteError,
ssl.SSLWantReadError), fill_buffer)
# Now read all the output and discard it
s.setblocking(True)
s.close()
def test_handshake_timeout(self):
# Issue #5103: SSL handshake must respect the socket timeout
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = support.bind_port(server)
started = threading.Event()
finish = False
def serve():
server.listen(5)
started.set()
conns = []
while not finish:
r, w, e = select.select([server], [], [], 0.1)
if server in r:
# Let the socket hang around rather than having
# it closed by garbage collection.
conns.append(server.accept()[0])
for sock in conns:
sock.close()
t = threading.Thread(target=serve)
t.start()
started.wait()
try:
try:
c = socket.socket(socket.AF_INET)
c.settimeout(0.2)
c.connect((host, port))
# Will attempt handshake and time out
self.assertRaisesRegex(socket.timeout, "timed out",
ssl.wrap_socket, c)
finally:
c.close()
try:
c = socket.socket(socket.AF_INET)
c = ssl.wrap_socket(c)
c.settimeout(0.2)
# Will attempt handshake and time out
self.assertRaisesRegex(socket.timeout, "timed out",
c.connect, (host, port))
finally:
c.close()
finally:
finish = True
t.join()
server.close()
def test_server_accept(self):
# Issue #16357: accept() on a SSLSocket created through
# SSLContext.wrap_socket().
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.load_verify_locations(CERTFILE)
context.load_cert_chain(CERTFILE)
server = socket.socket(socket.AF_INET)
host = "127.0.0.1"
port = support.bind_port(server)
server = context.wrap_socket(server, server_side=True)
evt = threading.Event()
remote = None
peer = None
def serve():
nonlocal remote, peer
server.listen(5)
# Block on the accept and wait on the connection to close.
evt.set()
remote, peer = server.accept()
remote.recv(1)
t = threading.Thread(target=serve)
t.start()
# Client wait until server setup and perform a connect.
evt.wait()
client = context.wrap_socket(socket.socket())
client.connect((host, port))
client_addr = client.getsockname()
client.close()
t.join()
remote.close()
server.close()
# Sanity checks.
self.assertIsInstance(remote, ssl.SSLSocket)
self.assertEqual(peer, client_addr)
def test_getpeercert_enotconn(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with context.wrap_socket(socket.socket()) as sock:
with self.assertRaises(OSError) as cm:
sock.getpeercert()
self.assertEqual(cm.exception.errno, errno.ENOTCONN)
def test_do_handshake_enotconn(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
with context.wrap_socket(socket.socket()) as sock:
with self.assertRaises(OSError) as cm:
sock.do_handshake()
self.assertEqual(cm.exception.errno, errno.ENOTCONN)
def test_default_ciphers(self):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
try:
# Force a set of weak ciphers on our client context
context.set_ciphers("DES")
except ssl.SSLError:
self.skipTest("no DES cipher available")
with ThreadedEchoServer(CERTFILE,
ssl_version=ssl.PROTOCOL_SSLv23,
chatty=False) as server:
with context.wrap_socket(socket.socket()) as s:
with self.assertRaises(OSError):
s.connect((HOST, server.port))
self.assertIn("no shared cipher", str(server.conn_errors[0]))
@unittest.skipUnless(ssl.HAS_ECDH, "test requires ECDH-enabled OpenSSL")
def test_default_ecdh_curve(self):
# Issue #21015: elliptic curve-based Diffie Hellman key exchange
# should be enabled by default on SSL contexts.
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.load_cert_chain(CERTFILE)
# Prior to OpenSSL 1.0.0, ECDH ciphers have to be enabled
# explicitly using the 'ECCdraft' cipher alias. Otherwise,
# our default cipher list should prefer ECDH-based ciphers
# automatically.
if ssl.OPENSSL_VERSION_INFO < (1, 0, 0):
context.set_ciphers("ECCdraft:ECDH")
with ThreadedEchoServer(context=context) as server:
with context.wrap_socket(socket.socket()) as s:
s.connect((HOST, server.port))
self.assertIn("ECDH", s.cipher()[0])
@unittest.skipUnless("tls-unique" in ssl.CHANNEL_BINDING_TYPES,
"'tls-unique' channel binding not available")
def test_tls_unique_channel_binding(self):
"""Test tls-unique channel binding."""
if support.verbose:
sys.stdout.write("\n")
server = ThreadedEchoServer(CERTFILE,
certreqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1,
cacerts=CERTFILE,
chatty=True,
connectionchatty=False)
with server:
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
# get the data
cb_data = s.get_channel_binding("tls-unique")
if support.verbose:
sys.stdout.write(" got channel binding data: {0!r}\n"
.format(cb_data))
# check if it is sane
self.assertIsNotNone(cb_data)
self.assertEqual(len(cb_data), 12) # True for TLSv1
# and compare with the peers version
s.write(b"CB tls-unique\n")
peer_data_repr = s.read().strip()
self.assertEqual(peer_data_repr,
repr(cb_data).encode("us-ascii"))
s.close()
# now, again
s = ssl.wrap_socket(socket.socket(),
server_side=False,
certfile=CERTFILE,
ca_certs=CERTFILE,
cert_reqs=ssl.CERT_NONE,
ssl_version=ssl.PROTOCOL_TLSv1)
s.connect((HOST, server.port))
new_cb_data = s.get_channel_binding("tls-unique")
if support.verbose:
sys.stdout.write(" got another channel binding data: {0!r}\n"
.format(new_cb_data))
# is it really unique
self.assertNotEqual(cb_data, new_cb_data)
self.assertIsNotNone(cb_data)
self.assertEqual(len(cb_data), 12) # True for TLSv1
s.write(b"CB tls-unique\n")
peer_data_repr = s.read().strip()
self.assertEqual(peer_data_repr,
repr(new_cb_data).encode("us-ascii"))
s.close()
def test_compression(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
if support.verbose:
sys.stdout.write(" got compression: {!r}\n".format(stats['compression']))
self.assertIn(stats['compression'], { None, 'ZLIB', 'RLE' })
@unittest.skipUnless(hasattr(ssl, 'OP_NO_COMPRESSION'),
"ssl.OP_NO_COMPRESSION needed for this test")
def test_compression_disabled(self):
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
context.options |= ssl.OP_NO_COMPRESSION
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
self.assertIs(stats['compression'], None)
def test_dh_params(self):
# Check we can get a connection with ephemeral Diffie-Hellman
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
context.load_dh_params(DHFILE)
context.set_ciphers("kEDH")
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
cipher = stats["cipher"][0]
parts = cipher.split("-")
if "ADH" not in parts and "EDH" not in parts and "DHE" not in parts:
self.fail("Non-DH cipher: " + cipher[0])
def test_selected_npn_protocol(self):
# selected_npn_protocol() is None unless NPN is used
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
context.load_cert_chain(CERTFILE)
stats = server_params_test(context, context,
chatty=True, connectionchatty=True)
self.assertIs(stats['client_npn_protocol'], None)
@unittest.skipUnless(ssl.HAS_NPN, "NPN support needed for this test")
def test_npn_protocols(self):
server_protocols = ['http/1.1', 'spdy/2']
protocol_tests = [
(['http/1.1', 'spdy/2'], 'http/1.1'),
(['spdy/2', 'http/1.1'], 'http/1.1'),
(['spdy/2', 'test'], 'spdy/2'),
(['abc', 'def'], 'abc')
]
for client_protocols, expected in protocol_tests:
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(CERTFILE)
server_context.set_npn_protocols(server_protocols)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
client_context.load_cert_chain(CERTFILE)
client_context.set_npn_protocols(client_protocols)
stats = server_params_test(client_context, server_context,
chatty=True, connectionchatty=True)
msg = "failed trying %s (s) and %s (c).\n" \
"was expecting %s, but got %%s from the %%s" \
% (str(server_protocols), str(client_protocols),
str(expected))
client_result = stats['client_npn_protocol']
self.assertEqual(client_result, expected, msg % (client_result, "client"))
server_result = stats['server_npn_protocols'][-1] \
if len(stats['server_npn_protocols']) else 'nothing'
self.assertEqual(server_result, expected, msg % (server_result, "server"))
def sni_contexts(self):
server_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
server_context.load_cert_chain(SIGNED_CERTFILE)
other_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
other_context.load_cert_chain(SIGNED_CERTFILE2)
client_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
client_context.verify_mode = ssl.CERT_REQUIRED
client_context.load_verify_locations(SIGNING_CA)
return server_context, other_context, client_context
def check_common_name(self, stats, name):
cert = stats['peercert']
self.assertIn((('commonName', name),), cert['subject'])
@needs_sni
def test_sni_callback(self):
calls = []
server_context, other_context, client_context = self.sni_contexts()
def servername_cb(ssl_sock, server_name, initial_context):
calls.append((server_name, initial_context))
if server_name is not None:
ssl_sock.context = other_context
server_context.set_servername_callback(servername_cb)
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name='supermessage')
# The hostname was fetched properly, and the certificate was
# changed for the connection.
self.assertEqual(calls, [("supermessage", server_context)])
# CERTFILE4 was selected
self.check_common_name(stats, 'fakehostname')
calls = []
# The callback is called with server_name=None
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name=None)
self.assertEqual(calls, [(None, server_context)])
self.check_common_name(stats, 'localhost')
# Check disabling the callback
calls = []
server_context.set_servername_callback(None)
stats = server_params_test(client_context, server_context,
chatty=True,
sni_name='notfunny')
# Certificate didn't change
self.check_common_name(stats, 'localhost')
self.assertEqual(calls, [])
@needs_sni
def test_sni_callback_alert(self):
# Returning a TLS alert is reflected to the connecting client
server_context, other_context, client_context = self.sni_contexts()
def cb_returning_alert(ssl_sock, server_name, initial_context):
return ssl.ALERT_DESCRIPTION_ACCESS_DENIED
server_context.set_servername_callback(cb_returning_alert)
with self.assertRaises(ssl.SSLError) as cm:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_ACCESS_DENIED')
@needs_sni
def test_sni_callback_raising(self):
# Raising fails the connection with a TLS handshake failure alert.
server_context, other_context, client_context = self.sni_contexts()
def cb_raising(ssl_sock, server_name, initial_context):
1/0
server_context.set_servername_callback(cb_raising)
with self.assertRaises(ssl.SSLError) as cm, \
support.captured_stderr() as stderr:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'SSLV3_ALERT_HANDSHAKE_FAILURE')
self.assertIn("ZeroDivisionError", stderr.getvalue())
@needs_sni
def test_sni_callback_wrong_return_type(self):
# Returning the wrong return type terminates the TLS connection
# with an internal error alert.
server_context, other_context, client_context = self.sni_contexts()
def cb_wrong_return_type(ssl_sock, server_name, initial_context):
return "foo"
server_context.set_servername_callback(cb_wrong_return_type)
with self.assertRaises(ssl.SSLError) as cm, \
support.captured_stderr() as stderr:
stats = server_params_test(client_context, server_context,
chatty=False,
sni_name='supermessage')
self.assertEqual(cm.exception.reason, 'TLSV1_ALERT_INTERNAL_ERROR')
self.assertIn("TypeError", stderr.getvalue())
    def test_read_write_after_close_raises_valuerror(self):
        """read()/write() on a closed SSL socket must raise ValueError."""
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.verify_mode = ssl.CERT_REQUIRED
        context.load_verify_locations(CERTFILE)
        context.load_cert_chain(CERTFILE)
        server = ThreadedEchoServer(context=context, chatty=False)
        with server:
            s = context.wrap_socket(socket.socket())
            s.connect((HOST, server.port))
            s.close()
            # Any further I/O on the closed socket is invalid.
            self.assertRaises(ValueError, s.read, 1024)
            self.assertRaises(ValueError, s.write, b'hello')
def test_main(verbose=False):
if support.verbose:
plats = {
'Linux': platform.linux_distribution,
'Mac': platform.mac_ver,
'Windows': platform.win32_ver,
}
for name, func in plats.items():
plat = func()
if plat and plat[0]:
plat = '%s %r' % (name, plat)
break
else:
plat = repr(platform.platform())
print("test_ssl: testing with %r %r" %
(ssl.OPENSSL_VERSION, ssl.OPENSSL_VERSION_INFO))
print(" under %s" % plat)
print(" HAS_SNI = %r" % ssl.HAS_SNI)
print(" OP_ALL = 0x%8x" % ssl.OP_ALL)
try:
print(" OP_NO_TLSv1_1 = 0x%8x" % ssl.OP_NO_TLSv1_1)
except AttributeError:
pass
for filename in [
CERTFILE, SVN_PYTHON_ORG_ROOT_CERT, BYTES_CERTFILE,
ONLYCERT, ONLYKEY, BYTES_ONLYCERT, BYTES_ONLYKEY,
SIGNED_CERTFILE, SIGNED_CERTFILE2, SIGNING_CA,
BADCERT, BADKEY, EMPTYCERT]:
if not os.path.exists(filename):
raise support.TestFailed("Can't read certificate file %r" % filename)
tests = [ContextTests, BasicSocketTests, SSLErrorTests]
if support.is_resource_enabled('network'):
tests.append(NetworkedTests)
if _have_threads:
thread_info = support.threading_setup()
if thread_info:
tests.append(ThreadedTests)
try:
support.run_unittest(*tests)
finally:
if _have_threads:
support.threading_cleanup(*thread_info)
if __name__ == "__main__":
test_main()
| |
renderers.py
|
from rest_framework_json_api.renderers import JSONRenderer
from django.contrib.auth.models import AnonymousUser
|
class BluebottleJSONAPIRenderer(JSONRenderer):
    """JSON:API renderer that pretty-prints output and masks anonymous users.

    Anonymous users are serialized as a stub resource carrying only an
    ``is-anonymous`` flag, so none of their fields are ever exposed.
    """

    def get_indent(self, *args, **kwargs):
        # Always render responses with a four-space indent.
        return 4

    @classmethod
    def build_json_resource_obj(
        cls,
        fields,
        resource,
        resource_instance,
        resource_name,
        *args,
        **kwargs
    ):
        if not isinstance(resource_instance, AnonymousUser):
            # Normal users get the standard JSON:API resource object.
            return super().build_json_resource_obj(
                fields, resource, resource_instance, resource_name, *args, **kwargs
            )
        # Anonymous users: emit a minimal stub with no attributes leaked.
        return {
            'id': resource['id'],
            'type': resource_name,
            'attributes': {'is-anonymous': True},
        }
| |
test_module.py
|
# Test the module type
import unittest
import weakref
from test.support import gc_collect
from test.support import import_helper
from test.support.script_helper import assert_python_ok
import sys
ModuleType = type(sys)
class FullLoader:
@classmethod
def module_repr(cls, m):
return "<module '{}' (crafted)>".format(m.__name__)
class BareLoader:
pass
class ModuleTests(unittest.TestCase):
def test_uninitialized(self):
# An uninitialized module has no __dict__ or __name__,
# and __doc__ is None
foo = ModuleType.__new__(ModuleType)
self.assertTrue(isinstance(foo.__dict__, dict))
self.assertEqual(dir(foo), [])
try:
s = foo.__name__
self.fail("__name__ = %s" % repr(s))
except AttributeError:
pass
self.assertEqual(foo.__doc__, ModuleType.__doc__)
def test_uninitialized_missing_getattr(self):
# Issue 8297
# test the text in the AttributeError of an uninitialized module
foo = ModuleType.__new__(ModuleType)
self.assertRaisesRegex(
AttributeError, "module has no attribute 'not_here'",
getattr, foo, "not_here")
def test_missing_getattr(self):
# Issue 8297
# test the text in the AttributeError
foo = ModuleType("foo")
self.assertRaisesRegex(
AttributeError, "module 'foo' has no attribute 'not_here'",
getattr, foo, "not_here")
def test_no_docstring(self):
# Regularly initialized module, no docstring
foo = ModuleType("foo")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, None)
self.assertIs(foo.__loader__, None)
self.assertIs(foo.__package__, None)
self.assertIs(foo.__spec__, None)
self.assertEqual(foo.__dict__, {"__name__": "foo", "__doc__": None,
"__loader__": None, "__package__": None,
"__spec__": None})
def test_ascii_docstring(self):
# ASCII docstring
foo = ModuleType("foo", "foodoc")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, "foodoc")
self.assertEqual(foo.__dict__,
{"__name__": "foo", "__doc__": "foodoc",
"__loader__": None, "__package__": None,
"__spec__": None})
def test_unicode_docstring(self):
# Unicode docstring
foo = ModuleType("foo", "foodoc\u1234")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, "foodoc\u1234")
self.assertEqual(foo.__dict__,
{"__name__": "foo", "__doc__": "foodoc\u1234",
"__loader__": None, "__package__": None,
"__spec__": None})
def test_reinit(self):
# Reinitialization should not replace the __dict__
foo = ModuleType("foo", "foodoc\u1234")
foo.bar = 42
d = foo.__dict__
foo.__init__("foo", "foodoc")
self.assertEqual(foo.__name__, "foo")
self.assertEqual(foo.__doc__, "foodoc")
self.assertEqual(foo.bar, 42)
self.assertEqual(foo.__dict__,
{"__name__": "foo", "__doc__": "foodoc", "bar": 42,
"__loader__": None, "__package__": None, "__spec__": None})
self.assertTrue(foo.__dict__ is d)
def test_dont_clear_dict(self):
# See issue 7140.
def f():
foo = ModuleType("foo")
foo.bar = 4
return foo
gc_collect()
self.assertEqual(f().__dict__["bar"], 4)
def test_clear_dict_in_ref_cycle(self):
destroyed = []
m = ModuleType("foo")
m.destroyed = destroyed
s = """class A:
def __init__(self, l):
self.l = l
def __del__(self):
self.l.append(1)
a = A(destroyed)"""
exec(s, m.__dict__)
del m
gc_collect()
self.assertEqual(destroyed, [1])
    def test_weakref(self):
        """Modules support weak references, and the reference clears once
        the module is collected."""
        m = ModuleType("foo")
        wr = weakref.ref(m)
        self.assertIs(wr(), m)
        del m
        gc_collect()  # force a collection so the dead module is reclaimed deterministically
        self.assertIs(wr(), None)
def test_module_getattr(self):
import test.good_getattr as gga
from test.good_getattr import test
self.assertEqual(test, "There is test")
self.assertEqual(gga.x, 1)
self.assertEqual(gga.y, 2)
with self.assertRaisesRegex(AttributeError,
"Deprecated, use whatever instead"):
gga.yolo
self.assertEqual(gga.whatever, "There is whatever")
del sys.modules['test.good_getattr']
def test_module_getattr_errors(self):
import test.bad_getattr as bga
from test import bad_getattr2
self.assertEqual(bga.x, 1)
self.assertEqual(bad_getattr2.x, 1)
with self.assertRaises(TypeError):
bga.nope
with self.assertRaises(TypeError):
bad_getattr2.nope
del sys.modules['test.bad_getattr']
if 'test.bad_getattr2' in sys.modules:
del sys.modules['test.bad_getattr2']
def test_module_dir(self):
import test.good_getattr as gga
self.assertEqual(dir(gga), ['a', 'b', 'c'])
del sys.modules['test.good_getattr']
def test_module_dir_errors(self):
import test.bad_getattr as bga
from test import bad_getattr2
with self.assertRaises(TypeError):
dir(bga)
with self.assertRaises(TypeError):
dir(bad_getattr2)
del sys.modules['test.bad_getattr']
if 'test.bad_getattr2' in sys.modules:
del sys.modules['test.bad_getattr2']
def test_module_getattr_tricky(self):
from test import bad_getattr3
# these lookups should not crash
with self.assertRaises(AttributeError):
bad_getattr3.one
with self.assertRaises(AttributeError):
bad_getattr3.delgetattr
if 'test.bad_getattr3' in sys.modules:
del sys.modules['test.bad_getattr3']
def test_module_repr_minimal(self):
# reprs when modules have no __file__, __name__, or __loader__
m = ModuleType('foo')
del m.__name__
self.assertEqual(repr(m), "<module '?'>")
def test_module_repr_with_name(self):
m = ModuleType('foo')
self.assertEqual(repr(m), "<module 'foo'>")
def test_module_repr_with_name_and_filename(self):
m = ModuleType('foo')
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
def test_module_repr_with_filename_only(self):
m = ModuleType('foo')
del m.__name__
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module '?' from '/tmp/foo.py'>")
def test_module_repr_with_loader_as_None(self):
m = ModuleType('foo')
assert m.__loader__ is None
self.assertEqual(repr(m), "<module 'foo'>")
def test_module_repr_with_bare_loader_but_no_name(self):
m = ModuleType('foo')
del m.__name__
# Yes, a class not an instance.
m.__loader__ = BareLoader
loader_repr = repr(BareLoader)
self.assertEqual(
repr(m), "<module '?' ({})>".format(loader_repr))
def test_module_repr_with_full_loader_but_no_name(self):
# m.__loader__.module_repr() will fail because the module has no
# m.__name__. This exception will get suppressed and instead the
# loader's repr will be used.
m = ModuleType('foo')
del m.__name__
# Yes, a class not an instance.
m.__loader__ = FullLoader
loader_repr = repr(FullLoader)
self.assertEqual(
repr(m), "<module '?' ({})>".format(loader_repr))
def test_module_repr_with_bare_loader(self):
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = BareLoader
module_repr = repr(BareLoader)
self.assertEqual(
repr(m), "<module 'foo' ({})>".format(module_repr))
def
|
(self):
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = FullLoader
self.assertEqual(
repr(m), "<module 'foo' (crafted)>")
def test_module_repr_with_bare_loader_and_filename(self):
# Because the loader has no module_repr(), use the file name.
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = BareLoader
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module 'foo' from '/tmp/foo.py'>")
def test_module_repr_with_full_loader_and_filename(self):
# Even though the module has an __file__, use __loader__.module_repr()
m = ModuleType('foo')
# Yes, a class not an instance.
m.__loader__ = FullLoader
m.__file__ = '/tmp/foo.py'
self.assertEqual(repr(m), "<module 'foo' (crafted)>")
def test_module_repr_builtin(self):
self.assertEqual(repr(sys), "<module 'sys' (built-in)>")
def test_module_repr_source(self):
r = repr(unittest)
starts_with = "<module 'unittest' from '"
ends_with = "__init__.py'>"
self.assertEqual(r[:len(starts_with)], starts_with,
'{!r} does not start with {!r}'.format(r, starts_with))
self.assertEqual(r[-len(ends_with):], ends_with,
'{!r} does not end with {!r}'.format(r, ends_with))
def test_module_finalization_at_shutdown(self):
# Module globals and builtins should still be available during shutdown
rc, out, err = assert_python_ok("-c", "from test import final_a")
self.assertFalse(err)
lines = out.splitlines()
self.assertEqual(set(lines), {
b"x = a",
b"x = b",
b"final_a.x = a",
b"final_b.x = b",
b"len = len",
b"shutil.rmtree = rmtree"})
def test_descriptor_errors_propagate(self):
class Descr:
def __get__(self, o, t):
raise RuntimeError
class M(ModuleType):
melon = Descr()
self.assertRaises(RuntimeError, getattr, M("mymod"), "melon")
def test_lazy_create_annotations(self):
# module objects lazy create their __annotations__ dict on demand.
# the annotations dict is stored in module.__dict__.
# a freshly created module shouldn't have an annotations dict yet.
foo = ModuleType("foo")
for i in range(4):
self.assertFalse("__annotations__" in foo.__dict__)
d = foo.__annotations__
self.assertTrue("__annotations__" in foo.__dict__)
self.assertEqual(foo.__annotations__, d)
self.assertEqual(foo.__dict__['__annotations__'], d)
if i % 2:
del foo.__annotations__
else:
del foo.__dict__['__annotations__']
def test_setting_annotations(self):
foo = ModuleType("foo")
for i in range(4):
self.assertFalse("__annotations__" in foo.__dict__)
d = {'a': int}
foo.__annotations__ = d
self.assertTrue("__annotations__" in foo.__dict__)
self.assertEqual(foo.__annotations__, d)
self.assertEqual(foo.__dict__['__annotations__'], d)
if i % 2:
del foo.__annotations__
else:
del foo.__dict__['__annotations__']
def test_annotations_getset_raises(self):
# double delete
foo = ModuleType("foo")
foo.__annotations__ = {}
del foo.__annotations__
with self.assertRaises(AttributeError):
del foo.__annotations__
def test_annotations_are_created_correctly(self):
ann_module4 = import_helper.import_fresh_module('test.ann_module4')
self.assertTrue("__annotations__" in ann_module4.__dict__)
del ann_module4.__annotations__
self.assertFalse("__annotations__" in ann_module4.__dict__)
def test_repeated_attribute_pops(self):
# Repeated accesses to module attribute will be specialized
# Check that popping the attribute doesn't break it
m = ModuleType("test")
d = m.__dict__
count = 0
for _ in range(100):
m.attr = 1
count += m.attr # Might be specialized
d.pop("attr")
self.assertEqual(count, 100)
# frozen and namespace module reprs are tested in importlib.
def test_subclass_with_slots(self):
# In 3.11alpha this crashed, as the slots weren't NULLed.
class ModuleWithSlots(ModuleType):
__slots__ = ("a", "b")
def __init__(self, name):
super().__init__(name)
m = ModuleWithSlots("name")
with self.assertRaises(AttributeError):
m.a
with self.assertRaises(AttributeError):
m.b
m.a, m.b = 1, 2
self.assertEqual(m.a, 1)
self.assertEqual(m.b, 2)
if __name__ == '__main__':
unittest.main()
|
test_module_repr_with_full_loader
|
config.go
|
package main
import (
"encoding/json"
"io/ioutil"
)
// Config describes which inputs the service accepts and which resize
// factors it offers. It round-trips to JSON via the struct tags below.
type Config struct {
	// SupportedTypes lists accepted file-type extensions (defaults are "png", "jpg").
	SupportedTypes []string `json:"supportedTypes"`
	// SupportedResizes lists the resize values offered (defaults are 50, 33, 20
	// — presumably percentages; confirm against the resize code).
	SupportedResizes []int `json:"supportedResizes"`
}
// MakeDefaultConfig returns a Config populated with the built-in defaults.
// NOTE(review): the directory parameter is currently unused — confirm whether
// it was meant to influence the defaults or can be removed at the call sites.
func MakeDefaultConfig(directory string) Config {
	ret := Config{}
	ret.SupportedTypes = []string{"png", "jpg"}
	ret.SupportedResizes = []int{50, 33, 20}
	return ret
}
func MakeConfigFromFile(filePath string) (Config, error) {
jsonContent, err := ioutil.ReadFile(filePath)
if err != nil
|
var config Config
err = json.Unmarshal(jsonContent, &config)
if err != nil {
return Config{}, err
}
return config, nil
}
// WriteToFile serializes the config as tab-indented JSON and writes it to
// filePath with permission bits 0755.
func (c *Config) WriteToFile(filePath string) error {
	jsonBytes, err := json.MarshalIndent(c, "", "\t")
	if err != nil {
		return err
	}
	// The write's error (nil on success) is the method's result.
	return ioutil.WriteFile(filePath, jsonBytes, 0755)
}
// ToJson returns the config serialized as tab-indented JSON bytes.
func (c *Config) ToJson() ([]byte, error) {
	return json.MarshalIndent(c, "", "\t")
}
|
{
return Config{}, err
}
|
tuple.rs
|
use super::Rdx;
use core::cmp;
/// `Rdx` implementation for the unit type: it carries no data, so it
/// contributes zero buckets and zero rounds.
impl Rdx for () {
    #[inline]
    fn cfg_nbuckets() -> usize {
        0
    }
    #[inline]
    fn cfg_nrounds() -> usize {
        0
    }
    #[inline]
    fn get_bucket(&self, _round: usize) -> usize {
        // Never called: with cfg_nrounds() == 0 no sorting pass exists.
        unreachable!()
    }
    #[inline]
    fn reverse(_round: usize, _bucket: usize) -> bool {
        // Same as above — there is no round that could query this.
        unreachable!()
    }
}
/// A one-element tuple delegates every `Rdx` hook directly to its inner type.
impl<A> Rdx for (A,)
where
    A: Rdx,
{
    #[inline]
    fn cfg_nbuckets() -> usize {
        A::cfg_nbuckets()
    }
    #[inline]
    fn cfg_nrounds() -> usize {
        A::cfg_nrounds()
    }
    #[inline]
    fn get_bucket(&self, round: usize) -> usize {
        self.0.get_bucket(round)
    }
    #[inline]
    fn reverse(round: usize, bucket: usize) -> bool {
        A::reverse(round, bucket)
    }
}
/// Pair implementation: the rounds for the second element run first and the
/// first element's rounds run last, so the first element dominates the final
/// ordering (least-significant-element-first round layout).
impl<A, B> Rdx for (A, B)
where
    A: Rdx,
    B: Rdx,
{
    #[inline]
    fn cfg_nbuckets() -> usize {
        // Both element types share one bucket array, so it must fit the larger.
        cmp::max(A::cfg_nbuckets(), B::cfg_nbuckets())
    }
    #[inline]
    fn cfg_nrounds() -> usize {
        A::cfg_nrounds() + B::cfg_nrounds()
    }
    #[inline]
    fn get_bucket(&self, round: usize) -> usize {
        // Early rounds address the second element; later rounds address the
        // first, with the round index shifted into A's own range.
        if round < B::cfg_nrounds() {
            self.1.get_bucket(round)
        } else {
            self.0.get_bucket(round - B::cfg_nrounds())
        }
    }
    #[inline]
    fn reverse(round: usize, bucket: usize) -> bool {
        if round < B::cfg_nrounds() {
            B::reverse(round, bucket)
        } else {
            A::reverse(round - B::cfg_nrounds(), bucket)
        }
    }
}
impl<A, B, C> Rdx for (A, B, C)
|
B: Rdx,
C: Rdx,
{
#[inline]
fn cfg_nbuckets() -> usize {
cmp::max(
A::cfg_nbuckets(),
cmp::max(B::cfg_nbuckets(), C::cfg_nbuckets()),
)
}
#[inline]
fn cfg_nrounds() -> usize {
A::cfg_nrounds() + B::cfg_nrounds() + C::cfg_nrounds()
}
#[inline]
fn get_bucket(&self, round: usize) -> usize {
if round < C::cfg_nrounds() {
self.2.get_bucket(round)
} else if round < B::cfg_nrounds() + C::cfg_nrounds() {
self.1.get_bucket(round - C::cfg_nrounds())
} else {
self.0
.get_bucket(round - B::cfg_nrounds() - C::cfg_nrounds())
}
}
#[inline]
fn reverse(round: usize, bucket: usize) -> bool {
if round < C::cfg_nrounds() {
C::reverse(round, bucket)
} else if round < B::cfg_nrounds() + C::cfg_nrounds() {
B::reverse(round - C::cfg_nrounds(), bucket)
} else {
A::reverse(round - B::cfg_nrounds() - C::cfg_nrounds(), bucket)
}
}
}
|
where
A: Rdx,
|
main.go
|
package main
import (
"fmt"
"log"
"github.com/mjm/advent-of-code-2019/day9"
"github.com/mjm/advent-of-code-2019/pkg/input"
)
// main loads the day-9 program from the puzzle input and runs both parts,
// each against its own clone of the loaded VM template.
func main() {
	template, err := day9.LoadFromString(input.ReadString())
	if err != nil {
		log.Fatal(err)
	}
	part1(template)
	part2(template)
}
func part1(template *day9.VM) {
vm := template.Clone()
vm.Input <- 1
done := make(chan int)
go func() {
var values []int
for out := range vm.Output {
values = append(values, out)
}
if len(values) == 1
|
else {
panic(fmt.Errorf("got some failing opcodes: %v", values))
}
}()
if err := vm.Execute(); err != nil {
log.Fatal(err)
}
boostKeycode := <-done
log.Printf("The BOOST keycode is %d", boostKeycode)
}
// part2 clones the VM template, feeds it the input value 2, and logs the
// single value the program emits (the distress-signal coordinates).
func part2(template *day9.VM) {
	vm := template.Clone()
	vm.Input <- 2
	done := make(chan int)
	// Read the program's output concurrently so Execute is not blocked
	// waiting for a consumer on the output channel.
	go func() {
		n := <-vm.Output
		done <- n
	}()
	if err := vm.Execute(); err != nil {
		log.Fatal(err)
	}
	coordinates := <-done
	log.Printf("The coordinates of the distress signal are %d", coordinates)
}
|
{
done <- values[0]
}
|
verifier.rs
|
use crate::common::*;
/// Hashes torrent content on disk and compares it with the piece list in the
/// metainfo, producing a verification `Status`.
pub(crate) struct Verifier<'a> {
  metainfo: &'a Metainfo,
  base: &'a Path,
  // Scratch buffer holding at most one piece worth of file data.
  buffer: Vec<u8>,
  piece_length: usize,
  // Piece hashes computed so far.
  pieces: PieceList,
  sha1: Sha1,
  // Bytes hashed into the current (possibly partial) piece.
  piece_bytes_hashed: usize,
  progress_bar: Option<ProgressBar>,
}
impl<'a> Verifier<'a> {
  /// Construct a verifier whose scratch buffer holds exactly one piece.
  fn new(
    metainfo: &'a Metainfo,
    base: &'a Path,
    progress_bar: Option<ProgressBar>,
  ) -> Result<Verifier<'a>> {
    let piece_length = metainfo.info.piece_length.as_piece_length()?.into_usize();
    Ok(Verifier {
      buffer: vec![0; piece_length],
      piece_bytes_hashed: 0,
      pieces: PieceList::new(),
      sha1: Sha1::new(),
      base,
      metainfo,
      piece_length,
      progress_bar,
    })
  }
  /// Verify the content under `base` against `metainfo`, returning the
  /// per-file and per-piece status.
  pub(crate) fn verify(
    metainfo: &'a Metainfo,
    base: &'a Path,
    progress_bar: Option<ProgressBar>,
  ) -> Result<Status> {
    Self::new(metainfo, base, progress_bar)?.verify_metainfo()
  }
  /// Hash every file named by the metainfo and collect file-level errors.
  fn verify_metainfo(mut self) -> Result<Status> {
    match &self.metainfo.info.mode {
      Mode::Single { length, md5sum } => {
        // The hash result is ignored here; I/O problems surface through
        // FileError::verify on the next line instead.
        self.hash(&self.base).ok();
        let error = FileError::verify(&self.base, *length, *md5sum).err();
        let pieces = self.finish();
        Ok(Status::single(pieces, error))
      }
      Mode::Multiple { files } => {
        let mut status = Vec::new();
        for file in files {
          let path = file.path.absolute(self.base);
          self.hash(&path).ok();
          status.push(FileStatus::status(
            &path,
            file.path.clone(),
            file.length,
            file.md5sum,
          ));
        }
        let pieces = self.finish();
        Ok(Status::multiple(pieces, status))
      }
    }
  }
  /// Feed the bytes of `path` into the rolling SHA-1, emitting a piece hash
  /// each time a full piece accumulates. Hash state deliberately carries over
  /// between calls, because a piece may span file boundaries.
  pub(crate) fn hash(&mut self, path: &Path) -> io::Result<()> {
    let mut file = BufReader::new(File::open(path)?);
    loop {
      // Read at most up to the end of the current piece.
      let remaining = &mut self.buffer[..self.piece_length - self.piece_bytes_hashed];
      let bytes_read = file.read(remaining)?;
      if bytes_read == 0 {
        break;
      }
      let read = &remaining[..bytes_read];
      self.sha1.update(read);
      self.piece_bytes_hashed += bytes_read;
      if self.piece_bytes_hashed == self.piece_length {
        // Piece complete: record its digest and start the next piece.
        self.pieces.push(self.sha1.digest().into());
        self.sha1.reset();
        self.piece_bytes_hashed = 0;
      }
      if let Some(progress_bar) = &self.progress_bar {
        progress_bar.inc(bytes_read.into_u64());
      }
    }
    Ok(())
  }
  /// Flush the trailing partial piece, if any, then report whether the
  /// computed piece list matches the metainfo's.
  fn finish(&mut self) -> bool {
    if self.piece_bytes_hashed > 0 {
      self.pieces.push(self.sha1.digest().into());
      self.sha1.reset();
      self.piece_bytes_hashed = 0;
    }
    self.pieces == self.metainfo.info.pieces
  }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn good() -> Result<()>
|
#[test]
fn piece_mismatch() -> Result<()> {
let mut env = test_env! {
args: [
"torrent",
"create",
"--input",
"foo",
"--announce",
"https://bar",
],
tree: {
foo: {
a: "abc",
d: "efg",
h: "ijk",
},
},
};
env.assert_ok();
env.write("foo/a", "xyz");
let metainfo = env.load_metainfo("foo.torrent");
let status = metainfo.verify(&env.resolve("foo")?, None)?;
assert_eq!(status.count_bad(), 0);
assert!(!status.pieces());
Ok(())
}
}
|
{
let mut env = test_env! {
args: [
"torrent",
"create",
"--input",
"foo",
"--announce",
"https://bar",
],
tree: {
foo: {
a: "abc",
d: "efg",
h: "ijk",
},
},
};
env.assert_ok();
let metainfo = env.load_metainfo("foo.torrent");
assert!(metainfo.verify(&env.resolve("foo")?, None)?.good());
Ok(())
}
|
tests.py
|
from django.test import RequestFactory, TestCase, Client
from .models import Project, Category
from .views import portfolio, portfolio_detail
class PortfolioViewTests(TestCase):
    def setUp(self):
        """Create the request factory, test client, and the category/project
        fixtures shared by every test in this case."""
        # Every test needs access to the request factory.
        self.factory = RequestFactory()
        self.client = Client()
        # Test Category
        self.category_sample = Category.objects.create(name='Sample')
        # Portfolio projects
        self.portfolio_web_01 = Project.objects.create(title='Projeto Web 01',
                                                       description='Projeto web Teste',
                                                       category=self.category_sample)
    def test_portfolio_view_status_code_is_ok(self):
        """GET /portfolio/ renders successfully (HTTP 200)."""
        request = self.factory.get('/portfolio/')
        response = portfolio(request)
        self.assertEqual(response.status_code, 200)
def test_portfolio_detail_view_status_code_is_ok(self):
|
def test_project_title_returns(self):
projeto = self.portfolio_web_01
self.assertEquals('Projeto Web 01', projeto.title)
def test_project_str_returns(self):
projeto_str = self.portfolio_web_01
self.assertEquals('Projeto Web 01', projeto_str.__str__())
def test_category_name_returns(self):
categoria = self.category_sample
self.assertEquals('Sample', categoria.name)
|
request = self.factory.get('/portfolio/projeto/projeto-web-01')
response = portfolio_detail(request, slug=self.portfolio_web_01.slug)
self.assertEqual(response.status_code, 200)
|
segment.rs
|
use crate::scalar::Scalar;
use crate::{CubicBezierSegment, LineSegment, QuadraticBezierSegment};
use crate::{Point, Vector, Box2D, point};
use std::ops::Range;
/// Common APIs shared by segment types (lines and Bézier curves).
pub trait Segment: Copy + Sized {
    type Scalar: Scalar;
    /// Start of the curve.
    fn from(&self) -> Point<Self::Scalar>;
    /// End of the curve.
    fn to(&self) -> Point<Self::Scalar>;
    /// Sample the curve at t (expecting t between 0 and 1).
    fn sample(&self, t: Self::Scalar) -> Point<Self::Scalar>;
    /// Sample x at t (expecting t between 0 and 1).
    fn x(&self, t: Self::Scalar) -> Self::Scalar {
        self.sample(t).x
    }
    /// Sample y at t (expecting t between 0 and 1).
    fn y(&self, t: Self::Scalar) -> Self::Scalar {
        self.sample(t).y
    }
    /// Sample the derivative at t (expecting t between 0 and 1).
    fn derivative(&self, t: Self::Scalar) -> Vector<Self::Scalar>;
    /// Sample x derivative at t (expecting t between 0 and 1).
    fn dx(&self, t: Self::Scalar) -> Self::Scalar {
        self.derivative(t).x
    }
    /// Sample y derivative at t (expecting t between 0 and 1).
    fn dy(&self, t: Self::Scalar) -> Self::Scalar {
        self.derivative(t).y
    }
    /// Split this curve into two sub-curves.
    fn split(&self, t: Self::Scalar) -> (Self, Self);
    /// Return the curve before the split point.
    fn before_split(&self, t: Self::Scalar) -> Self;
    /// Return the curve after the split point.
    fn after_split(&self, t: Self::Scalar) -> Self;
    /// Return the curve inside a given range of t.
    ///
    /// This is equivalent to splitting at the range's end points.
    fn split_range(&self, t_range: Range<Self::Scalar>) -> Self;
    /// Swap the direction of the segment.
    fn flip(&self) -> Self;
    /// Compute the length of the segment using a flattened approximation.
    fn approximate_length(&self, tolerance: Self::Scalar) -> Self::Scalar;
}
pub trait BoundingBox {
type Scalar: Scalar;
/// Returns the smallest rectangle that contains the curve.
fn bounding_box(&self) -> Box2D<Self::Scalar> {
let (min_x, max_x) = self.bounding_range_x();
let (min_y, max_y) = self.bounding_range_y();
Box2D {
min: point(min_x, min_y),
max: point(max_x, max_y),
}
}
/// Returns a conservative rectangle that contains the curve.
///
/// This does not necessarily return the smallest possible bounding rectangle.
fn fast_bounding_box(&self) -> Box2D<Self::Scalar> {
let (min_x, max_x) = self.fast_bounding_range_x();
let (min_y, max_y) = self.fast_bounding_range_y();
Box2D {
min: point(min_x, min_y),
max: point(max_x, max_y),
}
}
/// Returns a range of x values that contains the curve.
fn bounding_range_x(&self) -> (Self::Scalar, Self::Scalar);
/// Returns a range of y values that contains the curve.
fn bounding_range_y(&self) -> (Self::Scalar, Self::Scalar);
/// Returns a range of x values that contains the curve.
fn fast_bounding_range_x(&self) -> (Self::Scalar, Self::Scalar);
/// Returns a range of y values that contains the curve.
fn fast_bounding_range_y(&self) -> (Self::Scalar, Self::Scalar);
}
macro_rules! impl_segment {
($S:ty) => {
type Scalar = $S;
fn from(&self) -> Point<$S> {
self.from()
}
fn to(&self) -> Point<$S> {
self.to()
}
fn sample(&self, t: $S) -> Point<$S> {
self.sample(t)
}
fn x(&self, t: $S) -> $S {
self.x(t)
}
fn y(&self, t: $S) -> $S {
self.y(t)
}
fn derivative(&self, t: $S) -> Vector<$S> {
self.derivative(t)
}
fn dx(&self, t: $S) -> $S {
self.dx(t)
}
fn dy(&self, t: $S) -> $S {
self.dy(t)
}
fn split(&self, t: $S) -> (Self, Self) {
self.split(t)
}
fn before_split(&self, t: $S) -> Self {
self.before_split(t)
}
fn after_split(&self, t: $S) -> Self {
self.after_split(t)
}
fn split_range(&self, t_range: Range<$S>) -> Self {
self.split_range(t_range)
}
fn flip(&self) -> Self {
self.flip()
}
fn approximate_length(&self, tolerance: $S) -> $S {
self.approximate_length(tolerance)
}
};
}
/// Either a cubic, quadratic or linear bézier segment.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum BezierSegment<S> {
Linear(LineSegment<S>),
Quadratic(QuadraticBezierSegment<S>),
Cubic(CubicBezierSegment<S>),
}
impl<S: Scalar> BezierSegment<S> {
    /// Sample the curve at t, dispatching to the wrapped segment type.
    #[inline]
    pub fn sample(&self, t: S) -> Point<S> {
        match self {
            BezierSegment::Linear(segment) => segment.sample(t),
            BezierSegment::Quadratic(segment) => segment.sample(t),
            BezierSegment::Cubic(segment) => segment.sample(t),
        }
    }
    /// Start point of the segment.
    #[inline]
    pub fn from(&self) -> Point<S> {
        match self {
            BezierSegment::Linear(segment) => segment.from,
            BezierSegment::Quadratic(segment) => segment.from,
            BezierSegment::Cubic(segment) => segment.from,
        }
    }
    /// End point of the segment.
    #[inline]
    pub fn to(&self) -> Point<S> {
        match self {
            BezierSegment::Linear(segment) => segment.to,
            BezierSegment::Quadratic(segment) => segment.to,
            BezierSegment::Cubic(segment) => segment.to,
        }
    }
    /// Whether the segment is (approximately) a straight line within
    /// `tolerance`; trivially true for the linear variant.
    #[inline]
    pub fn is_linear(&self, tolerance: S) -> bool {
        match self {
            BezierSegment::Linear(..) => true,
            BezierSegment::Quadratic(segment) => segment.is_linear(tolerance),
            BezierSegment::Cubic(segment) => segment.is_linear(tolerance),
        }
    }
    /// The straight line joining the segment's endpoints.
    #[inline]
    pub fn baseline(&self) -> LineSegment<S> {
        match self {
            BezierSegment::Linear(segment) => *segment,
            BezierSegment::Quadratic(segment) => segment.baseline(),
            BezierSegment::Cubic(segment) => segment.baseline(),
        }
    }
    /// Split this segment into two sub-segments.
    pub fn split(&self, t: S) -> (BezierSegment<S>, BezierSegment<S>) {
        match self {
            BezierSegment::Linear(segment) => {
                let (a, b) = segment.split(t);
                (BezierSegment::Linear(a), BezierSegment::Linear(b))
            }
            BezierSegment::Quadratic(segment) => {
                let (a, b) = segment.split(t);
                (BezierSegment::Quadratic(a), BezierSegment::Quadratic(b))
            }
            BezierSegment::Cubic(segment) => {
                let (a, b) = segment.split(t);
                (BezierSegment::Cubic(a), BezierSegment::Cubic(b))
            }
        }
    }
}
impl<S> From<LineSegment<S>> for BezierSegment<S> {
fn from(s: LineSegment<S>) -> Self {
BezierSegment::Linear(s)
}
}
impl<S> From<QuadraticBezierSegment<S>> for BezierSegment<S> {
fn f
|
s: QuadraticBezierSegment<S>) -> Self {
BezierSegment::Quadratic(s)
}
}
impl<S> From<CubicBezierSegment<S>> for BezierSegment<S> {
fn from(s: CubicBezierSegment<S>) -> Self {
BezierSegment::Cubic(s)
}
}
|
rom(
|
PyGLCMViewer.py
|
import PyQt4
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import Qt
from PyQt4.QtCore import QObject, QTimer, pyqtSignal, pyqtSlot
from PyGLWidget import PyGLWidget
from OpenGL.GL import *
from OpenGL.GLU import *
from CellModeller.Regulation import ModuleRegulator
from CellModeller.Simulator import Simulator
from CellModeller.CellState import CellState
import os
import sys
class PyGLCMViewer(PyGLWidget):
selectedCell = pyqtSignal(str)#CellState, name='selectedCell')
selectedName = -1
dt = 0.25
    def __init__(self, parent = None):
        """Set up the GL viewer: animation timer, empty simulator state,
        and the default camera radius."""
        PyGLWidget.__init__(self,parent)
        self.animTimer = QTimer()
        self.animTimer.timeout.connect(self.animate)
        self.renderInfo = None
        self.sim= None
        self.modfile = None
        self.record = False  # when True, animate() saves periodic snapshots
        self.set_radius(32)
        self.frameNo = 0
def help(self):
pass
def setSimulator(self, sim):
self.sim = sim
    @pyqtSlot(bool)
    def toggleRun(self, run):
        """Qt slot: start the animation timer when run is True, stop it
        otherwise (presumably wired to a run/pause control in the GUI)."""
        if run:
            self.animTimer.start(0)
        else:
            self.animTimer.stop()
@pyqtSlot(bool)
def toggleRecord(self, rec):
self.record = rec
self.sim.savePickle = rec
@pyqtSlot()
def reset(self):
self.sim = Simulator(self.modname, self.dt)
#if self.sim:
# self.sim.reset()
self.frameNo = 0
@pyqtSlot()
def load(self):
qs = QtGui.QFileDialog.getOpenFileName(self, 'Load Python module', '', '*.py')
|
def loadFile(self, modstr):
(path,name) = os.path.split(modstr)
modname = str(name).split('.')[0]
self.modname = modname
sys.path.append(path)
if self.sim:
self.sim.reset(modname)
else:
self.sim = Simulator(modname, self.dt)
#self.draw()
self.paintGL()
def animate(self):
if self.sim:
self.sim.step()
self.updateSelectedCell()
self.frameNo += 1
if self.record:
if (self.frameNo%5)==0:
self.setSnapshotCounter(self.frameNo)
self.saveSnapshot()
def updateSelectedCell(self):
if self.sim:
states = self.sim.cellStates
cid = self.selectedName
txt = ''
if states.has_key(cid):
s = states[cid]
for (name,val) in s.__dict__.items():
if name not in CellState.excludeAttr:
vals = str(val)
#if len(vals)>6: vals = vals[0:6]
txt = txt + name + ': ' + vals + '\n'
self.selectedCell.emit(txt)
if self.sim.stepNum%100==0:
self.updateGL()
def postSelection(self, name):
self.selectedName = name
self.updateSelectedCell()
def paintGL(self):
PyGLWidget.paintGL(self)
glClearColor(0.5,0.5,0.5,0.0)
glClear(GL_COLOR_BUFFER_BIT)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
#s = self.renderInfo.scale
#glScalef(s,s,s)
if self.sim:
for r in self.sim.renderers:
if r != None:
r.render_gl(self.selectedName)
glPopMatrix()
def drawWithNames(self):
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
#s = self.renderInfo.scale
#glScalef(s,s,s)
if self.sim:
for r in self.sim.renderers:
if r:
r.renderNames_gl()
glPopMatrix()
class RenderInfo:
    """Holds the list of renderers and the global scale used to draw a scene."""

    def __init__(self):
        # Start from the pristine state; reset() defines it in one place.
        self.reset()

    def addRenderer(self, renderer):
        """Register an additional renderer."""
        self.renderers.append(renderer)

    def reset(self):
        """Drop all renderers and restore the default scale."""
        self.renderers = []
        self.scale = 1.0

    def setScale(self, s):
        """Set the global scene scale factor."""
        self.scale = s
|
self.modfile = str(qs)
self.loadFile(self.modfile)
|
crypto.go
|
//
// Created by Aman LaChapelle on 7/20/18.
//
// peacemakr-core-crypto
// Copyright (c) 2018 peacemakr
// Full license at peacemakr-core-crypto/LICENSE.txt
//
package crypto
// #cgo LDFLAGS: -lpeacemakr-core-crypto -L${SRCDIR}/lib -Wl,-rpath ${SRCDIR}/lib
// #cgo CFLAGS: -I${SRCDIR}/include -I${SRCDIR}/openssl/include
// #include <peacemakr/crypto.h>
// #include <peacemakr/random.h>
// #include <stdlib.h>
// #include <string.h>
/*
extern int go_rng(unsigned char *, size_t);
extern char *go_rng_err(int);
static inline random_device_t cbridge() {
random_device_t rand = {
.generator = &go_rng,
.err = (const char *(*)(int))(&go_rng_err)
};
return rand;
}
extern void go_log_export(char *);
static inline peacemakr_log_cb go_log_cbridge() {
return (peacemakr_log_cb)go_log_export;
}
*/
import "C"
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"encoding/pem"
"errors"
"log"
"unsafe"
)
// go_rng fills buf with `size` cryptographically secure random bytes from
// crypto/rand. Return codes: 0 success, 1 RNG read failure, 2 nil buffer,
// 3 zero size (messages are provided by go_rng_err).
//export go_rng
func go_rng(buf *C.uchar, size C.size_t) C.int {
	// Validate the arguments before doing any work, so invalid calls do
	// not pay for random-byte generation first.
	if buf == nil {
		return 2
	}
	if size == 0 {
		return 3
	}
	randomBytes := make([]byte, size)
	if _, err := rand.Read(randomBytes); err != nil {
		return 1
	}
	C.memcpy(unsafe.Pointer(buf), unsafe.Pointer(&randomBytes[0]), size)
	return 0
}
// go_rng_err maps a go_rng return code to a human-readable message.
// The returned string is allocated with C.CString; the C side owns it.
// Unknown codes (and success, code 0) yield nil.
//export go_rng_err
func go_rng_err(err C.int) *C.char {
	switch err {
	case 0:
		return nil
	case 1:
		// fixed typo in the message: "ocurred" -> "occurred"
		return C.CString("error occurred while reading random numbers")
	case 2:
		return C.CString("buf passed in was nil")
	case 3:
		return C.CString("size passed in was zero")
	}
	return nil
}
// go_log_export receives a log line from the C core and forwards it to the
// Go standard logger.
//export go_log_export
func go_log_export(buf *C.char) {
	goStr := C.GoString(buf)
	log.Print(goStr)
}
// NewRandomDevice returns a RandomDevice backed by Go's crypto/rand via the
// cbridge helper defined in the cgo preamble.
func NewRandomDevice() RandomDevice {
	return RandomDevice{
		randomDevice: C.cbridge(),
	}
}
// RandomDevice wraps the C random_device_t handed to key-generation and
// encryption entry points.
type RandomDevice struct {
	randomDevice C.random_device_t
}
// SymmetricCipher selects the symmetric cipher used for payload encryption.
// Values mirror the C enum symmetric_cipher.
type SymmetricCipher int
const (
	SYMMETRIC_UNSPECIFIED SymmetricCipher = 0
	AES_128_GCM SymmetricCipher = 1
	AES_192_GCM SymmetricCipher = 2
	AES_256_GCM SymmetricCipher = 3
	CHACHA20_POLY1305 SymmetricCipher = 4
)
// AsymmetricCipher selects the asymmetric algorithm (RSA size or EC curve).
// Values mirror the C enum asymmetric_cipher.
type AsymmetricCipher int
const (
	ASYMMETRIC_UNSPECIFIED AsymmetricCipher = 0
	RSA_2048 AsymmetricCipher = 1
	RSA_4096 AsymmetricCipher = 2
	ECDH_P256 AsymmetricCipher = 3
	ECDH_P384 AsymmetricCipher = 4
	ECDH_P521 AsymmetricCipher = 5
	ECDH_SECP256K1 AsymmetricCipher = 6
)
// MessageDigestAlgorithm selects the hash used for signing/serialization.
// Values mirror the C enum message_digest_algorithm.
type MessageDigestAlgorithm int
const (
	DIGEST_UNSPECIFIED MessageDigestAlgorithm = 0
	SHA_224 MessageDigestAlgorithm = 1
	SHA_256 MessageDigestAlgorithm = 2
	SHA_384 MessageDigestAlgorithm = 3
	SHA_512 MessageDigestAlgorithm = 4
)
// EncryptionMode distinguishes symmetric-key from asymmetric-key operation.
type EncryptionMode int
const (
	SYMMETRIC EncryptionMode = 0
	ASYMMETRIC EncryptionMode = 1
)
// CryptoConfig bundles the mode, ciphers, and digest for a crypto operation.
type CryptoConfig struct {
	Mode EncryptionMode
	SymmetricCipher SymmetricCipher
	AsymmetricCipher AsymmetricCipher
	DigestAlgorithm MessageDigestAlgorithm
}
// ========================= File-internal helpers =========================
// configToInternal converts the public CryptoConfig into the C struct
// expected by the core library.
func configToInternal(config CryptoConfig) C.crypto_config_t {
	return C.crypto_config_t{
		mode: C.encryption_mode(config.Mode),
		symm_cipher: C.symmetric_cipher(config.SymmetricCipher),
		asymm_cipher: C.asymmetric_cipher(config.AsymmetricCipher),
		digest_algorithm: C.message_digest_algorithm(config.DigestAlgorithm),
	}
}
// configFromInternal converts a C crypto_config_t back into the public
// CryptoConfig type.
func configFromInternal(config C.crypto_config_t) CryptoConfig {
	return CryptoConfig{
		Mode: EncryptionMode(config.mode),
		SymmetricCipher: SymmetricCipher(config.symm_cipher),
		AsymmetricCipher: AsymmetricCipher(config.asymm_cipher),
		DigestAlgorithm: MessageDigestAlgorithm(config.digest_algorithm),
	}
}
// Plaintext carries the data to encrypt plus optional additional
// authenticated data (AAD) that is authenticated but not encrypted.
type Plaintext struct {
	Data []byte
	Aad []byte
}
// plaintextToInternal copies a Plaintext into C-allocated buffers; release
// them with freeInternalPlaintext.
func plaintextToInternal(plaintext Plaintext) C.plaintext_t {
	return C.plaintext_t{
		data_len: C.size_t(len(plaintext.Data)),
		data: (*C.uchar)(C.CBytes(plaintext.Data)),
		aad_len: C.size_t(len(plaintext.Aad)),
		aad: (*C.uchar)(C.CBytes(plaintext.Aad)),
	}
}
// freeInternalPlaintext releases the C buffers inside a plaintext_t
// (allocated by plaintextToInternal or filled in by the C core).
// A nil pointer is a no-op; C.free on nil fields is safe.
func freeInternalPlaintext(internalPlaintext *C.plaintext_t) {
	if internalPlaintext == nil {
		return
	}
	C.free(unsafe.Pointer(internalPlaintext.data))
	C.free(unsafe.Pointer(internalPlaintext.aad))
}
// ========================= Package helpers =========================
// GetMaxSupportedVersion reports the highest message-format version the
// linked core library supports.
func GetMaxSupportedVersion() uint8 {
	return uint8(C.get_max_version())
}
func
|
() bool {
C.peacemakr_set_log_callback(C.go_log_cbridge())
return bool(C.peacemakr_init())
}
// ========================= Core types =========================
// CiphertextBlob wraps the opaque C ciphertext handle.
type CiphertextBlob struct {
	blob *C.ciphertext_blob_t
}
// PeacemakrKey wraps the opaque C key handle. The key material lives on the
// C heap and must be released with Destroy.
type PeacemakrKey struct {
	key *C.peacemakr_key_t
}
// ========================= Raw key creation =========================
// NewPeacemakrKeySymmetric generates a fresh random symmetric key for the
// given cipher using the supplied RandomDevice.
func NewPeacemakrKeySymmetric(config SymmetricCipher, rand RandomDevice) *PeacemakrKey {
	return &PeacemakrKey{
		key: C.peacemakr_key_new_symmetric((C.symmetric_cipher)(config), (*C.random_device_t)(unsafe.Pointer(&rand.randomDevice))),
	}
}
// NewPeacemakrKeyAsymmetric generates a fresh asymmetric keypair; symm
// selects the symmetric cipher used for the payload.
func NewPeacemakrKeyAsymmetric(asymm AsymmetricCipher, symm SymmetricCipher, rand RandomDevice) *PeacemakrKey {
	return &PeacemakrKey{
		key: C.peacemakr_key_new_asymmetric((C.asymmetric_cipher)(asymm), (C.symmetric_cipher)(symm), (*C.random_device_t)(unsafe.Pointer(&rand.randomDevice))),
	}
}
// NewPeacemakrKeyFromBytes wraps existing symmetric key material.
// NOTE(review): the temporary C buffer is freed on return, which implies the
// C core copies the bytes — confirm against the C API.
func NewPeacemakrKeyFromBytes(cipher SymmetricCipher, contents []byte) *PeacemakrKey {
	cBytes := (*C.uint8_t)(C.CBytes(contents))
	defer C.free(unsafe.Pointer(cBytes))
	cNumBytes := (C.size_t)(len(contents))
	return &PeacemakrKey{
		key: C.peacemakr_key_new_bytes((C.symmetric_cipher)(cipher), cBytes, cNumBytes),
	}
}
// newPeacemakrKeyFromPassword derives a symmetric key from a password and
// salt using the given digest and iteration count.
func newPeacemakrKeyFromPassword(cipher SymmetricCipher, digest MessageDigestAlgorithm, passwordStr string, salt []byte, iterationCount int) *PeacemakrKey {
	password := []byte(passwordStr)
	cBytes := (*C.uint8_t)(C.CBytes(password))
	defer C.free(unsafe.Pointer(cBytes))
	cNumBytes := (C.size_t)(len(password))
	cSalt := (*C.uint8_t)(C.CBytes(salt))
	defer C.free(unsafe.Pointer(cSalt))
	cNumSalt := (C.size_t)(len(salt))
	return &PeacemakrKey{
		key: C.peacemakr_key_new_from_password((C.symmetric_cipher)(cipher), (C.message_digest_algorithm)(digest), cBytes, cNumBytes, cSalt, cNumSalt, (C.size_t)(iterationCount)),
	}
}
// newPeacemakrKeyFromPubPem builds a key from PEM-encoded public key bytes.
func newPeacemakrKeyFromPubPem(symm SymmetricCipher, contents []byte) *PeacemakrKey {
	cBytes := (*C.char)(C.CBytes(contents))
	defer C.free(unsafe.Pointer(cBytes))
	return &PeacemakrKey{
		key: C.peacemakr_key_new_pem_pub((C.symmetric_cipher)(symm), cBytes, C.size_t(len(contents))),
	}
}
// newPeacemakrKeyFromPrivPem builds a key from PEM-encoded private key bytes.
func newPeacemakrKeyFromPrivPem(symm SymmetricCipher, contents []byte) *PeacemakrKey {
	cBytes := (*C.char)(C.CBytes(contents))
	defer C.free(unsafe.Pointer(cBytes))
	return &PeacemakrKey{
		key: C.peacemakr_key_new_pem_priv((C.symmetric_cipher)(symm), cBytes, C.size_t(len(contents))),
	}
}
// ========================= Internal helpers for wrappers =========================
// GetECKeyTypeFromPubPemStr inspects a PEM-encoded PKIX public key and
// reports which supported EC curve it uses. It returns an error when the
// input is not valid PEM, fails to parse, or is not an EC key on
// P-256/P-384/P-521.
func GetECKeyTypeFromPubPemStr(pubPEM string) (AsymmetricCipher, error) {
	block, _ := pem.Decode([]byte(pubPEM))
	if block == nil {
		return ASYMMETRIC_UNSPECIFIED, errors.New("failed to parse PEM block containing the key")
	}
	parsed, err := x509.ParsePKIXPublicKey(block.Bytes)
	if err != nil {
		return ASYMMETRIC_UNSPECIFIED, err
	}
	if ecKey, ok := parsed.(*ecdsa.PublicKey); ok {
		// elliptic.P*() return package-level singletons, so a direct
		// comparison identifies the curve.
		switch ecKey.Curve {
		case elliptic.P256():
			return ECDH_P256, nil
		case elliptic.P384():
			return ECDH_P384, nil
		case elliptic.P521():
			return ECDH_P521, nil
		}
	}
	return ASYMMETRIC_UNSPECIFIED, errors.New("key type is not EC")
}
// GetECKeyTypeFromPrivPemStr inspects a PEM-encoded EC private key (SEC 1)
// and reports which supported curve it uses.
// Fix: the parameter was misleadingly named pubPEM even though this function
// parses a *private* key; renamed (Go parameter names are not part of the
// call interface, so callers are unaffected).
func GetECKeyTypeFromPrivPemStr(privPEM string) (AsymmetricCipher, error) {
	block, _ := pem.Decode([]byte(privPEM))
	if block == nil {
		return ASYMMETRIC_UNSPECIFIED, errors.New("failed to parse PEM block containing the key")
	}
	priv, err := x509.ParseECPrivateKey(block.Bytes)
	if err != nil {
		return ASYMMETRIC_UNSPECIFIED, err
	}
	if priv.Curve == elliptic.P256() {
		return ECDH_P256, nil
	} else if priv.Curve == elliptic.P384() {
		return ECDH_P384, nil
	} else if priv.Curve == elliptic.P521() {
		return ECDH_P521, nil
	}
	// Parsed as EC but on an unsupported curve.
	return ASYMMETRIC_UNSPECIFIED, errors.New("key type is not EC")
}
// ParseRsaPublicKeyFromPemStr decodes a PEM-encoded PKIX public key and
// returns it as an *rsa.PublicKey, or an error if the input is not valid
// PEM, fails to parse, or is not an RSA key.
func ParseRsaPublicKeyFromPemStr(pubPEM string) (*rsa.PublicKey, error) {
	block, _ := pem.Decode([]byte(pubPEM))
	if block == nil {
		return nil, errors.New("failed to parse PEM block containing the key")
	}
	parsed, err := x509.ParsePKIXPublicKey(block.Bytes)
	if err != nil {
		return nil, err
	}
	if rsaKey, ok := parsed.(*rsa.PublicKey); ok {
		return rsaKey, nil
	}
	return nil, errors.New("key type is not RSA")
}
func ParseRsaPrivateKeyFromPemStr(privPEM string) (*rsa.PrivateKey, error) {
block, _ := pem.Decode([]byte(privPEM))
if block == nil {
return nil, errors.New("failed to parse PEM block containing the key")
}
priv, err := x509.ParsePKCS1PrivateKey(block.Bytes)
if err != nil {
return nil, err
}
return priv, nil
}
// getBitLenFromRsaPubPemStr returns the modulus bit length of a PEM-encoded
// RSA public key.
func getBitLenFromRsaPubPemStr(pubRSA string) (int, error) {
	rsaKey, err := ParseRsaPublicKeyFromPemStr(pubRSA)
	if err != nil {
		return 0, err
	}
	return rsaKey.N.BitLen(), nil
}
// getBitLenFromRsaPrivPemStr returns the modulus bit length of a PEM-encoded
// RSA private key.
func getBitLenFromRsaPrivPemStr(privRSA string) (int, error) {
	rsaKey, err := ParseRsaPrivateKeyFromPemStr(privRSA)
	if err != nil {
		return 0, err
	}
	return rsaKey.N.BitLen(), nil
}
// GetConfigFromPubKey determines the asymmetric cipher enum for a
// PEM-encoded public key: EC curves are tried first, then RSA-2048/4096 by
// modulus length. Any other key is rejected.
func GetConfigFromPubKey(pubKey string) (AsymmetricCipher, error) {
	// First try to get it as an EC key
	asymKeyLen, err := GetECKeyTypeFromPubPemStr(pubKey)
	if err != nil { // It's not an EC key
		bitLength, err := getBitLenFromRsaPubPemStr(string(pubKey))
		if err != nil {
			return ASYMMETRIC_UNSPECIFIED, errors.New("failed to get bit length from public rsa key")
		}
		if bitLength == 4096 {
			asymKeyLen = RSA_4096
		} else if bitLength == 2048 {
			asymKeyLen = RSA_2048
		} else {
			return ASYMMETRIC_UNSPECIFIED, errors.New("unknown bitlength for RSA key")
		}
	}
	return asymKeyLen, nil
}
// GetConfigFromPrivKey determines the asymmetric cipher enum for a
// PEM-encoded private key: EC curves are tried first, then RSA-2048/4096 by
// modulus length. Any other key is rejected.
func GetConfigFromPrivKey(privKey string) (AsymmetricCipher, error) {
	// First try to get it as an EC key
	asymKeyLen, err := GetECKeyTypeFromPrivPemStr(privKey)
	if err != nil { // It's not an EC key
		bitLength, err := getBitLenFromRsaPrivPemStr(privKey)
		if err != nil {
			// Fix: the message previously said "public rsa key" even though
			// this path handles the private key.
			return ASYMMETRIC_UNSPECIFIED, errors.New("failed to get bit length from private rsa key")
		}
		if bitLength == 4096 {
			asymKeyLen = RSA_4096
		} else if bitLength == 2048 {
			asymKeyLen = RSA_2048
		} else {
			return ASYMMETRIC_UNSPECIFIED, errors.New("unknown bitlength for RSA key")
		}
	}
	return asymKeyLen, nil
}
// ========================= Wrapped key creation =========================
// SymmetricKeyFromBytes wraps raw key material in a PeacemakrKey, inferring
// the AES-GCM variant from the key length (16, 24 or 32 bytes). Any other
// length is rejected; use the raw key creation APIs for those.
func SymmetricKeyFromBytes(keyBytes []byte) (*PeacemakrKey, error) {
	cipherByLen := map[int]SymmetricCipher{
		128 / 8: AES_128_GCM,
		192 / 8: AES_192_GCM,
		256 / 8: AES_256_GCM,
	}
	cipher, ok := cipherByLen[len(keyBytes)]
	if !ok {
		return nil, errors.New("unknown length for keyBytes, need to use raw key creation APIs")
	}
	return NewPeacemakrKeyFromBytes(cipher, keyBytes), nil
}
// NewSymmetricKeyFromPassword derives a symmetric key from a password with a
// freshly generated 32-byte random salt and SHA-256, returning both the key
// and the salt (the salt must be stored to re-derive the key later).
func NewSymmetricKeyFromPassword(keylenBits int, passwordStr string, iterationCount int) (*PeacemakrKey, []byte, error) {
	var cipher SymmetricCipher
	switch keylenBits {
	case 128:
		cipher = AES_128_GCM
	case 192:
		cipher = AES_192_GCM
	case 256:
		cipher = AES_256_GCM
	default:
		return nil, nil, errors.New("unknown length for keylenBits, acceptable values are 128, 192, 256")
	}
	salt := make([]byte, 256/8)
	_, err := rand.Read(salt)
	if err != nil {
		return nil, nil, errors.New("unable to read salt from random string")
	}
	outKey := newPeacemakrKeyFromPassword(cipher, SHA_256, passwordStr, salt, iterationCount)
	return outKey, salt, nil
}
// SymmetricKeyFromPasswordAndSalt re-derives a symmetric key from a password
// and a previously stored salt (the counterpart of NewSymmetricKeyFromPassword).
func SymmetricKeyFromPasswordAndSalt(keylenBits int, passwordStr string, salt []byte, iterationCount int) (*PeacemakrKey, error) {
	var cipher SymmetricCipher
	switch keylenBits {
	case 128:
		cipher = AES_128_GCM
	case 192:
		cipher = AES_192_GCM
	case 256:
		cipher = AES_256_GCM
	default:
		return nil, errors.New("unknown length for keylenBits, acceptable values are 128, 192, 256")
	}
	outKey := newPeacemakrKeyFromPassword(cipher, SHA_256, passwordStr, salt, iterationCount)
	return outKey, nil
}
// NewPublicKeyFromPEM wraps a PEM-encoded public key in a PeacemakrKey.
func NewPublicKeyFromPEM(symm SymmetricCipher, contents string) (*PeacemakrKey, error) {
	return newPeacemakrKeyFromPubPem(symm, []byte(contents)), nil
}
// NewPrivateKeyFromPEM wraps a PEM-encoded private key in a PeacemakrKey.
func NewPrivateKeyFromPEM(symm SymmetricCipher, contents string) (*PeacemakrKey, error) {
	return newPeacemakrKeyFromPrivPem(symm, []byte(contents)), nil
}
// ========================= Operations to do on keys =========================
// IsValid reports whether the underlying C key handle is non-nil.
func (k *PeacemakrKey) IsValid() bool {
	return k.key != nil
}
// ECDHKeygen derives a new symmetric key via ECDH between this key and
// peerKey, using the given symmetric cipher for the result.
func (k *PeacemakrKey) ECDHKeygen(cipher SymmetricCipher, peerKey *PeacemakrKey) *PeacemakrKey {
	return &PeacemakrKey{
		key: C.peacemakr_key_dh_generate((C.symmetric_cipher)(cipher), k.key, peerKey.key),
	}
}
// HKDFKeygen derives a new key from this master key and keyID via the C
// core's key-derivation entry point.
func (k *PeacemakrKey) HKDFKeygen(cipher SymmetricCipher, digest MessageDigestAlgorithm, keyID []byte) (*PeacemakrKey, error) {
	if !k.IsValid() {
		return nil, errors.New("invalid master key")
	}
	cBytes := (*C.uint8_t)(C.CBytes(keyID))
	defer C.free(unsafe.Pointer(cBytes))
	cNumBytes := (C.size_t)(len(keyID))
	return &PeacemakrKey{
		key: C.peacemakr_key_new_from_master((C.symmetric_cipher)(cipher), (C.message_digest_algorithm)(digest), k.key, cBytes, cNumBytes),
	}, nil
}
// Config returns the CryptoConfig recorded in the underlying key.
func (k *PeacemakrKey) Config() (CryptoConfig, error) {
	if !k.IsValid() {
		return CryptoConfig{}, errors.New("invalid key passed to GetKeyConfig")
	}
	keyConfig := C.peacemakr_key_get_config(k.key)
	return configFromInternal(keyConfig), nil
}
// Bytes returns a copy of the raw key material held by the underlying C key.
func (k *PeacemakrKey) Bytes() ([]byte, error) {
	if !k.IsValid() {
		// Fix: the message previously said "GetKeyConfig" in this method.
		return []byte{}, errors.New("invalid key passed to Bytes")
	}
	var buf *byte
	var bufSize C.size_t
	// BUG FIX: the original `defer C.free(unsafe.Pointer(buf))` evaluated
	// its argument at defer time, when buf was still nil — so it freed nil
	// and leaked the buffer the C call allocated. A closure defers the
	// *final* value of buf instead.
	defer func() { C.free(unsafe.Pointer(buf)) }()
	success := C.peacemakr_key_get_bytes(k.key, (**C.uint8_t)(unsafe.Pointer(&buf)), (*C.size_t)(&bufSize))
	if !success {
		return []byte{}, errors.New("failed to get bytes from peacemakr key")
	}
	// C.GoBytes copies, so freeing buf afterwards is safe.
	return C.GoBytes(unsafe.Pointer(buf), C.int(bufSize)), nil
}
// Destroy frees the underlying C key and clears the handle. Calling it on an
// already-destroyed (or invalid) key is a no-op.
func (k *PeacemakrKey) Destroy() {
	if !k.IsValid() {
		return
	}
	C.peacemakr_key_free((*C.peacemakr_key_t)(k.key))
	k.key = nil
}
// ========================= Core APIs =========================
// Encrypt encrypts plaintext (data + AAD) under key, returning an opaque
// ciphertext blob.
func Encrypt(key *PeacemakrKey, plaintext Plaintext, rand RandomDevice) (*CiphertextBlob, error) {
	if !key.IsValid() {
		return nil, errors.New("invalid key passed to Encrypt")
	}
	cPlaintext := plaintextToInternal(plaintext)
	defer freeInternalPlaintext(&cPlaintext)
	blob := C.peacemakr_encrypt(key.key, (*C.plaintext_t)(unsafe.Pointer(&cPlaintext)), (*C.random_device_t)(unsafe.Pointer(&rand.randomDevice)))
	if blob == nil {
		return nil, errors.New("encryption failed")
	}
	return &CiphertextBlob{
		blob: blob,
	}, nil
}
// GetPlaintextBlob wraps a plaintext in a blob without encrypting it (e.g.
// for sign-only workflows).
func GetPlaintextBlob(plaintext Plaintext) (*CiphertextBlob, error) {
	cPlaintext := plaintextToInternal(plaintext)
	defer freeInternalPlaintext(&cPlaintext)
	blob := C.peacemakr_get_plaintext_blob((*C.plaintext_t)(unsafe.Pointer(&cPlaintext)))
	if blob == nil {
		return nil, errors.New("unable to get plaintext blob")
	}
	return &CiphertextBlob{
		blob: blob,
	}, nil
}
// ExtractPlaintextFromBlob recovers the Plaintext from a blob produced by
// GetPlaintextBlob.
func ExtractPlaintextFromBlob(blob *CiphertextBlob) (Plaintext, error) {
	var plaintext C.plaintext_t
	defer freeInternalPlaintext(&plaintext)
	if !C.peacemakr_extract_plaintext_blob(blob.blob, (*C.plaintext_t)(unsafe.Pointer(&plaintext))) {
		return Plaintext{}, errors.New("failed to extract plaintext blob")
	}
	// the C.GoBytes functions make copies of the underlying data so it's OK to free the original ptr
	return Plaintext{
		Data: C.GoBytes(unsafe.Pointer(plaintext.data), C.int(plaintext.data_len)),
		Aad: C.GoBytes(unsafe.Pointer(plaintext.aad), C.int(plaintext.aad_len)),
	}, nil
}
// Serialize converts a ciphertext blob into a transportable byte slice,
// using digest for the integrity hash.
func Serialize(digest MessageDigestAlgorithm, blob *CiphertextBlob) ([]byte, error) {
	var cSize C.size_t
	serialized := C.peacemakr_serialize((C.message_digest_algorithm)(digest), blob.blob, (*C.size_t)(unsafe.Pointer(&cSize)))
	if serialized == nil {
		return nil, errors.New("serialization failed")
	}
	return C.GoBytes(unsafe.Pointer(serialized), C.int(cSize)), nil
}
// Deserialize parses serialized bytes back into a ciphertext blob and the
// CryptoConfig the message was produced with.
func Deserialize(serialized []byte) (*CiphertextBlob, *CryptoConfig, error) {
	cBlobBytes := C.CBytes(serialized)
	defer C.free(cBlobBytes)
	cBlobLen := C.size_t(len(serialized))
	cConfig := C.crypto_config_t{}
	deserialized := C.peacemakr_deserialize((*C.uint8_t)(cBlobBytes), cBlobLen, (*C.crypto_config_t)(unsafe.Pointer(&cConfig)))
	if deserialized == nil {
		return nil, nil, errors.New("deserialization failed")
	}
	outConfig := configFromInternal(cConfig)
	return &CiphertextBlob{
		blob: deserialized,
	}, &outConfig, nil
}
// Sign attaches a signature over plaintext to the ciphertext blob using the
// sender's (private) key and the given digest.
func Sign(senderKey *PeacemakrKey, plaintext Plaintext, digest MessageDigestAlgorithm, ciphertext *CiphertextBlob) error {
	if !senderKey.IsValid() {
		return errors.New("invalid key passed to Encrypt")
	}
	cPlaintext := plaintextToInternal(plaintext)
	defer freeInternalPlaintext(&cPlaintext)
	if !C.peacemakr_sign(senderKey.key, (*C.plaintext_t)(unsafe.Pointer(&cPlaintext)), (C.message_digest_algorithm)(digest), ciphertext.blob) {
		return errors.New("signing failed")
	}
	return nil
}
func ExtractUnverifiedAAD(ciphertext []byte) ([]byte, error) {
var plaintext C.plaintext_t
defer freeInternalPlaintext(&plaintext)
if ciphertext[len(ciphertext)-1] != 0 {
ciphertext = append(ciphertext, byte(0)) // add NULL terminator
}
deserialized, _, err := Deserialize(ciphertext)
if err != nil {
return nil, err
}
extractSuccess := bool(C.peacemakr_get_unverified_aad(deserialized.blob, (*C.plaintext_t)(unsafe.Pointer(&plaintext))))
if !extractSuccess {
return nil, errors.New("extraction failed")
}
return C.GoBytes(unsafe.Pointer(plaintext.aad), C.int(plaintext.aad_len)), nil
}
// DecryptCode mirrors the C decrypt result enum.
type DecryptCode int
const (
	DECRYPT_SUCCESS DecryptCode = 0
	DECRYPT_NEED_VERIFY DecryptCode = 1
	DECRYPT_FAILED DecryptCode = 2
)
// Decrypt decrypts a ciphertext blob with key. The second return value is
// true when the message carries a signature that the caller must still check
// with Verify.
func Decrypt(key *PeacemakrKey, ciphertext *CiphertextBlob) (*Plaintext, bool, error) {
	if !key.IsValid() {
		return nil, false, errors.New("invalid key passed to Decrypt")
	}
	var plaintext C.plaintext_t
	defer freeInternalPlaintext(&plaintext)
	decryptCode := DecryptCode(C.peacemakr_decrypt(key.key, ciphertext.blob, (*C.plaintext_t)(unsafe.Pointer(&plaintext))))
	if decryptCode == DECRYPT_FAILED {
		return nil, false, errors.New("decrypt failed")
	}
	needVerify := false
	if decryptCode == DECRYPT_NEED_VERIFY {
		needVerify = true
	}
	// the C.GoBytes functions make copies of the underlying data so it's OK to free the original ptr
	return &Plaintext{
		Data: C.GoBytes(unsafe.Pointer(plaintext.data), C.int(plaintext.data_len)),
		Aad: C.GoBytes(unsafe.Pointer(plaintext.aad), C.int(plaintext.aad_len)),
	}, needVerify, nil
}
// Verify checks the signature on ciphertext against the decrypted plaintext
// using the sender's (public) key.
func Verify(senderKey *PeacemakrKey, plaintext *Plaintext, ciphertext *CiphertextBlob) error {
	if !senderKey.IsValid() {
		return errors.New("invalid key passed to Encrypt")
	}
	cPlaintext := plaintextToInternal(*plaintext)
	defer freeInternalPlaintext(&cPlaintext)
	verified := C.peacemakr_verify(senderKey.key, (*C.plaintext_t)(unsafe.Pointer(&cPlaintext)), ciphertext.blob)
	if !verified {
		return errors.New("verification failed")
	}
	return nil
}
// HMAC computes an HMAC over buf with the given digest algorithm and key,
// returning the raw MAC bytes.
func HMAC(algo MessageDigestAlgorithm, key *PeacemakrKey, buf []byte) ([]byte, error) {
	if !key.IsValid() {
		return nil, errors.New("invalid key")
	}
	// BUG FIX: the original passed C.CBytes(buf) inline and never freed it,
	// leaking C heap memory on every call.
	cBuf := C.CBytes(buf)
	defer C.free(cBuf)
	var outSize C.size_t
	hmac := C.peacemakr_hmac(C.message_digest_algorithm(algo), key.key, (*C.uint8_t)(cBuf), C.size_t(len(buf)), (*C.size_t)(unsafe.Pointer(&outSize)))
	if hmac == nil {
		return nil, errors.New("hmac failed")
	}
	// NOTE(review): the buffer returned by peacemakr_hmac is presumably
	// allocated by the C core and may also need freeing after the GoBytes
	// copy — confirm against the C API before adding a free here.
	return C.GoBytes(unsafe.Pointer(hmac), C.int(outSize)), nil
}
|
PeacemakrInit
|
0007_auto_20190214_1405.py
|
# Generated by Django 2.1 on 2019-02-14 14:05
from django.db import migrations, models
class Migration(migrations.Migration):
|
dependencies = [
('report_builder', '0006_auto_20180413_0747'),
]
operations = [
migrations.AlterField(
model_name='filterfield',
name='filter_value',
field=models.CharField(blank=True, max_length=2000),
),
]
|
|
mapexport.py
|
import PIL.Image, PIL.ImageDraw
from roomEditor import RoomEditor, ObjectHorizontal, ObjectVertical, ObjectWarp
class RenderedMap:
WALL_UP = 0x01
WALL_DOWN = 0x02
WALL_LEFT = 0x04
WALL_RIGHT = 0x08
def __init__(self, floor_object, overworld=False):
|
    def addWalls(self, flags):
        """Place wall tiles along the room edges selected by *flags*.

        *flags* is a bitmask of RenderedMap.WALL_* values. Corner tiles are
        placed wherever two selected walls meet. Tile IDs (0x21-0x28) are
        raw object IDs fed to placeObject.
        """
        for x in range(0, 10):
            if flags & RenderedMap.WALL_UP:
                self.placeObject(x, 0, 0x21)
            if flags & RenderedMap.WALL_DOWN:
                self.placeObject(x, 7, 0x22)
        for y in range(0, 8):
            if flags & RenderedMap.WALL_LEFT:
                self.placeObject(0, y, 0x23)
            if flags & RenderedMap.WALL_RIGHT:
                self.placeObject(9, y, 0x24)
        # Corner pieces where two selected walls meet.
        if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_UP:
            self.placeObject(0, 0, 0x25)
        if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_UP:
            self.placeObject(9, 0, 0x26)
        if flags & RenderedMap.WALL_LEFT and flags & RenderedMap.WALL_DOWN:
            self.placeObject(0, 7, 0x27)
        if flags & RenderedMap.WALL_RIGHT and flags & RenderedMap.WALL_DOWN:
            self.placeObject(9, 7, 0x28)
    def placeObject(self, x, y, type_id):
        """Place an object in the room's tile grid at (x, y).

        Multi-tile "macro" objects (high type IDs) are expanded recursively
        into their individual tiles; anything else is written directly into
        self.objects, with coordinates wrapped to the 16x16 grid.
        The expansion tables differ between overworld and indoor rooms.
        """
        if self.overworld:
            if type_id == 0xF5:
                # 2x2 macro whose corner tiles depend on what is already at
                # each cell (merging with neighbouring pieces).
                if self.getObject(x, y) in (0x28, 0x83, 0x90):
                    self.placeObject(x, y, 0x29)
                else:
                    self.placeObject(x, y, 0x25)
                if self.getObject(x + 1, y) in (0x27, 0x82, 0x90):
                    self.placeObject(x + 1, y, 0x2A)
                else:
                    self.placeObject(x + 1, y, 0x26)
                if self.getObject(x, y + 1) in (0x26, 0x2A):
                    self.placeObject(x, y + 1, 0x2A)
                elif self.getObject(x, y + 1) == 0x90:
                    self.placeObject(x, y + 1, 0x82)
                else:
                    self.placeObject(x, y + 1, 0x27)
                if self.getObject(x + 1, y + 1) in (0x25, 0x29):
                    self.placeObject(x + 1, y + 1, 0x29)
                elif self.getObject(x + 1, y + 1) == 0x90:
                    self.placeObject(x + 1, y + 1, 0x83)
                else:
                    self.placeObject(x + 1, y + 1, 0x28)
            elif type_id == 0xF6: # two door house
                self.placeObject(x + 0, y, 0x55)
                self.placeObject(x + 1, y, 0x5A)
                self.placeObject(x + 2, y, 0x5A)
                self.placeObject(x + 3, y, 0x5A)
                self.placeObject(x + 4, y, 0x56)
                self.placeObject(x + 0, y + 1, 0x57)
                self.placeObject(x + 1, y + 1, 0x59)
                self.placeObject(x + 2, y + 1, 0x59)
                self.placeObject(x + 3, y + 1, 0x59)
                self.placeObject(x + 4, y + 1, 0x58)
                self.placeObject(x + 0, y + 2, 0x5B)
                self.placeObject(x + 1, y + 2, 0xE2)
                self.placeObject(x + 2, y + 2, 0x5B)
                self.placeObject(x + 3, y + 2, 0xE2)
                self.placeObject(x + 4, y + 2, 0x5B)
            elif type_id == 0xF7: # large house
                self.placeObject(x + 0, y, 0x55)
                self.placeObject(x + 1, y, 0x5A)
                self.placeObject(x + 2, y, 0x56)
                self.placeObject(x + 0, y + 1, 0x57)
                self.placeObject(x + 1, y + 1, 0x59)
                self.placeObject(x + 2, y + 1, 0x58)
                self.placeObject(x + 0, y + 2, 0x5B)
                self.placeObject(x + 1, y + 2, 0xE2)
                self.placeObject(x + 2, y + 2, 0x5B)
            elif type_id == 0xF8: # catfish
                self.placeObject(x + 0, y, 0xB6)
                self.placeObject(x + 1, y, 0xB7)
                self.placeObject(x + 2, y, 0x66)
                self.placeObject(x + 0, y + 1, 0x67)
                self.placeObject(x + 1, y + 1, 0xE3)
                self.placeObject(x + 2, y + 1, 0x68)
            elif type_id == 0xF9: # palace door
                self.placeObject(x + 0, y, 0xA4)
                self.placeObject(x + 1, y, 0xA5)
                self.placeObject(x + 2, y, 0xA6)
                self.placeObject(x + 0, y + 1, 0xA7)
                self.placeObject(x + 1, y + 1, 0xE3)
                self.placeObject(x + 2, y + 1, 0xA8)
            elif type_id == 0xFA: # stone pig head
                self.placeObject(x + 0, y, 0xBB)
                self.placeObject(x + 1, y, 0xBC)
                self.placeObject(x + 0, y + 1, 0xBD)
                self.placeObject(x + 1, y + 1, 0xBE)
            elif type_id == 0xFB: # palmtree
                if x == 15:
                    # At the right map edge only the right half fits.
                    self.placeObject(x + 1, y + 1, 0xB7)
                    self.placeObject(x + 1, y + 2, 0xCE)
                else:
                    self.placeObject(x + 0, y, 0xB6)
                    self.placeObject(x + 0, y + 1, 0xCD)
                    self.placeObject(x + 1, y + 0, 0xB7)
                    self.placeObject(x + 1, y + 1, 0xCE)
            elif type_id == 0xFC: # square "hill with hole" (seen near lvl4 entrance)
                self.placeObject(x + 0, y, 0x2B)
                self.placeObject(x + 1, y, 0x2C)
                self.placeObject(x + 2, y, 0x2D)
                self.placeObject(x + 0, y + 1, 0x37)
                self.placeObject(x + 1, y + 1, 0xE8)
                self.placeObject(x + 2, y + 1, 0x38)
                self.placeObject(x - 1, y + 2, 0x0A)
                self.placeObject(x + 0, y + 2, 0x33)
                self.placeObject(x + 1, y + 2, 0x2F)
                self.placeObject(x + 2, y + 2, 0x34)
                self.placeObject(x + 0, y + 3, 0x0A)
                self.placeObject(x + 1, y + 3, 0x0A)
                self.placeObject(x + 2, y + 3, 0x0A)
                self.placeObject(x + 3, y + 3, 0x0A)
            elif type_id == 0xFD: # small house
                self.placeObject(x + 0, y, 0x52)
                self.placeObject(x + 1, y, 0x52)
                self.placeObject(x + 2, y, 0x52)
                self.placeObject(x + 0, y + 1, 0x5B)
                self.placeObject(x + 1, y + 1, 0xE2)
                self.placeObject(x + 2, y + 1, 0x5B)
            else:
                # Plain single tile; wrap coordinates to the 16x16 grid.
                self.objects[(x & 15), (y & 15)] = type_id
        else:
            # Indoor rooms: macro IDs expand to door/entrance tile pairs.
            if type_id == 0xEC: # key door
                self.placeObject(x, y, 0x2D)
                self.placeObject(x + 1, y, 0x2E)
            elif type_id == 0xED:
                self.placeObject(x, y, 0x2F)
                self.placeObject(x + 1, y, 0x30)
            elif type_id == 0xEE:
                self.placeObject(x, y, 0x31)
                self.placeObject(x, y + 1, 0x32)
            elif type_id == 0xEF:
                self.placeObject(x, y, 0x33)
                self.placeObject(x, y + 1, 0x34)
            elif type_id == 0xF0: # closed door
                self.placeObject(x, y, 0x35)
                self.placeObject(x + 1, y, 0x36)
            elif type_id == 0xF1:
                self.placeObject(x, y, 0x37)
                self.placeObject(x + 1, y, 0x38)
            elif type_id == 0xF2:
                self.placeObject(x, y, 0x39)
                self.placeObject(x, y + 1, 0x3A)
            elif type_id == 0xF3:
                self.placeObject(x, y, 0x3B)
                self.placeObject(x, y + 1, 0x3C)
            elif type_id == 0xF4: # open door
                self.placeObject(x, y, 0x43)
                self.placeObject(x + 1, y, 0x44)
            elif type_id == 0xF5:
                self.placeObject(x, y, 0x8C)
                self.placeObject(x + 1, y, 0x08)
            elif type_id == 0xF6:
                self.placeObject(x, y, 0x09)
                self.placeObject(x, y + 1, 0x0A)
            elif type_id == 0xF7:
                self.placeObject(x, y, 0x0B)
                self.placeObject(x, y + 1, 0x0C)
            elif type_id == 0xF8: # boss door
                self.placeObject(x, y, 0xA4)
                self.placeObject(x + 1, y, 0xA5)
            elif type_id == 0xF9: # stairs door
                self.placeObject(x, y, 0xAF)
                self.placeObject(x + 1, y, 0xB0)
            elif type_id == 0xFA: # flipwall
                self.placeObject(x, y, 0xB1)
                self.placeObject(x + 1, y, 0xB2)
            elif type_id == 0xFB: # one way arrow
                self.placeObject(x, y, 0x45)
                self.placeObject(x + 1, y, 0x46)
            elif type_id == 0xFC: # entrance
                self.placeObject(x + 0, y, 0xB3)
                self.placeObject(x + 1, y, 0xB4)
                self.placeObject(x + 2, y, 0xB4)
                self.placeObject(x + 3, y, 0xB5)
                self.placeObject(x + 0, y + 1, 0xB6)
                self.placeObject(x + 1, y + 1, 0xB7)
                self.placeObject(x + 2, y + 1, 0xB8)
                self.placeObject(x + 3, y + 1, 0xB9)
                self.placeObject(x + 0, y + 2, 0xBA)
                self.placeObject(x + 1, y + 2, 0xBB)
                self.placeObject(x + 2, y + 2, 0xBC)
                self.placeObject(x + 3, y + 2, 0xBD)
            elif type_id == 0xFD: # entrance
                self.placeObject(x, y, 0xC1)
                self.placeObject(x + 1, y, 0xC2)
            else:
                # Plain single tile; wrap coordinates to the 16x16 grid.
                self.objects[(x & 15), (y & 15)] = type_id
    def getObject(self, x, y):
        """Return the tile ID at (x, y), wrapped to the 16x16 grid, or None."""
        return self.objects.get(((x & 15), (y & 15)), None)
class MapExport:
    def __init__(self, rom):
        """Render every map in *rom* to PNG files and an index test.html.

        Side effects: writes overworld.png, dungeon_*.png, caves1.png,
        caves2.png, several metatiles_*.png and test.html into the current
        directory.
        """
        self.__rom = rom
        # Pre-decoded tile banks used by exportRoom/exportMetaTiles.
        self.__tiles = {
            0x0C: self.getTiles(0x0C),
            0x0D: self.getTiles(0x0D),
            0x0F: self.getTiles(0x0F),
            0x12: self.getTiles(0x12),
        }
        # room number -> (x, y, dungeon index); filled while exporting
        # dungeons so the cave exports can skip rooms already placed.
        self.__room_map_info = {}
        f = open("test.html", "wt")
        # Overworld: 16x16 rooms of 20x16 tiles, 8px each.
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x100):
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n), (x * 20 * 8, y * 16 * 8))
        result.save("overworld.png")
        f.write("<img src='overworld.png'><br><br>")
        self.exportMetaTiles(f, "metatiles_main.png", 0x0F, 0, lambda n: n >= 32 and (n < 0x6C or n >= 0x70))
        for n in (0x1A, 0x1C, 0x1E, 0x20, 0x22, 0x24, 0x26, 0x28, 0x2A, 0x2C, 0x2E, 0x30, 0x32, 0x34, 0x36, 0x38, 0x3A, 0x3C, 0x3E):
            self.exportMetaTiles(f, "metatiles_%02x.png" % (n), n, 0, lambda n: n < 32)
        for n in range(2, 17):
            self.exportMetaTiles(f, "metatiles_anim_%02x.png" % (n), 0x0F, n, lambda n: n >= 0x6C and n < 0x70)
        # Dungeons: room layout tables live in bank 0x14 at 0x0220,
        # 8x8 rooms per dungeon.
        for n in (0,1,2,3,4,5,6,7, 10, 11):
            addr = 0x0220 + n * 8 * 8
            result = PIL.Image.new("L", (8 * 20 * 8, 8 * 16 * 8))
            for y in range(8):
                for x in range(8):
                    room = rom.banks[0x14][addr] + 0x100
                    # Later dungeons index into the higher room banks.
                    if n > 5:
                        room += 0x100
                    if n == 11:
                        room += 0x100
                    addr += 1
                    if (room & 0xFF) == 0 and (n != 11 or x != 1 or y != 3): # ignore room nr 0, except on a very specific spot in the color dungeon.
                        continue
                    self.__room_map_info[room] = (x, y, n)
                    result.paste(self.exportRoom(room), (x * 20 * 8, y * 16 * 8))
            result.save("dungeon_%d.png" % (n))
            f.write("<img src='dungeon_%d.png'><br><br>" % (n))
        # Remaining indoor rooms not claimed by a dungeon: caves, houses, etc.
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x100):
            if n + 0x100 in self.__room_map_info:
                continue
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n + 0x100), (x * 20 * 8, y * 16 * 8))
        result.save("caves1.png")
        f.write("<img src='caves1.png'><br><br>")
        result = PIL.Image.new("L", (16 * 20 * 8, 16 * 16 * 8))
        for n in range(0x0FF):
            if n + 0x200 in self.__room_map_info:
                continue
            x = n % 0x10
            y = n // 0x10
            result.paste(self.exportRoom(n + 0x200), (x * 20 * 8, y * 16 * 8))
        result.save("caves2.png")
        f.write("<img src='caves2.png'>")
        f.close()
    def exportMetaTiles(self, f, name, main_set, animation_set, condition_func):
        """Render the 16x16 metatile table to *name* and reference it in *f*.

        Each metatile is a 2x2 arrangement of 8px tiles; condition_func
        selects which tile indices are drawn (used to separate the main,
        per-set, and animated tile ranges).
        """
        # Always exclude the 0x80-0xEF range on top of the caller's filter.
        condition = lambda n: condition_func(n) and (n < 0x80 or n >= 0xF0)
        # The metatile table is located by its known first entries.
        metatile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
        metatile_info = self.__rom.banks[0x1A][metatile_info_offset:metatile_info_offset + 0x100 * 4]
        result = PIL.Image.new("L", (16 * 16, 16 * 16))
        sub_tileset_offset = main_set * 0x10
        tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
        tilemap += self.__tiles[0x0c][0x120:0x180]
        tilemap += self.__tiles[0x0c][0x080:0x100]
        # Animation frames overwrite tile slots 0x6C-0x6F.
        addr = (0x000, 0x000, 0x2B0, 0x2C0, 0x2D0, 0x2E0, 0x2F0, 0x2D0, 0x300, 0x310, 0x320, 0x2A0, 0x330, 0x350, 0x360, 0x340, 0x370)[animation_set]
        tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
        for x in range(16):
            for y in range(16):
                obj = x + y * 16
                # Four table bytes per metatile: TL, TR, BL, BR tile indices.
                if condition(metatile_info[obj*4+0]):
                    result.paste(tilemap[metatile_info[obj*4+0]], (x*16+0, y*16+0))
                if condition(metatile_info[obj*4+1]):
                    result.paste(tilemap[metatile_info[obj*4+1]], (x*16+8, y*16+0))
                if condition(metatile_info[obj*4+2]):
                    result.paste(tilemap[metatile_info[obj*4+2]], (x*16+0, y*16+8))
                if condition(metatile_info[obj*4+3]):
                    result.paste(tilemap[metatile_info[obj*4+3]], (x*16+8, y*16+8))
        result.save(name)
        f.write("%s<br><img src='%s'><br><br>" % (name, name))
    def exportRoom(self, room_nr):
        """Render room *room_nr* to a 160x128 greyscale PIL image.

        Rooms < 0x100 are overworld; higher numbers are indoor/dungeon rooms
        with their own tile-info table, wall handling and tileset selection.
        Entities are drawn as outlined hex IDs; warps are listed as text.
        """
        re = RoomEditor(self.__rom, room_nr)
        # Locate the metatile table (4 tile indices per object) by its known
        # leading bytes; overworld and indoor use different banks/tables.
        if room_nr < 0x100:
            tile_info_offset = self.__rom.banks[0x1A].find(b'\x7C\x7C\x7C\x7C\x7D\x7D\x7D\x7D')
            tile_info = self.__rom.banks[0x1A][tile_info_offset:tile_info_offset + 0x100 * 4]
        else:
            tile_info_offset = self.__rom.banks[0x08].find(b'\x7F\x7F\x7F\x7F\x7E\x7E\x7E\x7E')
            tile_info = self.__rom.banks[0x08][tile_info_offset:tile_info_offset+0x100*4]
        if room_nr >= 0x100:
            # Indoors the upper nibble encodes the wall layout (handled
            # below); only the low nibble is the floor tile.
            rendered_map = RenderedMap(re.floor_object & 0x0F)
        else:
            rendered_map = RenderedMap(re.floor_object, True)
        # Horizontal/vertical step size per repeated object (0xF5 is 2 wide/tall).
        def objHSize(type_id):
            if type_id == 0xF5:
                return 2
            return 1
        def objVSize(type_id):
            if type_id == 0xF5:
                return 2
            return 1
        if room_nr >= 0x100:
            # Upper nibble of floor_object selects which walls the room has.
            if re.floor_object & 0xF0 == 0x00:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x10:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x20:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x30:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
            if re.floor_object & 0xF0 == 0x40:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x50:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x60:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_DOWN)
            if re.floor_object & 0xF0 == 0x70:
                rendered_map.addWalls(RenderedMap.WALL_RIGHT | RenderedMap.WALL_UP)
            if re.floor_object & 0xF0 == 0x80:
                rendered_map.addWalls(RenderedMap.WALL_LEFT | RenderedMap.WALL_UP)
        # Lay down the room's objects; warps are handled later as text.
        for obj in re.objects:
            if isinstance(obj, ObjectWarp):
                pass
            elif isinstance(obj, ObjectHorizontal):
                for n in range(0, obj.count):
                    rendered_map.placeObject(obj.x + n * objHSize(obj.type_id), obj.y, obj.type_id)
            elif isinstance(obj, ObjectVertical):
                for n in range(0, obj.count):
                    rendered_map.placeObject(obj.x, obj.y + n * objVSize(obj.type_id), obj.type_id)
            else:
                rendered_map.placeObject(obj.x, obj.y, obj.type_id)
        # Expand each 16px object into its four 8px tile indices.
        tiles = [0] * 20 * 16
        for y in range(8):
            for x in range(10):
                obj = rendered_map.objects[(x, y)]
                tiles[x*2 + y*2*20] = tile_info[obj*4]
                tiles[x*2+1 + y*2*20] = tile_info[obj*4+1]
                tiles[x*2 + (y*2+1)*20] = tile_info[obj*4+2]
                tiles[x*2+1 + (y*2+1)*20] = tile_info[obj*4+3]
        # Build the 256-entry tilemap (tile index -> 8x8 PIL image).
        if room_nr < 0x100:
            sub_tileset_offset = self.__rom.banks[0x20][0x2E73 + (room_nr & 0x0F) // 2 + ((room_nr >> 5) * 8)] << 4
            tilemap = self.__tiles[0x0f][sub_tileset_offset:sub_tileset_offset+0x20]
            tilemap += self.__tiles[0x0c][0x120:0x180]
            tilemap += self.__tiles[0x0c][0x080:0x100]
        else:
            # TODO: The whole indoor tileset loading seems complex...
            tileset_nr = self.__rom.banks[0x20][0x2eB3 + room_nr - 0x100]
            tilemap = [None] * 0x100
            tilemap[0x20:0x80] = self.__tiles[0x0D][0x000:0x060]
            if tileset_nr != 0xFF:
                tilemap[0x00:0x10] = self.__tiles[0x0D][0x100 + tileset_nr * 0x10:0x110 + tileset_nr * 0x10]
            tilemap[0x10:0x20] = self.__tiles[0x0D][0x210:0x220]
            tilemap[0xF0:0x100] = self.__tiles[0x12][0x380:0x390]
            # Animated tile slots 0x6C-0x6F come from a per-animation bank
            # offset; unknown animation IDs are reported and left blank.
            if re.animation_id == 2:
                addr = 0x2B0
            elif re.animation_id == 3:
                addr = 0x2C0
            elif re.animation_id == 4:
                addr = 0x2D0
            elif re.animation_id == 5:
                addr = 0x2E0
            elif re.animation_id == 6:
                addr = 0x2F0
            elif re.animation_id == 7:
                addr = 0x2D0
            elif re.animation_id == 8:
                addr = 0x300
            elif re.animation_id == 9:
                addr = 0x310
            elif re.animation_id == 10:
                addr = 0x320
            elif re.animation_id == 11:
                addr = 0x2A0
            elif re.animation_id == 12:
                addr = 0x330
            elif re.animation_id == 13:
                addr = 0x350
            elif re.animation_id == 14:
                addr = 0x360
            elif re.animation_id == 15:
                addr = 0x340
            elif re.animation_id == 16:
                addr = 0x370
            else:
                print(hex(room_nr), re.animation_id)
                addr = 0x000
            tilemap[0x6C:0x70] = self.__tiles[0x0c][addr:addr+4]
        assert len(tilemap) == 0x100
        result = PIL.Image.new('L', (8 * 20, 8 * 16))
        draw = PIL.ImageDraw.Draw(result)
        for y in range(16):
            for x in range(20):
                tile = tilemap[tiles[x+y*20]]
                if tile is not None:
                    result.paste(tile, (x * 8, y * 8))
        # NOTE(review): warp_pos is collected but never used below —
        # presumably leftover/debug code.
        warp_pos = []
        for y in range(8):
            for x in range(10):
                if rendered_map.objects[(x, y)] in (0xE1, 0xE2, 0xE3, 0xBA, 0xD5, 0xA8, 0xBE, 0xCB):
                    warp_pos.append((x, y))
        # Overlay entities as boxed hex type IDs.
        for x, y, type_id in re.entities:
            draw.rectangle([(x * 16, y * 16), (x * 16 + 15, y * 16 + 15)], outline=0)
            draw.text((x * 16 + 3, y * 16 + 2), "%02X" % (type_id))
        # List warp objects as text lines down the left side.
        y = 8
        for obj in re.objects:
            if isinstance(obj, ObjectWarp):
                draw.text((8, y), "W%d:%02x:%03x:%d,%d" % (obj.warp_type, obj.map_nr, obj.room, obj.target_x, obj.target_y))
                y += 16
        return result
def getTiles(self, bank_nr):
    """Decode one ROM bank of 2bpp Game Boy tile data into 8x8 grayscale images.

    Each tile occupies 16 bytes: two bytes per row holding the low and high
    bitplanes. Returns a list of PIL 'L'-mode images, one per tile.
    """
    bank = self.__rom.banks[bank_nr]
    pixels = bytearray(64)  # one 8x8 tile worth of pixels
    tiles = []
    for offset in range(0, len(bank), 16):
        for row in range(8):
            low = bank[offset + row * 2]
            high = bank[offset + row * 2 + 1]
            for col in range(8):
                mask = 0x80 >> col
                value = 0x3F
                # A cleared bitplane bit brightens the pixel (inverted palette).
                if not low & mask:
                    value |= 0x40
                if not high & mask:
                    value |= 0x80
                pixels[col + row * 8] = value
        tiles.append(PIL.Image.frombytes('L', (8, 8), bytes(pixels)))
    return tiles
|
self.objects = {}
self.overworld = overworld
for y in range(8):
for x in range(10):
self.objects[(x, y)] = floor_object
|
issue-14309.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![deny(improper_ctypes)]
#![allow(dead_code)]

// Not #[repr(C)]: passing `A` across an `extern "C"` boundary is not FFI-safe.
struct A {
    x: i32
}

// repr(C, packed) is acceptable for FFI, but this embeds the non-FFI-safe `A`.
#[repr(C, packed)]
struct B {
    x: i32,
    y: A
}

// Fully FFI-safe: repr(C) with only a primitive field.
#[repr(C)]
struct C {
    x: i32
}

// Type aliases are transparent to the improper_ctypes lint.
type A2 = A;
type B2 = B;
type C2 = C;

// repr(C) itself, but carries the non-FFI-safe `A` through field `y`.
#[repr(C)]
struct D {
    x: C,
    y: A
}

// Each `//~ ERROR` marker is a compiletest expectation; do not edit those lines.
extern "C" {
    fn foo(x: A); //~ ERROR type `A` which is not FFI-safe
    fn bar(x: B); //~ ERROR type `A`
    fn baz(x: C);
    fn qux(x: A2); //~ ERROR type `A`
    fn quux(x: B2); //~ ERROR type `A`
    fn corge(x: C2);
    fn fred(x: D); //~ ERROR type `A`
}
// Entry point required for the compile-fail test harness; intentionally empty.
// (Reconstructed: the original `fn main() { }` was split by extraction noise.)
fn main() { }
insert.py
|
import numpy as np
from PyQt5 import QtCore, QtGui, QtWidgets
from sscanss.config import path_for, settings
from sscanss.core.math import Plane, Matrix33, Vector3, clamp, map_range, trunc, VECTOR_EPS
from sscanss.core.geometry import mesh_plane_intersection
from sscanss.core.util import Primitives, DockFlag, StrainComponents, PointType, PlaneOptions, Attributes
from sscanss.ui.widgets import (FormGroup, FormControl, GraphicsView, GraphicsScene, create_tool_button, FormTitle,
create_scroll_area, CompareValidator, GraphicsPointItem, Grid, create_icon)
from .managers import PointManager
class InsertPrimitiveDialog(QtWidgets.QWidget):
"""Provides UI for typing in measurement/fiducial points
:param primitive: primitive type
:type primitive: Primitives
:param parent: Main window
:type parent: MainWindow
"""
dock_flag = DockFlag.Upper
def __init__(self, primitive, parent):
    """Build the primitive-insertion dialog for the given primitive type.

    :param primitive: primitive type
    :type primitive: Primitives
    :param parent: Main window
    :type parent: MainWindow
    """
    super().__init__(parent)
    self.parent = parent
    self.parent_model = self.parent.presenter.model
    self.parent.scenes.switchToSampleScene()
    self.primitive = primitive
    self.main_layout = QtWidgets.QVBoxLayout()
    self.textboxes = {}
    name = self.parent_model.uniqueKey(self.primitive.value)
    # Default dimensions (mm) per primitive; keys drive the generated form.
    self.mesh_args = {'name': name}
    if self.primitive == Primitives.Tube:
        # Reconstructed line (missing in source): a tube needs two radii which
        # createFormInputs cross-validates (outer > inner).
        self.mesh_args.update({'outer_radius': 100.000, 'inner_radius': 50.000})
    elif self.primitive == Primitives.Sphere:
        self.mesh_args.update({'radius': 100.000})
    elif self.primitive == Primitives.Cylinder:
        self.mesh_args.update({'radius': 100.000, 'height': 200.000})
    else:
        self.mesh_args.update({'width': 50.000, 'height': 100.000, 'depth': 200.000})
    self.createPrimitiveSwitcher()
    self.createFormInputs()
    button_layout = QtWidgets.QHBoxLayout()
    self.create_primitive_button = QtWidgets.QPushButton('Create')
    self.create_primitive_button.clicked.connect(self.createPrimiviteButtonClicked)
    button_layout.addWidget(self.create_primitive_button)
    button_layout.addStretch(1)
    self.main_layout.addLayout(button_layout)
    self.main_layout.addStretch(1)
    self.setLayout(self.main_layout)
    self.title = 'Insert {}'.format(self.primitive.value)
    self.setMinimumWidth(450)
    self.textboxes['name'].setFocus()
def createPrimitiveSwitcher(self):
    """Add a drop-down arrow button that switches to another primitive dialog."""
    row = QtWidgets.QHBoxLayout()
    switch_button = create_tool_button(style_name='MenuButton',
                                       status_tip='Open dialog for a different primitive')
    switch_button.setArrowType(QtCore.Qt.DownArrow)
    switch_button.setPopupMode(QtWidgets.QToolButton.InstantPopup)
    switch_button.setMenu(self.parent.primitives_menu)
    row.addStretch(1)
    row.addWidget(switch_button)
    self.main_layout.addLayout(row)
def createFormInputs(self):
    """Generate one form control per mesh_args entry and wire validation."""
    self.form_group = FormGroup()
    for key, value in self.mesh_args.items():
        pretty_label = key.replace('_', ' ').title()
        if key == 'name':
            control = FormControl(pretty_label, value, required=True)
            control.form_lineedit.textChanged.connect(self.nameCheck)
        else:
            # Dimensions are strictly positive millimetre values.
            control = FormControl(pretty_label, value, desc='mm', required=True, number=True)
            control.range(0, None, min_exclusive=True)
        self.textboxes[key] = control
        self.form_group.addControl(control)
    if self.primitive == Primitives.Tube:
        # Enforce outer radius > inner radius in both directions.
        outer_radius = self.textboxes['outer_radius']
        inner_radius = self.textboxes['inner_radius']
        outer_radius.compareWith(inner_radius, CompareValidator.Operator.Greater)
        inner_radius.compareWith(outer_radius, CompareValidator.Operator.Less)
    self.main_layout.addWidget(self.form_group)
    self.form_group.groupValidation.connect(self.formValidation)
def nameCheck(self, value):
    """Mark the name control invalid when the reserved sample key is typed.

    :param value: current text of the name control
    :type value: str
    """
    # NOTE(review): this only sets the invalid state; clearing it presumably
    # happens in the control's own validation on later edits — confirm in FormControl.
    if self.parent_model.all_sample_key == value:
        self.textboxes['name'].isInvalid(f'"{self.parent_model.all_sample_key}" is a reserved name')
def formValidation(self, is_valid):
    """Enable the create button only while the whole form is valid.

    :param is_valid: aggregate validation state emitted by the form group
    :type is_valid: bool
    """
    # Idiom: setEnabled(bool) replaces the redundant if/else enable/disable pair.
    self.create_primitive_button.setEnabled(is_valid)
def createPrimiviteButtonClicked(self):
    """Create the primitive from the current form values.

    NOTE(review): the method name misspells 'Primitive', but it is the target
    of the signal connection in __init__, so renaming would break the caller.
    """
    for key, textbox in self.textboxes.items():
        value = textbox.value
        self.mesh_args[key] = value
    self.parent.presenter.addPrimitive(self.primitive, self.mesh_args)
    # Suggest a fresh unique name so repeated clicks don't collide.
    new_name = self.parent_model.uniqueKey(self.primitive.value)
    self.textboxes['name'].value = new_name
class InsertPointDialog(QtWidgets.QWidget):
"""Provides UI for typing in measurement/fiducial points
:param point_type: point type
:type point_type: PointType
:param parent: Main window
:type parent: MainWindow
"""
dock_flag = DockFlag.Upper
def __init__(self, point_type, parent):
    """Set up the X/Y/Z entry form for a fiducial or measurement point.

    :param point_type: point type
    :type point_type: PointType
    :param parent: Main window
    :type parent: MainWindow
    """
    super().__init__(parent)
    self.parent = parent
    self.parent_model = parent.presenter.model
    self.parent.scenes.switchToSampleScene()
    self.point_type = point_type
    self.title = 'Add {} Point'.format(point_type.value)
    self.main_layout = QtWidgets.QVBoxLayout()
    unit = 'mm'
    self.form_group = FormGroup()
    # All three axes are required numeric inputs in millimetres.
    self.x_axis = FormControl('X', 0.0, required=True, desc=unit, number=True)
    self.y_axis = FormControl('Y', 0.0, required=True, desc=unit, number=True)
    self.z_axis = FormControl('Z', 0.0, required=True, desc=unit, number=True)
    self.form_group.addControl(self.x_axis)
    self.form_group.addControl(self.y_axis)
    self.form_group.addControl(self.z_axis)
    self.form_group.groupValidation.connect(self.formValidation)
    button_layout = QtWidgets.QHBoxLayout()
    self.execute_button = QtWidgets.QPushButton(self.title)
    self.execute_button.clicked.connect(self.executeButtonClicked)
    button_layout.addWidget(self.execute_button)
    button_layout.addStretch(1)
    self.main_layout.addWidget(self.form_group)
    self.main_layout.addLayout(button_layout)
    self.main_layout.addStretch(1)
    self.setLayout(self.main_layout)
    self.setMinimumWidth(450)
def formValidation(self, is_valid):
    """Enable the execute button only while the whole form is valid.

    :param is_valid: aggregate validation state emitted by the form group
    :type is_valid: bool
    """
    # Idiom: setEnabled(bool) replaces the redundant if/else enable/disable pair.
    self.execute_button.setEnabled(is_valid)
def executeButtonClicked(self):
    """Add a single enabled point built from the X/Y/Z controls."""
    coordinates = [axis.value for axis in (self.x_axis, self.y_axis, self.z_axis)]
    self.parent.presenter.addPoints([(coordinates, True)], self.point_type)
class InsertVectorDialog(QtWidgets.QWidget):
"""Provides UI for adding measurement vectors using a variety of methods
:param parent: Main window
:type parent: MainWindow
"""
dock_flag = DockFlag.Upper
def __init__(self, parent):
    """Build the measurement-vector dialog and wire it to model/scene signals.

    :param parent: Main window
    :type parent: MainWindow
    """
    super().__init__(parent)
    self.parent = parent
    self.parent_model = parent.presenter.model
    self.parent.scenes.switchToSampleScene()
    self.title = 'Add Measurement Vectors'
    self.main_layout = QtWidgets.QVBoxLayout()
    spacing = 10
    self.main_layout.addSpacing(spacing)
    self.main_layout.addWidget(QtWidgets.QLabel('Measurement Point:'))
    self.points_combobox = QtWidgets.QComboBox()
    self.points_combobox.setView(QtWidgets.QListView())
    self.main_layout.addWidget(self.points_combobox)
    self.updatePointList()
    self.main_layout.addSpacing(spacing)
    layout = QtWidgets.QHBoxLayout()
    alignment_layout = QtWidgets.QVBoxLayout()
    alignment_layout.addWidget(QtWidgets.QLabel('Alignment:'))
    self.alignment_combobox = QtWidgets.QComboBox()
    self.alignment_combobox.setView(QtWidgets.QListView())
    self.alignment_combobox.setInsertPolicy(QtWidgets.QComboBox.InsertAtCurrent)
    self.updateAlignment()
    self.alignment_combobox.activated.connect(self.addNewAlignment)
    self.alignment_combobox.currentIndexChanged.connect(self.changeRenderedAlignment)
    alignment_layout.addWidget(self.alignment_combobox)
    alignment_layout.addSpacing(spacing)
    layout.addLayout(alignment_layout)
    self.detector_combobox = QtWidgets.QComboBox()
    self.detector_combobox.setView(QtWidgets.QListView())
    self.detector_combobox.addItems(list(self.parent_model.instrument.detectors.keys()))
    # The detector selector is only shown for multi-detector instruments.
    if len(self.parent_model.instrument.detectors) > 1:
        detector_layout = QtWidgets.QVBoxLayout()
        detector_layout.addWidget(QtWidgets.QLabel('Detector:'))
        detector_layout.addWidget(self.detector_combobox)
        size = self.detector_combobox.iconSize()
        self.detector_combobox.setItemIcon(0, create_icon(settings.value(settings.Key.Vector_1_Colour), size))
        self.detector_combobox.setItemIcon(1, create_icon(settings.value(settings.Key.Vector_2_Colour), size))
        detector_layout.addSpacing(spacing)
        layout.addSpacing(spacing)
        layout.addLayout(detector_layout)
    self.main_layout.addLayout(layout)
    self.main_layout.addWidget(QtWidgets.QLabel('Strain Component:'))
    self.component_combobox = QtWidgets.QComboBox()
    self.component_combobox.setView(QtWidgets.QListView())
    strain_components = [s.value for s in StrainComponents]
    self.component_combobox.addItems(strain_components)
    self.component_combobox.currentTextChanged.connect(self.toggleKeyInBox)
    self.main_layout.addWidget(self.component_combobox)
    self.main_layout.addSpacing(spacing)
    button_layout = QtWidgets.QHBoxLayout()
    self.execute_button = QtWidgets.QPushButton(self.title)
    self.execute_button.clicked.connect(self.executeButtonClicked)
    button_layout.addWidget(self.execute_button)
    button_layout.addStretch(1)
    self.createKeyInBox()
    self.reverse_checkbox = QtWidgets.QCheckBox('Reverse Direction of Vector')
    self.main_layout.addWidget(self.reverse_checkbox)
    self.main_layout.addSpacing(spacing)
    self.main_layout.addLayout(button_layout)
    self.main_layout.addStretch(1)
    self.setLayout(self.main_layout)
    self.parent_model.measurement_points_changed.connect(self.updatePointList)
    self.parent_model.measurement_vectors_changed.connect(self.updateAlignment)
    self.parent.scenes.rendered_alignment_changed.connect(self.alignment_combobox.setCurrentIndex)
    self.setMinimumWidth(450)
def updatePointList(self):
    """Refresh the measurement-point selector from the model."""
    count = self.parent_model.measurement_points.size
    entries = ['All Points'] + ['{}'.format(index + 1) for index in range(count)]
    self.points_combobox.clear()
    self.points_combobox.addItems(entries)
def updateAlignment(self):
    """Rebuild the alignment selector to match the vector array's depth."""
    align_count = self.parent_model.measurement_vectors.shape[2]
    # The combobox holds one entry per alignment plus the trailing 'Add New...'.
    if align_count != self.alignment_combobox.count() - 1:
        self.alignment_combobox.clear()
        alignment_list = ['{}'.format(i + 1) for i in range(align_count)]
        alignment_list.append('Add New...')
        self.alignment_combobox.addItems(alignment_list)
    self.alignment_combobox.setCurrentIndex(self.parent.scenes.rendered_alignment)
def addNewAlignment(self, index):
    """Insert a new alignment entry when 'Add New...' is activated.

    :param index: activated combobox index
    :type index: int
    """
    # Only the last entry ('Add New...') triggers the insertion.
    if index == self.alignment_combobox.count() - 1:
        self.alignment_combobox.insertItem(index, '{}'.format(index + 1))
        self.alignment_combobox.setCurrentIndex(index)
def changeRenderedAlignment(self, index):
    """Render the selected alignment, hiding vectors for the 'Add New...' slot.

    :param index: selected combobox index
    :type index: int
    """
    align_count = self.parent_model.measurement_vectors.shape[2]
    if 0 <= index < align_count:
        self.parent.scenes.changeRenderedAlignment(index)
    elif index >= align_count:
        # 'Add New...' has no vectors yet, so hide the vector attribute.
        self.parent.scenes.changeVisibility(Attributes.Vectors, False)
def toggleKeyInBox(self, selected_text):
    """Show the manual X/Y/Z entry box only for the custom strain component.

    :param selected_text: selected strain component text
    :type selected_text: str
    """
    strain_component = StrainComponents(selected_text)
    if strain_component == StrainComponents.custom:
        self.key_in_box.setVisible(True)
        # Re-validate so the execute button reflects the key-in values.
        self.form_group.validateGroup()
    else:
        self.key_in_box.setVisible(False)
        self.execute_button.setEnabled(True)
def createKeyInBox(self):
    """Create the hidden X/Y/Z key-in box used by the custom strain component."""
    self.key_in_box = QtWidgets.QWidget(self)
    layout = QtWidgets.QVBoxLayout()
    self.form_group = FormGroup(FormGroup.Layout.Horizontal)
    # Direction components are restricted to [-1, 1].
    self.x_axis = FormControl('X', 1.0, required=True, number=True, decimals=7)
    self.x_axis.range(-1.0, 1.0)
    self.y_axis = FormControl('Y', 0.0, required=True, number=True, decimals=7)
    self.y_axis.range(-1.0, 1.0)
    self.z_axis = FormControl('Z', 0.0, required=True, number=True, decimals=7)
    self.z_axis.range(-1.0, 1.0)
    self.form_group.addControl(self.x_axis)
    self.form_group.addControl(self.y_axis)
    self.form_group.addControl(self.z_axis)
    self.form_group.groupValidation.connect(self.formValidation)
    layout.addWidget(self.form_group)
    self.key_in_box.setLayout(layout)
    self.main_layout.addWidget(self.key_in_box)
    # Initialize visibility from the currently selected component.
    self.toggleKeyInBox(self.component_combobox.currentText())
def formValidation(self, is_valid):
    """Enable execution only for a valid, non-degenerate custom vector.

    :param is_valid: aggregate validation state emitted by the form group
    :type is_valid: bool
    """
    self.execute_button.setDisabled(True)
    if is_valid:
        # Reject near-zero vectors which cannot define a direction.
        if np.linalg.norm([self.x_axis.value, self.y_axis.value, self.z_axis.value]) > VECTOR_EPS:
            self.x_axis.validation_label.setText('')
            self.execute_button.setEnabled(True)
        else:
            self.x_axis.validation_label.setText('Bad Normal')
def executeButtonClicked(self):
    """Gather the dialog selections and ask the presenter to add vectors."""
    points = self.points_combobox.currentIndex() - 1  # index 0 is 'All Points' -> -1
    selected_text = self.component_combobox.currentText()
    strain_component = StrainComponents(selected_text)
    alignment = self.alignment_combobox.currentIndex()
    detector = self.detector_combobox.currentIndex()
    # Idiom: direct comparison replaces 'True if ... else False'.
    reverse = self.reverse_checkbox.checkState() == QtCore.Qt.Checked
    if strain_component == StrainComponents.custom:
        vector = [self.x_axis.value, self.y_axis.value, self.z_axis.value]
    else:
        vector = None
    self.parent.presenter.addVectors(points, strain_component, alignment, detector,
                                     key_in=vector, reverse=reverse)
    # New vectors are drawn by the scene manager after function ends
    self.parent.scenes._rendered_alignment = alignment
def closeEvent(self, event):
    """Reset the rendered alignment to the first one when the dialog closes."""
    self.parent.scenes.changeRenderedAlignment(0)
    event.accept()
class PickPointDialog(QtWidgets.QWidget):
"""Provides UI for selecting measurement points on a cross section of the sample
:param parent: Main window
:type parent: MainWindow
"""
dock_flag = DockFlag.Full
def __init__(self, parent):
    """Build the graphical point-picking dialog (toolbar, view, control tabs).

    :param parent: Main window
    :type parent: MainWindow
    """
    super().__init__(parent)
    self.parent = parent
    self.parent_model = parent.presenter.model
    self.parent.scenes.switchToSampleScene()
    self.title = 'Add Measurement Points Graphically'
    self.setMinimumWidth(500)
    self.plane_offset_range = (-1., 1.)
    self.slider_range = (-10000000, 10000000)
    # Scale factor between sample coordinates (mm) and scene coordinates.
    self.sample_scale = 20
    self.path_pen = QtGui.QPen(QtGui.QColor(255, 0, 0), 0)
    self.point_pen = QtGui.QPen(QtGui.QColor(200, 0, 0), 0)
    self.main_layout = QtWidgets.QVBoxLayout()
    self.setLayout(self.main_layout)
    button_layout = QtWidgets.QHBoxLayout()
    self.help_button = create_tool_button(tooltip='Help', style_name='ToolButton',
                                          status_tip='Display shortcuts for the cross-section view',
                                          icon_path=path_for('question.png'))
    self.help_button.clicked.connect(self.showHelp)
    self.reset_button = create_tool_button(tooltip='Reset View', style_name='ToolButton',
                                           status_tip='Reset camera transformation of the cross-section view',
                                           icon_path=path_for('refresh.png'))
    self.execute_button = QtWidgets.QPushButton('Add Points')
    self.execute_button.clicked.connect(self.addPoints)
    button_layout.addWidget(self.help_button)
    button_layout.addWidget(self.reset_button)
    button_layout.addStretch(1)
    button_layout.addWidget(self.execute_button)
    self.main_layout.addLayout(button_layout)
    self.splitter = QtWidgets.QSplitter(QtCore.Qt.Vertical)
    self.splitter.setChildrenCollapsible(False)
    self.main_layout.addWidget(self.splitter)
    self.createGraphicsView()
    self.reset_button.clicked.connect(self.view.reset)
    self.createControlPanel()
    self.prepareMesh()
    self.parent_model.sample_changed.connect(self.prepareMesh)
    self.parent_model.measurement_points_changed.connect(self.updateCrossSection)
    # Defer the initial fit-in-view until the widget is first shown.
    self.initializing = True
def showEvent(self, event):
    """Fit the view to its anchor rect on first show only."""
    if self.initializing:
        self.view.fitInView(self.view.anchor, QtCore.Qt.KeepAspectRatio)
        self.initializing = False
    super().showEvent(event)
def closeEvent(self, event):
    """Remove the cross-section plane from the 3D scene when closing."""
    self.parent.scenes.removePlane()
    event.accept()
def prepareMesh(self):
    """Merge all sample meshes into one and (re)initialize the section plane."""
    self.mesh = None
    samples = self.parent_model.sample
    for _, sample in samples.items():
        if self.mesh is None:
            self.mesh = sample.copy()
        else:
            self.mesh.append(sample)
    self.scene.clear()
    # Disable the control tabs when there is no sample to slice.
    self.tabs.setEnabled(self.mesh is not None)
    if self.mesh is not None:
        self.setPlane(self.plane_combobox.currentText())
    else:
        self.parent.scenes.removePlane()
        self.view.reset()
def updateStatusBar(self, point):
    """Show the 3D world coordinate under the cursor in the main status bar.

    :param point: cursor position in view coordinates
    :type point: QtCore.QPoint
    """
    if self.view.rect().contains(point):
        transform = self.view.scene_transform.inverted()[0]
        scene_pt = transform.map(self.view.mapToScene(point)) / self.sample_scale
        # Lift the 2D plane coordinate back into 3D via the plane rotation;
        # the distance is negated because __lookAt used the inverted normal.
        world_pt = [scene_pt.x(), scene_pt.y(), -self.old_distance] @ self.matrix.transpose()
        cursor_text = f'X: {world_pt[0]:.3f} Y: {world_pt[1]:.3f} Z: {world_pt[2]:.3f}'
        self.parent.cursor_label.setText(cursor_text)
    else:
        self.parent.cursor_label.clear()
def createGraphicsView(self):
    """Create the scene/view pair for the cross-section and dock it in the splitter."""
    scene = GraphicsScene(self.sample_scale, self)
    view = GraphicsView(scene)
    view.mouse_moved.connect(self.updateStatusBar)
    view.setMinimumHeight(350)
    self.scene = scene
    self.view = view
    self.splitter.addWidget(view)
def createControlPanel(self):
    """Create the bottom tab widget holding plane, tool, grid and point tabs."""
    self.tabs = QtWidgets.QTabWidget()
    self.tabs.setMinimumHeight(250)
    self.tabs.setTabPosition(QtWidgets.QTabWidget.South)
    self.splitter.addWidget(self.tabs)
    self.createPlaneTab()
    self.createSelectionToolsTab()
    self.createGridOptionsTab()
    point_manager = PointManager(PointType.Measurement, self.parent)
    self.tabs.addTab(create_scroll_area(point_manager), 'Point Manager')
def createPlaneTab(self):
    """Create the tab used to pick the section plane and its offset."""
    layout = QtWidgets.QVBoxLayout()
    layout.addWidget(QtWidgets.QLabel('Specify Plane:'))
    self.plane_combobox = QtWidgets.QComboBox()
    self.plane_combobox.setView(QtWidgets.QListView())
    self.plane_combobox.addItems([p.value for p in PlaneOptions])
    self.plane_combobox.currentTextChanged.connect(self.setPlane)
    self.createCustomPlaneBox()
    layout.addWidget(self.plane_combobox)
    layout.addWidget(self.custom_plane_widget)
    layout.addSpacing(20)
    slider_layout = QtWidgets.QHBoxLayout()
    slider_layout.addWidget(QtWidgets.QLabel('Plane Distance from Origin (mm):'))
    # Line edit and slider are kept in sync; both end by calling movePlane.
    self.plane_lineedit = QtWidgets.QLineEdit()
    validator = QtGui.QDoubleValidator(self.plane_lineedit)
    validator.setNotation(QtGui.QDoubleValidator.StandardNotation)
    validator.setDecimals(3)
    self.plane_lineedit.setValidator(validator)
    self.plane_lineedit.textEdited.connect(self.updateSlider)
    self.plane_lineedit.editingFinished.connect(self.movePlane)
    slider_layout.addStretch(1)
    slider_layout.addWidget(self.plane_lineedit)
    layout.addLayout(slider_layout)
    self.plane_slider = QtWidgets.QSlider(QtCore.Qt.Horizontal)
    self.plane_slider.setMinimum(self.slider_range[0])
    self.plane_slider.setMaximum(self.slider_range[1])
    self.plane_slider.setFocusPolicy(QtCore.Qt.StrongFocus)
    self.plane_slider.setSingleStep(1)
    self.plane_slider.sliderMoved.connect(self.updateLineEdit)
    self.plane_slider.sliderReleased.connect(self.movePlane)
    layout.addWidget(self.plane_slider)
    layout.addStretch(1)
    plane_tab = QtWidgets.QWidget()
    plane_tab.setLayout(layout)
    self.tabs.addTab(create_scroll_area(plane_tab), 'Define Plane')
def createSelectionToolsTab(self):
    """Create the tab with the select/point/line/area drawing tools."""
    layout = QtWidgets.QVBoxLayout()
    selector_layout = QtWidgets.QHBoxLayout()
    selector_layout.addWidget(QtWidgets.QLabel('Select Geometry of Points: '))
    self.button_group = QtWidgets.QButtonGroup()
    # Button ids are the GraphicsScene.Mode enum values (see changeSceneMode).
    self.button_group.buttonClicked[int].connect(self.changeSceneMode)
    self.object_selector = create_tool_button(checkable=True, checked=True, tooltip='Select Points',
                                              status_tip='Select movable points from the cross-section view',
                                              style_name='MidToolButton', icon_path=path_for('select.png'))
    self.point_selector = create_tool_button(checkable=True, tooltip='Draw a Point',
                                             status_tip='Draw a single point at the selected position',
                                             style_name='MidToolButton', icon_path=path_for('point.png'))
    self.line_selector = create_tool_button(checkable=True, tooltip='Draw Points on Line',
                                            status_tip='Draw equally spaced points on the selected line',
                                            style_name='MidToolButton', icon_path=path_for('line_tool.png'))
    self.area_selector = create_tool_button(checkable=True, tooltip='Draw Points on Area',
                                            status_tip='Draw a grid of points on the selected area',
                                            style_name='MidToolButton', icon_path=path_for('area_tool.png'))
    self.button_group.addButton(self.object_selector, GraphicsScene.Mode.Select.value)
    self.button_group.addButton(self.point_selector, GraphicsScene.Mode.Draw_point.value)
    self.button_group.addButton(self.line_selector, GraphicsScene.Mode.Draw_line.value)
    self.button_group.addButton(self.area_selector, GraphicsScene.Mode.Draw_area.value)
    selector_layout.addWidget(self.object_selector)
    selector_layout.addWidget(self.point_selector)
    selector_layout.addWidget(self.line_selector)
    selector_layout.addWidget(self.area_selector)
    selector_layout.addStretch(1)
    self.createLineToolWidget()
    self.createAreaToolWidget()
    layout.addLayout(selector_layout)
    layout.addWidget(self.line_tool_widget)
    layout.addWidget(self.area_tool_widget)
    layout.addStretch(1)
    select_tab = QtWidgets.QWidget()
    select_tab.setLayout(layout)
    self.tabs.addTab(create_scroll_area(select_tab), 'Selection Tools')
def createGridOptionsTab(self):
    """Create the tab with grid visibility, snapping and size options."""
    layout = QtWidgets.QVBoxLayout()
    self.show_grid_checkbox = QtWidgets.QCheckBox('Show Grid')
    self.show_grid_checkbox.stateChanged.connect(self.showGrid)
    self.snap_to_grid_checkbox = QtWidgets.QCheckBox('Snap Selection to Grid')
    self.snap_to_grid_checkbox.stateChanged.connect(self.snapToGrid)
    # Snapping only makes sense while the grid is visible.
    self.snap_to_grid_checkbox.setEnabled(self.view.show_grid)
    layout.addWidget(self.show_grid_checkbox)
    layout.addWidget(self.snap_to_grid_checkbox)
    self.createGridWidget()
    layout.addWidget(self.grid_widget)
    layout.addStretch(1)
    grid_tab = QtWidgets.QWidget()
    grid_tab.setLayout(layout)
    self.tabs.addTab(create_scroll_area(grid_tab), 'Grid Options')
def createCustomPlaneBox(self):
    """Create the hidden X/Y/Z normal entry box for custom planes."""
    self.custom_plane_widget = QtWidgets.QWidget(self)
    layout = QtWidgets.QVBoxLayout()
    self.form_group = FormGroup(FormGroup.Layout.Horizontal)
    # Normal components are restricted to [-1, 1].
    self.x_axis = FormControl('X', 1.0, required=True, number=True)
    self.x_axis.range(-1.0, 1.0)
    self.y_axis = FormControl('Y', 0.0, required=True, number=True)
    self.y_axis.range(-1.0, 1.0)
    self.z_axis = FormControl('Z', 0.0, required=True, number=True)
    self.z_axis.range(-1.0, 1.0)
    self.form_group.addControl(self.x_axis)
    self.form_group.addControl(self.y_axis)
    self.form_group.addControl(self.z_axis)
    self.form_group.groupValidation.connect(self.setCustomPlane)
    layout.addWidget(self.form_group)
    self.custom_plane_widget.setLayout(layout)
def createLineToolWidget(self):
    """Create the hidden options widget for the line drawing tool."""
    self.line_tool_widget = QtWidgets.QWidget(self)
    layout = QtWidgets.QHBoxLayout()
    layout.setContentsMargins(0, 20, 0, 0)
    layout.addWidget(QtWidgets.QLabel('Number of Points: '))
    self.line_point_count_spinbox = QtWidgets.QSpinBox()
    self.line_point_count_spinbox.setValue(self.scene.line_tool_size)
    self.line_point_count_spinbox.setRange(2, 100)
    self.line_point_count_spinbox.valueChanged.connect(self.scene.setLineToolSize)
    layout.addWidget(self.line_point_count_spinbox)
    self.line_tool_widget.setVisible(False)
    self.line_tool_widget.setLayout(layout)
def createAreaToolWidget(self):
    """Create the hidden options widget for the area drawing tool."""
    self.area_tool_widget = QtWidgets.QWidget(self)
    layout = QtWidgets.QHBoxLayout()
    layout.setContentsMargins(0, 20, 0, 0)
    layout.addWidget(QtWidgets.QLabel('Number of Points: '))
    self.area_x_spinbox = QtWidgets.QSpinBox()
    self.area_x_spinbox.setValue(self.scene.area_tool_size[0])
    self.area_x_spinbox.setRange(2, 100)
    self.area_y_spinbox = QtWidgets.QSpinBox()
    self.area_y_spinbox.setValue(self.scene.area_tool_size[1])
    self.area_y_spinbox.setRange(2, 100)
    stretch_factor = 3
    layout.addStretch(1)
    layout.addWidget(QtWidgets.QLabel('X: '))
    # Either spinbox pushes both values so the scene always sees a full pair.
    self.area_x_spinbox.valueChanged.connect(lambda: self.scene.setAreaToolSize(self.area_x_spinbox.value(),
                                                                                self.area_y_spinbox.value()))
    layout.addWidget(self.area_x_spinbox, stretch_factor)
    layout.addStretch(1)
    layout.addWidget(QtWidgets.QLabel('Y: '))
    self.area_y_spinbox.valueChanged.connect(lambda: self.scene.setAreaToolSize(self.area_x_spinbox.value(),
                                                                                self.area_y_spinbox.value()))
    layout.addWidget(self.area_y_spinbox, stretch_factor)
    self.area_tool_widget.setVisible(False)
    self.area_tool_widget.setLayout(layout)
def createGridWidget(self):
    """Create the hidden grid type/size options widget."""
    self.grid_widget = QtWidgets.QWidget(self)
    main_layout = QtWidgets.QVBoxLayout()
    main_layout.setContentsMargins(0, 20, 0, 0)
    layout = QtWidgets.QHBoxLayout()
    layout.addWidget(QtWidgets.QLabel('Grid Type: '))
    grid_combobox = QtWidgets.QComboBox()
    grid_combobox.setView(QtWidgets.QListView())
    grid_combobox.addItems([g.value for g in Grid.Type])
    grid_combobox.currentTextChanged.connect(lambda value: self.setGridType(Grid.Type(value)))
    layout.addWidget(grid_combobox)
    main_layout.addLayout(layout)
    main_layout.addSpacing(20)
    layout = QtWidgets.QHBoxLayout()
    layout.addWidget(QtWidgets.QLabel('Grid Size: '))
    # Labels are set by setGridType (mm/mm for box, radius/angle for polar).
    self.grid_x_label = QtWidgets.QLabel('')
    self.grid_x_spinbox = QtWidgets.QDoubleSpinBox()
    self.grid_x_spinbox.setDecimals(1)
    self.grid_x_spinbox.setSingleStep(0.1)
    self.grid_x_spinbox.valueChanged.connect(self.changeGridSize)
    self.grid_y_label = QtWidgets.QLabel('')
    self.grid_y_spinbox = QtWidgets.QDoubleSpinBox()
    self.grid_y_spinbox.setDecimals(1)
    self.grid_y_spinbox.setSingleStep(0.1)
    self.grid_y_spinbox.valueChanged.connect(self.changeGridSize)
    stretch_factor = 3
    layout.addStretch(1)
    layout.addWidget(self.grid_x_label)
    layout.addWidget(self.grid_x_spinbox, stretch_factor)
    layout.addStretch(1)
    layout.addWidget(self.grid_y_label)
    layout.addWidget(self.grid_y_spinbox, stretch_factor)
    main_layout.addLayout(layout)
    self.setGridType(self.view.grid.type)
    self.grid_widget.setVisible(False)
    self.grid_widget.setLayout(main_layout)
def changeGridSize(self):
    """Push spinbox values to the view grid, scaling lengths to scene units."""
    # X is always a length (mm) and is scaled; Y is only a length for box
    # grids — for polar grids it is an angle and passes through unscaled.
    grid_x = int(self.grid_x_spinbox.value() * self.sample_scale)
    if self.view.grid.type == Grid.Type.Box:
        grid_y = int(self.grid_y_spinbox.value() * self.sample_scale)
    else:
        grid_y = self.grid_y_spinbox.value()
    self.view.setGridSize((grid_x, grid_y))
def setGridType(self, grid_type):
    """Switch grid type and relabel/re-range the size spinboxes accordingly.

    :param grid_type: grid type to use
    :type grid_type: Grid.Type
    """
    self.view.setGridType(grid_type)
    size = self.view.grid.size
    if grid_type == Grid.Type.Box:
        self.grid_x_label.setText('X (mm): ')
        self.grid_y_label.setText('Y (mm): ')
        self.grid_x_spinbox.setValue(size[0])
        self.grid_y_spinbox.setValue(size[1])
        self.grid_x_spinbox.setRange(0.1, 1000)
        self.grid_y_spinbox.setRange(0.1, 1000)
    else:
        # Polar grid: X is a radius, Y is an angle capped at a full turn.
        self.grid_x_label.setText('Radius (mm): ')
        self.grid_y_label.setText('Angle (degree): ')
        self.grid_x_spinbox.setValue(size[0])
        self.grid_y_spinbox.setValue(size[1])
        self.grid_x_spinbox.setRange(0.1, 1000)
        self.grid_y_spinbox.setRange(0.1, 360)
def changeSceneMode(self, button_id):
    """Switch the scene interaction mode and show the matching tool options.

    :param button_id: button-group id, equal to a GraphicsScene.Mode value
    :type button_id: int
    """
    self.scene.mode = GraphicsScene.Mode(button_id)
    self.line_tool_widget.setVisible(self.scene.mode == GraphicsScene.Mode.Draw_line)
    self.area_tool_widget.setVisible(self.scene.mode == GraphicsScene.Mode.Draw_area)
def showHelp(self):
    """Toggle the shortcut help overlay on the cross-section view."""
    self.view.show_help = not self.view.has_foreground
    self.scene.update()
def showGrid(self, state):
    """Show/hide the grid, its options and the snap checkbox.

    :param state: checkbox state from the stateChanged signal
    :type state: QtCore.Qt.CheckState
    """
    # Idiom: direct comparison replaces 'True if ... else False'.
    self.view.show_grid = state == QtCore.Qt.Checked
    self.snap_to_grid_checkbox.setEnabled(self.view.show_grid)
    self.grid_widget.setVisible(self.view.show_grid)
    self.scene.update()
def snapToGrid(self, state):
    """Enable/disable snapping of drawn points to the grid.

    :param state: checkbox state from the stateChanged signal
    :type state: QtCore.Qt.CheckState
    """
    # Idiom: direct comparison replaces 'True if ... else False'.
    self.view.snap_to_grid = state == QtCore.Qt.Checked
def updateSlider(self, value):
    """Mirror a line-edit change onto the slider and move the preview plane.

    :param value: text typed into the distance line edit
    :type value: str
    """
    if not self.plane_lineedit.hasAcceptableInput():
        return
    new_distance = clamp(float(value), *self.plane_offset_range)
    # Map the mm offset range onto the integer slider range.
    slider_value = int(map_range(*self.plane_offset_range, *self.slider_range, new_distance))
    self.plane_slider.setValue(slider_value)
    # Move the 3D preview plane by the delta; the model plane is only
    # committed in movePlane when editing finishes.
    offset = new_distance - self.old_distance
    self.parent.scenes.movePlane(offset * self.plane.normal)
    self.old_distance = new_distance
def updateLineEdit(self, value):
    """Mirror a slider move onto the line edit and move the preview plane.

    :param value: slider position
    :type value: int
    """
    new_distance = trunc(map_range(*self.slider_range, *self.plane_offset_range, value), 3)
    self.plane_lineedit.setText('{:.3f}'.format(new_distance))
    # Move the 3D preview plane by the delta; committed later in movePlane.
    offset = new_distance - self.old_distance
    self.parent.scenes.movePlane(offset * self.plane.normal)
    self.old_distance = new_distance
def movePlane(self):
    """Commit the edited distance to the section plane and redraw the slice."""
    distance = clamp(float(self.plane_lineedit.text()), *self.plane_offset_range)
    self.plane_lineedit.setText('{:.3f}'.format(distance))
    # Rebuild the plane through the point at the given distance along its normal.
    point = distance * self.plane.normal
    self.plane = Plane(self.plane.normal, point)
    self.updateCrossSection()
def setCustomPlane(self, is_valid):
    """Create a section plane from the custom-normal form when it validates.

    :param is_valid: aggregate validation state emitted by the form group
    :type is_valid: bool
    """
    if is_valid:
        normal = np.array([self.x_axis.value, self.y_axis.value, self.z_axis.value])
        try:
            self.initializePlane(normal, self.mesh.bounding_box.center)
        except ValueError:
            # Plane construction rejects degenerate (near-zero) normals.
            self.x_axis.validation_label.setText('Bad Normal')
def setPlane(self, selected_text):
    """Switch the cross-section plane based on the combobox selection."""
    is_custom = selected_text == PlaneOptions.Custom.value
    self.custom_plane_widget.setVisible(is_custom)
    if is_custom:
        # A custom plane is created via setCustomPlane once the form validates.
        self.form_group.validateGroup()
        return
    if selected_text == PlaneOptions.XY.value:
        plane_normal = np.array([0., 0., 1.])
    elif selected_text == PlaneOptions.XZ.value:
        plane_normal = np.array([0., 1., 0.])
    else:
        plane_normal = np.array([1., 0., 0.])
    self.initializePlane(plane_normal, self.mesh.bounding_box.center)
def initializePlane(self, plane_normal, plane_point):
    """Create the section plane, size the offset range and sync the controls.

    :param plane_normal: normal of the plane
    :param plane_point: point on the plane
    """
    self.plane = Plane(plane_normal, plane_point)
    plane_size = self.mesh.bounding_box.radius
    self.parent.scenes.drawPlane(self.plane, 2 * plane_size, 2 * plane_size)
    distance = self.plane.distanceFromOrigin()
    # The plane may travel one bounding radius either side of its start.
    self.plane_offset_range = (distance - plane_size, distance + plane_size)
    slider_value = int(map_range(*self.plane_offset_range, *self.slider_range, distance))
    self.plane_slider.setValue(slider_value)
    self.plane_lineedit.setText('{:.3f}'.format(distance))
    self.old_distance = distance
    # inverted the normal so that the y-axis is flipped
    self.matrix = self.__lookAt(-Vector3(self.plane.normal))
    self.view.resetTransform()
    self.updateCrossSection()
def updateCrossSection(self):
    """Redraw the cross-section outline and the points lying on the plane."""
    self.scene.clear()
    segments = mesh_plane_intersection(self.mesh, self.plane)
    if len(segments) == 0:
        return
    segments = np.array(segments)
    item = QtWidgets.QGraphicsPathItem()
    cross_section_path = QtGui.QPainterPath()
    # Rotate the 3D segments into the plane's 2D frame and scale to scene units.
    rotated_segments = self.sample_scale * (segments @ self.matrix)
    for i in range(0, rotated_segments.shape[0], 2):
        start = rotated_segments[i, :]
        cross_section_path.moveTo(start[0], start[1])
        end = rotated_segments[i + 1, :]
        cross_section_path.lineTo(end[0], end[1])
    item.setPath(cross_section_path)
    item.setPen(self.path_pen)
    item.setTransform(self.view.scene_transform)
    self.scene.addItem(item)
    rect = item.boundingRect()
    anchor = rect.center()
    # Select measurement points whose signed distance to the plane is ~zero.
    ab = self.plane.point - self.parent_model.measurement_points.points
    d = np.einsum('ij,ij->i', np.expand_dims(self.plane.normal, axis=0), ab)
    index = np.where(np.abs(d) < VECTOR_EPS)[0]
    rotated_points = self.parent_model.measurement_points.points[index, :]
    rotated_points = rotated_points @ self.matrix
    for i, p in zip(index, rotated_points):
        point = QtCore.QPointF(p[0], p[1]) * self.sample_scale
        point = self.view.scene_transform.map(point)
        item = GraphicsPointItem(point, size=self.scene.point_size)
        item.setToolTip(f'Point {i + 1}')
        item.fixed = True
        item.makeControllable(self.scene.mode == GraphicsScene.Mode.Select)
        item.setPen(self.point_pen)
        self.scene.addItem(item)
        rect = rect.united(item.boundingRect().translated(point))
    # calculate new rectangle that encloses original rect with a different anchor
    # BUG FIX: QRectF.united returns a new rect instead of mutating in place;
    # the original discarded the result, so the re-anchored rect was never used.
    rect = rect.united(rect.translated(anchor - rect.center()))
    self.view.setSceneRect(rect)
    self.view.fitInView(rect, QtCore.Qt.KeepAspectRatio)
    self.view.anchor = rect
@staticmethod
def __lookAt(forward):
    """Build a rotation matrix whose third column is the given forward vector.

    :param forward: view direction (plane normal, negated by the caller)
    :type forward: Vector3
    :return: rotation matrix with columns (left, up, forward)
    :rtype: Matrix33
    """
    rot_matrix = Matrix33.identity()
    # Pick an up vector that is not parallel to forward to avoid a degenerate cross.
    up = Vector3([0., -1., 0.]) if -VECTOR_EPS < forward[1] < VECTOR_EPS else Vector3([0., 0., 1.])
    left = up ^ forward
    left.normalize()
    up = forward ^ left
    rot_matrix.c1[:3] = left
    rot_matrix.c2[:3] = up
    rot_matrix.c3[:3] = forward
    return rot_matrix
def addPoints(self):
    """Convert user-drawn 2D scene points back to 3D and add them to the model."""
    if len(self.scene.items()) < 2:
        return
    points_2d = []
    transform = self.view.scene_transform.inverted()[0]
    for item in self.scene.items():
        # Only user-drawn (non-fixed) point items are collected and consumed.
        if isinstance(item, GraphicsPointItem) and not item.fixed:
            pos = transform.map(item.pos()) / self.sample_scale
            # negate distance due to inverted normal when creating matrix
            points_2d.append([pos.x(), pos.y(), -self.old_distance])
            self.scene.removeItem(item)
    if not points_2d:
        return
    # NOTE(review): list @ Matrix33 relies on the matrix type's right-matmul;
    # the [::-1] restores drawing order (scene.items() is top-most first).
    points = points_2d[::-1] @ self.matrix.transpose()
    enabled = [True] * points.shape[0]
    self.parent.presenter.addPoints(list(zip(points, enabled)), PointType.Measurement, False)
class AlignSample(QtWidgets.QWidget):
    """Provides UI for aligning sample on instrument with 6D pose

    :param parent: Main window
    :type parent: MainWindow
    """
    dock_flag = DockFlag.Upper

    def __init__(self, parent):
        super().__init__(parent)
        self.parent = parent
        self.parent.scenes.switchToInstrumentScene()
        self.title = 'Align Sample with 6D pose'
        self.setMinimumWidth(450)

        self.main_layout = QtWidgets.QVBoxLayout()
        self.setLayout(self.main_layout)
        self.main_layout.addSpacing(20)
        self.main_layout.addWidget(FormTitle('Create Transformation for Alignment'))
        self.main_layout.addSpacing(10)

        # Translation inputs (X, Y, Z in millimetres)
        self.main_layout.addWidget(QtWidgets.QLabel('Translation along the X, Y, and Z axis (mm):'))
        self.position_form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_position = FormControl('X', 0.0, required=True, number=True)
        self.y_position = FormControl('Y', 0.0, required=True, number=True)
        self.z_position = FormControl('Z', 0.0, required=True, number=True)
        self.position_form_group.addControl(self.x_position)
        self.position_form_group.addControl(self.y_position)
        self.position_form_group.addControl(self.z_position)
        self.position_form_group.groupValidation.connect(self.formValidation)
        self.main_layout.addWidget(self.position_form_group)

        # Rotation inputs (X, Y, Z in degrees, limited to one full turn either way)
        self.main_layout.addWidget(QtWidgets.QLabel('Rotation around the X, Y, and Z axis (degrees):'))
        self.orientation_form_group = FormGroup(FormGroup.Layout.Horizontal)
        self.x_rotation = FormControl('X', 0.0, required=True, number=True)
        self.x_rotation.range(-360.0, 360.0)
        self.y_rotation = FormControl('Y', 0.0, required=True, number=True)
        self.y_rotation.range(-360.0, 360.0)
        self.z_rotation = FormControl('Z', 0.0, required=True, number=True)
        self.z_rotation.range(-360.0, 360.0)
        self.orientation_form_group.addControl(self.x_rotation)
        self.orientation_form_group.addControl(self.y_rotation)
        self.orientation_form_group.addControl(self.z_rotation)
        self.orientation_form_group.groupValidation.connect(self.formValidation)
        self.main_layout.addWidget(self.orientation_form_group)

        button_layout = QtWidgets.QHBoxLayout()
        self.execute_button = QtWidgets.QPushButton('Align Sample')
        self.execute_button.clicked.connect(self.executeButtonClicked)
        button_layout.addWidget(self.execute_button)
        button_layout.addStretch(1)
        self.main_layout.addLayout(button_layout)
        self.main_layout.addStretch(1)

    def formValidation(self):
        """Enables the execute button only when both form groups are valid."""
        if self.position_form_group.valid and self.orientation_form_group.valid:
            self.execute_button.setEnabled(True)
        else:
            self.execute_button.setDisabled(True)

    def executeButtonClicked(self):
        """Collects the 6D pose from the form and forwards it to the presenter."""
        # Pose order is (tx, ty, tz, rz, ry, rx) — the rotations are listed in
        # reverse axis order relative to the translations; confirm against
        # alignSampleWithPose's expected convention.
        pose = [self.x_position.value, self.y_position.value, self.z_position.value,
                self.z_rotation.value, self.y_rotation.value, self.x_rotation.value]
        self.parent.presenter.alignSampleWithPose(pose)
|
self.mesh_args.update({'outer_radius': 100.000, 'inner_radius': 50.000, 'height': 200.000})
|
AlexaSkill.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const jovo_core_1 = require("jovo-core");
const _get = require("lodash.get");
const _set = require("lodash.set");
const AlexaResponse_1 = require("./AlexaResponse");
const AlexaAPI_1 = require("../services/AlexaAPI");
const AmazonProfileAPI_1 = require("../services/AmazonProfileAPI");
const AlexaSpeechBuilder_1 = require("./AlexaSpeechBuilder");
const index_1 = require("../index");
const alexa_enums_1 = require("./alexa-enums");
class AlexaSkill extends jovo_core_1.Jovo {
constructor(app, host, handleRequest) {
super(app, host, handleRequest);
this.$alexaSkill = this;
this.$response = new AlexaResponse_1.AlexaResponse();
this.$speech = new AlexaSpeechBuilder_1.AlexaSpeechBuilder(this);
this.$reprompt = new AlexaSpeechBuilder_1.AlexaSpeechBuilder(this);
}
/**
* Returns Speechbuilder object initialized for the platform
* @public
* @return {SpeechBuilder}
*/
speechBuilder() {
return this.getSpeechBuilder();
}
/**
* Returns Speechbuilder object initialized for the platform
* @public
* @return {SpeechBuilder}
*/
getSpeechBuilder() {
return new AlexaSpeechBuilder_1.AlexaSpeechBuilder(this);
}
/**
* Returns boolean if request is part of new session
* @public
* @return {boolean}
*/
isNewSession() {
return this.$request.isNewSession();
}
/**
* Returns timestamp of a user's request
* @returns {string | undefined}
*/
getTimestamp() {
return this.$request.getTimestamp();
}
/**
* Returns locale of the request
* @deprecated use this.$request.getLocale() instead
* @returns {string}
*/
getLocale() {
return this.$request.getLocale();
}
/**
* Returns UserID
* @deprecated Use this.$user.getId() instead.
* @public
* @return {string}
*/
getUserId() {
return _get(this.$request, 'session.user.userId') || _get(this.$request, 'context.user.userId');
}
/**
* Sends an asynchronous speak directive
* @param {string | SpeechBuilder} speech
* @param {Function} callback
* @return {Promise}
*/
progressiveResponse(speech, callback) {
const alexaRequest = this.$request;
if (callback) {
AlexaAPI_1.AlexaAPI.progressiveResponse(speech, alexaRequest.getRequestId(), alexaRequest.getApiEndpoint(), alexaRequest.getApiAccessToken()).then(() => callback());
}
else {
return AlexaAPI_1.AlexaAPI.progressiveResponse(speech, alexaRequest.getRequestId(), alexaRequest.getApiEndpoint(), alexaRequest.getApiAccessToken());
}
}
/**
* Makes a request to the amazon profile api
* @public
* @param {func} callback
*/
requestAmazonProfile(callback) {
const alexaRequest = this.$request;
if (callback) {
AmazonProfileAPI_1.AmazonProfileAPI.requestAmazonProfile(alexaRequest.getAccessToken()).then(() => callback());
}
else {
return AmazonProfileAPI_1.AmazonProfileAPI.requestAmazonProfile(alexaRequest.getAccessToken());
}
}
/**
* Returns device id
* @returns {string | undefined}
*/
getDeviceId() {
return _get(this.$request, 'context.System.device.deviceId');
}
/**
* Returns audio capability of request device
* @public
* @return {boolean}
*/
hasAudioInterface() {
return this.$request.hasAudioInterface();
}
/**
* Returns screen capability of request device
* @public
* @return {boolean}
*/
hasScreenInterface() {
return this.$request.hasScreenInterface();
}
/**
* Returns screen capability of request device
* @public
* @return {boolean}
*/
hasVideoInterface() {
return this.$request.hasVideoInterface();
}
/**
* Returns APL capability of request device
* @public
* @return {boolean}
*/
hasAPLInterface() {
return this.$request.hasAPLInterface();
}
/**
* Returns the amazon pay permission status
* @public
* @return {PermissionStatus | undefined}
*/
getAmazonPayPermissionStatus() {
return this.$request.getAmazonPayPermissionStatus();
}
/**
* Returns true if the amazon pay permission was granted
* @return {boolean}
*/
isAmazonPayPermissionGranted() {
return this.$request.isAmazonPayPermissionGranted();
}
/**
* Returns true if the amazon pay permission was denied
* @return {boolean}
*/
isAmazonPayPermissionDenied() {
return this.$request.isAmazonPayPermissionDenied();
}
/**
* Returns geo location capability of request device
* @public
* @return {boolean}
*/
hasGeoLocationInterface() {
return this.$request.hasGeoLocationInterface();
}
/**
* Returns the geolocation permission status
* @return {PermissionStatus | undefined}
*/
getGeoLocationPermissionStatus() {
return this.$request.getGeoLocationPermissionStatus();
}
/**
* Returns true if geolocation permission was denied
* @return {boolean}
*/
isGeoLocationPermissionDenied() {
return this.$request.isGeoLocationPermissionDenied();
}
/**
* Returns true if geolocation permission was granted
* @return {boolean}
*/
isGeoLocationPermissionGranted() {
return this.$request.isGeoLocationPermissionGranted();
}
/**
* Returns the whole geolocation object
* @return {Geolocation | undefined}
*/
getGeoLocationObject() {
return this.$request.getGeoLocationObject();
}
/**
* Returns geolocation timestamp
* @return {string | undefined} ISO 8601
*/
getGeoLocationTimestamp() {
return this.$request.getGeoLocationTimestamp();
}
/**
* Returns geolocation location services object
* @return {LocationServices | undefined}
*/
getLocationServicesObject() {
return this.$request.getLocationServicesObject();
}
/**
* Returns geolocation location services access
* @return {LocationServicesAccess | undefined}
*/
getLocationServicesAccess() {
return this.$request.getLocationServicesAccess();
}
/**
* Returns geolocation location services status
* @return {LocationServicesStatus | undefined}
*/
getLocationServicesStatus() {
return this.$request.getLocationServicesStatus();
}
/**
* Returns geolocation coordinate object
* @return {Coordinate | undefined}
*/
getCoordinateObject() {
return this.$request.getCoordinateObject();
}
/**
* Returns geolocation coordinate latitude in degrees
* @return {number | undefined} [-90.0, 90.0]
*/
getCoordinateLatitude() {
return this.$request.getCoordinateLatitude();
}
/**
* Returns geolocation coordinate longitude in degrees
* @return {number | undefined} [-180.0, 180]
*/
getCoordinateLongitude() {
return this.$request.getCoordinateLongitude();
}
/**
* Returns geolocation coordinate accuracy in meters
* @return {number | undefined} [0, MAX_INTEGER]
*/
getCoordinateAccuracy() {
return this.$request.getCoordinateAccuracy();
}
/**
* Returns geolocation altitude object
* @return {Altitude | undefined}
*/
getAltitudeObject() {
return this.$request.getAltitudeObject();
}
/**
* Returns geolocation altitude in meters
* @return {number | undefined} [-6350, 18000]
*/
getAltitude() {
return this.$request.getAltitude();
}
/**
* Returns geolocation altitude accuracy in meters
* @return {number | undefined} [0, MAX_INTEGER]
*/
getAltitudeAccuracy() {
return this.$request.getAltitudeAccuracy();
}
/**
* Returns geolocation heading object
* @return {Heading | undefined}
*/
getHeadingObject() {
return this.$request.getHeadingObject();
}
/**
* Returns geolocation heading direction in degrees
* @return {number | undefined} (0.0, 360.0]
*/
getHeadingDirection() {
return this.$request.getHeadingDirection();
}
/**
* Returns geolocation heading accuracy in degrees
* @return {number | undefined} [0, MAX_INTEGER]
*/
getHeadingAccuracy() {
return this.$request.getHeadingAccuracy();
}
/**
* Returns geolocation speed object
* @return {Speed}
*/
getSpeedObject() {
return this.$request.getSpeedObject();
}
/**
* Returns geolocation speed in meters per second
* @return {number | undefined} [0, 1900]
*/
getSpeed() {
return this.$request.getSpeed();
}
/**
* Returns geolocation speed accuracy in meters per second
* @return {number | undefined} [0, MAX_INTEGER]
*/
getSpeedAccuracy() {
return this.$request.getSpeedAccuracy();
}
/**
* Returns type of platform jovo implementation
* @public
* @return {string}
*/
getType() {
return 'AlexaSkill';
}
/**
* Returns type of platform type
* @public
* @return {string}
*/
getPlatformType() {
return 'Alexa';
}
/**
* Adds raw json directive to output object
* @param directive
*/
addDirective(directive) {
const directives = _get(this.$output, 'Alexa.Directives', []);
directives.push(directive);
_set(this.$output, 'Alexa.Directives', directives);
}
/**
* Returns id of the touched/selected item
* @public
* @return {*}
*/
getSelectedElementId() {
return _get(this.$request, 'request.arguments') || _get(this.$request, 'request.token');
}
/**
* Returns raw text.
* Only available with catchAll slots
* @return {String} rawText
*/
getRawText() {
if (!this.$inputs || this.$inputs.catchAll) {
throw new Error('Only available with catchAll slot');
}
return _get(this, '$inputs.catchAll.value');
}
/**
* Returns template builder by type
* @public
* @param {string} type
* @return {*}
*/
templateBuilder(type) {
if (type === 'BodyTemplate1') {
return new index_1.BodyTemplate1();
}
if (type === 'BodyTemplate2') {
return new index_1.BodyTemplate2();
}
if (type === 'BodyTemplate3') {
return new index_1.BodyTemplate3();
}
if (type === 'BodyTemplate6') {
return new index_1.BodyTemplate6();
}
if (type === 'BodyTemplate7') {
return new index_1.BodyTemplate7();
}
if (type === 'ListTemplate1') {
return new index_1.ListTemplate1();
}
if (type === 'ListTemplate2') {
return new index_1.ListTemplate2();
}
if (type === 'ListTemplate3') {
return new index_1.ListTemplate3();
}
}
/**
* Returns reason code for an end of a session
*
* @public
* @return {*}
*/
getEndReason() {
return _get(this.$request, 'request.reason');
}
/**
* Returns error object
*
* @public
* @return {*}
*/
getError() {
return _get(this.$request, 'request.error');
}
/**
* Returns skill id
* @returns {string | undefined}
*/
getSkillId() {
return _get(this.$request, 'session.application.applicationId');
}
/**
* Deletes shouldEndSession property
* @public
*/
deleteShouldEndSession(value = true) {
_set(this.$output, 'Alexa.deleteShouldEndSession', value);
return this;
}
/**
* Sets value for shouldEndSession. Removes shouldEndSession when null
* @public
*/
shouldEndSession(value) {
_set(this.$output, 'Alexa.shouldEndSession', value);
return this;
}
/**
* Returns true if the current request is of type ON_EVENT
* @public
* @return {boolean}
*/
isEventRequest() {
return this.$type.type === alexa_enums_1.EnumAlexaRequestType.ON_EVENT;
}
/**
* Returns true if the current request is of type ON_PURCHASE
* @public
* @return {boolean}
*/
isPurchaseRequest() {
return this.$type.type === alexa_enums_1.EnumAlexaRequestType.ON_PURCHASE;
}
/**
* Returns true if the current request is of type CAN_FULFILL_INTENT
* @public
* @return {boolean}
*/
isCanFulfillIntentRequest() {
return this.$type.type === alexa_enums_1.EnumAlexaRequestType.CAN_FULFILL_INTENT;
}
/**
* Returns true if the current request is of type PLAYBACKCONTROLLER
* @public
* @return {boolean}
*/
isPlaybackControllerRequest() {
return this.$type.type === alexa_enums_1.EnumAlexaRequestType.PLAYBACKCONTROLLER;
}
/**
* Returns true if the current request is of type ON_GAME_ENGINE_INPUT_HANDLER_EVENT
* @public
* @return {boolean}
*/
isGameEngineInputHandlerEventRequest() {
return this.$type.type === alexa_enums_1.EnumAlexaRequestType.ON_GAME_ENGINE_INPUT_HANDLER_EVENT;
}
}
exports.AlexaSkill = AlexaSkill;
//# sourceMappingURL=AlexaSkill.js.map
|
/**
|
decl-macro-illegal-copy.rs
|
// Regression test for #46314
#![feature(decl_macro)]

// Deliberately not `Copy`: moving a value of this type out of a place
// invalidates that place.
struct NonCopy(String);

struct Wrapper {
    inner: NonCopy,
}

// Declarative (2.0) macro expanding to a field access on the given binding;
// the expansion moves `$wrapper.inner` out of the struct.
macro inner_copy($wrapper:ident) {
    $wrapper.inner
}

fn main() {
    let wrapper = Wrapper {
        inner: NonCopy("foo".into()),
    };
    // The first argument moves `wrapper.inner` via the macro expansion, so
    // the second, direct use of `wrapper.inner` must be rejected by borrowck.
    assert_two_non_copy(
        inner_copy!(wrapper),
        wrapper.inner,
        //~^ ERROR use of moved value: `wrapper.inner` [E0382]
    );
}

fn assert_two_non_copy(a: NonCopy, b: NonCopy) {
    assert_eq!(a.0, b.0);
}
| |
xsd.js
|
// Copyright IBM Corp. 2016,2017. All Rights Reserved.
// Node module: strong-soap
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
// @ts-check
"use strict";
var helper = require("./helper");
var builtinTypes;
function getBuiltinTypes() {
if (builtinTypes) return builtinTypes;
builtinTypes = {};
var SimpleType = require("./xsd/simpleType");
for (let t in helper.schemaTypes) {
let type = new SimpleType(
"xsd:simpleType",
{ name: t, "xmlns:xsd": helper.namespaces.xsd },
{}
);
type.targetNamespace = helper.namespaces.xsd;
type.jsType = helper.schemaTypes[t];
builtinTypes[t] = type;
}
return builtinTypes;
}
exports.getBuiltinTypes = getBuiltinTypes;
exports.getBuiltinType = function (name) {
return getBuiltinTypes()[name];
};
function
|
(value, type) {
var SimpleType = require("./xsd/simpleType");
}
|
parse
|
cleanup_dd.py
|
"""
Copyright 2016 Fabric S.P.A, Emmanuel Benazera, Alexandre Girard
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
from dd_client import DD
HOST = 'localhost'
PORT = 8080
dd = DD(HOST,PORT)
dd.set_return_format(dd.RETURN_PYTHON)
def delete_dd_service(sname):
|
# main
# Query the DeepDetect server for its registered services and delete each
# one, leaving the instance in a clean state.
info = dd.info()
# in case there are remaining services, remove them
for s in info['head']['services']:
    sname = s['name']
    delete_dd_service(sname)
|
dd.delete_service(sname,clear='')
|
source_map.rs
|
//! Types for tracking pieces of source code within a crate.
//!
//! The [`SourceMap`] tracks all the source code used within a single crate, mapping
//! from integer byte positions to the original source code location. Each bit
//! of source parsed during crate parsing (typically files, in-memory strings,
//! or various bits of macro expansion) cover a continuous range of bytes in the
//! `SourceMap` and are represented by [`SourceFile`]s. Byte positions are stored in
//! [`Span`] and used pervasively in the compiler. They are absolute positions
//! within the `SourceMap`, which upon request can be converted to line and column
//! information, source code snippets, etc.
pub use crate::hygiene::{ExpnData, ExpnKind};
pub use crate::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_data_structures::sync::{AtomicU32, Lrc, MappedReadGuard, ReadGuard, RwLock};
use std::hash::Hash;
use std::path::{Path, PathBuf};
use std::sync::atomic::Ordering;
use std::{clone::Clone, cmp};
use std::{convert::TryFrom, unreachable};
use std::fs;
use std::io;
use tracing::debug;
#[cfg(test)]
mod tests;
/// Returns the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_data` chain.
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
    // Iterative form of the recursion: walk up the expansion chain until we
    // reach a root span, or one expanded at the same call site as
    // `enclosing_sp`. The enclosing expansion data is loop-invariant.
    let enclosing_expn = enclosing_sp.ctxt().outer_expn_data();
    let mut current = sp;
    loop {
        let expn = current.ctxt().outer_expn_data();
        if expn.is_root() || !enclosing_expn.is_root() && expn.call_site == enclosing_expn.call_site
        {
            return current;
        }
        current = expn.call_site;
    }
}
pub mod monotonic {
    use std::ops::{Deref, DerefMut};

    /// A `MonotonicVec` is a `Vec` which can only be grown.
    /// Once inserted, an element can never be removed or swapped,
    /// guaranteeing that any indices into a `MonotonicVec` are stable
    // This is declared in its own module to ensure that the private
    // field is inaccessible
    pub struct MonotonicVec<T>(Vec<T>);

    impl<T> MonotonicVec<T> {
        /// Wraps an existing vector; indices into `val` remain valid forever.
        pub fn new(val: Vec<T>) -> MonotonicVec<T> {
            MonotonicVec(val)
        }

        /// Appends an element — the only mutation this type allows.
        pub fn push(&mut self, val: T) {
            self.0.push(val);
        }
    }

    impl<T> Default for MonotonicVec<T> {
        fn default() -> Self {
            MonotonicVec::new(vec![])
        }
    }

    // Read-only `Vec` access via deref...
    impl<T> Deref for MonotonicVec<T> {
        type Target = Vec<T>;

        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    // ...and an explicit opt-out of `DerefMut`, so callers can never remove
    // or reorder elements through the deref.
    impl<T> !DerefMut for MonotonicVec<T> {}
}
/// A value of type `T` together with the source `Span` it came from.
#[derive(Clone, Encodable, Decodable, Debug, Copy, HashStable_Generic)]
pub struct Spanned<T> {
    pub node: T,
    pub span: Span,
}

/// Attaches the span `sp` to the value `t`.
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
    Spanned { node: t, span: sp }
}

/// Attaches `DUMMY_SP` (i.e. no real source location) to the value `t`.
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
    respan(DUMMY_SP, t)
}
// _____________________________________________________________________________
// SourceFile, MultiByteChar, FileName, FileLines
//
/// An abstraction over the fs operations used by the Parser.
pub trait FileLoader {
    /// Query the existence of a file.
    fn file_exists(&self, path: &Path) -> bool;

    /// Read the contents of a UTF-8 file into memory.
    fn read_file(&self, path: &Path) -> io::Result<String>;
}

/// A FileLoader that uses std::fs to load real files.
pub struct RealFileLoader;

impl FileLoader for RealFileLoader {
    fn file_exists(&self, path: &Path) -> bool {
        path.exists()
    }

    // Delegates straight to std; any io::Error (missing file, bad UTF-8)
    // is passed through to the caller.
    fn read_file(&self, path: &Path) -> io::Result<String> {
        fs::read_to_string(path)
    }
}
/// This is a [SourceFile] identifier that is used to correlate source files between
/// subsequent compilation sessions (which is something we need to do during
/// incremental compilation).
///
/// The [StableSourceFileId] also contains the CrateNum of the crate the source
/// file was originally parsed for. This way we get two separate entries in
/// the [SourceMap] if the same file is part of both the local and an upstream
/// crate. Trying to only have one entry for both cases is problematic because
/// at the point where we discover that there's a local use of the file in
/// addition to the upstream one, we might already have made decisions based on
/// the assumption that it's an upstream file. Treating the two files as
/// different has no real downsides.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable, Debug)]
pub struct StableSourceFileId {
    // A hash of the source file's FileName. This is hash so that it's size
    // is more predictable than if we included the actual FileName value.
    pub file_name_hash: u64,

    // The CrateNum of the crate this source file was originally parsed for.
    // We cannot include this information in the hash because at the time
    // of hashing we don't have the context to map from the CrateNum's numeric
    // value to a StableCrateId.
    pub cnum: CrateNum,
}

// FIXME: we need a more globally consistent approach to the problem solved by
// StableSourceFileId, perhaps built atop source_file.name_hash.
impl StableSourceFileId {
    /// Builds the stable ID for `source_file` from its name and owning crate.
    pub fn new(source_file: &SourceFile) -> StableSourceFileId {
        StableSourceFileId::new_from_name(&source_file.name, source_file.cnum)
    }

    // Hashes `name` with a StableHasher so the ID is reproducible across
    // compilation sessions.
    fn new_from_name(name: &FileName, cnum: CrateNum) -> StableSourceFileId {
        let mut hasher = StableHasher::new();
        name.hash(&mut hasher);
        StableSourceFileId { file_name_hash: hasher.finish(), cnum }
    }
}
// _____________________________________________________________________________
// SourceMap
//
/// The set of files tracked by a `SourceMap`, kept together behind one lock.
#[derive(Default)]
pub(super) struct SourceMapFiles {
    // Files in insertion order; indices are stable (MonotonicVec only grows).
    source_files: monotonic::MonotonicVec<Lrc<SourceFile>>,
    // Lookup from a session-stable ID to the corresponding file.
    stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>>,
}

/// Tracks all source files of a crate and maps absolute byte positions back
/// to files, lines and columns (see the module docs).
pub struct SourceMap {
    /// The address space below this value is currently used by the files in the source map.
    used_address_space: AtomicU32,
    files: RwLock<SourceMapFiles>,
    // Used to read files on demand; `SourceMap::new` installs `RealFileLoader`,
    // but an alternative loader can be supplied.
    file_loader: Box<dyn FileLoader + Sync + Send>,
    // This is used to apply the file path remapping as specified via
    // `--remap-path-prefix` to all `SourceFile`s allocated within this `SourceMap`.
    path_mapping: FilePathMapping,
    /// The algorithm used for hashing the contents of each source file.
    hash_kind: SourceFileHashAlgorithm,
}
impl SourceMap {
pub fn new(path_mapping: FilePathMapping) -> SourceMap {
Self::with_file_loader_and_hash_kind(
Box::new(RealFileLoader),
path_mapping,
SourceFileHashAlgorithm::Md5,
)
}
pub fn with_file_loader_and_hash_kind(
file_loader: Box<dyn FileLoader + Sync + Send>,
path_mapping: FilePathMapping,
hash_kind: SourceFileHashAlgorithm,
) -> SourceMap {
SourceMap {
used_address_space: AtomicU32::new(0),
files: Default::default(),
file_loader,
path_mapping,
hash_kind,
}
}
pub fn path_mapping(&self) -> &FilePathMapping {
&self.path_mapping
}
pub fn file_exists(&self, path: &Path) -> bool {
self.file_loader.file_exists(path)
}
pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
let src = self.file_loader.read_file(path)?;
let filename = path.to_owned().into();
Ok(self.new_source_file(filename, src))
}
/// Loads source file as a binary blob.
///
/// Unlike `load_file`, guarantees that no normalization like BOM-removal
/// takes place.
pub fn load_binary_file(&self, path: &Path) -> io::Result<Vec<u8>> {
// Ideally, this should use `self.file_loader`, but it can't
// deal with binary files yet.
let bytes = fs::read(path)?;
// We need to add file to the `SourceMap`, so that it is present
// in dep-info. There's also an edge case that file might be both
// loaded as a binary via `include_bytes!` and as proper `SourceFile`
// via `mod`, so we try to use real file contents and not just an
// empty string.
let text = std::str::from_utf8(&bytes).unwrap_or("").to_string();
self.new_source_file(path.to_owned().into(), text);
Ok(bytes)
}
// By returning a `MonotonicVec`, we ensure that consumers cannot invalidate
// any existing indices pointing into `files`.
pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Lrc<SourceFile>>> {
ReadGuard::map(self.files.borrow(), |files| &files.source_files)
}
pub fn source_file_by_stable_id(
&self,
stable_id: StableSourceFileId,
) -> Option<Lrc<SourceFile>> {
self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned()
}
fn allocate_address_space(&self, size: usize) -> Result<usize, OffsetOverflowError> {
let size = u32::try_from(size).map_err(|_| OffsetOverflowError)?;
loop {
let current = self.used_address_space.load(Ordering::Relaxed);
let next = current
.checked_add(size)
// Add one so there is some space between files. This lets us distinguish
// positions in the `SourceMap`, even in the presence of zero-length files.
.and_then(|next| next.checked_add(1))
.ok_or(OffsetOverflowError)?;
if self
.used_address_space
.compare_exchange(current, next, Ordering::Relaxed, Ordering::Relaxed)
.is_ok()
{
return Ok(usize::try_from(current).unwrap());
}
}
}
/// Creates a new `SourceFile`.
/// If a file already exists in the `SourceMap` with the same ID, that file is returned
/// unmodified.
pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| {
eprintln!("fatal error: rustc does not support files larger than 4GB");
crate::fatal_error::FatalError.raise()
})
}
fn try_new_source_file(
&self,
filename: FileName,
src: String,
) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
// Note that filename may not be a valid path, eg it may be `<anon>` etc,
// but this is okay because the directory determined by `path.pop()` will
// be empty, so the working directory will be used.
let (filename, _) = self.path_mapping.map_filename_prefix(&filename);
let file_id = StableSourceFileId::new_from_name(&filename, LOCAL_CRATE);
let lrc_sf = match self.source_file_by_stable_id(file_id) {
Some(lrc_sf) => lrc_sf,
None => {
let start_pos = self.allocate_address_space(src.len())?;
let source_file = Lrc::new(SourceFile::new(
filename,
src,
Pos::from_usize(start_pos),
self.hash_kind,
));
// Let's make sure the file_id we generated above actually matches
// the ID we generate for the SourceFile we just created.
debug_assert_eq!(StableSourceFileId::new(&source_file), file_id);
let mut files = self.files.borrow_mut();
files.source_files.push(source_file.clone());
files.stable_id_to_source_file.insert(file_id, source_file.clone());
source_file
}
};
Ok(lrc_sf)
}
/// Allocates a new `SourceFile` representing a source file from an external
/// crate. The source code of such an "imported `SourceFile`" is not available,
/// but we still know enough to generate accurate debuginfo location
/// information for things inlined from other crates.
pub fn new_imported_source_file(
&self,
filename: FileName,
src_hash: SourceFileHash,
name_hash: u128,
source_len: usize,
cnum: CrateNum,
mut file_local_lines: Vec<BytePos>,
mut file_local_multibyte_chars: Vec<MultiByteChar>,
mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
mut file_local_normalized_pos: Vec<NormalizedPos>,
original_start_pos: BytePos,
original_end_pos: BytePos,
) -> Lrc<SourceFile> {
let start_pos = self
.allocate_address_space(source_len)
.expect("not enough address space for imported source file");
let end_pos = Pos::from_usize(start_pos + source_len);
let start_pos = Pos::from_usize(start_pos);
for pos in &mut file_local_lines {
*pos = *pos + start_pos;
}
for mbc in &mut file_local_multibyte_chars {
mbc.pos = mbc.pos + start_pos;
}
for swc in &mut file_local_non_narrow_chars {
*swc = *swc + start_pos;
}
for nc in &mut file_local_normalized_pos {
nc.pos = nc.pos + start_pos;
}
let source_file = Lrc::new(SourceFile {
name: filename,
src: None,
src_hash,
external_src: Lock::new(ExternalSource::Foreign {
kind: ExternalSourceKind::AbsentOk,
original_start_pos,
original_end_pos,
}),
start_pos,
end_pos,
lines: file_local_lines,
multibyte_chars: file_local_multibyte_chars,
non_narrow_chars: file_local_non_narrow_chars,
normalized_pos: file_local_normalized_pos,
name_hash,
cnum,
});
let mut files = self.files.borrow_mut();
files.source_files.push(source_file.clone());
files
.stable_id_to_source_file
.insert(StableSourceFileId::new(&source_file), source_file.clone());
source_file
}
// If there is a doctest offset, applies it to the line.
pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize {
    match file {
        FileName::DocTest(_, offset) => {
            // The stored offset is signed: negative when the doctest body
            // sits above the reported position. `orig` is unsigned, so the
            // negative case negates first, then widens before subtracting.
            if *offset < 0 {
                orig - (-(*offset)) as usize
            } else {
                orig + *offset as usize
            }
        }
        _ => orig,
    }
}
/// Return the SourceFile that contains the given `BytePos`
pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> {
let idx = self.lookup_source_file_idx(pos);
(*self.files.borrow().source_files)[idx].clone()
}
/// Looks up source information about a `BytePos`.
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
let sf = self.lookup_source_file(pos);
let (line, col, col_display) = sf.lookup_file_pos_with_col_display(pos);
Loc { file: sf, line, col, col_display }
}
// If the corresponding `SourceFile` is empty, does not return a line number.
pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
let f = self.lookup_source_file(pos);
match f.lookup_line(pos) {
Some(line) => Ok(SourceFileAndLine { sf: f, line }),
None => Err(f),
}
}
fn span_to_string(&self, sp: Span, prefer_local: bool) -> String {
if self.files.borrow().source_files.is_empty() || sp.is_dummy() {
return "no-location".to_string();
}
let lo = self.lookup_char_pos(sp.lo());
let hi = self.lookup_char_pos(sp.hi());
format!(
"{}:{}:{}: {}:{}",
if prefer_local { lo.file.name.prefer_local() } else { lo.file.name.prefer_remapped() },
lo.line,
lo.col.to_usize() + 1,
hi.line,
hi.col.to_usize() + 1,
)
}
/// Format the span location suitable for embedding in build artifacts
pub fn span_to_embeddable_string(&self, sp: Span) -> String {
self.span_to_string(sp, false)
}
/// Format the span location to be printed in diagnostics. Must not be emitted
/// to build artifacts as this may leak local file paths. Use span_to_embeddable_string
/// for string suitable for embedding.
pub fn span_to_diagnostic_string(&self, sp: Span) -> String {
self.span_to_string(sp, true)
}
pub fn span_to_filename(&self, sp: Span) -> FileName {
self.lookup_char_pos(sp.lo()).file.name.clone()
}
pub fn is_multiline(&self, sp: Span) -> bool {
let lo = self.lookup_source_file_idx(sp.lo());
let hi = self.lookup_source_file_idx(sp.hi());
if lo != hi {
return true;
}
let f = (*self.files.borrow().source_files)[lo].clone();
f.lookup_line(sp.lo()) != f.lookup_line(sp.hi())
}
pub fn is_valid_span(&self, sp: Span) -> Result<(Loc, Loc), SpanLinesError> {
let lo = self.lookup_char_pos(sp.lo());
debug!("span_to_lines: lo={:?}", lo);
let hi = self.lookup_char_pos(sp.hi());
debug!("span_to_lines: hi={:?}", hi);
if lo.file.start_pos != hi.file.start_pos {
return Err(SpanLinesError::DistinctSources(DistinctSources {
begin: (lo.file.name.clone(), lo.file.start_pos),
end: (hi.file.name.clone(), hi.file.start_pos),
}));
}
Ok((lo, hi))
}
pub fn is_line_before_span_empty(&self, sp: Span) -> bool {
match self.span_to_prev_source(sp) {
Ok(s) => s.rsplit_once('\n').unwrap_or(("", &s)).1.trim_start().is_empty(),
Err(_) => false,
}
}
/// Splits `sp` into the per-line segments it covers, as `LineInfo`s with
/// 0-based line indexes and start/end columns.
pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
    debug!("span_to_lines(sp={:?})", sp);
    let (lo, hi) = self.is_valid_span(sp)?;
    assert!(hi.line >= lo.line);
    if sp.is_dummy() {
        return Ok(FileLines { file: lo.file, lines: Vec::new() });
    }
    let mut lines = Vec::with_capacity(hi.line - lo.line + 1);
    // The span starts partway through the first line,
    // but after that it starts from offset 0.
    let mut start_col = lo.col;
    // For every line but the last, it extends from `start_col`
    // and to the end of the line. Be careful because the line
    // numbers in Loc are 1-based, so we subtract 1 to get 0-based
    // lines.
    //
    // FIXME: now that we handle DUMMY_SP up above, we should consider
    // asserting that the line numbers here are all indeed 1-based.
    let hi_line = hi.line.saturating_sub(1);
    for line_index in lo.line.saturating_sub(1)..hi_line {
        // Column count is in characters, not bytes (see `CharPos`).
        let line_len = lo.file.get_line(line_index).map_or(0, |s| s.chars().count());
        lines.push(LineInfo { line_index, start_col, end_col: CharPos::from_usize(line_len) });
        start_col = CharPos::from_usize(0);
    }
    // For the last line, it extends from `start_col` to `hi.col`:
    lines.push(LineInfo { line_index: hi_line, start_col, end_col: hi.col });
    Ok(FileLines { file: lo.file, lines })
}
/// Extracts the source surrounding the given `Span` using the `extract_source` function. The
/// extract function takes three arguments: a string slice containing the source, an index in
/// the slice for the beginning of the span and an index in the slice for the end of the span.
fn span_to_source<F, T>(&self, sp: Span, extract_source: F) -> Result<T, SpanSnippetError>
where
    F: Fn(&str, usize, usize) -> Result<T, SpanSnippetError>,
{
    let local_begin = self.lookup_byte_offset(sp.lo());
    let local_end = self.lookup_byte_offset(sp.hi());
    if local_begin.sf.start_pos != local_end.sf.start_pos {
        Err(SpanSnippetError::DistinctSources(DistinctSources {
            begin: (local_begin.sf.name.clone(), local_begin.sf.start_pos),
            end: (local_end.sf.name.clone(), local_end.sf.start_pos),
        }))
    } else {
        // Lazily load external source if it is not resident yet.
        self.ensure_source_file_source_present(local_begin.sf.clone());
        let start_index = local_begin.pos.to_usize();
        let end_index = local_end.pos.to_usize();
        let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
        // Guard against spans that do not fit inside the file's byte range.
        if start_index > end_index || end_index > source_len {
            return Err(SpanSnippetError::MalformedForSourcemap(MalformedSourceMapPositions {
                name: local_begin.sf.name.clone(),
                source_len,
                begin_pos: local_begin.pos,
                end_pos: local_end.pos,
            }));
        }
        // Prefer the in-memory source; fall back to externally loaded source.
        if let Some(ref src) = local_begin.sf.src {
            extract_source(src, start_index, end_index)
        } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
            extract_source(src, start_index, end_index)
        } else {
            Err(SpanSnippetError::SourceNotAvailable { filename: local_begin.sf.name.clone() })
        }
    }
}
/// Returns the source snippet as `String` corresponding to the given `Span`.
pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
    self.span_to_source(sp, |src, start_index, end_index| {
        // `str::get` (not slicing) so that out-of-bounds or non-char-boundary
        // indices produce `IllFormedSpan` instead of panicking.
        src.get(start_index..end_index)
            .map(|s| s.to_string())
            .ok_or(SpanSnippetError::IllFormedSpan(sp))
    })
}
/// Returns the leading-whitespace width (in bytes) of the line `sp` starts on,
/// or `None` when the preceding source cannot be retrieved.
pub fn span_to_margin(&self, sp: Span) -> Option<usize> {
    let source = self.span_to_prev_source(sp).ok()?;
    let last_line = source.rsplit_once('\n').map_or(source.as_str(), |(_, tail)| tail);
    Some(last_line.len() - last_line.trim_start().len())
}
/// Returns the source snippet as `String` before the given `Span`
/// (everything from the start of the file up to `sp.lo()`).
pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
    self.span_to_source(sp, |src, start_index, _| {
        src.get(..start_index).map(|s| s.to_string()).ok_or(SpanSnippetError::IllFormedSpan(sp))
    })
}
/// Extends the given `Span` to just after the previous occurrence of `c`. Return the same span
/// if no character could be found or if an error occurred while retrieving the code snippet.
pub fn span_extend_to_prev_char(&self, sp: Span, c: char, accept_newlines: bool) -> Span {
    if let Ok(prev_source) = self.span_to_prev_source(sp) {
        // Text between the last occurrence of `c` and the start of `sp`.
        let prev_source = prev_source.rsplit(c).next().unwrap_or("");
        if !prev_source.is_empty() && (accept_newlines || !prev_source.contains('\n')) {
            return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
        }
    }
    sp
}
/// Extends the given `Span` to just after the previous occurrence of `pat` when surrounded by
/// whitespace. Returns the same span if no character could be found or if an error occurred
/// while retrieving the code snippet.
pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span {
    // assure that the pattern is delimited, to avoid the following
    //     fn my_fn()
    //           ^^^^ returned span without the check
    //     ---------- correct span
    for ws in &[" ", "\t", "\n"] {
        let pat = pat.to_owned() + ws;
        if let Ok(prev_source) = self.span_to_prev_source(sp) {
            let prev_source = prev_source.rsplit(&pat).next().unwrap_or("").trim_start();
            if prev_source.is_empty() && sp.lo().0 != 0 {
                // The pattern ends immediately before `sp`; pull `lo` back by one byte.
                return sp.with_lo(BytePos(sp.lo().0 - 1));
            } else if accept_newlines || !prev_source.contains('\n') {
                return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
            }
        }
    }
    sp
}
/// Returns the source snippet as `String` after the given `Span`
/// (everything from `sp.hi()` to the end of the file).
pub fn span_to_next_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
    self.span_to_source(sp, |src, _, end_index| {
        src.get(end_index..).map(|s| s.to_string()).ok_or(SpanSnippetError::IllFormedSpan(sp))
    })
}
/// Extends the given `Span` to just before the next occurrence of `c`. Leaves
/// the span unchanged when nothing precedes `c` or the snippet is unavailable.
pub fn span_extend_to_next_char(&self, sp: Span, c: char, accept_newlines: bool) -> Span {
    if let Ok(next_src) = self.span_to_next_source(sp) {
        let run = next_src.split(c).next().unwrap_or("");
        let newline_ok = accept_newlines || !run.contains('\n');
        if !run.is_empty() && newline_ok {
            return sp.with_hi(BytePos(sp.hi().0 + run.len() as u32));
        }
    }
    sp
}
/// Given a `Span`, tries to get a shorter span ending before the first
/// occurrence of the character `c` (with trailing whitespace trimmed).
pub fn span_until_char(&self, sp: Span, c: char) -> Span {
    if let Ok(snippet) = self.span_to_snippet(sp) {
        let prefix = snippet.split(c).next().unwrap_or("").trim_end();
        if !prefix.is_empty() && !prefix.contains('\n') {
            return sp.with_hi(BytePos(sp.lo().0 + prefix.len() as u32));
        }
    }
    sp
}
/// Given a `Span`, tries to get a shorter span ending just after the first
/// occurrence of the character `c`.
pub fn span_through_char(&self, sp: Span, c: char) -> Span {
    match self.span_to_snippet(sp).ok().and_then(|snippet| snippet.find(c)) {
        // Include `c` itself, accounting for its UTF-8 width.
        Some(offset) => sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32)),
        None => sp,
    }
}
/// Given a `Span`, gets a new `Span` covering the first token and all its trailing whitespace
/// or the original `Span`.
///
/// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned.
pub fn span_until_non_whitespace(&self, sp: Span) -> Span {
    let mut whitespace_found = false;
    // Stateful predicate: accept chars until the first whitespace is seen,
    // then keep accepting only whitespace (i.e. token + trailing whitespace).
    self.span_take_while(sp, |c| {
        if !whitespace_found && c.is_whitespace() {
            whitespace_found = true;
        }
        !whitespace_found || c.is_whitespace()
    })
}
/// Given a `Span`, gets a new `Span` covering the first token without its trailing whitespace
/// or the original `Span` in case of error.
///
/// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned.
pub fn span_until_whitespace(&self, sp: Span) -> Span {
    self.span_take_while(sp, |c| !c.is_whitespace())
}
/// Given a `Span`, gets a shorter one until `predicate` yields `false`.
pub fn span_take_while<P>(&self, sp: Span, predicate: P) -> Span
where
    P: for<'r> FnMut(&'r char) -> bool,
{
    if let Ok(snippet) = self.span_to_snippet(sp) {
        // Sum UTF-8 byte widths of the accepted prefix, since spans are byte-based.
        let offset = snippet.chars().take_while(predicate).map(|c| c.len_utf8()).sum::<usize>();
        sp.with_hi(BytePos(sp.lo().0 + (offset as u32)))
    } else {
        sp
    }
}
/// Given a `Span`, return a span ending in the closest `{`. This is useful when you have a
/// `Span` enclosing a whole item but we need to point at only the head (usually the first
/// line) of that item.
///
/// *Only suitable for diagnostics.*
pub fn guess_head_span(&self, sp: Span) -> Span {
    // FIXME: extend the AST items to have a head span, or replace callers with pointing at
    // the item's ident when appropriate.
    self.span_until_char(sp, '{')
}
/// Returns a new span representing just the first character of the given span.
pub fn start_point(&self, sp: Span) -> Span {
    // Width of the first character in UTF-8 bytes; falls back to 1 when the
    // source is unavailable or the span is at end-of-file.
    let width = {
        let sp = sp.data();
        let local_begin = self.lookup_byte_offset(sp.lo);
        let start_index = local_begin.pos.to_usize();
        // Keep the external-source borrow alive for the whole block so the
        // `&src[..]` slice below remains valid.
        let src = local_begin.sf.external_src.borrow();
        let snippet = if let Some(ref src) = local_begin.sf.src {
            Some(&src[start_index..])
        } else if let Some(src) = src.get_source() {
            Some(&src[start_index..])
        } else {
            None
        };
        match snippet {
            None => 1,
            Some(snippet) => match snippet.chars().next() {
                None => 1,
                Some(c) => c.len_utf8(),
            },
        }
    };
    sp.with_hi(BytePos(sp.lo().0 + width as u32))
}
/// Returns a new span representing just the last character of this span.
pub fn end_point(&self, sp: Span) -> Span {
    let pos = sp.hi().0;
    // Step back over the (possibly multibyte) character ending at `hi`.
    let width = self.find_width_of_character_at_span(sp, false);
    let corrected_end_position = pos.checked_sub(width).unwrap_or(pos);
    // Never move the start past `lo` of the original span.
    let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0));
    sp.with_lo(end_point)
}
/// Returns a new span representing the next character after the end-point of this span.
pub fn next_point(&self, sp: Span) -> Span {
    if sp.is_dummy() {
        return sp;
    }
    let start_of_next_point = sp.hi().0;
    let width = self.find_width_of_character_at_span(sp.shrink_to_hi(), true);
    // If the width is 1, then the next span should point to the same `lo` and `hi`. However,
    // in the case of a multibyte character, where the width != 1, the next span should
    // span multiple bytes to include the whole character.
    let end_of_next_point =
        start_of_next_point.checked_add(width - 1).unwrap_or(start_of_next_point);
    let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point));
    Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt())
}
/// Finds the width of the character, either before or after the end of provided span,
/// depending on the `forwards` parameter.
fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 {
    let sp = sp.data();
    // All failure paths below return a width of 1 (a safe single-byte guess).
    if sp.lo == sp.hi {
        debug!("find_width_of_character_at_span: early return empty span");
        return 1;
    }
    let local_begin = self.lookup_byte_offset(sp.lo);
    let local_end = self.lookup_byte_offset(sp.hi);
    debug!(
        "find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`",
        local_begin, local_end
    );
    if local_begin.sf.start_pos != local_end.sf.start_pos {
        debug!("find_width_of_character_at_span: begin and end are in different files");
        return 1;
    }
    let start_index = local_begin.pos.to_usize();
    let end_index = local_end.pos.to_usize();
    debug!(
        "find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`",
        start_index, end_index
    );
    // Disregard indexes that are at the start or end of their spans, they can't fit bigger
    // characters.
    if (!forwards && end_index == usize::MIN) || (forwards && start_index == usize::MAX) {
        debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte");
        return 1;
    }
    let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
    debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len);
    // Ensure indexes are also not malformed.
    if start_index > end_index || end_index > source_len {
        debug!("find_width_of_character_at_span: source indexes are malformed");
        return 1;
    }
    let src = local_begin.sf.external_src.borrow();
    // We need to extend the snippet to the end of the src rather than to end_index so when
    // searching forwards for boundaries we've got somewhere to search.
    let snippet = if let Some(ref src) = local_begin.sf.src {
        &src[start_index..]
    } else if let Some(src) = src.get_source() {
        &src[start_index..]
    } else {
        return 1;
    };
    debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet);
    // Walk until `target` lands on a UTF-8 character boundary.
    let mut target = if forwards { end_index + 1 } else { end_index - 1 };
    debug!("find_width_of_character_at_span: initial target=`{:?}`", target);
    while !snippet.is_char_boundary(target - start_index) && target < source_len {
        target = if forwards {
            target + 1
        } else {
            match target.checked_sub(1) {
                Some(target) => target,
                None => {
                    break;
                }
            }
        };
        debug!("find_width_of_character_at_span: target=`{:?}`", target);
    }
    debug!("find_width_of_character_at_span: final target=`{:?}`", target);
    if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 }
}
/// Looks up a `SourceFile` by (remapped) name, returning a shared handle to it.
pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
    // Remap the queried filename before comparing against stored names.
    let mapped = self.path_mapping().map_filename_prefix(filename).0;
    self.files
        .borrow()
        .source_files
        .iter()
        .find(|sf| sf.name == mapped)
        .cloned()
}
/// For a global `BytePos`, computes the local offset within the containing `SourceFile`.
pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
    let idx = self.lookup_source_file_idx(bpos);
    let sf = (*self.files.borrow().source_files)[idx].clone();
    // Offsets within a file are relative to that file's `start_pos`.
    let offset = bpos - sf.start_pos;
    SourceFileAndBytePos { sf, pos: offset }
}
// Returns the index of the `SourceFile` (in `self.files`) that contains `pos`.
// This index is guaranteed to be valid for the lifetime of this `SourceMap`,
// since `source_files` is a `MonotonicVec`
pub fn
|
(&self, pos: BytePos) -> usize {
self.files
.borrow()
.source_files
.binary_search_by_key(&pos, |key| key.start_pos)
.unwrap_or_else(|p| p - 1)
}
/// Total number of source lines across every file in this source map.
pub fn count_lines(&self) -> usize {
    self.files().iter().map(|f| f.count_lines()).sum()
}
/// Given a span inside a function, tries to produce a span covering the
/// function's name by extending back to the preceding `fn` keyword.
pub fn generate_fn_name_span(&self, span: Span) -> Option<Span> {
    let prev_span = self.span_extend_to_prev_str(span, "fn", true);
    if let Ok(snippet) = self.span_to_snippet(prev_span) {
        debug!(
            "generate_fn_name_span: span={:?}, prev_span={:?}, snippet={:?}",
            span, prev_span, snippet
        );
        if snippet.is_empty() {
            return None;
        };
        // The name ends at the first character that is not part of an identifier.
        let len = snippet
            .find(|c: char| !c.is_alphanumeric() && c != '_')
            .expect("no label after fn");
        Some(prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)))
    } else {
        None
    }
}
/// Takes the span of a type parameter in a function signature and try to generate a span for
/// the function name (with generics) and a new snippet for this span with the pointed type
/// parameter as a new local type parameter.
///
/// For instance:
/// ```rust,ignore (pseudo-Rust)
/// // Given span
/// fn my_function(param: T)
/// //                    ^ Original span
///
/// // Result
/// fn my_function(param: T)
/// //  ^^^^^^^^^^^ Generated span with snippet `my_function<T>`
/// ```
///
/// Attention: The method used is very fragile since it essentially duplicates the work of the
/// parser. If you need to use this function or something similar, please consider updating the
/// `SourceMap` functions and this function to something more robust.
pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> {
    // Try to extend the span to the previous "fn" keyword to retrieve the function
    // signature.
    let sugg_span = self.span_extend_to_prev_str(span, "fn", false);
    if sugg_span != span {
        if let Ok(snippet) = self.span_to_snippet(sugg_span) {
            // Consume the function name.
            let mut offset = snippet
                .find(|c: char| !c.is_alphanumeric() && c != '_')
                .expect("no label after fn");
            // Consume the generics part of the function signature.
            let mut bracket_counter = 0;
            let mut last_char = None;
            for c in snippet[offset..].chars() {
                match c {
                    '<' => bracket_counter += 1,
                    '>' => bracket_counter -= 1,
                    '(' => {
                        // Stop at the parameter list, but only outside generics
                        // (e.g. not inside `Fn(..)` bounds).
                        if bracket_counter == 0 {
                            break;
                        }
                    }
                    _ => {}
                }
                offset += c.len_utf8();
                last_char = Some(c);
            }
            // Adjust the suggestion span to encompass the function name with its generics.
            let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32));
            // Prepare the new suggested snippet to append the type parameter that triggered
            // the error in the generics of the function signature.
            let mut new_snippet = if last_char == Some('>') {
                // Existing generics: insert before the closing `>`.
                format!("{}, ", &snippet[..(offset - '>'.len_utf8())])
            } else {
                // No generics yet: open a new list.
                format!("{}<", &snippet[..offset])
            };
            new_snippet
                .push_str(&self.span_to_snippet(span).unwrap_or_else(|_| "T".to_string()));
            new_snippet.push('>');
            return Some((sugg_span, new_snippet));
        }
    }
    None
}
/// Lazily loads the external source of `source_file` from disk when it is a
/// real local file; returns whether source is available afterwards
/// (per the `add_external_src` contract — NOTE(review): confirm its return semantics).
pub fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
    source_file.add_external_src(|| match source_file.name {
        FileName::Real(ref name) => {
            // Only names with a local path can be read back from disk.
            if let Some(local_path) = name.local_path() {
                self.file_loader.read_file(local_path).ok()
            } else {
                None
            }
        }
        _ => None,
    })
}
/// Returns whether the file containing the start of `sp` was imported
/// (e.g. from another crate's metadata) rather than loaded locally.
pub fn is_imported(&self, sp: Span) -> bool {
    let idx = self.lookup_source_file_idx(sp.lo());
    self.files()[idx].is_imported()
}
}
/// A set of path-prefix substitutions used to remap file names
/// (e.g. for `--remap-path-prefix`).
#[derive(Clone)]
pub struct FilePathMapping {
    // Ordered `(from, to)` prefix pairs; later entries take precedence
    // (see `map_prefix`).
    mapping: Vec<(PathBuf, PathBuf)>,
}
impl FilePathMapping {
pub fn empty() -> FilePathMapping {
FilePathMapping { mapping: vec![] }
}
pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
FilePathMapping { mapping }
}
/// Applies any path prefix substitution as defined by the mapping.
/// The return value is the remapped path and a boolean indicating whether
/// the path was affected by the mapping.
pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
// NOTE: We are iterating over the mapping entries from last to first
// because entries specified later on the command line should
// take precedence.
for &(ref from, ref to) in self.mapping.iter().rev() {
if let Ok(rest) = path.strip_prefix(from) {
return (to.join(rest), true);
}
}
(path, false)
}
fn map_filename_prefix(&self, file: &FileName) -> (FileName, bool) {
match file {
FileName::Real(realfile) => {
if let RealFileName::LocalPath(local_path) = realfile {
let (mapped_path, mapped) = self.map_prefix(local_path.to_path_buf());
let realfile = if mapped {
RealFileName::Remapped {
local_path: Some(local_path.clone()),
virtual_name: mapped_path,
}
} else {
realfile.clone()
};
(FileName::Real(realfile), mapped)
} else {
unreachable!("attempted to remap an already remapped filename");
}
}
other => (other.clone(), false),
}
}
}
|
lookup_source_file_idx
|
activity.rs
|
//! Types for agents chain activity
use holo_hash::AgentPubKey;
use holo_hash::HeaderHash;
use holochain_serialized_bytes::prelude::*;
use holochain_zome_types::prelude::*;
#[derive(Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize, SerializedBytes)]
/// An agent's chain elements returned from an agent_activity_query
pub struct AgentActivityResponse<T = SignedHeaderHashed> {
    /// The agent this activity is for
    pub agent: AgentPubKey,
    /// Valid headers on this chain.
    pub valid_activity: ChainItems<T>,
    /// Headers that were rejected by the agent activity
    /// authority and therefore invalidate the chain.
    pub rejected_activity: ChainItems<T>,
    /// The status of this chain.
    pub status: ChainStatus,
    /// The highest chain header that has
    /// been observed by this authority.
    pub highest_observed: Option<HighestObserved>,
}
holochain_serial!(AgentActivityResponse<HeaderHash>);
impl<A> AgentActivityResponse<A> {
/// Convert an empty response to a different type.
pub fn from_empty<B>(other: AgentActivityResponse<B>) -> Self {
let convert_activity = |items: &ChainItems<B>| match items {
ChainItems::Full(_) => ChainItems::Full(Vec::with_capacity(0)),
ChainItems::Hashes(_) => ChainItems::Hashes(Vec::with_capacity(0)),
ChainItems::NotRequested => ChainItems::NotRequested,
};
AgentActivityResponse {
agent: other.agent,
valid_activity: convert_activity(&other.valid_activity),
rejected_activity: convert_activity(&other.rejected_activity),
status: ChainStatus::Empty,
highest_observed: other.highest_observed,
}
}
    /// Convert a status-only response to a different type.
    ///
    /// Drops both activity payloads (`NotRequested`).
    ///
    /// NOTE(review): this sets `status: ChainStatus::Empty` instead of
    /// propagating `other.status`, which is surprising for a "status only"
    /// conversion — confirm this is intentional.
    pub fn status_only<B>(other: AgentActivityResponse<B>) -> Self {
        AgentActivityResponse {
            agent: other.agent,
            valid_activity: ChainItems::NotRequested,
            rejected_activity: ChainItems::NotRequested,
            status: ChainStatus::Empty,
            highest_observed: other.highest_observed,
        }
    }
/// Convert an hashes only response to a different type.
pub fn hashes_only<B>(other: AgentActivityResponse<B>) -> Self {
let convert_activity = |items: ChainItems<B>| match items {
ChainItems::Full(_) => ChainItems::Full(Vec::with_capacity(0)),
ChainItems::Hashes(h) => ChainItems::Hashes(h),
ChainItems::NotRequested => ChainItems::NotRequested,
};
AgentActivityResponse {
agent: other.agent,
valid_activity: convert_activity(other.valid_activity),
rejected_activity: convert_activity(other.rejected_activity),
status: other.status,
|
}
}
#[derive(Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize, SerializedBytes)]
/// The type of agent activity returned in this request
/// (defaults to full `SignedHeaderHashed` items).
pub enum ChainItems<T = SignedHeaderHashed> {
    /// The full headers
    Full(Vec<T>),
    /// Just the hashes, paired with the header's sequence number
    Hashes(Vec<(u32, HeaderHash)>),
    /// Activity was not requested
    NotRequested,
}
impl From<AgentActivityResponse<Element>> for holochain_zome_types::query::AgentActivity {
fn from(a: AgentActivityResponse<Element>) -> Self {
let valid_activity = match a.valid_activity {
ChainItems::Full(elements) => elements
.into_iter()
.map(|el| (el.header().header_seq(), el.header_address().clone()))
.collect(),
ChainItems::Hashes(h) => h,
ChainItems::NotRequested => Vec::new(),
};
let rejected_activity = match a.rejected_activity {
ChainItems::Full(elements) => elements
.into_iter()
.map(|el| (el.header().header_seq(), el.header_address().clone()))
.collect(),
ChainItems::Hashes(h) => h,
ChainItems::NotRequested => Vec::new(),
};
Self {
valid_activity,
rejected_activity,
status: a.status,
highest_observed: a.highest_observed,
warrants: Vec::with_capacity(0),
}
}
}
|
highest_observed: other.highest_observed,
}
|
lib.rs
|
// Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Demo [Exonum][exonum] service implementing a simple cryptocurrency.
//! See [the documentation][docs] for a detailed step-by-step guide how to approach this demo,
//! and [the repository README][readme] on how to use, test, and contribute to it.
//!
//! **Note.** The service in this crate is intended for demo purposes only. It is not intended
//! for use in production.
//!
//! [exonum]: https://github.com/exonum/exonum
//! [docs]: https://exonum.com/doc/get-started/create-service
//! [readme]: https://github.com/exonum/cryptocurrency#readme
#![deny(
missing_debug_implementations,
missing_docs,
unsafe_code,
bare_trait_objects
)]
extern crate exonum;
#[macro_use]
extern crate exonum_derive;
#[macro_use]
extern crate failure;
extern crate serde;
#[macro_use]
extern crate serde_derive;
extern crate protobuf;
extern crate serde_json;
pub mod proto;
/// Persistent data.
pub mod schema {
use exonum::{
crypto::PublicKey,
storage::{Fork, MapIndex, Snapshot},
};
use super::proto;
// Declare the data to be stored in the blockchain, namely wallets with balances.
// See [serialization docs][1] for details.
//
// [1]: https://exonum.com/doc/architecture/serialization
/// Wallet struct used to persist data within the service.
#[derive(Serialize, Deserialize, Clone, Debug, ProtobufConvert)]
#[exonum(pb = "proto::Wallet")]
pub struct Wallet {
/// Public key of the wallet owner.
pub pub_key: PublicKey,
/// Name of the wallet owner.
pub name: String,
/// Current balance.
pub balance: u64,
}
/// Additional methods for managing balance of the wallet in an immutable fashion.
impl Wallet {
/// Create new Wallet.
pub fn new(&pub_key: &PublicKey, name: &str, balance: u64) -> Self {
Self {
pub_key,
name: name.to_owned(),
balance,
}
}
/// Returns a copy of this wallet with the balance increased by the specified amount.
pub fn increase(self, amount: u64) -> Self {
let balance = self.balance + amount;
Self::new(&self.pub_key, &self.name, balance)
}
/// Returns a copy of this wallet with the balance decreased by the specified amount.
pub fn decrease(self, amount: u64) -> Self {
debug_assert!(self.balance >= amount);
let balance = self.balance - amount;
Self::new(&self.pub_key, &self.name, balance)
}
}
/// Schema of the key-value storage used by the demo cryptocurrency service.
#[derive(Debug)]
pub struct CurrencySchema<T> {
view: T,
}
/// Declare the layout of data managed by the service. An instance of [`MapIndex`] is used
/// to keep wallets in the storage. Index values are serialized [`Wallet`] structs.
///
/// [`MapIndex`]: https://exonum.com/doc/architecture/storage#mapindex
/// [`Wallet`]: struct.Wallet.html
impl<T: AsRef<dyn Snapshot>> CurrencySchema<T> {
/// Creates a new schema instance.
pub fn new(view: T) -> Self {
CurrencySchema { view }
}
/// Returns an immutable version of the wallets table.
pub fn wallets(&self) -> MapIndex<&dyn Snapshot, PublicKey, Wallet> {
MapIndex::new("cryptocurrency.wallets", self.view.as_ref())
}
/// Gets a specific wallet from the storage.
pub fn wallet(&self, pub_key: &PublicKey) -> Option<Wallet> {
self.wallets().get(pub_key)
}
}
/// A mutable version of the schema with an additional method to persist wallets
/// to the storage.
impl<'a> CurrencySchema<&'a mut Fork> {
/// Returns a mutable version of the wallets table.
pub fn wallets_mut(&mut self) -> MapIndex<&mut Fork, PublicKey, Wallet> {
MapIndex::new("cryptocurrency.wallets", &mut self.view)
}
}
}
/// Transactions.
/// Transactions.
pub mod transactions {
    use super::proto;
    use super::service::SERVICE_ID;

    use exonum::{
        crypto::{PublicKey, SecretKey},
        messages::{Message, RawTransaction, Signed},
    };

    /// Transaction type for creating a new wallet.
    ///
    /// See [the `Transaction` trait implementation](#impl-Transaction) for details how
    /// `TxCreateWallet` transactions are processed.
    #[derive(Serialize, Deserialize, Clone, Debug, ProtobufConvert)]
    #[exonum(pb = "proto::TxCreateWallet")]
    pub struct TxCreateWallet {
        /// UTF-8 string with the owner's name.
        pub name: String,
    }

    /// Transaction type for transferring tokens between two wallets.
    ///
    /// See [the `Transaction` trait implementation](#impl-Transaction) for details how
    /// `TxTransfer` transactions are processed.
    #[derive(Serialize, Deserialize, Clone, Debug, ProtobufConvert)]
    #[exonum(pb = "proto::TxTransfer")]
    pub struct TxTransfer {
        /// Public key of the receiver.
        pub to: PublicKey,
        /// Number of tokens to transfer from sender's account to receiver's account.
        pub amount: u64,
        /// Auxiliary number to guarantee [non-idempotence][idempotence] of transactions.
        ///
        /// [idempotence]: https://en.wikipedia.org/wiki/Idempotence
        pub seed: u64,
    }

    /// Transaction group.
    #[derive(Serialize, Deserialize, Clone, Debug, TransactionSet)]
    pub enum CurrencyTransactions {
        /// Create wallet transaction.
        CreateWallet(TxCreateWallet),
        /// Transfer tokens transaction.
        Transfer(TxTransfer),
    }

    impl TxCreateWallet {
        /// Signs a `TxCreateWallet` payload into a ready-to-submit transaction.
        /// Hidden from docs: convenience helper for tests/examples.
        #[doc(hidden)]
        pub fn sign(name: &str, pk: &PublicKey, sk: &SecretKey) -> Signed<RawTransaction> {
            Message::sign_transaction(
                Self {
                    name: name.to_owned(),
                },
                SERVICE_ID,
                *pk,
                sk,
            )
        }
    }

    impl TxTransfer {
        /// Signs a `TxTransfer` payload into a ready-to-submit transaction.
        /// Hidden from docs: convenience helper for tests/examples.
        #[doc(hidden)]
        pub fn sign(
            to: &PublicKey,
            amount: u64,
            seed: u64,
            pk: &PublicKey,
            sk: &SecretKey,
        ) -> Signed<RawTransaction> {
            Message::sign_transaction(
                Self {
                    to: *to,
                    amount,
                    seed,
                },
                SERVICE_ID,
                *pk,
                sk,
            )
        }
    }
}
/// Contract errors.
/// Contract errors.
pub mod errors {
    // Workaround for `failure` see https://github.com/rust-lang-nursery/failure/issues/223 and
    // ECR-1771 for the details.
    #![allow(bare_trait_objects)]

    use exonum::blockchain::ExecutionError;

    /// Error codes emitted by `TxCreateWallet` and/or `TxTransfer` transactions during execution.
    ///
    /// `#[repr(u8)]` fixes the discriminants used as Exonum error codes (see the
    /// `From<Error> for ExecutionError` impl below).
    #[derive(Debug, Fail)]
    #[repr(u8)]
    pub enum Error {
        /// Wallet already exists.
        ///
        /// Can be emitted by `TxCreateWallet`.
        #[fail(display = "Wallet already exists")]
        WalletAlreadyExists = 0,

        /// Sender doesn't exist.
        ///
        /// Can be emitted by `TxTransfer`.
        #[fail(display = "Sender doesn't exist")]
        SenderNotFound = 1,

        /// Receiver doesn't exist.
        ///
        /// Can be emitted by `TxTransfer`.
        #[fail(display = "Receiver doesn't exist")]
        ReceiverNotFound = 2,

        /// Insufficient currency amount.
        ///
        /// Can be emitted by `TxTransfer`.
        #[fail(display = "Insufficient currency amount")]
        InsufficientCurrencyAmount = 3,

        /// Sender same as receiver.
        ///
        /// Can be emitted by `TxTransfer`.
        #[fail(display = "Sender same as receiver")]
        SenderSameAsReceiver = 4,
    }

    impl From<Error> for ExecutionError {
        // Maps the enum discriminant to the numeric error code and the
        // `Display` text (from `#[fail(display = ...)]`) to the description.
        fn from(value: Error) -> ExecutionError {
            let description = format!("{}", value);
            ExecutionError::with_description(value as u8, description)
        }
    }
}
/// Contracts.
/// Contracts.
pub mod contracts {
    use exonum::blockchain::{ExecutionResult, Transaction, TransactionContext};

    use errors::Error;
    use schema::{CurrencySchema, Wallet};
    use transactions::{TxCreateWallet, TxTransfer};

    /// Initial balance of a newly created wallet.
    const INIT_BALANCE: u64 = 100;

    impl Transaction for TxCreateWallet {
        /// If a wallet with the specified public key is not registered, then creates a new wallet
        /// with the specified public key and name, and an initial balance of 100.
        /// Otherwise, performs no op.
        fn execute(&self, mut context: TransactionContext) -> ExecutionResult {
            let author = context.author();
            let view = context.fork();
            let mut schema = CurrencySchema::new(view);
            if schema.wallet(&author).is_none() {
                let wallet = Wallet::new(&author, &self.name, INIT_BALANCE);
                // Demo-only logging; a real service would use structured logging.
                println!("Create the wallet: {:?}", wallet);
                schema.wallets_mut().put(&author, wallet);
                Ok(())
            } else {
                // `?` converts `Error` into `ExecutionError` via its `From` impl.
                Err(Error::WalletAlreadyExists)?
            }
        }
    }

    impl Transaction for TxTransfer {
        /// Retrieves two wallets to apply the transfer; they should be previously registered
        /// with the help of [`TxCreateWallet`] transactions. Checks the sender's
        /// balance and applies changes to the balances of the wallets if the sender's balance
        /// is sufficient. Otherwise, performs no op.
        ///
        /// [`TxCreateWallet`]: ../transactions/struct.TxCreateWallet.html
        fn execute(&self, mut context: TransactionContext) -> ExecutionResult {
            let author = context.author();
            let view = context.fork();

            // Self-transfers are rejected outright.
            if author == self.to {
                Err(Error::SenderSameAsReceiver)?
            }

            let mut schema = CurrencySchema::new(view);

            let sender = match schema.wallet(&author) {
                Some(val) => val,
                None => Err(Error::SenderNotFound)?,
            };

            let receiver = match schema.wallet(&self.to) {
                Some(val) => val,
                None => Err(Error::ReceiverNotFound)?,
            };

            let amount = self.amount;
            if sender.balance >= amount {
                let sender = sender.decrease(amount);
                let receiver = receiver.increase(amount);
                // Demo-only logging.
                println!("Transfer between wallets: {:?} => {:?}", sender, receiver);
                let mut wallets = schema.wallets_mut();
                wallets.put(&author, sender);
                wallets.put(&self.to, receiver);
                Ok(())
            } else {
                Err(Error::InsufficientCurrencyAmount)?
            }
        }
    }
}
/// REST API.
/// REST API.
pub mod api {
    use exonum::{
        api::{self, ServiceApiBuilder, ServiceApiState},
        crypto::PublicKey,
    };

    use schema::{CurrencySchema, Wallet};

    /// Public service API description.
    #[derive(Debug, Clone)]
    pub struct CryptocurrencyApi;

    /// The structure describes the query parameters for the `get_wallet` endpoint.
    #[derive(Debug, Serialize, Deserialize, Clone, Copy)]
    pub struct WalletQuery {
        /// Public key of the queried wallet.
        pub pub_key: PublicKey,
    }

    impl CryptocurrencyApi {
        /// Endpoint for getting a single wallet.
        ///
        /// Responds with HTTP 404 (`NotFound`) when no wallet exists for the key.
        pub fn get_wallet(state: &ServiceApiState, query: WalletQuery) -> api::Result<Wallet> {
            let snapshot = state.snapshot();
            let schema = CurrencySchema::new(snapshot);
            schema
                .wallet(&query.pub_key)
                .ok_or_else(|| api::Error::NotFound("\"Wallet not found\"".to_owned()))
        }

        /// Endpoint for dumping all wallets from the storage.
        pub fn get_wallets(state: &ServiceApiState, _query: ()) -> api::Result<Vec<Wallet>> {
            let snapshot = state.snapshot();
            let schema = CurrencySchema::new(snapshot);
            let idx = schema.wallets();
            let wallets = idx.values().collect();
            Ok(wallets)
        }

        /// 'ServiceApiBuilder' facilitates conversion between read requests and REST
        /// endpoints.
        pub fn wire(builder: &mut ServiceApiBuilder) {
            // Binds handlers to specific routes.
            builder
                .public_scope()
                .endpoint("v1/wallet", Self::get_wallet)
                .endpoint("v1/wallets", Self::get_wallets);
        }
    }
}
/// Service declaration.
pub mod service {
use exonum::{
api::ServiceApiBuilder,
blockchain::{Service, Transaction, TransactionSet},
crypto::Hash,
messages::RawTransaction,
storage::Snapshot,
};
use api::CryptocurrencyApi;
use transactions::CurrencyTransactions;
/// Service ID for the `Service` trait.
pub const SERVICE_ID: u16 = 1;
/// Demo cryptocurrency service.
///
/// See [the crate documentation](index.html) for context.
///
/// # Public REST API
///
/// In all APIs, the request body (if applicable) and response are JSON-encoded.
///
/// ## Retrieve single wallet
///
/// GET `api/services/cryptocurrency/v1/wallet/?pub_key={hash}`
///
/// Returns information about a wallet with the specified public key (hex-encoded).
/// If a wallet with the specified pubkey is not in the storage, returns a string
/// `"Wallet not found"` with the HTTP 404 status.
///
/// ## Dump wallets
///
/// GET `api/services/cryptocurrency/v1/wallets`
///
/// Returns an array of all wallets in the storage.
///
/// ## Transactions endpoint
///
/// POST `api/explorer/v1/transactions`
///
/// Accepts a [`TxTransfer`] and [`TxCreateWallet`] transaction from an external client.
/// Transaction should be serialized into protobuf binary form and placed into signed
/// transaction message according to specification, endpoint accepts hex of this signed
/// transaction message as an object: `{ "tx_body": <hex> }`.
///
/// Returns the hex-encoded hash of the transaction
/// encumbered in an object: `{ "tx_hash": <hash> }`.
///
/// [`TxCreateWallet`]: ../transactions/struct.TxCreateWallet.html
/// [`TxTransfer`]: ../transactions/struct.TxTransfer.html
#[derive(Debug)]
pub struct CurrencyService;
impl Service for CurrencyService {
fn service_name(&self) -> &'static str {
"cryptocurrency"
}
fn
|
(&self) -> u16 {
SERVICE_ID
}
// Implement a method to deserialize transactions coming to the node.
fn tx_from_raw(&self, raw: RawTransaction) -> Result<Box<dyn Transaction>, failure::Error> {
let tx = CurrencyTransactions::tx_from_raw(raw)?;
Ok(tx.into())
}
// Hashes for the service tables that will be included into the state hash.
// To simplify things, we don't have [Merkelized tables][merkle] in the service storage
// for now, so we return an empty vector.
//
// [merkle]: https://exonum.com/doc/architecture/storage/#merklized-indices
fn state_hash(&self, _: &dyn Snapshot) -> Vec<Hash> {
vec![]
}
// Links the service api implementation to the Exonum.
fn wire_api(&self, builder: &mut ServiceApiBuilder) {
CryptocurrencyApi::wire(builder);
}
}
}
|
service_id
|
main-navigation.ts
|
/** Document shape for the main navigation: a flat list of navigation entries. */
export type MainNavigationDocument = {
  entries: MainNavigationEntry[]
}
|
}
|
type MainNavigationEntry = {
name: string
path: string
|
test_benchmark.py
|
import json
import numpy as np
import pytest
import tensorflow as tf
from google.protobuf import json_format
from seldon_e2e_utils import post_comment_in_pr, run_benchmark_and_capture_results
@pytest.mark.benchmark
@pytest.mark.usefixtures("argo_worfklows")
def test_service_orchestrator():
    """Benchmark REST/gRPC with the orchestrator enabled and disabled and
    assert the latency it adds stays within bounds.

    Side effect: posts a markdown results table as a PR comment.
    """
    sort_by = ["apiType", "disableOrchestrator"]
    data_size = 1_000
    data = [100.0] * data_size
    data_tensor = {"data": {"tensor": {"values": data, "shape": [1, data_size]}}}
    df = run_benchmark_and_capture_results(
        api_type_list=["rest", "grpc"],
        disable_orchestrator_list=["false", "true"],
        image_list=["seldonio/seldontest_predict:1.10.0-dev"],
        benchmark_data=data_tensor,
    )
    df = df.sort_values(sort_by)
    result_body = "# Benchmark results - Testing Service Orchestrator\n\n"
    # Mean latency added by the orchestrator must stay under 3ms.
    # FIX: the reported message previously claimed "under 4ms" while the
    # check has always been `< 3`; the message now matches the check.
    orch_mean = all(
        (
            df[df["disableOrchestrator"] == "false"]["mean"].values
            - df[df["disableOrchestrator"] == "true"]["mean"].values
        )
        < 3
    )
    result_body += f"* Orch added mean latency under 3ms: {orch_mean}\n"
    # 95th-percentile latency added by the orchestrator must stay under 5ms.
    # FIX: this result was previously stored in the same variable as the
    # 99th-percentile check (`orch_nth`), so it was reported but never asserted.
    orch_95th = all(
        (
            df[df["disableOrchestrator"] == "false"]["95th"].values
            - df[df["disableOrchestrator"] == "true"]["95th"].values
        )
        < 5
    )
    result_body += f"* Orch added 95th latency under 5ms: {orch_95th}\n"
    # 99th-percentile latency added by the orchestrator must stay under 10ms.
    orch_99th = all(
        (
            df[df["disableOrchestrator"] == "false"]["99th"].values
            - df[df["disableOrchestrator"] == "true"]["99th"].values
        )
        < 10
    )
    result_body += f"* Orch added 99th latency under 10ms: {orch_99th}\n"
    # We have to set no errors to 1 as the tools for some reason have 1 as base
    no_err = all(df["errors"] <= 1)
    result_body += f"* No errors: {no_err}\n"
    result_body += "\n### Results table\n\n"
    result_body += str(df.to_markdown())
    post_comment_in_pr(result_body)
    assert orch_mean
    assert orch_95th
    assert orch_99th
@pytest.mark.benchmark
@pytest.mark.usefixtures("argo_worfklows")
def test_workers_performance():
    """Sweep server worker counts and client concurrency over REST and gRPC
    and publish the raw results table as a PR comment (no assertions)."""
    ordering = ["apiType", "serverWorkers"]
    n_values = 10
    tensor_payload = {
        "data": {"tensor": {"values": [100.0] * n_values, "shape": [1, n_values]}}
    }
    results = run_benchmark_and_capture_results(
        api_type_list=["grpc", "rest"],
        server_workers_list=["1", "5", "10"],
        benchmark_concurrency_list=["10", "100", "1000"],
        parallelism="1",
        requests_cpu_list=["4000Mi"],
        limits_cpu_list=["4000Mi"],
        image_list=["seldonio/seldontest_predict:1.10.0-dev"],
        benchmark_data=tensor_payload,
    ).sort_values(ordering)
    report = "".join(
        [
            "# Benchmark results - Testing Workers Performance\n\n",
            "\n### Results table\n\n",
            str(results.to_markdown()),
        ]
    )
    post_comment_in_pr(report)
@pytest.mark.benchmark
@pytest.mark.usefixtures("argo_worfklows")
def test_python_wrapper_v1_vs_v2_iris():
|
@pytest.mark.benchmark
@pytest.mark.usefixtures("argo_worfklows")
def test_v1_seldon_data_types():
    """Benchmark the three Seldon V1 payload encodings (ndarray, tensor,
    tftensor) over REST and gRPC and post the result tables as a PR comment."""
    order_cols = ["concurrency", "apiType"]
    # 10000 element array
    n_elems = 10_000
    values = [100.0] * n_elems
    concurrency_levels = ["1", "50", "150"]
    images = ["seldonio/seldontest_predict:1.10.0-dev"]

    payload_ndarray = {"data": {"ndarray": values}}
    payload_tensor = {"data": {"tensor": {"values": values, "shape": [1, n_elems]}}}
    # Round-trip the values through a TF tensor proto to build the tftensor payload.
    proto = tf.make_tensor_proto(np.array(values))
    payload_tftensor = {
        "data": {"tftensor": json.loads(json_format.MessageToJson(proto))}
    }

    def _bench(payload):
        # One REST+gRPC benchmark sweep for the given payload encoding.
        frame = run_benchmark_and_capture_results(
            api_type_list=["rest", "grpc"],
            image_list=images,
            benchmark_concurrency_list=concurrency_levels,
            benchmark_data=payload,
        )
        return frame.sort_values(order_cols)

    df_ndarray = _bench(payload_ndarray)
    df_tensor = _bench(payload_tensor)
    df_tftensor = _bench(payload_tftensor)

    report = (
        "# Benchmark results - Testing Seldon V1 Data Types\n\n"
        + "\n### Results for NDArray\n\n"
        + str(df_ndarray.to_markdown())
        + "\n### Results for Tensor\n\n"
        + str(df_tensor.to_markdown())
        + "\n### Results for TFTensor\n\n"
        + str(df_tftensor.to_markdown())
    )
    post_comment_in_pr(report)
|
sort_by = ["concurrency", "apiType"]
benchmark_concurrency_list = ["1", "50", "150"]
result_body = ""
result_body += "\n# Benchmark Results - Python Wrapper V1 vs V2\n\n"
# Using single worker as fastapi also uses single worker
df_pywrapper = run_benchmark_and_capture_results(
api_type_list=["rest", "grpc"],
protocol="seldon",
server_list=["SKLEARN_SERVER"],
benchmark_concurrency_list=benchmark_concurrency_list,
model_uri_list=["gs://seldon-models/v1.12.0-dev/sklearn/iris"],
benchmark_data={"data": {"ndarray": [[1, 2, 3, 4]]}},
)
df_pywrapper = df_pywrapper.sort_values(sort_by)
conc_idx = df_pywrapper["concurrency"] == 1
# Python V1 Wrapper Validations
# Ensure base mean performance latency below 10 ms
v1_latency_mean = all((df_pywrapper[conc_idx]["mean"] < 10))
result_body += f"* V1 base mean performance latency under 10ms: {v1_latency_mean}\n"
# Ensure 99th percentiles are not spiking above 15ms
v1_latency_nth = all(df_pywrapper[conc_idx]["99th"] < 10)
result_body += f"* V1 base 99th performance latenc under 10ms: {v1_latency_nth}\n"
# Ensure throughput is above 180 rps for REST
v1_rps_rest = all(
df_pywrapper[(df_pywrapper["apiType"] == "rest") & conc_idx][
"throughputAchieved"
]
> 180
)
result_body += f"* V1 base throughput above 180rps: {v1_rps_rest}\n"
# Ensure throughput is above 250 rps for GRPC
v1_rps_grpc = all(
df_pywrapper[(df_pywrapper["apiType"] == "grpc") & conc_idx][
"throughputAchieved"
]
> 250
)
result_body += f"* V1 base throughput above 250rps: {v1_rps_grpc}\n"
# Validate latenc added by adding service orchestrator is lower than 4ms
# TODO: Validate equivallent of parallel workers in MLServer
df_mlserver = run_benchmark_and_capture_results(
api_type_list=["rest", "grpc"],
model_name="classifier",
protocol="kfserving",
server_list=["SKLEARN_SERVER"],
model_uri_list=["gs://seldon-models/sklearn/iris-0.23.2/lr_model"],
benchmark_concurrency_list=benchmark_concurrency_list,
benchmark_data={
"inputs": [
{
"name": "predict",
"datatype": "FP32",
"shape": [1, 4],
"data": [[1, 2, 3, 4]],
}
]
},
benchmark_grpc_data_override={
"model_name": "classifier",
"inputs": [
{
"name": "predict",
"datatype": "FP32",
"shape": [1, 4],
"contents": {"fp32_contents": [1, 2, 3, 4]},
}
],
},
)
# First we sort the dataframes to ensure they are compared correctly
df_mlserver = df_mlserver.sort_values(sort_by)
# Python V1 Wrapper Validations
conc_idx = df_mlserver["concurrency"] == 1
# Ensure all mean performance latency below 5 ms
v2_latency_mean = all(df_mlserver[conc_idx]["mean"] < 5)
result_body += f"* V2 mean performance latency under 5ms: {v2_latency_mean}\n"
# Ensure 99th percentiles are not spiking above 15ms
v2_latency_nth = all(df_mlserver[conc_idx]["99th"] < 10)
result_body += f"* V2 99th performance latenc under 10ms: {v2_latency_nth}\n"
# Ensure throughput is above 180 rps for REST
v2_rps_rest = all(
df_mlserver[(df_mlserver["apiType"] == "rest") & conc_idx]["throughputAchieved"]
> 250
)
result_body += f"* V2 REST throughput above 250rps: {v2_rps_rest}\n"
# Ensure throughput is above 250 rps for GRPC
v2_rps_grpc = all(
df_mlserver[(df_mlserver["apiType"] == "grpc") & conc_idx]["throughputAchieved"]
> 250
)
result_body += f"* V2 throughput above 300rps: {v2_rps_grpc}\n"
result_body += "\n### Python V1 Wrapper Results table\n\n"
result_body += str(df_pywrapper.to_markdown())
result_body += "\n\n\n### Python V2 MLServer Results table\n\n"
result_body += str(df_mlserver.to_markdown())
post_comment_in_pr(result_body)
assert v1_latency_mean
assert v1_latency_nth
assert v1_rps_rest
assert v1_rps_grpc
assert v2_latency_mean
assert v2_latency_nth
assert v2_rps_rest
assert v2_rps_grpc
|
echarts.js
|
var define,require,esl;!function(e){function t(e){m(e,J)||(O[e]=1)}function i(e,t){function i(e){0===e.indexOf(".")&&a.push(e)}var a=[];if("string"==typeof e?i(e):C(e,function(e){i(e)}),a.length>0)throw new Error("[REQUIRE_FATAL]Relative ID is not allowed in global require: "+a.join(", "));var o=N.waitSeconds;return o&&e instanceof Array&&(E&&clearTimeout(E),E=setTimeout(n,1e3*o)),D(e,t)}function n(){function e(r,s){if(!o[r]&&!m(r,J)){o[r]=1,m(r,F)||n[r]||(n[r]=1,t.push(r));var l=z[r];l?s&&(n[r]||(n[r]=1,t.push(r)),C(l.depMs,function(t){e(t.absId,t.hard)})):a[r]||(a[r]=1,i.push(r))}}var t=[],i=[],n={},a={},o={};for(var r in O)e(r,1);if(t.length||i.length)throw new Error("[MODULE_TIMEOUT]Hang( "+(t.join(", ")||"none")+" ) Miss( "+(i.join(", ")||"none")+" )")}function a(e){C(B,function(t){s(e,t.deps,t.factory)}),B.length=0}function o(e,t,i){if(null==i&&(null==t?(i=e,e=null):(i=t,t=null,e instanceof Array&&(t=e,e=null))),null!=i){var n=window.opera;if(!e&&document.attachEvent&&(!n||"[object Opera]"!==n.toString())){var a=I();e=a&&a.getAttribute("data-require-id")}e?s(e,t,i):B[0]={deps:t,factory:i}}}function r(){var e=N.config[this.id];return e&&"object"==typeof e?e:{}}function s(e,t,i){z[e]||(z[e]={id:e,depsDec:t,deps:t||["require","exports","module"],factoryDeps:[],factory:i,exports:{},config:r,state:A,require:v(e),depMs:[],depMkv:{},depRs:[]})}function l(e){var t=z[e];if(t&&!m(e,M)){var i=t.deps,n=t.factory,a=0;"function"==typeof n&&(a=Math.min(n.length,i.length),!t.depsDec&&n.toString().replace(/(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/gm,"").replace(/require\(\s*(['"'])([^'"]+)\1\s*\)/g,function(e,t,n){i.push(n)}));var o=[],r=[];C(i,function(i,n){var 
s,l,h=W(i),d=L(h.mod,e);d&&!P[d]?(h.res&&(l={id:i,mod:d,res:h.res},r.push(i),t.depRs.push(l)),s=t.depMkv[d],s||(s={id:h.mod,absId:d,hard:a>n},t.depMs.push(s),t.depMkv[d]=s,o.push(d))):s={absId:d},a>n&&t.factoryDeps.push(l||s)}),t.state=M,c(e),g(o),r.length&&t.require(r,function(){C(t.depRs,function(t){t.absId||(t.absId=L(t.id,e))}),h()})}}function h(){for(var e in O)l(e),d(e),p(e)}function d(e){function t(e){if(l(e),!m(e,M))return!1;if(m(e,F)||i[e])return!0;i[e]=1;var n=z[e],a=!0;return C(n.depMs,function(e){return a=t(e.absId)}),a&&C(n.depRs,function(e){return a=!!e.absId}),a&&(n.state=F),a}var i={};t(e)}function c(t){function i(){if(!n&&a.state===F){n=1;var i=1;if(C(a.factoryDeps,function(e){var t=e.absId;return P[t]?void 0:(p(t),i=m(t,J))}),i){try{var o=a.factory,r="function"==typeof o?o.apply(e,u(a.factoryDeps,{require:a.require,exports:a.exports,module:a})):o;null!=r&&(a.exports=r),a.invokeFactory=null}catch(s){if(/^\[MODULE_MISS\]"([^"]+)/.test(s.message)){var l=a.depMkv[RegExp.$1];return l&&(l.hard=1),void(n=0)}throw s}U(t)}}}var n,a=z[t];a.invokeFactory=i}function m(e,t){return z[e]&&z[e].state>=t}function p(e){var t=z[e];t&&t.invokeFactory&&t.invokeFactory()}function u(e,t){var i=[];return C(e,function(e,n){"object"==typeof e&&(e=e.absId),i[n]=t[e]||z[e].exports}),i}function V(e,t){if(m(e,J))return void t();var i=H[e];i||(i=H[e]=[]),i.push(t)}function U(e){var t=z[e];t.state=J,delete O[e];for(var i=H[e]||[],n=i.length;n--;)i[n]();i.length=0,H[e]=null}function g(t,i,n){function a(){if("function"==typeof i&&!o){var n=1;C(t,function(e){return P[e]?void 0:n=!!m(e,J)}),n&&(o=1,i.apply(e,u(t,P)))}}var o=0;C(t,function(e){P[e]||m(e,J)||(V(e,a),(e.indexOf("!")>0?y:f)(e,n))}),a()}function f(t){function i(){var e=Q[t];S(e||t,n)}function n(){if(r){var i;"function"==typeof r.init&&(i=r.init.apply(e,u(s,P))),null==i&&r.exports&&(i=e,C(r.exports.split("."),function(e){return i=i[e],!!i})),o(t,s,i||{})}else a(t);h()}if(!R[t]&&!z[t]){R[t]=1;var r=N.shim[t];r instanceof 
Array&&(N.shim[t]=r={deps:r});var s=r&&(r.deps||[]);s?(C(s,function(e){N.shim[e]||(N.shim[e]={})}),D(s,i)):i()}}function y(e,t){function i(t){l.exports=t||!0,U(e)}function n(n){var a=t?z[t].require:D;n.load(s.res,a,i,r.call({id:e}))}if(!z[e]){var o=Q[e];if(o)return void f(o);var s=W(e),l={id:e,state:M};z[e]=l,i.fromText=function(e,t){new Function(t)(),a(e)},n(D(s.mod))}}function b(e,t){var i=X(e,1,t);return i.sort(T),i}function _(){function e(e){Q[e]=t}N.baseUrl=N.baseUrl.replace(/\/$/,"")+"/",G=b(N.paths),Z=b(N.map,1),C(Z,function(e){e.v=b(e.v)}),Y=[],C(N.packages,function(e){var t=e;"string"==typeof e&&(t={name:e.split("/")[0],location:e,main:"main"}),t.location=t.location||t.name,t.main=(t.main||"main").replace(/\.js$/i,""),t.reg=K(t.name),Y.push(t)}),Y.sort(T),q=b(N.urlArgs,1),Q={};for(var t in N.bundles)C(N.bundles[t],e)}function x(e,t,i){C(t,function(t){return t.reg.test(e)?(i(t.v,t.k,t),!1):void 0})}function k(e){var t=/(\.[a-z0-9]+)$/i,i=/(\?[^#]*)$/,n="",a=e,o="";i.test(e)&&(o=RegExp.$1,e=e.replace(i,"")),t.test(e)&&(n=RegExp.$1,a=e.replace(t,""));var r,s=a;return x(a,G,function(e,t){s=s.replace(t,e),r=1}),r||x(a,Y,function(e,t,i){s=s.replace(i.name,i.location)}),/^([a-z]{2,10}:\/)?\//i.test(s)||(s=N.baseUrl+s),s+=n+o,x(a,q,function(e){s+=(s.indexOf("?")>0?"&":"?")+e}),s}function v(e){function i(i,a){if("string"==typeof i){if(!n[i]){var o=L(i,e);if(p(o),!m(o,J))throw new Error('[MODULE_MISS]"'+o+'" is not exists!');n[i]=z[o].exports}return n[i]}if(i instanceof Array){var r=[],s=[];C(i,function(i,n){var a=W(i),o=L(a.mod,e),l=a.res,h=o;if(l){var d=o+"!"+l;0!==l.indexOf(".")&&Q[d]?o=h=d:h=null}s[n]=h,t(o),r.push(o)}),g(r,function(){C(s,function(n,a){null==n&&(n=s[a]=L(i[a],e),t(n))}),g(s,a,e),h()},e),h()}}var n={};return i.toUrl=function(t){return k(L(t,e))},i}function L(e,t){if(!e)return"";t=t||"";var i=W(e);if(!i)return e;var n=i.res,a=w(i.mod,t);if(C(Y,function(e){var t=e.name;return t===a?(a=t+"/"+e.main,!1):void 
0}),x(t,Z,function(e){x(a,e,function(e,t){a=a.replace(t,e)})}),n){var o=m(a,J)&&D(a);n=o&&o.normalize?o.normalize(n,function(e){return L(e,t)}):L(n,t),a+="!"+n}return a}function w(e,t){if(0===e.indexOf(".")){var i=t.split("/"),n=e.split("/"),a=i.length-1,o=n.length,r=0,s=0;e:for(var l=0;o>l;l++)switch(n[l]){case"..":if(!(a>r))break e;r++,s++;break;case".":s++;break;default:break e}return i.length=a-r,n=n.slice(s),i.concat(n).join("/")}return e}function W(e){var t=e.split("!");return t[0]?{mod:t[0],res:t[1]}:void 0}function X(e,t,i){var n=[];for(var a in e)if(e.hasOwnProperty(a)){var o={k:a,v:e[a]};n.push(o),t&&(o.reg="*"===a&&i?/^/:K(a))}return n}function I(){if(j)return j;if($&&"interactive"===$.readyState)return $;for(var e=document.getElementsByTagName("script"),t=e.length;t--;){var i=e[t];if("interactive"===i.readyState)return $=i,i}}function S(e,t){function i(){var e=n.readyState;("undefined"==typeof e||/^(loaded|complete)$/.test(e))&&(n.onload=n.onreadystatechange=null,n=null,t())}var n=document.createElement("script");n.setAttribute("data-require-id",e),n.src=k(e+".js"),n.async=!0,n.readyState?n.onreadystatechange=i:n.onload=i,j=n,te?ee.insertBefore(n,te):ee.appendChild(n),j=null}function K(e){return new RegExp("^"+e+"(/|$)")}function C(e,t){if(e instanceof Array)for(var i=0,n=e.length;n>i&&t(e[i],i)!==!1;i++);}function T(e,t){var i=e.k||e.name,n=t.k||t.name;return"*"===n?-1:"*"===i?1:n.length-i.length}var E,z={},A=1,M=2,F=3,J=4,O={},P={require:i,exports:1,module:1},D=v(),N={baseUrl:"./",paths:{},config:{},map:{},packages:[],shim:{},waitSeconds:0,bundles:{},urlArgs:{}};i.version="2.0.2",i.loader="esl",i.toUrl=D.toUrl;var B=[];o.amd={};var H={},R={};i.config=function(e){if(e){for(var t in N){var i=e[t],n=N[t];if(i)if("urlArgs"===t&&"string"==typeof i)N.urlArgs["*"]=i;else if(n instanceof Array)n.push.apply(n,i);else if("object"==typeof n)for(var a in i)n[a]=i[a];else N[t]=i}_()}},_();var 
G,Y,Z,Q,q,j,$,ee=document.getElementsByTagName("head")[0],te=document.getElementsByTagName("base")[0];te&&(ee=te.parentNode),define||(define=o,require||(require=i),esl=i)}(this),define("echarts",["echarts/echarts"],function(e){return e}),define("echarts/echarts",["require","./config","zrender/tool/util","zrender/tool/event","zrender/tool/env","zrender","zrender/config","./chart/island","./component/toolbox","./component","./component/title","./component/tooltip","./component/legend","./util/ecData","./chart","zrender/tool/color","./component/timeline","zrender/shape/Image","zrender/loadingEffect/Bar","zrender/loadingEffect/Bubble","zrender/loadingEffect/DynamicLine","zrender/loadingEffect/Ring","zrender/loadingEffect/Spin","zrender/loadingEffect/Whirling","./theme/macarons","./theme/infographic"],function(e){function t(){r.Dispatcher.call(this)}function i(e){e.innerHTML="",this._themeConfig={},this.dom=e,this._connected=!1,this._status={dragIn:!1,dragOut:!1,needRefresh:!1},this._curEventType=!1,this._chartList=[],this._messageCenter=new t,this._messageCenterOutSide=new t,this.resize=this.resize(),this._init()}function n(e,t,i,n,a){for(var o=e._chartList,r=o.length;r--;){var s=o[r];"function"==typeof s[t]&&s[t](i,n,a)}}var a=e("./config"),o=e("zrender/tool/util"),r=e("zrender/tool/event"),s={},l=e("zrender/tool/env").canvasSupported,h=new Date-0,d={},c="_echarts_instance_";s.version="2.2.7",s.dependencies={zrender:"2.1.1"},s.init=function(t,n){var a=e("zrender");a.version.replace(".","")-0<s.dependencies.zrender.replace(".","")-0&&console.error("ZRender "+a.version+" is too old for ECharts "+s.version+". 
Current version need ZRender "+s.dependencies.zrender+"+"),t=t instanceof Array?t[0]:t;var o=t.getAttribute(c);return o||(o=h++,t.setAttribute(c,o)),d[o]&&d[o].dispose(),d[o]=new i(t),d[o].id=o,d[o].canvasSupported=l,d[o].setTheme(n),d[o]},s.getInstanceById=function(e){return d[e]},o.merge(t.prototype,r.Dispatcher.prototype,!0);var m=e("zrender/config").EVENT,p=["CLICK","DBLCLICK","MOUSEOVER","MOUSEOUT","DRAGSTART","DRAGEND","DRAGENTER","DRAGOVER","DRAGLEAVE","DROP"];return i.prototype={_init:function(){var t=this,i=e("zrender").init(this.dom);this._zr=i,this._messageCenter.dispatch=function(e,i,n,a){n=n||{},n.type=e,n.event=i,t._messageCenter.dispatchWithContext(e,n,a),t._messageCenterOutSide.dispatchWithContext(e,n,a)},this._onevent=function(e){return t.__onevent(e)};for(var n in a.EVENT)"CLICK"!=n&&"DBLCLICK"!=n&&"HOVER"!=n&&"MOUSEOUT"!=n&&"MAP_ROAM"!=n&&this._messageCenter.bind(a.EVENT[n],this._onevent,this);var o={};this._onzrevent=function(e){return t[o[e.type]](e)};for(var r=0,s=p.length;s>r;r++){var l=p[r],h=m[l];o[h]="_on"+l.toLowerCase(),i.on(h,this._onzrevent)}this.chart={},this.component={};var d=e("./chart/island");this._island=new d(this._themeConfig,this._messageCenter,i,{},this),this.chart.island=this._island;var c=e("./component/toolbox");this._toolbox=new c(this._themeConfig,this._messageCenter,i,{},this),this.component.toolbox=this._toolbox;var u=e("./component");u.define("title",e("./component/title")),u.define("tooltip",e("./component/tooltip")),u.define("legend",e("./component/legend")),(0===i.getWidth()||0===i.getHeight())&&console.error("Dom鈥檚 width & height should be ready before init.")},__onevent:function(e){e.__echartsId=e.__echartsId||this.id;var t=e.__echartsId===this.id;switch(this._curEventType||(this._curEventType=e.type),e.type){case a.EVENT.LEGEND_SELECTED:this._onlegendSelected(e);break;case a.EVENT.DATA_ZOOM:if(!t){var 
i=this.component.dataZoom;i&&(i.silence(!0),i.absoluteZoom(e.zoom),i.silence(!1))}this._ondataZoom(e);break;case a.EVENT.DATA_RANGE:t&&this._ondataRange(e);break;case a.EVENT.MAGIC_TYPE_CHANGED:if(!t){var n=this.component.toolbox;n&&(n.silence(!0),n.setMagicType(e.magicType),n.silence(!1))}this._onmagicTypeChanged(e);break;case a.EVENT.DATA_VIEW_CHANGED:t&&this._ondataViewChanged(e);break;case a.EVENT.TOOLTIP_HOVER:t&&this._tooltipHover(e);break;case a.EVENT.RESTORE:this._onrestore();break;case a.EVENT.REFRESH:t&&this._onrefresh(e);break;case a.EVENT.TOOLTIP_IN_GRID:case a.EVENT.TOOLTIP_OUT_GRID:if(t){if(this._connected){var o=this.component.grid;o&&(e.x=(e.event.zrenderX-o.getX())/o.getWidth(),e.y=(e.event.zrenderY-o.getY())/o.getHeight())}}else{var o=this.component.grid;o&&this._zr.trigger("mousemove",{connectTrigger:!0,zrenderX:o.getX()+e.x*o.getWidth(),zrenderY:o.getY()+e.y*o.getHeight()})}}if(this._connected&&t&&this._curEventType===e.type){for(var r in this._connected)this._connected[r].connectedEventHandler(e);this._curEventType=null}(!t||!this._connected&&t)&&(this._curEventType=null)},_onclick:function(e){if(n(this,"onclick",e),e.target){var t=this._eventPackage(e.target);t&&null!=t.seriesIndex&&this._messageCenter.dispatch(a.EVENT.CLICK,e.event,t,this)}},_ondblclick:function(e){if(n(this,"ondblclick",e),e.target){var t=this._eventPackage(e.target);t&&null!=t.seriesIndex&&this._messageCenter.dispatch(a.EVENT.DBLCLICK,e.event,t,this)}},_onmouseover:function(e){if(e.target){var t=this._eventPackage(e.target);t&&null!=t.seriesIndex&&this._messageCenter.dispatch(a.EVENT.HOVER,e.event,t,this)}},_onmouseout:function(e){if(e.target){var 
t=this._eventPackage(e.target);t&&null!=t.seriesIndex&&this._messageCenter.dispatch(a.EVENT.MOUSEOUT,e.event,t,this)}},_ondragstart:function(e){this._status={dragIn:!1,dragOut:!1,needRefresh:!1},n(this,"ondragstart",e)},_ondragenter:function(e){n(this,"ondragenter",e)},_ondragover:function(e){n(this,"ondragover",e)},_ondragleave:function(e){n(this,"ondragleave",e)},_ondrop:function(e){n(this,"ondrop",e,this._status),this._island.ondrop(e,this._status)},_ondragend:function(e){if(n(this,"ondragend",e,this._status),this._timeline&&this._timeline.ondragend(e,this._status),this._island.ondragend(e,this._status),this._status.needRefresh){this._syncBackupData(this._option);var t=this._messageCenter;t.dispatch(a.EVENT.DATA_CHANGED,e.event,this._eventPackage(e.target),this),t.dispatch(a.EVENT.REFRESH,null,null,this)}},_onlegendSelected:function(e){this._status.needRefresh=!1,n(this,"onlegendSelected",e,this._status),this._status.needRefresh&&this._messageCenter.dispatch(a.EVENT.REFRESH,null,null,this)},_ondataZoom:function(e){this._status.needRefresh=!1,n(this,"ondataZoom",e,this._status),this._status.needRefresh&&this._messageCenter.dispatch(a.EVENT.REFRESH,null,null,this)},_ondataRange:function(e){this._clearEffect(),this._status.needRefresh=!1,n(this,"ondataRange",e,this._status),this._status.needRefresh&&this._zr.refreshNextFrame()},_onmagicTypeChanged:function(){this._clearEffect(),this._render(this._toolbox.getMagicOption())},_ondataViewChanged:function(e){this._syncBackupData(e.option),this._messageCenter.dispatch(a.EVENT.DATA_CHANGED,null,e,this),this._messageCenter.dispatch(a.EVENT.REFRESH,null,null,this)},_tooltipHover:function(e){var t=[];n(this,"ontooltipHover",e,t)},_onrestore:function(){this.restore()},_onrefresh:function(e){this._refreshInside=!0,this.refresh(e),this._refreshInside=!1},_syncBackupData:function(e){this.component.dataZoom&&this.component.dataZoom.syncBackupData(e)},_eventPackage:function(t){if(t){var 
i=e("./util/ecData"),n=i.get(t,"seriesIndex"),a=i.get(t,"dataIndex");return a=-1!=n&&this.component.dataZoom?this.component.dataZoom.getRealDataIndex(n,a):a,{seriesIndex:n,seriesName:(i.get(t,"series")||{}).name,dataIndex:a,data:i.get(t,"data"),name:i.get(t,"name"),value:i.get(t,"value"),special:i.get(t,"special")}}},_noDataCheck:function(e){for(var t=e.series,i=0,n=t.length;n>i;i++)if(t[i].type==a.CHART_TYPE_MAP||t[i].data&&t[i].data.length>0||t[i].markPoint&&t[i].markPoint.data&&t[i].markPoint.data.length>0||t[i].markLine&&t[i].markLine.data&&t[i].markLine.data.length>0||t[i].nodes&&t[i].nodes.length>0||t[i].links&&t[i].links.length>0||t[i].matrix&&t[i].matrix.length>0||t[i].eventList&&t[i].eventList.length>0)return!1;var o=this._option&&this._option.noDataLoadingOption||this._themeConfig.noDataLoadingOption||a.noDataLoadingOption||{text:this._option&&this._option.noDataText||this._themeConfig.noDataText||a.noDataText,effect:this._option&&this._option.noDataEffect||this._themeConfig.noDataEffect||a.noDataEffect};return this.clear(),this.showLoading(o),!0},_render:function(t){if(this._mergeGlobalConifg(t),!this._noDataCheck(t)){var i=t.backgroundColor;if(i)if(l||-1==i.indexOf("rgba"))this.dom.style.backgroundColor=i;else{var n=i.split(",");this.dom.style.filter="alpha(opacity="+100*n[3].substring(0,n[3].lastIndexOf(")"))+")",n.length=3,n[0]=n[0].replace("a",""),this.dom.style.backgroundColor=n.join(",")+")"}this._zr.clearAnimation(),this._chartList=[];var o=e("./chart"),r=e("./component");(t.xAxis||t.yAxis)&&(t.grid=t.grid||{},t.dataZoom=t.dataZoom||{});for(var s,h,d,c=["title","legend","tooltip","dataRange","roamController","grid","dataZoom","xAxis","yAxis","polar"],m=0,p=c.length;p>m;m++)h=c[m],d=this.component[h],t[h]?(d?d.refresh&&d.refresh(t):(s=r.get(/^[xy]Axis$/.test(h)?"axis":h),d=new s(this._themeConfig,this._messageCenter,this._zr,t,this,h),this.component[h]=d),this._chartList.push(d)):d&&(d.dispose(),this.component[h]=null,delete 
this.component[h]);for(var u,V,U,g={},m=0,p=t.series.length;p>m;m++)V=t.series[m].type,V?g[V]||(g[V]=!0,u=o.get(V),u?(this.chart[V]?(U=this.chart[V],U.refresh(t)):U=new u(this._themeConfig,this._messageCenter,this._zr,t,this),this._chartList.push(U),this.chart[V]=U):console.error(V+" has not been required.")):console.error("series["+m+"] chart type has not been defined.");for(V in this.chart)V==a.CHART_TYPE_ISLAND||g[V]||(this.chart[V].dispose(),this.chart[V]=null,delete this.chart[V]);this.component.grid&&this.component.grid.refixAxisShape(this.component),this._island.refresh(t),this._toolbox.refresh(t),t.animation&&!t.renderAsImage?this._zr.refresh():this._zr.render();var f="IMG"+this.id,y=document.getElementById(f);t.renderAsImage&&l?(y?y.src=this.getDataURL(t.renderAsImage):(y=this.getImage(t.renderAsImage),y.id=f,y.style.position="absolute",y.style.left=0,y.style.top=0,this.dom.firstChild.appendChild(y)),this.un(),this._zr.un(),this._disposeChartList(),this._zr.clear()):y&&y.parentNode.removeChild(y),y=null,this._option=t}},restore:function(){this._clearEffect(),this._option=o.clone(this._optionRestore),this._disposeChartList(),this._island.clear(),this._toolbox.reset(this._option,!0),this._render(this._option)},refresh:function(e){this._clearEffect(),e=e||{};var t=e.option;!this._refreshInside&&t&&(t=this.getOption(),o.merge(t,e.option,!0),o.merge(this._optionRestore,e.option,!0),this._toolbox.reset(t)),this._island.refresh(t),this._toolbox.refresh(t),this._zr.clearAnimation();for(var i=0,n=this._chartList.length;n>i;i++)this._chartList[i].refresh&&this._chartList[i].refresh(t);this.component.grid&&this.component.grid.refixAxisShape(this.component),this._zr.refresh()},_disposeChartList:function(){this._clearEffect(),this._zr.clearAnimation();for(var e=this._chartList.length;e--;){var t=this._chartList[e];if(t){var i=t.type;this.chart[i]&&delete this.chart[i],this.component[i]&&delete 
this.component[i],t.dispose&&t.dispose()}}this._chartList=[]},_mergeGlobalConifg:function(t){for(var i=["backgroundColor","calculable","calculableColor","calculableHolderColor","nameConnector","valueConnector","animation","animationThreshold","animationDuration","animationDurationUpdate","animationEasing","addDataAnimation","symbolList","DRAG_ENABLE_TIME"],n=i.length;n--;){var o=i[n];null==t[o]&&(t[o]=null!=this._themeConfig[o]?this._themeConfig[o]:a[o])}var r=t.color;r&&r.length||(r=this._themeConfig.color||a.color),this._zr.getColor=function(t){var i=e("zrender/tool/color");return i.getColor(t,r)},l||(t.animation=!1,t.addDataAnimation=!1)},setOption:function(e,t){return e.timeline?this._setTimelineOption(e):this._setOption(e,t)},_setOption:function(e,t,i){return!t&&this._option?this._option=o.merge(this.getOption(),o.clone(e),!0):(this._option=o.clone(e),!i&&this._timeline&&this._timeline.dispose()),this._optionRestore=o.clone(this._option),this._option.series&&0!==this._option.series.length?(this.component.dataZoom&&(this._option.dataZoom||this._option.toolbox&&this._option.toolbox.feature&&this._option.toolbox.feature.dataZoom&&this._option.toolbox.feature.dataZoom.show)&&this.component.dataZoom.syncOption(this._option),this._toolbox.reset(this._option),this._render(this._option),this):void this._zr.clear()},getOption:function(){function e(e){var n=i._optionRestore[e];if(n)if(n instanceof Array)for(var a=n.length;a--;)t[e][a].data=o.clone(n[a].data);else t[e].data=o.clone(n.data)}var t=o.clone(this._option),i=this;return e("xAxis"),e("yAxis"),e("series"),t},setSeries:function(e,t){return t?(this._option.series=e,this.setOption(this._option,t)):this.setOption({series:e}),this},getSeries:function(){return this.getOption().series},_setTimelineOption:function(t){this._timeline&&this._timeline.dispose();var i=e("./component/timeline"),n=new i(this._themeConfig,this._messageCenter,this._zr,t,this);return 
this._timeline=n,this.component.timeline=this._timeline,this},addData:function(e,t,i,n,r){function s(){if(c._zr){c._zr.clearAnimation();for(var e=0,t=w.length;t>e;e++)w[e].motionlessOnce=h.addDataAnimation&&w[e].addDataAnimation;c._messageCenter.dispatch(a.EVENT.REFRESH,null,{option:h},c)}}for(var l=e instanceof Array?e:[[e,t,i,n,r]],h=this.getOption(),d=this._optionRestore,c=this,m=0,p=l.length;p>m;m++){e=l[m][0],t=l[m][1],i=l[m][2],n=l[m][3],r=l[m][4];var u=d.series[e],V=i?"unshift":"push",U=i?"pop":"shift";if(u){var g=u.data,f=h.series[e].data;if(g[V](t),f[V](t),n||(g[U](),t=f[U]()),null!=r){var y,b;if(u.type===a.CHART_TYPE_PIE&&(y=d.legend)&&(b=y.data)){var _=h.legend.data;if(b[V](r),_[V](r),!n){var x=o.indexOf(b,t.name);-1!=x&&b.splice(x,1),x=o.indexOf(_,t.name),-1!=x&&_.splice(x,1)}}else if(null!=d.xAxis&&null!=d.yAxis){var k,v,L=u.xAxisIndex||0;(null==d.xAxis[L].type||"category"===d.xAxis[L].type)&&(k=d.xAxis[L].data,v=h.xAxis[L].data,k[V](r),v[V](r),n||(k[U](),v[U]())),L=u.yAxisIndex||0,"category"===d.yAxis[L].type&&(k=d.yAxis[L].data,v=h.yAxis[L].data,k[V](r),v[V](r),n||(k[U](),v[U]()))}}this._option.series[e].data=h.series[e].data}}this._zr.clearAnimation();for(var w=this._chartList,W=0,X=function(){W--,0===W&&s()},m=0,p=w.length;p>m;m++)h.addDataAnimation&&w[m].addDataAnimation&&(W++,w[m].addDataAnimation(l,X));return this.component.dataZoom&&this.component.dataZoom.syncOption(h),this._option=h,h.addDataAnimation||setTimeout(s,0),this},addMarkPoint:function(e,t){return this._addMark(e,t,"markPoint")},addMarkLine:function(e,t){return this._addMark(e,t,"markLine")},_addMark:function(e,t,i){var n,a=this._option.series;if(a&&(n=a[e])){var r=this._optionRestore.series,s=r[e],l=n[i],h=s[i];l=n[i]=l||{data:[]},h=s[i]=h||{data:[]};for(var d in t)"data"===d?(l.data=l.data.concat(t.data),h.data=h.data.concat(t.data)):"object"!=typeof t[d]||null==l[d]?l[d]=h[d]=t[d]:(o.merge(l[d],t[d],!0),o.merge(h[d],t[d],!0));var c=this.chart[n.type];c&&c.addMark(e,t,i)}return 
this},delMarkPoint:function(e,t){return this._delMark(e,t,"markPoint")},delMarkLine:function(e,t){return this._delMark(e,t,"markLine")},_delMark:function(e,t,i){var n,a,o,r=this._option.series;if(!(r&&(n=r[e])&&(a=n[i])&&(o=a.data)))return this;t=t.split(" > ");for(var s=-1,l=0,h=o.length;h>l;l++){var d=o[l];if(d instanceof Array){if(d[0].name===t[0]&&d[1].name===t[1]){s=l;break}}else if(d.name===t[0]){s=l;break}}if(s>-1){o.splice(s,1),this._optionRestore.series[e][i].data.splice(s,1);var c=this.chart[n.type];c&&c.delMark(e,t.join(" > "),i)}return this},getDom:function(){return this.dom},getZrender:function(){return this._zr},getDataURL:function(e){if(!l)return"";if(0===this._chartList.length){var t="IMG"+this.id,i=document.getElementById(t);if(i)return i.src}var n=this.component.tooltip;switch(n&&n.hideTip(),e){case"jpeg":break;default:e="png"}var a=this._option.backgroundColor;return a&&"rgba(0,0,0,0)"===a.replace(" ","")&&(a="#fff"),this._zr.toDataURL("image/"+e,a)},getImage:function(e){var t=this._optionRestore.title,i=document.createElement("img");return i.src=this.getDataURL(e),i.title=t&&t.text||"ECharts",i},getConnectedDataURL:function(t){if(!this.isConnected())return this.getDataURL(t);var i=this.dom,n={self:{img:this.getDataURL(t),left:i.offsetLeft,top:i.offsetTop,right:i.offsetLeft+i.offsetWidth,bottom:i.offsetTop+i.offsetHeight}},a=n.self.left,o=n.self.top,r=n.self.right,s=n.self.bottom;for(var l in this._connected)i=this._connected[l].getDom(),n[l]={img:this._connected[l].getDataURL(t),left:i.offsetLeft,top:i.offsetTop,right:i.offsetLeft+i.offsetWidth,bottom:i.offsetTop+i.offsetHeight},a=Math.min(a,n[l].left),o=Math.min(o,n[l].top),r=Math.max(r,n[l].right),s=Math.max(s,n[l].bottom);var h=document.createElement("div");h.style.position="absolute",h.style.left="-4000px",h.style.width=r-a+"px",h.style.height=s-o+"px",document.body.appendChild(h);var d=e("zrender").init(h),c=e("zrender/shape/Image");for(var l in n)d.addShape(new 
c({style:{x:n[l].left-a,y:n[l].top-o,image:n[l].img}}));d.render();var m=this._option.backgroundColor;m&&"rgba(0,0,0,0)"===m.replace(/ /g,"")&&(m="#fff");var p=d.toDataURL("image/png",m);return setTimeout(function(){d.dispose(),h.parentNode.removeChild(h),h=null},100),p},getConnectedImage:function(e){var t=this._optionRestore.title,i=document.createElement("img");return i.src=this.getConnectedDataURL(e),i.title=t&&t.text||"ECharts",i},on:function(e,t){return this._messageCenterOutSide.bind(e,t,this),this},un:function(e,t){return this._messageCenterOutSide.unbind(e,t),this},connect:function(e){if(!e)return this;if(this._connected||(this._connected={}),e instanceof Array)for(var t=0,i=e.length;i>t;t++)this._connected[e[t].id]=e[t];else this._connected[e.id]=e;return this},disConnect:function(e){if(!e||!this._connected)return this;if(e instanceof Array)for(var t=0,i=e.length;i>t;t++)delete this._connected[e[t].id];else delete this._connected[e.id];for(var n in this._connected)return this;return this._connected=!1,this},connectedEventHandler:function(e){e.__echartsId!=this.id&&this._onevent(e)},isConnected:function(){return!!this._connected},showLoading:function(t){var i={bar:e("zrender/loadingEffect/Bar"),bubble:e("zrender/loadingEffect/Bubble"),dynamicLine:e("zrender/loadingEffect/DynamicLine"),ring:e("zrender/loadingEffect/Ring"),spin:e("zrender/loadingEffect/Spin"),whirling:e("zrender/loadingEffect/Whirling")};this._toolbox.hideDataView(),t=t||{};var n=t.textStyle||{};t.textStyle=n;var r=o.merge(o.merge(o.clone(n),this._themeConfig.textStyle),a.textStyle);n.textFont=r.fontStyle+" "+r.fontWeight+" "+r.fontSize+"px "+r.fontFamily,n.text=t.text||this._option&&this._option.loadingText||this._themeConfig.loadingText||a.loadingText,null!=t.x&&(n.x=t.x),null!=t.y&&(n.y=t.y),t.effectOption=t.effectOption||{},t.effectOption.textStyle=n;var s=t.effect;return("string"==typeof 
s||null==s)&&(s=i[t.effect||this._option&&this._option.loadingEffect||this._themeConfig.loadingEffect||a.loadingEffect]||i.spin),this._zr.showLoading(new s(t.effectOption)),this},hideLoading:function(){return this._zr.hideLoading(),this},setTheme:function(t){if(t){if("string"==typeof t)switch(t){case"macarons":t=e("./theme/macarons");break;case"infographic":t=e("./theme/infographic");break;default:t={}}else t=t||{};this._themeConfig=t}if(!l){var i=this._themeConfig.textStyle;i&&i.fontFamily&&i.fontFamily2&&(i.fontFamily=i.fontFamily2),i=a.textStyle,i.fontFamily=i.fontFamily2}this._timeline&&this._timeline.setTheme(!0),this._optionRestore&&this.restore()},resize:function(){var e=this;return function(){if(e._clearEffect(),e._zr.resize(),e._option&&e._option.renderAsImage&&l)return e._render(e._option),e;e._zr.clearAnimation(),e._island.resize(),e._toolbox.resize(),e._timeline&&e._timeline.resize();for(var t=0,i=e._chartList.length;i>t;t++)e._chartList[t].resize&&e._chartList[t].resize();return e.component.grid&&e.component.grid.refixAxisShape(e.component),e._zr.refresh(),e._messageCenter.dispatch(a.EVENT.RESIZE,null,null,e),e}},_clearEffect:function(){this._zr.modLayer(a.EFFECT_ZLEVEL,{motionBlur:!1}),this._zr.painter.clearLayer(a.EFFECT_ZLEVEL)},clear:function(){return this._disposeChartList(),this._zr.clear(),this._option={},this._optionRestore={},this.dom.style.backgroundColor=null,this},dispose:function(){var e=this.dom.getAttribute(c);e&&delete d[e],this._island.dispose(),this._toolbox.dispose(),this._timeline&&this._timeline.dispose(),this._messageCenter.unbind(),this.clear(),this._zr.dispose(),this._zr=null}},s}),define("echarts/config",[],function(){var 
e={CHART_TYPE_LINE:"line",CHART_TYPE_BAR:"bar",CHART_TYPE_SCATTER:"scatter",CHART_TYPE_PIE:"pie",CHART_TYPE_RADAR:"radar",CHART_TYPE_VENN:"venn",CHART_TYPE_TREEMAP:"treemap",CHART_TYPE_TREE:"tree",CHART_TYPE_MAP:"map",CHART_TYPE_K:"k",CHART_TYPE_ISLAND:"island",CHART_TYPE_FORCE:"force",CHART_TYPE_CHORD:"chord",CHART_TYPE_GAUGE:"gauge",CHART_TYPE_FUNNEL:"funnel",CHART_TYPE_EVENTRIVER:"eventRiver",CHART_TYPE_WORDCLOUD:"wordCloud",CHART_TYPE_HEATMAP:"heatmap",COMPONENT_TYPE_TITLE:"title",COMPONENT_TYPE_LEGEND:"legend",COMPONENT_TYPE_DATARANGE:"dataRange",COMPONENT_TYPE_DATAVIEW:"dataView",COMPONENT_TYPE_DATAZOOM:"dataZoom",COMPONENT_TYPE_TOOLBOX:"toolbox",COMPONENT_TYPE_TOOLTIP:"tooltip",COMPONENT_TYPE_GRID:"grid",COMPONENT_TYPE_AXIS:"axis",COMPONENT_TYPE_POLAR:"polar",COMPONENT_TYPE_X_AXIS:"xAxis",COMPONENT_TYPE_Y_AXIS:"yAxis",COMPONENT_TYPE_AXIS_CATEGORY:"categoryAxis",COMPONENT_TYPE_AXIS_VALUE:"valueAxis",COMPONENT_TYPE_TIMELINE:"timeline",COMPONENT_TYPE_ROAMCONTROLLER:"roamController",backgroundColor:"rgba(0,0,0,0)",color:["#ff7f50","#87cefa","#da70d6","#32cd32","#6495ed","#ff69b4","#ba55d3","#cd5c5c","#ffa500","#40e0d0","#1e90ff","#ff6347","#7b68ee","#00fa9a","#ffd700","#6699FF","#ff6666","#3cb371","#b8860b","#30e0e0"],markPoint:{clickable:!0,symbol:"pin",symbolSize:10,large:!1,effect:{show:!1,loop:!0,period:15,type:"scale",scaleSize:2,bounceDistance:10},itemStyle:{normal:{borderWidth:2,label:{show:!0,position:"inside"}},emphasis:{label:{show:!0}}}},markLine:{clickable:!0,symbol:["circle","arrow"],symbolSize:[2,4],smoothness:.2,precision:2,effect:{show:!1,loop:!0,period:15,scaleSize:2},bundling:{enable:!1,maxTurningAngle:45},itemStyle:{normal:{borderWidth:1.5,label:{show:!0,position:"end"},lineStyle:{type:"dashed"}},emphasis:{label:{show:!1},lineStyle:{}}}},textStyle:{decoration:"none",fontFamily:"Arial, Verdana, 
sans-serif",fontFamily2:"寰蒋闆呴粦",fontSize:12,fontStyle:"normal",fontWeight:"normal"},EVENT:{REFRESH:"refresh",RESTORE:"restore",RESIZE:"resize",CLICK:"click",DBLCLICK:"dblclick",HOVER:"hover",MOUSEOUT:"mouseout",DATA_CHANGED:"dataChanged",DATA_ZOOM:"dataZoom",DATA_RANGE:"dataRange",DATA_RANGE_SELECTED:"dataRangeSelected",DATA_RANGE_HOVERLINK:"dataRangeHoverLink",LEGEND_SELECTED:"legendSelected",LEGEND_HOVERLINK:"legendHoverLink",MAP_SELECTED:"mapSelected",PIE_SELECTED:"pieSelected",MAGIC_TYPE_CHANGED:"magicTypeChanged",DATA_VIEW_CHANGED:"dataViewChanged",TIMELINE_CHANGED:"timelineChanged",MAP_ROAM:"mapRoam",FORCE_LAYOUT_END:"forceLayoutEnd",TOOLTIP_HOVER:"tooltipHover",TOOLTIP_IN_GRID:"tooltipInGrid",TOOLTIP_OUT_GRID:"tooltipOutGrid",ROAMCONTROLLER:"roamController"},DRAG_ENABLE_TIME:120,EFFECT_ZLEVEL:10,effectBlendAlpha:.95,symbolList:["circle","rectangle","triangle","diamond","emptyCircle","emptyRectangle","emptyTriangle","emptyDiamond"],loadingEffect:"spin",loadingText:"鏁版嵁璇诲彇涓�...",noDataEffect:"bubble",noDataText:"鏆傛棤鏁版嵁",calculable:!1,calculableColor:"rgba(255,165,0,0.6)",calculableHolderColor:"#ccc",nameConnector:" & ",valueConnector:": ",animation:!0,addDataAnimation:!0,animationThreshold:2e3,animationDuration:2e3,animationDurationUpdate:500,animationEasing:"ExponentialOut"};return e}),define("zrender/tool/util",["require","../dep/excanvas"],function(e){function t(e){return e&&1===e.nodeType&&"string"==typeof e.nodeName}function i(e){if("object"==typeof e&&null!==e){var n=e;if(e instanceof Array){n=[];for(var a=0,o=e.length;o>a;a++)n[a]=i(e[a])}else if(!g[f.call(e)]&&!t(e)){n={};for(var r in e)e.hasOwnProperty(r)&&(n[r]=i(e[r]))}return n}return e}function n(e,i,n,o){if(i.hasOwnProperty(n)){var r=e[n];"object"!=typeof r||g[f.call(r)]||t(r)?!o&&n in e||(e[n]=i[n]):a(e[n],i[n],o)}}function a(e,t,i){for(var a in t)n(e,t,a,i);return e}function o(){if(!m)if(e("../dep/excanvas"),window.G_vmlCanvasManager){var 
t=document.createElement("div");t.style.position="absolute",t.style.top="-1000px",document.body.appendChild(t),m=G_vmlCanvasManager.initElement(t).getContext("2d");
}else m=document.createElement("canvas").getContext("2d");return m}function r(e,t){if(e.indexOf)return e.indexOf(t);for(var i=0,n=e.length;n>i;i++)if(e[i]===t)return i;return-1}function s(e,t){function i(){}var n=e.prototype;i.prototype=t.prototype,e.prototype=new i;for(var a in n)e.prototype[a]=n[a];e.constructor=e}function l(e,t,i){if(e&&t)if(e.forEach&&e.forEach===u)e.forEach(t,i);else if(e.length===+e.length)for(var n=0,a=e.length;a>n;n++)t.call(i,e[n],n,e);else for(var o in e)e.hasOwnProperty(o)&&t.call(i,e[o],o,e)}function h(e,t,i){if(e&&t){if(e.map&&e.map===V)return e.map(t,i);for(var n=[],a=0,o=e.length;o>a;a++)n.push(t.call(i,e[a],a,e));return n}}function d(e,t,i){if(e&&t){if(e.filter&&e.filter===U)return e.filter(t,i);for(var n=[],a=0,o=e.length;o>a;a++)t.call(i,e[a],a,e)&&n.push(e[a]);return n}}function c(e,t){return function(){e.apply(t,arguments)}}var m,p=Array.prototype,u=p.forEach,V=p.map,U=p.filter,g={"[object Function]":1,"[object RegExp]":1,"[object Date]":1,"[object Error]":1,"[object CanvasGradient]":1},f=Object.prototype.toString;return{inherits:s,clone:i,merge:a,getContext:o,indexOf:r,each:l,map:h,filter:d,bind:c}}),define("zrender/tool/event",["require","../mixin/Eventful"],function(e){"use strict";function t(e){return"undefined"!=typeof e.zrenderX&&e.zrenderX||"undefined"!=typeof e.offsetX&&e.offsetX||"undefined"!=typeof e.layerX&&e.layerX||"undefined"!=typeof e.clientX&&e.clientX}function i(e){return"undefined"!=typeof e.zrenderY&&e.zrenderY||"undefined"!=typeof e.offsetY&&e.offsetY||"undefined"!=typeof e.layerY&&e.layerY||"undefined"!=typeof e.clientY&&e.clientY}function n(e){return"undefined"!=typeof e.zrenderDelta&&e.zrenderDelta||"undefined"!=typeof e.wheelDelta&&e.wheelDelta||"undefined"!=typeof e.detail&&-e.detail}var a=e("../mixin/Eventful"),o="function"==typeof 
window.addEventListener?function(e){e.preventDefault(),e.stopPropagation(),e.cancelBubble=!0}:function(e){e.returnValue=!1,e.cancelBubble=!0};return{getX:t,getY:i,getDelta:n,stop:o,Dispatcher:a}}),define("zrender/tool/env",[],function(){function e(e){var t=this.os={},i=this.browser={},n=e.match(/Web[kK]it[\/]{0,1}([\d.]+)/),a=e.match(/(Android);?[\s\/]+([\d.]+)?/),o=e.match(/(iPad).*OS\s([\d_]+)/),r=e.match(/(iPod)(.*OS\s([\d_]+))?/),s=!o&&e.match(/(iPhone\sOS)\s([\d_]+)/),l=e.match(/(webOS|hpwOS)[\s\/]([\d.]+)/),h=l&&e.match(/TouchPad/),d=e.match(/Kindle\/([\d.]+)/),c=e.match(/Silk\/([\d._]+)/),m=e.match(/(BlackBerry).*Version\/([\d.]+)/),p=e.match(/(BB10).*Version\/([\d.]+)/),u=e.match(/(RIM\sTablet\sOS)\s([\d.]+)/),V=e.match(/PlayBook/),U=e.match(/Chrome\/([\d.]+)/)||e.match(/CriOS\/([\d.]+)/),g=e.match(/Firefox\/([\d.]+)/),f=e.match(/MSIE ([\d.]+)/),y=n&&e.match(/Mobile\//)&&!U,b=e.match(/(iPhone|iPod|iPad).*AppleWebKit(?!.*Safari)/)&&!U,f=e.match(/MSIE\s([\d.]+)/);return(i.webkit=!!n)&&(i.version=n[1]),a&&(t.android=!0,t.version=a[2]),s&&!r&&(t.ios=t.iphone=!0,t.version=s[2].replace(/_/g,".")),o&&(t.ios=t.ipad=!0,t.version=o[2].replace(/_/g,".")),r&&(t.ios=t.ipod=!0,t.version=r[3]?r[3].replace(/_/g,"."):null),l&&(t.webos=!0,t.version=l[2]),h&&(t.touchpad=!0),m&&(t.blackberry=!0,t.version=m[2]),p&&(t.bb10=!0,t.version=p[2]),u&&(t.rimtabletos=!0,t.version=u[2]),V&&(i.playbook=!0),d&&(t.kindle=!0,t.version=d[1]),c&&(i.silk=!0,i.version=c[1]),!c&&t.android&&e.match(/Kindle 
Fire/)&&(i.silk=!0),U&&(i.chrome=!0,i.version=U[1]),g&&(i.firefox=!0,i.version=g[1]),f&&(i.ie=!0,i.version=f[1]),y&&(e.match(/Safari/)||t.ios)&&(i.safari=!0),b&&(i.webview=!0),f&&(i.ie=!0,i.version=f[1]),t.tablet=!!(o||V||a&&!e.match(/Mobile/)||g&&e.match(/Tablet/)||f&&!e.match(/Phone/)&&e.match(/Touch/)),t.phone=!(t.tablet||t.ipod||!(a||s||l||m||p||U&&e.match(/Android/)||U&&e.match(/CriOS\/([\d.]+)/)||g&&e.match(/Mobile/)||f&&e.match(/Touch/))),{browser:i,os:t,canvasSupported:document.createElement("canvas").getContext?!0:!1}}return e(navigator.userAgent)}),define("zrender",["zrender/zrender"],function(e){return e}),define("zrender/zrender",["require","./dep/excanvas","./tool/util","./tool/log","./tool/guid","./Handler","./Painter","./Storage","./animation/Animation","./tool/env"],function(e){function t(e){return function(){e._needsRefreshNextFrame&&e.refresh()}}e("./dep/excanvas");var i=e("./tool/util"),n=e("./tool/log"),a=e("./tool/guid"),o=e("./Handler"),r=e("./Painter"),s=e("./Storage"),l=e("./animation/Animation"),h={},d={};d.version="2.1.1",d.init=function(e){var t=new c(a(),e);return h[t.id]=t,t},d.dispose=function(e){if(e)e.dispose();else{for(var t in h)h[t].dispose();h={}}return d},d.getInstance=function(e){return h[e]},d.delInstance=function(e){return delete h[e],d};var c=function(i,n){this.id=i,this.env=e("./tool/env"),this.storage=new s,this.painter=new r(n,this.storage),this.handler=new o(n,this.storage,this.painter),this.animation=new l({stage:{update:t(this)}}),this.animation.start();var a=this;this.painter.refreshNextFrame=function(){a.refreshNextFrame()},this._needsRefreshNextFrame=!1;var a=this,h=this.storage,d=h.delFromMap;h.delFromMap=function(e){var t=h.get(e);a.stopAnimation(t),d.call(h,e)}};return c.prototype.getId=function(){return this.id},c.prototype.addShape=function(e){return this.addElement(e),this},c.prototype.addGroup=function(e){return this.addElement(e),this},c.prototype.delShape=function(e){return 
this.delElement(e),this},c.prototype.delGroup=function(e){return this.delElement(e),this},c.prototype.modShape=function(e,t){return this.modElement(e,t),this},c.prototype.modGroup=function(e,t){return this.modElement(e,t),this},c.prototype.addElement=function(e){return this.storage.addRoot(e),this._needsRefreshNextFrame=!0,this},c.prototype.delElement=function(e){return this.storage.delRoot(e),this._needsRefreshNextFrame=!0,this},c.prototype.modElement=function(e,t){return this.storage.mod(e,t),this._needsRefreshNextFrame=!0,this},c.prototype.modLayer=function(e,t){return this.painter.modLayer(e,t),this._needsRefreshNextFrame=!0,this},c.prototype.addHoverShape=function(e){return this.storage.addHover(e),this},c.prototype.render=function(e){return this.painter.render(e),this._needsRefreshNextFrame=!1,this},c.prototype.refresh=function(e){return this.painter.refresh(e),this._needsRefreshNextFrame=!1,this},c.prototype.refreshNextFrame=function(){return this._needsRefreshNextFrame=!0,this},c.prototype.refreshHover=function(e){return this.painter.refreshHover(e),this},c.prototype.refreshShapes=function(e,t){return this.painter.refreshShapes(e,t),this},c.prototype.resize=function(){return this.painter.resize(),this},c.prototype.animate=function(e,t,a){var o=this;if("string"==typeof e&&(e=this.storage.get(e)),e){var r;if(t){for(var s=t.split("."),l=e,h=0,d=s.length;d>h;h++)l&&(l=l[s[h]]);l&&(r=l)}else r=e;if(!r)return void n('Property "'+t+'" is not existed in element '+e.id);null==e.__animators&&(e.__animators=[]);var c=e.__animators,m=this.animation.animate(r,{loop:a}).during(function(){o.modShape(e)}).done(function(){var t=i.indexOf(e.__animators,m);t>=0&&c.splice(t,1)});return c.push(m),m}n("Element not existed")},c.prototype.stopAnimation=function(e){if(e.__animators){for(var t=e.__animators,i=t.length,n=0;i>n;n++)t[n].stop();t.length=0}return this},c.prototype.clearAnimation=function(){return this.animation.clear(),this},c.prototype.showLoading=function(e){return 
this.painter.showLoading(e),this},c.prototype.hideLoading=function(){return this.painter.hideLoading(),this},c.prototype.getWidth=function(){return this.painter.getWidth()},c.prototype.getHeight=function(){return this.painter.getHeight()},c.prototype.toDataURL=function(e,t,i){return this.painter.toDataURL(e,t,i)},c.prototype.shapeToImage=function(e,t,i){var n=a();return this.painter.shapeToImage(n,e,t,i)},c.prototype.on=function(e,t,i){return this.handler.on(e,t,i),this},c.prototype.un=function(e,t){return this.handler.un(e,t),this},c.prototype.trigger=function(e,t){return this.handler.trigger(e,t),this},c.prototype.clear=function(){return this.storage.delRoot(),this.painter.clear(),this},c.prototype.dispose=function(){this.animation.stop(),this.clear(),this.storage.dispose(),this.painter.dispose(),this.handler.dispose(),this.animation=this.storage=this.painter=this.handler=null,d.delInstance(this.id)},d}),define("zrender/config",[],function(){var e={EVENT:{RESIZE:"resize",CLICK:"click",DBLCLICK:"dblclick",MOUSEWHEEL:"mousewheel",MOUSEMOVE:"mousemove",MOUSEOVER:"mouseover",MOUSEOUT:"mouseout",MOUSEDOWN:"mousedown",MOUSEUP:"mouseup",GLOBALOUT:"globalout",DRAGSTART:"dragstart",DRAGEND:"dragend",DRAGENTER:"dragenter",DRAGOVER:"dragover",DRAGLEAVE:"dragleave",DROP:"drop",touchClickDelay:300},elementClassName:"zr-element",catchBrushException:!1,debugMode:0,devicePixelRatio:Math.max(window.devicePixelRatio||1,1)};return e}),define("echarts/chart/island",["require","./base","zrender/shape/Circle","../config","../util/ecData","zrender/tool/util","zrender/tool/event","zrender/tool/color","../util/accMath","../chart"],function(e){function t(e,t,n,a,r){i.call(this,e,t,n,a,r),this._nameConnector,this._valueConnector,this._zrHeight=this.zr.getHeight(),this._zrWidth=this.zr.getWidth();var l=this;l.shapeHandler.onmousewheel=function(e){var t=e.target,i=e.event,n=s.getDelta(i);n=n>0?-1:1,t.style.r-=n,t.style.r=t.style.r<5?5:t.style.r;var 
a=o.get(t,"value"),r=a*l.option.island.calculateStep;a=r>1?Math.round(a-r*n):+(a-r*n).toFixed(2);var h=o.get(t,"name");t.style.text=h+":"+a,o.set(t,"value",a),o.set(t,"name",h),l.zr.modShape(t.id),l.zr.refreshNextFrame(),s.stop(i)}}var i=e("./base"),n=e("zrender/shape/Circle"),a=e("../config");a.island={zlevel:0,z:5,r:15,calculateStep:.1};var o=e("../util/ecData"),r=e("zrender/tool/util"),s=e("zrender/tool/event");return t.prototype={type:a.CHART_TYPE_ISLAND,_combine:function(t,i){var n=e("zrender/tool/color"),a=e("../util/accMath"),r=a.accAdd(o.get(t,"value"),o.get(i,"value")),s=o.get(t,"name")+this._nameConnector+o.get(i,"name");t.style.text=s+this._valueConnector+r,o.set(t,"value",r),o.set(t,"name",s),t.style.r=this.option.island.r,t.style.color=n.mix(t.style.color,i.style.color)},refresh:function(e){e&&(e.island=this.reformOption(e.island),this.option=e,this._nameConnector=this.option.nameConnector,this._valueConnector=this.option.valueConnector)},getOption:function(){return this.option},resize:function(){var e=this.zr.getWidth(),t=this.zr.getHeight(),i=e/(this._zrWidth||e),n=t/(this._zrHeight||t);if(1!==i||1!==n){this._zrWidth=e,this._zrHeight=t;for(var a=0,o=this.shapeList.length;o>a;a++)this.zr.modShape(this.shapeList[a].id,{style:{x:Math.round(this.shapeList[a].style.x*i),y:Math.round(this.shapeList[a].style.y*n)}})}},add:function(e){var t=o.get(e,"name"),i=o.get(e,"value"),a=null!=o.get(e,"series")?o.get(e,"series").name:"",r=this.getFont(this.option.island.textStyle),s=this.option.island,l={zlevel:s.zlevel,z:s.z,style:{x:e.style.x,y:e.style.y,r:this.option.island.r,color:e.style.color||e.style.strokeColor,text:t+this._valueConnector+i,textFont:r},draggable:!0,hoverable:!0,onmousewheel:this.shapeHandler.onmousewheel,_type:"island"};"#fff"===l.style.color&&(l.style.color=e.style.strokeColor),this.setCalculable(l),l.dragEnableTime=0,o.pack(l,{name:a},-1,i,-1,t),l=new 
n(l),this.shapeList.push(l),this.zr.addShape(l)},del:function(e){this.zr.delShape(e.id);for(var t=[],i=0,n=this.shapeList.length;n>i;i++)this.shapeList[i].id!=e.id&&t.push(this.shapeList[i]);this.shapeList=t},ondrop:function(e,t){if(this.isDrop&&e.target){var i=e.target,n=e.dragged;this._combine(i,n),this.zr.modShape(i.id),t.dragIn=!0,this.isDrop=!1}},ondragend:function(e,t){var i=e.target;this.isDragend?t.dragIn&&(this.del(i),t.needRefresh=!0):t.dragIn||(i.style.x=s.getX(e.event),i.style.y=s.getY(e.event),this.add(i),t.needRefresh=!0),this.isDragend=!1}},r.inherits(t,i),e("../chart").define("island",t),t}),define("echarts/component/toolbox",["require","./base","zrender/shape/Line","zrender/shape/Image","zrender/shape/Rectangle","../util/shape/Icon","../config","zrender/tool/util","zrender/config","zrender/tool/event","./dataView","../component"],function(e){function t(e,t,n,a,o){i.call(this,e,t,n,a,o),this.dom=o.dom,this._magicType={},this._magicMap={},this._isSilence=!1,this._iconList,this._iconShapeMap={},this._featureTitle={},this._featureIcon={},this._featureColor={},this._featureOption={},this._enableColor="red",this._disableColor="#ccc",this._markShapeList=[];var r=this;r._onMark=function(e){r.__onMark(e)},r._onMarkUndo=function(e){r.__onMarkUndo(e)},r._onMarkClear=function(e){r.__onMarkClear(e)},r._onDataZoom=function(e){r.__onDataZoom(e)},r._onDataZoomReset=function(e){r.__onDataZoomReset(e)},r._onDataView=function(e){r.__onDataView(e)},r._onRestore=function(e){r.__onRestore(e)},r._onSaveAsImage=function(e){r.__onSaveAsImage(e)},r._onMagicType=function(e){r.__onMagicType(e)},r._onCustomHandler=function(e){r.__onCustomHandler(e)},r._onmousemove=function(e){return r.__onmousemove(e)},r._onmousedown=function(e){return r.__onmousedown(e)},r._onmouseup=function(e){return r.__onmouseup(e)},r._onclick=function(e){return r.__onclick(e)}}var 
i=e("./base"),n=e("zrender/shape/Line"),a=e("zrender/shape/Image"),o=e("zrender/shape/Rectangle"),r=e("../util/shape/Icon"),s=e("../config");s.toolbox={zlevel:0,z:6,show:!1,orient:"horizontal",x:"right",y:"top",color:["#1e90ff","#22bb22","#4b0082","#d2691e"],disableColor:"#ddd",effectiveColor:"red",backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderWidth:0,padding:5,itemGap:10,itemSize:16,showTitle:!0,feature:{mark:{show:!1,title:{mark:"杈呭姪绾垮紑鍏�",markUndo:"鍒犻櫎杈呭姪绾�",markClear:"娓呯┖杈呭姪绾�"},lineStyle:{width:1,color:"#1e90ff",type:"dashed"}},dataZoom:{show:!1,title:{dataZoom:"鍖哄煙缂╂斁",dataZoomReset:"鍖哄煙缂╂斁鍚庨€€"}},dataView:{show:!1,title:"鏁版嵁瑙嗗浘",readOnly:!1,lang:["鏁版嵁瑙嗗浘","鍏抽棴","鍒锋柊"]},magicType:{show:!1,title:{line:"鎶樼嚎鍥惧垏鎹�",bar:"鏌卞舰鍥惧垏鎹�",stack:"鍫嗙Н",tiled:"骞抽摵",force:"鍔涘鍚戝竷灞€鍥惧垏鎹�",chord:"鍜屽鸡鍥惧垏鎹�",pie:"楗煎浘鍒囨崲",funnel:"婕忔枟鍥惧垏鎹�"},type:[]},restore:{show:!1,title:"杩樺師"},saveAsImage:{show:!1,title:"淇濆瓨涓哄浘鐗�",type:"png",lang:["鐐瑰嚮淇濆瓨"]}}};var l=e("zrender/tool/util"),h=e("zrender/config"),d=e("zrender/tool/event"),c="stack",m="tiled";return t.prototype={type:s.COMPONENT_TYPE_TOOLBOX,_buildShape:function(){this._iconList=[];var e=this.option.toolbox;this._enableColor=e.effectiveColor,this._disableColor=e.disableColor;var t=e.feature,i=[];for(var n in t)if(t[n].show)switch(n){case"mark":i.push({key:n,name:"mark"}),i.push({key:n,name:"markUndo"}),i.push({key:n,name:"markClear"});break;case"magicType":for(var a=0,o=t[n].type.length;o>a;a++)t[n].title[t[n].type[a]+"Chart"]=t[n].title[t[n].type[a]],t[n].option&&(t[n].option[t[n].type[a]+"Chart"]=t[n].option[t[n].type[a]]),i.push({key:n,name:t[n].type[a]+"Chart"});break;case"dataZoom":i.push({key:n,name:"dataZoom"}),i.push({key:n,name:"dataZoomReset"});break;case"saveAsImage":this.canvasSupported&&i.push({key:n,name:"saveAsImage"});break;default:i.push({key:n,name:n})}if(i.length>0){for(var 
r,n,a=0,o=i.length;o>a;a++)r=i[a].name,n=i[a].key,this._iconList.push(r),this._featureTitle[r]=t[n].title[r]||t[n].title,t[n].icon&&(this._featureIcon[r]=t[n].icon[r]||t[n].icon),t[n].color&&(this._featureColor[r]=t[n].color[r]||t[n].color),t[n].option&&(this._featureOption[r]=t[n].option[r]||t[n].option);this._itemGroupLocation=this._getItemGroupLocation(),this._buildBackground(),this._buildItem();for(var a=0,o=this.shapeList.length;o>a;a++)this.zr.addShape(this.shapeList[a]);this._iconShapeMap.mark&&(this._iconDisable(this._iconShapeMap.markUndo),this._iconDisable(this._iconShapeMap.markClear)),this._iconShapeMap.dataZoomReset&&0===this._zoomQueue.length&&this._iconDisable(this._iconShapeMap.dataZoomReset)}},_buildItem:function(){var t,i,n,o,s=this.option.toolbox,l=this._iconList.length,h=this._itemGroupLocation.x,d=this._itemGroupLocation.y,c=s.itemSize,m=s.itemGap,p=s.color instanceof Array?s.color:[s.color],u=this.getFont(s.textStyle);"horizontal"===s.orient?(i=this._itemGroupLocation.y/this.zr.getHeight()<.5?"bottom":"top",n=this._itemGroupLocation.x/this.zr.getWidth()<.5?"left":"right",o=this._itemGroupLocation.y/this.zr.getHeight()<.5?"top":"bottom"):i=this._itemGroupLocation.x/this.zr.getWidth()<.5?"right":"left",this._iconShapeMap={};for(var V=this,U=0;l>U;U++){switch(t={type:"icon",zlevel:this.getZlevelBase(),z:this.getZBase(),style:{x:h,y:d,width:c,height:c,iconType:this._iconList[U],lineWidth:1,strokeColor:this._featureColor[this._iconList[U]]||p[U%p.length],brushType:"stroke"},highlightStyle:{lineWidth:1,text:s.showTitle?this._featureTitle[this._iconList[U]]:void 0,textFont:u,textPosition:i,strokeColor:this._featureColor[this._iconList[U]]||p[U%p.length]},hoverable:!0,clickable:!0},this._featureIcon[this._iconList[U]]&&(t.style.image=this._featureIcon[this._iconList[U]].replace(new 
RegExp("^image:\\/\\/"),""),t.style.opacity=.8,t.highlightStyle.opacity=1,t.type="image"),"horizontal"===s.orient&&(0===U&&"left"===n&&(t.highlightStyle.textPosition="specific",t.highlightStyle.textAlign=n,t.highlightStyle.textBaseline=o,t.highlightStyle.textX=h,t.highlightStyle.textY="top"===o?d+c+10:d-10),U===l-1&&"right"===n&&(t.highlightStyle.textPosition="specific",t.highlightStyle.textAlign=n,t.highlightStyle.textBaseline=o,t.highlightStyle.textX=h+c,t.highlightStyle.textY="top"===o?d+c+10:d-10)),this._iconList[U]){case"mark":t.onclick=V._onMark;break;case"markUndo":t.onclick=V._onMarkUndo;break;case"markClear":t.onclick=V._onMarkClear;break;case"dataZoom":t.onclick=V._onDataZoom;break;case"dataZoomReset":t.onclick=V._onDataZoomReset;break;case"dataView":if(!this._dataView){var g=e("./dataView");this._dataView=new g(this.ecTheme,this.messageCenter,this.zr,this.option,this.myChart)}t.onclick=V._onDataView;break;case"restore":t.onclick=V._onRestore;break;case"saveAsImage":t.onclick=V._onSaveAsImage;break;default:this._iconList[U].match("Chart")?(t._name=this._iconList[U].replace("Chart",""),t.onclick=V._onMagicType):t.onclick=V._onCustomHandler}"icon"===t.type?t=new r(t):"image"===t.type&&(t=new a(t)),this.shapeList.push(t),this._iconShapeMap[this._iconList[U]]=t,"horizontal"===s.orient?h+=c+m:d+=c+m}},_buildBackground:function(){var e=this.option.toolbox,t=this.reformCssArray(this.option.toolbox.padding);this.shapeList.push(new o({zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this._itemGroupLocation.x-t[3],y:this._itemGroupLocation.y-t[0],width:this._itemGroupLocation.width+t[3]+t[1],height:this._itemGroupLocation.height+t[0]+t[2],brushType:0===e.borderWidth?"fill":"both",color:e.backgroundColor,strokeColor:e.borderColor,lineWidth:e.borderWidth}}))},_getItemGroupLocation:function(){var 
e=this.option.toolbox,t=this.reformCssArray(this.option.toolbox.padding),i=this._iconList.length,n=e.itemGap,a=e.itemSize,o=0,r=0;"horizontal"===e.orient?(o=(a+n)*i-n,r=a):(r=(a+n)*i-n,o=a);var s,l=this.zr.getWidth();switch(e.x){case"center":s=Math.floor((l-o)/2);break;case"left":s=t[3]+e.borderWidth;break;case"right":s=l-o-t[1]-e.borderWidth;break;default:s=e.x-0,s=isNaN(s)?0:s}var h,d=this.zr.getHeight();switch(e.y){case"top":h=t[0]+e.borderWidth;break;case"bottom":h=d-r-t[2]-e.borderWidth;break;case"center":h=Math.floor((d-r)/2);break;default:h=e.y-0,h=isNaN(h)?0:h}return{x:s,y:h,width:o,height:r}},__onmousemove:function(e){this._marking&&(this._markShape.style.xEnd=d.getX(e.event),this._markShape.style.yEnd=d.getY(e.event),this.zr.addHoverShape(this._markShape)),this._zooming&&(this._zoomShape.style.width=d.getX(e.event)-this._zoomShape.style.x,this._zoomShape.style.height=d.getY(e.event)-this._zoomShape.style.y,this.zr.addHoverShape(this._zoomShape),this.dom.style.cursor="crosshair",d.stop(e.event)),this._zoomStart&&"pointer"!=this.dom.style.cursor&&"move"!=this.dom.style.cursor&&(this.dom.style.cursor="crosshair")},__onmousedown:function(e){if(!e.target){this._zooming=!0;var t=d.getX(e.event),i=d.getY(e.event),n=this.option.dataZoom||{};return this._zoomShape=new o({zlevel:this.getZlevelBase(),z:this.getZBase(),style:{x:t,y:i,width:1,height:1,brushType:"both"},highlightStyle:{lineWidth:2,color:n.fillerColor||s.dataZoom.fillerColor,strokeColor:n.handleColor||s.dataZoom.handleColor,brushType:"both"}}),this.zr.addHoverShape(this._zoomShape),!0}},__onmouseup:function(){if(!this._zoomShape||Math.abs(this._zoomShape.style.width)<10||Math.abs(this._zoomShape.style.height)<10)return this._zooming=!1,!0;if(this._zooming&&this.component.dataZoom){this._zooming=!1;var 
e=this.component.dataZoom.rectZoom(this._zoomShape.style);e&&(this._zoomQueue.push({start:e.start,end:e.end,start2:e.start2,end2:e.end2}),this._iconEnable(this._iconShapeMap.dataZoomReset),this.zr.refreshNextFrame())}return!0},__onclick:function(e){if(!e.target)if(this._marking)this._marking=!1,this._markShapeList.push(this._markShape),this._iconEnable(this._iconShapeMap.markUndo),this._iconEnable(this._iconShapeMap.markClear),this.zr.addShape(this._markShape),this.zr.refreshNextFrame();else if(this._markStart){this._marking=!0;var t=d.getX(e.event),i=d.getY(e.event);this._markShape=new n({zlevel:this.getZlevelBase(),z:this.getZBase(),style:{xStart:t,yStart:i,xEnd:t,yEnd:i,lineWidth:this.query(this.option,"toolbox.feature.mark.lineStyle.width"),strokeColor:this.query(this.option,"toolbox.feature.mark.lineStyle.color"),lineType:this.query(this.option,"toolbox.feature.mark.lineStyle.type")}}),this.zr.addHoverShape(this._markShape)}},__onMark:function(e){var t=e.target;if(this._marking||this._markStart)this._resetMark(),this.zr.refreshNextFrame();else{this._resetZoom(),this.zr.modShape(t.id,{style:{strokeColor:this._enableColor}}),this.zr.refreshNextFrame(),this._markStart=!0;var i=this;setTimeout(function(){i.zr&&i.zr.on(h.EVENT.CLICK,i._onclick)&&i.zr.on(h.EVENT.MOUSEMOVE,i._onmousemove)},10)}return!0},__onMarkUndo:function(){if(this._marking)this._marking=!1;else{var e=this._markShapeList.length;if(e>=1){var t=this._markShapeList[e-1];this.zr.delShape(t.id),this.zr.refreshNextFrame(),this._markShapeList.pop(),1===e&&(this._iconDisable(this._iconShapeMap.markUndo),this._iconDisable(this._iconShapeMap.markClear))}}return!0},__onMarkClear:function(){this._marking&&(this._marking=!1);var e=this._markShapeList.length;if(e>0){for(;e--;)this.zr.delShape(this._markShapeList.pop().id);this._iconDisable(this._iconShapeMap.markUndo),this._iconDisable(this._iconShapeMap.markClear),this.zr.refreshNextFrame()}return!0},__onDataZoom:function(e){var 
t=e.target;if(this._zooming||this._zoomStart)this._resetZoom(),this.zr.refreshNextFrame(),this.dom.style.cursor="default";else{this._resetMark(),this.zr.modShape(t.id,{style:{strokeColor:this._enableColor}}),this.zr.refreshNextFrame(),this._zoomStart=!0;var i=this;setTimeout(function(){i.zr&&i.zr.on(h.EVENT.MOUSEDOWN,i._onmousedown)&&i.zr.on(h.EVENT.MOUSEUP,i._onmouseup)&&i.zr.on(h.EVENT.MOUSEMOVE,i._onmousemove)},10),this.dom.style.cursor="crosshair"}return!0},__onDataZoomReset:function(){return this._zooming&&(this._zooming=!1),this._zoomQueue.pop(),this._zoomQueue.length>0?this.component.dataZoom.absoluteZoom(this._zoomQueue[this._zoomQueue.length-1]):(this.component.dataZoom.rectZoom(),this._iconDisable(this._iconShapeMap.dataZoomReset),this.zr.refreshNextFrame()),!0},_resetMark:function(){this._marking=!1,this._markStart&&(this._markStart=!1,this._iconShapeMap.mark&&this.zr.modShape(this._iconShapeMap.mark.id,{style:{strokeColor:this._iconShapeMap.mark.highlightStyle.strokeColor}}),this.zr.un(h.EVENT.CLICK,this._onclick),this.zr.un(h.EVENT.MOUSEMOVE,this._onmousemove))},_resetZoom:function(){this._zooming=!1,this._zoomStart&&(this._zoomStart=!1,this._iconShapeMap.dataZoom&&this.zr.modShape(this._iconShapeMap.dataZoom.id,{style:{strokeColor:this._iconShapeMap.dataZoom.highlightStyle.strokeColor}}),this.zr.un(h.EVENT.MOUSEDOWN,this._onmousedown),this.zr.un(h.EVENT.MOUSEUP,this._onmouseup),this.zr.un(h.EVENT.MOUSEMOVE,this._onmousemove))},_iconDisable:function(e){"image"!=e.type?this.zr.modShape(e.id,{hoverable:!1,clickable:!1,style:{strokeColor:this._disableColor}}):this.zr.modShape(e.id,{hoverable:!1,clickable:!1,style:{opacity:.3}})},_iconEnable:function(e){"image"!=e.type?this.zr.modShape(e.id,{hoverable:!0,clickable:!0,style:{strokeColor:e.highlightStyle.strokeColor}}):this.zr.modShape(e.id,{hoverable:!0,clickable:!0,style:{opacity:.8}})},__onDataView:function(){return this._dataView.show(this.option),!0},__onRestore:function(){return 
this._resetMark(),this._resetZoom(),this.messageCenter.dispatch(s.EVENT.RESTORE,null,null,this.myChart),!0},__onSaveAsImage:function(){var e=this.option.toolbox.feature.saveAsImage,t=e.type||"png";"png"!=t&&"jpeg"!=t&&(t="png");var i;i=this.myChart.isConnected()?this.myChart.getConnectedDataURL(t):this.zr.toDataURL("image/"+t,this.option.backgroundColor&&"rgba(0,0,0,0)"===this.option.backgroundColor.replace(" ","")?"#fff":this.option.backgroundColor);var n=document.createElement("div");n.id="__echarts_download_wrap__",n.style.cssText="position:fixed;z-index:99999;display:block;top:0;left:0;background-color:rgba(33,33,33,0.5);text-align:center;width:100%;height:100%;line-height:"+document.documentElement.clientHeight+"px;";var a=document.createElement("a");a.href=i,a.setAttribute("download",(e.name?e.name:this.option.title&&(this.option.title.text||this.option.title.subtext)?this.option.title.text||this.option.title.subtext:"ECharts")+"."+t),a.innerHTML='<img style="vertical-align:middle" src="'+i+'" title="'+(window.ActiveXObject||"ActiveXObject"in window?"鍙抽敭->鍥剧墖鍙﹀瓨涓�":e.lang?e.lang[0]:"鐐瑰嚮淇濆瓨")+'"/>',n.appendChild(a),document.body.appendChild(n),a=null,n=null,setTimeout(function(){var e=document.getElementById("__echarts_download_wrap__");e&&(e.onclick=function(){var e=document.getElementById("__echarts_download_wrap__");e.onclick=null,e.innerHTML="",document.body.removeChild(e),e=null},e=null)},500)},__onMagicType:function(e){this._resetMark();var t=e.target._name;return 
this._magicType[t]||(this._magicType[t]=!0,t===s.CHART_TYPE_LINE?this._magicType[s.CHART_TYPE_BAR]=!1:t===s.CHART_TYPE_BAR&&(this._magicType[s.CHART_TYPE_LINE]=!1),t===s.CHART_TYPE_PIE?this._magicType[s.CHART_TYPE_FUNNEL]=!1:t===s.CHART_TYPE_FUNNEL&&(this._magicType[s.CHART_TYPE_PIE]=!1),t===s.CHART_TYPE_FORCE?this._magicType[s.CHART_TYPE_CHORD]=!1:t===s.CHART_TYPE_CHORD&&(this._magicType[s.CHART_TYPE_FORCE]=!1),t===c?this._magicType[m]=!1:t===m&&(this._magicType[c]=!1),this.messageCenter.dispatch(s.EVENT.MAGIC_TYPE_CHANGED,e.event,{magicType:this._magicType},this.myChart)),!0},setMagicType:function(e){this._resetMark(),this._magicType=e,!this._isSilence&&this.messageCenter.dispatch(s.EVENT.MAGIC_TYPE_CHANGED,null,{magicType:this._magicType},this.myChart)},__onCustomHandler:function(e){var t=e.target.style.iconType,i=this.option.toolbox.feature[t].onclick;"function"==typeof i&&i.call(this,this.option)},reset:function(e,t){if(t&&this.clear(),this.query(e,"toolbox.show")&&this.query(e,"toolbox.feature.magicType.show")){var i=e.toolbox.feature.magicType.type,n=i.length;for(this._magicMap={};n--;)this._magicMap[i[n]]=!0;n=e.series.length;for(var a,o;n--;)a=e.series[n].type,this._magicMap[a]&&(o=e.xAxis instanceof Array?e.xAxis[e.series[n].xAxisIndex||0]:e.xAxis,o&&"category"===(o.type||"category")&&(o.__boundaryGap=null!=o.boundaryGap?o.boundaryGap:!0),o=e.yAxis instanceof Array?e.yAxis[e.series[n].yAxisIndex||0]:e.yAxis,o&&"category"===o.type&&(o.__boundaryGap=null!=o.boundaryGap?o.boundaryGap:!0),e.series[n].__type=a,e.series[n].__itemStyle=l.clone(e.series[n].itemStyle||{})),(this._magicMap[c]||this._magicMap[m])&&(e.series[n].__stack=e.series[n].stack)}this._magicType=t?{}:this._magicType||{};for(var r in this._magicType)if(this._magicType[r]){this.option=e,this.getMagicOption();break}var s=e.dataZoom;if(s&&s.show){var 
h=null!=s.start&&s.start>=0&&s.start<=100?s.start:0,d=null!=s.end&&s.end>=0&&s.end<=100?s.end:100;h>d&&(h+=d,d=h-d,h-=d),this._zoomQueue=[{start:h,end:d,start2:0,end2:100}]}else this._zoomQueue=[]},getMagicOption:function(){var e,t;if(this._magicType[s.CHART_TYPE_LINE]||this._magicType[s.CHART_TYPE_BAR]){for(var i=this._magicType[s.CHART_TYPE_LINE]?!1:!0,n=0,a=this.option.series.length;a>n;n++)t=this.option.series[n].type,(t==s.CHART_TYPE_LINE||t==s.CHART_TYPE_BAR)&&(e=this.option.xAxis instanceof Array?this.option.xAxis[this.option.series[n].xAxisIndex||0]:this.option.xAxis,e&&"category"===(e.type||"category")&&(e.boundaryGap=i?!0:e.__boundaryGap),e=this.option.yAxis instanceof Array?this.option.yAxis[this.option.series[n].yAxisIndex||0]:this.option.yAxis,e&&"category"===e.type&&(e.boundaryGap=i?!0:e.__boundaryGap));this._defaultMagic(s.CHART_TYPE_LINE,s.CHART_TYPE_BAR)}if(this._defaultMagic(s.CHART_TYPE_CHORD,s.CHART_TYPE_FORCE),this._defaultMagic(s.CHART_TYPE_PIE,s.CHART_TYPE_FUNNEL),this._magicType[c]||this._magicType[m])for(var n=0,a=this.option.series.length;a>n;n++)this._magicType[c]?(this.option.series[n].stack="_ECHARTS_STACK_KENER_2014_",t=c):this._magicType[m]&&(this.option.series[n].stack=null,t=m),this._featureOption[t+"Chart"]&&l.merge(this.option.series[n],this._featureOption[t+"Chart"]||{},!0);return this.option},_defaultMagic:function(e,t){if(this._magicType[e]||this._magicType[t])for(var i=0,n=this.option.series.length;n>i;i++){var 
a=this.option.series[i].type;(a==e||a==t)&&(this.option.series[i].type=this._magicType[e]?e:t,this.option.series[i].itemStyle=l.clone(this.option.series[i].__itemStyle),a=this.option.series[i].type,this._featureOption[a+"Chart"]&&l.merge(this.option.series[i],this._featureOption[a+"Chart"]||{},!0))}},silence:function(e){this._isSilence=e},resize:function(){this._resetMark(),this.clear(),this.option&&this.option.toolbox&&this.option.toolbox.show&&this._buildShape(),this._dataView&&this._dataView.resize()},hideDataView:function(){this._dataView&&this._dataView.hide()},clear:function(e){this.zr&&(this.zr.delShape(this.shapeList),this.shapeList=[],e||(this.zr.delShape(this._markShapeList),this._markShapeList=[]))},onbeforDispose:function(){this._dataView&&(this._dataView.dispose(),this._dataView=null),this._markShapeList=null},refresh:function(e){e&&(this._resetMark(),this._resetZoom(),e.toolbox=this.reformOption(e.toolbox),this.option=e,this.clear(!0),e.toolbox.show&&this._buildShape(),this.hideDataView())}},l.inherits(t,i),e("../component").define("toolbox",t),t}),define("echarts/component",[],function(){var e={},t={};return e.define=function(i,n){return t[i]=n,e},e.get=function(e){return t[e]},e}),define("echarts/component/title",["require","./base","zrender/shape/Text","zrender/shape/Rectangle","../config","zrender/tool/util","zrender/tool/area","zrender/tool/color","../component"],function(e){function t(e,t,n,a,o){i.call(this,e,t,n,a,o),this.refresh(a)}var i=e("./base"),n=e("zrender/shape/Text"),a=e("zrender/shape/Rectangle"),o=e("../config");o.title={zlevel:0,z:6,show:!0,text:"",subtext:"",x:"left",y:"top",backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderWidth:0,padding:5,itemGap:5,textStyle:{fontSize:18,fontWeight:"bolder",color:"#333"},subtextStyle:{color:"#aaa"}};var r=e("zrender/tool/util"),s=e("zrender/tool/area"),l=e("zrender/tool/color");return 
t.prototype={type:o.COMPONENT_TYPE_TITLE,_buildShape:function(){if(this.titleOption.show){this._itemGroupLocation=this._getItemGroupLocation(),this._buildBackground(),this._buildItem();for(var e=0,t=this.shapeList.length;t>e;e++)this.zr.addShape(this.shapeList[e])}},_buildItem:function(){var e=this.titleOption.text,t=this.titleOption.link,i=this.titleOption.target,a=this.titleOption.subtext,o=this.titleOption.sublink,r=this.titleOption.subtarget,s=this.getFont(this.titleOption.textStyle),h=this.getFont(this.titleOption.subtextStyle),d=this._itemGroupLocation.x,c=this._itemGroupLocation.y,m=this._itemGroupLocation.width,p=this._itemGroupLocation.height,u={zlevel:this.getZlevelBase(),z:this.getZBase(),style:{y:c,color:this.titleOption.textStyle.color,text:e,textFont:s,textBaseline:"top"},highlightStyle:{color:l.lift(this.titleOption.textStyle.color,1),brushType:"fill"},hoverable:!1};t&&(u.hoverable=!0,u.clickable=!0,u.onclick=function(){i&&"self"==i?window.location=t:window.open(t)});var V={zlevel:this.getZlevelBase(),z:this.getZBase(),
|
switch(V){case"left":case"center":case"right":break;case"end":V="ltr"==u.direction?"right":"left";break;case"start":V="rtl"==u.direction?"right":"left";break;default:V="left"}switch(this.textBaseline){case"hanging":case"top":d.y=m.size/1.75;break;case"middle":break;default:case null:case"alphabetic":case"ideographic":case"bottom":d.y=-m.size/2.25}switch(V){case"right":l=s,h=.05;break;case"center":l=h=s/2}var y=v(this,t+d.x,n+d.y);c.push('<g_vml_:line from="',-l,' 0" to="',h,' 0.05" ',' coordsize="100 100" coordorigin="0 0"',' filled="',!o,'" stroked="',!!o,'" style="position:absolute;width:1px;height:1px;">'),o?x(this,c):k(this,c,{x:-l,y:0},{x:h,y:m.size});var b=r[0][0].toFixed(3)+","+r[1][0].toFixed(3)+","+r[0][1].toFixed(3)+","+r[1][1].toFixed(3)+",0,0",_=T(y.x/F)+","+T(y.y/F);c.push('<g_vml_:skew on="t" matrix="',b,'" ',' offset="',_,'" origin="',l,' 0" />','<g_vml_:path textpathok="true" />','<g_vml_:textpath on="true" string="',i(e),'" style="v-text-align:',V,";font:",i(p),'" /></g_vml_:line>'),this.element_.insertAdjacentHTML("beforeEnd",c.join(""))},Q.fillText=function(e,t,i,n){this.drawText_(e,t,i,n,!1)},Q.strokeText=function(e,t,i,n){this.drawText_(e,t,i,n,!0)},Q.measureText=function(e){if(!this.textMeasureEl_){var t='<span style="position:absolute;top:-20000px;left:0;padding:0;margin:0;border:none;white-space:pre;"></span>';this.element_.insertAdjacentHTML("beforeEnd",t),this.textMeasureEl_=this.element_.lastChild}var i=this.element_.ownerDocument;this.textMeasureEl_.innerHTML="";try{this.textMeasureEl_.style.font=this.font}catch(n){}return this.textMeasureEl_.appendChild(i.createTextNode(e)),{width:this.textMeasureEl_.offsetWidth}},Q.clip=function(){},Q.arcTo=function(){},Q.createPattern=function(e,t){return new X(e,t)},W.prototype.addColorStop=function(e,t){t=V(t),this.colors_.push({offset:e,color:t.color,alpha:t.alpha})};var q=K.prototype=new 
Error;q.INDEX_SIZE_ERR=1,q.DOMSTRING_SIZE_ERR=2,q.HIERARCHY_REQUEST_ERR=3,q.WRONG_DOCUMENT_ERR=4,q.INVALID_CHARACTER_ERR=5,q.NO_DATA_ALLOWED_ERR=6,q.NO_MODIFICATION_ALLOWED_ERR=7,q.NOT_FOUND_ERR=8,q.NOT_SUPPORTED_ERR=9,q.INUSE_ATTRIBUTE_ERR=10,q.INVALID_STATE_ERR=11,q.SYNTAX_ERR=12,q.INVALID_MODIFICATION_ERR=13,q.NAMESPACE_ERR=14,q.INVALID_ACCESS_ERR=15,q.VALIDATION_ERR=16,q.TYPE_MISMATCH_ERR=17,G_vmlCanvasManager=P,CanvasRenderingContext2D=b,CanvasGradient=W,CanvasPattern=X,DOMException=K}(),G_vmlCanvasManager}),define("zrender/mixin/Eventful",["require"],function(){var e=function(){this._handlers={}};return e.prototype.one=function(e,t,i){var n=this._handlers;return t&&e?(n[e]||(n[e]=[]),n[e].push({h:t,one:!0,ctx:i||this}),this):this},e.prototype.bind=function(e,t,i){var n=this._handlers;return t&&e?(n[e]||(n[e]=[]),n[e].push({h:t,one:!1,ctx:i||this}),this):this},e.prototype.unbind=function(e,t){var i=this._handlers;if(!e)return this._handlers={},this;if(t){if(i[e]){for(var n=[],a=0,o=i[e].length;o>a;a++)i[e][a].h!=t&&n.push(i[e][a]);i[e]=n}i[e]&&0===i[e].length&&delete i[e]}else delete i[e];return this},e.prototype.dispatch=function(e){if(this._handlers[e]){var t=arguments,i=t.length;i>3&&(t=Array.prototype.slice.call(t,1));for(var n=this._handlers[e],a=n.length,o=0;a>o;){switch(i){case 1:n[o].h.call(n[o].ctx);break;case 2:n[o].h.call(n[o].ctx,t[1]);break;case 3:n[o].h.call(n[o].ctx,t[1],t[2]);break;default:n[o].h.apply(n[o].ctx,t)}n[o].one?(n.splice(o,1),a--):o++}}return this},e.prototype.dispatchWithContext=function(e){if(this._handlers[e]){var t=arguments,i=t.length;i>4&&(t=Array.prototype.slice.call(t,1,t.length-1));for(var n=t[t.length-1],a=this._handlers[e],o=a.length,r=0;o>r;){switch(i){case 1:a[r].h.call(n);break;case 2:a[r].h.call(n,t[1]);break;case 3:a[r].h.call(n,t[1],t[2]);break;default:a[r].h.apply(n,t)}a[r].one?(a.splice(r,1),o--):r++}}return this},e}),define("zrender/tool/log",["require","../config"],function(e){var t=e("../config");return 
function(){if(0!==t.debugMode)if(1==t.debugMode)for(var e in arguments)throw new Error(arguments[e]);else if(t.debugMode>1)for(var e in arguments)console.log(arguments[e])}}),define("zrender/tool/guid",[],function(){var e=2311;return function(){return"zrender__"+e++}}),define("zrender/Handler",["require","./config","./tool/env","./tool/event","./tool/util","./tool/vector","./tool/matrix","./mixin/Eventful"],function(e){"use strict";function t(e,t){return function(i,n){return e.call(t,i,n)}}function i(e,t){return function(i,n,a){return e.call(t,i,n,a)}}function n(e){for(var i=p.length;i--;){var n=p[i];e["_"+n+"Handler"]=t(V[n],e)}}function a(e,t,i){if(this._draggingTarget&&this._draggingTarget.id==e.id||e.isSilent())return!1;var n=this._event;if(e.isCover(t,i)){e.hoverable&&this.storage.addHover(e);for(var a=e.parent;a;){if(a.clipShape&&!a.clipShape.isCover(this._mouseX,this._mouseY))return!1;a=a.parent}return this._lastHover!=e&&(this._processOutShape(n),this._processDragLeave(n),this._lastHover=e,this._processDragEnter(n)),this._processOverShape(n),this._processDragOver(n),this._hasfound=1,!0}return!1}var o=e("./config"),r=e("./tool/env"),s=e("./tool/event"),l=e("./tool/util"),h=e("./tool/vector"),d=e("./tool/matrix"),c=o.EVENT,m=e("./mixin/Eventful"),p=["resize","click","dblclick","mousewheel","mousemove","mouseout","mouseup","mousedown","touchstart","touchend","touchmove"],u=function(e){if(window.G_vmlCanvasManager)return!0;e=e||window.event;var t=e.toElement||e.relatedTarget||e.srcElement||e.target;return t&&t.className.match(o.elementClassName)},V={resize:function(e){e=e||window.event,this._lastHover=null,this._isMouseDown=0,this.dispatch(c.RESIZE,e)},click:function(e,t){if(u(e)||t){e=this._zrenderEventFixed(e);var i=this._lastHover;(i&&i.clickable||!i)&&this._clickThreshold<5&&this._dispatchAgency(i,c.CLICK,e),this._mousemoveHandler(e)}},dblclick:function(e,t){if(u(e)||t){e=e||window.event,e=this._zrenderEventFixed(e);var 
i=this._lastHover;(i&&i.clickable||!i)&&this._clickThreshold<5&&this._dispatchAgency(i,c.DBLCLICK,e),this._mousemoveHandler(e)}},mousewheel:function(e,t){if(u(e)||t){e=this._zrenderEventFixed(e);var i=e.wheelDelta||-e.detail,n=i>0?1.1:1/1.1,a=!1,o=this._mouseX,r=this._mouseY;this.painter.eachBuildinLayer(function(t){var i=t.position;if(t.zoomable){t.__zoom=t.__zoom||1;var l=t.__zoom;l*=n,l=Math.max(Math.min(t.maxZoom,l),t.minZoom),n=l/t.__zoom,t.__zoom=l,i[0]-=(o-i[0])*(n-1),i[1]-=(r-i[1])*(n-1),t.scale[0]*=n,t.scale[1]*=n,t.dirty=!0,a=!0,s.stop(e)}}),a&&this.painter.refresh(),this._dispatchAgency(this._lastHover,c.MOUSEWHEEL,e),this._mousemoveHandler(e)}},mousemove:function(e,t){if((u(e)||t)&&!this.painter.isLoading()){e=this._zrenderEventFixed(e),this._lastX=this._mouseX,this._lastY=this._mouseY,this._mouseX=s.getX(e),this._mouseY=s.getY(e);var i=this._mouseX-this._lastX,n=this._mouseY-this._lastY;this._processDragStart(e),this._hasfound=0,this._event=e,this._iterateAndFindHover(),this._hasfound||((!this._draggingTarget||this._lastHover&&this._lastHover!=this._draggingTarget)&&(this._processOutShape(e),this._processDragLeave(e)),this._lastHover=null,this.storage.delHover(),this.painter.clearHover());var a="default";if(this._draggingTarget)this.storage.drift(this._draggingTarget.id,i,n),this._draggingTarget.modSelf(),this.storage.addHover(this._draggingTarget),this._clickThreshold++;else if(this._isMouseDown){var o=!1;this.painter.eachBuildinLayer(function(e){e.panable&&(a="move",e.position[0]+=i,e.position[1]+=n,o=!0,e.dirty=!0)}),o&&this.painter.refresh()}this._draggingTarget||this._hasfound&&this._lastHover.draggable?a="move":this._hasfound&&this._lastHover.clickable&&(a="pointer"),this.root.style.cursor=a,this._dispatchAgency(this._lastHover,c.MOUSEMOVE,e),(this._draggingTarget||this._hasfound||this.storage.hasHoverShape())&&this.painter.refreshHover()}},mouseout:function(e,t){if(u(e)||t){e=this._zrenderEventFixed(e);var 
i=e.toElement||e.relatedTarget;if(i!=this.root)for(;i&&9!=i.nodeType;){if(i==this.root)return void this._mousemoveHandler(e);i=i.parentNode}e.zrenderX=this._lastX,e.zrenderY=this._lastY,this.root.style.cursor="default",this._isMouseDown=0,this._processOutShape(e),this._processDrop(e),this._processDragEnd(e),this.painter.isLoading()||this.painter.refreshHover(),this.dispatch(c.GLOBALOUT,e)}},mousedown:function(e,t){if(u(e)||t){if(this._clickThreshold=0,2==this._lastDownButton)return this._lastDownButton=e.button,void(this._mouseDownTarget=null);this._lastMouseDownMoment=new Date,e=this._zrenderEventFixed(e),this._isMouseDown=1,this._mouseDownTarget=this._lastHover,this._dispatchAgency(this._lastHover,c.MOUSEDOWN,e),this._lastDownButton=e.button}},mouseup:function(e,t){(u(e)||t)&&(e=this._zrenderEventFixed(e),this.root.style.cursor="default",this._isMouseDown=0,this._mouseDownTarget=null,this._dispatchAgency(this._lastHover,c.MOUSEUP,e),this._processDrop(e),this._processDragEnd(e))},touchstart:function(e,t){(u(e)||t)&&(e=this._zrenderEventFixed(e,!0),this._lastTouchMoment=new Date,this._mobileFindFixed(e),this._mousedownHandler(e))},touchmove:function(e,t){(u(e)||t)&&(e=this._zrenderEventFixed(e,!0),this._mousemoveHandler(e),this._isDragging&&s.stop(e))},touchend:function(e,t){if(u(e)||t){e=this._zrenderEventFixed(e,!0),this._mouseupHandler(e);var i=new 
Date;i-this._lastTouchMoment<c.touchClickDelay&&(this._mobileFindFixed(e),this._clickHandler(e),i-this._lastClickMoment<c.touchClickDelay/2&&(this._dblclickHandler(e),this._lastHover&&this._lastHover.clickable&&s.stop(e)),this._lastClickMoment=i),this.painter.clearHover()}}},U=function(e,t,o){m.call(this),this.root=e,this.storage=t,this.painter=o,this._lastX=this._lastY=this._mouseX=this._mouseY=0,this._findHover=i(a,this),this._domHover=o.getDomHover(),n(this),window.addEventListener?(window.addEventListener("resize",this._resizeHandler),r.os.tablet||r.os.phone?(e.addEventListener("touchstart",this._touchstartHandler),e.addEventListener("touchmove",this._touchmoveHandler),e.addEventListener("touchend",this._touchendHandler)):(e.addEventListener("click",this._clickHandler),e.addEventListener("dblclick",this._dblclickHandler),e.addEventListener("mousewheel",this._mousewheelHandler),e.addEventListener("mousemove",this._mousemoveHandler),e.addEventListener("mousedown",this._mousedownHandler),e.addEventListener("mouseup",this._mouseupHandler)),e.addEventListener("DOMMouseScroll",this._mousewheelHandler),e.addEventListener("mouseout",this._mouseoutHandler)):(window.attachEvent("onresize",this._resizeHandler),e.attachEvent("onclick",this._clickHandler),e.ondblclick=this._dblclickHandler,e.attachEvent("onmousewheel",this._mousewheelHandler),e.attachEvent("onmousemove",this._mousemoveHandler),e.attachEvent("onmouseout",this._mouseoutHandler),e.attachEvent("onmousedown",this._mousedownHandler),e.attachEvent("onmouseup",this._mouseupHandler))};U.prototype.on=function(e,t,i){return this.bind(e,t,i),this},U.prototype.un=function(e,t){return this.unbind(e,t),this},U.prototype.trigger=function(e,t){switch(e){case c.RESIZE:case c.CLICK:case c.DBLCLICK:case c.MOUSEWHEEL:case c.MOUSEMOVE:case c.MOUSEDOWN:case c.MOUSEUP:case c.MOUSEOUT:this["_"+e+"Handler"](t,!0)}},U.prototype.dispose=function(){var 
e=this.root;window.removeEventListener?(window.removeEventListener("resize",this._resizeHandler),r.os.tablet||r.os.phone?(e.removeEventListener("touchstart",this._touchstartHandler),e.removeEventListener("touchmove",this._touchmoveHandler),e.removeEventListener("touchend",this._touchendHandler)):(e.removeEventListener("click",this._clickHandler),e.removeEventListener("dblclick",this._dblclickHandler),e.removeEventListener("mousewheel",this._mousewheelHandler),e.removeEventListener("mousemove",this._mousemoveHandler),e.removeEventListener("mousedown",this._mousedownHandler),e.removeEventListener("mouseup",this._mouseupHandler)),e.removeEventListener("DOMMouseScroll",this._mousewheelHandler),e.removeEventListener("mouseout",this._mouseoutHandler)):(window.detachEvent("onresize",this._resizeHandler),e.detachEvent("onclick",this._clickHandler),e.detachEvent("dblclick",this._dblclickHandler),e.detachEvent("onmousewheel",this._mousewheelHandler),e.detachEvent("onmousemove",this._mousemoveHandler),e.detachEvent("onmouseout",this._mouseoutHandler),e.detachEvent("onmousedown",this._mousedownHandler),e.detachEvent("onmouseup",this._mouseupHandler)),this.root=this._domHover=this.storage=this.painter=null,this.un()},U.prototype._processDragStart=function(e){var t=this._lastHover;if(this._isMouseDown&&t&&t.draggable&&!this._draggingTarget&&this._mouseDownTarget==t){if(t.dragEnableTime&&new Date-this._lastMouseDownMoment<t.dragEnableTime)return;var 
i=t;this._draggingTarget=i,this._isDragging=1,i.invisible=!0,this.storage.mod(i.id),this._dispatchAgency(i,c.DRAGSTART,e),this.painter.refresh()}},U.prototype._processDragEnter=function(e){this._draggingTarget&&this._dispatchAgency(this._lastHover,c.DRAGENTER,e,this._draggingTarget)},U.prototype._processDragOver=function(e){this._draggingTarget&&this._dispatchAgency(this._lastHover,c.DRAGOVER,e,this._draggingTarget)},U.prototype._processDragLeave=function(e){this._draggingTarget&&this._dispatchAgency(this._lastHover,c.DRAGLEAVE,e,this._draggingTarget)},U.prototype._processDrop=function(e){this._draggingTarget&&(this._draggingTarget.invisible=!1,this.storage.mod(this._draggingTarget.id),this.painter.refresh(),this._dispatchAgency(this._lastHover,c.DROP,e,this._draggingTarget))},U.prototype._processDragEnd=function(e){this._draggingTarget&&(this._dispatchAgency(this._draggingTarget,c.DRAGEND,e),this._lastHover=null),this._isDragging=0,this._draggingTarget=null},U.prototype._processOverShape=function(e){this._dispatchAgency(this._lastHover,c.MOUSEOVER,e)},U.prototype._processOutShape=function(e){this._dispatchAgency(this._lastHover,c.MOUSEOUT,e)},U.prototype._dispatchAgency=function(e,t,i,n){var a="on"+t,o={type:t,event:i,target:e,cancelBubble:!1},r=e;for(n&&(o.dragged=n);r&&(r[a]&&(o.cancelBubble=r[a](o)),r.dispatch(t,o),r=r.parent,!o.cancelBubble););if(e)o.cancelBubble||this.dispatch(t,o);else if(!n){var s={type:t,event:i};this.dispatch(t,s),this.painter.eachOtherLayer(function(e){"function"==typeof e[a]&&e[a](s),e.dispatch&&e.dispatch(t,s)})}},U.prototype._iterateAndFindHover=function(){var e=d.create();return function(){for(var t,i,n=this.storage.getShapeList(),a=[0,0],o=n.length-1;o>=0;o--){var r=n[o];if(t!==r.zlevel&&(i=this.painter.getLayer(r.zlevel,i),a[0]=this._mouseX,a[1]=this._mouseY,i.needTransform&&(d.invert(e,i.transform),h.applyTransform(a,a,e))),this._findHover(r,a[0],a[1]))break}}}();var g=[{x:10},{x:-20},{x:10,y:10},{y:-20}];return 
U.prototype._mobileFindFixed=function(e){this._lastHover=null,this._mouseX=e.zrenderX,this._mouseY=e.zrenderY,this._event=e,this._iterateAndFindHover();for(var t=0;!this._lastHover&&t<g.length;t++){var i=g[t];i.x&&(this._mouseX+=i.x),i.y&&(this._mouseY+=i.y),this._iterateAndFindHover()}this._lastHover&&(e.zrenderX=this._mouseX,e.zrenderY=this._mouseY)},U.prototype._zrenderEventFixed=function(e,t){if(e.zrenderFixed)return e;if(t){var i="touchend"!=e.type?e.targetTouches[0]:e.changedTouches[0];if(i){var n=this.painter._domRoot.getBoundingClientRect();e.zrenderX=i.clientX-n.left,e.zrenderY=i.clientY-n.top}}else{e=e||window.event;var a=e.toElement||e.relatedTarget||e.srcElement||e.target;a&&a!=this._domHover&&(e.zrenderX=("undefined"!=typeof e.offsetX?e.offsetX:e.layerX)+a.offsetLeft,e.zrenderY=("undefined"!=typeof e.offsetY?e.offsetY:e.layerY)+a.offsetTop)}return e.zrenderFixed=1,e},l.merge(U.prototype,m.prototype,!0),U}),define("zrender/Painter",["require","./config","./tool/util","./tool/log","./loadingEffect/Base","./Layer","./shape/Image"],function(e){"use strict";function t(){return!1}function i(){}function n(e){return e?e.isBuildin?!0:"function"!=typeof e.resize||"function"!=typeof e.refresh?!1:!0:!1}var a=e("./config"),o=e("./tool/util"),r=e("./tool/log"),s=e("./loadingEffect/Base"),l=e("./Layer"),h=function(e,i){this.root=e,e.style["-webkit-tap-highlight-color"]="transparent",e.style["-webkit-user-select"]="none",e.style["user-select"]="none",e.style["-webkit-touch-callout"]="none",this.storage=i,e.innerHTML="",this._width=this._getWidth(),this._height=this._getHeight();var n=document.createElement("div");this._domRoot=n,n.style.position="relative",n.style.overflow="hidden",n.style.width=this._width+"px",n.style.height=this._height+"px",e.appendChild(n),this._layers={},this._zlevelList=[],this._layerConfig={},this._loadingEffect=new 
s({}),this.shapeToImage=this._createShapeToImageProcessor(),this._bgDom=document.createElement("div"),this._bgDom.style.cssText=["position:absolute;left:0px;top:0px;width:",this._width,"px;height:",this._height+"px;","-webkit-user-select:none;user-select;none;","-webkit-touch-callout:none;"].join(""),this._bgDom.setAttribute("data-zr-dom-id","bg"),this._bgDom.className=a.elementClassName,n.appendChild(this._bgDom),this._bgDom.onselectstart=t;var o=new l("_zrender_hover_",this);this._layers.hover=o,n.appendChild(o.dom),o.initContext(),o.dom.onselectstart=t,o.dom.style["-webkit-user-select"]="none",o.dom.style["user-select"]="none",o.dom.style["-webkit-touch-callout"]="none",this.refreshNextFrame=null};return h.prototype.render=function(e){return this.isLoading()&&this.hideLoading(),this.refresh(e,!0),this},h.prototype.refresh=function(e,t){var i=this.storage.getShapeList(!0);this._paintList(i,t);for(var n=0;n<this._zlevelList.length;n++){var a=this._zlevelList[n],o=this._layers[a];!o.isBuildin&&o.refresh&&o.refresh()}return"function"==typeof e&&e(),this},h.prototype._preProcessLayer=function(e){e.unusedCount++,e.updateTransform()},h.prototype._postProcessLayer=function(e){e.dirty=!1,1==e.unusedCount&&e.clear()},h.prototype._paintList=function(e,t){"undefined"==typeof t&&(t=!1),this._updateLayerStatus(e);var i,n,o;this.eachBuildinLayer(this._preProcessLayer);for(var s=0,l=e.length;l>s;s++){var h=e[s];if(n!==h.zlevel&&(i&&(i.needTransform&&o.restore(),o.flush&&o.flush()),n=h.zlevel,i=this.getLayer(n),i.isBuildin||r("ZLevel "+n+" has been used by unkown layer "+i.id),o=i.ctx,i.unusedCount=0,(i.dirty||t)&&i.clear(),i.needTransform&&(o.save(),i.setTransform(o))),(i.dirty||t)&&!h.invisible&&(!h.onbrush||h.onbrush&&!h.onbrush(o,!1)))if(a.catchBrushException)try{h.brush(o,!1,this.refreshNextFrame)}catch(d){r(d,"brush error of "+h.type,h)}else 
h.brush(o,!1,this.refreshNextFrame);h.__dirty=!1}i&&(i.needTransform&&o.restore(),o.flush&&o.flush()),this.eachBuildinLayer(this._postProcessLayer)},h.prototype.getLayer=function(e){var t=this._layers[e];return t||(t=new l(e,this),t.isBuildin=!0,this._layerConfig[e]&&o.merge(t,this._layerConfig[e],!0),t.updateTransform(),this.insertLayer(e,t),t.initContext()),t},h.prototype.insertLayer=function(e,t){if(this._layers[e])return void r("ZLevel "+e+" has been used already");if(!n(t))return void r("Layer of zlevel "+e+" is not valid");var i=this._zlevelList.length,a=null,o=-1;if(i>0&&e>this._zlevelList[0]){for(o=0;i-1>o&&!(this._zlevelList[o]<e&&this._zlevelList[o+1]>e);o++);a=this._layers[this._zlevelList[o]]}this._zlevelList.splice(o+1,0,e);var s=a?a.dom:this._bgDom;s.nextSibling?s.parentNode.insertBefore(t.dom,s.nextSibling):s.parentNode.appendChild(t.dom),this._layers[e]=t},h.prototype.eachLayer=function(e,t){for(var i=0;i<this._zlevelList.length;i++){var n=this._zlevelList[i];e.call(t,this._layers[n],n)}},h.prototype.eachBuildinLayer=function(e,t){for(var i=0;i<this._zlevelList.length;i++){var n=this._zlevelList[i],a=this._layers[n];a.isBuildin&&e.call(t,a,n)}},h.prototype.eachOtherLayer=function(e,t){for(var i=0;i<this._zlevelList.length;i++){var n=this._zlevelList[i],a=this._layers[n];a.isBuildin||e.call(t,a,n)}},h.prototype.getLayers=function(){return this._layers},h.prototype._updateLayerStatus=function(e){var t=this._layers,i={};this.eachBuildinLayer(function(e,t){i[t]=e.elCount,e.elCount=0});for(var n=0,a=e.length;a>n;n++){var o=e[n],r=o.zlevel,s=t[r];if(s){if(s.elCount++,s.dirty)continue;s.dirty=o.__dirty}}this.eachBuildinLayer(function(e,t){i[t]!==e.elCount&&(e.dirty=!0)})},h.prototype.refreshShapes=function(e,t){for(var i=0,n=e.length;n>i;i++){var a=e[i];a.modSelf()}return this.refresh(t),this},h.prototype.setLoadingEffect=function(e){return this._loadingEffect=e,this},h.prototype.clear=function(){return 
this.eachBuildinLayer(this._clearLayer),this},h.prototype._clearLayer=function(e){e.clear()},h.prototype.modLayer=function(e,t){if(t){this._layerConfig[e]?o.merge(this._layerConfig[e],t,!0):this._layerConfig[e]=t;var i=this._layers[e];i&&o.merge(i,this._layerConfig[e],!0)}},h.prototype.delLayer=function(e){var t=this._layers[e];t&&(this.modLayer(e,{position:t.position,rotation:t.rotation,scale:t.scale}),t.dom.parentNode.removeChild(t.dom),delete this._layers[e],this._zlevelList.splice(o.indexOf(this._zlevelList,e),1))},h.prototype.refreshHover=function(){this.clearHover();for(var e=this.storage.getHoverShapes(!0),t=0,i=e.length;i>t;t++)this._brushHover(e[t]);var n=this._layers.hover.ctx;return n.flush&&n.flush(),this.storage.delHover(),this},h.prototype.clearHover=function(){var e=this._layers.hover;return e&&e.clear(),this},h.prototype.showLoading=function(e){return this._loadingEffect&&this._loadingEffect.stop(),e&&this.setLoadingEffect(e),this._loadingEffect.start(this),this.loading=!0,this},h.prototype.hideLoading=function(){return this._loadingEffect.stop(),this.clearHover(),this.loading=!1,this},h.prototype.isLoading=function(){return this.loading},h.prototype.resize=function(){var e=this._domRoot;e.style.display="none";var t=this._getWidth(),i=this._getHeight();if(e.style.display="",this._width!=t||i!=this._height){this._width=t,this._height=i,e.style.width=t+"px",e.style.height=i+"px";for(var n in this._layers)this._layers[n].resize(t,i);this.refresh(null,!0)}return this},h.prototype.clearLayer=function(e){var t=this._layers[e];t&&t.clear()},h.prototype.dispose=function(){this.isLoading()&&this.hideLoading(),this.root.innerHTML="",this.root=this.storage=this._domRoot=this._layers=null},h.prototype.getDomHover=function(){return this._layers.hover.dom},h.prototype.toDataURL=function(e,t,i){if(window.G_vmlCanvasManager)return null;var n=new l("image",this);this._bgDom.appendChild(n.dom),n.initContext();var o=n.ctx;n.clearColor=t||"#fff",n.clear();var 
s=this;this.storage.iterShape(function(e){if(!e.invisible&&(!e.onbrush||e.onbrush&&!e.onbrush(o,!1)))if(a.catchBrushException)try{e.brush(o,!1,s.refreshNextFrame)}catch(t){r(t,"brush error of "+e.type,e)}else e.brush(o,!1,s.refreshNextFrame)},{normal:"up",update:!0});var h=n.dom.toDataURL(e,i);return o=null,this._bgDom.removeChild(n.dom),h},h.prototype.getWidth=function(){return this._width},h.prototype.getHeight=function(){return this._height},h.prototype._getWidth=function(){var e=this.root,t=e.currentStyle||document.defaultView.getComputedStyle(e);return((e.clientWidth||parseInt(t.width,10))-parseInt(t.paddingLeft,10)-parseInt(t.paddingRight,10)).toFixed(0)-0},h.prototype._getHeight=function(){var e=this.root,t=e.currentStyle||document.defaultView.getComputedStyle(e);return((e.clientHeight||parseInt(t.height,10))-parseInt(t.paddingTop,10)-parseInt(t.paddingBottom,10)).toFixed(0)-0},h.prototype._brushHover=function(e){var t=this._layers.hover.ctx;if(!e.onbrush||e.onbrush&&!e.onbrush(t,!0)){var i=this.getLayer(e.zlevel);if(i.needTransform&&(t.save(),i.setTransform(t)),a.catchBrushException)try{e.brush(t,!0,this.refreshNextFrame)}catch(n){r(n,"hoverBrush error of "+e.type,e)}else e.brush(t,!0,this.refreshNextFrame);i.needTransform&&t.restore()}},h.prototype._shapeToImage=function(t,i,n,a,o){var r=document.createElement("canvas"),s=r.getContext("2d");r.style.width=n+"px",r.style.height=a+"px",r.setAttribute("width",n*o),r.setAttribute("height",a*o),s.clearRect(0,0,n*o,a*o);var l={position:i.position,rotation:i.rotation,scale:i.scale};i.position=[0,0,0],i.rotation=0,i.scale=[1,1],i&&i.brush(s,!1);var h=e("./shape/Image"),d=new h({id:t,style:{x:0,y:0,image:r}});return null!=l.position&&(d.position=i.position=l.position),null!=l.rotation&&(d.rotation=i.rotation=l.rotation),null!=l.scale&&(d.scale=i.scale=l.scale),d},h.prototype._createShapeToImageProcessor=function(){if(window.G_vmlCanvasManager)return i;var e=this;return function(t,i,n,o){return 
e._shapeToImage(t,i,n,o,a.devicePixelRatio)}},h}),define("zrender/Storage",["require","./tool/util","./Group"],function(e){"use strict";function t(e,t){return e.zlevel==t.zlevel?e.z==t.z?e.__renderidx-t.__renderidx:e.z-t.z:e.zlevel-t.zlevel}var i=e("./tool/util"),n=e("./Group"),a={hover:!1,normal:"down",update:!1},o=function(){this._elements={},this._hoverElements=[],this._roots=[],this._shapeList=[],this._shapeListOffset=0};return o.prototype.iterShape=function(e,t){if(t||(t=a),t.hover)for(var i=0,n=this._hoverElements.length;n>i;i++){var o=this._hoverElements[i];if(o.updateTransform(),e(o))return this}switch(t.update&&this.updateShapeList(),t.normal){case"down":for(var n=this._shapeList.length;n--;)if(e(this._shapeList[n]))return this;break;default:for(var i=0,n=this._shapeList.length;n>i;i++)if(e(this._shapeList[i]))return this}return this},o.prototype.getHoverShapes=function(e){for(var i=[],n=0,a=this._hoverElements.length;a>n;n++){i.push(this._hoverElements[n]);var o=this._hoverElements[n].hoverConnect;if(o){var r;o=o instanceof Array?o:[o];for(var s=0,l=o.length;l>s;s++)r=o[s].id?o[s]:this.get(o[s]),r&&i.push(r)}}if(i.sort(t),e)for(var n=0,a=i.length;a>n;n++)i[n].updateTransform();return i},o.prototype.getShapeList=function(e){return e&&this.updateShapeList(),this._shapeList},o.prototype.updateShapeList=function(){this._shapeListOffset=0;for(var e=0,i=this._roots.length;i>e;e++){var n=this._roots[e];this._updateAndAddShape(n)}this._shapeList.length=this._shapeListOffset;for(var e=0,i=this._shapeList.length;i>e;e++)this._shapeList[e].__renderidx=e;this._shapeList.sort(t)},o.prototype._updateAndAddShape=function(e,t){if(!e.ignore)if(e.updateTransform(),e.clipShape&&(e.clipShape.parent=e,e.clipShape.updateTransform(),t?(t=t.slice(),t.push(e.clipShape)):t=[e.clipShape]),"group"==e.type){for(var i=0;i<e._children.length;i++){var n=e._children[i];n.__dirty=e.__dirty||n.__dirty,this._updateAndAddShape(n,t)}e.__dirty=!1}else 
e.__clipShapes=t,this._shapeList[this._shapeListOffset++]=e},o.prototype.mod=function(e,t){if("string"==typeof e&&(e=this._elements[e]),e&&(e.modSelf(),t))if(t.parent||t._storage||t.__clipShapes){var n={};for(var a in t)"parent"!==a&&"_storage"!==a&&"__clipShapes"!==a&&t.hasOwnProperty(a)&&(n[a]=t[a]);i.merge(e,n,!0)}else i.merge(e,t,!0);return this},o.prototype.drift=function(e,t,i){var n=this._elements[e];return n&&(n.needTransform=!0,"horizontal"===n.draggable?i=0:"vertical"===n.draggable&&(t=0),(!n.ondrift||n.ondrift&&!n.ondrift(t,i))&&n.drift(t,i)),this},o.prototype.addHover=function(e){return e.updateNeedTransform(),this._hoverElements.push(e),this},o.prototype.delHover=function(){return this._hoverElements=[],this},o.prototype.hasHoverShape=function(){return this._hoverElements.length>0},o.prototype.addRoot=function(e){this._elements[e.id]||(e instanceof n&&e.addChildrenToStorage(this),this.addToMap(e),this._roots.push(e))},o.prototype.delRoot=function(e){if("undefined"==typeof e){for(var t=0;t<this._roots.length;t++){var a=this._roots[t];a instanceof n&&a.delChildrenFromStorage(this)}return this._elements={},this._hoverElements=[],this._roots=[],this._shapeList=[],void(this._shapeListOffset=0)}if(e instanceof Array)for(var t=0,o=e.length;o>t;t++)this.delRoot(e[t]);else{var r;r="string"==typeof e?this._elements[e]:e;var s=i.indexOf(this._roots,r);s>=0&&(this.delFromMap(r.id),this._roots.splice(s,1),r instanceof n&&r.delChildrenFromStorage(this))}},o.prototype.addToMap=function(e){return e instanceof n&&(e._storage=this),e.modSelf(),this._elements[e.id]=e,this},o.prototype.get=function(e){return this._elements[e]},o.prototype.delFromMap=function(e){var t=this._elements[e];return t&&(delete this._elements[e],t instanceof 
n&&(t._storage=null)),this},o.prototype.dispose=function(){this._elements=this._renderList=this._roots=this._hoverElements=null},o}),define("zrender/animation/Animation",["require","./Clip","../tool/color","../tool/util","../tool/event"],function(e){"use strict";function t(e,t){return e[t]}function i(e,t,i){e[t]=i}function n(e,t,i){return(t-e)*i+e}function a(e,t,i,a,o){var r=e.length;if(1==o)for(var s=0;r>s;s++)a[s]=n(e[s],t[s],i);else for(var l=e[0].length,s=0;r>s;s++)for(var h=0;l>h;h++)a[s][h]=n(e[s][h],t[s][h],i)}function o(e){switch(typeof e){case"undefined":case"string":return!1}return"undefined"!=typeof e.length}function r(e,t,i,n,a,o,r,l,h){var d=e.length;if(1==h)for(var c=0;d>c;c++)l[c]=s(e[c],t[c],i[c],n[c],a,o,r);else for(var m=e[0].length,c=0;d>c;c++)for(var p=0;m>p;p++)l[c][p]=s(e[c][p],t[c][p],i[c][p],n[c][p],a,o,r)}function s(e,t,i,n,a,o,r){var s=.5*(i-e),l=.5*(n-t);return(2*(t-i)+s+l)*r+(-3*(t-i)-2*s-l)*o+s*a+t}function l(e){if(o(e)){var t=e.length;if(o(e[0])){for(var i=[],n=0;t>n;n++)i.push(V.call(e[n]));return i}return V.call(e)}return e}function h(e){return e[0]=Math.floor(e[0]),e[1]=Math.floor(e[1]),e[2]=Math.floor(e[2]),"rgba("+e.join(",")+")"}var d=e("./Clip"),c=e("../tool/color"),m=e("../tool/util"),p=e("../tool/event").Dispatcher,u=window.requestAnimationFrame||window.msRequestAnimationFrame||window.mozRequestAnimationFrame||window.webkitRequestAnimationFrame||function(e){setTimeout(e,16)},V=Array.prototype.slice,U=function(e){e=e||{},this.stage=e.stage||{},this.onframe=e.onframe||function(){},this._clips=[],this._running=!1,this._time=0,p.call(this)};U.prototype={add:function(e){this._clips.push(e)},remove:function(e){if(e.__inStep)e.__needsRemove=!0;else{var t=m.indexOf(this._clips,e);t>=0&&this._clips.splice(t,1)}},_update:function(){for(var e=(new Date).getTime(),t=e-this._time,i=this._clips,n=i.length,a=[],o=[],r=0;n>r;r++){var s=i[r];s.__inStep=!0;var l=s.step(e);s.__inStep=!1,l&&(a.push(l),o.push(s))}for(var 
r=0;n>r;)i[r].__needsRemove?(i[r]=i[n-1],i.pop(),n--):r++;n=a.length;for(var r=0;n>r;r++)o[r].fire(a[r]);this._time=e,this.onframe(t),this.dispatch("frame",t),this.stage.update&&this.stage.update()},start:function(){function e(){t._running&&(u(e),t._update())}var t=this;this._running=!0,this._time=(new Date).getTime(),u(e)},stop:function(){this._running=!1},clear:function(){this._clips=[]},animate:function(e,t){t=t||{};var i=new g(e,t.loop,t.getter,t.setter);return i.animation=this,i},constructor:U},m.merge(U.prototype,p.prototype,!0);var g=function(e,n,a,o){this._tracks={},this._target=e,this._loop=n||!1,this._getter=a||t,this._setter=o||i,this._clipCount=0,this._delay=0,this._doneList=[],this._onframeList=[],this._clipList=[]};return g.prototype={when:function(e,t){for(var i in t)this._tracks[i]||(this._tracks[i]=[],0!==e&&this._tracks[i].push({time:0,value:l(this._getter(this._target,i))})),this._tracks[i].push({time:parseInt(e,10),value:t[i]});return this},during:function(e){return this._onframeList.push(e),this},start:function(e){var t=this,i=this._setter,l=this._getter,m="spline"===e,p=function(){if(t._clipCount--,0===t._clipCount){t._tracks={};for(var e=t._doneList.length,i=0;e>i;i++)t._doneList[i].call(t)}},u=function(u,V){var U=u.length;if(U){var g=u[0].value,f=o(g),y=!1,b=f&&o(g[0])?2:1;u.sort(function(e,t){return e.time-t.time});var _;if(U){_=u[U-1].time;for(var x=[],k=[],v=0;U>v;v++){x.push(u[v].time/_);var L=u[v].value;"string"==typeof L&&(L=c.toArray(L),0===L.length&&(L[0]=L[1]=L[2]=0,L[3]=1),y=!0),k.push(L)}var w,v,W,X,I,S,K,C=0,T=0;if(y)var E=[0,0,0,0];var z=function(e,o){if(T>o){for(w=Math.min(C+1,U-1),v=w;v>=0&&!(x[v]<=o);v--);v=Math.min(v,U-2)}else{for(v=C;U>v&&!(x[v]>o);v++);v=Math.min(v-1,U-2)}C=v,T=o;var d=x[v+1]-x[v];if(0!==d){if(W=(o-x[v])/d,m)if(I=k[v],X=k[0===v?v:v-1],S=k[v>U-2?U-1:v+1],K=k[v>U-3?U-1:v+2],f)r(X,I,S,K,W,W*W,W*W*W,l(e,V),b);else{var c;y?(c=r(X,I,S,K,W,W*W,W*W*W,E,1),c=h(E)):c=s(X,I,S,K,W,W*W,W*W*W),i(e,V,c)}else 
if(f)a(k[v],k[v+1],W,l(e,V),b);else{var c;y?(a(k[v],k[v+1],W,E,1),c=h(E)):c=n(k[v],k[v+1],W),i(e,V,c)}for(v=0;v<t._onframeList.length;v++)t._onframeList[v](e,o);
/* NOTE(review): minified vendor bundle (zrender/echarts 2.x) — do not hand-edit; regenerate from upstream source. This line: tail of the preceding animation Animator definition (its start is outside this chunk), then the "zrender/tool/vector" AMD module — 2D vector ops on length-2 arrays, backed by Float32Array when available. */}},A=new d({target:t._target,life:_,loop:t._loop,delay:t._delay,onframe:z,ondestroy:p});e&&"spline"!==e&&(A.easing=e),t._clipList.push(A),t._clipCount++,t.animation.add(A)}}};for(var V in this._tracks)u(this._tracks[V],V);return this},stop:function(){for(var e=0;e<this._clipList.length;e++){var t=this._clipList[e];this.animation.remove(t)}this._clipList=[]},delay:function(e){return this._delay=e,this},done:function(e){return e&&this._doneList.push(e),this}},U}),define("zrender/tool/vector",[],function(){var e="undefined"==typeof Float32Array?Array:Float32Array,t={create:function(t,i){var n=new e(2);return n[0]=t||0,n[1]=i||0,n},copy:function(e,t){return e[0]=t[0],e[1]=t[1],e},clone:function(t){var i=new e(2);return i[0]=t[0],i[1]=t[1],i},set:function(e,t,i){return e[0]=t,e[1]=i,e},add:function(e,t,i){return e[0]=t[0]+i[0],e[1]=t[1]+i[1],e},scaleAndAdd:function(e,t,i,n){return e[0]=t[0]+i[0]*n,e[1]=t[1]+i[1]*n,e},sub:function(e,t,i){return e[0]=t[0]-i[0],e[1]=t[1]-i[1],e},len:function(e){return Math.sqrt(this.lenSquare(e))},lenSquare:function(e){return e[0]*e[0]+e[1]*e[1]},mul:function(e,t,i){return e[0]=t[0]*i[0],e[1]=t[1]*i[1],e},div:function(e,t,i){return e[0]=t[0]/i[0],e[1]=t[1]/i[1],e},dot:function(e,t){return e[0]*t[0]+e[1]*t[1]},scale:function(e,t,i){return e[0]=t[0]*i,e[1]=t[1]*i,e},normalize:function(e,i){var n=t.len(i);return 0===n?(e[0]=0,e[1]=0):(e[0]=i[0]/n,e[1]=i[1]/n),e},distance:function(e,t){return Math.sqrt((e[0]-t[0])*(e[0]-t[0])+(e[1]-t[1])*(e[1]-t[1]))},distanceSquare:function(e,t){return(e[0]-t[0])*(e[0]-t[0])+(e[1]-t[1])*(e[1]-t[1])},negate:function(e,t){return e[0]=-t[0],e[1]=-t[1],e},lerp:function(e,t,i,n){return e[0]=t[0]+n*(i[0]-t[0]),e[1]=t[1]+n*(i[1]-t[1]),e},applyTransform:function(e,t,i){var n=t[0],a=t[1];return e[0]=i[0]*n+i[2]*a+i[4],e[1]=i[1]*n+i[3]*a+i[5],e},min:function(e,t,i){return e[0]=Math.min(t[0],i[0]),e[1]=Math.min(t[1],i[1]),e},max:function(e,t,i){return e[0]=Math.max(t[0],i[0]),e[1]=Math.max(t[1],i[1]),e}};return 
/* vector module aliases + export; then "zrender/tool/matrix": 2x3 affine matrix ops (create/identity/copy/mul/translate/rotate/scale/invert — invert returns null for a singular matrix); then the start of "zrender/loadingEffect/Base" (default loading overlay: "Loading..." text + background rect shapes). */t.length=t.len,t.lengthSquare=t.lenSquare,t.dist=t.distance,t.distSquare=t.distanceSquare,t}),define("zrender/tool/matrix",[],function(){var e="undefined"==typeof Float32Array?Array:Float32Array,t={create:function(){var i=new e(6);return t.identity(i),i},identity:function(e){return e[0]=1,e[1]=0,e[2]=0,e[3]=1,e[4]=0,e[5]=0,e},copy:function(e,t){return e[0]=t[0],e[1]=t[1],e[2]=t[2],e[3]=t[3],e[4]=t[4],e[5]=t[5],e},mul:function(e,t,i){return e[0]=t[0]*i[0]+t[2]*i[1],e[1]=t[1]*i[0]+t[3]*i[1],e[2]=t[0]*i[2]+t[2]*i[3],e[3]=t[1]*i[2]+t[3]*i[3],e[4]=t[0]*i[4]+t[2]*i[5]+t[4],e[5]=t[1]*i[4]+t[3]*i[5]+t[5],e},translate:function(e,t,i){return e[0]=t[0],e[1]=t[1],e[2]=t[2],e[3]=t[3],e[4]=t[4]+i[0],e[5]=t[5]+i[1],e},rotate:function(e,t,i){var n=t[0],a=t[2],o=t[4],r=t[1],s=t[3],l=t[5],h=Math.sin(i),d=Math.cos(i);return e[0]=n*d+r*h,e[1]=-n*h+r*d,e[2]=a*d+s*h,e[3]=-a*h+d*s,e[4]=d*o+h*l,e[5]=d*l-h*o,e},scale:function(e,t,i){var n=i[0],a=i[1];return e[0]=t[0]*n,e[1]=t[1]*a,e[2]=t[2]*n,e[3]=t[3]*a,e[4]=t[4]*n,e[5]=t[5]*a,e},invert:function(e,t){var i=t[0],n=t[2],a=t[4],o=t[1],r=t[3],s=t[5],l=i*r-o*n;return l?(l=1/l,e[0]=r*l,e[1]=-o*l,e[2]=-n*l,e[3]=i*l,e[4]=(n*s-r*a)*l,e[5]=(o*a-i*s)*l,e):null}};return t}),define("zrender/loadingEffect/Base",["require","../tool/util","../shape/Text","../shape/Rectangle"],function(e){function t(e){this.setOptions(e)}var i=e("../tool/util"),n=e("../shape/Text"),a=e("../shape/Rectangle"),o="Loading...",r="normal 16px Arial";return t.prototype.createTextShape=function(e){return new n({highlightStyle:i.merge({x:this.canvasWidth/2,y:this.canvasHeight/2,text:o,textAlign:"center",textBaseline:"middle",textFont:r,color:"#333",brushType:"fill"},e,!0)})},t.prototype.createBackgroundShape=function(e){return new a({highlightStyle:{x:0,y:0,width:this.canvasWidth,height:this.canvasHeight,brushType:"fill",color:e}})},t.prototype.start=function(e){function t(t){e.storage.addHover(t)}function 
/* Remainder of loadingEffect/Base: start/stop wire a timer via _start (base impl is an inert 10s setInterval — subclasses presumably override; TODO confirm), adjust clamps to a range, getLocation resolves center/left/right/top/bottom placement. Then "zrender/Layer": creates an absolutely-positioned <canvas> sized by devicePixelRatio, with optional excanvas (G_vmlCanvasManager) support for old IE. */i(){e.refreshHover()}this.canvasWidth=e._width,this.canvasHeight=e._height,this.loadingTimer=this._start(t,i)},t.prototype._start=function(){return setInterval(function(){},1e4)},t.prototype.stop=function(){clearInterval(this.loadingTimer)},t.prototype.setOptions=function(e){this.options=e||{}},t.prototype.adjust=function(e,t){return e<=t[0]?e=t[0]:e>=t[1]&&(e=t[1]),e},t.prototype.getLocation=function(e,t,i){var n=null!=e.x?e.x:"center";switch(n){case"center":n=Math.floor((this.canvasWidth-t)/2);break;case"left":n=0;break;case"right":n=this.canvasWidth-t}var a=null!=e.y?e.y:"center";switch(a){case"center":a=Math.floor((this.canvasHeight-i)/2);break;case"top":a=0;break;case"bottom":a=this.canvasHeight-i}return{x:n,y:a,width:t,height:i}},t}),define("zrender/Layer",["require","./mixin/Transformable","./tool/util","./config"],function(e){function t(){return!1}function i(e,t,i){var n=document.createElement(t),a=i.getWidth(),o=i.getHeight();return n.style.position="absolute",n.style.left=0,n.style.top=0,n.style.width=a+"px",n.style.height=o+"px",n.width=a*r.devicePixelRatio,n.height=o*r.devicePixelRatio,n.setAttribute("data-zr-dom-id",e),n}var n=e("./mixin/Transformable"),a=e("./tool/util"),o=window.G_vmlCanvasManager,r=e("./config"),s=function(e,a){this.id=e,this.dom=i(e,"canvas",a),this.dom.onselectstart=t,this.dom.style["-webkit-user-select"]="none",this.dom.style["user-select"]="none",this.dom.style["-webkit-touch-callout"]="none",this.dom.style["-webkit-tap-highlight-color"]="rgba(0,0,0,0)",this.dom.className=r.elementClassName,o&&o.initElement(this.dom),this.domBack=null,this.ctxBack=null,this.painter=a,this.unusedCount=0,this.config=null,this.dirty=!0,this.elCount=0,this.clearColor=0,this.motionBlur=!1,this.lastFrameAlpha=.7,this.zoomable=!1,this.panable=!1,this.maxZoom=1/0,this.minZoom=0,n.call(this)};return s.prototype.initContext=function(){this.ctx=this.dom.getContext("2d");var 
/* Layer.initContext/createBackBuffer/resize/clear — all devicePixelRatio-aware; clear() optionally composites the previous frame from a back buffer for a motion-blur effect. Then the start of "zrender/shape/Text" (canvas text shape). */e=r.devicePixelRatio;1!=e&&this.ctx.scale(e,e)},s.prototype.createBackBuffer=function(){if(!o){this.domBack=i("back-"+this.id,"canvas",this.painter),this.ctxBack=this.domBack.getContext("2d");var e=r.devicePixelRatio;1!=e&&this.ctxBack.scale(e,e)}},s.prototype.resize=function(e,t){var i=r.devicePixelRatio;this.dom.style.width=e+"px",this.dom.style.height=t+"px",this.dom.setAttribute("width",e*i),this.dom.setAttribute("height",t*i),1!=i&&this.ctx.scale(i,i),this.domBack&&(this.domBack.setAttribute("width",e*i),this.domBack.setAttribute("height",t*i),1!=i&&this.ctxBack.scale(i,i))},s.prototype.clear=function(){var e=this.dom,t=this.ctx,i=e.width,n=e.height,a=this.clearColor&&!o,s=this.motionBlur&&!o,l=this.lastFrameAlpha,h=r.devicePixelRatio;if(s&&(this.domBack||this.createBackBuffer(),this.ctxBack.globalCompositeOperation="copy",this.ctxBack.drawImage(e,0,0,i/h,n/h)),t.clearRect(0,0,i/h,n/h),a&&(t.save(),t.fillStyle=this.clearColor,t.fillRect(0,0,i/h,n/h),t.restore()),s){var d=this.domBack;t.save(),t.globalAlpha=l,t.drawImage(d,0,0,i/h,n/h),t.restore()}},a.merge(s.prototype,n.prototype),s}),define("zrender/shape/Text",["require","../tool/area","./Base","../tool/util"],function(e){var t=e("../tool/area"),i=e("./Base"),n=function(e){i.call(this,e)};return n.prototype={type:"text",brush:function(e,i){var n=this.style;if(i&&(n=this.getHighlightStyle(n,this.highlightStyle||{})),"undefined"!=typeof n.text&&n.text!==!1){e.save(),this.doClip(e),this.setContext(e,n),this.setTransform(e),n.textFont&&(e.font=n.textFont),e.textAlign=n.textAlign||"start",e.textBaseline=n.textBaseline||"middle";var a,o=(n.text+"").split("\n"),r=t.getTextHeight("国",n.textFont),s=this.getRect(n),l=n.x;a="top"==n.textBaseline?s.y:"bottom"==n.textBaseline?s.y+r:s.y+r/2;for(var 
/* Text.brush: renders each \n-separated line via fillText/strokeText according to brushType (with optional maxWidth), advancing y by the measured line height; getRect caches the bounding box on style.__rect. Then "zrender/shape/Rectangle": _buildRadiusPath handles 1/2/3/4-element corner radii (scaled down when adjacent radii exceed a side). */h=0,d=o.length;d>h;h++){if(n.maxWidth)switch(n.brushType){case"fill":e.fillText(o[h],l,a,n.maxWidth);break;case"stroke":e.strokeText(o[h],l,a,n.maxWidth);break;case"both":e.fillText(o[h],l,a,n.maxWidth),e.strokeText(o[h],l,a,n.maxWidth);break;default:e.fillText(o[h],l,a,n.maxWidth)}else switch(n.brushType){case"fill":e.fillText(o[h],l,a);break;case"stroke":e.strokeText(o[h],l,a);break;case"both":e.fillText(o[h],l,a),e.strokeText(o[h],l,a);break;default:e.fillText(o[h],l,a)}a+=r}e.restore()}},getRect:function(e){if(e.__rect)return e.__rect;var i=t.getTextWidth(e.text,e.textFont),n=t.getTextHeight(e.text,e.textFont),a=e.x;"end"==e.textAlign||"right"==e.textAlign?a-=i:"center"==e.textAlign&&(a-=i/2);var o;return o="top"==e.textBaseline?e.y:"bottom"==e.textBaseline?e.y-n:e.y-n/2,e.__rect={x:a,y:o,width:i,height:n},e.__rect}},e("../tool/util").inherits(n,i),n}),define("zrender/shape/Rectangle",["require","./Base","../tool/util"],function(e){var t=e("./Base"),i=function(e){t.call(this,e)};return i.prototype={type:"rectangle",_buildRadiusPath:function(e,t){var i,n,a,o,r=t.x,s=t.y,l=t.width,h=t.height,d=t.radius;"number"==typeof d?i=n=a=o=d:d instanceof Array?1===d.length?i=n=a=o=d[0]:2===d.length?(i=a=d[0],n=o=d[1]):3===d.length?(i=d[0],n=o=d[1],a=d[2]):(i=d[0],n=d[1],a=d[2],o=d[3]):i=n=a=o=0;var c;i+n>l&&(c=i+n,i*=l/c,n*=l/c),a+o>l&&(c=a+o,a*=l/c,o*=l/c),n+a>h&&(c=n+a,n*=h/c,a*=h/c),i+o>h&&(c=i+o,i*=h/c,o*=h/c),e.moveTo(r+i,s),e.lineTo(r+l-n,s),0!==n&&e.quadraticCurveTo(r+l,s,r+l,s+n),e.lineTo(r+l,s+h-a),0!==a&&e.quadraticCurveTo(r+l,s+h,r+l-a,s+h),e.lineTo(r+o,s+h),0!==o&&e.quadraticCurveTo(r,s+h,r,s+h-o),e.lineTo(r,s+i),0!==i&&e.quadraticCurveTo(r,s,r+i,s)},buildPath:function(e,t){t.radius?this._buildRadiusPath(e,t):(e.moveTo(t.x,t.y),e.lineTo(t.x+t.width,t.y),e.lineTo(t.x+t.width,t.y+t.height),e.lineTo(t.x,t.y+t.height),e.lineTo(t.x,t.y)),e.closePath()},getRect:function(e){if(e.__rect)return e.__rect;var t;return 
/* Rectangle.getRect tail (expands bounds by half the line width when stroked). Then "zrender/tool/area": point-in-shape hit testing — isInside dispatches per shape type, falling back to canvas isPointInPath when the shape has buildPath; per-primitive analytic tests follow (line, cubic/quadratic stroke, arc, polyline, ring, circle, sector, rect, polygon winding). */t="stroke"==e.brushType||"fill"==e.brushType?e.lineWidth||1:0,e.__rect={x:Math.round(e.x-t/2),y:Math.round(e.y-t/2),width:e.width+t,height:e.height+t},e.__rect}},e("../tool/util").inherits(i,t),i}),define("zrender/tool/area",["require","./util","./curve"],function(e){"use strict";function t(e){return e%=C,0>e&&(e+=C),e}function i(e,t,i,o){if(!t||!e)return!1;var r=e.type;v=v||L.getContext();var s=n(e,t,i,o);if("undefined"!=typeof s)return s;if(e.buildPath&&v.isPointInPath)return a(e,v,t,i,o);switch(r){case"ellipse":return!0;case"trochoid":var l="out"==t.location?t.r1+t.r2+t.d:t.r1-t.r2+t.d;return p(t,i,o,l);case"rose":return p(t,i,o,t.maxr);default:return!1}}function n(e,t,i,n){var a=e.type;switch(a){case"bezier-curve":return"undefined"==typeof t.cpX2?l(t.xStart,t.yStart,t.cpX1,t.cpY1,t.xEnd,t.yEnd,t.lineWidth,i,n):s(t.xStart,t.yStart,t.cpX1,t.cpY1,t.cpX2,t.cpY2,t.xEnd,t.yEnd,t.lineWidth,i,n);case"line":return r(t.xStart,t.yStart,t.xEnd,t.yEnd,t.lineWidth,i,n);case"polyline":return d(t.pointList,t.lineWidth,i,n);case"ring":return c(t.x,t.y,t.r0,t.r,i,n);case"circle":return p(t.x,t.y,t.r,i,n);case"sector":var o=t.startAngle*Math.PI/180,h=t.endAngle*Math.PI/180;return t.clockWise||(o=-o,h=-h),u(t.x,t.y,t.r0,t.r,o,h,!t.clockWise,i,n);case"path":return t.pathArray&&_(t.pathArray,Math.max(t.lineWidth,5),t.brushType,i,n);case"polygon":case"star":case"isogon":return V(t.pointList,i,n);case"text":var U=t.__rect||e.getRect(t);return m(U.x,U.y,U.width,U.height,i,n);case"rectangle":case"image":return m(t.x,t.y,t.width,t.height,i,n)}}function a(e,t,i,n,a){return t.beginPath(),e.buildPath(t,i),t.closePath(),t.isPointInPath(n,a)}function o(e,t,n,a){return!i(e,t,n,a)}function r(e,t,i,n,a,o,r){if(0===a)return!1;var s=Math.max(a,5),l=0,h=e;if(r>t+s&&r>n+s||t-s>r&&n-s>r||o>e+s&&o>i+s||e-s>o&&i-s>o)return!1;if(e===i)return Math.abs(o-e)<=s/2;l=(t-n)/(e-i),h=(e*n-i*t)/(e-i);var d=l*o-r+h,c=d*d/(l*l+1);return s/2*s/2>=c}function s(e,t,i,n,a,o,r,s,l,h,d){if(0===l)return!1;var 
/* area module internals: stroke hit tests — cubic (s) and quadratic (l) Bezier strokes via curve projectPoint distance against half the stroke width (minimum effective width 5px), arc stroke (h) with angle normalization, polyline (d), ring (c), rect (m), circle (p), sector (u), polygon winding test (V/U), and the start of cubic winding-number accumulation (f) used by path fill testing. */c=Math.max(l,5);if(d>t+c&&d>n+c&&d>o+c&&d>s+c||t-c>d&&n-c>d&&o-c>d&&s-c>d||h>e+c&&h>i+c&&h>a+c&&h>r+c||e-c>h&&i-c>h&&a-c>h&&r-c>h)return!1;var m=w.cubicProjectPoint(e,t,i,n,a,o,r,s,h,d,null);return c/2>=m}function l(e,t,i,n,a,o,r,s,l){if(0===r)return!1;var h=Math.max(r,5);if(l>t+h&&l>n+h&&l>o+h||t-h>l&&n-h>l&&o-h>l||s>e+h&&s>i+h&&s>a+h||e-h>s&&i-h>s&&a-h>s)return!1;var d=w.quadraticProjectPoint(e,t,i,n,a,o,s,l,null);return h/2>=d}function h(e,i,n,a,o,r,s,l,h){if(0===s)return!1;var d=Math.max(s,5);l-=e,h-=i;var c=Math.sqrt(l*l+h*h);if(c-d>n||n>c+d)return!1;if(Math.abs(a-o)>=C)return!0;if(r){var m=a;a=t(o),o=t(m)}else a=t(a),o=t(o);a>o&&(o+=C);var p=Math.atan2(h,l);return 0>p&&(p+=C),p>=a&&o>=p||p+C>=a&&o>=p+C}function d(e,t,i,n){for(var t=Math.max(t,10),a=0,o=e.length-1;o>a;a++){var s=e[a][0],l=e[a][1],h=e[a+1][0],d=e[a+1][1];if(r(s,l,h,d,t,i,n))return!0}return!1}function c(e,t,i,n,a,o){var r=(a-e)*(a-e)+(o-t)*(o-t);return n*n>r&&r>i*i}function m(e,t,i,n,a,o){return a>=e&&e+i>=a&&o>=t&&t+n>=o}function p(e,t,i,n,a){return i*i>(n-e)*(n-e)+(a-t)*(a-t)}function u(e,t,i,n,a,o,r,s,l){return h(e,t,(i+n)/2,a,o,r,n-i,s,l)}function V(e,t,i){for(var n=e.length,a=0,o=0,r=n-1;n>o;o++){var s=e[r][0],l=e[r][1],h=e[o][0],d=e[o][1];a+=U(s,l,h,d,t,i),r=o}return 0!==a}function U(e,t,i,n,a,o){if(o>t&&o>n||t>o&&n>o)return 0;if(n==t)return 0;var r=t>n?1:-1,s=(o-t)/(n-t),l=s*(i-e)+e;return l>a?r:0}function g(){var e=E[0];E[0]=E[1],E[1]=e}function f(e,t,i,n,a,o,r,s,l,h){if(h>t&&h>n&&h>o&&h>s||t>h&&n>h&&o>h&&s>h)return 0;var d=w.cubicRootAt(t,n,o,s,h,T);if(0===d)return 0;for(var c,m,p=0,u=-1,V=0;d>V;V++){var U=T[V],f=w.cubicAt(e,i,a,r,U);l>f||(0>u&&(u=w.cubicExtrema(t,n,o,s,E),E[1]<E[0]&&u>1&&g(),c=w.cubicAt(t,n,o,s,E[0]),u>1&&(m=w.cubicAt(t,n,o,s,E[1]))),p+=2==u?U<E[0]?t>c?1:-1:U<E[1]?c>m?1:-1:m>s?1:-1:U<E[0]?t>c?1:-1:c>s?1:-1)}return p}function y(e,t,i,n,a,o,r,s){if(s>t&&s>n&&s>o||t>s&&n>s&&o>s)return 0;var l=w.quadraticRootAt(t,n,o,s,T);if(0===l)return 0;var 
/* Winding accumulation for quadratic segments (y) and arcs (b), then isInsidePath (_) which walks a parsed pathArray of M/L/C/Q/A/z commands, combining per-segment stroke tests and fill winding numbers; x/k are cached text width/height measurers (caches reset after 5000 entries; height is based on the width of "国" plus 2, per CJK-square convention). */h=w.quadraticExtremum(t,n,o);if(h>=0&&1>=h){for(var d=0,c=w.quadraticAt(t,n,o,h),m=0;l>m;m++){var p=w.quadraticAt(e,i,a,T[m]);r>p||(d+=T[m]<h?t>c?1:-1:c>o?1:-1)}return d}var p=w.quadraticAt(e,i,a,T[0]);return r>p?0:t>o?1:-1}function b(e,i,n,a,o,r,s,l){if(l-=i,l>n||-n>l)return 0;var h=Math.sqrt(n*n-l*l);if(T[0]=-h,T[1]=h,Math.abs(a-o)>=C){a=0,o=C;var d=r?1:-1;return s>=T[0]+e&&s<=T[1]+e?d:0}if(r){var h=a;a=t(o),o=t(h)}else a=t(a),o=t(o);a>o&&(o+=C);for(var c=0,m=0;2>m;m++){var p=T[m];if(p+e>s){var u=Math.atan2(l,p),d=r?1:-1;0>u&&(u=C+u),(u>=a&&o>=u||u+C>=a&&o>=u+C)&&(u>Math.PI/2&&u<1.5*Math.PI&&(d=-d),c+=d)}}return c}function _(e,t,i,n,a){var o=0,d=0,c=0,m=0,p=0,u=!0,V=!0;i=i||"fill";for(var g="stroke"===i||"both"===i,_="fill"===i||"both"===i,x=0;x<e.length;x++){var k=e[x],v=k.points;if(u||"M"===k.command){if(x>0&&(_&&(o+=U(d,c,m,p,n,a)),0!==o))return!0;m=v[v.length-2],p=v[v.length-1],u=!1,V&&"A"!==k.command&&(V=!1,d=m,c=p)}switch(k.command){case"M":d=v[0],c=v[1];break;case"L":if(g&&r(d,c,v[0],v[1],t,n,a))return!0;_&&(o+=U(d,c,v[0],v[1],n,a)),d=v[0],c=v[1];break;case"C":if(g&&s(d,c,v[0],v[1],v[2],v[3],v[4],v[5],t,n,a))return!0;_&&(o+=f(d,c,v[0],v[1],v[2],v[3],v[4],v[5],n,a)),d=v[4],c=v[5];break;case"Q":if(g&&l(d,c,v[0],v[1],v[2],v[3],t,n,a))return!0;_&&(o+=y(d,c,v[0],v[1],v[2],v[3],n,a)),d=v[2],c=v[3];break;case"A":var L=v[0],w=v[1],W=v[2],X=v[3],I=v[4],S=v[5],K=Math.cos(I)*W+L,C=Math.sin(I)*X+w;V?(V=!1,m=K,p=C):o+=U(d,c,K,C);var T=(n-L)*X/W+L;if(g&&h(L,w,X,I,I+S,1-v[7],t,T,a))return!0;_&&(o+=b(L,w,X,I,I+S,1-v[7],T,a)),d=Math.cos(I+S)*W+L,c=Math.sin(I+S)*X+w;break;case"z":if(g&&r(d,c,m,p,t,n,a))return!0;u=!0}}return _&&(o+=U(d,c,m,p,n,a)),0!==o}function x(e,t){var i=e+":"+t;if(W[i])return W[i];v=v||L.getContext(),v.save(),t&&(v.font=t),e=(e+"").split("\n");for(var n=0,a=0,o=e.length;o>a;a++)n=Math.max(v.measureText(e[a]).width,n);return v.restore(),W[i]=n,++I>K&&(I=0,W={}),n}function k(e,t){var i=e+":"+t;if(X[i])return 
/* getTextHeight tail and the area module's exported hit-test API (isInside/isOutside/getTextWidth/...). Then "zrender/shape/Base": shared shape machinery — t/i are internal text-draw and text-bounds helpers, d is the base shape constructor (copies option props, initializes style/highlightStyle/__dirty/__clipShapes and mixes in Transformable + Eventful). */X[i];v=v||L.getContext(),v.save(),t&&(v.font=t),e=(e+"").split("\n");var n=(v.measureText("国").width+2)*e.length;return v.restore(),X[i]=n,++S>K&&(S=0,X={}),n}var v,L=e("./util"),w=e("./curve"),W={},X={},I=0,S=0,K=5e3,C=2*Math.PI,T=[-1,-1,-1],E=[-1,-1];return{isInside:i,isOutside:o,getTextWidth:x,getTextHeight:k,isInsidePath:_,isInsidePolygon:V,isInsideSector:u,isInsideCircle:p,isInsideLine:r,isInsideRect:m,isInsidePolyline:d,isInsideCubicStroke:s,isInsideQuadraticStroke:l}}),define("zrender/shape/Base",["require","../tool/matrix","../tool/guid","../tool/util","../tool/log","../mixin/Transformable","../mixin/Eventful","../tool/area","../tool/color"],function(e){function t(t,n,a,o,r,s,l){r&&(t.font=r),t.textAlign=s,t.textBaseline=l;var h=i(n,a,o,r,s,l);n=(n+"").split("\n");var d=e("../tool/area").getTextHeight("国",r);switch(l){case"top":o=h.y;break;case"bottom":o=h.y+d;break;default:o=h.y+d/2}for(var c=0,m=n.length;m>c;c++)t.fillText(n[c],a,o),o+=d}function i(t,i,n,a,o,r){var s=e("../tool/area"),l=s.getTextWidth(t,a),h=s.getTextHeight("国",a);switch(t=(t+"").split("\n"),o){case"end":case"right":i-=l;break;case"center":i-=l/2}switch(r){case"top":break;case"bottom":n-=h*t.length;break;default:n-=h*t.length/2}return{x:i,y:n,width:l,height:h*t.length}}var n=window.G_vmlCanvasManager,a=e("../tool/matrix"),o=e("../tool/guid"),r=e("../tool/util"),s=e("../tool/log"),l=e("../mixin/Transformable"),h=e("../mixin/Eventful"),d=function(e){e=e||{},this.id=e.id||o();for(var t in e)this[t]=e[t];this.style=this.style||{},this.highlightStyle=this.highlightStyle||null,this.parent=null,this.__dirty=!0,this.__clipShapes=[],l.call(this),h.call(this)};d.prototype.invisible=!1,d.prototype.ignore=!1,d.prototype.zlevel=0,d.prototype.draggable=!1,d.prototype.clickable=!1,d.prototype.hoverable=!0,d.prototype.z=0,d.prototype.brush=function(e,t){var 
/* Base.brush (note the intentional-looking fall-through: "both" falls into "stroke" after fill), beforeBrush/afterBrush save/restore the context and apply clip + style + transform, the c table maps zrender style keys to canvas context properties, doClip applies __clipShapes with inverse-transform bookkeeping (skipped under excanvas), and getHighlightStyle derives a highlight variant of a style. */i=this.beforeBrush(e,t);switch(e.beginPath(),this.buildPath(e,i),i.brushType){case"both":e.fill();case"stroke":i.lineWidth>0&&e.stroke();break;default:e.fill()}this.drawText(e,i,this.style),this.afterBrush(e)},d.prototype.beforeBrush=function(e,t){var i=this.style;return this.brushTypeOnly&&(i.brushType=this.brushTypeOnly),t&&(i=this.getHighlightStyle(i,this.highlightStyle||{},this.brushTypeOnly)),"stroke"==this.brushTypeOnly&&(i.strokeColor=i.strokeColor||i.color),e.save(),this.doClip(e),this.setContext(e,i),this.setTransform(e),i},d.prototype.afterBrush=function(e){e.restore()};var c=[["color","fillStyle"],["strokeColor","strokeStyle"],["opacity","globalAlpha"],["lineCap","lineCap"],["lineJoin","lineJoin"],["miterLimit","miterLimit"],["lineWidth","lineWidth"],["shadowBlur","shadowBlur"],["shadowColor","shadowColor"],["shadowOffsetX","shadowOffsetX"],["shadowOffsetY","shadowOffsetY"]];d.prototype.setContext=function(e,t){for(var i=0,n=c.length;n>i;i++){var a=c[i][0],o=t[a],r=c[i][1];"undefined"!=typeof o&&(e[r]=o)}};var m=a.create();return d.prototype.doClip=function(e){if(this.__clipShapes&&!n)for(var t=0;t<this.__clipShapes.length;t++){var i=this.__clipShapes[t];if(i.needTransform){var o=i.transform;a.invert(m,o),e.transform(o[0],o[1],o[2],o[3],o[4],o[5])}if(e.beginPath(),i.buildPath(e,i.style),e.clip(),i.needTransform){var o=m;e.transform(o[0],o[1],o[2],o[3],o[4],o[5])}}},d.prototype.getHighlightStyle=function(t,i,n){var a={};for(var o in t)a[o]=t[o];var r=e("../tool/color"),s=r.getHighlightColor();"stroke"!=t.brushType?(a.strokeColor=s,a.lineWidth=(t.lineWidth||1)+this.getHighlightZoom(),a.brushType="both"):"stroke"!=n?(a.strokeColor=s,a.lineWidth=(t.lineWidth||1)+this.getHighlightZoom()):a.strokeColor=i.strokeColor||r.mix(t.strokeColor,r.toRGB(s));for(var o in i)"undefined"!=typeof i[o]&&(a[o]=i[o]);return 
/* Base continued: drift moves by delta, buildPath/getRect are abstract placeholders that just log, isCover maps global coords to local then delegates to area.isInside, and drawText positions an attached label (inside/top/bottom/left/right with 10px padding, or start/end along a point list — angle handling continues on the next line). */a},d.prototype.getHighlightZoom=function(){return"text"!=this.type?6:2},d.prototype.drift=function(e,t){this.position[0]+=e,this.position[1]+=t},d.prototype.buildPath=function(){s("buildPath not implemented in "+this.type)},d.prototype.getRect=function(){s("getRect not implemented in "+this.type)},d.prototype.isCover=function(t,i){var n=this.transformCoordToLocal(t,i);return t=n[0],i=n[1],this.isCoverRect(t,i)?e("../tool/area").isInside(this,this.style,t,i):!1},d.prototype.isCoverRect=function(e,t){var i=this.style.__rect;return i||(i=this.style.__rect=this.getRect(this.style)),e>=i.x&&e<=i.x+i.width&&t>=i.y&&t<=i.y+i.height},d.prototype.drawText=function(e,i,n){if("undefined"!=typeof i.text&&i.text!==!1){var a=i.textColor||i.color||i.strokeColor;e.fillStyle=a;var o,r,s,l,h=10,d=i.textPosition||this.textPosition||"top";switch(d){case"inside":case"top":case"bottom":case"left":case"right":if(this.getRect){var c=(n||i).__rect||this.getRect(n||i);switch(d){case"inside":s=c.x+c.width/2,l=c.y+c.height/2,o="center",r="middle","stroke"!=i.brushType&&a==i.color&&(e.fillStyle="#fff");break;case"left":s=c.x-h,l=c.y+c.height/2,o="end",r="middle";break;case"right":s=c.x+c.width+h,l=c.y+c.height/2,o="start",r="middle";break;case"top":s=c.x+c.width/2,l=c.y-h,o="center",r="bottom";break;case"bottom":s=c.x+c.width/2,l=c.y+c.height+h,o="center",r="top"}}break;case"start":case"end":var m=i.pointList||[[i.xStart||0,i.yStart||0],[i.xEnd||0,i.yEnd||0]],p=m.length;if(2>p)return;var u,V,U,g;switch(d){case"start":u=m[1][0],V=m[0][0],U=m[1][1],g=m[0][1];break;case"end":u=m[p-2][0],V=m[p-1][0],U=m[p-2][1],g=m[p-1][1]}s=V,l=g;var 
/* drawText tail: aligns start/end labels by the segment's angle in degrees, plus modSelf (dirty + bbox-cache invalidation) and isSilent (no interaction handlers at all). Then "zrender/tool/curve": cubic Bezier evaluation/derivative, cubicRootAt via Shengjin's discriminant-cased cubic solver, and the start of cubicExtrema (derivative roots). */f=Math.atan((U-g)/(V-u))/Math.PI*180;0>V-u?f+=180:0>U-g&&(f+=360),h=5,f>=30&&150>=f?(o="center",r="bottom",l-=h):f>150&&210>f?(o="right",r="middle",s-=h):f>=210&&330>=f?(o="center",r="top",l+=h):(o="left",r="middle",s+=h);break;case"specific":s=i.textX||0,l=i.textY||0,o="start",r="middle"}null!=s&&null!=l&&t(e,i.text,s,l,i.textFont,i.textAlign||o,i.textBaseline||r)}},d.prototype.modSelf=function(){this.__dirty=!0,this.style&&(this.style.__rect=null),this.highlightStyle&&(this.highlightStyle.__rect=null)},d.prototype.isSilent=function(){return!(this.hoverable||this.draggable||this.clickable||this.onmousemove||this.onmouseover||this.onmouseout||this.onmousedown||this.onmouseup||this.onclick||this.ondragenter||this.ondragover||this.ondragleave||this.ondrop)},r.merge(d.prototype,l.prototype,!0),r.merge(d.prototype,h.prototype,!0),d}),define("zrender/tool/curve",["require","./vector"],function(e){function t(e){return e>-U&&U>e}function i(e){return e>U||-U>e}function n(e,t,i,n,a){var o=1-a;return o*o*(o*e+3*a*t)+a*a*(a*n+3*o*i)}function a(e,t,i,n,a){var o=1-a;return 3*(((t-e)*o+2*(i-t)*a)*o+(n-i)*a*a)}function o(e,i,n,a,o,r){var s=a+3*(i-n)-e,l=3*(n-2*i+e),h=3*(i-e),d=e-o,c=l*l-3*s*h,m=l*h-9*s*d,p=h*h-3*l*d,u=0;if(t(c)&&t(m))if(t(l))r[0]=0;else{var V=-h/l;V>=0&&1>=V&&(r[u++]=V)}else{var U=m*m-4*c*p;if(t(U)){var y=m/c,V=-l/s+y,b=-y/2;V>=0&&1>=V&&(r[u++]=V),b>=0&&1>=b&&(r[u++]=b)}else if(U>0){var _=Math.sqrt(U),x=c*l+1.5*s*(-m+_),k=c*l+1.5*s*(-m-_);x=0>x?-Math.pow(-x,f):Math.pow(x,f),k=0>k?-Math.pow(-k,f):Math.pow(k,f);var V=(-l-(x+k))/(3*s);V>=0&&1>=V&&(r[u++]=V)}else{var v=(2*c*l-3*s*m)/(2*Math.sqrt(c*c*c)),L=Math.acos(v)/3,w=Math.sqrt(c),W=Math.cos(L),V=(-l-2*w*W)/(3*s),b=(-l+w*(W+g*Math.sin(L)))/(3*s),X=(-l+w*(W-g*Math.sin(L)))/(3*s);V>=0&&1>=V&&(r[u++]=V),b>=0&&1>=b&&(r[u++]=b),X>=0&&1>=X&&(r[u++]=X)}}return u}function r(e,n,a,o,r){var s=6*a-12*n+6*e,l=9*n+3*o-3*e-9*a,h=3*n-3*e,d=0;if(t(l)){if(i(s)){var c=-h/s;c>=0&&1>=c&&(r[d++]=c)}}else{var 
/* curve module continued: cubicExtrema tail, de Casteljau cubic subdivision (s), iterative nearest-point projection onto a cubic (l: coarse 0.05-step scan then 32 rounds of interval halving), quadratic evaluation/derivative/roots/extremum/subdivision (h/d/c/m/p), and quadratic projection (u) by the same scan-and-refine scheme. */m=s*s-4*l*h;if(t(m))r[0]=-s/(2*l);else if(m>0){var p=Math.sqrt(m),c=(-s+p)/(2*l),u=(-s-p)/(2*l);c>=0&&1>=c&&(r[d++]=c),u>=0&&1>=u&&(r[d++]=u)}}return d}function s(e,t,i,n,a,o){var r=(t-e)*a+e,s=(i-t)*a+t,l=(n-i)*a+i,h=(s-r)*a+r,d=(l-s)*a+s,c=(d-h)*a+h;o[0]=e,o[1]=r,o[2]=h,o[3]=c,o[4]=c,o[5]=d,o[6]=l,o[7]=n}function l(e,t,i,a,o,r,s,l,h,d,c){var m,p=.005,u=1/0;y[0]=h,y[1]=d;for(var g=0;1>g;g+=.05){b[0]=n(e,i,o,s,g),b[1]=n(t,a,r,l,g);var f=V.distSquare(y,b);u>f&&(m=g,u=f)}u=1/0;for(var x=0;32>x&&!(U>p);x++){var k=m-p,v=m+p;b[0]=n(e,i,o,s,k),b[1]=n(t,a,r,l,k);var f=V.distSquare(b,y);if(k>=0&&u>f)m=k,u=f;else{_[0]=n(e,i,o,s,v),_[1]=n(t,a,r,l,v);var L=V.distSquare(_,y);1>=v&&u>L?(m=v,u=L):p*=.5}}return c&&(c[0]=n(e,i,o,s,m),c[1]=n(t,a,r,l,m)),Math.sqrt(u)}function h(e,t,i,n){var a=1-n;return a*(a*e+2*n*t)+n*n*i}function d(e,t,i,n){return 2*((1-n)*(t-e)+n*(i-t))}function c(e,n,a,o,r){var s=e-2*n+a,l=2*(n-e),h=e-o,d=0;if(t(s)){if(i(l)){var c=-h/l;c>=0&&1>=c&&(r[d++]=c)}}else{var m=l*l-4*s*h;if(t(m)){var c=-l/(2*s);c>=0&&1>=c&&(r[d++]=c)}else if(m>0){var p=Math.sqrt(m),c=(-l+p)/(2*s),u=(-l-p)/(2*s);c>=0&&1>=c&&(r[d++]=c),u>=0&&1>=u&&(r[d++]=u)}}return d}function m(e,t,i){var n=e+i-2*t;return 0===n?.5:(e-t)/n}function p(e,t,i,n,a){var o=(t-e)*n+e,r=(i-t)*n+t,s=(r-o)*n+o;a[0]=e,a[1]=o,a[2]=s,a[3]=s,a[4]=r,a[5]=i}function u(e,t,i,n,a,o,r,s,l){var d,c=.005,m=1/0;y[0]=r,y[1]=s;for(var p=0;1>p;p+=.05){b[0]=h(e,i,a,p),b[1]=h(t,n,o,p);var u=V.distSquare(y,b);m>u&&(d=p,m=u)}m=1/0;for(var g=0;32>g&&!(U>c);g++){var f=d-c,x=d+c;b[0]=h(e,i,a,f),b[1]=h(t,n,o,f);var u=V.distSquare(b,y);if(f>=0&&m>u)d=f,m=u;else{_[0]=h(e,i,a,x),_[1]=h(t,n,o,x);var k=V.distSquare(_,y);1>=x&&m>k?(d=x,m=k):c*=.5}}return l&&(l[0]=h(e,i,a,d),l[1]=h(t,n,o,d)),Math.sqrt(m)}var 
/* curve module shared scratch vectors and exports. Then "zrender/mixin/Transformable": position/rotation/scale bookkeeping (epsilon 5e-5); updateTransform composes scale-about-origin, rotation-about-origin, translation, and the parent transform into this.transform and maintains invTransform. */V=e("./vector"),U=1e-4,g=Math.sqrt(3),f=1/3,y=V.create(),b=V.create(),_=V.create();return{cubicAt:n,cubicDerivativeAt:a,cubicRootAt:o,cubicExtrema:r,cubicSubdivide:s,cubicProjectPoint:l,quadraticAt:h,quadraticDerivativeAt:d,quadraticRootAt:c,quadraticExtremum:m,quadraticSubdivide:p,quadraticProjectPoint:u}}),define("zrender/mixin/Transformable",["require","../tool/matrix","../tool/vector"],function(e){"use strict";function t(e){return e>-s&&s>e}function i(e){return e>s||-s>e}var n=e("../tool/matrix"),a=e("../tool/vector"),o=[0,0],r=n.translate,s=5e-5,l=function(){this.position||(this.position=[0,0]),"undefined"==typeof this.rotation&&(this.rotation=[0,0,0]),this.scale||(this.scale=[1,1,0,0]),this.needLocalTransform=!1,this.needTransform=!1};return l.prototype={constructor:l,updateNeedTransform:function(){this.needLocalTransform=i(this.rotation[0])||i(this.position[0])||i(this.position[1])||i(this.scale[0]-1)||i(this.scale[1]-1)},updateTransform:function(){this.updateNeedTransform();var e=this.parent&&this.parent.needTransform;if(this.needTransform=this.needLocalTransform||e,this.needTransform){var t=this.transform||n.create();if(n.identity(t),this.needLocalTransform){var a=this.scale;if(i(a[0])||i(a[1])){o[0]=-a[2]||0,o[1]=-a[3]||0;var s=i(o[0])||i(o[1]);s&&r(t,t,o),n.scale(t,t,a),s&&(o[0]=-o[0],o[1]=-o[1],r(t,t,o))}if(this.rotation instanceof Array){if(0!==this.rotation[0]){o[0]=-this.rotation[1]||0,o[1]=-this.rotation[2]||0;var s=i(o[0])||i(o[1]);s&&r(t,t,o),n.rotate(t,t,this.rotation[0]),s&&(o[0]=-o[0],o[1]=-o[1],r(t,t,o))}}else 0!==this.rotation&&n.rotate(t,t,this.rotation);(i(this.position[0])||i(this.position[1]))&&r(t,t,this.position)}e&&(this.needLocalTransform?n.mul(t,this.parent.transform,t):n.copy(t,this.parent.transform)),this.transform=t,this.invTransform=this.invTransform||n.create(),n.invert(this.invTransform,t)}},setTransform:function(e){if(this.needTransform){var t=this.transform;e.transform(t[0],t[1],t[2],t[3],t[4],t[5])}},lookAt:function(){var 
/* Transformable continued: lookAt orients toward a target point and re-derives position/scale/rotation via decomposeTransform; transformCoordToLocal maps global to local coords through invTransform. Then "zrender/Group": a container element mixing in Transformable + Eventful; addChild/removeChild keep parent pointers and the shared storage map in sync (recursively for nested groups). */e=a.create();return function(i){this.transform||(this.transform=n.create());var o=this.transform;if(a.sub(e,i,this.position),!t(e[0])||!t(e[1])){a.normalize(e,e);var r=this.scale;o[2]=e[0]*r[1],o[3]=e[1]*r[1],o[0]=e[1]*r[0],o[1]=-e[0]*r[0],o[4]=this.position[0],o[5]=this.position[1],this.decomposeTransform()}}}(),decomposeTransform:function(){if(this.transform){var e=this.transform,t=e[0]*e[0]+e[1]*e[1],n=this.position,a=this.scale,o=this.rotation;i(t-1)&&(t=Math.sqrt(t));var r=e[2]*e[2]+e[3]*e[3];i(r-1)&&(r=Math.sqrt(r)),n[0]=e[4],n[1]=e[5],a[0]=t,a[1]=r,a[2]=a[3]=0,o[0]=Math.atan2(-e[1]/r,e[0]/t),o[1]=o[2]=0}},transformCoordToLocal:function(e,t){var i=[e,t];return this.needTransform&&this.invTransform&&a.applyTransform(i,i,this.invTransform),i}},l}),define("zrender/Group",["require","./tool/guid","./tool/util","./mixin/Transformable","./mixin/Eventful"],function(e){var t=e("./tool/guid"),i=e("./tool/util"),n=e("./mixin/Transformable"),a=e("./mixin/Eventful"),o=function(e){e=e||{},this.id=e.id||t();for(var i in e)this[i]=e[i];this.type="group",this.clipShape=null,this._children=[],this._storage=null,this.__dirty=!0,n.call(this),a.call(this)};return o.prototype.ignore=!1,o.prototype.children=function(){return this._children.slice()},o.prototype.childAt=function(e){return this._children[e]},o.prototype.addChild=function(e){e!=this&&e.parent!=this&&(e.parent&&e.parent.removeChild(e),this._children.push(e),e.parent=this,this._storage&&this._storage!==e._storage&&(this._storage.addToMap(e),e instanceof o&&e.addChildrenToStorage(this._storage)))},o.prototype.removeChild=function(e){var t=i.indexOf(this._children,e);t>=0&&this._children.splice(t,1),e.parent=null,this._storage&&(this._storage.delFromMap(e.id),e instanceof o&&e.delChildrenFromStorage(this._storage))},o.prototype.clearChildren=function(){for(var e=0;e<this._children.length;e++){var t=this._children[e];this._storage&&(this._storage.delFromMap(t.id),t instanceof 
/* Group tail: eachChild/traverse (depth-first into nested groups) and add/delChildrenToStorage recursion. Then "zrender/animation/Clip": one animation segment with life/delay/loop/gap/easing; step() maps elapsed time to an eased percent, fires "frame", and returns "restart"/"destroy"/null; then the start of the easing table. */o&&t.delChildrenFromStorage(this._storage))}this._children.length=0},o.prototype.eachChild=function(e,t){for(var i=!!t,n=0;n<this._children.length;n++){var a=this._children[n];i?e.call(t,a):e(a)}},o.prototype.traverse=function(e,t){for(var i=!!t,n=0;n<this._children.length;n++){var a=this._children[n];i?e.call(t,a):e(a),"group"===a.type&&a.traverse(e,t)}},o.prototype.addChildrenToStorage=function(e){for(var t=0;t<this._children.length;t++){var i=this._children[t];e.addToMap(i),i instanceof o&&i.addChildrenToStorage(e)}},o.prototype.delChildrenFromStorage=function(e){for(var t=0;t<this._children.length;t++){var i=this._children[t];e.delFromMap(i.id),i instanceof o&&i.delChildrenFromStorage(e)}},o.prototype.modSelf=function(){this.__dirty=!0},i.merge(o.prototype,n.prototype,!0),i.merge(o.prototype,a.prototype,!0),o}),define("zrender/animation/Clip",["require","./easing"],function(e){function t(e){this._targetPool=e.target||{},this._targetPool instanceof Array||(this._targetPool=[this._targetPool]),this._life=e.life||1e3,this._delay=e.delay||0,this._startTime=(new Date).getTime()+this._delay,this._endTime=this._startTime+1e3*this._life,this.loop="undefined"==typeof e.loop?!1:e.loop,this.gap=e.gap||0,this.easing=e.easing||"Linear",this.onframe=e.onframe,this.ondestroy=e.ondestroy,this.onrestart=e.onrestart}var i=e("./easing");return t.prototype={step:function(e){var t=(e-this._startTime)/this._life;if(!(0>t)){t=Math.min(t,1);var n="string"==typeof this.easing?i[this.easing]:this.easing,a="function"==typeof n?n(t):t;return this.fire("frame",a),1==t?this.loop?(this.restart(),"restart"):(this.__needsRemove=!0,"destroy"):null}},restart:function(){var e=(new Date).getTime(),t=(e-this._startTime)%this._life;this._startTime=(new Date).getTime()-t+this.gap,this.__needsRemove=!1},fire:function(e,t){for(var i=0,n=this._targetPool.length;n>i;i++)this["on"+e]&&this["on"+e](this._targetPool[i],t)},constructor:t},t}),define("zrender/animation/easing",[],function(){var 
/* "zrender/animation/easing": standard easing functions mapping normalized time t in [0,1] (Linear, Quadratic/Cubic/Quartic/Quintic In/Out/InOut, Sinusoidal, Exponential, Circular, Elastic). */e={Linear:function(e){return e},QuadraticIn:function(e){return e*e},QuadraticOut:function(e){return e*(2-e)},QuadraticInOut:function(e){return(e*=2)<1?.5*e*e:-.5*(--e*(e-2)-1)},CubicIn:function(e){return e*e*e},CubicOut:function(e){return--e*e*e+1},CubicInOut:function(e){return(e*=2)<1?.5*e*e*e:.5*((e-=2)*e*e+2)},QuarticIn:function(e){return e*e*e*e},QuarticOut:function(e){return 1- --e*e*e*e},QuarticInOut:function(e){return(e*=2)<1?.5*e*e*e*e:-.5*((e-=2)*e*e*e-2)},QuinticIn:function(e){return e*e*e*e*e},QuinticOut:function(e){return--e*e*e*e*e+1},QuinticInOut:function(e){return(e*=2)<1?.5*e*e*e*e*e:.5*((e-=2)*e*e*e*e+2)},SinusoidalIn:function(e){return 1-Math.cos(e*Math.PI/2)},SinusoidalOut:function(e){return Math.sin(e*Math.PI/2)},SinusoidalInOut:function(e){return.5*(1-Math.cos(Math.PI*e))},ExponentialIn:function(e){return 0===e?0:Math.pow(1024,e-1)},ExponentialOut:function(e){return 1===e?1:1-Math.pow(2,-10*e)},ExponentialInOut:function(e){return 0===e?0:1===e?1:(e*=2)<1?.5*Math.pow(1024,e-1):.5*(-Math.pow(2,-10*(e-1))+2)},CircularIn:function(e){return 1-Math.sqrt(1-e*e)},CircularOut:function(e){return Math.sqrt(1- --e*e)},CircularInOut:function(e){return(e*=2)<1?-.5*(Math.sqrt(1-e*e)-1):.5*(Math.sqrt(1-(e-=2)*e)+1)},ElasticIn:function(e){var t,i=.1,n=.4;return 0===e?0:1===e?1:(!i||1>i?(i=1,t=n/4):t=n*Math.asin(1/i)/(2*Math.PI),-(i*Math.pow(2,10*(e-=1))*Math.sin(2*(e-t)*Math.PI/n)))},ElasticOut:function(e){var t,i=.1,n=.4;return 0===e?0:1===e?1:(!i||1>i?(i=1,t=n/4):t=n*Math.asin(1/i)/(2*Math.PI),i*Math.pow(2,-10*e)*Math.sin(2*(e-t)*Math.PI/n)+1);
/* Easing table tail (ElasticInOut, Back, Bounce variants) and export. Then the start of "echarts/chart/base" (shared chart behavior: drag/drop calculable handlers) — this definition continues past the end of this chunk and is left untouched here. NOTE(review): `i.brushTyep` below looks like an upstream typo (reads an undefined property) — fix in upstream source, not in this build artifact. */},ElasticInOut:function(e){var t,i=.1,n=.4;return 0===e?0:1===e?1:(!i||1>i?(i=1,t=n/4):t=n*Math.asin(1/i)/(2*Math.PI),(e*=2)<1?-.5*i*Math.pow(2,10*(e-=1))*Math.sin(2*(e-t)*Math.PI/n):i*Math.pow(2,-10*(e-=1))*Math.sin(2*(e-t)*Math.PI/n)*.5+1)},BackIn:function(e){var t=1.70158;return e*e*((t+1)*e-t)},BackOut:function(e){var t=1.70158;return--e*e*((t+1)*e+t)+1},BackInOut:function(e){var t=2.5949095;return(e*=2)<1?.5*e*e*((t+1)*e-t):.5*((e-=2)*e*((t+1)*e+t)+2)},BounceIn:function(t){return 1-e.BounceOut(1-t)},BounceOut:function(e){return 1/2.75>e?7.5625*e*e:2/2.75>e?7.5625*(e-=1.5/2.75)*e+.75:2.5/2.75>e?7.5625*(e-=2.25/2.75)*e+.9375:7.5625*(e-=2.625/2.75)*e+.984375},BounceInOut:function(t){return.5>t?.5*e.BounceIn(2*t):.5*e.BounceOut(2*t-1)+.5}};return e}),define("echarts/chart/base",["require","zrender/shape/Image","../util/shape/Icon","../util/shape/MarkLine","../util/shape/Symbol","zrender/shape/Polyline","zrender/shape/ShapeBundle","../config","../util/ecData","../util/ecAnimation","../util/ecEffect","../util/accMath","../component/base","../layout/EdgeBundling","zrender/tool/util","zrender/tool/area"],function(e){function t(e){return null!=e.x&&null!=e.y}function i(e,t,i,n,a){u.call(this,e,t,i,n,a);var o=this;this.selectedMap={},this.lastShapeList=[],this.shapeHandler={onclick:function(){o.isClick=!0},ondragover:function(e){var t=e.target;t.highlightStyle=t.highlightStyle||{};var i=t.highlightStyle,n=i.brushTyep,a=i.strokeColor,r=i.lineWidth;i.brushType="stroke",i.strokeColor=o.ecTheme.calculableColor||h.calculableColor,i.lineWidth="icon"===t.type?30:10,o.zr.addHoverShape(t),setTimeout(function(){i&&(i.brushType=n,i.strokeColor=a,i.lineWidth=r)},20)},ondrop:function(e){null!=d.get(e.dragged,"data")&&(o.isDrop=!0)},ondragend:function(){o.isDragend=!0}}}var 
n=e("zrender/shape/Image"),a=e("../util/shape/Icon"),o=e("../util/shape/MarkLine"),r=e("../util/shape/Symbol"),s=e("zrender/shape/Polyline"),l=e("zrender/shape/ShapeBundle"),h=e("../config"),d=e("../util/ecData"),c=e("../util/ecAnimation"),m=e("../util/ecEffect"),p=e("../util/accMath"),u=e("../component/base"),V=e("../layout/EdgeBundling"),U=e("zrender/tool/util"),g=e("zrender/tool/area");return i.prototype={setCalculable:function(e){return e.dragEnableTime=this.ecTheme.DRAG_ENABLE_TIME||h.DRAG_ENABLE_TIME,e.ondragover=this.shapeHandler.ondragover,e.ondragend=this.shapeHandler.ondragend,e.ondrop=this.shapeHandler.ondrop,e},ondrop:function(e,t){if(this.isDrop&&e.target&&!t.dragIn){var i,n=e.target,a=e.dragged,o=d.get(n,"seriesIndex"),r=d.get(n,"dataIndex"),s=this.series,l=this.component.legend;if(-1===r){if(d.get(a,"seriesIndex")==o)return t.dragOut=t.dragIn=t.needRefresh=!0,void(this.isDrop=!1);i={value:d.get(a,"value"),name:d.get(a,"name")},this.type===h.CHART_TYPE_PIE&&i.value<0&&(i.value=0);for(var c=!1,m=s[o].data,u=0,V=m.length;V>u;u++)m[u].name===i.name&&"-"===m[u].value&&(s[o].data[u].value=i.value,c=!0);!c&&s[o].data.push(i),l&&l.add(i.name,a.style.color||a.style.strokeColor)}else i=s[o].data[r]||"-",null!=i.value?(s[o].data[r].value="-"!=i.value?p.accAdd(s[o].data[r].value,d.get(a,"value")):d.get(a,"value"),(this.type===h.CHART_TYPE_FUNNEL||this.type===h.CHART_TYPE_PIE)&&(l&&1===l.getRelatedAmount(i.name)&&this.component.legend.del(i.name),i.name+=this.option.nameConnector+d.get(a,"name"),l&&l.add(i.name,a.style.color||a.style.strokeColor))):s[o].data[r]="-"!=i?p.accAdd(s[o].data[r],d.get(a,"value")):d.get(a,"value");t.dragIn=t.dragIn||!0,this.isDrop=!1;var U=this;setTimeout(function(){U.zr.trigger("mousemove",e.event)},300)}},ondragend:function(e,t){if(this.isDragend&&e.target&&!t.dragOut){var i=e.target,n=d.get(i,"seriesIndex"),a=d.get(i,"dataIndex"),o=this.series;if(null!=o[n].data[a].value){o[n].data[a].value="-";var 
r=o[n].data[a].name,s=this.component.legend;s&&0===s.getRelatedAmount(r)&&s.del(r)}else o[n].data[a]="-";t.dragOut=!0,t.needRefresh=!0,this.isDragend=!1}},onlegendSelected:function(e,t){var i=e.selected;for(var n in this.selectedMap)this.selectedMap[n]!=i[n]&&(t.needRefresh=!0),this.selectedMap[n]=i[n]},_buildPosition:function(){this._symbol=this.option.symbolList,this._sIndex2ShapeMap={},this._sIndex2ColorMap={},this.selectedMap={},this.xMarkMap={};for(var e,t,i,n,a=this.series,o={top:[],bottom:[],left:[],right:[],other:[]},r=0,s=a.length;s>r;r++)a[r].type===this.type&&(a[r]=this.reformOption(a[r]),this.legendHoverLink=a[r].legendHoverLink||this.legendHoverLink,e=a[r].xAxisIndex,t=a[r].yAxisIndex,i=this.component.xAxis.getAxis(e),n=this.component.yAxis.getAxis(t),i.type===h.COMPONENT_TYPE_AXIS_CATEGORY?o[i.getPosition()].push(r):n.type===h.COMPONENT_TYPE_AXIS_CATEGORY?o[n.getPosition()].push(r):o.other.push(r));for(var l in o)o[l].length>0&&this._buildSinglePosition(l,o[l]);this.addShapeList()},_buildSinglePosition:function(e,t){var i=this._mapData(t),n=i.locationMap,a=i.maxDataLength;if(0!==a&&0!==n.length){switch(e){case"bottom":case"top":this._buildHorizontal(t,a,n,this.xMarkMap);break;case"left":case"right":this._buildVertical(t,a,n,this.xMarkMap);break;case"other":this._buildOther(t,a,n,this.xMarkMap)}for(var o=0,r=t.length;r>o;o++)this.buildMark(t[o])}},_mapData:function(e){for(var t,i,n,a,o=this.series,r=0,s={},l="__kener__stack__",d=this.component.legend,c=[],m=0,p=0,u=e.length;u>p;p++){if(t=o[e[p]],n=t.name,this._sIndex2ShapeMap[e[p]]=this._sIndex2ShapeMap[e[p]]||this.query(t,"symbol")||this._symbol[p%this._symbol.length],d){if(this.selectedMap[n]=d.isSelected(n),this._sIndex2ColorMap[e[p]]=d.getColor(n),a=d.getItemShape(n)){var V=a.style;if(this.type==h.CHART_TYPE_LINE)V.iconType="legendLineIcon",V.symbol=this._sIndex2ShapeMap[e[p]];else if(t.itemStyle.normal.barBorderWidth>0){var 
U=a.highlightStyle;V.brushType="both",V.x+=1,V.y+=1,V.width-=2,V.height-=2,V.strokeColor=U.strokeColor=t.itemStyle.normal.barBorderColor,U.lineWidth=3}d.setItemShape(n,a)}}else this.selectedMap[n]=!0,this._sIndex2ColorMap[e[p]]=this.zr.getColor(e[p]);this.selectedMap[n]&&(i=t.stack||l+e[p],null==s[i]?(s[i]=r,c[r]=[e[p]],r++):c[s[i]].push(e[p])),m=Math.max(m,t.data.length)}return{locationMap:c,maxDataLength:m}},_calculMarkMapXY:function(e,t,i){for(var n=this.series,a=0,o=t.length;o>a;a++)for(var r=0,s=t[a].length;s>r;r++){var l=t[a][r],h="xy"==i?0:"",d=this.component.grid,c=e[l];if("-1"!=i.indexOf("x")){c["counter"+h]>0&&(c["average"+h]=c["sum"+h]/c["counter"+h]);var m=this.component.xAxis.getAxis(n[l].xAxisIndex||0).getCoord(c["average"+h]);c["averageLine"+h]=[[m,d.getYend()],[m,d.getY()]],c["minLine"+h]=[[c["minX"+h],d.getYend()],[c["minX"+h],d.getY()]],c["maxLine"+h]=[[c["maxX"+h],d.getYend()],[c["maxX"+h],d.getY()]],c.isHorizontal=!1}if(h="xy"==i?1:"","-1"!=i.indexOf("y")){c["counter"+h]>0&&(c["average"+h]=c["sum"+h]/c["counter"+h]);var p=this.component.yAxis.getAxis(n[l].yAxisIndex||0).getCoord(c["average"+h]);c["averageLine"+h]=[[d.getX(),p],[d.getXend(),p]],c["minLine"+h]=[[d.getX(),c["minY"+h]],[d.getXend(),c["minY"+h]]],c["maxLine"+h]=[[d.getX(),c["maxY"+h]],[d.getXend(),c["maxY"+h]]],c.isHorizontal=!0}}},addLabel:function(e,t,i,n,a){var o=[i,t],r=this.deepMerge(o,"itemStyle.normal.label"),s=this.deepMerge(o,"itemStyle.emphasis.label"),l=r.textStyle||{},h=s.textStyle||{};if(r.show){var d=e.style;d.text=this._getLabelText(t,i,n,"normal"),d.textPosition=null==r.position?"horizontal"===a?"right":"top":r.position,d.textColor=l.color,d.textFont=this.getFont(l),d.textAlign=l.align,d.textBaseline=l.baseline}if(s.show){var 
c=e.highlightStyle;c.text=this._getLabelText(t,i,n,"emphasis"),c.textPosition=r.show?e.style.textPosition:null==s.position?"horizontal"===a?"right":"top":s.position,c.textColor=h.color,c.textFont=this.getFont(h),c.textAlign=h.align,c.textBaseline=h.baseline}return e},_getLabelText:function(e,t,i,n){var a=this.deepQuery([t,e],"itemStyle."+n+".label.formatter");a||"emphasis"!==n||(a=this.deepQuery([t,e],"itemStyle.normal.label.formatter"));var o=this.getDataFromOption(t,"-");return a?"function"==typeof a?a.call(this.myChart,{seriesName:e.name,series:e,name:i,value:o,data:t,status:n}):"string"==typeof a?a=a.replace("{a}","{a0}").replace("{b}","{b0}").replace("{c}","{c0}").replace("{a0}",e.name).replace("{b0}",i).replace("{c0}",this.numAddCommas(o)):void 0:o instanceof Array?null!=o[2]?this.numAddCommas(o[2]):o[0]+" , "+o[1]:this.numAddCommas(o)},buildMark:function(e){var t=this.series[e];this.selectedMap[t.name]&&(t.markLine&&this._buildMarkLine(e),t.markPoint&&this._buildMarkPoint(e))},_buildMarkPoint:function(e){for(var t,i,n=(this.markAttachStyle||{})[e],a=this.series[e],o=U.clone(a.markPoint),r=0,s=o.data.length;s>r;r++)t=o.data[r],i=this.getMarkCoord(e,t),t.x=null!=t.x?t.x:i[0],t.y=null!=t.y?t.y:i[1],!t.type||"max"!==t.type&&"min"!==t.type||(t.value=i[3],t.name=t.name||t.type,t.symbolSize=t.symbolSize||g.getTextWidth(i[3],this.getFont())/2+5);for(var l=this._markPoint(e,o),r=0,s=l.length;s>r;r++){var d=l[r];d.zlevel=a.zlevel,d.z=a.z+1;for(var c in n)d[c]=U.clone(n[c]);this.shapeList.push(d)}if(this.type===h.CHART_TYPE_FORCE||this.type===h.CHART_TYPE_CHORD)for(var r=0,s=l.length;s>r;r++)this.zr.addShape(l[r])},_buildMarkLine:function(e){for(var t,i=(this.markAttachStyle||{})[e],n=this.series[e],a=U.clone(n.markLine),o=0,r=a.data.length;r>o;o++){var 
s=a.data[o];!s.type||"max"!==s.type&&"min"!==s.type&&"average"!==s.type?t=[this.getMarkCoord(e,s[0]),this.getMarkCoord(e,s[1])]:(t=this.getMarkCoord(e,s),a.data[o]=[U.clone(s),{}],a.data[o][0].name=s.name||s.type,a.data[o][0].value="average"!==s.type?t[3]:+t[3].toFixed(null!=a.precision?a.precision:this.deepQuery([this.ecTheme,h],"markLine.precision")),t=t[2],s=[{},{}]),null!=t&&null!=t[0]&&null!=t[1]&&(a.data[o][0].x=null!=s[0].x?s[0].x:t[0][0],a.data[o][0].y=null!=s[0].y?s[0].y:t[0][1],a.data[o][1].x=null!=s[1].x?s[1].x:t[1][0],a.data[o][1].y=null!=s[1].y?s[1].y:t[1][1])}var d=this._markLine(e,a),c=a.large;if(c){var m=new l({style:{shapeList:d}}),p=d[0];if(p){U.merge(m.style,p.style),U.merge(m.highlightStyle={},p.highlightStyle),m.style.brushType="stroke",m.zlevel=n.zlevel,m.z=n.z+1,m.hoverable=!1;for(var u in i)m[u]=U.clone(i[u])}this.shapeList.push(m),this.zr.addShape(m),m._mark="largeLine";var V=a.effect;V.show&&(m.effect=V)}else{for(var o=0,r=d.length;r>o;o++){var g=d[o];g.zlevel=n.zlevel,g.z=n.z+1;for(var u in i)g[u]=U.clone(i[u]);this.shapeList.push(g)}if(this.type===h.CHART_TYPE_FORCE||this.type===h.CHART_TYPE_CHORD)for(var o=0,r=d.length;r>o;o++)this.zr.addShape(d[o])}},_markPoint:function(e,t){var i=this.series[e],n=this.component;U.merge(U.merge(t,U.clone(this.ecTheme.markPoint||{})),U.clone(h.markPoint)),t.name=i.name;var a,o,r,s,l,c,m,p=[],u=t.data,V=n.dataRange,g=n.legend,f=this.zr.getWidth(),y=this.zr.getHeight();if(t.large)a=this.getLargeMarkPointShape(e,t),a._mark="largePoint",a&&p.push(a);else for(var 
b=0,_=u.length;_>b;b++)null!=u[b].x&&null!=u[b].y&&(r=null!=u[b].value?u[b].value:"",g&&(o=g.getColor(i.name)),V&&(o=isNaN(r)?o:V.getColor(r),s=[u[b],t],l=this.deepQuery(s,"itemStyle.normal.color")||o,c=this.deepQuery(s,"itemStyle.emphasis.color")||l,null==l&&null==c)||(o=null==o?this.zr.getColor(e):o,u[b].tooltip=u[b].tooltip||t.tooltip||{trigger:"item"},u[b].name=null!=u[b].name?u[b].name:"",u[b].value=r,a=this.getSymbolShape(t,e,u[b],b,u[b].name,this.parsePercent(u[b].x,f),this.parsePercent(u[b].y,y),"pin",o,"rgba(0,0,0,0)","horizontal"),a._mark="point",m=this.deepMerge([u[b],t],"effect"),m.show&&(a.effect=m),i.type===h.CHART_TYPE_MAP&&(a._geo=this.getMarkGeo(u[b])),d.pack(a,i,e,u[b],b,u[b].name,r),p.push(a)));return p},_markLine:function(){function e(e,t){e[t]=e[t]instanceof Array?e[t].length>1?e[t]:[e[t][0],e[t][0]]:[e[t],e[t]]}return function(i,n){var a=this.series[i],o=this.component,r=o.dataRange,s=o.legend;U.merge(U.merge(n,U.clone(this.ecTheme.markLine||{})),U.clone(h.markLine));var l=s?s.getColor(a.name):this.zr.getColor(i);e(n,"symbol"),e(n,"symbolSize"),e(n,"symbolRotate");for(var c=n.data,m=[],p=this.zr.getWidth(),u=this.zr.getHeight(),g=0;g<c.length;g++){var f=c[g];if(t(f[0])&&t(f[1])){var y=this.deepMerge(f),b=[y,n],_=l,x=null!=y.value?y.value:"";if(r){_=isNaN(x)?_:r.getColor(x);var k=this.deepQuery(b,"itemStyle.normal.color")||_,v=this.deepQuery(b,"itemStyle.emphasis.color")||k;if(null==k&&null==v)continue}f[0].tooltip=y.tooltip||n.tooltip||{trigger:"item"},f[0].name=f[0].name||"",f[1].name=f[1].name||"",f[0].value=x,m.push({points:[[this.parsePercent(f[0].x,p),this.parsePercent(f[0].y,u)],[this.parsePercent(f[1].x,p),this.parsePercent(f[1].y,u)]],rawData:f,color:_})}}var L=this.query(n,"bundling.enable");if(L){var w=new V;w.maxTurningAngle=this.query(n,"bundling.maxTurningAngle")/180*Math.PI,m=w.run(m)}n.name=a.name;for(var W=[],g=0,X=m.length;X>g;g++){var 
I=m[g],S=I.rawEdge||I,f=S.rawData,x=null!=f.value?f.value:"",K=this.getMarkLineShape(n,i,f,g,I.points,L,S.color);K._mark="line";var C=this.deepMerge([f[0],f[1],n],"effect");C.show&&(K.effect=C,K.effect.large=n.large),a.type===h.CHART_TYPE_MAP&&(K._geo=[this.getMarkGeo(f[0]),this.getMarkGeo(f[1])]),d.pack(K,a,i,f[0],g,f[0].name+(""!==f[1].name?" > "+f[1].name:""),x),W.push(K)}return W}}(),getMarkCoord:function(){return[0,0]},getSymbolShape:function(e,t,i,o,r,s,l,h,c,m,p){var u=[i,e],V=this.getDataFromOption(i,"-");h=this.deepQuery(u,"symbol")||h;var U=this.deepQuery(u,"symbolSize");U="function"==typeof U?U(V):U,"number"==typeof U&&(U=[U,U]);var g=this.deepQuery(u,"symbolRotate"),f=this.deepMerge(u,"itemStyle.normal"),y=this.deepMerge(u,"itemStyle.emphasis"),b=null!=f.borderWidth?f.borderWidth:f.lineStyle&&f.lineStyle.width;null==b&&(b=h.match("empty")?2:0);var _=null!=y.borderWidth?y.borderWidth:y.lineStyle&&y.lineStyle.width;null==_&&(_=b+2);var x=this.getItemStyleColor(f.color,t,o,i),k=this.getItemStyleColor(y.color,t,o,i),v=U[0],L=U[1],w=new a({style:{iconType:h.replace("empty","").toLowerCase(),x:s-v,y:l-L,width:2*v,height:2*L,brushType:"both",color:h.match("empty")?m:x||c,strokeColor:f.borderColor||x||c,lineWidth:b},highlightStyle:{color:h.match("empty")?m:k||x||c,strokeColor:y.borderColor||f.borderColor||k||x||c,lineWidth:_},clickable:this.deepQuery(u,"clickable")});return h.match("image")&&(w.style.image=h.replace(new RegExp("^image:\\/\\/"),""),w=new 
n({style:w.style,highlightStyle:w.highlightStyle,clickable:this.deepQuery(u,"clickable")})),null!=g&&(w.rotation=[g*Math.PI/180,s,l]),h.match("star")&&(w.style.iconType="star",w.style.n=h.replace("empty","").replace("star","")-0||5),"none"===h&&(w.invisible=!0,w.hoverable=!1),w=this.addLabel(w,e,i,r,p),h.match("empty")&&(null==w.style.textColor&&(w.style.textColor=w.style.strokeColor),null==w.highlightStyle.textColor&&(w.highlightStyle.textColor=w.highlightStyle.strokeColor)),d.pack(w,e,t,i,o,r),w._x=s,w._y=l,w._dataIndex=o,w._seriesIndex=t,w},getMarkLineShape:function(e,t,i,n,a,r,l){var h=null!=i[0].value?i[0].value:"-",d=null!=i[1].value?i[1].value:"-",c=[i[0].symbol||e.symbol[0],i[1].symbol||e.symbol[1]],m=[i[0].symbolSize||e.symbolSize[0],i[1].symbolSize||e.symbolSize[1]];m[0]="function"==typeof m[0]?m[0](h):m[0],m[1]="function"==typeof m[1]?m[1](d):m[1];var p=[this.query(i[0],"symbolRotate")||e.symbolRotate[0],this.query(i[1],"symbolRotate")||e.symbolRotate[1]],u=[i[0],i[1],e],V=this.deepMerge(u,"itemStyle.normal");V.color=this.getItemStyleColor(V.color,t,n,i);var U=this.deepMerge(u,"itemStyle.emphasis");U.color=this.getItemStyleColor(U.color,t,n,i);var g=V.lineStyle,f=U.lineStyle,y=g.width;null==y&&(y=V.borderWidth);var b=f.width;null==b&&(b=null!=U.borderWidth?U.borderWidth:y+2);var _=this.deepQuery(u,"smoothness");this.deepQuery(u,"smooth")||(_=0);var x=r?s:o,k=new 
x({style:{symbol:c,symbolSize:m,symbolRotate:p,brushType:"both",lineType:g.type,shadowColor:g.shadowColor||g.color||V.borderColor||V.color||l,shadowBlur:g.shadowBlur,shadowOffsetX:g.shadowOffsetX,shadowOffsetY:g.shadowOffsetY,color:V.color||l,strokeColor:g.color||V.borderColor||V.color||l,lineWidth:y,symbolBorderColor:V.borderColor||V.color||l,symbolBorder:V.borderWidth},highlightStyle:{shadowColor:f.shadowColor,shadowBlur:f.shadowBlur,shadowOffsetX:f.shadowOffsetX,shadowOffsetY:f.shadowOffsetY,color:U.color||V.color||l,strokeColor:f.color||g.color||U.borderColor||V.borderColor||U.color||V.color||l,lineWidth:b,symbolBorderColor:U.borderColor||V.borderColor||U.color||V.color||l,symbolBorder:null==U.borderWidth?V.borderWidth+2:U.borderWidth},clickable:this.deepQuery(u,"clickable")}),v=k.style;return r?(v.pointList=a,v.smooth=_):(v.xStart=a[0][0],v.yStart=a[0][1],v.xEnd=a[1][0],v.yEnd=a[1][1],v.curveness=_,k.updatePoints(k.style)),k=this.addLabel(k,e,i[0],i[0].name+" : "+i[1].name)},getLargeMarkPointShape:function(e,t){var i,n,a,o,s,l,h=this.series[e],d=this.component,c=t.data,m=d.dataRange,p=d.legend,u=[c[0],t];if(p&&(n=p.getColor(h.name)),!m||(a=null!=c[0].value?c[0].value:"",n=isNaN(a)?n:m.getColor(a),o=this.deepQuery(u,"itemStyle.normal.color")||n,s=this.deepQuery(u,"itemStyle.emphasis.color")||o,null!=o||null!=s)){n=this.deepMerge(u,"itemStyle.normal").color||n;var V=this.deepQuery(u,"symbol")||"circle";V=V.replace("empty","").replace(/\d/g,""),l=this.deepMerge([c[0],t],"effect");var U=window.devicePixelRatio||1;return i=new r({style:{pointList:c,color:n,strokeColor:n,shadowColor:l.shadowColor||n,shadowBlur:(null!=l.shadowBlur?l.shadowBlur:8)*U,size:this.deepQuery(u,"symbolSize"),iconType:V,brushType:"fill",lineWidth:1},draggable:!1,hoverable:!1}),l.show&&(i.effect=l),i}},backupShapeList:function(){this.shapeList&&this.shapeList.length>0?(this.lastShapeList=this.shapeList,this.shapeList=[]):this.lastShapeList=[]},addShapeList:function(){var 
e,t,i=this.option.animationThreshold/(this.canvasSupported?2:4),n=this.lastShapeList,a=this.shapeList,o=n.length>0,r=o?this.query(this.option,"animationDurationUpdate"):this.query(this.option,"animationDuration"),s=this.query(this.option,"animationEasing"),l={},d={};if(this.option.animation&&!this.option.renderAsImage&&a.length<i&&!this.motionlessOnce){for(var c=0,m=n.length;m>c;c++)t=this._getAnimationKey(n[c]),t.match("undefined")?this.zr.delShape(n[c].id):(t+=n[c].type,l[t]?this.zr.delShape(n[c].id):l[t]=n[c]);for(var c=0,m=a.length;m>c;c++)t=this._getAnimationKey(a[c]),t.match("undefined")?this.zr.addShape(a[c]):(t+=a[c].type,d[t]=a[c]);for(t in l)d[t]||this.zr.delShape(l[t].id);for(t in d)l[t]?(this.zr.delShape(l[t].id),this._animateMod(l[t],d[t],r,s,0,o)):(e=this.type!=h.CHART_TYPE_LINE&&this.type!=h.CHART_TYPE_RADAR||0===t.indexOf("icon")?0:r/2,this._animateMod(!1,d[t],r,s,e,o));this.zr.refresh(),this.animationEffect()}else{this.motionlessOnce=!1,this.zr.delShape(n);for(var c=0,m=a.length;m>c;c++)this.zr.addShape(a[c])}},_getAnimationKey:function(e){return 
this.type!=h.CHART_TYPE_MAP&&this.type!=h.CHART_TYPE_TREEMAP&&this.type!=h.CHART_TYPE_VENN&&this.type!=h.CHART_TYPE_TREE?d.get(e,"seriesIndex")+"_"+d.get(e,"dataIndex")+(e._mark?e._mark:"")+(this.type===h.CHART_TYPE_RADAR?d.get(e,"special"):""):d.get(e,"seriesIndex")+"_"+d.get(e,"dataIndex")+(e._mark?e._mark:"undefined")},_animateMod:function(e,t,i,n,a,o){switch(t.type){case"polyline":case"half-smooth-polygon":c.pointList(this.zr,e,t,i,n);break;case"rectangle":c.rectangle(this.zr,e,t,i,n);break;case"image":case"icon":c.icon(this.zr,e,t,i,n,a);break;case"candle":o?this.zr.addShape(t):c.candle(this.zr,e,t,i,n);break;case"ring":case"sector":case"circle":o?"sector"===t.type?c.sector(this.zr,e,t,i,n):this.zr.addShape(t):c.ring(this.zr,e,t,i+(d.get(t,"dataIndex")||0)%20*100,n);break;case"text":c.text(this.zr,e,t,i,n);break;case"polygon":o?c.pointList(this.zr,e,t,i,n):c.polygon(this.zr,e,t,i,n);break;case"ribbon":c.ribbon(this.zr,e,t,i,n);break;case"gauge-pointer":c.gaugePointer(this.zr,e,t,i,n);break;case"mark-line":c.markline(this.zr,e,t,i,n);break;case"bezier-curve":case"line":c.line(this.zr,e,t,i,n);break;default:this.zr.addShape(t)}},animationMark:function(e,t,i){for(var i=i||this.shapeList,n=0,a=i.length;a>n;n++)i[n]._mark&&this._animateMod(!1,i[n],e,t,0,!0);this.animationEffect(i)},animationEffect:function(e){if(!e&&this.clearEffectShape(),e=e||this.shapeList,null!=e){var t=h.EFFECT_ZLEVEL;this.canvasSupported&&this.zr.modLayer(t,{motionBlur:!0,lastFrameAlpha:this.option.effectBlendAlpha||h.effectBlendAlpha});for(var i,n=0,a=e.length;a>n;n++)i=e[n],i._mark&&i.effect&&i.effect.show&&m[i._mark]&&(m[i._mark](this.zr,this.effectList,i,t),this.effectList[this.effectList.length-1]._mark=i._mark)}},clearEffectShape:function(e){var t=this.effectList;if(this.zr&&t&&t.length>0){e&&this.zr.modLayer(h.EFFECT_ZLEVEL,{motionBlur:!1}),this.zr.delShape(t);for(var i=0;i<t.length;i++)t[i].effectAnimator&&t[i].effectAnimator.stop()}this.effectList=[]},addMark:function(e,t,i){var 
n=this.series[e];if(this.selectedMap[n.name]){var a=this.query(this.option,"animationDurationUpdate"),o=this.query(this.option,"animationEasing"),r=n[i].data,s=this.shapeList.length;if(n[i].data=t.data,this["_build"+i.replace("m","M")](e),this.option.animation&&!this.option.renderAsImage)this.animationMark(a,o,this.shapeList.slice(s));else{for(var l=s,h=this.shapeList.length;h>l;l++)this.zr.addShape(this.shapeList[l]);this.zr.refreshNextFrame()}n[i].data=r}},delMark:function(e,t,i){i=i.replace("mark","").replace("large","").toLowerCase();var n=this.series[e];if(this.selectedMap[n.name]){for(var a=!1,o=[this.shapeList,this.effectList],r=2;r--;)for(var s=0,l=o[r].length;l>s;s++)if(o[r][s]._mark==i&&d.get(o[r][s],"seriesIndex")==e&&d.get(o[r][s],"name")==t){this.zr.delShape(o[r][s].id),o[r].splice(s,1),a=!0;break}a&&this.zr.refreshNextFrame()}}},U.inherits(i,u),i}),define("zrender/shape/Circle",["require","./Base","../tool/util"],function(e){"use strict";var t=e("./Base"),i=function(e){t.call(this,e)};return i.prototype={type:"circle",buildPath:function(e,t){e.moveTo(t.x+t.r,t.y),e.arc(t.x,t.y,t.r,0,2*Math.PI,!0)},getRect:function(e){if(e.__rect)return e.__rect;var t;return t="stroke"==e.brushType||"fill"==e.brushType?e.lineWidth||1:0,e.__rect={x:Math.round(e.x-e.r-t/2),y:Math.round(e.y-e.r-t/2),width:2*e.r+t,height:2*e.r+t},e.__rect}},e("../tool/util").inherits(i,t),i}),define("echarts/util/accMath",[],function(){function e(e,t){var i=e.toString(),n=t.toString(),a=0;try{a=n.split(".")[1].length}catch(o){}try{a-=i.split(".")[1].length}catch(o){}return(i.replace(".","")-0)/(n.replace(".","")-0)*Math.pow(10,a)}function t(e,t){var i=e.toString(),n=t.toString(),a=0;try{a+=i.split(".")[1].length}catch(o){}try{a+=n.split(".")[1].length}catch(o){}return(i.replace(".","")-0)*(n.replace(".","")-0)/Math.pow(10,a)}function i(e,t){var i=0,n=0;try{i=e.toString().split(".")[1].length}catch(a){}try{n=t.toString().split(".")[1].length}catch(a){}var 
o=Math.pow(10,Math.max(i,n));return(Math.round(e*o)+Math.round(t*o))/o}function n(e,t){return i(e,-t)}return{accDiv:e,accMul:t,accAdd:i,accSub:n}}),define("echarts/util/shape/Icon",["require","zrender/tool/util","zrender/shape/Star","zrender/shape/Heart","zrender/shape/Droplet","zrender/shape/Image","zrender/shape/Base"],function(e){function t(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i,n+t.height),e.lineTo(i+5*a,n+14*o),e.lineTo(i+t.width,n+3*o),e.lineTo(i+13*a,n),e.lineTo(i+2*a,n+11*o),e.lineTo(i,n+t.height),e.moveTo(i+6*a,n+10*o),e.lineTo(i+14*a,n+2*o),e.moveTo(i+10*a,n+13*o),e.lineTo(i+t.width,n+13*o),e.moveTo(i+13*a,n+10*o),e.lineTo(i+13*a,n+t.height)}function i(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i,n+t.height),e.lineTo(i+5*a,n+14*o),e.lineTo(i+t.width,n+3*o),e.lineTo(i+13*a,n),e.lineTo(i+2*a,n+11*o),e.lineTo(i,n+t.height),e.moveTo(i+6*a,n+10*o),e.lineTo(i+14*a,n+2*o),e.moveTo(i+10*a,n+13*o),e.lineTo(i+t.width,n+13*o)}function n(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i+4*a,n+15*o),e.lineTo(i+9*a,n+13*o),e.lineTo(i+14*a,n+8*o),e.lineTo(i+11*a,n+5*o),e.lineTo(i+6*a,n+10*o),e.lineTo(i+4*a,n+15*o),e.moveTo(i+5*a,n),e.lineTo(i+11*a,n),e.moveTo(i+5*a,n+o),e.lineTo(i+11*a,n+o),e.moveTo(i,n+2*o),e.lineTo(i+t.width,n+2*o),e.moveTo(i,n+5*o),e.lineTo(i+3*a,n+t.height),e.lineTo(i+13*a,n+t.height),e.lineTo(i+t.width,n+5*o)}function a(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i,n+3*o),e.lineTo(i+6*a,n+3*o),e.moveTo(i+3*a,n),e.lineTo(i+3*a,n+6*o),e.moveTo(i+3*a,n+8*o),e.lineTo(i+3*a,n+t.height),e.lineTo(i+t.width,n+t.height),e.lineTo(i+t.width,n+3*o),e.lineTo(i+8*a,n+3*o)}function o(e,t){var 
i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i+6*a,n),e.lineTo(i+2*a,n+3*o),e.lineTo(i+6*a,n+6*o),e.moveTo(i+2*a,n+3*o),e.lineTo(i+14*a,n+3*o),e.lineTo(i+14*a,n+11*o),e.moveTo(i+2*a,n+5*o),e.lineTo(i+2*a,n+13*o),e.lineTo(i+14*a,n+13*o),e.moveTo(i+10*a,n+10*o),e.lineTo(i+14*a,n+13*o),e.lineTo(i+10*a,n+t.height)}function r(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16,r=t.width/2;e.lineWidth=1.5,e.arc(i+r,n+r,r-a,0,2*Math.PI/3),e.moveTo(i+3*a,n+t.height),e.lineTo(i+0*a,n+12*o),e.lineTo(i+5*a,n+11*o),e.moveTo(i,n+8*o),e.arc(i+r,n+r,r-a,Math.PI,5*Math.PI/3),e.moveTo(i+13*a,n),e.lineTo(i+t.width,n+4*o),e.lineTo(i+11*a,n+5*o)}function s(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i,n),e.lineTo(i,n+t.height),e.lineTo(i+t.width,n+t.height),e.moveTo(i+2*a,n+14*o),e.lineTo(i+7*a,n+6*o),e.lineTo(i+11*a,n+11*o),e.lineTo(i+15*a,n+2*o)}function l(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i,n),e.lineTo(i,n+t.height),e.lineTo(i+t.width,n+t.height),e.moveTo(i+3*a,n+14*o),e.lineTo(i+3*a,n+6*o),e.lineTo(i+4*a,n+6*o),e.lineTo(i+4*a,n+14*o),e.moveTo(i+7*a,n+14*o),e.lineTo(i+7*a,n+2*o),e.lineTo(i+8*a,n+2*o),e.lineTo(i+8*a,n+14*o),e.moveTo(i+11*a,n+14*o),e.lineTo(i+11*a,n+9*o),e.lineTo(i+12*a,n+9*o),e.lineTo(i+12*a,n+14*o)}function h(e,t){var i=t.x,n=t.y,a=t.width-2,o=t.height-2,r=Math.min(a,o)/2;n+=2,e.moveTo(i+r+3,n+r-3),e.arc(i+r+3,n+r-3,r-1,0,-Math.PI/2,!0),e.lineTo(i+r+3,n+r-3),e.moveTo(i+r,n),e.lineTo(i+r,n+r),e.arc(i+r,n+r,r,-Math.PI/2,2*Math.PI,!0),e.lineTo(i+r,n+r),e.lineWidth=1.5}function d(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;n-=o,e.moveTo(i+1*a,n+2*o),e.lineTo(i+15*a,n+2*o),e.lineTo(i+14*a,n+3*o),e.lineTo(i+2*a,n+3*o),e.moveTo(i+3*a,n+6*o),e.lineTo(i+13*a,n+6*o),e.lineTo(i+12*a,n+7*o),e.lineTo(i+4*a,n+7*o),e.moveTo(i+5*a,n+10*o),e.lineTo(i+11*a,n+10*o),e.lineTo(i+10*a,n+11*o),e.lineTo(i+6*a,n+11*o),e.moveTo(i+7*a,n+14*o),e.lineTo(i+9*a,n+14*o),e.lineTo(i+8*a,n+15*o),e.lineTo(i+7*a,n+15*o)}function c(e,t){var 
i=t.x,n=t.y,a=t.width,o=t.height,r=a/16,s=o/16,l=2*Math.min(r,s);e.moveTo(i+r+l,n+s+l),e.arc(i+r,n+s,l,Math.PI/4,3*Math.PI),e.lineTo(i+7*r-l,n+6*s-l),e.arc(i+7*r,n+6*s,l,Math.PI/4*5,4*Math.PI),e.arc(i+7*r,n+6*s,l/2,Math.PI/4*5,4*Math.PI),e.moveTo(i+7*r-l/2,n+6*s+l),e.lineTo(i+r+l,n+14*s-l),e.arc(i+r,n+14*s,l,-Math.PI/4,2*Math.PI),e.moveTo(i+7*r+l/2,n+6*s),e.lineTo(i+14*r-l,n+10*s-l/2),e.moveTo(i+16*r,n+10*s),e.arc(i+14*r,n+10*s,l,0,3*Math.PI),e.lineWidth=1.5}function m(e,t){var i=t.x,n=t.y,a=t.width,o=t.height,r=Math.min(a,o)/2;e.moveTo(i+a,n+o/2),e.arc(i+r,n+r,r,0,2*Math.PI),e.arc(i+r,n,r,Math.PI/4,Math.PI/5*4),e.arc(i,n+r,r,-Math.PI/3,Math.PI/3),e.arc(i+a,n+o,r,Math.PI,Math.PI/2*3),e.lineWidth=1.5}function p(e,t){for(var i=t.x,n=t.y,a=t.width,o=t.height,r=Math.round(o/3),s=Math.round((r-2)/2),l=3;l--;)e.rect(i,n+r*l+s,a,2)}function u(e,t){for(var i=t.x,n=t.y,a=t.width,o=t.height,r=Math.round(a/3),s=Math.round((r-2)/2),l=3;l--;)e.rect(i+r*l+s,n,2,o)}function V(e,t){var i=t.x,n=t.y,a=t.width/16;e.moveTo(i+a,n),e.lineTo(i+a,n+t.height),e.lineTo(i+15*a,n+t.height),e.lineTo(i+15*a,n),e.lineTo(i+a,n),e.moveTo(i+3*a,n+3*a),e.lineTo(i+13*a,n+3*a),e.moveTo(i+3*a,n+6*a),e.lineTo(i+13*a,n+6*a),e.moveTo(i+3*a,n+9*a),e.lineTo(i+13*a,n+9*a),e.moveTo(i+3*a,n+12*a),e.lineTo(i+9*a,n+12*a)}function U(e,t){var i=t.x,n=t.y,a=t.width/16,o=t.height/16;e.moveTo(i,n),e.lineTo(i,n+t.height),e.lineTo(i+t.width,n+t.height),e.lineTo(i+t.width,n),e.lineTo(i,n),e.moveTo(i+4*a,n),e.lineTo(i+4*a,n+8*o),e.lineTo(i+12*a,n+8*o),e.lineTo(i+12*a,n),e.moveTo(i+6*a,n+11*o),e.lineTo(i+6*a,n+13*o),e.lineTo(i+10*a,n+13*o),e.lineTo(i+10*a,n+11*o),e.lineTo(i+6*a,n+11*o)}function g(e,t){var i=t.x,n=t.y,a=t.width,o=t.height;e.moveTo(i,n+o/2),e.lineTo(i+a,n+o/2),e.moveTo(i+a/2,n),e.lineTo(i+a/2,n+o)}function f(e,t){var i=t.width/2,n=t.height/2,a=Math.min(i,n);e.moveTo(t.x+i+a,t.y+n),e.arc(t.x+i,t.y+n,a,0,2*Math.PI),e.closePath()}function y(e,t){e.rect(t.x,t.y,t.width,t.height),e.closePath()}function 
b(e,t){var i=t.width/2,n=t.height/2,a=t.x+i,o=t.y+n,r=Math.min(i,n);e.moveTo(a,o-r),e.lineTo(a+r,o+r),e.lineTo(a-r,o+r),e.lineTo(a,o-r),e.closePath()}function _(e,t){var i=t.width/2,n=t.height/2,a=t.x+i,o=t.y+n,r=Math.min(i,n);e.moveTo(a,o-r),e.lineTo(a+r,o),e.lineTo(a,o+r),e.lineTo(a-r,o),e.lineTo(a,o-r),e.closePath()}function x(e,t){var i=t.x,n=t.y,a=t.width/16;e.moveTo(i+8*a,n),e.lineTo(i+a,n+t.height),e.lineTo(i+8*a,n+t.height/4*3),e.lineTo(i+15*a,n+t.height),e.lineTo(i+8*a,n),e.closePath()}function k(t,i){var n=e("zrender/shape/Star"),a=i.width/2,o=i.height/2;n.prototype.buildPath(t,{x:i.x+a,y:i.y+o,r:Math.min(a,o),n:i.n||5})}function v(t,i){var n=e("zrender/shape/Heart");n.prototype.buildPath(t,{x:i.x+i.width/2,y:i.y+.2*i.height,a:i.width/2,b:.8*i.height})}function L(t,i){var n=e("zrender/shape/Droplet");n.prototype.buildPath(t,{x:i.x+.5*i.width,y:i.y+.5*i.height,a:.5*i.width,b:.8*i.height})}function w(e,t){var i=t.x,n=t.y-t.height/2*1.5,a=t.width/2,o=t.height/2,r=Math.min(a,o);e.arc(i+a,n+o,r,Math.PI/5*4,Math.PI/5),e.lineTo(i+a,n+o+1.5*r),e.closePath()}function W(t,i,n){var a=e("zrender/shape/Image");this._imageShape=this._imageShape||new a({style:{}});for(var o in i)this._imageShape.style[o]=i[o];this._imageShape.brush(t,!1,n)}function X(e){S.call(this,e)}var I=e("zrender/tool/util"),S=e("zrender/shape/Base");return X.prototype={type:"icon",iconLibrary:{mark:t,markUndo:i,markClear:n,dataZoom:a,dataZoomReset:o,restore:r,lineChart:s,barChart:l,pieChart:h,funnelChart:d,forceChart:c,chordChart:m,stackChart:p,tiledChart:u,dataView:V,saveAsImage:U,cross:g,circle:f,rectangle:y,triangle:b,diamond:_,arrow:x,star:k,heart:v,droplet:L,pin:w,image:W},brush:function(t,i,n){var a=i?this.highlightStyle:this.style;a=a||{};var o=a.iconType||this.style.iconType;if("image"===o){var r=e("zrender/shape/Image");r.prototype.brush.call(this,t,i,n)}else{var 
a=this.beforeBrush(t,i);switch(t.beginPath(),this.buildPath(t,a,n),a.brushType){case"both":t.fill();case"stroke":a.lineWidth>0&&t.stroke();break;default:t.fill()}this.drawText(t,a,this.style),this.afterBrush(t)}},buildPath:function(e,t,i){this.iconLibrary[t.iconType]?this.iconLibrary[t.iconType].call(this,e,t,i):(e.moveTo(t.x,t.y),e.lineTo(t.x+t.width,t.y),e.lineTo(t.x+t.width,t.y+t.height),e.lineTo(t.x,t.y+t.height),e.lineTo(t.x,t.y),e.closePath())},getRect:function(e){return e.__rect?e.__rect:(e.__rect={x:Math.round(e.x),y:Math.round(e.y-("pin"==e.iconType?e.height/2*1.5:0)),width:e.width,height:e.height*("pin"===e.iconType?1.25:1)},e.__rect)},isCover:function(e,t){var i=this.transformCoordToLocal(e,t);e=i[0],t=i[1];var n=this.style.__rect;n||(n=this.style.__rect=this.getRect(this.style));var a=n.height<8||n.width<8?4:0;return e>=n.x-a&&e<=n.x+n.width+a&&t>=n.y-a&&t<=n.y+n.height+a}},I.inherits(X,S),X}),define("echarts/util/shape/MarkLine",["require","zrender/shape/Base","./Icon","zrender/shape/Line","zrender/shape/BezierCurve","zrender/tool/area","zrender/shape/util/dashedLineTo","zrender/tool/util","zrender/tool/curve"],function(e){function t(e){i.call(this,e),this.style.curveness>0&&this.updatePoints(this.style),this.highlightStyle.curveness>0&&this.updatePoints(this.highlightStyle)}var i=e("zrender/shape/Base"),n=e("./Icon"),a=e("zrender/shape/Line"),o=new a({}),r=e("zrender/shape/BezierCurve"),s=new r({}),l=e("zrender/tool/area"),h=e("zrender/shape/util/dashedLineTo"),d=e("zrender/tool/util"),c=e("zrender/tool/curve");return t.prototype={type:"mark-line",brush:function(e,t){var i=this.style;t&&(i=this.getHighlightStyle(i,this.highlightStyle||{})),e.save(),this.setContext(e,i),this.setTransform(e),e.save(),e.beginPath(),this.buildPath(e,i),e.stroke(),e.restore(),this.brushSymbol(e,i,0),this.brushSymbol(e,i,1),this.drawText(e,i,this.style),e.restore()},buildPath:function(e,t){var i=t.lineType||"solid";
if(e.moveTo(t.xStart,t.yStart),t.curveness>0){var n=null;switch(i){case"dashed":n=[5,5];break;case"dotted":n=[1,1]}n&&e.setLineDash&&e.setLineDash(n),e.quadraticCurveTo(t.cpX1,t.cpY1,t.xEnd,t.yEnd)}else if("solid"==i)e.lineTo(t.xEnd,t.yEnd);else{var a=(t.lineWidth||1)*("dashed"==t.lineType?5:1);h(e,t.xStart,t.yStart,t.xEnd,t.yEnd,a)}},updatePoints:function(e){var t=e.curveness||0,i=1,n=e.xStart,a=e.yStart,o=e.xEnd,r=e.yEnd,s=(n+o)/2-i*(a-r)*t,l=(a+r)/2-i*(o-n)*t;e.cpX1=s,e.cpY1=l},brushSymbol:function(e,t,i){if("none"!=t.symbol[i]){e.save(),e.beginPath(),e.lineWidth=t.symbolBorder,e.strokeStyle=t.symbolBorderColor;var a=t.symbol[i].replace("empty","").toLowerCase();t.symbol[i].match("empty")&&(e.fillStyle="#fff");var o=t.xStart,r=t.yStart,s=t.xEnd,l=t.yEnd,h=0===i?o:s,d=0===i?r:l,m=t.curveness||0,p=null!=t.symbolRotate[i]?t.symbolRotate[i]-0:0;if(p=p/180*Math.PI,"arrow"==a&&0===p)if(0===m){var u=0===i?-1:1;p=Math.PI/2+Math.atan2(u*(l-r),u*(s-o))}else{var V=t.cpX1,U=t.cpY1,g=c.quadraticDerivativeAt,f=g(o,V,s,i),y=g(r,U,l,i);p=Math.PI/2+Math.atan2(y,f)}e.translate(h,d),0!==p&&e.rotate(p);var b=t.symbolSize[i];n.prototype.buildPath(e,{x:-b,y:-b,width:2*b,height:2*b,iconType:a}),e.closePath(),e.fill(),e.stroke(),e.restore()}},getRect:function(e){return e.curveness>0?s.getRect(e):o.getRect(e),e.__rect},isCover:function(e,t){var i=this.transformCoordToLocal(e,t);return e=i[0],t=i[1],this.isCoverRect(e,t)?this.style.curveness>0?l.isInside(s,this.style,e,t):l.isInside(o,this.style,e,t):!1}},d.inherits(t,i),t}),define("echarts/util/shape/Symbol",["require","zrender/shape/Base","zrender/shape/Polygon","zrender/tool/util","./normalIsCover"],function(e){function t(e){i.call(this,e)}var i=e("zrender/shape/Base"),n=e("zrender/shape/Polygon"),a=new n({}),o=e("zrender/tool/util");return t.prototype={type:"symbol",buildPath:function(e,t){var i=t.pointList,n=i.length;if(0!==n)for(var a,o,r,s,l,h=1e4,d=Math.ceil(n/h),c=i[0]instanceof 
Array,m=t.size?t.size:2,p=m,u=m/2,V=2*Math.PI,U=0;d>U;U++){e.beginPath(),a=U*h,o=a+h,o=o>n?n:o;for(var g=a;o>g;g++)if(t.random&&(r=t["randomMap"+g%20]/100,p=m*r*r,u=p/2),c?(s=i[g][0],l=i[g][1]):(s=i[g].x,l=i[g].y),3>p)e.rect(s-u,l-u,p,p);else switch(t.iconType){case"circle":e.moveTo(s,l),e.arc(s,l,u,0,V,!0);break;case"diamond":e.moveTo(s,l-u),e.lineTo(s+u/3,l-u/3),e.lineTo(s+u,l),e.lineTo(s+u/3,l+u/3),e.lineTo(s,l+u),e.lineTo(s-u/3,l+u/3),e.lineTo(s-u,l),e.lineTo(s-u/3,l-u/3),e.lineTo(s,l-u);break;default:e.rect(s-u,l-u,p,p)}if(e.closePath(),d-1>U)switch(t.brushType){case"both":e.fill(),t.lineWidth>0&&e.stroke();break;case"stroke":t.lineWidth>0&&e.stroke();break;default:e.fill()}}},getRect:function(e){return e.__rect||a.getRect(e)},isCover:e("./normalIsCover")},o.inherits(t,i),t}),define("zrender/shape/Polyline",["require","./Base","./util/smoothSpline","./util/smoothBezier","./util/dashedLineTo","./Polygon","../tool/util"],function(e){var t=e("./Base"),i=e("./util/smoothSpline"),n=e("./util/smoothBezier"),a=e("./util/dashedLineTo"),o=function(e){this.brushTypeOnly="stroke",this.textPosition="end",t.call(this,e)};return o.prototype={type:"polyline",buildPath:function(e,t){var n=t.pointList;if(!(n.length<2)){var o=Math.min(t.pointList.length,Math.round(t.pointListLength||t.pointList.length));if(t.smooth&&"spline"!==t.smooth){t.controlPointList||this.updateControlPoints(t);var r=t.controlPointList;e.moveTo(n[0][0],n[0][1]);for(var s,l,h,d=0;o-1>d;d++)s=r[2*d],l=r[2*d+1],h=n[d+1],e.bezierCurveTo(s[0],s[1],l[0],l[1],h[0],h[1])}else if("spline"===t.smooth&&(n=i(n),o=n.length),t.lineType&&"solid"!=t.lineType){if("dashed"==t.lineType||"dotted"==t.lineType){var c=(t.lineWidth||1)*("dashed"==t.lineType?5:1);e.moveTo(n[0][0],n[0][1]);for(var d=1;o>d;d++)a(e,n[d-1][0],n[d-1][1],n[d][0],n[d][1],c)}}else{e.moveTo(n[0][0],n[0][1]);for(var 
d=1;o>d;d++)e.lineTo(n[d][0],n[d][1])}}},updateControlPoints:function(e){e.controlPointList=n(e.pointList,e.smooth,!1,e.smoothConstraint)},getRect:function(t){return e("./Polygon").prototype.getRect(t)}},e("../tool/util").inherits(o,t),o}),define("zrender/shape/ShapeBundle",["require","./Base","../tool/util"],function(e){var t=e("./Base"),i=function(e){t.call(this,e)};return i.prototype={constructor:i,type:"shape-bundle",brush:function(e,t){var i=this.beforeBrush(e,t);e.beginPath();for(var n=0;n<i.shapeList.length;n++){var a=i.shapeList[n],o=a.style;t&&(o=a.getHighlightStyle(o,a.highlightStyle||{},a.brushTypeOnly)),a.buildPath(e,o)}switch(i.brushType){case"both":e.fill();case"stroke":i.lineWidth>0&&e.stroke();break;default:e.fill()}this.drawText(e,i,this.style),this.afterBrush(e)},getRect:function(e){if(e.__rect)return e.__rect;for(var t=1/0,i=-(1/0),n=1/0,a=-(1/0),o=0;o<e.shapeList.length;o++)var r=e.shapeList[o],s=r.getRect(r.style),t=Math.min(s.x,t),n=Math.min(s.y,n),i=Math.max(s.x+s.width,i),a=Math.max(s.y+s.height,a);return e.__rect={x:t,y:n,width:i-t,height:a-n},e.__rect},isCover:function(e,t){var i=this.transformCoordToLocal(e,t);if(e=i[0],t=i[1],this.isCoverRect(e,t))for(var n=0;n<this.style.shapeList.length;n++){var a=this.style.shapeList[n];if(a.isCover(e,t))return!0}return!1}},e("../tool/util").inherits(i,t),i}),define("echarts/util/ecAnimation",["require","zrender/tool/util","zrender/tool/curve","zrender/shape/Polygon"],function(e){function t(e,t,i,n,a){var o,r=i.style.pointList,s=r.length;if(!t){if(o=[],"vertical"!=i._orient)for(var l=r[0][1],h=0;s>h;h++)o[h]=[r[h][0],l];else for(var d=r[0][0],h=0;s>h;h++)o[h]=[d,r[h][1]];"half-smooth-polygon"==i.type&&(o[s-1]=u.clone(r[s-1]),o[s-2]=u.clone(r[s-2])),t={style:{pointList:o}}}o=t.style.pointList;var 
c=o.length;i.style.pointList=c==s?o:s>c?o.concat(r.slice(c)):o.slice(0,s),e.addShape(i),i.__animating=!0,e.animate(i.id,"style").when(n,{pointList:r}).during(function(){i.updateControlPoints&&i.updateControlPoints(i.style)}).done(function(){i.__animating=!1}).start(a)}function i(e,t){for(var i=arguments.length,n=2;i>n;n++){var a=arguments[n];e.style[a]=t.style[a]}}function n(e,t,n,a,o){var r=n.style;t||(t={position:n.position,style:{x:r.x,y:"vertical"==n._orient?r.y+r.height:r.y,width:"vertical"==n._orient?r.width:0,height:"vertical"!=n._orient?r.height:0}});var s=r.x,l=r.y,h=r.width,d=r.height,c=[n.position[0],n.position[1]];i(n,t,"x","y","width","height"),n.position=t.position,e.addShape(n),(c[0]!=t.position[0]||c[1]!=t.position[1])&&e.animate(n.id,"").when(a,{position:c}).start(o),n.__animating=!0,e.animate(n.id,"style").when(a,{x:s,y:l,width:h,height:d}).done(function(){n.__animating=!1}).start(o)}function a(e,t,i,n,a){if(!t){var o=i.style.y;t={style:{y:[o[0],o[0],o[0],o[0]]}}}var r=i.style.y;i.style.y=t.style.y,e.addShape(i),i.__animating=!0,e.animate(i.id,"style").when(n,{y:r}).done(function(){i.__animating=!1}).start(a)}function o(e,t,i,n,a){var o=i.style.x,r=i.style.y,s=i.style.r0,l=i.style.r;i.__animating=!0,"r"!=i._animationAdd?(i.style.r0=0,i.style.r=0,i.rotation=[2*Math.PI,o,r],e.addShape(i),e.animate(i.id,"style").when(n,{r0:s,r:l}).done(function(){i.__animating=!1}).start(a),e.animate(i.id,"").when(n,{rotation:[0,o,r]}).start(a)):(i.style.r0=i.style.r,e.addShape(i),e.animate(i.id,"style").when(n,{r0:s}).done(function(){i.__animating=!1}).start(a))}function r(e,t,n,a,o){t||(t="r"!=n._animationAdd?{style:{startAngle:n.style.startAngle,endAngle:n.style.startAngle}}:{style:{r0:n.style.r}});var r=n.style.startAngle,s=n.style.endAngle;i(n,t,"startAngle","endAngle"),e.addShape(n),n.__animating=!0,e.animate(n.id,"style").when(a,{startAngle:r,endAngle:s}).done(function(){n.__animating=!1}).start(o)}function 
s(e,t,n,a,o){t||(t={style:{x:"left"==n.style.textAlign?n.style.x+100:n.style.x-100,y:n.style.y}});var r=n.style.x,s=n.style.y;i(n,t,"x","y"),e.addShape(n),n.__animating=!0,e.animate(n.id,"style").when(a,{x:r,y:s}).done(function(){n.__animating=!1}).start(o)}function l(t,i,n,a,o){var r=e("zrender/shape/Polygon").prototype.getRect(n.style),s=r.x+r.width/2,l=r.y+r.height/2;n.scale=[.1,.1,s,l],t.addShape(n),n.__animating=!0,t.animate(n.id,"").when(a,{scale:[1,1,s,l]}).done(function(){n.__animating=!1}).start(o)}function h(e,t,n,a,o){t||(t={style:{source0:0,source1:n.style.source1>0?360:-360,target0:0,target1:n.style.target1>0?360:-360}});var r=n.style.source0,s=n.style.source1,l=n.style.target0,h=n.style.target1;t.style&&i(n,t,"source0","source1","target0","target1"),e.addShape(n),n.__animating=!0,e.animate(n.id,"style").when(a,{source0:r,source1:s,target0:l,target1:h}).done(function(){n.__animating=!1}).start(o)}function d(e,t,i,n,a){t||(t={style:{angle:i.style.startAngle}});var o=i.style.angle;i.style.angle=t.style.angle,e.addShape(i),i.__animating=!0,e.animate(i.id,"style").when(n,{angle:o}).done(function(){i.__animating=!1}).start(a)}function c(e,t,i,a,o,r){if(i.style._x=i.style.x,i.style._y=i.style.y,i.style._width=i.style.width,i.style._height=i.style.height,t)n(e,t,i,a,o);else{var s=i._x||0,l=i._y||0;i.scale=[.01,.01,s,l],e.addShape(i),i.__animating=!0,e.animate(i.id,"").delay(r).when(a,{scale:[1,1,s,l]}).done(function(){i.__animating=!1}).start(o||"QuinticOut")}}function m(e,t,n,a,o){t||(t={style:{xStart:n.style.xStart,yStart:n.style.yStart,xEnd:n.style.xStart,yEnd:n.style.yStart}});var r=n.style.xStart,s=n.style.xEnd,l=n.style.yStart,h=n.style.yEnd;i(n,t,"xStart","xEnd","yStart","yEnd"),e.addShape(n),n.__animating=!0,e.animate(n.id,"style").when(a,{xStart:r,xEnd:s,yStart:l,yEnd:h}).done(function(){n.__animating=!1}).start(o)}function p(e,t,i,n,a){a=a||"QuinticOut",i.__animating=!0,e.addShape(i);var 
o=i.style,r=function(){i.__animating=!1},s=o.xStart,l=o.yStart,h=o.xEnd,d=o.yEnd;if(o.curveness>0){i.updatePoints(o);var c={p:0},m=o.cpX1,p=o.cpY1,u=[],U=[],g=V.quadraticSubdivide;e.animation.animate(c).when(n,{p:1}).during(function(){g(s,m,h,c.p,u),g(l,p,d,c.p,U),o.cpX1=u[1],o.cpY1=U[1],o.xEnd=u[2],o.yEnd=U[2],e.modShape(i)}).done(r).start(a)}else e.animate(i.id,"style").when(0,{xEnd:s,yEnd:l}).when(n,{xEnd:h,yEnd:d}).done(r).start(a)}var u=e("zrender/tool/util"),V=e("zrender/tool/curve");return{pointList:t,rectangle:n,candle:a,ring:o,sector:r,text:s,polygon:l,ribbon:h,gaugePointer:d,icon:c,line:m,markline:p}}),define("echarts/util/ecEffect",["require","../util/ecData","zrender/shape/Circle","zrender/shape/Image","zrender/tool/curve","../util/shape/Icon","../util/shape/Symbol","zrender/shape/ShapeBundle","zrender/shape/Polyline","zrender/tool/vector","zrender/tool/env"],function(e){function t(e,t,i,n){var a,r=i.effect,l=r.color||i.style.strokeColor||i.style.color,d=r.shadowColor||l,c=r.scaleSize,m=r.bounceDistance,p="undefined"!=typeof r.shadowBlur?r.shadowBlur:c;"image"!==i.type?(a=new h({zlevel:n,style:{brushType:"stroke",iconType:"droplet"!=i.style.iconType?i.style.iconType:"circle",x:p+1,y:p+1,n:i.style.n,width:i.style._width*c,height:i.style._height*c,lineWidth:1,strokeColor:l,shadowColor:d,shadowBlur:p},draggable:!1,hoverable:!1}),"pin"==i.style.iconType&&(a.style.y+=a.style.height/2*1.5),u&&(a.style.image=e.shapeToImage(a,a.style.width+2*p+2,a.style.height+2*p+2).style.image,a=new s({zlevel:a.zlevel,style:a.style,draggable:!1,hoverable:!1}))):a=new s({zlevel:n,style:i.style,draggable:!1,hoverable:!1}),o.clone(i,a),a.position=i.position,t.push(a),e.addShape(a);var V="image"!==i.type?window.devicePixelRatio||1:1,U=(a.style.width/V-i.style._width)/2;a.style.x=i.style._x-U,a.style.y=i.style._y-U,"pin"==i.style.iconType&&(a.style.y-=i.style.height/2*1.5);var g=100*(r.period+10*Math.random());e.modShape(i.id,{invisible:!0});var 
f=a.style.x+a.style.width/2/V,y=a.style.y+a.style.height/2/V;"scale"===r.type?(e.modShape(a.id,{scale:[.1,.1,f,y]}),e.animate(a.id,"",r.loop).when(g,{scale:[1,1,f,y]}).done(function(){i.effect.show=!1,e.delShape(a.id)}).start()):e.animate(a.id,"style",r.loop).when(g,{y:a.style.y-m}).when(2*g,{y:a.style.y}).done(function(){i.effect.show=!1,e.delShape(a.id)}).start()}function i(e,t,i,n){var a=i.effect,o=a.color||i.style.strokeColor||i.style.color,r=a.scaleSize,s=a.shadowColor||o,l="undefined"!=typeof a.shadowBlur?a.shadowBlur:2*r,h=window.devicePixelRatio||1,c=new d({zlevel:n,position:i.position,scale:i.scale,style:{pointList:i.style.pointList,iconType:i.style.iconType,color:o,strokeColor:o,shadowColor:s,shadowBlur:l*h,random:!0,brushType:"fill",lineWidth:1,size:i.style.size},draggable:!1,hoverable:!1});t.push(c),e.addShape(c),e.modShape(i.id,{invisible:!0});for(var m=Math.round(100*a.period),p={},u={},V=0;20>V;V++)c.style["randomMap"+V]=0,p={},p["randomMap"+V]=100,u={},u["randomMap"+V]=0,c.style["randomMap"+V]=100*Math.random(),e.animate(c.id,"style",!0).when(m,p).when(2*m,u).when(3*m,p).when(4*m,p).delay(Math.random()*m*V).start()}function n(e,t,i,n,a){var s=i.effect,h=i.style,d=s.color||h.strokeColor||h.color,c=s.shadowColor||h.strokeColor||d,V=h.lineWidth*s.scaleSize,U="undefined"!=typeof s.shadowBlur?s.shadowBlur:V,g=new r({zlevel:n,style:{x:U,y:U,r:V,color:d,shadowColor:c,shadowBlur:U},hoverable:!1}),f=0;if(u&&!a){var n=g.zlevel;g=e.shapeToImage(g,2*(V+U),2*(V+U)),g.zlevel=n,g.hoverable=!1,f=U}a||(o.clone(i,g),g.position=i.position,t.push(g),e.addShape(g));var y=function(){a||(i.effect.show=!1,e.delShape(g.id)),g.effectAnimator=null};if(i instanceof m){for(var b=[0],_=0,x=h.pointList,k=h.controlPointList,v=1;v<x.length;v++){if(k){var L=k[2*(v-1)],w=k[2*(v-1)+1];_+=p.dist(x[v-1],L)+p.dist(L,w)+p.dist(w,x[v])}else _+=p.dist(x[v-1],x[v]);b.push(_)}for(var 
W={p:0},X=e.animation.animate(W,{loop:s.loop}),v=0;v<b.length;v++)X.when(b[v]*s.period,{p:v});X.during(function(){var t,i,n=Math.floor(W.p);if(n==x.length-1)t=x[n][0],i=x[n][1];else{var o=W.p-n,r=x[n],s=x[n+1];if(k){var h=k[2*n],d=k[2*n+1];t=l.cubicAt(r[0],h[0],d[0],s[0],o),i=l.cubicAt(r[1],h[1],d[1],s[1],o)}else t=(s[0]-r[0])*o+r[0],i=(s[1]-r[1])*o+r[1]}g.style.x=t,g.style.y=i,a||e.modShape(g)}).done(y).start(),X.duration=_*s.period,g.effectAnimator=X}else{var I=h.xStart-f,S=h.yStart-f,K=h.xEnd-f,C=h.yEnd-f;g.style.x=I,g.style.y=S;var T=(K-I)*(K-I)+(C-S)*(C-S),E=Math.round(Math.sqrt(Math.round(T*s.period*s.period)));if(i.style.curveness>0){var z=h.cpX1-f,A=h.cpY1-f;g.effectAnimator=e.animation.animate(g,{loop:s.loop}).when(E,{p:1}).during(function(t,i){g.style.x=l.quadraticAt(I,z,K,i),g.style.y=l.quadraticAt(S,A,C,i),a||e.modShape(g)}).done(y).start()}else g.effectAnimator=e.animation.animate(g.style,{loop:s.loop}).when(E,{x:K,y:C}).during(function(){a||e.modShape(g)}).done(y).start();g.effectAnimator.duration=E}return g}function a(e,t,i,a){var o=new c({style:{shapeList:[]},zlevel:a,hoverable:!1}),r=i.style.shapeList,s=i.effect;o.position=i.position;for(var l=0,h=[],d=0;d<r.length;d++){r[d].effect=s;var m=n(e,null,r[d],a,!0),p=m.effectAnimator;o.style.shapeList.push(m),p.duration>l&&(l=p.duration),0===d&&(o.style.color=m.style.color,o.style.shadowBlur=m.style.shadowBlur,o.style.shadowColor=m.style.shadowColor),h.push(p)}t.push(o),e.addShape(o);var u=function(){for(var e=0;e<h.length;e++)h[e].stop()};if(l){o.__dummy=0;var V=e.animate(o.id,"",s.loop).when(l,{__dummy:1}).during(function(){e.modShape(o)}).done(function(){i.effect.show=!1,e.delShape(o.id)}).start(),U=V.stop;V.stop=function(){u(),U.call(this)}}}var 
o=e("../util/ecData"),r=e("zrender/shape/Circle"),s=e("zrender/shape/Image"),l=e("zrender/tool/curve"),h=e("../util/shape/Icon"),d=e("../util/shape/Symbol"),c=e("zrender/shape/ShapeBundle"),m=e("zrender/shape/Polyline"),p=e("zrender/tool/vector"),u=e("zrender/tool/env").canvasSupported;return{point:t,largePoint:i,line:n,largeLine:a}}),define("echarts/component/base",["require","../config","../util/ecData","../util/ecQuery","../util/number","zrender/tool/util","zrender/tool/env"],function(e){function t(e,t,a,o,r){this.ecTheme=e,this.messageCenter=t,this.zr=a,this.option=o,this.series=o.series,this.myChart=r,this.component=r.component,this.shapeList=[],this.effectList=[];var s=this;s._onlegendhoverlink=function(e){if(s.legendHoverLink)for(var t,a=e.target,o=s.shapeList.length-1;o>=0;o--)t=s.type==i.CHART_TYPE_PIE||s.type==i.CHART_TYPE_FUNNEL?n.get(s.shapeList[o],"name"):(n.get(s.shapeList[o],"series")||{}).name,t!=a||s.shapeList[o].invisible||s.shapeList[o].__animating||s.zr.addHoverShape(s.shapeList[o])},t&&t.bind(i.EVENT.LEGEND_HOVERLINK,this._onlegendhoverlink)}var i=e("../config"),n=e("../util/ecData"),a=e("../util/ecQuery"),o=e("../util/number"),r=e("zrender/tool/util");return t.prototype={canvasSupported:e("zrender/tool/env").canvasSupported,_getZ:function(e){if(null!=this[e])return this[e];var t=this.ecTheme[this.type];return t&&null!=t[e]?t[e]:(t=i[this.type],t&&null!=t[e]?t[e]:0)},getZlevelBase:function(){return this._getZ("zlevel")},getZBase:function(){return this._getZ("z")},reformOption:function(e){return e=r.merge(r.merge(e||{},r.clone(this.ecTheme[this.type]||{})),r.clone(i[this.type]||{})),this.z=e.z,this.zlevel=e.zlevel,e},reformCssArray:function(e){if(!(e instanceof Array))return[e,e,e,e];switch(e.length+""){case"4":return e;case"3":return[e[0],e[1],e[2],e[1]];case"2":return[e[0],e[1],e[0],e[1]];case"1":return[e[0],e[0],e[0],e[0]];case"0":return[0,0,0,0]}},getShapeById:function(e){for(var 
t=0,i=this.shapeList.length;i>t;t++)if(this.shapeList[t].id===e)return this.shapeList[t];return null},getFont:function(e){var t=this.getTextStyle(r.clone(e));return t.fontStyle+" "+t.fontWeight+" "+t.fontSize+"px "+t.fontFamily},getTextStyle:function(e){return r.merge(r.merge(e||{},this.ecTheme.textStyle),i.textStyle)},getItemStyleColor:function(e,t,i,n){return"function"==typeof e?e.call(this.myChart,{seriesIndex:t,series:this.series[t],dataIndex:i,data:n}):e},getDataFromOption:function(e,t){return null!=e?null!=e.value?e.value:e:t},subPixelOptimize:function(e,t){return e=t%2===1?Math.floor(e)+.5:Math.round(e)},resize:function(){this.refresh&&this.refresh(),this.clearEffectShape&&this.clearEffectShape(!0);var e=this;setTimeout(function(){e.animationEffect&&e.animationEffect()},200)},clear:function(){this.clearEffectShape&&this.clearEffectShape(),this.zr&&this.zr.delShape(this.shapeList),this.shapeList=[]},dispose:function(){this.onbeforDispose&&this.onbeforDispose(),this.clear(),this.shapeList=null,this.effectList=null,this.messageCenter&&this.messageCenter.unbind(i.EVENT.LEGEND_HOVERLINK,this._onlegendhoverlink),this.onafterDispose&&this.onafterDispose()},query:a.query,deepQuery:a.deepQuery,deepMerge:a.deepMerge,parsePercent:o.parsePercent,parseCenter:o.parseCenter,parseRadius:o.parseRadius,numAddCommas:o.addCommas,getPrecision:o.getPrecision},t}),define("echarts/layout/EdgeBundling",["require","../data/KDTree","zrender/tool/vector"],function(e){function t(e,t){e=e.array,t=t.array;var i=t[0]-e[0],n=t[1]-e[1],a=t[2]-e[2],o=t[3]-e[3];return i*i+n*n+a*a+o*o}function i(e){this.points=[e.mp0,e.mp1],this.group=e}function n(e){var t=e.points;t[0][1]<t[1][1]||e instanceof i?(this.array=[t[0][0],t[0][1],t[1][0],t[1][1]],this._startPoint=t[0],this._endPoint=t[1]):(this.array=[t[1][0],t[1][1],t[0][0],t[0][1]],this._startPoint=t[1],this._endPoint=t[0]),this.ink=d(t[0],t[1]),this.edge=e,this.group=null}function a(){this.edgeList=[],this.mp0=l(),this.mp1=l(),this.ink=0}function 
o(){this.maxNearestEdge=6,this.maxTurningAngle=Math.PI/4,this.maxIteration=20}var r=e("../data/KDTree"),s=e("zrender/tool/vector"),l=s.create,h=s.distSquare,d=s.dist,c=s.copy,m=s.clone;return n.prototype.getStartPoint=function(){return this._startPoint},n.prototype.getEndPoint=function(){return this._endPoint},a.prototype.addEdge=function(e){e.group=this,this.edgeList.push(e)},a.prototype.removeEdge=function(e){e.group=null,this.edgeList.splice(this.edgeList.indexOf(e),1)},o.prototype={constructor:o,run:function(e){function t(e,t){return h(e,t)<1e-10}function n(e,i){for(var n=[],a=0,o=0;o<e.length;o++)a>0&&t(e[o],n[a-1])||(n[a++]=m(e[o]));return i[0]&&!t(n[0],i[0])&&(n=n.reverse()),n}for(var a=this._iterate(e),o=0;o++<this.maxIteration;){for(var r=[],s=0;s<a.groups.length;s++)r.push(new i(a.groups[s]));var l=this._iterate(r);if(l.savedInk<=0)break;a=l}var d=[],c=function(e,t){for(var a,o=0;o<e.length;o++){var r=e[o];if(r.edgeList[0]&&r.edgeList[0].edge instanceof i){for(var s=[],l=0;l<r.edgeList.length;l++)s.push(r.edgeList[l].edge.group);a=t?t.slice():[],a.unshift(r.mp0),a.push(r.mp1),c(s,a)}else for(var l=0;l<r.edgeList.length;l++){var h=r.edgeList[l];a=t?t.slice():[],a.unshift(r.mp0),a.push(r.mp1),a.unshift(h.getStartPoint()),a.push(h.getEndPoint()),d.push({points:n(a,h.edge.points),rawEdge:h.edge})}}};return c(a.groups),d},_iterate:function(e){for(var i=[],o=[],s=0,h=0;h<e.length;h++){var d=new n(e[h]);i.push(d)}for(var m=new r(i,4),p=[],u=l(),V=l(),U=0,g=l(),f=l(),y=0,h=0;h<i.length;h++){var d=i[h];if(!d.group){m.nearestN(d,this.maxNearestEdge,t,p);for(var b=0,_=null,x=null,k=0;k<p.length;k++){var v=p[k],L=0;v.group?v.group!==x&&(x=v.group,U=this._calculateGroupEdgeInk(v.group,d,u,V),L=v.group.ink+d.ink-U):(U=this._calculateEdgeEdgeInk(d,v,u,V),L=v.ink+d.ink-U),L>b&&(b=L,_=v,c(f,V),c(g,u),y=U)}if(_){s+=b;var w;_.group||(w=new a,o.push(w),w.addEdge(_)),w=_.group,c(w.mp0,g),c(w.mp1,f),w.ink=y,_.group.addEdge(d)}else{var w=new 
a;o.push(w),c(w.mp0,d.getStartPoint()),c(w.mp1,d.getEndPoint()),w.ink=d.ink,w.addEdge(d)}}}return{groups:o,edges:i,savedInk:s}},_calculateEdgeEdgeInk:function(){var e=[],t=[];return function(i,n,a,o){e[0]=i.getStartPoint(),e[1]=n.getStartPoint(),t[0]=i.getEndPoint(),t[1]=n.getEndPoint(),this._calculateMeetPoints(e,t,a,o);var r=d(e[0],a)+d(a,o)+d(o,t[0])+d(e[1],a)+d(o,t[1]);return r}}(),_calculateGroupEdgeInk:function(e,t,i,n){for(var a=[],o=[],r=0;r<e.edgeList.length;r++){var s=e.edgeList[r];a.push(s.getStartPoint()),o.push(s.getEndPoint())}a.push(t.getStartPoint()),o.push(t.getEndPoint()),this._calculateMeetPoints(a,o,i,n);for(var l=d(i,n),r=0;r<a.length;r++)l+=d(a[r],i)+d(o[r],n);return l},_calculateMeetPoints:function(){var e=l(),t=l();return function(i,n,a,o){s.set(e,0,0),s.set(t,0,0);for(var r=i.length,l=0;r>l;l++)s.add(e,e,i[l]);s.scale(e,e,1/r),r=n.length;for(var l=0;r>l;l++)s.add(t,t,n[l]);s.scale(t,t,1/r),this._limitTurningAngle(i,e,t,a),this._limitTurningAngle(n,t,e,o)}}(),_limitTurningAngle:function(){var e=l(),t=l(),i=l(),n=l();return function(a,o,r,l){var c=Math.cos(this.maxTurningAngle),m=Math.tan(this.maxTurningAngle);s.sub(e,o,r),s.normalize(e,e),s.copy(l,o);for(var p=0,u=0;u<a.length;u++){var V=a[u];s.sub(t,V,o);var U=s.len(t);s.scale(t,t,1/U);var g=s.dot(t,e);if(c>g){s.scaleAndAdd(i,o,e,U*g);var f=d(i,V),y=f/m;s.scaleAndAdd(n,i,e,-y);var b=h(n,o);b>p&&(p=b,s.copy(l,n))}}}}()},o}),define("zrender/shape/Star",["require","../tool/math","./Base","../tool/util"],function(e){var t=e("../tool/math"),i=t.sin,n=t.cos,a=Math.PI,o=e("./Base"),r=function(e){o.call(this,e)};return r.prototype={type:"star",buildPath:function(e,t){var o=t.n;if(o&&!(2>o)){var r=t.x,s=t.y,l=t.r,h=t.r0;null==h&&(h=o>4?l*n(2*a/o)/n(a/o):l/3);var d=a/o,c=-a/2,m=r+l*n(c),p=s+l*i(c);c+=d;var u=t.pointList=[];u.push([m,p]);for(var V,U=0,g=2*o-1;g>U;U++)V=U%2===0?h:l,u.push([r+V*n(c),s+V*i(c)]),c+=d;u.push([m,p]),e.moveTo(u[0][0],u[0][1]);for(var 
U=0;U<u.length;U++)e.lineTo(u[U][0],u[U][1]);e.closePath()}},getRect:function(e){if(e.__rect)return e.__rect;var t;return t="stroke"==e.brushType||"fill"==e.brushType?e.lineWidth||1:0,e.__rect={x:Math.round(e.x-e.r-t/2),y:Math.round(e.y-e.r-t/2),width:2*e.r+t,height:2*e.r+t},e.__rect}},e("../tool/util").inherits(r,o),r}),define("zrender/shape/Heart",["require","./Base","./util/PathProxy","../tool/area","../tool/util"],function(e){"use strict";var t=e("./Base"),i=e("./util/PathProxy"),n=e("../tool/area"),a=function(e){t.call(this,e),this._pathProxy=new i};return a.prototype={type:"heart",buildPath:function(e,t){var n=this._pathProxy||new i;n.begin(e),n.moveTo(t.x,t.y),n.bezierCurveTo(t.x+t.a/2,t.y-2*t.b/3,t.x+2*t.a,t.y+t.b/3,t.x,t.y+t.b),n.bezierCurveTo(t.x-2*t.a,t.y+t.b/3,t.x-t.a/2,t.y-2*t.b/3,t.x,t.y),n.closePath()},getRect:function(e){return e.__rect?e.__rect:(this._pathProxy.isEmpty()||this.buildPath(null,e),this._pathProxy.fastBoundingRect())},isCover:function(e,t){var i=this.transformCoordToLocal(e,t);return e=i[0],t=i[1],this.isCoverRect(e,t)?n.isInsidePath(this._pathProxy.pathCommands,this.style.lineWidth,this.style.brushType,e,t):void 0}},e("../tool/util").inherits(a,t),a}),define("zrender/shape/Droplet",["require","./Base","./util/PathProxy","../tool/area","../tool/util"],function(e){"use strict";var t=e("./Base"),i=e("./util/PathProxy"),n=e("../tool/area"),a=function(e){t.call(this,e),this._pathProxy=new i};return a.prototype={type:"droplet",buildPath:function(e,t){var n=this._pathProxy||new i;n.begin(e),n.moveTo(t.x,t.y+t.a),n.bezierCurveTo(t.x+t.a,t.y+t.a,t.x+3*t.a/2,t.y-t.a/3,t.x,t.y-t.b),n.bezierCurveTo(t.x-3*t.a/2,t.y-t.a/3,t.x-t.a,t.y+t.a,t.x,t.y+t.a),n.closePath()},getRect:function(e){return e.__rect?e.__rect:(this._pathProxy.isEmpty()||this.buildPath(null,e),this._pathProxy.fastBoundingRect())},isCover:function(e,t){var i=this.transformCoordToLocal(e,t);return 
e=i[0],t=i[1],this.isCoverRect(e,t)?n.isInsidePath(this._pathProxy.pathCommands,this.style.lineWidth,this.style.brushType,e,t):void 0}},e("../tool/util").inherits(a,t),a}),define("zrender/tool/math",[],function(){function e(e,t){return Math.sin(t?e*a:e)}function t(e,t){return Math.cos(t?e*a:e)}function i(e){return e*a}function n(e){return e/a}var a=Math.PI/180;return{sin:e,cos:t,degreeToRadian:i,radianToDegree:n}}),define("zrender/shape/util/PathProxy",["require","../../tool/vector"],function(e){var t=e("../../tool/vector"),i=function(e,t){this.command=e,this.points=t||null},n=function(){this.pathCommands=[],this._ctx=null,this._min=[],this._max=[]};return n.prototype.fastBoundingRect=function(){var e=this._min,i=this._max;e[0]=e[1]=1/0,i[0]=i[1]=-(1/0);for(var n=0;n<this.pathCommands.length;n++){var a=this.pathCommands[n],o=a.points;switch(a.command){case"M":t.min(e,e,o),t.max(i,i,o);break;case"L":t.min(e,e,o),t.max(i,i,o);break;case"C":for(var r=0;6>r;r+=2)e[0]=Math.min(e[0],e[0],o[r]),e[1]=Math.min(e[1],e[1],o[r+1]),i[0]=Math.max(i[0],i[0],o[r]),i[1]=Math.max(i[1],i[1],o[r+1]);break;case"Q":for(var r=0;4>r;r+=2)e[0]=Math.min(e[0],e[0],o[r]),e[1]=Math.min(e[1],e[1],o[r+1]),i[0]=Math.max(i[0],i[0],o[r]),i[1]=Math.max(i[1],i[1],o[r+1]);break;case"A":var s=o[0],l=o[1],h=o[2],d=o[3];e[0]=Math.min(e[0],e[0],s-h),e[1]=Math.min(e[1],e[1],l-d),i[0]=Math.max(i[0],i[0],s+h),i[1]=Math.max(i[1],i[1],l+d)}}return{x:e[0],y:e[1],width:i[0]-e[0],height:i[1]-e[1]}},n.prototype.begin=function(e){return this._ctx=e||null,this.pathCommands.length=0,this},n.prototype.moveTo=function(e,t){return this.pathCommands.push(new i("M",[e,t])),this._ctx&&this._ctx.moveTo(e,t),this},n.prototype.lineTo=function(e,t){return this.pathCommands.push(new i("L",[e,t])),this._ctx&&this._ctx.lineTo(e,t),this},n.prototype.bezierCurveTo=function(e,t,n,a,o,r){return this.pathCommands.push(new 
i("C",[e,t,n,a,o,r])),this._ctx&&this._ctx.bezierCurveTo(e,t,n,a,o,r),this},n.prototype.quadraticCurveTo=function(e,t,n,a){return this.pathCommands.push(new i("Q",[e,t,n,a])),this._ctx&&this._ctx.quadraticCurveTo(e,t,n,a),this},n.prototype.arc=function(e,t,n,a,o,r){return this.pathCommands.push(new i("A",[e,t,n,n,a,o-a,0,r?0:1])),this._ctx&&this._ctx.arc(e,t,n,a,o,r),this},n.prototype.arcTo=function(e,t,i,n,a){return this._ctx&&this._ctx.arcTo(e,t,i,n,a),this},n.prototype.rect=function(e,t,i,n){return this._ctx&&this._ctx.rect(e,t,i,n),this},n.prototype.closePath=function(){return this.pathCommands.push(new i("z")),this._ctx&&this._ctx.closePath(),this},n.prototype.isEmpty=function(){return 0===this.pathCommands.length},n.PathSegment=i,n}),define("zrender/shape/Line",["require","./Base","./util/dashedLineTo","../tool/util"],function(e){var t=e("./Base"),i=e("./util/dashedLineTo"),n=function(e){this.brushTypeOnly="stroke",this.textPosition="end",t.call(this,e)};return n.prototype={type:"line",buildPath:function(e,t){if(t.lineType&&"solid"!=t.lineType){if("dashed"==t.lineType||"dotted"==t.lineType){var n=(t.lineWidth||1)*("dashed"==t.lineType?5:1);i(e,t.xStart,t.yStart,t.xEnd,t.yEnd,n)}}else e.moveTo(t.xStart,t.yStart),e.lineTo(t.xEnd,t.yEnd)},getRect:function(e){if(e.__rect)return e.__rect;var t=e.lineWidth||1;return e.__rect={x:Math.min(e.xStart,e.xEnd)-t,y:Math.min(e.yStart,e.yEnd)-t,width:Math.abs(e.xStart-e.xEnd)+t,height:Math.abs(e.yStart-e.yEnd)+t},e.__rect}},e("../tool/util").inherits(n,t),n}),define("zrender/shape/BezierCurve",["require","./Base","../tool/util"],function(e){"use strict";var t=e("./Base"),i=function(e){this.brushTypeOnly="stroke",this.textPosition="end",t.call(this,e)};return i.prototype={type:"bezier-curve",buildPath:function(e,t){e.moveTo(t.xStart,t.yStart),"undefined"!=typeof t.cpX2&&"undefined"!=typeof 
t.cpY2?e.bezierCurveTo(t.cpX1,t.cpY1,t.cpX2,t.cpY2,t.xEnd,t.yEnd):e.quadraticCurveTo(t.cpX1,t.cpY1,t.xEnd,t.yEnd)},getRect:function(e){if(e.__rect)return e.__rect;var t=Math.min(e.xStart,e.xEnd,e.cpX1),i=Math.min(e.yStart,e.yEnd,e.cpY1),n=Math.max(e.xStart,e.xEnd,e.cpX1),a=Math.max(e.yStart,e.yEnd,e.cpY1),o=e.cpX2,r=e.cpY2;"undefined"!=typeof o&&"undefined"!=typeof r&&(t=Math.min(t,o),i=Math.min(i,r),n=Math.max(n,o),a=Math.max(a,r));var s=e.lineWidth||1;return e.__rect={x:t-s,y:i-s,width:n-t+s,height:a-i+s},e.__rect}},e("../tool/util").inherits(i,t),i}),define("zrender/shape/util/dashedLineTo",[],function(){var e=[5,5];return function(t,i,n,a,o,r){if(t.setLineDash)return e[0]=e[1]=r,t.setLineDash(e),t.moveTo(i,n),void t.lineTo(a,o);r="number"!=typeof r?5:r;var s=a-i,l=o-n,h=Math.floor(Math.sqrt(s*s+l*l)/r);s/=h,l/=h;for(var d=!0,c=0;h>c;++c)d?t.moveTo(i,n):t.lineTo(i,n),d=!d,i+=s,n+=l;t.lineTo(a,o)}}),define("zrender/shape/Polygon",["require","./Base","./util/smoothSpline","./util/smoothBezier","./util/dashedLineTo","../tool/util"],function(e){var t=e("./Base"),i=e("./util/smoothSpline"),n=e("./util/smoothBezier"),a=e("./util/dashedLineTo"),o=function(e){t.call(this,e)};return o.prototype={type:"polygon",buildPath:function(e,t){var o=t.pointList;if(!(o.length<2)){if(t.smooth&&"spline"!==t.smooth){var r=n(o,t.smooth,!0,t.smoothConstraint);e.moveTo(o[0][0],o[0][1]);for(var s,l,h,d=o.length,c=0;d>c;c++)s=r[2*c],l=r[2*c+1],h=o[(c+1)%d],e.bezierCurveTo(s[0],s[1],l[0],l[1],h[0],h[1])}else if("spline"===t.smooth&&(o=i(o,!0)),t.lineType&&"solid"!=t.lineType){if("dashed"==t.lineType||"dotted"==t.lineType){var m=t._dashLength||(t.lineWidth||1)*("dashed"==t.lineType?5:1);t._dashLength=m,e.moveTo(o[0][0],o[0][1]);for(var c=1,p=o.length;p>c;c++)a(e,o[c-1][0],o[c-1][1],o[c][0],o[c][1],m);a(e,o[o.length-1][0],o[o.length-1][1],o[0][0],o[0][1],m)}}else{e.moveTo(o[0][0],o[0][1]);for(var 
c=1,p=o.length;p>c;c++)e.lineTo(o[c][0],o[c][1]);e.lineTo(o[0][0],o[0][1])}e.closePath()}},getRect:function(e){if(e.__rect)return e.__rect;for(var t=Number.MAX_VALUE,i=Number.MIN_VALUE,n=Number.MAX_VALUE,a=Number.MIN_VALUE,o=e.pointList,r=0,s=o.length;s>r;r++)o[r][0]<t&&(t=o[r][0]),o[r][0]>i&&(i=o[r][0]),o[r][1]<n&&(n=o[r][1]),o[r][1]>a&&(a=o[r][1]);var l;return l="stroke"==e.brushType||"fill"==e.brushType?e.lineWidth||1:0,e.__rect={x:Math.round(t-l/2),y:Math.round(n-l/2),width:i-t+l,height:a-n+l},e.__rect}},e("../tool/util").inherits(o,t),o}),define("echarts/util/shape/normalIsCover",[],function(){return function(e,t){var i=this.transformCoordToLocal(e,t);return e=i[0],t=i[1],this.isCoverRect(e,t)}}),define("zrender/shape/util/smoothSpline",["require","../../tool/vector"],function(e){function t(e,t,i,n,a,o,r){var s=.5*(i-e),l=.5*(n-t);return(2*(t-i)+s+l)*r+(-3*(t-i)-2*s-l)*o+s*a+t}var i=e("../../tool/vector");return function(e,n){for(var a=e.length,o=[],r=0,s=1;a>s;s++)r+=i.distance(e[s-1],e[s]);var l=r/5;l=a>l?a:l;for(var s=0;l>s;s++){var h,d,c,m=s/(l-1)*(n?a:a-1),p=Math.floor(m),u=m-p,V=e[p%a];n?(h=e[(p-1+a)%a],d=e[(p+1)%a],c=e[(p+2)%a]):(h=e[0===p?p:p-1],d=e[p>a-2?a-1:p+1],c=e[p>a-3?a-1:p+2]);var U=u*u,g=u*U;o.push([t(h[0],V[0],d[0],c[0],u,U,g),t(h[1],V[1],d[1],c[1],u,U,g)])}return o}}),define("zrender/shape/util/smoothBezier",["require","../../tool/vector"],function(e){var t=e("../../tool/vector");return function(e,i,n,a){var o,r,s,l,h=[],d=[],c=[],m=[],p=!!a;if(p){s=[1/0,1/0],l=[-(1/0),-(1/0)];
for(var u=0,V=e.length;V>u;u++)t.min(s,s,e[u]),t.max(l,l,e[u]);t.min(s,s,a[0]),t.max(l,l,a[1])}for(var u=0,V=e.length;V>u;u++){var o,r,U=e[u];if(n)o=e[u?u-1:V-1],r=e[(u+1)%V];else{if(0===u||u===V-1){h.push(t.clone(e[u]));continue}o=e[u-1],r=e[u+1]}t.sub(d,r,o),t.scale(d,d,i);var g=t.distance(U,o),f=t.distance(U,r),y=g+f;0!==y&&(g/=y,f/=y),t.scale(c,d,-g),t.scale(m,d,f);var b=t.add([],U,c),_=t.add([],U,m);p&&(t.max(b,b,s),t.min(b,b,l),t.max(_,_,s),t.min(_,_,l)),h.push(b),h.push(_)}return n&&h.push(t.clone(h.shift())),h}}),define("echarts/util/ecQuery",["require","zrender/tool/util"],function(e){function t(e,t){if("undefined"!=typeof e){if(!t)return e;t=t.split(".");for(var i=t.length,n=0;i>n;){if(e=e[t[n]],"undefined"==typeof e)return;n++}return e}}function i(e,i){for(var n,a=0,o=e.length;o>a;a++)if(n=t(e[a],i),"undefined"!=typeof n)return n}function n(e,i){for(var n,o=e.length;o--;){var r=t(e[o],i);"undefined"!=typeof r&&("undefined"==typeof n?n=a.clone(r):a.merge(n,r,!0))}return n}var a=e("zrender/tool/util");return{query:t,deepQuery:i,deepMerge:n}}),define("echarts/util/number",[],function(){function e(e){return e.replace(/^\s+/,"").replace(/\s+$/,"")}function t(t,i){return"string"==typeof t?e(t).match(/%$/)?parseFloat(t)/100*i:parseFloat(t):t}function i(e,i){return[t(i[0],e.getWidth()),t(i[1],e.getHeight())]}function n(e,i){i instanceof Array||(i=[0,i]);var n=Math.min(e.getWidth(),e.getHeight())/2;return[t(i[0],n),t(i[1],n)]}function a(e){return isNaN(e)?"-":(e=(e+"").split("."),e[0].replace(/(\d{1,3})(?=(?:\d{3})+(?!\d))/g,"$1,")+(e.length>1?"."+e[1]:""))}function o(e){for(var t=1,i=0;Math.round(e*t)/t!==e;)t*=10,i++;return i}return{parsePercent:t,parseCenter:i,parseRadius:n,addCommas:a,getPrecision:o}}),define("echarts/data/KDTree",["require","./quickSelect"],function(e){function t(e,t){this.left=null,this.right=null,this.axis=e,this.data=t}var 
i=e("./quickSelect"),n=function(e,t){e.length&&(t||(t=e[0].array.length),this.dimension=t,this.root=this._buildTree(e,0,e.length-1,0),this._stack=[],this._nearstNList=[])};return n.prototype._buildTree=function(e,n,a,o){if(n>a)return null;var r=Math.floor((n+a)/2);r=i(e,n,a,r,function(e,t){return e.array[o]-t.array[o]});var s=e[r],l=new t(o,s);return o=(o+1)%this.dimension,a>n&&(l.left=this._buildTree(e,n,r-1,o),l.right=this._buildTree(e,r+1,a,o)),l},n.prototype.nearest=function(e,t){var i=this.root,n=this._stack,a=0,o=1/0,r=null;for(i.data!==e&&(o=t(i.data,e),r=i),e.array[i.axis]<i.data.array[i.axis]?(i.right&&(n[a++]=i.right),i.left&&(n[a++]=i.left)):(i.left&&(n[a++]=i.left),i.right&&(n[a++]=i.right));a--;){i=n[a];var s=e.array[i.axis]-i.data.array[i.axis],l=0>s,h=!1;s*=s,o>s&&(s=t(i.data,e),o>s&&i.data!==e&&(o=s,r=i),h=!0),l?(h&&i.right&&(n[a++]=i.right),i.left&&(n[a++]=i.left)):(h&&i.left&&(n[a++]=i.left),i.right&&(n[a++]=i.right))}return r.data},n.prototype._addNearest=function(e,t,i){for(var n=this._nearstNList,a=e-1;a>0&&!(t>=n[a-1].dist);a--)n[a].dist=n[a-1].dist,n[a].node=n[a-1].node;n[a].dist=t,n[a].node=i},n.prototype.nearestN=function(e,t,i,n){if(0>=t)return n.length=0,n;for(var a=this.root,o=this._stack,r=0,s=this._nearstNList,l=0;t>l;l++)s[l]||(s[l]={}),s[l].dist=0,s[l].node=null;var h=i(a.data,e),d=0;for(a.data!==e&&(d++,this._addNearest(d,h,a)),e.array[a.axis]<a.data.array[a.axis]?(a.right&&(o[r++]=a.right),a.left&&(o[r++]=a.left)):(a.left&&(o[r++]=a.left),a.right&&(o[r++]=a.right));r--;){a=o[r];var h=e.array[a.axis]-a.data.array[a.axis],c=0>h,m=!1;h*=h,(t>d||h<s[d-1].dist)&&(h=i(a.data,e),(t>d||h<s[d-1].dist)&&a.data!==e&&(t>d&&d++,this._addNearest(d,h,a)),m=!0),c?(m&&a.right&&(o[r++]=a.right),a.left&&(o[r++]=a.left)):(m&&a.left&&(o[r++]=a.left),a.right&&(o[r++]=a.right))}for(var l=0;d>l;l++)n[l]=s[l].node.data;return n.length=d,n},n}),define("echarts/data/quickSelect",["require"],function(){function e(e,t){return e-t}function t(e,t,i){var 
n=e[t];e[t]=e[i],e[i]=n}function i(e,i,n,a,o){for(var r=i;n>i;){var r=Math.round((n+i)/2),s=e[r];t(e,r,n),r=i;for(var l=i;n-1>=l;l++)o(s,e[l])>=0&&(t(e,l,r),r++);if(t(e,n,r),r===a)return r;a>r?i=r+1:n=r-1}return i}function n(t,n,a,o,r){return arguments.length<=3&&(o=n,r=2==arguments.length?e:a,n=0,a=t.length-1),i(t,n,a,o,r)}return n}),define("echarts/component/dataView",["require","./base","../config","zrender/tool/util","../component"],function(e){function t(e,t,n,a,o){i.call(this,e,t,n,a,o),this.dom=o.dom,this._tDom=document.createElement("div"),this._textArea=document.createElement("textArea"),this._buttonRefresh=document.createElement("button"),this._buttonRefresh.setAttribute("type","button"),this._buttonClose=document.createElement("button"),this._buttonClose.setAttribute("type","button"),this._hasShow=!1,this._zrHeight=n.getHeight(),this._zrWidth=n.getWidth(),this._tDom.className="echarts-dataview",this.hide(),this.dom.firstChild.appendChild(this._tDom),window.addEventListener?(this._tDom.addEventListener("click",this._stop),this._tDom.addEventListener("mousewheel",this._stop),this._tDom.addEventListener("mousemove",this._stop),this._tDom.addEventListener("mousedown",this._stop),this._tDom.addEventListener("mouseup",this._stop),this._tDom.addEventListener("touchstart",this._stop),this._tDom.addEventListener("touchmove",this._stop),this._tDom.addEventListener("touchend",this._stop)):(this._tDom.attachEvent("onclick",this._stop),this._tDom.attachEvent("onmousewheel",this._stop),this._tDom.attachEvent("onmousemove",this._stop),this._tDom.attachEvent("onmousedown",this._stop),this._tDom.attachEvent("onmouseup",this._stop))}var i=e("./base"),n=e("../config"),a=e("zrender/tool/util");return t.prototype={type:n.COMPONENT_TYPE_DATAVIEW,_lang:["Data View","close","refresh"],_gCssText:"position:absolute;display:block;overflow:hidden;transition:height 0.8s,background-color 1s;-moz-transition:height 0.8s,background-color 1s;-webkit-transition:height 
0.8s,background-color 1s;-o-transition:height 0.8s,background-color 1s;z-index:1;left:0;top:0;",hide:function(){this._sizeCssText="width:"+this._zrWidth+"px;height:0px;background-color:#f0ffff;",this._tDom.style.cssText=this._gCssText+this._sizeCssText},show:function(e){this._hasShow=!0;var t=this.query(this.option,"toolbox.feature.dataView.lang")||this._lang;this.option=e,this._tDom.innerHTML='<p style="padding:8px 0;margin:0 0 10px 0;border-bottom:1px solid #eee">'+(t[0]||this._lang[0])+"</p>";var i=this.query(this.option,"toolbox.feature.dataView.optionToContent");"function"!=typeof i?this._textArea.value=this._optionToContent():(this._textArea=document.createElement("div"),this._textArea.innerHTML=i(this.option)),this._textArea.style.cssText="display:block;margin:0 0 8px 0;padding:4px 6px;overflow:auto;width:100%;height:"+(this._zrHeight-100)+"px;",this._tDom.appendChild(this._textArea),this._buttonClose.style.cssText="float:right;padding:1px 6px;",this._buttonClose.innerHTML=t[1]||this._lang[1];var n=this;this._buttonClose.onclick=function(){n.hide()},this._tDom.appendChild(this._buttonClose),this.query(this.option,"toolbox.feature.dataView.readOnly")===!1?(this._buttonRefresh.style.cssText="float:right;margin-right:10px;padding:1px 6px;",this._buttonRefresh.innerHTML=t[2]||this._lang[2],this._buttonRefresh.onclick=function(){n._save()},this._textArea.readOnly=!1,this._textArea.style.cursor="default"):(this._buttonRefresh.style.cssText="display:none",this._textArea.readOnly=!0,this._textArea.style.cursor="text"),this._tDom.appendChild(this._buttonRefresh),this._sizeCssText="width:"+this._zrWidth+"px;height:"+this._zrHeight+"px;background-color:#fff;",this._tDom.style.cssText=this._gCssText+this._sizeCssText},_optionToContent:function(){var e,t,i,a,o,r,s=[],l="";if(this.option.xAxis)for(s=this.option.xAxis instanceof 
Array?this.option.xAxis:[this.option.xAxis],e=0,a=s.length;a>e;e++)if("category"==(s[e].type||"category")){for(r=[],t=0,i=s[e].data.length;i>t;t++)r.push(this.getDataFromOption(s[e].data[t]));l+=r.join(", ")+"\n\n"}if(this.option.yAxis)for(s=this.option.yAxis instanceof Array?this.option.yAxis:[this.option.yAxis],e=0,a=s.length;a>e;e++)if("category"==s[e].type){for(r=[],t=0,i=s[e].data.length;i>t;t++)r.push(this.getDataFromOption(s[e].data[t]));l+=r.join(", ")+"\n\n"}var h,d=this.option.series;for(e=0,a=d.length;a>e;e++){for(r=[],t=0,i=d[e].data.length;i>t;t++)o=d[e].data[t],h=d[e].type==n.CHART_TYPE_PIE||d[e].type==n.CHART_TYPE_MAP?(o.name||"-")+":":"",d[e].type==n.CHART_TYPE_SCATTER&&(o=this.getDataFromOption(o).join(", ")),r.push(h+this.getDataFromOption(o));l+=(d[e].name||"-")+" : \n",l+=r.join(d[e].type==n.CHART_TYPE_SCATTER?"\n":", "),l+="\n\n"}return l},_save:function(){var e=this.query(this.option,"toolbox.feature.dataView.contentToOption");if("function"!=typeof e){for(var t=this._textArea.value.split("\n"),i=[],a=0,o=t.length;o>a;a++)t[a]=this._trim(t[a]),""!==t[a]&&i.push(t[a]);this._contentToOption(i)}else e(this._textArea,this.option);this.hide();var r=this;setTimeout(function(){r.messageCenter&&r.messageCenter.dispatch(n.EVENT.DATA_VIEW_CHANGED,null,{option:r.option},r.myChart)},r.canvasSupported?800:100)},_contentToOption:function(e){var t,i,a,o,r,s,l,h=[],d=0;if(this.option.xAxis)for(h=this.option.xAxis instanceof Array?this.option.xAxis:[this.option.xAxis],t=0,o=h.length;o>t;t++)if("category"==(h[t].type||"category")){for(s=e[d].split(","),i=0,a=h[t].data.length;a>i;i++)l=this._trim(s[i]||""),r=h[t].data[i],"undefined"!=typeof h[t].data[i].value?h[t].data[i].value=l:h[t].data[i]=l;d++}if(this.option.yAxis)for(h=this.option.yAxis instanceof Array?this.option.yAxis:[this.option.yAxis],t=0,o=h.length;o>t;t++)if("category"==h[t].type){for(s=e[d].split(","),i=0,a=h[t].data.length;a>i;i++)l=this._trim(s[i]||""),r=h[t].data[i],"undefined"!=typeof 
h[t].data[i].value?h[t].data[i].value=l:h[t].data[i]=l;d++}var c=this.option.series;for(t=0,o=c.length;o>t;t++)if(d++,c[t].type==n.CHART_TYPE_SCATTER)for(var i=0,a=c[t].data.length;a>i;i++)s=e[d],l=s.replace(" ","").split(","),"undefined"!=typeof c[t].data[i].value?c[t].data[i].value=l:c[t].data[i]=l,d++;else{s=e[d].split(",");for(var i=0,a=c[t].data.length;a>i;i++)l=(s[i]||"").replace(/.*:/,""),l=this._trim(l),l="-"!=l&&""!==l?l-0:"-","undefined"!=typeof c[t].data[i].value?c[t].data[i].value=l:c[t].data[i]=l;d++}},_trim:function(e){var t=new RegExp("(^[\\s\\t\\xa0\\u3000]+)|([\\u3000\\xa0\\s\\t]+$)","g");return e.replace(t,"")},_stop:function(e){e=e||window.event,e.stopPropagation?e.stopPropagation():e.cancelBubble=!0},resize:function(){this._zrHeight=this.zr.getHeight(),this._zrWidth=this.zr.getWidth(),this._tDom.offsetHeight>10&&(this._sizeCssText="width:"+this._zrWidth+"px;height:"+this._zrHeight+"px;background-color:#fff;",this._tDom.style.cssText=this._gCssText+this._sizeCssText,this._textArea.style.cssText="display:block;margin:0 0 8px 0;padding:4px 
6px;overflow:auto;width:100%;height:"+(this._zrHeight-100)+"px;")},dispose:function(){window.removeEventListener?(this._tDom.removeEventListener("click",this._stop),this._tDom.removeEventListener("mousewheel",this._stop),this._tDom.removeEventListener("mousemove",this._stop),this._tDom.removeEventListener("mousedown",this._stop),this._tDom.removeEventListener("mouseup",this._stop),this._tDom.removeEventListener("touchstart",this._stop),this._tDom.removeEventListener("touchmove",this._stop),this._tDom.removeEventListener("touchend",this._stop)):(this._tDom.detachEvent("onclick",this._stop),this._tDom.detachEvent("onmousewheel",this._stop),this._tDom.detachEvent("onmousemove",this._stop),this._tDom.detachEvent("onmousedown",this._stop),this._tDom.detachEvent("onmouseup",this._stop)),this._buttonRefresh.onclick=null,this._buttonClose.onclick=null,this._hasShow&&(this._tDom.removeChild(this._textArea),this._tDom.removeChild(this._buttonRefresh),this._tDom.removeChild(this._buttonClose)),this._textArea=null,this._buttonRefresh=null,this._buttonClose=null,this.dom.firstChild.removeChild(this._tDom),this._tDom=null}},a.inherits(t,i),e("../component").define("dataView",t),t}),define("echarts/util/shape/Cross",["require","zrender/shape/Base","zrender/shape/Line","zrender/tool/util","./normalIsCover"],function(e){function t(e){i.call(this,e)}var i=e("zrender/shape/Base"),n=e("zrender/shape/Line"),a=e("zrender/tool/util");return t.prototype={type:"cross",buildPath:function(e,t){var i=t.rect;t.xStart=i.x,t.xEnd=i.x+i.width,t.yStart=t.yEnd=t.y,n.prototype.buildPath(e,t),t.xStart=t.xEnd=t.x,t.yStart=i.y,t.yEnd=i.y+i.height,n.prototype.buildPath(e,t)},getRect:function(e){return e.rect},isCover:e("./normalIsCover")},a.inherits(t,i),t}),define("zrender/shape/Sector",["require","../tool/math","../tool/computeBoundingBox","../tool/vector","./Base","../tool/util"],function(e){var 
t=e("../tool/math"),i=e("../tool/computeBoundingBox"),n=e("../tool/vector"),a=e("./Base"),o=n.create(),r=n.create(),s=n.create(),l=n.create(),h=function(e){a.call(this,e)};return h.prototype={type:"sector",buildPath:function(e,i){var n=i.x,a=i.y,o=i.r0||0,r=i.r,s=i.startAngle,l=i.endAngle,h=i.clockWise||!1;s=t.degreeToRadian(s),l=t.degreeToRadian(l),h||(s=-s,l=-l);var d=t.cos(s),c=t.sin(s);e.moveTo(d*o+n,c*o+a),e.lineTo(d*r+n,c*r+a),e.arc(n,a,r,s,l,!h),e.lineTo(t.cos(l)*o+n,t.sin(l)*o+a),0!==o&&e.arc(n,a,o,l,s,h),e.closePath()},getRect:function(e){if(e.__rect)return e.__rect;var a=e.x,h=e.y,d=e.r0||0,c=e.r,m=t.degreeToRadian(e.startAngle),p=t.degreeToRadian(e.endAngle),u=e.clockWise;return u||(m=-m,p=-p),d>1?i.arc(a,h,d,m,p,!u,o,s):(o[0]=s[0]=a,o[1]=s[1]=h),i.arc(a,h,c,m,p,!u,r,l),n.min(o,o,r),n.max(s,s,l),e.__rect={x:o[0],y:o[1],width:s[0]-o[0],height:s[1]-o[1]},e.__rect}},e("../tool/util").inherits(h,a),h}),define("echarts/util/shape/Candle",["require","zrender/shape/Base","zrender/tool/util","./normalIsCover"],function(e){function t(e){i.call(this,e)}var i=e("zrender/shape/Base"),n=e("zrender/tool/util");return t.prototype={type:"candle",_numberOrder:function(e,t){return t-e},buildPath:function(e,t){var i=n.clone(t.y).sort(this._numberOrder);e.moveTo(t.x,i[3]),e.lineTo(t.x,i[2]),e.moveTo(t.x-t.width/2,i[2]),e.rect(t.x-t.width/2,i[2],t.width,i[1]-i[2]),e.moveTo(t.x,i[1]),e.lineTo(t.x,i[0])},getRect:function(e){if(!e.__rect){var t=0;("stroke"==e.brushType||"fill"==e.brushType)&&(t=e.lineWidth||1);var i=n.clone(e.y).sort(this._numberOrder);e.__rect={x:Math.round(e.x-e.width/2-t/2),y:Math.round(i[3]-t/2),width:e.width+t,height:i[0]-i[3]+t}}return e.__rect},isCover:e("./normalIsCover")},n.inherits(t,i),t}),define("zrender/tool/computeBoundingBox",["require","./vector","./curve"],function(e){function t(e,t,i){if(0!==e.length){for(var n=e[0][0],a=e[0][0],o=e[0][1],r=e[0][1],s=1;s<e.length;s++){var 
l=e[s];l[0]<n&&(n=l[0]),l[0]>a&&(a=l[0]),l[1]<o&&(o=l[1]),l[1]>r&&(r=l[1])}t[0]=n,t[1]=o,i[0]=a,i[1]=r}}function i(e,t,i,n,a,r){var s=[];o.cubicExtrema(e[0],t[0],i[0],n[0],s);for(var l=0;l<s.length;l++)s[l]=o.cubicAt(e[0],t[0],i[0],n[0],s[l]);var h=[];o.cubicExtrema(e[1],t[1],i[1],n[1],h);for(var l=0;l<h.length;l++)h[l]=o.cubicAt(e[1],t[1],i[1],n[1],h[l]);s.push(e[0],n[0]),h.push(e[1],n[1]);var d=Math.min.apply(null,s),c=Math.max.apply(null,s),m=Math.min.apply(null,h),p=Math.max.apply(null,h);a[0]=d,a[1]=m,r[0]=c,r[1]=p}function n(e,t,i,n,a){var r=o.quadraticExtremum(e[0],t[0],i[0]),s=o.quadraticExtremum(e[1],t[1],i[1]);r=Math.max(Math.min(r,1),0),s=Math.max(Math.min(s,1),0);var l=1-r,h=1-s,d=l*l*e[0]+2*l*r*t[0]+r*r*i[0],c=l*l*e[1]+2*l*r*t[1]+r*r*i[1],m=h*h*e[0]+2*h*s*t[0]+s*s*i[0],p=h*h*e[1]+2*h*s*t[1]+s*s*i[1];n[0]=Math.min(e[0],i[0],d,m),n[1]=Math.min(e[1],i[1],c,p),a[0]=Math.max(e[0],i[0],d,m),a[1]=Math.max(e[1],i[1],c,p)}var a=e("./vector"),o=e("./curve"),r=a.create(),s=a.create(),l=a.create(),h=function(e,t,i,n,o,h,d,c){if(Math.abs(n-o)>=2*Math.PI)return d[0]=e-i,d[1]=t-i,c[0]=e+i,void(c[1]=t+i);if(r[0]=Math.cos(n)*i+e,r[1]=Math.sin(n)*i+t,s[0]=Math.cos(o)*i+e,s[1]=Math.sin(o)*i+t,a.min(d,r,s),a.max(c,r,s),n%=2*Math.PI,0>n&&(n+=2*Math.PI),o%=2*Math.PI,0>o&&(o+=2*Math.PI),n>o&&!h?o+=2*Math.PI:o>n&&h&&(n+=2*Math.PI),h){var m=o;o=n,n=m}for(var p=0;o>p;p+=Math.PI/2)p>n&&(l[0]=Math.cos(p)*i+e,l[1]=Math.sin(p)*i+t,a.min(d,l,d),a.max(c,l,c))};return t.cubeBezier=i,t.quadraticBezier=n,t.arc=h,t}),define("echarts/util/shape/Chain",["require","zrender/shape/Base","./Icon","zrender/shape/util/dashedLineTo","zrender/tool/util","zrender/tool/matrix"],function(e){function t(e){i.call(this,e)}var i=e("zrender/shape/Base"),n=e("./Icon"),a=e("zrender/shape/util/dashedLineTo"),o=e("zrender/tool/util"),r=e("zrender/tool/matrix");return t.prototype={type:"chain",brush:function(e,t){var 
i=this.style;t&&(i=this.getHighlightStyle(i,this.highlightStyle||{})),e.save(),this.setContext(e,i),this.setTransform(e),e.save(),e.beginPath(),this.buildLinePath(e,i),e.stroke(),e.restore(),this.brushSymbol(e,i),e.restore()},buildLinePath:function(e,t){var i=t.x,n=t.y+5,o=t.width,r=t.height/2-10;if(e.moveTo(i,n),e.lineTo(i,n+r),e.moveTo(i+o,n),e.lineTo(i+o,n+r),e.moveTo(i,n+r/2),t.lineType&&"solid"!=t.lineType){if("dashed"==t.lineType||"dotted"==t.lineType){var s=(t.lineWidth||1)*("dashed"==t.lineType?5:1);a(e,i,n+r/2,i+o,n+r/2,s)}}else e.lineTo(i+o,n+r/2)},brushSymbol:function(e,t){var i=t.y+t.height/4;e.save();for(var a,o=t.chainPoint,r=0,s=o.length;s>r;r++){if(a=o[r],"none"!=a.symbol){e.beginPath();var l=a.symbolSize;n.prototype.buildPath(e,{iconType:a.symbol,x:a.x-l,y:i-l,width:2*l,height:2*l,n:a.n}),e.fillStyle=a.isEmpty?"#fff":t.strokeColor,e.closePath(),e.fill(),e.stroke()}a.showLabel&&(e.font=a.textFont,e.fillStyle=a.textColor,e.textAlign=a.textAlign,e.textBaseline=a.textBaseline,a.rotation?(e.save(),this._updateTextTransform(e,a.rotation),e.fillText(a.name,a.textX,a.textY),e.restore()):e.fillText(a.name,a.textX,a.textY))}e.restore()},_updateTextTransform:function(e,t){var i=r.create();if(r.identity(i),0!==t[0]){var n=t[1]||0,a=t[2]||0;(n||a)&&r.translate(i,i,[-n,-a]),r.rotate(i,i,t[0]),(n||a)&&r.translate(i,i,[n,a])}e.transform.apply(e,i)},isCover:function(e,t){var i=this.style;return e>=i.x&&e<=i.x+i.width&&t>=i.y&&t<=i.y+i.height?!0:!1}},o.inherits(t,i),t}),define("zrender/shape/Ring",["require","./Base","../tool/util"],function(e){var t=e("./Base"),i=function(e){t.call(this,e)};return i.prototype={type:"ring",buildPath:function(e,t){e.arc(t.x,t.y,t.r,0,2*Math.PI,!1),e.moveTo(t.x+t.r0,t.y),e.arc(t.x,t.y,t.r0,0,2*Math.PI,!0)},getRect:function(e){if(e.__rect)return e.__rect;var t;return 
t="stroke"==e.brushType||"fill"==e.brushType?e.lineWidth||1:0,e.__rect={x:Math.round(e.x-e.r-t/2),y:Math.round(e.y-e.r-t/2),width:2*e.r+t,height:2*e.r+t},e.__rect}},e("../tool/util").inherits(i,t),i}),define("echarts/component/axis",["require","./base","zrender/shape/Line","../config","../util/ecData","zrender/tool/util","zrender/tool/color","./categoryAxis","./valueAxis","../component"],function(e){function t(e,t,n,a,o,r){i.call(this,e,t,n,a,o),this.axisType=r,this._axisList=[],this.refresh(a)}var i=e("./base"),n=e("zrender/shape/Line"),a=e("../config"),o=e("../util/ecData"),r=e("zrender/tool/util"),s=e("zrender/tool/color");return t.prototype={type:a.COMPONENT_TYPE_AXIS,axisBase:{_buildAxisLine:function(){var e=this.option.axisLine.lineStyle.width,t=e/2,i={_axisShape:"axisLine",zlevel:this.getZlevelBase(),z:this.getZBase()+3,hoverable:!1},a=this.grid;switch(this.option.position){case"left":i.style={xStart:a.getX()-t,yStart:a.getYend(),xEnd:a.getX()-t,yEnd:a.getY(),lineCap:"round"};break;case"right":i.style={xStart:a.getXend()+t,yStart:a.getYend(),xEnd:a.getXend()+t,yEnd:a.getY(),lineCap:"round"};break;case"bottom":i.style={xStart:a.getX(),yStart:a.getYend()+t,xEnd:a.getXend(),yEnd:a.getYend()+t,lineCap:"round"};break;case"top":i.style={xStart:a.getX(),yStart:a.getY()-t,xEnd:a.getXend(),yEnd:a.getY()-t,lineCap:"round"}}var o=i.style;""!==this.option.name&&(o.text=this.option.name,o.textPosition=this.option.nameLocation,o.textFont=this.getFont(this.option.nameTextStyle),this.option.nameTextStyle.align&&(o.textAlign=this.option.nameTextStyle.align),this.option.nameTextStyle.baseline&&(o.textBaseline=this.option.nameTextStyle.baseline),this.option.nameTextStyle.color&&(o.textColor=this.option.nameTextStyle.color)),o.strokeColor=this.option.axisLine.lineStyle.color,o.lineWidth=e,this.isHorizontal()?o.yStart=o.yEnd=this.subPixelOptimize(o.yEnd,e):o.xStart=o.xEnd=this.subPixelOptimize(o.xEnd,e),o.lineType=this.option.axisLine.lineStyle.type,i=new 
n(i),this.shapeList.push(i)},_axisLabelClickable:function(e,t){return e?(o.pack(t,void 0,-1,void 0,-1,t.style.text),t.hoverable=!0,t.clickable=!0,t.highlightStyle={color:s.lift(t.style.color,1),brushType:"fill"},t):t},refixAxisShape:function(e,t){if(this.option.axisLine.onZero){var i;if(this.isHorizontal()&&null!=t)for(var n=0,a=this.shapeList.length;a>n;n++)"axisLine"===this.shapeList[n]._axisShape?(this.shapeList[n].style.yStart=this.shapeList[n].style.yEnd=this.subPixelOptimize(t,this.shapeList[n].stylelineWidth),this.zr.modShape(this.shapeList[n].id)):"axisTick"===this.shapeList[n]._axisShape&&(i=this.shapeList[n].style.yEnd-this.shapeList[n].style.yStart,this.shapeList[n].style.yStart=t-i,this.shapeList[n].style.yEnd=t,this.zr.modShape(this.shapeList[n].id));if(!this.isHorizontal()&&null!=e)for(var n=0,a=this.shapeList.length;a>n;n++)"axisLine"===this.shapeList[n]._axisShape?(this.shapeList[n].style.xStart=this.shapeList[n].style.xEnd=this.subPixelOptimize(e,this.shapeList[n].stylelineWidth),this.zr.modShape(this.shapeList[n].id)):"axisTick"===this.shapeList[n]._axisShape&&(i=this.shapeList[n].style.xEnd-this.shapeList[n].style.xStart,this.shapeList[n].style.xStart=e,this.shapeList[n].style.xEnd=e+i,this.zr.modShape(this.shapeList[n].id))}},getPosition:function(){return this.option.position},isHorizontal:function(){return"bottom"===this.option.position||"top"===this.option.position}},reformOption:function(e){if(!e||e instanceof Array&&0===e.length?e=[{type:a.COMPONENT_TYPE_AXIS_VALUE}]:e instanceof Array||(e=[e]),e.length>2&&(e=[e[0],e[1]]),"xAxis"===this.axisType){(!e[0].position||"bottom"!=e[0].position&&"top"!=e[0].position)&&(e[0].position="bottom"),e.length>1&&(e[1].position="bottom"===e[0].position?"top":"bottom");for(var 
t=0,i=e.length;i>t;t++)e[t].type=e[t].type||"category",e[t].xAxisIndex=t,e[t].yAxisIndex=-1}else{(!e[0].position||"left"!=e[0].position&&"right"!=e[0].position)&&(e[0].position="left"),e.length>1&&(e[1].position="left"===e[0].position?"right":"left");for(var t=0,i=e.length;i>t;t++)e[t].type=e[t].type||"value",e[t].xAxisIndex=-1,e[t].yAxisIndex=t}return e},refresh:function(t){var i;t&&(this.option=t,"xAxis"===this.axisType?(this.option.xAxis=this.reformOption(t.xAxis),i=this.option.xAxis):(this.option.yAxis=this.reformOption(t.yAxis),i=this.option.yAxis),this.series=t.series);for(var n=e("./categoryAxis"),a=e("./valueAxis"),o=Math.max(i&&i.length||0,this._axisList.length),r=0;o>r;r++)!this._axisList[r]||!t||i[r]&&this._axisList[r].type==i[r].type||(this._axisList[r].dispose&&this._axisList[r].dispose(),this._axisList[r]=!1),this._axisList[r]?this._axisList[r].refresh&&this._axisList[r].refresh(i?i[r]:!1,this.series):i&&i[r]&&(this._axisList[r]="category"===i[r].type?new n(this.ecTheme,this.messageCenter,this.zr,i[r],this.myChart,this.axisBase):new a(this.ecTheme,this.messageCenter,this.zr,i[r],this.myChart,this.axisBase,this.series))},getAxis:function(e){return this._axisList[e]},getAxisCount:function(){return this._axisList.length},clear:function(){for(var e=0,t=this._axisList.length;t>e;e++)this._axisList[e].dispose&&this._axisList[e].dispose();this._axisList=[]}},r.inherits(t,i),e("../component").define("axis",t),t}),define("echarts/component/grid",["require","./base","zrender/shape/Rectangle","../config","zrender/tool/util","../component"],function(e){function t(e,t,n,a,o){i.call(this,e,t,n,a,o),this.refresh(a)}var i=e("./base"),n=e("zrender/shape/Rectangle"),a=e("../config");a.grid={zlevel:0,z:0,x:80,y:60,x2:80,y2:60,backgroundColor:"rgba(0,0,0,0)",borderWidth:1,borderColor:"#ccc"};var o=e("zrender/tool/util");return t.prototype={type:a.COMPONENT_TYPE_GRID,getX:function(){return this._x},getY:function(){return this._y},getWidth:function(){return 
this._width},getHeight:function(){return this._height},getXend:function(){return this._x+this._width},getYend:function(){return this._y+this._height},getArea:function(){return{x:this._x,y:this._y,width:this._width,height:this._height}},getBbox:function(){return[[this._x,this._y],[this.getXend(),this.getYend()]]},refixAxisShape:function(e){for(var t,i,n,o=e.xAxis._axisList.concat(e.yAxis?e.yAxis._axisList:[]),r=o.length;r--;)n=o[r],n.type==a.COMPONENT_TYPE_AXIS_VALUE&&n._min<0&&n._max>=0&&(n.isHorizontal()?t=n.getCoord(0):i=n.getCoord(0));if("undefined"!=typeof t||"undefined"!=typeof i)for(r=o.length;r--;)o[r].refixAxisShape(t,i)},refresh:function(e){if(e||this._zrWidth!=this.zr.getWidth()||this._zrHeight!=this.zr.getHeight()){this.clear(),this.option=e||this.option,this.option.grid=this.reformOption(this.option.grid);var t=this.option.grid;this._zrWidth=this.zr.getWidth(),this._zrHeight=this.zr.getHeight(),this._x=this.parsePercent(t.x,this._zrWidth),this._y=this.parsePercent(t.y,this._zrHeight);var i=this.parsePercent(t.x2,this._zrWidth),a=this.parsePercent(t.y2,this._zrHeight);this._width="undefined"==typeof t.width?this._zrWidth-this._x-i:this.parsePercent(t.width,this._zrWidth),this._width=this._width<=0?10:this._width,this._height="undefined"==typeof t.height?this._zrHeight-this._y-a:this.parsePercent(t.height,this._zrHeight),this._height=this._height<=0?10:this._height,this._x=this.subPixelOptimize(this._x,t.borderWidth),this._y=this.subPixelOptimize(this._y,t.borderWidth),this.shapeList.push(new 
n({zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this._x,y:this._y,width:this._width,height:this._height,brushType:t.borderWidth>0?"both":"fill",color:t.backgroundColor,strokeColor:t.borderColor,lineWidth:t.borderWidth}})),this.zr.addShape(this.shapeList[0])}}},o.inherits(t,i),e("../component").define("grid",t),t}),define("echarts/component/dataZoom",["require","./base","zrender/shape/Rectangle","zrender/shape/Polygon","../util/shape/Icon","../config","../util/date","zrender/tool/util","../component"],function(e){function t(e,t,n,a,o){i.call(this,e,t,n,a,o);var r=this;r._ondrift=function(e,t){return r.__ondrift(this,e,t)},r._ondragend=function(){return r.__ondragend()},this._fillerSize=30,this._isSilence=!1,this._zoom={},this.option.dataZoom=this.reformOption(this.option.dataZoom),this.zoomOption=this.option.dataZoom,this._handleSize=this.zoomOption.handleSize,this.myChart.canvasSupported||(this.zoomOption.realtime=!1),this._location=this._getLocation(),this._zoom=this._getZoom(),this._backupData(),this.option.dataZoom.show&&this._buildShape(),this._syncData()}var i=e("./base"),n=e("zrender/shape/Rectangle"),a=e("zrender/shape/Polygon"),o=e("../util/shape/Icon"),r=e("../config");r.dataZoom={zlevel:0,z:4,show:!1,orient:"horizontal",backgroundColor:"rgba(0,0,0,0)",dataBackgroundColor:"#eee",fillerColor:"rgba(144,197,237,0.2)",handleColor:"rgba(70,130,180,0.8)",handleSize:8,showDetail:!0,realtime:!0};var s=e("../util/date"),l=e("zrender/tool/util");return t.prototype={type:r.COMPONENT_TYPE_DATAZOOM,_buildShape:function(){this._buildBackground(),this._buildFiller(),this._buildHandle(),this._buildFrame();for(var e=0,t=this.shapeList.length;t>e;e++)this.zr.addShape(this.shapeList[e]);this._syncFrameShape()},_getLocation:function(){var 
e,t,i,n,a=this.component.grid;return"horizontal"==this.zoomOption.orient?(i=this.zoomOption.width||a.getWidth(),n=this.zoomOption.height||this._fillerSize,e=null!=this.zoomOption.x?this.zoomOption.x:a.getX(),t=null!=this.zoomOption.y?this.zoomOption.y:this.zr.getHeight()-n-2):(i=this.zoomOption.width||this._fillerSize,n=this.zoomOption.height||a.getHeight(),e=null!=this.zoomOption.x?this.zoomOption.x:2,t=null!=this.zoomOption.y?this.zoomOption.y:a.getY()),{x:e,y:t,width:i,height:n}},_getZoom:function(){var e=this.option.series,t=this.option.xAxis;!t||t instanceof Array||(t=[t],this.option.xAxis=t);var i=this.option.yAxis;!i||i instanceof Array||(i=[i],this.option.yAxis=i);var n,a,o=[],s=this.zoomOption.xAxisIndex;if(t&&null==s){n=[];for(var l=0,h=t.length;h>l;l++)("category"==t[l].type||null==t[l].type)&&n.push(l)}else n=s instanceof Array?s:null!=s?[s]:[];if(s=this.zoomOption.yAxisIndex,i&&null==s){a=[];for(var l=0,h=i.length;h>l;l++)"category"==i[l].type&&a.push(l)}else a=s instanceof Array?s:null!=s?[s]:[];for(var d,l=0,h=e.length;h>l;l++)if(d=e[l],d.type==r.CHART_TYPE_LINE||d.type==r.CHART_TYPE_BAR||d.type==r.CHART_TYPE_SCATTER||d.type==r.CHART_TYPE_K){for(var c=0,m=n.length;m>c;c++)if(n[c]==(d.xAxisIndex||0)){o.push(l);break}for(var c=0,m=a.length;m>c;c++)if(a[c]==(d.yAxisIndex||0)){o.push(l);break}null==this.zoomOption.xAxisIndex&&null==this.zoomOption.yAxisIndex&&d.data&&this.getDataFromOption(d.data[0])instanceof Array&&(d.type==r.CHART_TYPE_SCATTER||d.type==r.CHART_TYPE_LINE||d.type==r.CHART_TYPE_BAR)&&o.push(l)}var p=null!=this._zoom.start?this._zoom.start:null!=this.zoomOption.start?this.zoomOption.start:0,u=null!=this._zoom.end?this._zoom.end:null!=this.zoomOption.end?this.zoomOption.end:100;p>u&&(p+=u,u=p-u,p-=u);var 
/* NOTE(review): interior of the dataZoom component's prototype — the enclosing object literal (and the start of the method whose tail opens this segment, which builds the initial zoom-state record) lie earlier in the file. */
V=Math.round((u-p)/100*("horizontal"==this.zoomOption.orient?this._location.width:this._location.height));return{start:p,end:u,start2:0,end2:100,size:V,xAxisIndex:n,yAxisIndex:a,seriesIndex:o,scatterMap:this._zoom.scatterMap||{}}},
/* _backupData: snapshot the full (un-zoomed) data arrays of every bound x-axis, y-axis and series into _originalData; for scatter/line/bar series whose datapoints are arrays, also back up axis scale flags and compute value extrema for scatter mapping. */
_backupData:function(){this._originalData={xAxis:{},yAxis:{},series:{}};for(var e=this.option.xAxis,t=this._zoom.xAxisIndex,i=0,n=t.length;n>i;i++)this._originalData.xAxis[t[i]]=e[t[i]].data;for(var a=this.option.yAxis,o=this._zoom.yAxisIndex,i=0,n=o.length;n>i;i++)this._originalData.yAxis[o[i]]=a[o[i]].data;for(var s,l=this.option.series,h=this._zoom.seriesIndex,i=0,n=h.length;n>i;i++)s=l[h[i]],this._originalData.series[h[i]]=s.data,s.data&&this.getDataFromOption(s.data[0])instanceof Array&&(s.type==r.CHART_TYPE_SCATTER||s.type==r.CHART_TYPE_LINE||s.type==r.CHART_TYPE_BAR)&&(this._backupScale(),this._calculScatterMap(h[i]))},
/* _calculScatterMap: compute x/y value extrema for series t by instantiating throwaway value axes (category axes coerced to type "value") and reading their extremum, then disposing them. */
_calculScatterMap:function(t){this._zoom.scatterMap=this._zoom.scatterMap||{},this._zoom.scatterMap[t]=this._zoom.scatterMap[t]||{};var i=e("../component"),n=i.get("axis"),a=l.clone(this.option.xAxis);"category"==a[0].type&&(a[0].type="value"),a[1]&&"category"==a[1].type&&(a[1].type="value");var o=new n(this.ecTheme,null,!1,{xAxis:a,series:this.option.series},this,"xAxis"),r=this.option.series[t].xAxisIndex||0;this._zoom.scatterMap[t].x=o.getAxis(r).getExtremum(),o.dispose(),a=l.clone(this.option.yAxis),"category"==a[0].type&&(a[0].type="value"),a[1]&&"category"==a[1].type&&(a[1].type="value"),o=new n(this.ecTheme,null,!1,{yAxis:a,series:this.option.series},this,"yAxis"),r=this.option.series[t].yAxisIndex||0,this._zoom.scatterMap[t].y=o.getAxis(r).getExtremum(),o.dispose()},
/* _buildBackground: background rectangle plus a polygon silhouette of the first controlled series' backed-up data (sampled with step _ when there are more points than pixels). */
_buildBackground:function(){var e=this._location.width,t=this._location.height;this.shapeList.push(new n({zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this._location.x,y:this._location.y,width:e,height:t,color:this.zoomOption.backgroundColor}}));for(var i=0,o=this._originalData.xAxis,s=this._zoom.xAxisIndex,l=0,h=s.length;h>l;l++)i=Math.max(i,o[s[l]].length);for(var d=this._originalData.yAxis,c=this._zoom.yAxisIndex,l=0,h=c.length;h>l;l++)i=Math.max(i,d[c[l]].length);for(var m,p=this._zoom.seriesIndex[0],u=this._originalData.series[p],V=Number.MIN_VALUE,U=Number.MAX_VALUE,l=0,h=u.length;h>l;l++)m=this.getDataFromOption(u[l],0),this.option.series[p].type==r.CHART_TYPE_K&&(m=m[1]),isNaN(m)&&(m=0),V=Math.max(V,m),U=Math.min(U,m);var g=V-U,f=[],y=e/(i-(i>1?1:0)),b=t/(i-(i>1?1:0)),_=1;"horizontal"==this.zoomOption.orient&&1>y?_=Math.floor(3*i/e):"vertical"==this.zoomOption.orient&&1>b&&(_=Math.floor(3*i/t));for(var l=0,h=i;h>l;l+=_)m=this.getDataFromOption(u[l],0),this.option.series[p].type==r.CHART_TYPE_K&&(m=m[1]),isNaN(m)&&(m=0),f.push("horizontal"==this.zoomOption.orient?[this._location.x+y*l,this._location.y+t-1-Math.round((m-U)/g*(t-10))]:[this._location.x+1+Math.round((m-U)/g*(e-10)),this._location.y+b*(h-l-1)]);"horizontal"==this.zoomOption.orient?(f.push([this._location.x+e,this._location.y+t]),f.push([this._location.x,this._location.y+t])):(f.push([this._location.x,this._location.y]),f.push([this._location.x,this._location.y+t])),this.shapeList.push(new a({zlevel:this.getZlevelBase(),z:this.getZBase(),style:{pointList:f,color:this.zoomOption.dataBackgroundColor},hoverable:!1}))},
/* _buildFiller: draggable filler bar between the two handles; drag callbacks wired to _ondrift/_ondragend. (sic: "_fillerShae" is the upstream spelling.) */
_buildFiller:function(){this._fillerShae={zlevel:this.getZlevelBase(),z:this.getZBase(),draggable:!0,ondrift:this._ondrift,ondragend:this._ondragend,_type:"filler"},this._fillerShae.style="horizontal"==this.zoomOption.orient?{x:this._location.x+Math.round(this._zoom.start/100*this._location.width)+this._handleSize,y:this._location.y,width:this._zoom.size-2*this._handleSize,height:this._location.height,color:this.zoomOption.fillerColor,text:":::",textPosition:"inside"}:{x:this._location.x,y:this._location.y+Math.round(this._zoom.start/100*this._location.height)+this._handleSize,width:this._location.width,height:this._zoom.size-2*this._handleSize,color:this.zoomOption.fillerColor,text:"::",textPosition:"inside"},this._fillerShae.highlightStyle={brushType:"fill",color:"rgba(0,0,0,0)"},this._fillerShae=new n(this._fillerShae),this.shapeList.push(this._fillerShae)},
/* _buildHandle: start/end drag handles flanking the filler; when showDetail is on their highlight text shows the current range labels from _getDetail. */
_buildHandle:function(){var e=this.zoomOption.showDetail?this._getDetail():{start:"",end:""};this._startShape={zlevel:this.getZlevelBase(),z:this.getZBase(),draggable:!0,style:{iconType:"rectangle",x:this._location.x,y:this._location.y,width:this._handleSize,height:this._handleSize,color:this.zoomOption.handleColor,text:"=",textPosition:"inside"},highlightStyle:{text:e.start,brushType:"fill",textPosition:"left"},ondrift:this._ondrift,ondragend:this._ondragend},"horizontal"==this.zoomOption.orient?(this._startShape.style.height=this._location.height,this._endShape=l.clone(this._startShape),this._startShape.style.x=this._fillerShae.style.x-this._handleSize,this._endShape.style.x=this._fillerShae.style.x+this._fillerShae.style.width,this._endShape.highlightStyle.text=e.end,this._endShape.highlightStyle.textPosition="right"):(this._startShape.style.width=this._location.width,this._endShape=l.clone(this._startShape),this._startShape.style.y=this._fillerShae.style.y+this._fillerShae.style.height,this._startShape.highlightStyle.textPosition="bottom",this._endShape.style.y=this._fillerShae.style.y-this._handleSize,this._endShape.highlightStyle.text=e.end,this._endShape.highlightStyle.textPosition="top"),this._startShape=new o(this._startShape),this._endShape=new o(this._endShape),this.shapeList.push(this._startShape),this.shapeList.push(this._endShape)},
/* _buildFrame: two 1px stroke frame rectangles (sub-pixel optimized) on either side of the filler. */
_buildFrame:function(){var e=this.subPixelOptimize(this._location.x,1),t=this.subPixelOptimize(this._location.y,1);this._startFrameShape={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:e,y:t,width:this._location.width-(e>this._location.x?1:0),height:this._location.height-(t>this._location.y?1:0),lineWidth:1,brushType:"stroke",strokeColor:this.zoomOption.handleColor}},this._endFrameShape=l.clone(this._startFrameShape),this._startFrameShape=new n(this._startFrameShape),this._endFrameShape=new n(this._endFrameShape),this.shapeList.push(this._startFrameShape),this.shapeList.push(this._endFrameShape)},
/* _syncHandleShape: after the filler moved, reposition both handles from it and recompute _zoom.start/_zoom.end as percentages of the control's extent. */
_syncHandleShape:function(){"horizontal"==this.zoomOption.orient?(this._startShape.style.x=this._fillerShae.style.x-this._handleSize,this._endShape.style.x=this._fillerShae.style.x+this._fillerShae.style.width,this._zoom.start=(this._startShape.style.x-this._location.x)/this._location.width*100,this._zoom.end=(this._endShape.style.x+this._handleSize-this._location.x)/this._location.width*100):(this._startShape.style.y=this._fillerShae.style.y+this._fillerShae.style.height,this._endShape.style.y=this._fillerShae.style.y-this._handleSize,this._zoom.start=(this._location.y+this._location.height-this._startShape.style.y)/this._location.height*100,this._zoom.end=(this._location.y+this._location.height-this._endShape.style.y-this._handleSize)/this._location.height*100),this.zr.modShape(this._startShape.id),this.zr.modShape(this._endShape.id),this._syncFrameShape(),this.zr.refreshNextFrame()},
/* _syncFillerShape: after a handle moved, stretch the filler between the two handles and recompute the zoom percentages. */
_syncFillerShape:function(){var e,t;"horizontal"==this.zoomOption.orient?(e=this._startShape.style.x,t=this._endShape.style.x,this._fillerShae.style.x=Math.min(e,t)+this._handleSize,this._fillerShae.style.width=Math.abs(e-t)-this._handleSize,this._zoom.start=(Math.min(e,t)-this._location.x)/this._location.width*100,this._zoom.end=(Math.max(e,t)+this._handleSize-this._location.x)/this._location.width*100):(e=this._startShape.style.y,t=this._endShape.style.y,this._fillerShae.style.y=Math.min(e,t)+this._handleSize,this._fillerShae.style.height=Math.abs(e-t)-this._handleSize,this._zoom.start=(this._location.y+this._location.height-Math.max(e,t))/this._location.height*100,this._zoom.end=(this._location.y+this._location.height-Math.min(e,t)-this._handleSize)/this._location.height*100),this.zr.modShape(this._fillerShae.id),this._syncFrameShape(),this.zr.refreshNextFrame()},
/* _syncFrameShape: stretch the two frame rectangles over the regions outside the filler. */
_syncFrameShape:function(){"horizontal"==this.zoomOption.orient?(this._startFrameShape.style.width=this._fillerShae.style.x-this._location.x,this._endFrameShape.style.x=this._fillerShae.style.x+this._fillerShae.style.width,this._endFrameShape.style.width=this._location.x+this._location.width-this._endFrameShape.style.x):(this._startFrameShape.style.y=this._fillerShae.style.y+this._fillerShae.style.height,this._startFrameShape.style.height=this._location.y+this._location.height-this._startFrameShape.style.y,this._endFrameShape.style.height=this._fillerShae.style.y-this._location.y),this.zr.modShape(this._startFrameShape.id),this.zr.modShape(this._endFrameShape.id)},
/* _syncShape: place handles and filler from the current zoom percentages (no-op when the control is hidden), then refresh. */
_syncShape:function(){this.zoomOption.show&&("horizontal"==this.zoomOption.orient?(this._startShape.style.x=this._location.x+this._zoom.start/100*this._location.width,this._endShape.style.x=this._location.x+this._zoom.end/100*this._location.width-this._handleSize,this._fillerShae.style.x=this._startShape.style.x+this._handleSize,this._fillerShae.style.width=this._endShape.style.x-this._startShape.style.x-this._handleSize):(this._startShape.style.y=this._location.y+this._location.height-this._zoom.start/100*this._location.height,this._endShape.style.y=this._location.y+this._location.height-this._zoom.end/100*this._location.height-this._handleSize,this._fillerShae.style.y=this._endShape.style.y+this._handleSize,this._fillerShae.style.height=this._startShape.style.y-this._endShape.style.y-this._handleSize),this.zr.modShape(this._startShape.id),this.zr.modShape(this._endShape.id),this.zr.modShape(this._fillerShae.id),this._syncFrameShape(),this.zr.refresh())},
/* _syncData: slice (or, for scatter-like data, filter) every backed-up array to the current zoom range and write it into option; dispatches DATA_ZOOM unless silenced or (when not forced by e) realtime is off. */
_syncData:function(e){var t,i,n,a,o;for(var s in this._originalData){t=this._originalData[s];for(var l in t)o=t[l],null!=o&&(a=o.length,i=Math.floor(this._zoom.start/100*a),n=Math.ceil(this._zoom.end/100*a),this.getDataFromOption(o[0])instanceof Array&&this.option[s][l].type!=r.CHART_TYPE_K?(this._setScale(),this.option[s][l].data=this._synScatterData(l,o)):this.option[s][l].data=o.slice(i,n))}this._isSilence||!this.zoomOption.realtime&&!e||this.messageCenter.dispatch(r.EVENT.DATA_ZOOM,null,{zoom:this._zoom},this.myChart)},
/* _synScatterData: keep only the datapoints of series e whose (x,y) fall inside the value window derived from the zoom percentages and the extrema recorded in scatterMap (mapped through dataMappingMethods when present). */
_synScatterData:function(e,t){if(0===this._zoom.start&&100==this._zoom.end&&0===this._zoom.start2&&100==this._zoom.end2)return t;var i,n,a,o,r,s=[],l=this._zoom.scatterMap[e];"horizontal"==this.zoomOption.orient?(i=l.x.max-l.x.min,n=this._zoom.start/100*i+l.x.min,a=this._zoom.end/100*i+l.x.min,i=l.y.max-l.y.min,o=this._zoom.start2/100*i+l.y.min,r=this._zoom.end2/100*i+l.y.min):(i=l.x.max-l.x.min,n=this._zoom.start2/100*i+l.x.min,a=this._zoom.end2/100*i+l.x.min,i=l.y.max-l.y.min,o=this._zoom.start/100*i+l.y.min,r=this._zoom.end/100*i+l.y.min);var h;(h=l.x.dataMappingMethods)&&(n=h.coord2Value(n),a=h.coord2Value(a)),(h=l.y.dataMappingMethods)&&(o=h.coord2Value(o),r=h.coord2Value(r));for(var d,c=0,m=t.length;m>c;c++)d=t[c].value||t[c],d[0]>=n&&d[0]<=a&&d[1]>=o&&d[1]<=r&&s.push(t[c]);return s},
/* _setScale: force axis scale mode on while any zoom window is active, otherwise restore the backed-up _scale flag. */
_setScale:function(){var e=0!==this._zoom.start||100!==this._zoom.end||0!==this._zoom.start2||100!==this._zoom.end2,t={xAxis:this.option.xAxis,yAxis:this.option.yAxis};for(var i in t)for(var n=0,a=t[i].length;a>n;n++)t[i][n].scale=e||t[i][n]._scale},
/* _backupScale: remember each axis's original scale flag in _scale before _setScale may override it. */
_backupScale:function(){var e={xAxis:this.option.xAxis,yAxis:this.option.yAxis};for(var t in e)for(var i=0,n=e[t].length;n>i;i++)e[t][i]._scale=e[t][i].scale},
/* _getDetail: human-readable start/end labels for the handles — category data indices when a backed-up axis exists, otherwise values (or formatted dates) interpolated from the scatterMap extrema. */
_getDetail:function(){for(var e=["xAxis","yAxis"],t=0,i=e.length;i>t;t++){var n=this._originalData[e[t]];for(var a in n){var o=n[a];if(null!=o){var r=o.length,l=Math.floor(this._zoom.start/100*r),h=Math.ceil(this._zoom.end/100*r);return h-=h>0?1:0,{start:this.getDataFromOption(o[l]),end:this.getDataFromOption(o[h])}}}}e="horizontal"==this.zoomOption.orient?"xAxis":"yAxis";var d=this._zoom.seriesIndex[0],c=this.option.series[d][e+"Index"]||0,m=this.option[e][c].type,p=this._zoom.scatterMap[d][e.charAt(0)].min,u=this._zoom.scatterMap[d][e.charAt(0)].max,V=u-p;if("value"==m)return{start:p+V*this._zoom.start/100,end:p+V*this._zoom.end/100};if("time"==m){u=p+V*this._zoom.end/100,p+=V*this._zoom.start/100;var U=s.getAutoFormatter(p,u).formatter;return{start:s.format(U,p),end:s.format(U,u)}}return{start:"",end:""}},
/* __ondrift: drag callback — clamps the dragged shape (redirected to the filler when zoomLock is on) inside the control, syncs the complementary shapes, optionally syncs data in realtime and updates detail text. Returns !0 to mark the drift handled. */
__ondrift:function(e,t,i){this.zoomOption.zoomLock&&(e=this._fillerShae);var n="filler"==e._type?this._handleSize:0;if("horizontal"==this.zoomOption.orient?e.style.x+t-n<=this._location.x?e.style.x=this._location.x+n:e.style.x+t+e.style.width+n>=this._location.x+this._location.width?e.style.x=this._location.x+this._location.width-e.style.width-n:e.style.x+=t:e.style.y+i-n<=this._location.y?e.style.y=this._location.y+n:e.style.y+i+e.style.height+n>=this._location.y+this._location.height?e.style.y=this._location.y+this._location.height-e.style.height-n:e.style.y+=i,"filler"==e._type?this._syncHandleShape():this._syncFillerShape(),this.zoomOption.realtime&&this._syncData(),this.zoomOption.showDetail){var a=this._getDetail();this._startShape.style.text=this._startShape.highlightStyle.text=a.start,this._endShape.style.text=this._endShape.highlightStyle.text=a.end,this._startShape.style.textPosition=this._startShape.highlightStyle.textPosition,this._endShape.style.textPosition=this._endShape.highlightStyle.textPosition}return!0},
/* __ondragend: restore handle glyphs after a drag and flag isDragend for the ondragend event handler below. */
__ondragend:function(){this.zoomOption.showDetail&&(this._startShape.style.text=this._endShape.style.text="=",this._startShape.style.textPosition=this._endShape.style.textPosition="inside",this.zr.modShape(this._startShape.id),this.zr.modShape(this._endShape.id),this.zr.refreshNextFrame()),this.isDragend=!0},
/* ondragend: on drop, sync data (when not already done in realtime) and dispatch DATA_ZOOM unless silenced. */
ondragend:function(e,t){this.isDragend&&e.target&&(!this.zoomOption.realtime&&this._syncData(),t.dragOut=!0,t.dragIn=!0,this._isSilence||this.zoomOption.realtime||this.messageCenter.dispatch(r.EVENT.DATA_ZOOM,null,{zoom:this._zoom},this.myChart),t.needRefresh=!1,this.isDragend=!1)},
ondataZoom:function(e,t){t.needRefresh=!0},
/* absoluteZoom: set the zoom window directly from percentages and re-sync shapes and data. */
absoluteZoom:function(e){this._zoom.start=e.start,this._zoom.end=e.end,this._zoom.start2=e.start2,this._zoom.end2=e.end2,this._syncShape(),this._syncData(!0)},
/* rectZoom: apply a rubber-band rectangle — falsy e resets to the full range; otherwise the rectangle is normalized, clipped to the grid (returning !1 when fully outside) and the zoom window shrunk proportionally (continues past this segment). */
rectZoom:function(e){if(!e)return this._zoom.start=this._zoom.start2=0,this._zoom.end=this._zoom.end2=100,this._syncShape(),this._syncData(!0),this._zoom;var t=this.component.grid.getArea(),i={x:e.x,y:e.y,width:e.width,height:e.height};if(i.width<0&&(i.x+=i.width,i.width=-i.width),i.height<0&&(i.y+=i.height,i.height=-i.height),i.x>t.x+t.width||i.y>t.y+t.height)return!1;i.x<t.x&&(i.x=t.x),i.x+i.width>t.x+t.width&&(i.width=t.x+t.width-i.x),i.y+i.height>t.y+t.height&&(i.height=t.y+t.height-i.y);var 
n,a=(i.x-t.x)/t.width,o=1-(i.x+i.width-t.x)/t.width,r=1-(i.y+i.height-t.y)/t.height,s=(i.y-t.y)/t.height;return"horizontal"==this.zoomOption.orient?(n=this._zoom.end-this._zoom.start,this._zoom.start+=n*a,this._zoom.end-=n*o,n=this._zoom.end2-this._zoom.start2,this._zoom.start2+=n*r,this._zoom.end2-=n*s):(n=this._zoom.end-this._zoom.start,this._zoom.start+=n*r,this._zoom.end-=n*s,n=this._zoom.end2-this._zoom.start2,this._zoom.start2+=n*a,this._zoom.end2-=n*o),this._syncShape(),this._syncData(!0),this._zoom},
/* syncBackupData: copy externally-updated series data back into the backed-up (un-zoomed) arrays, offset by the current zoom start index. */
syncBackupData:function(e){for(var t,i,n=this._originalData.series,a=e.series,o=0,r=a.length;r>o;o++){i=a[o].data||a[o].eventList,t=n[o]?Math.floor(this._zoom.start/100*n[o].length):0;for(var s=0,l=i.length;l>s;s++)n[o]&&(n[o][s+t]=i[s])}},
/* syncOption: bind a new option object — re-read dataZoom config (realtime disabled without canvas), recompute location and zoom state, re-back-up data and rebuild shapes, with events silenced throughout. */
syncOption:function(e){this.silence(!0),this.option=e,this.option.dataZoom=this.reformOption(this.option.dataZoom),this.zoomOption=this.option.dataZoom,this.myChart.canvasSupported||(this.zoomOption.realtime=!1),this.clear(),this._location=this._getLocation(),this._zoom=this._getZoom(),this._backupData(),this.option.dataZoom&&this.option.dataZoom.show&&this._buildShape(),this._syncData(),this.silence(!1)},
/* silence: toggle suppression of DATA_ZOOM event dispatch. */
silence:function(e){this._isSilence=e},
/* getRealDataIndex: map index t of the zoomed (sliced) series e back to its index in the original full series; -1 when the series is unknown. */
getRealDataIndex:function(e,t){if(!this._originalData||0===this._zoom.start&&100==this._zoom.end)return t;var i=this._originalData.series;return i[e]?Math.floor(this._zoom.start/100*i[e].length)+t:-1},
/* resize: recompute layout and rebuild the control when the chart resizes. */
resize:function(){this.clear(),this._location=this._getLocation(),this._zoom=this._getZoom(),this.option.dataZoom.show&&this._buildShape()}},l.inherits(t,i),e("../component").define("dataZoom",t),t}),
/* ---- echarts/component/categoryAxis: discrete (ordinal) axis — ticks, labels, split lines/areas laid out over the grid. Constructor t bails out (with a console.error) when option.data is empty. */
define("echarts/component/categoryAxis",["require","./base","zrender/shape/Text","zrender/shape/Line","zrender/shape/Rectangle","../config","zrender/tool/util","zrender/tool/area","../component"],function(e){function t(e,t,n,a,o,r){if(a.data.length<1)return void console.error("option.data.length < 1.");i.call(this,e,t,n,a,o),this.grid=this.component.grid;for(var s in r)this[s]=r[s];this.refresh(a)}var i=e("./base"),n=e("zrender/shape/Text"),a=e("zrender/shape/Line"),o=e("zrender/shape/Rectangle"),r=e("../config");
/* Default theme options for a category axis. */
r.categoryAxis={zlevel:0,z:0,show:!0,position:"bottom",name:"",nameLocation:"end",nameTextStyle:{},boundaryGap:!0,axisLine:{show:!0,onZero:!0,lineStyle:{color:"#48b",width:2,type:"solid"}},axisTick:{show:!0,interval:"auto",inside:!1,length:5,lineStyle:{color:"#333",width:1}},axisLabel:{show:!0,interval:"auto",rotate:0,margin:8,textStyle:{color:"#333"}},splitLine:{show:!0,lineStyle:{color:["#ccc"],width:1,type:"solid"}},splitArea:{show:!1,areaStyle:{color:["rgba(250,250,250,0.3)","rgba(200,200,200,0.3)"]}}};var s=e("zrender/tool/util"),l=e("zrender/tool/area");return t.prototype={type:r.COMPONENT_TYPE_AXIS_CATEGORY,
/* _getReformedLabel: apply the per-datum or axis-level label formatter (function or "{value}" template) to the data item at index e. */
_getReformedLabel:function(e){var t=this.getDataFromOption(this.option.data[e]),i=this.option.data[e].formatter||this.option.axisLabel.formatter;return i&&("function"==typeof i?t=i.call(this.myChart,t):"string"==typeof i&&(t=i.replace("{value}",t))),t},
/* _getInterval: resolve axisLabel.interval; "auto" grows the step until measured label widths no longer overlap the available gap (estimated width for unrotated default-style labels). */
_getInterval:function(){var e=this.option.axisLabel.interval;if("auto"==e){var t=this.option.axisLabel.textStyle.fontSize,i=this.option.data,n=this.option.data.length;if(this.isHorizontal())if(n>3){var a,o,r=this.getGap(),h=!1,d=Math.floor(.5/r);for(d=1>d?1:d,e=Math.floor(15/r);!h&&n>e;){e+=d,h=!0,a=Math.floor(r*e);for(var c=Math.floor((n-1)/e)*e;c>=0;c-=e){if(0!==this.option.axisLabel.rotate)o=t;else if(i[c].textStyle)o=l.getTextWidth(this._getReformedLabel(c),this.getFont(s.merge(i[c].textStyle,this.option.axisLabel.textStyle)));else{var m=this._getReformedLabel(c)+"",p=(m.match(/\w/g)||"").length,u=m.length-p;o=p*t*2/3+u*t}if(o>a){h=!1;break}}}}else e=1;else if(n>3){var r=this.getGap();for(e=Math.floor(11/r);t>r*e-6&&n>e;)e++}else e=1}else e="function"==typeof e?1:e-0+1;return e},
/* _buildShape: build split areas/lines, axis line, ticks and labels (each gated by its show flag) and register every shape with zrender. */
_buildShape:function(){if(this._interval=this._getInterval(),this.option.show){this.option.splitArea.show&&this._buildSplitArea(),this.option.splitLine.show&&this._buildSplitLine(),this.option.axisLine.show&&this._buildAxisLine(),this.option.axisTick.show&&this._buildAxisTick(),this.option.axisLabel.show&&this._buildAxisLabel();for(var e=0,t=this.shapeList.length;t>e;e++)this.zr.addShape(this.shapeList[e])}},
/* _buildAxisTick: one short Line shape per visible tick, on the grid side given by position/inside; onGap (or boundaryGap default) shifts ticks half a gap. */
_buildAxisTick:function(){var e,t=this.option.data,i=this.option.data.length,n=this.option.axisTick,o=n.length,r=n.lineStyle.color,s=n.lineStyle.width,l="function"==typeof n.interval?n.interval:"auto"==n.interval&&"function"==typeof this.option.axisLabel.interval?this.option.axisLabel.interval:!1,h=l?1:"auto"==n.interval?this._interval:n.interval-0+1,d=n.onGap,c=d?this.getGap()/2:"undefined"==typeof d&&this.option.boundaryGap?this.getGap()/2:0,m=c>0?-h:0;if(this.isHorizontal())for(var p,u="bottom"==this.option.position?n.inside?this.grid.getYend()-o-1:this.grid.getYend()+1:n.inside?this.grid.getY()+1:this.grid.getY()-o-1,V=m;i>V;V+=h)(!l||l(V,t[V]))&&(p=this.subPixelOptimize(this.getCoordByIndex(V)+(V>=0?c:0),s),e={_axisShape:"axisTick",zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:p,yStart:u,xEnd:p,yEnd:u+o,strokeColor:r,lineWidth:s}},this.shapeList.push(new a(e)));else for(var U,g="left"==this.option.position?n.inside?this.grid.getX()+1:this.grid.getX()-o-1:n.inside?this.grid.getXend()-o-1:this.grid.getXend()+1,V=m;i>V;V+=h)(!l||l(V,t[V]))&&(U=this.subPixelOptimize(this.getCoordByIndex(V)-(V>=0?c:0),s),e={_axisShape:"axisTick",zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:g,yStart:U,xEnd:g+o,yEnd:U,strokeColor:r,lineWidth:s}},this.shapeList.push(new a(e)))},
/* _buildAxisLabel: one Text shape per visible label, honoring per-datum textStyle, rotation and the clickable wrapper. */
_buildAxisLabel:function(){var e,t,i=this.option.data,a=this.option.data.length,o=this.option.axisLabel,r=o.rotate,l=o.margin,h=o.clickable,d=o.textStyle,c="function"==typeof o.interval?o.interval:!1;if(this.isHorizontal()){var m,p;"bottom"==this.option.position?(m=this.grid.getYend()+l,p="top"):(m=this.grid.getY()-l,p="bottom");for(var u=0;a>u;u+=this._interval)c&&!c(u,i[u])||""===this._getReformedLabel(u)||(t=s.merge(i[u].textStyle||{},d),e={zlevel:this.getZlevelBase(),z:this.getZBase()+3,hoverable:!1,style:{x:this.getCoordByIndex(u),y:m,color:t.color,text:this._getReformedLabel(u),textFont:this.getFont(t),textAlign:t.align||"center",textBaseline:t.baseline||p}},r&&(e.style.textAlign=r>0?"bottom"==this.option.position?"right":"left":"bottom"==this.option.position?"left":"right",e.rotation=[r*Math.PI/180,e.style.x,e.style.y]),this.shapeList.push(new n(this._axisLabelClickable(h,e))))}else{var V,U;"left"==this.option.position?(V=this.grid.getX()-l,U="right"):(V=this.grid.getXend()+l,U="left");for(var u=0;a>u;u+=this._interval)c&&!c(u,i[u])||""===this._getReformedLabel(u)||(t=s.merge(i[u].textStyle||{},d),e={zlevel:this.getZlevelBase(),z:this.getZBase()+3,hoverable:!1,style:{x:V,y:this.getCoordByIndex(u),color:t.color,text:this._getReformedLabel(u),textFont:this.getFont(t),textAlign:t.align||U,textBaseline:t.baseline||0===u&&""!==this.option.name?"bottom":u==a-1&&""!==this.option.name?"top":"middle"}},r&&(e.rotation=[r*Math.PI/180,e.style.x,e.style.y]),this.shapeList.push(new n(this._axisLabelClickable(h,e))))}},
/* _buildSplitLine: grid lines across the plot area at each visible category, cycling through the configured color list. */
_buildSplitLine:function(){var e,t=this.option.data,i=this.option.data.length,n=this.option.splitLine,o=n.lineStyle.type,r=n.lineStyle.width,s=n.lineStyle.color;s=s instanceof Array?s:[s];var l=s.length,h="function"==typeof this.option.axisLabel.interval?this.option.axisLabel.interval:!1,d=n.onGap,c=d?this.getGap()/2:"undefined"==typeof d&&this.option.boundaryGap?this.getGap()/2:0;if(i-=d||"undefined"==typeof d&&this.option.boundaryGap?1:0,this.isHorizontal())for(var m,p=this.grid.getY(),u=this.grid.getYend(),V=0;i>V;V+=this._interval)(!h||h(V,t[V]))&&(m=this.subPixelOptimize(this.getCoordByIndex(V)+c,r),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:m,yStart:p,xEnd:m,yEnd:u,strokeColor:s[V/this._interval%l],lineType:o,lineWidth:r}},this.shapeList.push(new a(e)));else for(var U,g=this.grid.getX(),f=this.grid.getXend(),V=0;i>V;V+=this._interval)(!h||h(V,t[V]))&&(U=this.subPixelOptimize(this.getCoordByIndex(V)-c,r),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:g,yStart:U,xEnd:f,yEnd:U,strokeColor:s[V/this._interval%l],lineType:o,lineWidth:r}},this.shapeList.push(new a(e)))},
/* _buildSplitArea: alternating background bands between categories when areaStyle.color is an array; a single full-grid rectangle otherwise (continues past this segment). */
_buildSplitArea:function(){var e,t=this.option.data,i=this.option.splitArea,n=i.areaStyle.color;if(n instanceof Array){var a=n.length,r=this.option.data.length,s="function"==typeof this.option.axisLabel.interval?this.option.axisLabel.interval:!1,l=i.onGap,h=l?this.getGap()/2:"undefined"==typeof l&&this.option.boundaryGap?this.getGap()/2:0;if(this.isHorizontal())for(var d,c=this.grid.getY(),m=this.grid.getHeight(),p=this.grid.getX(),u=0;r>=u;u+=this._interval)s&&!s(u,t[u])&&r>u||(d=r>u?this.getCoordByIndex(u)+h:this.grid.getXend(),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:p,y:c,width:d-p,height:m,color:n[u/this._interval%a]}},this.shapeList.push(new o(e)),p=d);else for(var V,U=this.grid.getX(),g=this.grid.getWidth(),f=this.grid.getYend(),u=0;r>=u;u+=this._interval)s&&!s(u,t[u])&&r>u||(V=r>u?this.getCoordByIndex(u)-h:this.grid.getY(),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:U,y:V,width:g,height:f-V,color:n[u/this._interval%a]}},this.shapeList.push(new o(e)),f=V)}else e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this.grid.getX(),y:this.grid.getY(),width:this.grid.getWidth(),height:this.grid.getHeight(),color:n}},this.shapeList.push(new 
o(e))},refresh:function(e){e&&(this.option=this.reformOption(e),this.option.axisLabel.textStyle=this.getTextStyle(this.option.axisLabel.textStyle)),this.clear(),this._buildShape()},getGap:function(){var e=this.option.data.length,t=this.isHorizontal()?this.grid.getWidth():this.grid.getHeight();return this.option.boundaryGap?t/e:t/(e>1?e-1:1)},getCoord:function(e){for(var t=this.option.data,i=t.length,n=this.getGap(),a=this.option.boundaryGap?n/2:0,o=0;i>o;o++){if(this.getDataFromOption(t[o])==e)return a=this.isHorizontal()?this.grid.getX()+a:this.grid.getYend()-a;a+=n}},getCoordByIndex:function(e){if(0>e)return this.isHorizontal()?this.grid.getX():this.grid.getYend();if(e>this.option.data.length-1)return this.isHorizontal()?this.grid.getXend():this.grid.getY();var t=this.getGap(),i=this.option.boundaryGap?t/2:0;return i+=e*t,i=this.isHorizontal()?this.grid.getX()+i:this.grid.getYend()-i},getNameByIndex:function(e){return this.getDataFromOption(this.option.data[e])},getIndexByName:function(e){for(var t=this.option.data,i=t.length,n=0;i>n;n++)if(this.getDataFromOption(t[n])==e)return n;return-1},getValueFromCoord:function(){return""},isMainAxis:function(e){return e%this._interval===0}},s.inherits(t,i),e("../component").define("categoryAxis",t),t}),define("echarts/component/valueAxis",["require","./base","zrender/shape/Text","zrender/shape/Line","zrender/shape/Rectangle","../config","../util/date","zrender/tool/util","../util/smartSteps","../util/accMath","../util/smartLogSteps","../component"],function(e){function t(e,t,n,a,o,r,s){if(!s||0===s.length)return void console.err("option.series.length == 0.");i.call(this,e,t,n,a,o),this.series=s,this.grid=this.component.grid;for(var l in r)this[l]=r[l];this.refresh(a,s)}var 
i=e("./base"),n=e("zrender/shape/Text"),a=e("zrender/shape/Line"),o=e("zrender/shape/Rectangle"),r=e("../config");
/* Default theme options for a value axis. */
r.valueAxis={zlevel:0,z:0,show:!0,position:"left",name:"",nameLocation:"end",nameTextStyle:{},boundaryGap:[0,0],axisLine:{show:!0,onZero:!0,lineStyle:{color:"#48b",width:2,type:"solid"}},axisTick:{show:!1,inside:!1,length:5,lineStyle:{color:"#333",width:1}},axisLabel:{show:!0,rotate:0,margin:8,textStyle:{color:"#333"}},splitLine:{show:!0,lineStyle:{color:["#ccc"],width:1,type:"solid"}},splitArea:{show:!1,areaStyle:{color:["rgba(250,250,250,0.3)","rgba(200,200,200,0.3)"]}}};var s=e("../util/date"),l=e("zrender/tool/util");return t.prototype={type:r.COMPONENT_TYPE_AXIS_VALUE,
/* _buildShape: compute the value range/tick list first; only builds and registers shapes when data exists and the axis is shown. */
_buildShape:function(){if(this._hasData=!1,this._calculateValue(),this._hasData&&this.option.show){this.option.splitArea.show&&this._buildSplitArea(),this.option.splitLine.show&&this._buildSplitLine(),this.option.axisLine.show&&this._buildAxisLine(),this.option.axisTick.show&&this._buildAxisTick(),this.option.axisLabel.show&&this._buildAxisLabel();for(var e=0,t=this.shapeList.length;t>e;e++)this.zr.addShape(this.shapeList[e])}},
/* _buildAxisTick: one short Line per computed tick value, on the grid side given by position/inside. */
_buildAxisTick:function(){var e,t=this._valueList,i=this._valueList.length,n=this.option.axisTick,o=n.length,r=n.lineStyle.color,s=n.lineStyle.width;if(this.isHorizontal())for(var l,h="bottom"===this.option.position?n.inside?this.grid.getYend()-o-1:this.grid.getYend()+1:n.inside?this.grid.getY()+1:this.grid.getY()-o-1,d=0;i>d;d++)l=this.subPixelOptimize(this.getCoord(t[d]),s),e={_axisShape:"axisTick",zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:l,yStart:h,xEnd:l,yEnd:h+o,strokeColor:r,lineWidth:s}},this.shapeList.push(new a(e));else for(var c,m="left"===this.option.position?n.inside?this.grid.getX()+1:this.grid.getX()-o-1:n.inside?this.grid.getXend()-o-1:this.grid.getXend()+1,d=0;i>d;d++)c=this.subPixelOptimize(this.getCoord(t[d]),s),e={_axisShape:"axisTick",zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:m,yStart:c,xEnd:m+o,yEnd:c,strokeColor:r,lineWidth:s}},this.shapeList.push(new a(e))},
/* _buildAxisLabel: one Text per tick using the precomputed _valueLabel strings; textStyle.color may be a function of the tick value. */
_buildAxisLabel:function(){var e,t=this._valueList,i=this._valueList.length,a=this.option.axisLabel.rotate,o=this.option.axisLabel.margin,r=this.option.axisLabel.clickable,s=this.option.axisLabel.textStyle;if(this.isHorizontal()){var l,h;"bottom"===this.option.position?(l=this.grid.getYend()+o,h="top"):(l=this.grid.getY()-o,h="bottom");for(var d=0;i>d;d++)e={zlevel:this.getZlevelBase(),z:this.getZBase()+3,hoverable:!1,style:{x:this.getCoord(t[d]),y:l,color:"function"==typeof s.color?s.color(t[d]):s.color,text:this._valueLabel[d],textFont:this.getFont(s),textAlign:s.align||"center",textBaseline:s.baseline||h}},a&&(e.style.textAlign=a>0?"bottom"===this.option.position?"right":"left":"bottom"===this.option.position?"left":"right",e.rotation=[a*Math.PI/180,e.style.x,e.style.y]),this.shapeList.push(new n(this._axisLabelClickable(r,e)))}else{var c,m;"left"===this.option.position?(c=this.grid.getX()-o,m="right"):(c=this.grid.getXend()+o,m="left");for(var d=0;i>d;d++)e={zlevel:this.getZlevelBase(),z:this.getZBase()+3,hoverable:!1,style:{x:c,y:this.getCoord(t[d]),color:"function"==typeof s.color?s.color(t[d]):s.color,text:this._valueLabel[d],textFont:this.getFont(s),textAlign:s.align||m,textBaseline:s.baseline||(0===d&&""!==this.option.name?"bottom":d===i-1&&""!==this.option.name?"top":"middle")}},a&&(e.rotation=[a*Math.PI/180,e.style.x,e.style.y]),this.shapeList.push(new n(this._axisLabelClickable(r,e)))}},
/* _buildSplitLine: grid line across the plot area at each tick value, cycling through the color list. */
_buildSplitLine:function(){var e,t=this._valueList,i=this._valueList.length,n=this.option.splitLine,o=n.lineStyle.type,r=n.lineStyle.width,s=n.lineStyle.color;s=s instanceof Array?s:[s];var l=s.length;if(this.isHorizontal())for(var h,d=this.grid.getY(),c=this.grid.getYend(),m=0;i>m;m++)h=this.subPixelOptimize(this.getCoord(t[m]),r),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:h,yStart:d,xEnd:h,yEnd:c,strokeColor:s[m%l],lineType:o,lineWidth:r}},this.shapeList.push(new a(e));else for(var p,u=this.grid.getX(),V=this.grid.getXend(),m=0;i>m;m++)p=this.subPixelOptimize(this.getCoord(t[m]),r),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{xStart:u,yStart:p,xEnd:V,yEnd:p,strokeColor:s[m%l],lineType:o,lineWidth:r}},this.shapeList.push(new a(e))},
/* _buildSplitArea: alternating bands between tick values when areaStyle.color is an array, otherwise one full-grid rectangle. */
_buildSplitArea:function(){var e,t=this.option.splitArea.areaStyle.color;if(t instanceof Array){var i=t.length,n=this._valueList,a=this._valueList.length;if(this.isHorizontal())for(var r,s=this.grid.getY(),l=this.grid.getHeight(),h=this.grid.getX(),d=0;a>=d;d++)r=a>d?this.getCoord(n[d]):this.grid.getXend(),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:h,y:s,width:r-h,height:l,color:t[d%i]}},this.shapeList.push(new o(e)),h=r;else for(var c,m=this.grid.getX(),p=this.grid.getWidth(),u=this.grid.getYend(),d=0;a>=d;d++)c=a>d?this.getCoord(n[d]):this.grid.getY(),e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:m,y:c,width:p,height:u-c,color:t[d%i]}},this.shapeList.push(new o(e)),u=c}else e={zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this.grid.getX(),y:this.grid.getY(),width:this.grid.getWidth(),height:this.grid.getHeight(),color:t}},this.shapeList.push(new o(e))},
/* _calculateValue: derive _min/_max from the bound series (per legend selection, stacking and boundaryGap) when option.min/max are not numeric, then reform ticks by axis type (value/time/log); with explicit min+max it only formats ticks. */
_calculateValue:function(){if(isNaN(this.option.min-0)||isNaN(this.option.max-0)){for(var e,t,i={},n=this.component.legend,a=0,o=this.series.length;o>a;a++)!(this.series[a].type!=r.CHART_TYPE_LINE&&this.series[a].type!=r.CHART_TYPE_BAR&&this.series[a].type!=r.CHART_TYPE_SCATTER&&this.series[a].type!=r.CHART_TYPE_K&&this.series[a].type!=r.CHART_TYPE_EVENTRIVER||n&&!n.isSelected(this.series[a].name)||(e=this.series[a].xAxisIndex||0,t=this.series[a].yAxisIndex||0,this.option.xAxisIndex!=e&&this.option.yAxisIndex!=t||!this._calculSum(i,a)));var s;for(var a in i){s=i[a];for(var l=0,h=s.length;h>l;l++)if(!isNaN(s[l])){this._hasData=!0,this._min=s[l],this._max=s[l];break}if(this._hasData)break}for(var a in i){s=i[a];for(var l=0,h=s.length;h>l;l++)isNaN(s[l])||(this._min=Math.min(this._min,s[l]),this._max=Math.max(this._max,s[l]))}var d="log"!==this.option.type?this.option.boundaryGap:[0,0],c=Math.abs(this._max-this._min);this._min=isNaN(this.option.min-0)?this._min-Math.abs(c*d[0]):this.option.min-0,this._max=isNaN(this.option.max-0)?this._max+Math.abs(c*d[1]):this.option.max-0,this._min===this._max&&(0===this._max?this._max=1:this._max>0?this._min=this._max/this.option.splitNumber!=null?this.option.splitNumber:5:this._max=this._max/this.option.splitNumber!=null?this.option.splitNumber:5),"time"===this.option.type?this._reformTimeValue():"log"===this.option.type?this._reformLogValue():this._reformValue(this.option.scale)}else this._hasData=!0,this._min=this.option.min-0,this._max=this.option.max-0,"time"===this.option.type?this._reformTimeValue():"log"===this.option.type?this._reformLogValue():this._customerValue()},
/* _calculSum: collect the values of series t into map e — stacked series accumulate into per-stack positive/negative buckets, K-type pushes all four OHLC values, array datapoints contribute x and/or y per the bound axis index. */
_calculSum:function(e,t){var i,n,a=this.series[t].name||"kener";if(this.series[t].stack){var o="__Magic_Key_Positive__"+this.series[t].stack,l="__Magic_Key_Negative__"+this.series[t].stack;e[o]=e[o]||[],e[l]=e[l]||[],e[a]=e[a]||[],n=this.series[t].data;for(var h=0,d=n.length;d>h;h++)i=this.getDataFromOption(n[h]),"-"!==i&&(i-=0,i>=0?null!=e[o][h]?e[o][h]+=i:e[o][h]=i:null!=e[l][h]?e[l][h]+=i:e[l][h]=i,this.option.scale&&e[a].push(i))}else if(e[a]=e[a]||[],this.series[t].type!=r.CHART_TYPE_EVENTRIVER){n=this.series[t].data;for(var h=0,d=n.length;d>h;h++)i=this.getDataFromOption(n[h]),this.series[t].type===r.CHART_TYPE_K?(e[a].push(i[0]),e[a].push(i[1]),e[a].push(i[2]),e[a].push(i[3])):i instanceof Array?(-1!=this.option.xAxisIndex&&e[a].push("time"!=this.option.type?i[0]:s.getNewDate(i[0])),-1!=this.option.yAxisIndex&&e[a].push("time"!=this.option.type?i[1]:s.getNewDate(i[1]))):e[a].push(i)}else{n=this.series[t].data;for(var h=0,d=n.length;d>h;h++)for(var c=n[h].evolution,m=0,p=c.length;p>m;m++)e[a].push(s.getNewDate(c[m].time))}},
/* _reformValue: round min/max and pick tick values via smartSteps (anchoring the range at 0 unless scale is on). */
_reformValue:function(t){var i=e("../util/smartSteps"),n=this.option.splitNumber;!t&&this._min>=0&&this._max>=0&&(this._min=0),!t&&this._min<=0&&this._max<=0&&(this._max=0);var a=i(this._min,this._max,n);n=null!=n?n:a.secs,this._min=a.min,this._max=a.max,this._valueList=a.pnts,this._reformLabelData()},
/* _reformTimeValue: choose a date formatter/step via getAutoFormatter, align the first tick on a natural calendar boundary and step through the range. */
_reformTimeValue:function(){var e=null!=this.option.splitNumber?this.option.splitNumber:5,t=s.getAutoFormatter(this._min,this._max,e),i=t.formatter,n=t.gapValue;this._valueList=[s.getNewDate(this._min)];var a;switch(i){case"week":a=s.nextMonday(this._min);break;case"month":a=s.nextNthOnMonth(this._min,1);break;case"quarter":a=s.nextNthOnQuarterYear(this._min,1);break;case"half-year":a=s.nextNthOnHalfYear(this._min,1);break;case"year":a=s.nextNthOnYear(this._min,1);break;default:72e5>=n?a=(Math.floor(this._min/n)+1)*n:(a=s.getNewDate(this._min- -n),a.setHours(6*Math.round(a.getHours()/6)),a.setMinutes(0),a.setSeconds(0))}for(a-this._min<n/2&&(a-=-n),t=s.getNewDate(a),e*=1.5;e-->=0&&(("month"==i||"quarter"==i||"half-year"==i||"year"==i)&&t.setDate(1),!(this._max-t<n/2));)this._valueList.push(t),t=s.getNewDate(t- -n);this._valueList.push(s.getNewDate(this._max)),this._reformLabelData(function(e){return function(t){return s.format(e,t)}}(i))},
/* _customerValue: fixed min/max supplied by the user — evenly spaced ticks via exact accAdd/accMul arithmetic. */
_customerValue:function(){var t=e("../util/accMath"),i=null!=this.option.splitNumber?this.option.splitNumber:5,n=(this._max-this._min)/i;this._valueList=[];for(var a=0;i>=a;a++)this._valueList.push(t.accAdd(this._min,t.accMul(n,a)));this._reformLabelData()},
/* _reformLogValue: delegate tick/range/mapping computation to smartLogSteps; stores dataMappingMethods used by getCoord/getValueFromCoord. */
_reformLogValue:function(){var t=this.option,i=e("../util/smartLogSteps")({dataMin:this._min,dataMax:this._max,logPositive:t.logPositive,logLabelBase:t.logLabelBase,splitNumber:t.splitNumber});this._min=i.dataMin,this._max=i.dataMax,this._valueList=i.tickList,this._dataMappingMethods=i.dataMappingMethods,this._reformLabelData(i.labelFormatter)},
/* _reformLabelData: render _valueList into _valueLabel via axisLabel.formatter (function or "{value}" template), the type-specific formatter e, or comma-grouped numbers. */
_reformLabelData:function(e){this._valueLabel=[];var t=this.option.axisLabel.formatter;if(t)for(var i=0,n=this._valueList.length;n>i;i++)"function"==typeof t?this._valueLabel.push(e?t.call(this.myChart,this._valueList[i],e):t.call(this.myChart,this._valueList[i])):"string"==typeof t&&this._valueLabel.push(e?s.format(t,this._valueList[i]):t.replace("{value}",this._valueList[i]));else for(var i=0,n=this._valueList.length;n>i;i++)this._valueLabel.push(e?e(this._valueList[i]):this.numAddCommas(this._valueList[i]))},
/* getExtremum: recompute and expose min/max (plus log mapping methods) — used e.g. by dataZoom's scatter map. */
getExtremum:function(){this._calculateValue();var e=this._dataMappingMethods;return{min:this._min,max:this._max,dataMappingMethods:e?l.merge({},e):null}},
/* refresh: re-reform option and series then rebuild shapes when a zrender instance is attached. */
refresh:function(e,t){e&&(this.option=this.reformOption(e),this.option.axisLabel.textStyle=l.merge(this.option.axisLabel.textStyle||{},this.ecTheme.textStyle),this.series=t),this.zr&&(this.clear(),this._buildShape())},
/* getCoord: value -> pixel coordinate, clamped to [_min,_max] (log values mapped first). */
getCoord:function(e){this._dataMappingMethods&&(e=this._dataMappingMethods.value2Coord(e)),e=e<this._min?this._min:e,e=e>this._max?this._max:e;var t;return t=this.isHorizontal()?this.grid.getX()+(e-this._min)/(this._max-this._min)*this.grid.getWidth():this.grid.getYend()-(e-this._min)/(this._max-this._min)*this.grid.getHeight()},
/* getCoordSize: pixel length corresponding to a value span e. */
getCoordSize:function(e){return Math.abs(this.isHorizontal()?e/(this._max-this._min)*this.grid.getWidth():e/(this._max-this._min)*this.grid.getHeight())},
/* getValueFromCoord: pixel -> value, clamped to the grid (continues past this segment). */
getValueFromCoord:function(e){var t;return 
this.isHorizontal()?(e=e<this.grid.getX()?this.grid.getX():e,e=e>this.grid.getXend()?this.grid.getXend():e,t=this._min+(e-this.grid.getX())/this.grid.getWidth()*(this._max-this._min)):(e=e<this.grid.getY()?this.grid.getY():e,e=e>this.grid.getYend()?this.grid.getYend():e,t=this._max-(e-this.grid.getY())/this.grid.getHeight()*(this._max-this._min)),this._dataMappingMethods&&(t=this._dataMappingMethods.coord2Value(t)),t.toFixed(2)-0},isMaindAxis:function(e){for(var t=0,i=this._valueList.length;i>t;t++)if(this._valueList[t]===e)return!0;return!1}},l.inherits(t,i),e("../component").define("valueAxis",t),t}),define("echarts/util/date",[],function(){function e(e,t,i){i=i>1?i:2;for(var n,a,o,r,s=0,l=d.length;l>s;s++)if(n=d[s].value,a=Math.ceil(t/n)*n-Math.floor(e/n)*n,Math.round(a/n)<=1.2*i){o=d[s].formatter,r=d[s].value;break}return null==o&&(o="year",n=317088e5,a=Math.ceil(t/n)*n-Math.floor(e/n)*n,r=Math.round(a/(i-1)/n)*n),{formatter:o,gapValue:r}}function t(e){return 10>e?"0"+e:e}function i(e,i){("week"==e||"month"==e||"quarter"==e||"half-year"==e||"year"==e)&&(e="MM - dd\nyyyy");var n=h(i),a=n.getFullYear(),o=n.getMonth()+1,r=n.getDate(),s=n.getHours(),l=n.getMinutes(),d=n.getSeconds();return e=e.replace("MM",t(o)),e=e.toLowerCase(),e=e.replace("yyyy",a),e=e.replace("yy",a%100),e=e.replace("dd",t(r)),e=e.replace("d",r),e=e.replace("hh",t(s)),e=e.replace("h",s),e=e.replace("mm",t(l)),e=e.replace("m",l),e=e.replace("ss",t(d)),e=e.replace("s",d)}function n(e){return e=h(e),e.setDate(e.getDate()+8-e.getDay()),e}function a(e,t,i){return e=h(e),e.setMonth(Math.ceil((e.getMonth()+1)/i)*i),e.setDate(t),e}function o(e,t){return a(e,t,1)}function r(e,t){return a(e,t,3)}function s(e,t){return a(e,t,6)}function l(e,t){return a(e,t,12)}function h(e){return e instanceof Date?e:new Date("string"==typeof e?e.replace(/-/g,"/"):e)}var d=[{formatter:"hh : mm : ss",value:1e3},{formatter:"hh : mm : ss",value:5e3},{formatter:"hh : mm : ss",value:1e4},{formatter:"hh : mm : 
ss",value:15e3},{formatter:"hh : mm : ss",value:3e4},{formatter:"hh : mm\nMM - dd",value:6e4},{formatter:"hh : mm\nMM - dd",value:3e5},{formatter:"hh : mm\nMM - dd",value:6e5},{formatter:"hh : mm\nMM - dd",value:9e5},{formatter:"hh : mm\nMM - dd",value:18e5},{formatter:"hh : mm\nMM - dd",value:36e5},{formatter:"hh : mm\nMM - dd",value:72e5},{formatter:"hh : mm\nMM - dd",value:216e5},{formatter:"hh : mm\nMM - dd",value:432e5},{formatter:"MM - dd\nyyyy",value:864e5},{formatter:"week",value:6048e5},{formatter:"month",value:26784e5},{formatter:"quarter",value:8208e6},{formatter:"half-year",value:16416e6},{formatter:"year",value:32832e6}];return{getAutoFormatter:e,getNewDate:h,format:i,nextMonday:n,nextNthPerNmonth:a,nextNthOnMonth:o,nextNthOnQuarterYear:r,nextNthOnHalfYear:s,nextNthOnYear:l}}),define("echarts/util/smartSteps",[],function(){function e(e){return w.log(S(e))/w.LN10}function t(e){return w.pow(10,e)}function i(e){return e===X(e)}function n(e,t,n,a){y=a||{},b=y.steps||v,_=y.secs||L,n=W(+n||0)%99,e=+e||0,t=+t||0,x=k=0,"min"in y&&(e=+y.min||0,x=1),"max"in y&&(t=+y.max||0,k=1),e>t&&(t=[e,e=t][0]);var o=t-e;if(x&&k)return f(e,t,n);if((n||5)>o){if(i(e)&&i(t))return p(e,t,n);if(0===o)return u(e,t,n)}return h(e,t,n)}function a(e,i,n,a){a=a||0;var s=o((i-e)/n,-1),l=o(e,-1,1),h=o(i,-1),d=w.min(s.e,l.e,h.e);0===l.c?d=w.min(s.e,h.e):0===h.c&&(d=w.min(s.e,l.e)),r(s,{c:0,e:d}),r(l,s,1),r(h,s),a+=d,e=l.c,i=h.c;for(var c=(i-e)/n,m=t(a),p=0,u=[],V=n+1;V--;)u[V]=(e+c*V)*m;if(0>a){p=U(m),c=+(c*m).toFixed(p),e=+(e*m).toFixed(p),i=+(i*m).toFixed(p);for(var V=u.length;V--;)u[V]=u[V].toFixed(p),0===+u[V]&&(u[V]="0")}else e*=m,i*=m,c*=m;return _=0,b=0,y=0,{min:e,max:i,secs:n,step:c,fix:p,exp:a,pnts:u}}function o(n,a,o){a=W(a%10)||2,0>a&&(i(n)?a=(""+S(n)).replace(/0+$/,"").length||1:(n=n.toFixed(15).replace(/0+$/,""),a=n.replace(".","").replace(/^[-0]+/,"").length,n=+n));var r=X(e(n))-a+1,s=+(n*t(-r)).toFixed(15)||0;return 
s=o?X(s):I(s),!s&&(r=0),(""+S(s)).length>a&&(r+=1,s/=10),{c:s,e:r}}function r(e,i,n){var a=i.e-e.e;a&&(e.e+=a,e.c*=t(-a),e.c=n?X(e.c):I(e.c))}function s(e,t,i){e.e<t.e?r(t,e,i):r(e,t,i)}function l(e,t){t=t||v,e=o(e);for(var i=e.c,n=0;i>t[n];)n++;if(!t[n])for(i/=10,e.e+=1,n=0;i>t[n];)n++;return e.c=t[n],e}function h(e,t,n){var s,h=n||+_.slice(-1),u=l((t-e)/h,b),U=o(t-e),f=o(e,-1,1),y=o(t,-1);if(r(U,u),r(f,u,1),r(y,u),n?s=c(f,y,h):h=d(f,y),i(e)&&i(t)&&e*t>=0){if(h>t-e)return p(e,t,h);h=m(e,t,n,f,y,h)}var v=V(e,t,f.c,y.c);return f.c=v[0],y.c=v[1],(x||k)&&g(e,t,f,y),a(f.c,y.c,h,y.e)}function d(e,i){for(var n,a,o,r,s=[],h=_.length;h--;)n=_[h],a=l((i.c-e.c)/n,b),a=a.c*t(a.e),o=X(e.c/a)*a,r=I(i.c/a)*a,s[h]={min:o,max:r,step:a,span:r-o};return s.sort(function(e,t){var i=e.span-t.span;return 0===i&&(i=e.step-t.step),i}),s=s[0],n=s.span/s.step,e.c=s.min,i.c=s.max,3>n?2*n:n}function c(e,i,n){for(var a,o,r=i.c,s=(i.c-e.c)/n-1;r>e.c;)s=l(s+1,b),s=s.c*t(s.e),a=s*n,o=I(i.c/s)*s,r=o-a;var h=e.c-r,d=o-i.c,c=h-d;return c>1.1*s&&(c=W(c/s/2)*s,r+=c,o+=c),e.c=r,i.c=o,s}function m(e,n,a,o,r,s){var l=r.c-o.c,h=l/s*t(r.e);if(!i(h)&&(h=X(h),l=h*s,n-e>l&&(h+=1,l=h*s,!a&&h*(s-1)>=n-e&&(s-=1,l=h*s)),l>=n-e)){var d=l-(n-e);o.c=W(e-d/2),r.c=W(n+d/2),o.e=0,r.e=0}return s}function p(e,t,i){if(i=i||5,x)t=e+i;else if(k)e=t-i;else{var n=i-(t-e),o=W(e-n/2),r=W(t+n/2),s=V(e,t,o,r);e=s[0],t=s[1]}return a(e,t,i)}function u(e,t,i){i=i||5;var n=w.min(S(t/i),i)/2.1;return x?t=e+n:k?e=t-n:(e-=n,t+=n),h(e,t,i)}function V(e,t,i,n){return e>=0&&0>i?(n-=i,i=0):0>=t&&n>0&&(i-=n,n=0),[i,n]}function U(e){return e=(+e).toFixed(15).split("."),e.pop().replace(/0+$/,"").length}function g(e,t,i,n){if(x){var a=o(e,4,1);i.e-a.e>6&&(a={c:0,e:i.e}),s(i,a),s(n,a),n.c+=a.c-i.c,i.c=a.c}else if(k){var r=o(t,4);n.e-r.e>6&&(r={c:0,e:n.e}),s(i,r),s(n,r),i.c+=r.c-n.c,n.c=r.c}}function f(e,t,i){var n=i?[i]:_,s=t-e;if(0===s)return t=o(t,3),i=n[0],t.c=W(t.c+i/2),a(t.c-i,t.c,i,t.e);S(t/s)<1e-6&&(t=0),S(e/s)<1e-6&&(e=0);var 
l,h,d,c=[[5,10],[10,2],[50,10],[100,2]],m=[],p=[],u=o(t-e,3),V=o(e,-1,1),U=o(t,-1);r(V,u,1),r(U,u),s=U.c-V.c,u.c=s;for(var g=n.length;g--;){i=n[g],l=I(s/i),h=l*i-s,d=3*(h+3),d+=2*(i-n[0]+2),i%5===0&&(d-=10);for(var f=c.length;f--;)l%c[f][0]===0&&(d/=c[f][1]);p[g]=[i,l,h,d].join(),m[g]={secs:i,step:l,delta:h,score:d}}return m.sort(function(e,t){return e.score-t.score}),m=m[0],V.c=W(V.c-m.delta/2),U.c=W(U.c+m.delta/2),a(V.c,U.c,m.secs,u.e)}var y,b,_,x,k,v=[10,20,25,50],L=[4,5,6],w=Math,W=w.round,X=w.floor,I=w.ceil,S=w.abs;return n}),define("echarts/util/smartLogSteps",["require","./number"],function(e){function t(e){return i(),U=e||{},n(),a(),[o(),i()][0]}function i(){m=U=f=V=y=b=g=_=p=u=null}function n(){p=U.logLabelBase,null==p?(u="plain",p=10,V=S):(p=+p,1>p&&(p=10),u="exponent",V=v(p)),g=U.splitNumber,null==g&&(g=E);var e=parseFloat(U.dataMin),t=parseFloat(U.dataMax);isFinite(e)||isFinite(t)?isFinite(e)?isFinite(t)?e>t&&(t=[e,e=t][0]):t=e:e=t:e=t=1,m=U.logPositive,null==m&&(m=t>0||0===e),y=m?e:-t,b=m?t:-e,T>y&&(y=T),T>b&&(b=T)}function a(){function e(){g>d&&(g=d);var e=X(l(d/g)),t=W(l(d/e)),i=e*t,n=(i-m)/2,a=X(l(r-n));c(a-r)&&(a-=1),f=-a*V;for(var s=a;o>=s-e;s+=e)_.push(L(p,s))}function t(){for(var e=i(h,0),t=e+2;t>e&&a(e+1)+n(e+1)*C<r;)e++;for(var l=i(s,0),t=l-2;l>t&&a(l-1)+n(l-1)*C>o;)l--;f=-(a(e)*S+n(e)*K);for(var d=e;l>=d;d++){var c=a(d),m=n(d);_.push(L(10,c)*L(2,m))}}function i(e,t){return 3*e+t}function n(e){return e-3*a(e)}function a(e){return X(l(e/3))}_=[];var o=l(v(b)/V),r=l(v(y)/V),s=W(o),h=X(r),d=s-h,m=o-r;"exponent"===u?e():z>=d&&g>z?t():e()}function o(){for(var e=[],t=0,i=_.length;i>t;t++)e[t]=(m?1:-1)*_[t];!m&&e.reverse();var n=s(),a=n.value2Coord,o=a(e[0]),l=a(e[e.length-1]);return o===l&&(o-=1,l+=1),{dataMin:o,dataMax:l,tickList:e,logPositive:m,labelFormatter:r(),dataMappingMethods:n}}function r(){if("exponent"===u){var e=p,t=V;return function(i){if(!isFinite(parseFloat(i)))return"";var n="";return 0>i&&(i=-i,n="-"),n+e+d(v(i)/t)}}return 
function(e){return isFinite(parseFloat(e))?x.addCommas(h(e)):""}}function s(){var e=m,t=f;return{value2Coord:function(i){return null==i||isNaN(i)||!isFinite(i)?i:(i=parseFloat(i),isFinite(i)?e&&T>i?i=T:!e&&i>-T&&(i=-T):i=T,i=w(i),(e?1:-1)*(v(i)+t))},coord2Value:function(i){return null==i||isNaN(i)||!isFinite(i)?i:(i=parseFloat(i),isFinite(i)||(i=T),e?L(I,i-t):-L(I,-i+t))}}}function l(e){return+Number(+e).toFixed(14)}function h(e){return Number(e).toFixed(15).replace(/\.?0*$/,"")}function d(e){e=h(Math.round(e));for(var t=[],i=0,n=e.length;n>i;i++){var a=e.charAt(i);t.push(A[a]||"")}return t.join("")}function c(e){return e>-T&&T>e}var m,p,u,V,U,g,f,y,b,_,x=e("./number"),k=Math,v=k.log,L=k.pow,w=k.abs,W=k.ceil,X=k.floor,I=k.E,S=k.LN10,K=k.LN2,C=K/S,T=1e-9,E=5,z=2,A={0:"â°",1:"¹",2:"²",3:"³",4:"â´",5:"âµ",6:"â¶",7:"â·",8:"â¸",9:"â¹","-":"â»"};return t});
|
style:{y:c+p,color:this.titleOption.subtextStyle.color,text:a,textFont:h,textBaseline:"bottom"},highlightStyle:{color:l.lift(this.titleOption.subtextStyle.color,1),brushType:"fill"},hoverable:!1};switch(o&&(V.hoverable=!0,V.clickable=!0,V.onclick=function(){r&&"self"==r?window.location=o:window.open(o)}),this.titleOption.x){case"center":u.style.x=V.style.x=d+m/2,u.style.textAlign=V.style.textAlign="center";break;case"left":u.style.x=V.style.x=d,u.style.textAlign=V.style.textAlign="left";break;case"right":u.style.x=V.style.x=d+m,u.style.textAlign=V.style.textAlign="right";break;default:d=this.titleOption.x-0,d=isNaN(d)?0:d,u.style.x=V.style.x=d}this.titleOption.textAlign&&(u.style.textAlign=V.style.textAlign=this.titleOption.textAlign),this.shapeList.push(new n(u)),""!==a&&this.shapeList.push(new n(V))},_buildBackground:function(){var e=this.reformCssArray(this.titleOption.padding);this.shapeList.push(new a({zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this._itemGroupLocation.x-e[3],y:this._itemGroupLocation.y-e[0],width:this._itemGroupLocation.width+e[3]+e[1],height:this._itemGroupLocation.height+e[0]+e[2],brushType:0===this.titleOption.borderWidth?"fill":"both",color:this.titleOption.backgroundColor,strokeColor:this.titleOption.borderColor,lineWidth:this.titleOption.borderWidth}}))},_getItemGroupLocation:function(){var e,t=this.reformCssArray(this.titleOption.padding),i=this.titleOption.text,n=this.titleOption.subtext,a=this.getFont(this.titleOption.textStyle),o=this.getFont(this.titleOption.subtextStyle),r=Math.max(s.getTextWidth(i,a),s.getTextWidth(n,o)),l=s.getTextHeight(i,a)+(""===n?0:this.titleOption.itemGap+s.getTextHeight(n,o)),h=this.zr.getWidth();switch(this.titleOption.x){case"center":e=Math.floor((h-r)/2);break;case"left":e=t[3]+this.titleOption.borderWidth;break;case"right":e=h-r-t[1]-this.titleOption.borderWidth;break;default:e=this.titleOption.x-0,e=isNaN(e)?0:e}var 
d,c=this.zr.getHeight();switch(this.titleOption.y){case"top":d=t[0]+this.titleOption.borderWidth;break;case"bottom":d=c-l-t[2]-this.titleOption.borderWidth;break;case"center":d=Math.floor((c-l)/2);break;default:d=this.titleOption.y-0,d=isNaN(d)?0:d}return{x:e,y:d,width:r,height:l}},refresh:function(e){e&&(this.option=e,this.option.title=this.reformOption(this.option.title),this.titleOption=this.option.title,this.titleOption.textStyle=this.getTextStyle(this.titleOption.textStyle),this.titleOption.subtextStyle=this.getTextStyle(this.titleOption.subtextStyle)),this.clear(),this._buildShape()}},r.inherits(t,i),e("../component").define("title",t),t}),define("echarts/component/tooltip",["require","./base","../util/shape/Cross","zrender/shape/Line","zrender/shape/Rectangle","../config","../util/ecData","zrender/config","zrender/tool/event","zrender/tool/area","zrender/tool/color","zrender/tool/util","zrender/shape/Base","../component"],function(e){function t(e,t,o,r,s){i.call(this,e,t,o,r,s),this.dom=s.dom;var l=this;l._onmousemove=function(e){return l.__onmousemove(e)},l._onglobalout=function(e){return l.__onglobalout(e)},this.zr.on(h.EVENT.MOUSEMOVE,l._onmousemove),this.zr.on(h.EVENT.GLOBALOUT,l._onglobalout),l._hide=function(e){return l.__hide(e)},l._tryShow=function(e){return l.__tryShow(e)},l._refixed=function(e){return l.__refixed(e)},l._setContent=function(e,t){return l.__setContent(e,t)},this._tDom=this._tDom||document.createElement("div"),this._tDom.onselectstart=function(){return!1},this._tDom.onmouseover=function(){l._mousein=!0},this._tDom.onmouseout=function(){l._mousein=!1},this._tDom.className="echarts-tooltip",this._tDom.style.position="absolute",this.hasAppend=!1,this._axisLineShape&&this.zr.delShape(this._axisLineShape.id),this._axisLineShape=new 
a({zlevel:this.getZlevelBase(),z:this.getZBase(),invisible:!0,hoverable:!1}),this.shapeList.push(this._axisLineShape),this.zr.addShape(this._axisLineShape),this._axisShadowShape&&this.zr.delShape(this._axisShadowShape.id),this._axisShadowShape=new a({zlevel:this.getZlevelBase(),z:1,invisible:!0,hoverable:!1}),this.shapeList.push(this._axisShadowShape),this.zr.addShape(this._axisShadowShape),this._axisCrossShape&&this.zr.delShape(this._axisCrossShape.id),this._axisCrossShape=new n({zlevel:this.getZlevelBase(),z:this.getZBase(),invisible:!0,hoverable:!1}),this.shapeList.push(this._axisCrossShape),this.zr.addShape(this._axisCrossShape),this.showing=!1,this.refresh(r)}var i=e("./base"),n=e("../util/shape/Cross"),a=e("zrender/shape/Line"),o=e("zrender/shape/Rectangle"),r=new o({}),s=e("../config");s.tooltip={zlevel:1,z:8,show:!0,showContent:!0,trigger:"item",islandFormatter:"{a} <br/>{b} : {c}",showDelay:20,hideDelay:100,transitionDuration:.4,enterable:!1,backgroundColor:"rgba(0,0,0,0.7)",borderColor:"#333",borderRadius:4,borderWidth:0,padding:5,axisPointer:{type:"line",lineStyle:{color:"#48b",width:2,type:"solid"},crossStyle:{color:"#1e90ff",width:1,type:"dashed"},shadowStyle:{color:"rgba(150,150,150,0.3)",width:"auto",type:"default"}},textStyle:{color:"#fff"}};var l=e("../util/ecData"),h=e("zrender/config"),d=e("zrender/tool/event"),c=e("zrender/tool/area"),m=e("zrender/tool/color"),p=e("zrender/tool/util"),u=e("zrender/shape/Base");return t.prototype={type:s.COMPONENT_TYPE_TOOLTIP,_gCssText:"position:absolute;display:block;border-style:solid;white-space:nowrap;",_style:function(e){if(!e)return"";var t=[];if(e.transitionDuration){var i="left "+e.transitionDuration+"s,top 
"+e.transitionDuration+"s";t.push("transition:"+i),t.push("-moz-transition:"+i),t.push("-webkit-transition:"+i),t.push("-o-transition:"+i)}e.backgroundColor&&(t.push("background-Color:"+m.toHex(e.backgroundColor)),t.push("filter:alpha(opacity=70)"),t.push("background-Color:"+e.backgroundColor)),null!=e.borderWidth&&t.push("border-width:"+e.borderWidth+"px"),null!=e.borderColor&&t.push("border-color:"+e.borderColor),null!=e.borderRadius&&(t.push("border-radius:"+e.borderRadius+"px"),t.push("-moz-border-radius:"+e.borderRadius+"px"),t.push("-webkit-border-radius:"+e.borderRadius+"px"),t.push("-o-border-radius:"+e.borderRadius+"px"));var n=e.textStyle;n&&(n.color&&t.push("color:"+n.color),n.decoration&&t.push("text-decoration:"+n.decoration),n.align&&t.push("text-align:"+n.align),n.fontFamily&&t.push("font-family:"+n.fontFamily),n.fontSize&&t.push("font-size:"+n.fontSize+"px"),n.fontSize&&t.push("line-height:"+Math.round(3*n.fontSize/2)+"px"),n.fontStyle&&t.push("font-style:"+n.fontStyle),n.fontWeight&&t.push("font-weight:"+n.fontWeight));var a=e.padding;return null!=a&&(a=this.reformCssArray(a),t.push("padding:"+a[0]+"px "+a[1]+"px "+a[2]+"px "+a[3]+"px")),t=t.join(";")+";"},__hide:function(){this._lastDataIndex=-1,this._lastSeriesIndex=-1,this._lastItemTriggerId=-1,this._tDom&&(this._tDom.style.display="none");var e=!1;this._axisLineShape.invisible||(this._axisLineShape.invisible=!0,this.zr.modShape(this._axisLineShape.id),e=!0),this._axisShadowShape.invisible||(this._axisShadowShape.invisible=!0,this.zr.modShape(this._axisShadowShape.id),e=!0),this._axisCrossShape.invisible||(this._axisCrossShape.invisible=!0,this.zr.modShape(this._axisCrossShape.id),e=!0),this._lastTipShape&&this._lastTipShape.tipShape.length>0&&(this.zr.delShape(this._lastTipShape.tipShape),this._lastTipShape=!1,this.shapeList.length=2),e&&this.zr.refreshNextFrame(),this.showing=!1},_show:function(e,t,i,n){var a=this._tDom.offsetHeight,o=this._tDom.offsetWidth;e&&("function"==typeof 
e&&(e=e([t,i])),e instanceof Array&&(t=e[0],i=e[1])),t+o>this._zrWidth&&(t-=o+40),i+a>this._zrHeight&&(i-=a-20),20>i&&(i=0),this._tDom.style.cssText=this._gCssText+this._defaultCssText+(n?n:"")+"left:"+t+"px;top:"+i+"px;",(10>a||10>o)&&setTimeout(this._refixed,20),this.showing=!0},__refixed:function(){if(this._tDom){var e="",t=this._tDom.offsetHeight,i=this._tDom.offsetWidth;this._tDom.offsetLeft+i>this._zrWidth&&(e+="left:"+(this._zrWidth-i-20)+"px;"),this._tDom.offsetTop+t>this._zrHeight&&(e+="top:"+(this._zrHeight-t-10)+"px;"),""!==e&&(this._tDom.style.cssText+=e)}},__tryShow:function(){var e,t;if(this._curTarget){if("island"===this._curTarget._type&&this.option.tooltip.show)return void this._showItemTrigger();var i=l.get(this._curTarget,"series"),n=l.get(this._curTarget,"data");e=this.deepQuery([n,i,this.option],"tooltip.show"),null!=i&&null!=n&&e?(t=this.deepQuery([n,i,this.option],"tooltip.trigger"),"axis"===t?this._showAxisTrigger(i.xAxisIndex,i.yAxisIndex,l.get(this._curTarget,"dataIndex")):this._showItemTrigger()):(clearTimeout(this._hidingTicket),clearTimeout(this._showingTicket),this._hidingTicket=setTimeout(this._hide,this._hideDelay))}else this._findPolarTrigger()||this._findAxisTrigger()},_findAxisTrigger:function(){if(!this.component.xAxis||!this.component.yAxis)return void(this._hidingTicket=setTimeout(this._hide,this._hideDelay));for(var e,t,i=this.option.series,n=0,a=i.length;a>n;n++)if("axis"===this.deepQuery([i[n],this.option],"tooltip.trigger"))return e=i[n].xAxisIndex||0,t=i[n].yAxisIndex||0,this.component.xAxis.getAxis(e)&&this.component.xAxis.getAxis(e).type===s.COMPONENT_TYPE_AXIS_CATEGORY?void this._showAxisTrigger(e,t,this._getNearestDataIndex("x",this.component.xAxis.getAxis(e))):this.component.yAxis.getAxis(t)&&this.component.yAxis.getAxis(t).type===s.COMPONENT_TYPE_AXIS_CATEGORY?void this._showAxisTrigger(e,t,this._getNearestDataIndex("y",this.component.yAxis.getAxis(t))):void 
this._showAxisTrigger(e,t,-1);"cross"===this.option.tooltip.axisPointer.type&&this._showAxisTrigger(-1,-1,-1)},_findPolarTrigger:function(){if(!this.component.polar)return!1;var e,t=d.getX(this._event),i=d.getY(this._event),n=this.component.polar.getNearestIndex([t,i]);return n?(e=n.valueIndex,n=n.polarIndex):n=-1,-1!=n?this._showPolarTrigger(n,e):!1},_getNearestDataIndex:function(e,t){var i=-1,n=d.getX(this._event),a=d.getY(this._event);if("x"===e){for(var o,r,s=this.component.grid.getXend(),l=t.getCoordByIndex(i);s>l&&(r=l,n>=l);)o=l,l=t.getCoordByIndex(++i);return 0>=i?i=0:r-n>=n-o?i-=1:null==t.getNameByIndex(i)&&(i-=1),i}for(var h,c,m=this.component.grid.getY(),l=t.getCoordByIndex(i);l>m&&(h=l,l>=a);)c=l,l=t.getCoordByIndex(++i);return 0>=i?i=0:a-h>=c-a?i-=1:null==t.getNameByIndex(i)&&(i-=1),i},_showAxisTrigger:function(e,t,i){if(!this._event.connectTrigger&&this.messageCenter.dispatch(s.EVENT.TOOLTIP_IN_GRID,this._event,null,this.myChart),null==this.component.xAxis||null==this.component.yAxis||null==e||null==t)return clearTimeout(this._hidingTicket),clearTimeout(this._showingTicket),void(this._hidingTicket=setTimeout(this._hide,this._hideDelay));var n,a,o,r,l=this.option.series,h=[],c=[],m="";if("axis"===this.option.tooltip.trigger){if(!this.option.tooltip.show)return;a=this.option.tooltip.formatter,o=this.option.tooltip.position}var p,u,V=-1!=e&&this.component.xAxis.getAxis(e).type===s.COMPONENT_TYPE_AXIS_CATEGORY?"xAxis":-1!=t&&this.component.yAxis.getAxis(t).type===s.COMPONENT_TYPE_AXIS_CATEGORY?"yAxis":!1;if(V){var U="xAxis"==V?e:t;n=this.component[V].getAxis(U);for(var 
g=0,f=l.length;f>g;g++)this._isSelected(l[g].name)&&l[g][V+"Index"]===U&&"axis"===this.deepQuery([l[g],this.option],"tooltip.trigger")&&(r=this.query(l[g],"tooltip.showContent")||r,a=this.query(l[g],"tooltip.formatter")||a,o=this.query(l[g],"tooltip.position")||o,m+=this._style(this.query(l[g],"tooltip")),null!=l[g].stack&&"xAxis"==V?(h.unshift(l[g]),c.unshift(g)):(h.push(l[g]),c.push(g)));this.messageCenter.dispatch(s.EVENT.TOOLTIP_HOVER,this._event,{seriesIndex:c,dataIndex:i},this.myChart);var y;"xAxis"==V?(p=this.subPixelOptimize(n.getCoordByIndex(i),this._axisLineWidth),u=d.getY(this._event),y=[p,this.component.grid.getY(),p,this.component.grid.getYend()]):(p=d.getX(this._event),u=this.subPixelOptimize(n.getCoordByIndex(i),this._axisLineWidth),y=[this.component.grid.getX(),u,this.component.grid.getXend(),u]),this._styleAxisPointer(h,y[0],y[1],y[2],y[3],n.getGap(),p,u)}else p=d.getX(this._event),u=d.getY(this._event),this._styleAxisPointer(l,this.component.grid.getX(),u,this.component.grid.getXend(),u,0,p,u),i>=0?this._showItemTrigger(!0):(clearTimeout(this._hidingTicket),clearTimeout(this._showingTicket),this._tDom.style.display="none");if(h.length>0){if(this._lastItemTriggerId=-1,this._lastDataIndex!=i||this._lastSeriesIndex!=c[0]){this._lastDataIndex=i,this._lastSeriesIndex=c[0];var b,_;if("function"==typeof a){for(var x=[],g=0,f=h.length;f>g;g++)b=h[g].data[i],_=this.getDataFromOption(b,"-"),x.push({seriesIndex:c[g],seriesName:h[g].name||"",series:h[g],dataIndex:i,data:b,name:n.getNameByIndex(i),value:_,0:h[g].name||"",1:n.getNameByIndex(i),2:_,3:b});this._curTicket="axis:"+i,this._tDom.innerHTML=a.call(this.myChart,x,this._curTicket,this._setContent)}else if("string"==typeof a){this._curTicket=0/0,a=a.replace("{a}","{a0}").replace("{b}","{b0}").replace("{c}","{c0}");for(var 
g=0,f=h.length;f>g;g++)a=a.replace("{a"+g+"}",this._encodeHTML(h[g].name||"")),a=a.replace("{b"+g+"}",this._encodeHTML(n.getNameByIndex(i))),b=h[g].data[i],b=this.getDataFromOption(b,"-"),a=a.replace("{c"+g+"}",b instanceof Array?b:this.numAddCommas(b));this._tDom.innerHTML=a}else{this._curTicket=0/0,a=this._encodeHTML(n.getNameByIndex(i));for(var g=0,f=h.length;f>g;g++)a+="<br/>"+this._encodeHTML(h[g].name||"")+" : ",b=h[g].data[i],b=this.getDataFromOption(b,"-"),a+=b instanceof Array?b:this.numAddCommas(b);this._tDom.innerHTML=a}}if(r===!1||!this.option.tooltip.showContent)return;this.hasAppend||(this._tDom.style.left=this._zrWidth/2+"px",this._tDom.style.top=this._zrHeight/2+"px",this.dom.firstChild.appendChild(this._tDom),this.hasAppend=!0),this._show(o,p+10,u+10,m)}},_showPolarTrigger:function(e,t){if(null==this.component.polar||null==e||null==t||0>t)return!1;var i,n,a,o=this.option.series,r=[],s=[],l="";if("axis"===this.option.tooltip.trigger){if(!this.option.tooltip.show)return!1;i=this.option.tooltip.formatter,n=this.option.tooltip.position}for(var h=this.option.polar[e].indicator[t].text,c=0,m=o.length;m>c;c++)this._isSelected(o[c].name)&&o[c].polarIndex===e&&"axis"===this.deepQuery([o[c],this.option],"tooltip.trigger")&&(a=this.query(o[c],"tooltip.showContent")||a,i=this.query(o[c],"tooltip.formatter")||i,n=this.query(o[c],"tooltip.position")||n,l+=this._style(this.query(o[c],"tooltip")),r.push(o[c]),s.push(c));if(r.length>0){for(var p,u,V,U=[],c=0,m=r.length;m>c;c++){p=r[c].data;for(var g=0,f=p.length;f>g;g++)u=p[g],this._isSelected(u.name)&&(u=null!=u?u:{name:"",value:{dataIndex:"-"}},V=this.getDataFromOption(u.value[t]),U.push({seriesIndex:s[c],seriesName:r[c].name||"",series:r[c],dataIndex:t,data:u,name:u.name,indicator:h,value:V,0:r[c].name||"",1:u.name,2:V,3:h}))}if(U.length<=0)return;if(this._lastItemTriggerId=-1,this._lastDataIndex!=t||this._lastSeriesIndex!=s[0])if(this._lastDataIndex=t,this._lastSeriesIndex=s[0],"function"==typeof 
i)this._curTicket="axis:"+t,this._tDom.innerHTML=i.call(this.myChart,U,this._curTicket,this._setContent);else if("string"==typeof i){i=i.replace("{a}","{a0}").replace("{b}","{b0}").replace("{c}","{c0}").replace("{d}","{d0}");for(var c=0,m=U.length;m>c;c++)i=i.replace("{a"+c+"}",this._encodeHTML(U[c].seriesName)),i=i.replace("{b"+c+"}",this._encodeHTML(U[c].name)),i=i.replace("{c"+c+"}",this.numAddCommas(U[c].value)),i=i.replace("{d"+c+"}",this._encodeHTML(U[c].indicator));this._tDom.innerHTML=i}else{i=this._encodeHTML(U[0].name)+"<br/>"+this._encodeHTML(U[0].indicator)+" : "+this.numAddCommas(U[0].value);for(var c=1,m=U.length;m>c;c++)i+="<br/>"+this._encodeHTML(U[c].name)+"<br/>",i+=this._encodeHTML(U[c].indicator)+" : "+this.numAddCommas(U[c].value);this._tDom.innerHTML=i}if(a===!1||!this.option.tooltip.showContent)return;return this.hasAppend||(this._tDom.style.left=this._zrWidth/2+"px",this._tDom.style.top=this._zrHeight/2+"px",this.dom.firstChild.appendChild(this._tDom),this.hasAppend=!0),this._show(n,d.getX(this._event),d.getY(this._event),l),!0}},_showItemTrigger:function(e){if(this._curTarget){var t,i,n,a=l.get(this._curTarget,"series"),o=l.get(this._curTarget,"seriesIndex"),r=l.get(this._curTarget,"data"),h=l.get(this._curTarget,"dataIndex"),c=l.get(this._curTarget,"name"),m=l.get(this._curTarget,"value"),p=l.get(this._curTarget,"special"),u=l.get(this._curTarget,"special2"),V=[r,a,this.option],U="";if("island"!=this._curTarget._type){var g=e?"axis":"item";this.option.tooltip.trigger===g&&(t=this.option.tooltip.formatter,i=this.option.tooltip.position),this.query(a,"tooltip.trigger")===g&&(n=this.query(a,"tooltip.showContent")||n,t=this.query(a,"tooltip.formatter")||t,i=this.query(a,"tooltip.position")||i,U+=this._style(this.query(a,"tooltip"))),n=this.query(r,"tooltip.showContent")||n,t=this.query(r,"tooltip.formatter")||t,i=this.query(r,"tooltip.position")||i,U+=this._style(this.query(r,"tooltip"))}else 
this._lastItemTriggerId=0/0,n=this.deepQuery(V,"tooltip.showContent"),t=this.deepQuery(V,"tooltip.islandFormatter"),i=this.deepQuery(V,"tooltip.islandPosition");this._lastDataIndex=-1,this._lastSeriesIndex=-1,this._lastItemTriggerId!==this._curTarget.id&&(this._lastItemTriggerId=this._curTarget.id,"function"==typeof t?(this._curTicket=(a.name||"")+":"+h,this._tDom.innerHTML=t.call(this.myChart,{seriesIndex:o,seriesName:a.name||"",series:a,dataIndex:h,data:r,name:c,value:m,percent:p,indicator:p,value2:u,indicator2:u,0:a.name||"",1:c,2:m,3:p,4:u,5:r,6:o,7:h},this._curTicket,this._setContent)):"string"==typeof t?(this._curTicket=0/0,t=t.replace("{a}","{a0}").replace("{b}","{b0}").replace("{c}","{c0}"),t=t.replace("{a0}",this._encodeHTML(a.name||"")).replace("{b0}",this._encodeHTML(c)).replace("{c0}",m instanceof Array?m:this.numAddCommas(m)),t=t.replace("{d}","{d0}").replace("{d0}",p||""),t=t.replace("{e}","{e0}").replace("{e0}",l.get(this._curTarget,"special2")||""),this._tDom.innerHTML=t):(this._curTicket=0/0,this._tDom.innerHTML=a.type===s.CHART_TYPE_RADAR&&p?this._itemFormatter.radar.call(this,a,c,m,p):a.type===s.CHART_TYPE_EVENTRIVER?this._itemFormatter.eventRiver.call(this,a,c,m,r):""+(null!=a.name?this._encodeHTML(a.name)+"<br/>":"")+(""===c?"":this._encodeHTML(c)+" : ")+(m instanceof Array?m:this.numAddCommas(m))));var f=d.getX(this._event),y=d.getY(this._event);this.deepQuery(V,"tooltip.axisPointer.show")&&this.component.grid?this._styleAxisPointer([a],this.component.grid.getX(),y,this.component.grid.getXend(),y,0,f,y):this._hide(),n!==!1&&this.option.tooltip.showContent&&(this.hasAppend||(this._tDom.style.left=this._zrWidth/2+"px",this._tDom.style.top=this._zrHeight/2+"px",this.dom.firstChild.appendChild(this._tDom),this.hasAppend=!0),this._show(i,f+20,y-20,U))}},_itemFormatter:{radar:function(e,t,i,n){var a="";a+=this._encodeHTML(""===t?e.name||"":t),a+=""===a?"":"<br />";for(var o=0;o<n.length;o++)a+=this._encodeHTML(n[o].text)+" : 
"+this.numAddCommas(i[o])+"<br />";return a},chord:function(e,t,i,n,a){if(null==a)return this._encodeHTML(t)+" ("+this.numAddCommas(i)+")";var o=this._encodeHTML(t),r=this._encodeHTML(n);return""+(null!=e.name?this._encodeHTML(e.name)+"<br/>":"")+o+" -> "+r+" ("+this.numAddCommas(i)+")<br />"+r+" -> "+o+" ("+this.numAddCommas(a)+")"},eventRiver:function(e,t,i,n){var a="";a+=this._encodeHTML(""===e.name?"":e.name+" : "),a+=this._encodeHTML(t),a+=""===a?"":"<br />",n=n.evolution;for(var o=0,r=n.length;r>o;o++)a+='<div style="padding-top:5px;">',n[o].detail&&(n[o].detail.img&&(a+='<img src="'+n[o].detail.img+'" style="float:left;width:40px;height:40px;">'),a+='<div style="margin-left:45px;">'+n[o].time+"<br/>",a+='<a href="'+n[o].detail.link+'" target="_blank">',a+=n[o].detail.text+"</a></div>",a+="</div>");return a}},_styleAxisPointer:function(e,t,i,n,a,o,r,s){if(e.length>0){var l,h,d=this.option.tooltip.axisPointer,c=d.type,m={line:{},cross:{},shadow:{}};for(var p in m)m[p].color=d[p+"Style"].color,m[p].width=d[p+"Style"].width,m[p].type=d[p+"Style"].type;for(var u=0,V=e.length;V>u;u++)l=e[u],h=this.query(l,"tooltip.axisPointer.type"),c=h||c,h&&(m[h].color=this.query(l,"tooltip.axisPointer."+h+"Style.color")||m[h].color,m[h].width=this.query(l,"tooltip.axisPointer."+h+"Style.width")||m[h].width,m[h].type=this.query(l,"tooltip.axisPointer."+h+"Style.type")||m[h].type);if("line"===c){var U=m.line.width,g=t==n;this._axisLineShape.style={xStart:g?this.subPixelOptimize(t,U):t,yStart:g?i:this.subPixelOptimize(i,U),xEnd:g?this.subPixelOptimize(n,U):n,yEnd:g?a:this.subPixelOptimize(a,U),strokeColor:m.line.color,lineWidth:U,lineType:m.line.type},this._axisLineShape.invisible=!1,this.zr.modShape(this._axisLineShape.id)}else if("cross"===c){var f=m.cross.width;this._axisCrossShape.style={brushType:"stroke",rect:this.component.grid.getArea(),x:this.subPixelOptimize(r,f),y:this.subPixelOptimize(s,f),text:("( "+this.component.xAxis.getAxis(0).getValueFromCoord(r)+" , 
"+this.component.yAxis.getAxis(0).getValueFromCoord(s)+" )").replace(" , "," ").replace(" , "," "),textPosition:"specific",strokeColor:m.cross.color,lineWidth:f,lineType:m.cross.type},this.component.grid.getXend()-r>100?(this._axisCrossShape.style.textAlign="left",this._axisCrossShape.style.textX=r+10):(this._axisCrossShape.style.textAlign="right",this._axisCrossShape.style.textX=r-10),s-this.component.grid.getY()>50?(this._axisCrossShape.style.textBaseline="bottom",this._axisCrossShape.style.textY=s-10):(this._axisCrossShape.style.textBaseline="top",this._axisCrossShape.style.textY=s+10),this._axisCrossShape.invisible=!1,this.zr.modShape(this._axisCrossShape.id)}else"shadow"===c&&((null==m.shadow.width||"auto"===m.shadow.width||isNaN(m.shadow.width))&&(m.shadow.width=o),t===n?Math.abs(this.component.grid.getX()-t)<2?(m.shadow.width/=2,t=n+=m.shadow.width/2):Math.abs(this.component.grid.getXend()-t)<2&&(m.shadow.width/=2,t=n-=m.shadow.width/2):i===a&&(Math.abs(this.component.grid.getY()-i)<2?(m.shadow.width/=2,i=a+=m.shadow.width/2):Math.abs(this.component.grid.getYend()-i)<2&&(m.shadow.width/=2,i=a-=m.shadow.width/2)),this._axisShadowShape.style={xStart:t,yStart:i,xEnd:n,yEnd:a,strokeColor:m.shadow.color,lineWidth:m.shadow.width},this._axisShadowShape.invisible=!1,this.zr.modShape(this._axisShadowShape.id));this.zr.refreshNextFrame()}},__onmousemove:function(e){if(clearTimeout(this._hidingTicket),clearTimeout(this._showingTicket),!this._mousein||!this._enterable){var t=e.target,i=d.getX(e.event),n=d.getY(e.event);if(t){this._curTarget=t,this._event=e.event,this._event.zrenderX=i,this._event.zrenderY=n;var a;if(this._needAxisTrigger&&this.component.polar&&-1!=(a=this.component.polar.isInside([i,n])))for(var o=this.option.series,l=0,h=o.length;h>l;l++)if(o[l].polarIndex===a&&"axis"===this.deepQuery([o[l],this.option],"tooltip.trigger")){this._curTarget=null;break}this._showingTicket=setTimeout(this._tryShow,this._showDelay)}else 
this._curTarget=!1,this._event=e.event,this._event.zrenderX=i,this._event.zrenderY=n,this._needAxisTrigger&&this.component.grid&&c.isInside(r,this.component.grid.getArea(),i,n)?this._showingTicket=setTimeout(this._tryShow,this._showDelay):this._needAxisTrigger&&this.component.polar&&-1!=this.component.polar.isInside([i,n])?this._showingTicket=setTimeout(this._tryShow,this._showDelay):(!this._event.connectTrigger&&this.messageCenter.dispatch(s.EVENT.TOOLTIP_OUT_GRID,this._event,null,this.myChart),this._hidingTicket=setTimeout(this._hide,this._hideDelay))}},__onglobalout:function(){clearTimeout(this._hidingTicket),clearTimeout(this._showingTicket),this._hidingTicket=setTimeout(this._hide,this._hideDelay)},__setContent:function(e,t){this._tDom&&(e===this._curTicket&&(this._tDom.innerHTML=t),setTimeout(this._refixed,20))},ontooltipHover:function(e,t){if(!this._lastTipShape||this._lastTipShape&&this._lastTipShape.dataIndex!=e.dataIndex){this._lastTipShape&&this._lastTipShape.tipShape.length>0&&(this.zr.delShape(this._lastTipShape.tipShape),this.shapeList.length=2);for(var i=0,n=t.length;n>i;i++)t[i].zlevel=this.getZlevelBase(),t[i].z=this.getZBase(),t[i].style=u.prototype.getHighlightStyle(t[i].style,t[i].highlightStyle),t[i].draggable=!1,t[i].hoverable=!1,t[i].clickable=!1,t[i].ondragend=null,t[i].ondragover=null,t[i].ondrop=null,this.shapeList.push(t[i]),this.zr.addShape(t[i]);this._lastTipShape={dataIndex:e.dataIndex,tipShape:t}}},ondragend:function(){this._hide()},onlegendSelected:function(e){this._selectedMap=e.selected},_setSelectedMap:function(){this._selectedMap=this.component.legend?p.clone(this.component.legend.getSelectedMap()):{}},_isSelected:function(e){return null!=this._selectedMap[e]?this._selectedMap[e]:!0},showTip:function(e){if(e){var t,i=this.option.series;if(null!=e.seriesIndex)t=e.seriesIndex;else for(var n=e.seriesName,a=0,o=i.length;o>a;a++)if(i[a].name===n){t=a;break}var r=i[t];if(null!=r){var 
d=this.myChart.chart[r.type],c="axis"===this.deepQuery([r,this.option],"tooltip.trigger");if(d)if(c){var m=e.dataIndex;switch(d.type){case s.CHART_TYPE_LINE:case s.CHART_TYPE_BAR:case s.CHART_TYPE_K:case s.CHART_TYPE_RADAR:if(null==this.component.polar||r.data[0].value.length<=m)return;var p=r.polarIndex||0,u=this.component.polar.getVector(p,m,"max");this._event={zrenderX:u[0],zrenderY:u[1]},this._showPolarTrigger(p,m)}}else{var V,U,g=d.shapeList;switch(d.type){case s.CHART_TYPE_LINE:case s.CHART_TYPE_BAR:case s.CHART_TYPE_K:case s.CHART_TYPE_TREEMAP:case s.CHART_TYPE_SCATTER:for(var m=e.dataIndex,a=0,o=g.length;o>a;a++)if(null==g[a]._mark&&l.get(g[a],"seriesIndex")==t&&l.get(g[a],"dataIndex")==m){this._curTarget=g[a],V=g[a].style.x,U=d.type!=s.CHART_TYPE_K?g[a].style.y:g[a].style.y[0];break}break;case s.CHART_TYPE_RADAR:for(var m=e.dataIndex,a=0,o=g.length;o>a;a++)if("polygon"===g[a].type&&l.get(g[a],"seriesIndex")==t&&l.get(g[a],"dataIndex")==m){this._curTarget=g[a];var u=this.component.polar.getCenter(r.polarIndex||0);V=u[0],U=u[1];break}break;case s.CHART_TYPE_PIE:for(var f=e.name,a=0,o=g.length;o>a;a++)if("sector"===g[a].type&&l.get(g[a],"seriesIndex")==t&&l.get(g[a],"name")==f){this._curTarget=g[a];var y=this._curTarget.style,b=(y.startAngle+y.endAngle)/2*Math.PI/180;V=this._curTarget.style.x+Math.cos(b)*y.r/1.5,U=this._curTarget.style.y-Math.sin(b)*y.r/1.5;break}break;case s.CHART_TYPE_MAP:for(var f=e.name,_=r.mapType,a=0,o=g.length;o>a;a++)if("text"===g[a].type&&g[a]._mapType===_&&g[a].style._name===f){this._curTarget=g[a],V=this._curTarget.style.x+this._curTarget.position[0],U=this._curTarget.style.y+this._curTarget.position[1];break}break;case s.CHART_TYPE_CHORD:for(var f=e.name,a=0,o=g.length;o>a;a++)if("sector"===g[a].type&&l.get(g[a],"name")==f){this._curTarget=g[a];var y=this._curTarget.style,b=(y.startAngle+y.endAngle)/2*Math.PI/180;return V=this._curTarget.style.x+Math.cos(b)*(y.r-2),U=this._curTarget.style.y-Math.sin(b)*(y.r-2),void 
this.zr.trigger(h.EVENT.MOUSEMOVE,{zrenderX:V,zrenderY:U})}break;case s.CHART_TYPE_FORCE:for(var f=e.name,a=0,o=g.length;o>a;a++)if("circle"===g[a].type&&l.get(g[a],"name")==f){this._curTarget=g[a],V=this._curTarget.position[0],U=this._curTarget.position[1];break}}null!=V&&null!=U&&(this._event={zrenderX:V,zrenderY:U},this.zr.addHoverShape(this._curTarget),this.zr.refreshHover(),this._showItemTrigger())}}}},hideTip:function(){this._hide()},refresh:function(e){if(this._zrHeight=this.zr.getHeight(),this._zrWidth=this.zr.getWidth(),this._lastTipShape&&this._lastTipShape.tipShape.length>0&&this.zr.delShape(this._lastTipShape.tipShape),this._lastTipShape=!1,this.shapeList.length=2,this._lastDataIndex=-1,this._lastSeriesIndex=-1,this._lastItemTriggerId=-1,e){this.option=e,this.option.tooltip=this.reformOption(this.option.tooltip),this.option.tooltip.textStyle=p.merge(this.option.tooltip.textStyle,this.ecTheme.textStyle),this._needAxisTrigger=!1,"axis"===this.option.tooltip.trigger&&(this._needAxisTrigger=!0);for(var t=this.option.series,i=0,n=t.length;n>i;i++)if("axis"===this.query(t[i],"tooltip.trigger")){this._needAxisTrigger=!0;break}this._showDelay=this.option.tooltip.showDelay,this._hideDelay=this.option.tooltip.hideDelay,this._defaultCssText=this._style(this.option.tooltip),this._setSelectedMap(),this._axisLineWidth=this.option.tooltip.axisPointer.lineStyle.width,this._enterable=this.option.tooltip.enterable,!this._enterable&&this._tDom.className.indexOf(h.elementClassName)<0&&(this._tDom.className+=" "+h.elementClassName)}if(this.showing){var 
a=this;setTimeout(function(){a.zr.trigger(h.EVENT.MOUSEMOVE,a.zr.handler._event)},50)}},onbeforDispose:function(){this._lastTipShape&&this._lastTipShape.tipShape.length>0&&this.zr.delShape(this._lastTipShape.tipShape),clearTimeout(this._hidingTicket),clearTimeout(this._showingTicket),this.zr.un(h.EVENT.MOUSEMOVE,this._onmousemove),this.zr.un(h.EVENT.GLOBALOUT,this._onglobalout),this.hasAppend&&this.dom.firstChild&&this.dom.firstChild.removeChild(this._tDom),this._tDom=null},_encodeHTML:function(e){return String(e).replace(/&/g,"&").replace(/</g,"<").replace(/>/g,">").replace(/"/g,""").replace(/'/g,"'")}},p.inherits(t,i),e("../component").define("tooltip",t),t}),define("echarts/component/legend",["require","./base","zrender/shape/Text","zrender/shape/Rectangle","zrender/shape/Sector","../util/shape/Icon","../util/shape/Candle","../config","zrender/tool/util","zrender/tool/area","../component"],function(e){function t(e,t,n,a,o){if(!this.query(a,"legend.data"))return void console.error("option.legend.data has not been defined.");i.call(this,e,t,n,a,o);var r=this;r._legendSelected=function(e){r.__legendSelected(e)},r._dispatchHoverLink=function(e){return r.__dispatchHoverLink(e)},this._colorIndex=0,this._colorMap={},this._selectedMap={},this._hasDataMap={},this.refresh(a)}var i=e("./base"),n=e("zrender/shape/Text"),a=e("zrender/shape/Rectangle"),o=e("zrender/shape/Sector"),r=e("../util/shape/Icon"),s=e("../util/shape/Candle"),l=e("../config");l.legend={zlevel:0,z:4,show:!0,orient:"horizontal",x:"center",y:"top",backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderWidth:0,padding:5,itemGap:10,itemWidth:20,itemHeight:14,textStyle:{color:"#333"},selectedMode:!0};var h=e("zrender/tool/util"),d=e("zrender/tool/area");t.prototype={type:l.COMPONENT_TYPE_LEGEND,_buildShape:function(){if(this.legendOption.show){this._itemGroupLocation=this._getItemGroupLocation(),this._buildBackground(),this._buildItem();for(var 
e=0,t=this.shapeList.length;t>e;e++)this.zr.addShape(this.shapeList[e])}},_buildItem:function(){var e,t,i,a,o,s,l,c,m=this.legendOption.data,p=m.length,u=this.legendOption.textStyle,V=this.zr.getWidth(),U=this.zr.getHeight(),g=this._itemGroupLocation.x,f=this._itemGroupLocation.y,y=this.legendOption.itemWidth,b=this.legendOption.itemHeight,_=this.legendOption.itemGap;"vertical"===this.legendOption.orient&&"right"===this.legendOption.x&&(g=this._itemGroupLocation.x+this._itemGroupLocation.width-y);for(var x=0;p>x;x++)o=h.merge(m[x].textStyle||{},u),s=this.getFont(o),e=this._getName(m[x]),l=this._getFormatterName(e),""!==e?(t=m[x].icon||this._getSomethingByName(e).type,c=this.getColor(e),"horizontal"===this.legendOption.orient?200>V-g&&y+5+d.getTextWidth(l,s)+(x===p-1||""===m[x+1]?0:_)>=V-g&&(g=this._itemGroupLocation.x,f+=b+_):200>U-f&&b+(x===p-1||""===m[x+1]?0:_)>=U-f&&("right"===this.legendOption.x?g-=this._itemGroupLocation.maxWidth+_:g+=this._itemGroupLocation.maxWidth+_,f=this._itemGroupLocation.y),i=this._getItemShapeByType(g,f,y,b,this._selectedMap[e]&&this._hasDataMap[e]?c:"#ccc",t,c),i._name=e,i=new r(i),a={zlevel:this.getZlevelBase(),z:this.getZBase(),style:{x:g+y+5,y:f+b/2,color:this._selectedMap[e]?"auto"===o.color?c:o.color:"#ccc",text:l,textFont:s,textBaseline:"middle"},highlightStyle:{color:c,brushType:"fill"},hoverable:!!this.legendOption.selectedMode,clickable:!!this.legendOption.selectedMode},"vertical"===this.legendOption.orient&&"right"===this.legendOption.x&&(a.style.x-=y+10,a.style.textAlign="right"),a._name=e,a=new n(a),this.legendOption.selectedMode&&(i.onclick=a.onclick=this._legendSelected,i.onmouseover=a.onmouseover=this._dispatchHoverLink,i.hoverConnect=a.id,a.hoverConnect=i.id),this.shapeList.push(i),this.shapeList.push(a),"horizontal"===this.legendOption.orient?g+=y+5+d.getTextWidth(l,s)+_:f+=b+_):"horizontal"===this.legendOption.orient?(g=this._itemGroupLocation.x,
f+=b+_):("right"===this.legendOption.x?g-=this._itemGroupLocation.maxWidth+_:g+=this._itemGroupLocation.maxWidth+_,f=this._itemGroupLocation.y);"horizontal"===this.legendOption.orient&&"center"===this.legendOption.x&&f!=this._itemGroupLocation.y&&this._mLineOptimize()},_getName:function(e){return"undefined"!=typeof e.name?e.name:e},_getFormatterName:function(e){var t,i=this.legendOption.formatter;return t="function"==typeof i?i.call(this.myChart,e):"string"==typeof i?i.replace("{name}",e):e},_getFormatterNameFromData:function(e){var t=this._getName(e);return this._getFormatterName(t)},_mLineOptimize:function(){for(var e=[],t=this._itemGroupLocation.x,i=2,n=this.shapeList.length;n>i;i++)this.shapeList[i].style.x===t?e.push((this._itemGroupLocation.width-(this.shapeList[i-1].style.x+d.getTextWidth(this.shapeList[i-1].style.text,this.shapeList[i-1].style.textFont)-t))/2):i===n-1&&e.push((this._itemGroupLocation.width-(this.shapeList[i].style.x+d.getTextWidth(this.shapeList[i].style.text,this.shapeList[i].style.textFont)-t))/2);for(var a=-1,i=1,n=this.shapeList.length;n>i;i++)this.shapeList[i].style.x===t&&a++,0!==e[a]&&(this.shapeList[i].style.x+=e[a])},_buildBackground:function(){var e=this.reformCssArray(this.legendOption.padding);this.shapeList.push(new a({zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this._itemGroupLocation.x-e[3],y:this._itemGroupLocation.y-e[0],width:this._itemGroupLocation.width+e[3]+e[1],height:this._itemGroupLocation.height+e[0]+e[2],brushType:0===this.legendOption.borderWidth?"fill":"both",color:this.legendOption.backgroundColor,strokeColor:this.legendOption.borderColor,lineWidth:this.legendOption.borderWidth}}))},_getItemGroupLocation:function(){var 
e=this.legendOption.data,t=e.length,i=this.legendOption.itemGap,n=this.legendOption.itemWidth+5,a=this.legendOption.itemHeight,o=this.legendOption.textStyle,r=this.getFont(o),s=0,l=0,c=this.reformCssArray(this.legendOption.padding),m=this.zr.getWidth()-c[1]-c[3],p=this.zr.getHeight()-c[0]-c[2],u=0,V=0;if("horizontal"===this.legendOption.orient){l=a;for(var U=0;t>U;U++)if(""!==this._getName(e[U])){var g=d.getTextWidth(this._getFormatterNameFromData(e[U]),e[U].textStyle?this.getFont(h.merge(e[U].textStyle||{},o)):r);u+n+g+i>m?(u-=i,s=Math.max(s,u),l+=a+i,u=0):(u+=n+g+i,s=Math.max(s,u-i))}else u-=i,s=Math.max(s,u),l+=a+i,u=0}else{for(var U=0;t>U;U++)V=Math.max(V,d.getTextWidth(this._getFormatterNameFromData(e[U]),e[U].textStyle?this.getFont(h.merge(e[U].textStyle||{},o)):r));V+=n,s=V;for(var U=0;t>U;U++)""!==this._getName(e[U])?u+a+i>p?(s+=V+i,u-=i,l=Math.max(l,u),u=0):(u+=a+i,l=Math.max(l,u-i)):(s+=V+i,u-=i,l=Math.max(l,u),u=0)}m=this.zr.getWidth(),p=this.zr.getHeight();var f;switch(this.legendOption.x){case"center":f=Math.floor((m-s)/2);break;case"left":f=c[3]+this.legendOption.borderWidth;break;case"right":f=m-s-c[1]-c[3]-2*this.legendOption.borderWidth;break;default:f=this.parsePercent(this.legendOption.x,m)}var y;switch(this.legendOption.y){case"top":y=c[0]+this.legendOption.borderWidth;break;case"bottom":y=p-l-c[0]-c[2]-2*this.legendOption.borderWidth;break;case"center":y=Math.floor((p-l)/2);break;default:y=this.parsePercent(this.legendOption.y,p)}return{x:f,y:y,width:s,height:l,maxWidth:V}},_getSomethingByName:function(e){for(var t,i=this.option.series,n=0,a=i.length;a>n;n++){if(i[n].name===e)return{type:i[n].type,series:i[n],seriesIndex:n,data:null,dataIndex:-1};if(i[n].type===l.CHART_TYPE_PIE||i[n].type===l.CHART_TYPE_RADAR||i[n].type===l.CHART_TYPE_CHORD||i[n].type===l.CHART_TYPE_FORCE||i[n].type===l.CHART_TYPE_FUNNEL||i[n].type===l.CHART_TYPE_TREEMAP){t=i[n].categories||i[n].data||i[n].nodes;for(var 
o=0,r=t.length;r>o;o++)if(t[o].name===e)return{type:i[n].type,series:i[n],seriesIndex:n,data:t[o],dataIndex:o}}}return{type:"bar",series:null,seriesIndex:-1,data:null,dataIndex:-1}},_getItemShapeByType:function(e,t,i,n,a,o,r){var s,h="#ccc"===a?r:a,d={zlevel:this.getZlevelBase(),z:this.getZBase(),style:{iconType:"legendicon"+o,x:e,y:t,width:i,height:n,color:a,strokeColor:a,lineWidth:2},highlightStyle:{color:h,strokeColor:h,lineWidth:1},hoverable:this.legendOption.selectedMode,clickable:this.legendOption.selectedMode};if(o.match("image")){var s=o.replace(new RegExp("^image:\\/\\/"),"");o="image"}switch(o){case"line":d.style.brushType="stroke",d.highlightStyle.lineWidth=3;break;case"radar":case"venn":case"tree":case"treemap":case"scatter":d.highlightStyle.lineWidth=3;break;case"k":d.style.brushType="both",d.highlightStyle.lineWidth=3,d.highlightStyle.color=d.style.color=this.deepQuery([this.ecTheme,l],"k.itemStyle.normal.color")||"#fff",d.style.strokeColor="#ccc"!=a?this.deepQuery([this.ecTheme,l],"k.itemStyle.normal.lineStyle.color")||"#ff3200":a;break;case"image":d.style.iconType="image",d.style.image=s,"#ccc"===a&&(d.style.opacity=.5)}return d},__legendSelected:function(e){var t=e.target._name;if("single"===this.legendOption.selectedMode)for(var i in this._selectedMap)this._selectedMap[i]=!1;this._selectedMap[t]=!this._selectedMap[t],this.messageCenter.dispatch(l.EVENT.LEGEND_SELECTED,e.event,{selected:this._selectedMap,target:t},this.myChart)},__dispatchHoverLink:function(e){this.messageCenter.dispatch(l.EVENT.LEGEND_HOVERLINK,e.event,{target:e.target._name},this.myChart)},refresh:function(e){if(e){this.option=e||this.option,this.option.legend=this.reformOption(this.option.legend),this.legendOption=this.option.legend;var t,i,n,a,o=this.legendOption.data||[];if(this.legendOption.selected)for(var r in this.legendOption.selected)this._selectedMap[r]="undefined"!=typeof this._selectedMap[r]?this._selectedMap[r]:this.legendOption.selected[r];for(var 
s=0,h=o.length;h>s;s++)t=this._getName(o[s]),""!==t&&(i=this._getSomethingByName(t),i.series?(this._hasDataMap[t]=!0,a=!i.data||i.type!==l.CHART_TYPE_PIE&&i.type!==l.CHART_TYPE_FORCE&&i.type!==l.CHART_TYPE_FUNNEL?[i.series]:[i.data,i.series],n=this.getItemStyleColor(this.deepQuery(a,"itemStyle.normal.color"),i.seriesIndex,i.dataIndex,i.data),n&&i.type!=l.CHART_TYPE_K&&this.setColor(t,n),this._selectedMap[t]=null!=this._selectedMap[t]?this._selectedMap[t]:!0):this._hasDataMap[t]=!1)}this.clear(),this._buildShape()},getRelatedAmount:function(e){for(var t,i=0,n=this.option.series,a=0,o=n.length;o>a;a++)if(n[a].name===e&&i++,n[a].type===l.CHART_TYPE_PIE||n[a].type===l.CHART_TYPE_RADAR||n[a].type===l.CHART_TYPE_CHORD||n[a].type===l.CHART_TYPE_FORCE||n[a].type===l.CHART_TYPE_FUNNEL){t=n[a].type!=l.CHART_TYPE_FORCE?n[a].data:n[a].categories;for(var r=0,s=t.length;s>r;r++)t[r].name===e&&"-"!=t[r].value&&i++}return i},setColor:function(e,t){this._colorMap[e]=t},getColor:function(e){return this._colorMap[e]||(this._colorMap[e]=this.zr.getColor(this._colorIndex++)),this._colorMap[e]},hasColor:function(e){return this._colorMap[e]?this._colorMap[e]:!1},add:function(e,t){for(var i=this.legendOption.data,n=0,a=i.length;a>n;n++)if(this._getName(i[n])===e)return;this.legendOption.data.push(e),this.setColor(e,t),this._selectedMap[e]=!0,this._hasDataMap[e]=!0},del:function(e){for(var t=this.legendOption.data,i=0,n=t.length;n>i;i++)if(this._getName(t[i])===e)return this.legendOption.data.splice(i,1)},getItemShape:function(e){if(null!=e)for(var t,i=0,n=this.shapeList.length;n>i;i++)if(t=this.shapeList[i],t._name===e&&"text"!=t.type)return t},setItemShape:function(e,t){for(var i,n=0,a=this.shapeList.length;a>n;n++)i=this.shapeList[n],i._name===e&&"text"!=i.type&&(this._selectedMap[e]||(t.style.color="#ccc",t.style.strokeColor="#ccc"),this.zr.modShape(i.id,t))},isSelected:function(e){return"undefined"!=typeof this._selectedMap[e]?this._selectedMap[e]:!0},getSelectedMap:function(){return 
this._selectedMap},setSelected:function(e,t){if("single"===this.legendOption.selectedMode)for(var i in this._selectedMap)this._selectedMap[i]=!1;this._selectedMap[e]=t,this.messageCenter.dispatch(l.EVENT.LEGEND_SELECTED,null,{selected:this._selectedMap,target:e},this.myChart)},onlegendSelected:function(e,t){var i=e.selected;for(var n in i)this._selectedMap[n]!=i[n]&&(t.needRefresh=!0),this._selectedMap[n]=i[n]}};var c={line:function(e,t){var i=t.height/2;e.moveTo(t.x,t.y+i),e.lineTo(t.x+t.width,t.y+i)},pie:function(e,t){var i=t.x,n=t.y,a=t.width,r=t.height;o.prototype.buildPath(e,{x:i+a/2,y:n+r+2,r:r,r0:6,startAngle:45,endAngle:135})},eventRiver:function(e,t){var i=t.x,n=t.y,a=t.width,o=t.height;e.moveTo(i,n+o),e.bezierCurveTo(i+a,n+o,i,n+4,i+a,n+4),e.lineTo(i+a,n),e.bezierCurveTo(i,n,i+a,n+o-4,i,n+o-4),e.lineTo(i,n+o)},k:function(e,t){var i=t.x,n=t.y,a=t.width,o=t.height;s.prototype.buildPath(e,{x:i+a/2,y:[n+1,n+1,n+o-6,n+o],width:a-6})},bar:function(e,t){var i=t.x,n=t.y+1,a=t.width,o=t.height-2,r=3;e.moveTo(i+r,n),e.lineTo(i+a-r,n),e.quadraticCurveTo(i+a,n,i+a,n+r),e.lineTo(i+a,n+o-r),e.quadraticCurveTo(i+a,n+o,i+a-r,n+o),e.lineTo(i+r,n+o),e.quadraticCurveTo(i,n+o,i,n+o-r),e.lineTo(i,n+r),e.quadraticCurveTo(i,n,i+r,n)},force:function(e,t){r.prototype.iconLibrary.circle(e,t)},radar:function(e,t){var i=6,n=t.x+t.width/2,a=t.y+t.height/2,o=t.height/2,r=2*Math.PI/i,s=-Math.PI/2,l=n+o*Math.cos(s),h=a+o*Math.sin(s);e.moveTo(l,h),s+=r;for(var d=0,c=i-1;c>d;d++)e.lineTo(n+o*Math.cos(s),a+o*Math.sin(s)),s+=r;e.lineTo(l,h)}};c.chord=c.pie,c.map=c.bar;for(var m in c)r.prototype.iconLibrary["legendicon"+m]=c[m];return h.inherits(t,i),e("../component").define("legend",t),t}),define("echarts/util/ecData",[],function(){function e(e,t,i,n,a,o,r,s){var l;return"undefined"!=typeof n&&(l=null==n.value?n:n.value),e._echartsData={_series:t,_seriesIndex:i,_data:n,_dataIndex:a,_name:o,_value:l,_special:r,_special2:s},e._echartsData}function t(e,t){var i=e._echartsData;if(!t)return 
i;switch(t){case"series":case"seriesIndex":case"data":case"dataIndex":case"name":case"value":case"special":case"special2":return i&&i["_"+t]}return null}function i(e,t,i){switch(e._echartsData=e._echartsData||{},t){case"series":case"seriesIndex":case"data":case"dataIndex":case"name":case"value":case"special":case"special2":e._echartsData["_"+t]=i}}function n(e,t){t._echartsData={_series:e._echartsData._series,_seriesIndex:e._echartsData._seriesIndex,_data:e._echartsData._data,_dataIndex:e._echartsData._dataIndex,_name:e._echartsData._name,_value:e._echartsData._value,_special:e._echartsData._special,_special2:e._echartsData._special2}}return{pack:e,set:i,get:t,clone:n}}),define("echarts/chart",[],function(){var e={},t={};return e.define=function(i,n){return t[i]=n,e},e.get=function(e){return t[e]},e}),define("zrender/tool/color",["require","../tool/util"],function(e){function t(e){D=e}function i(){D=N}function n(e,t){return e=0|e,t=t||D,t[e%t.length]}function a(e){B=e}function o(){H=B}function r(){return B}function s(e,t,i,n,a,o,r){O||(O=P.getContext());for(var s=O.createRadialGradient(e,t,i,n,a,o),l=0,h=r.length;h>l;l++)s.addColorStop(r[l][0],r[l][1]);return s.__nonRecursion=!0,s}function l(e,t,i,n,a){O||(O=P.getContext());for(var o=O.createLinearGradient(e,t,i,n),r=0,s=a.length;s>r;r++)o.addColorStop(a[r][0],a[r][1]);return o.__nonRecursion=!0,o}function h(e,t,i){e=u(e),t=u(t),e=S(e),t=S(t);for(var n=[],a=(t[0]-e[0])/i,o=(t[1]-e[1])/i,r=(t[2]-e[2])/i,s=(t[3]-e[3])/i,l=0,h=e[0],d=e[1],m=e[2],p=e[3];i>l;l++)n[l]=c([T(Math.floor(h),[0,255]),T(Math.floor(d),[0,255]),T(Math.floor(m),[0,255]),p.toFixed(4)-0],"rgba"),h+=a,d+=o,m+=r,p+=s;return h=t[0],d=t[1],m=t[2],p=t[3],n[l]=c([h,d,m,p],"rgba"),n}function d(e,t){var i=[],n=e.length;if(void 0===t&&(t=20),1===n)i=h(e[0],e[0],t);else if(n>1)for(var a=0,o=n-1;o>a;a++){var r=h(e[a],e[a+1],t);o-1>a&&r.pop(),i=i.concat(r)}return i}function c(e,t){if(t=t||"rgb",e&&(3===e.length||4===e.length)){if(e=C(e,function(e){return 
e>1?Math.ceil(e):e}),t.indexOf("hex")>-1)return"#"+((1<<24)+(e[0]<<16)+(e[1]<<8)+ +e[2]).toString(16).slice(1);if(t.indexOf("hs")>-1){var i=C(e.slice(1,3),function(e){return e+"%"});e[1]=i[0],e[2]=i[1]}return t.indexOf("a")>-1?(3===e.length&&e.push(1),e[3]=T(e[3],[0,1]),t+"("+e.slice(0,4).join(",")+")"):t+"("+e.slice(0,3).join(",")+")"}}function m(e){e=v(e),e.indexOf("rgba")<0&&(e=u(e));var t=[],i=0;return e.replace(/[\d.]+/g,function(e){e=3>i?0|e:+e,t[i++]=e}),t}function p(e,t){if(!E(e))return e;var i=S(e),n=i[3];return"undefined"==typeof n&&(n=1),e.indexOf("hsb")>-1?i=z(i):e.indexOf("hsl")>-1&&(i=A(i)),t.indexOf("hsb")>-1||t.indexOf("hsv")>-1?i=F(i):t.indexOf("hsl")>-1&&(i=J(i)),i[3]=n,c(i,t)}function u(e){return p(e,"rgba")}function V(e){return p(e,"rgb")}function U(e){return p(e,"hex")}function g(e){return p(e,"hsva")}function f(e){return p(e,"hsv")}function y(e){return p(e,"hsba")}function b(e){return p(e,"hsb")}function _(e){return p(e,"hsla")}function x(e){return p(e,"hsl")}function k(e){for(var t in G)if(U(G[t])===U(e))return t;return null}function v(e){return String(e).replace(/\s+/g,"")}function L(e){if(G[e]&&(e=G[e]),e=v(e),e=e.replace(/hsv/i,"hsb"),/^#[\da-f]{3}$/i.test(e)){e=parseInt(e.slice(1),16);var t=(3840&e)<<8,i=(240&e)<<4,n=15&e;e="#"+((1<<24)+(t<<4)+t+(i<<4)+i+(n<<4)+n).toString(16).slice(1)}return e}function w(e,t){if(!E(e))return e;var i=t>0?1:-1;"undefined"==typeof t&&(t=0),t=Math.abs(t)>1?1:Math.abs(t),e=V(e);for(var n=S(e),a=0;3>a;a++)n[a]=1===i?n[a]*(1-t)|0:(255-n[a])*t+n[a]|0;return"rgb("+n.join(",")+")"}function W(e){if(!E(e))return e;var t=S(u(e));return t=C(t,function(e){return 255-e}),c(t,"rgb")}function X(e,t,i){if(!E(e)||!E(t))return e;"undefined"==typeof i&&(i=.5),i=1-T(i,[0,1]);for(var n=2*i-1,a=S(u(e)),o=S(u(t)),r=a[3]-o[3],s=((n*r===-1?n:(n+r)/(1+n*r))+1)/2,l=1-s,h=[],d=0;3>d;d++)h[d]=a[d]*s+o[d]*l;var m=a[3]*i+o[3]*(1-i);return m=Math.max(0,Math.min(1,m)),1===a[3]&&1===o[3]?c(h,"rgb"):(h[3]=m,c(h,"rgba"))}function 
I(){return"#"+(Math.random().toString(16)+"0000").slice(2,8)}function S(e){e=L(e);var t=e.match(R);if(null===t)throw new Error("The color format error");var i,n,a,o=[];if(t[2])i=t[2].replace("#","").split(""),a=[i[0]+i[1],i[2]+i[3],i[4]+i[5]],o=C(a,function(e){return T(parseInt(e,16),[0,255])});else if(t[4]){var r=t[4].split(",");n=r[3],a=r.slice(0,3),o=C(a,function(e){return e=Math.floor(e.indexOf("%")>0?2.55*parseInt(e,0):e),T(e,[0,255])}),"undefined"!=typeof n&&o.push(T(parseFloat(n),[0,1]))}else if(t[5]||t[6]){var s=(t[5]||t[6]).split(","),l=parseInt(s[0],0)/360,h=s[1],d=s[2];n=s[3],o=C([h,d],function(e){return T(parseFloat(e)/100,[0,1])}),o.unshift(l),"undefined"!=typeof n&&o.push(T(parseFloat(n),[0,1]))}return o}function K(e,t){if(!E(e))return e;null===t&&(t=1);var i=S(u(e));return i[3]=T(Number(t).toFixed(4),[0,1]),c(i,"rgba")}function C(e,t){if("function"!=typeof t)throw new TypeError;for(var i=e?e.length:0,n=0;i>n;n++)e[n]=t(e[n]);return e}function T(e,t){return e<=t[0]?e=t[0]:e>=t[1]&&(e=t[1]),e}function E(e){return e instanceof Array||"string"==typeof e}function z(e){var t,i,n,a=e[0],o=e[1],r=e[2];if(0===o)t=255*r,i=255*r,n=255*r;else{var s=6*a;6===s&&(s=0);var l=0|s,h=r*(1-o),d=r*(1-o*(s-l)),c=r*(1-o*(1-(s-l))),m=0,p=0,u=0;0===l?(m=r,p=c,u=h):1===l?(m=d,p=r,u=h):2===l?(m=h,p=r,u=c):3===l?(m=h,p=d,u=r):4===l?(m=c,p=h,u=r):(m=r,p=h,u=d),t=255*m,i=255*p,n=255*u}return[t,i,n]}function A(e){var t,i,n,a=e[0],o=e[1],r=e[2];if(0===o)t=255*r,i=255*r,n=255*r;else{var s;s=.5>r?r*(1+o):r+o-o*r;var l=2*r-s;t=255*M(l,s,a+1/3),i=255*M(l,s,a),n=255*M(l,s,a-1/3)}return[t,i,n]}function M(e,t,i){return 0>i&&(i+=1),i>1&&(i-=1),1>6*i?e+6*(t-e)*i:1>2*i?t:2>3*i?e+(t-e)*(2/3-i)*6:e}function F(e){var t,i,n=e[0]/255,a=e[1]/255,o=e[2]/255,r=Math.min(n,a,o),s=Math.max(n,a,o),l=s-r,h=s;if(0===l)t=0,i=0;else{i=l/s;var d=((s-n)/6+l/2)/l,c=((s-a)/6+l/2)/l,m=((s-o)/6+l/2)/l;n===s?t=m-c:a===s?t=1/3+d-m:o===s&&(t=2/3+c-d),0>t&&(t+=1),t>1&&(t-=1)}return 
t=360*t,i=100*i,h=100*h,[t,i,h]}function J(e){var t,i,n=e[0]/255,a=e[1]/255,o=e[2]/255,r=Math.min(n,a,o),s=Math.max(n,a,o),l=s-r,h=(s+r)/2;if(0===l)t=0,i=0;else{i=.5>h?l/(s+r):l/(2-s-r);var d=((s-n)/6+l/2)/l,c=((s-a)/6+l/2)/l,m=((s-o)/6+l/2)/l;n===s?t=m-c:a===s?t=1/3+d-m:o===s&&(t=2/3+c-d),0>t&&(t+=1),t>1&&(t-=1)}return t=360*t,i=100*i,h=100*h,[t,i,h]}var O,P=e("../tool/util"),D=["#ff9277"," #dddd00"," #ffc877"," #bbe3ff"," #d5ffbb","#bbbbff"," #ddb000"," #b0dd00"," #e2bbff"," #ffbbe3","#ff7777"," #ff9900"," #83dd00"," #77e3ff"," #778fff","#c877ff"," #ff77ab"," #ff6600"," #aa8800"," #77c7ff","#ad77ff"," #ff77ff"," #dd0083"," #777700"," #00aa00","#0088aa"," #8400dd"," #aa0088"," #dd0000"," #772e00"],N=D,B="rgba(255,255,0,0.5)",H=B,R=/^\s*((#[a-f\d]{6})|(#[a-f\d]{3})|rgba?\(\s*([\d\.]+%?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+%?)?)\s*\)|hsba?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+)?)%?\s*\)|hsla?\(\s*([\d\.]+(?:deg|\xb0|%)?\s*,\s*[\d\.]+%?\s*,\s*[\d\.]+%?(?:\s*,\s*[\d\.]+)?)%?\s*\))\s*$/i,G={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#0ff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000",blanchedalmond:"#ffebcd",blue:"#00f",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#0ff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgrey:"#a9a9a9",darkgreen:"#006400",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228
b22",fuchsia:"#f0f",gainsboro:"#dcdcdc",ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",grey:"#808080",green:"#008000",greenyellow:"#adff2f",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgrey:"#d3d3d3",lightgreen:"#90ee90",lightpink:"#ffb6c1",lightsalmon:"#ffa07a",lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#789",lightslategrey:"#789",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#0f0",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#f0f",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370d8",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1",moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#d87093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#f00",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57",seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#fff",whitesmoke:"#f5f5f5",yellow:"#ff0",yellowgreen:"#9acd32"};return{customPalette:t,resetPalette:i,getColor:n,getHighl
ightColor:r,customHighlight:a,resetHighlight:o,getRadialGradient:s,getLinearGradient:l,getGradientColors:d,getStepColors:h,reverse:W,mix:X,lift:w,trim:v,random:I,toRGB:V,toRGBA:u,toHex:U,toHSL:x,toHSLA:_,toHSB:b,toHSBA:y,toHSV:f,toHSVA:g,toName:k,toColor:c,toArray:m,alpha:K,getData:S}}),define("echarts/component/timeline",["require","./base","zrender/shape/Rectangle","../util/shape/Icon","../util/shape/Chain","../config","zrender/tool/util","zrender/tool/area","zrender/tool/event","../component"],function(e){function t(e,t,i,a,o){n.call(this,e,t,i,a,o);var r=this;if(r._onclick=function(e){return r.__onclick(e)},r._ondrift=function(e,t){return r.__ondrift(this,e,t)},r._ondragend=function(){return r.__ondragend()},r._setCurrentOption=function(){var e=r.timelineOption;r.currentIndex%=e.data.length;var t=r.options[r.currentIndex]||{};r.myChart._setOption(t,e.notMerge,!0),r.messageCenter.dispatch(s.EVENT.TIMELINE_CHANGED,null,{currentIndex:r.currentIndex,data:null!=e.data[r.currentIndex].name?e.data[r.currentIndex].name:e.data[r.currentIndex]},r.myChart)},r._onFrame=function(){r._setCurrentOption(),r._syncHandleShape(),r.timelineOption.autoPlay&&(r.playTicket=setTimeout(function(){return r.currentIndex+=1,!r.timelineOption.loop&&r.currentIndex>=r.timelineOption.data.length?(r.currentIndex=r.timelineOption.data.length-1,void r.stop()):void r._onFrame()},r.timelineOption.playInterval))},this.setTheme(!1),this.options=this.option.options,this.currentIndex=this.timelineOption.currentIndex%this.timelineOption.data.length,this.timelineOption.notMerge||0===this.currentIndex||(this.options[this.currentIndex]=l.merge(this.options[this.currentIndex],this.options[0])),this.timelineOption.show&&(this._buildShape(),this._syncHandleShape()),this._setCurrentOption(),this.timelineOption.autoPlay){var r=this;this.playTicket=setTimeout(function(){r.play()},null!=this.ecTheme.animationDuration?this.ecTheme.animationDuration:s.animationDuration)}}function i(e,t){var 
i=2,n=t.x+i,a=t.y+i+2,r=t.width-i,s=t.height-i,l=t.symbol;if("last"===l)e.moveTo(n+r-2,a+s/3),e.lineTo(n+r-2,a),e.lineTo(n+2,a+s/2),e.lineTo(n+r-2,a+s),e.lineTo(n+r-2,a+s/3*2),e.moveTo(n,a),e.lineTo(n,a);else if("next"===l)e.moveTo(n+2,a+s/3),e.lineTo(n+2,a),e.lineTo(n+r-2,a+s/2),e.lineTo(n+2,a+s),e.lineTo(n+2,a+s/3*2),e.moveTo(n,a),e.lineTo(n,a);else if("play"===l)if("stop"===t.status)e.moveTo(n+2,a),e.lineTo(n+r-2,a+s/2),e.lineTo(n+2,a+s),e.lineTo(n+2,a);else{var h="both"===t.brushType?2:3;e.rect(n+2,a,h,s),e.rect(n+r-h-2,a,h,s)}else if(l.match("image")){var d="";d=l.replace(new RegExp("^image:\\/\\/"),""),l=o.prototype.iconLibrary.image,l(e,{x:n,y:a,width:r,height:s,image:d})}}var n=e("./base"),a=e("zrender/shape/Rectangle"),o=e("../util/shape/Icon"),r=e("../util/shape/Chain"),s=e("../config");s.timeline={zlevel:0,z:4,show:!0,type:"time",notMerge:!1,realtime:!0,x:80,x2:80,y2:0,height:50,backgroundColor:"rgba(0,0,0,0)",borderColor:"#ccc",borderWidth:0,padding:5,controlPosition:"left",autoPlay:!1,loop:!0,playInterval:2e3,lineStyle:{width:1,color:"#666",type:"dashed"},label:{show:!0,interval:"auto",rotate:0,textStyle:{color:"#333"}},checkpointStyle:{symbol:"auto",symbolSize:"auto",color:"auto",borderColor:"auto",borderWidth:"auto",label:{show:!1,textStyle:{color:"auto"}}},controlStyle:{itemSize:15,itemGap:5,normal:{color:"#333"},emphasis:{color:"#1e90ff"}},symbol:"emptyDiamond",symbolSize:4,currentIndex:0};var l=e("zrender/tool/util"),h=e("zrender/tool/area"),d=e("zrender/tool/event");return t.prototype={type:s.COMPONENT_TYPE_TIMELINE,_buildShape:function(){if(this._location=this._getLocation(),this._buildBackground(),this._buildControl(),this._chainPoint=this._getChainPoint(),this.timelineOption.label.show)for(var e=this._getInterval(),t=0,i=this._chainPoint.length;i>t;t+=e)this._chainPoint[t].showLabel=!0;this._buildChain(),this._buildHandle();for(var t=0,n=this.shapeList.length;n>t;t++)this.zr.addShape(this.shapeList[t])},_getLocation:function(){var 
e,t=this.timelineOption,i=this.reformCssArray(this.timelineOption.padding),n=this.zr.getWidth(),a=this.parsePercent(t.x,n),o=this.parsePercent(t.x2,n);null==t.width?(e=n-a-o,o=n-o):(e=this.parsePercent(t.width,n),o=a+e);var r,s,l=this.zr.getHeight(),h=this.parsePercent(t.height,l);return null!=t.y?(r=this.parsePercent(t.y,l),s=r+h):(s=l-this.parsePercent(t.y2,l),r=s-h),{x:a+i[3],y:r+i[0],x2:o-i[1],y2:s-i[2],width:e-i[1]-i[3],height:h-i[0]-i[2]}},_getReformedLabel:function(e){var t=this.timelineOption,i=null!=t.data[e].name?t.data[e].name:t.data[e],n=t.data[e].formatter||t.label.formatter;return n&&("function"==typeof n?i=n.call(this.myChart,i):"string"==typeof n&&(i=n.replace("{value}",i))),i},_getInterval:function(){var e=this._chainPoint,t=this.timelineOption,i=t.label.interval;if("auto"===i){var n=t.label.textStyle.fontSize,a=t.data,o=t.data.length;if(o>3){var r,s,l=!1;for(i=0;!l&&o>i;){i++,l=!0;for(var d=i;o>d;d+=i){if(r=e[d].x-e[d-i].x,0!==t.label.rotate)s=n;else if(a[d].textStyle)s=h.getTextWidth(e[d].name,e[d].textFont);else{var c=e[d].name+"",m=(c.match(/\w/g)||"").length,p=c.length-m;s=m*n*2/3+p*n}if(s>r){l=!1;break}}}}else i=1}else i=i-0+1;return i},_getChainPoint:function(){function e(e){return null!=h[e].name?h[e].name:h[e]+""}var t,i=this.timelineOption,n=i.symbol.toLowerCase(),a=i.symbolSize,o=i.label.rotate,r=i.label.textStyle,s=this.getFont(r),h=i.data,d=this._location.x,c=this._location.y+this._location.height/4*3,m=this._location.x2-this._location.x,p=h.length,u=[];if(p>1){var V=m/p;if(V=V>50?50:20>V?5:V,m-=2*V,"number"===i.type)for(var U=0;p>U;U++)u.push(d+V+m/(p-1)*U);else{u[0]=new Date(e(0).replace(/-/g,"/")),u[p-1]=new Date(e(p-1).replace(/-/g,"/"))-u[0];for(var U=1;p>U;U++)u[U]=d+V+m*(new Date(e(U).replace(/-/g,"/"))-u[0])/u[p-1];u[0]=d+V}}else u.push(d+m/2);for(var 
g,f,y,b,_,x=[],U=0;p>U;U++)d=u[U],g=h[U].symbol&&h[U].symbol.toLowerCase()||n,g.match("empty")?(g=g.replace("empty",""),y=!0):y=!1,g.match("star")&&(f=g.replace("star","")-0||5,g="star"),t=h[U].textStyle?l.merge(h[U].textStyle||{},r):r,b=t.align||"center",o?(b=o>0?"right":"left",_=[o*Math.PI/180,d,c-5]):_=!1,x.push({x:d,n:f,isEmpty:y,symbol:g,symbolSize:h[U].symbolSize||a,color:h[U].color,borderColor:h[U].borderColor,borderWidth:h[U].borderWidth,name:this._getReformedLabel(U),textColor:t.color,textAlign:b,textBaseline:t.baseline||"middle",textX:d,textY:c-(o?5:0),textFont:h[U].textStyle?this.getFont(t):s,rotation:_,showLabel:!1});return x},_buildBackground:function(){var e=this.timelineOption,t=this.reformCssArray(this.timelineOption.padding),i=this._location.width,n=this._location.height;(0!==e.borderWidth||"rgba(0,0,0,0)"!=e.backgroundColor.replace(/\s/g,""))&&this.shapeList.push(new a({zlevel:this.getZlevelBase(),z:this.getZBase(),hoverable:!1,style:{x:this._location.x-t[3],y:this._location.y-t[0],width:i+t[1]+t[3],height:n+t[0]+t[2],brushType:0===e.borderWidth?"fill":"both",color:e.backgroundColor,strokeColor:e.borderColor,lineWidth:e.borderWidth}}))},_buildControl:function(){var e=this,t=this.timelineOption,i=t.lineStyle,n=t.controlStyle;if("none"!==t.controlPosition){var a,r=n.itemSize,s=n.itemGap;"left"===t.controlPosition?(a=this._location.x,this._location.x+=3*(r+s)):(a=this._location.x2-(3*(r+s)-s),this._location.x2-=3*(r+s));var h=this._location.y,d={zlevel:this.getZlevelBase(),z:this.getZBase()+1,style:{iconType:"timelineControl",symbol:"last",x:a,y:h,width:r,height:r,brushType:"stroke",color:n.normal.color,strokeColor:n.normal.color,lineWidth:i.width},highlightStyle:{color:n.emphasis.color,strokeColor:n.emphasis.color,lineWidth:i.width+1},clickable:!0};this._ctrLastShape=new o(d),this._ctrLastShape.onclick=function(){e.last()},this.shapeList.push(this._ctrLastShape),a+=r+s,this._ctrPlayShape=new 
o(l.clone(d)),this._ctrPlayShape.style.brushType="fill",this._ctrPlayShape.style.symbol="play",this._ctrPlayShape.style.status=this.timelineOption.autoPlay?"playing":"stop",this._ctrPlayShape.style.x=a,this._ctrPlayShape.onclick=function(){"stop"===e._ctrPlayShape.style.status?e.play():e.stop()},this.shapeList.push(this._ctrPlayShape),a+=r+s,this._ctrNextShape=new o(l.clone(d)),this._ctrNextShape.style.symbol="next",this._ctrNextShape.style.x=a,this._ctrNextShape.onclick=function(){e.next()},this.shapeList.push(this._ctrNextShape)}},_buildChain:function(){var e=this.timelineOption,t=e.lineStyle;this._timelineShae={zlevel:this.getZlevelBase(),z:this.getZBase(),style:{x:this._location.x,y:this.subPixelOptimize(this._location.y,t.width),width:this._location.x2-this._location.x,height:this._location.height,chainPoint:this._chainPoint,brushType:"both",strokeColor:t.color,lineWidth:t.width,lineType:t.type},hoverable:!1,clickable:!0,onclick:this._onclick},this._timelineShae=new r(this._timelineShae),this.shapeList.push(this._timelineShae)},_buildHandle:function(){var e=this._chainPoint[this.currentIndex],t=e.symbolSize+1;t=5>t?5:t,this._handleShape={zlevel:this.getZlevelBase(),z:this.getZBase()+1,hoverable:!1,draggable:!0,style:{iconType:"diamond",n:e.n,x:e.x-t,y:this._location.y+this._location.height/4-t,width:2*t,height:2*t,brushType:"both",textPosition:"specific",textX:e.x,textY:this._location.y-this._location.height/4,textAlign:"center",textBaseline:"middle"},highlightStyle:{},ondrift:this._ondrift,ondragend:this._ondragend},this._handleShape=new o(this._handleShape),this.shapeList.push(this._handleShape)},_syncHandleShape:function(){if(this.timelineOption.show){var 
e=this.timelineOption,t=e.checkpointStyle,i=this._chainPoint[this.currentIndex];this._handleShape.style.text=t.label.show?i.name:"",this._handleShape.style.textFont=i.textFont,this._handleShape.style.n=i.n,"auto"===t.symbol?this._handleShape.style.iconType="none"!=i.symbol?i.symbol:"diamond":(this._handleShape.style.iconType=t.symbol,t.symbol.match("star")&&(this._handleShape.style.n=t.symbol.replace("star","")-0||5,this._handleShape.style.iconType="star"));var n;"auto"===t.symbolSize?(n=i.symbolSize+2,n=5>n?5:n):n=t.symbolSize-0,this._handleShape.style.color="auto"===t.color?i.color?i.color:e.controlStyle.emphasis.color:t.color,this._handleShape.style.textColor="auto"===t.label.textStyle.color?this._handleShape.style.color:t.label.textStyle.color,this._handleShape.highlightStyle.strokeColor=this._handleShape.style.strokeColor="auto"===t.borderColor?i.borderColor?i.borderColor:"#fff":t.borderColor,this._handleShape.style.lineWidth="auto"===t.borderWidth?i.borderWidth?i.borderWidth:0:t.borderWidth-0,this._handleShape.highlightStyle.lineWidth=this._handleShape.style.lineWidth+1,this.zr.animate(this._handleShape.id,"style").when(500,{x:i.x-n,textX:i.x,y:this._location.y+this._location.height/4-n,width:2*n,height:2*n}).start("ExponentialOut")}},_findChainIndex:function(e){var t=this._chainPoint,i=t.length;if(e<=t[0].x)return 0;if(e>=t[i-1].x)return i-1;for(var n=0;i-1>n;n++)if(e>=t[n].x&&e<=t[n+1].x)return Math.abs(e-t[n].x)<Math.abs(e-t[n+1].x)?n:n+1},__onclick:function(e){var t=d.getX(e.event),i=this._findChainIndex(t);return i===this.currentIndex?!0:(this.currentIndex=i,this.timelineOption.autoPlay&&this.stop(),clearTimeout(this.playTicket),void this._onFrame())},__ondrift:function(e,t){this.timelineOption.autoPlay&&this.stop();var 
i,n=this._chainPoint,a=n.length;e.style.x+t<=n[0].x-n[0].symbolSize?(e.style.x=n[0].x-n[0].symbolSize,i=0):e.style.x+t>=n[a-1].x-n[a-1].symbolSize?(e.style.x=n[a-1].x-n[a-1].symbolSize,i=a-1):(e.style.x+=t,i=this._findChainIndex(e.style.x));var o=n[i],r=o.symbolSize+2;if(e.style.iconType=o.symbol,e.style.n=o.n,e.style.textX=e.style.x+r/2,e.style.y=this._location.y+this._location.height/4-r,e.style.width=2*r,e.style.height=2*r,e.style.text=o.name,i===this.currentIndex)return!0;if(this.currentIndex=i,this.timelineOption.realtime){clearTimeout(this.playTicket);var s=this;this.playTicket=setTimeout(function(){s._setCurrentOption()},200)}return!0},__ondragend:function(){this.isDragend=!0},ondragend:function(e,t){this.isDragend&&e.target&&(!this.timelineOption.realtime&&this._setCurrentOption(),t.dragOut=!0,t.dragIn=!0,t.needRefresh=!1,this.isDragend=!1,this._syncHandleShape())},last:function(){return this.timelineOption.autoPlay&&this.stop(),this.currentIndex-=1,this.currentIndex<0&&(this.currentIndex=this.timelineOption.data.length-1),this._onFrame(),this.currentIndex},next:function(){return this.timelineOption.autoPlay&&this.stop(),this.currentIndex+=1,this.currentIndex>=this.timelineOption.data.length&&(this.currentIndex=0),
this._onFrame(),this.currentIndex},play:function(e,t){return this._ctrPlayShape&&"playing"!=this._ctrPlayShape.style.status&&(this._ctrPlayShape.style.status="playing",this.zr.modShape(this._ctrPlayShape.id),this.zr.refreshNextFrame()),this.timelineOption.autoPlay=null!=t?t:!0,this.timelineOption.autoPlay||clearTimeout(this.playTicket),this.currentIndex=null!=e?e:this.currentIndex+1,this.currentIndex>=this.timelineOption.data.length&&(this.currentIndex=0),this._onFrame(),this.currentIndex},stop:function(){return this._ctrPlayShape&&"stop"!=this._ctrPlayShape.style.status&&(this._ctrPlayShape.style.status="stop",this.zr.modShape(this._ctrPlayShape.id),this.zr.refreshNextFrame()),this.timelineOption.autoPlay=!1,clearTimeout(this.playTicket),this.currentIndex},resize:function(){this.timelineOption.show&&(this.clear(),this._buildShape(),this._syncHandleShape())},setTheme:function(e){this.timelineOption=this.reformOption(l.clone(this.option.timeline)),this.timelineOption.label.textStyle=this.getTextStyle(this.timelineOption.label.textStyle),this.timelineOption.checkpointStyle.label.textStyle=this.getTextStyle(this.timelineOption.checkpointStyle.label.textStyle),this.myChart.canvasSupported||(this.timelineOption.realtime=!1),this.timelineOption.show&&e&&(this.clear(),this._buildShape(),this._syncHandleShape())},onbeforDispose:function(){clearTimeout(this.playTicket)}},o.prototype.iconLibrary.timelineControl=i,l.inherits(t,n),e("../component").define("timeline",t),t}),define("zrender/shape/Image",["require","./Base","../tool/util"],function(e){var t=e("./Base"),i=function(e){t.call(this,e)};return i.prototype={type:"image",brush:function(e,t,i){var n=this.style||{};t&&(n=this.getHighlightStyle(n,this.highlightStyle||{}));var a=n.image,o=this;if(this._imageCache||(this._imageCache={}),"string"==typeof a){var r=a;this._imageCache[r]?a=this._imageCache[r]:(a=new 
Image,a.onload=function(){a.onload=null,o.modSelf(),i()},a.src=r,this._imageCache[r]=a)}if(a){if("IMG"==a.nodeName.toUpperCase())if(window.ActiveXObject){if("complete"!=a.readyState)return}else if(!a.complete)return;var s=n.width||a.width,l=n.height||a.height,h=n.x,d=n.y;if(!a.width||!a.height)return;if(e.save(),this.doClip(e),this.setContext(e,n),this.setTransform(e),n.sWidth&&n.sHeight){var c=n.sx||0,m=n.sy||0;e.drawImage(a,c,m,n.sWidth,n.sHeight,h,d,s,l)}else if(n.sx&&n.sy){var c=n.sx,m=n.sy,p=s-c,u=l-m;e.drawImage(a,c,m,p,u,h,d,s,l)}else e.drawImage(a,h,d,s,l);n.width||(n.width=s),n.height||(n.height=l),this.style.width||(this.style.width=s),this.style.height||(this.style.height=l),this.drawText(e,n,this.style),e.restore()}},getRect:function(e){return{x:e.x,y:e.y,width:e.width,height:e.height}},clearCache:function(){this._imageCache={}}},e("../tool/util").inherits(i,t),i}),define("zrender/loadingEffect/Bar",["require","./Base","../tool/util","../tool/color","../shape/Rectangle"],function(e){function t(e){i.call(this,e)}var i=e("./Base"),n=e("../tool/util"),a=e("../tool/color"),o=e("../shape/Rectangle");return n.inherits(t,i),t.prototype._start=function(e,t){var i=n.merge(this.options,{textStyle:{color:"#888"},backgroundColor:"rgba(250, 250, 250, 0.8)",effectOption:{x:0,y:this.canvasHeight/2-30,width:this.canvasWidth,height:5,brushType:"fill",timeInterval:100}}),r=this.createTextShape(i.textStyle),s=this.createBackgroundShape(i.backgroundColor),l=i.effectOption,h=new o({highlightStyle:n.clone(l)});return h.highlightStyle.color=l.color||a.getLinearGradient(l.x,l.y,l.x+l.width,l.y+l.height,[[0,"#ff6400"],[.5,"#ffe100"],[1,"#b1ff00"]]),null!=i.progress?(e(s),h.highlightStyle.width=this.adjust(i.progress,[0,1])*i.effectOption.width,e(h),e(r),void 
t()):(h.highlightStyle.width=0,setInterval(function(){e(s),h.highlightStyle.width<l.width?h.highlightStyle.width+=8:h.highlightStyle.width=0,e(h),e(r),t()},l.timeInterval))},t}),define("zrender/loadingEffect/Bubble",["require","./Base","../tool/util","../tool/color","../shape/Circle"],function(e){function t(e){i.call(this,e)}var i=e("./Base"),n=e("../tool/util"),a=e("../tool/color"),o=e("../shape/Circle");return n.inherits(t,i),t.prototype._start=function(e,t){for(var i=n.merge(this.options,{textStyle:{color:"#888"},backgroundColor:"rgba(250, 250, 250, 0.8)",effect:{n:50,lineWidth:2,brushType:"stroke",color:"random",timeInterval:100}}),r=this.createTextShape(i.textStyle),s=this.createBackgroundShape(i.backgroundColor),l=i.effect,h=l.n,d=l.brushType,c=l.lineWidth,m=[],p=this.canvasWidth,u=this.canvasHeight,V=0;h>V;V++){var U="random"==l.color?a.alpha(a.random(),.3):l.color;m[V]=new o({highlightStyle:{x:Math.ceil(Math.random()*p),y:Math.ceil(Math.random()*u),r:Math.ceil(40*Math.random()),brushType:d,color:U,strokeColor:U,lineWidth:c},animationY:Math.ceil(20*Math.random())})}return setInterval(function(){e(s);for(var i=0;h>i;i++){var n=m[i].highlightStyle;n.y-m[i].animationY+n.r<=0&&(m[i].highlightStyle.y=u+n.r,m[i].highlightStyle.x=Math.ceil(Math.random()*p)),m[i].highlightStyle.y-=m[i].animationY,e(m[i])}e(r),t()},l.timeInterval)},t}),define("zrender/loadingEffect/DynamicLine",["require","./Base","../tool/util","../tool/color","../shape/Line"],function(e){function t(e){i.call(this,e)}var i=e("./Base"),n=e("../tool/util"),a=e("../tool/color"),o=e("../shape/Line");return n.inherits(t,i),t.prototype._start=function(e,t){for(var i=n.merge(this.options,{textStyle:{color:"#fff"},backgroundColor:"rgba(0, 0, 0, 0.8)",effectOption:{n:30,lineWidth:1,color:"random",timeInterval:100}}),r=this.createTextShape(i.textStyle),s=this.createBackgroundShape(i.backgroundColor),l=i.effectOption,h=l.n,d=l.lineWidth,c=[],m=this.canvasWidth,p=this.canvasHeight,u=0;h>u;u++){var 
V=-Math.ceil(1e3*Math.random()),U=Math.ceil(400*Math.random()),g=Math.ceil(Math.random()*p),f="random"==l.color?a.random():l.color;c[u]=new o({highlightStyle:{xStart:V,yStart:g,xEnd:V+U,yEnd:g,strokeColor:f,lineWidth:d},animationX:Math.ceil(100*Math.random()),len:U})}return setInterval(function(){e(s);for(var i=0;h>i;i++){var n=c[i].highlightStyle;n.xStart>=m&&(c[i].len=Math.ceil(400*Math.random()),n.xStart=-400,n.xEnd=-400+c[i].len,n.yStart=Math.ceil(Math.random()*p),n.yEnd=n.yStart),n.xStart+=c[i].animationX,n.xEnd+=c[i].animationX,e(c[i])}e(r),t()},l.timeInterval)},t}),define("zrender/loadingEffect/Ring",["require","./Base","../tool/util","../tool/color","../shape/Ring","../shape/Sector"],function(e){function t(e){i.call(this,e)}var i=e("./Base"),n=e("../tool/util"),a=e("../tool/color"),o=e("../shape/Ring"),r=e("../shape/Sector");return n.inherits(t,i),t.prototype._start=function(e,t){var i=n.merge(this.options,{textStyle:{color:"#07a"},backgroundColor:"rgba(250, 250, 250, 0.8)",effect:{x:this.canvasWidth/2,y:this.canvasHeight/2,r0:60,r:100,color:"#bbdcff",brushType:"fill",textPosition:"inside",textFont:"normal 30px verdana",textColor:"rgba(30, 144, 255, 0.6)",timeInterval:100}}),s=i.effect,l=i.textStyle;null==l.x&&(l.x=s.x),null==l.y&&(l.y=s.y+(s.r0+s.r)/2-5);for(var h=this.createTextShape(i.textStyle),d=this.createBackgroundShape(i.backgroundColor),c=s.x,m=s.y,p=s.r0+6,u=s.r-6,V=s.color,U=a.lift(V,.1),g=new o({highlightStyle:n.clone(s)}),f=[],y=a.getGradientColors(["#ff6400","#ffe100","#97ff00"],25),b=15,_=240,x=0;16>x;x++)f.push(new r({highlightStyle:{x:c,y:m,r0:p,r:u,startAngle:_-b,endAngle:_,brushType:"fill",color:U},_color:a.getLinearGradient(c+p*Math.cos(_,!0),m-p*Math.sin(_,!0),c+p*Math.cos(_-b,!0),m-p*Math.sin(_-b,!0),[[0,y[2*x]],[1,y[2*x+1]]])})),_-=b;_=360;for(var x=0;4>x;x++)f.push(new 
r({highlightStyle:{x:c,y:m,r0:p,r:u,startAngle:_-b,endAngle:_,brushType:"fill",color:U},_color:a.getLinearGradient(c+p*Math.cos(_,!0),m-p*Math.sin(_,!0),c+p*Math.cos(_-b,!0),m-p*Math.sin(_-b,!0),[[0,y[2*x+32]],[1,y[2*x+33]]])})),_-=b;var k=0;if(null!=i.progress){e(d),k=100*this.adjust(i.progress,[0,1]).toFixed(2)/5,g.highlightStyle.text=5*k+"%",e(g);for(var x=0;20>x;x++)f[x].highlightStyle.color=k>x?f[x]._color:U,e(f[x]);return e(h),void t()}return setInterval(function(){e(d),k+=k>=20?-20:1,e(g);for(var i=0;20>i;i++)f[i].highlightStyle.color=k>i?f[i]._color:U,e(f[i]);e(h),t()},s.timeInterval)},t}),define("zrender/loadingEffect/Spin",["require","./Base","../tool/util","../tool/color","../tool/area","../shape/Sector"],function(e){function t(e){i.call(this,e)}var i=e("./Base"),n=e("../tool/util"),a=e("../tool/color"),o=e("../tool/area"),r=e("../shape/Sector");return n.inherits(t,i),t.prototype._start=function(e,t){var i=n.merge(this.options,{textStyle:{color:"#fff",textAlign:"start"},backgroundColor:"rgba(0, 0, 0, 0.8)"}),s=this.createTextShape(i.textStyle),l=10,h=o.getTextWidth(s.highlightStyle.text,s.highlightStyle.textFont),d=o.getTextHeight(s.highlightStyle.text,s.highlightStyle.textFont),c=n.merge(this.options.effect||{},{r0:9,r:15,n:18,color:"#fff",timeInterval:100}),m=this.getLocation(this.options.textStyle,h+l+2*c.r,Math.max(2*c.r,d));c.x=m.x+c.r,c.y=s.highlightStyle.y=m.y+m.height/2,s.highlightStyle.x=c.x+c.r+l;for(var p=this.createBackgroundShape(i.backgroundColor),u=c.n,V=c.x,U=c.y,g=c.r0,f=c.r,y=c.color,b=[],_=Math.round(180/u),x=0;u>x;x++)b[x]=new r({highlightStyle:{x:V,y:U,r0:g,r:f,startAngle:_*x*2,endAngle:_*x*2+_,color:a.alpha(y,(x+1)/u),brushType:"fill"}});var k=[0,V,U];return setInterval(function(){e(p),k[0]-=.3;for(var i=0;u>i;i++)b[i].rotation=k,e(b[i]);e(s),t()},c.timeInterval)},t}),define("zrender/loadingEffect/Whirling",["require","./Base","../tool/util","../tool/area","../shape/Ring","../shape/Droplet","../shape/Circle"],function(e){function 
t(e){i.call(this,e)}var i=e("./Base"),n=e("../tool/util"),a=e("../tool/area"),o=e("../shape/Ring"),r=e("../shape/Droplet"),s=e("../shape/Circle");return n.inherits(t,i),t.prototype._start=function(e,t){var i=n.merge(this.options,{textStyle:{color:"#888",textAlign:"start"},backgroundColor:"rgba(250, 250, 250, 0.8)"}),l=this.createTextShape(i.textStyle),h=10,d=a.getTextWidth(l.highlightStyle.text,l.highlightStyle.textFont),c=a.getTextHeight(l.highlightStyle.text,l.highlightStyle.textFont),m=n.merge(this.options.effect||{},{r:18,colorIn:"#fff",colorOut:"#555",colorWhirl:"#6cf",timeInterval:50}),p=this.getLocation(this.options.textStyle,d+h+2*m.r,Math.max(2*m.r,c));m.x=p.x+m.r,m.y=l.highlightStyle.y=p.y+p.height/2,l.highlightStyle.x=m.x+m.r+h;var u=this.createBackgroundShape(i.backgroundColor),V=new r({highlightStyle:{a:Math.round(m.r/2),b:Math.round(m.r-m.r/6),brushType:"fill",color:m.colorWhirl}}),U=new s({highlightStyle:{r:Math.round(m.r/6),brushType:"fill",color:m.colorIn}}),g=new o({highlightStyle:{r0:Math.round(m.r-m.r/3),r:m.r,brushType:"fill",color:m.colorOut}}),f=[0,m.x,m.y];return V.highlightStyle.x=U.highlightStyle.x=g.highlightStyle.x=f[1],V.highlightStyle.y=U.highlightStyle.y=g.highlightStyle.y=f[2],setInterval(function(){e(u),e(g),f[0]-=.3,V.rotation=f,e(V),e(U),e(l),t()},m.timeInterval)},t}),define("echarts/theme/macarons",[],function(){var 
e={color:["#2ec7c9","#b6a2de","#5ab1ef","#ffb980","#d87a80","#8d98b3","#e5cf0d","#97b552","#95706d","#dc69aa","#07a2a4","#9a7fd1","#588dd5","#f5994e","#c05050","#59678c","#c9ab00","#7eb00a","#6f5553","#c14089"],title:{textStyle:{fontWeight:"normal",color:"#008acd"}},dataRange:{itemWidth:15,color:["#5ab1ef","#e0ffff"]},toolbox:{color:["#1e90ff","#1e90ff","#1e90ff","#1e90ff"],effectiveColor:"#ff4500"},tooltip:{backgroundColor:"rgba(50,50,50,0.5)",axisPointer:{type:"line",lineStyle:{color:"#008acd"},crossStyle:{color:"#008acd"},shadowStyle:{color:"rgba(200,200,200,0.2)"}}},dataZoom:{dataBackgroundColor:"#efefff",fillerColor:"rgba(182,162,222,0.2)",handleColor:"#008acd"},grid:{borderColor:"#eee"},categoryAxis:{axisLine:{lineStyle:{color:"#008acd"}},splitLine:{lineStyle:{color:["#eee"]}}},valueAxis:{axisLine:{lineStyle:{color:"#008acd"}},splitArea:{show:!0,areaStyle:{color:["rgba(250,250,250,0.1)","rgba(200,200,200,0.1)"]}},splitLine:{lineStyle:{color:["#eee"]}}},polar:{axisLine:{lineStyle:{color:"#ddd"}},splitArea:{show:!0,areaStyle:{color:["rgba(250,250,250,0.2)","rgba(200,200,200,0.2)"]}},splitLine:{lineStyle:{color:"#ddd"}}},timeline:{lineStyle:{color:"#008acd"},controlStyle:{normal:{color:"#008acd"},emphasis:{color:"#008acd"}},symbol:"emptyCircle",symbolSize:3},bar:{itemStyle:{normal:{barBorderRadius:5},emphasis:{barBorderRadius:5}}},line:{smooth:!0,symbol:"emptyCircle",symbolSize:3},k:{itemStyle:{normal:{color:"#d87a80",color0:"#2ec7c9",lineStyle:{color:"#d87a80",color0:"#2ec7c9"}}}},scatter:{symbol:"circle",symbolSize:4},radar:{symbol:"emptyCircle",symbolSize:3},map:{itemStyle:{normal:{areaStyle:{color:"#ddd"},label:{textStyle:{color:"#d87a80"}}},emphasis:{areaStyle:{color:"#fe994e"}}}},force:{itemStyle:{normal:{linkStyle:{color:"#1e90ff"}}}},chord:{itemStyle:{normal:{borderWidth:1,borderColor:"rgba(128, 128, 128, 0.5)",chordStyle:{lineStyle:{color:"rgba(128, 128, 128, 0.5)"}}},emphasis:{borderWidth:1,borderColor:"rgba(128, 128, 128, 
0.5)",chordStyle:{lineStyle:{color:"rgba(128, 128, 128, 0.5)"}}}}},gauge:{axisLine:{lineStyle:{color:[[.2,"#2ec7c9"],[.8,"#5ab1ef"],[1,"#d87a80"]],width:10}},axisTick:{splitNumber:10,length:15,lineStyle:{color:"auto"}},splitLine:{length:22,lineStyle:{color:"auto"}},pointer:{width:5}},textStyle:{fontFamily:"微软雅黑, Arial, Verdana, sans-serif"}};return e}),define("echarts/theme/infographic",[],function(){var e={color:["#C1232B","#B5C334","#FCCE10","#E87C25","#27727B","#FE8463","#9BCA63","#FAD860","#F3A43B","#60C0DD","#D7504B","#C6E579","#F4E001","#F0805A","#26C0C0"],title:{textStyle:{fontWeight:"normal",color:"#27727B"}},dataRange:{x:"right",y:"center",itemWidth:5,itemHeight:25,color:["#C1232B","#FCCE10"]},toolbox:{color:["#C1232B","#B5C334","#FCCE10","#E87C25","#27727B","#FE8463","#9BCA63","#FAD860","#F3A43B","#60C0DD"],effectiveColor:"#ff4500"},tooltip:{backgroundColor:"rgba(50,50,50,0.5)",axisPointer:{type:"line",lineStyle:{color:"#27727B",type:"dashed"},crossStyle:{color:"#27727B"},shadowStyle:{color:"rgba(200,200,200,0.3)"}}},dataZoom:{dataBackgroundColor:"rgba(181,195,52,0.3)",fillerColor:"rgba(181,195,52,0.2)",handleColor:"#27727B"},grid:{borderWidth:0},categoryAxis:{axisLine:{lineStyle:{color:"#27727B"}},splitLine:{show:!1}},valueAxis:{axisLine:{show:!1},splitArea:{show:!1},splitLine:{lineStyle:{color:["#ccc"],type:"dashed"}}},polar:{axisLine:{lineStyle:{color:"#ddd"}},splitArea:{show:!0,areaStyle:{color:["rgba(250,250,250,0.2)","rgba(200,200,200,0.2)"]}},splitLine:{lineStyle:{color:"#ddd"}}},timeline:{lineStyle:{color:"#27727B"},controlStyle:{normal:{color:"#27727B"},emphasis:{color:"#27727B"}},symbol:"emptyCircle",symbolSize:3},line:{itemStyle:{normal:{borderWidth:2,borderColor:"#fff",lineStyle:{width:3}},emphasis:{borderWidth:0}},symbol:"circle",symbolSize:3.5},k:{itemStyle:{normal:{color:"#C1232B",color0:"#B5C334",lineStyle:{width:1,color:"#C1232B",color0:"#B5C334"}}}},scatter:{itemStyle:{normal:{borderWidth:1,borderColor:"rgba(200,200,200,0.5)"},emphasis:{b
orderWidth:0}},symbol:"star4",symbolSize:4},radar:{symbol:"emptyCircle",symbolSize:3},map:{itemStyle:{normal:{areaStyle:{color:"#ddd"},label:{textStyle:{color:"#C1232B"}}},emphasis:{areaStyle:{color:"#fe994e"},label:{textStyle:{color:"rgb(100,0,0)"}}}}},force:{itemStyle:{normal:{linkStyle:{color:"#27727B"}}}},chord:{itemStyle:{normal:{borderWidth:1,borderColor:"rgba(128, 128, 128, 0.5)",chordStyle:{lineStyle:{color:"rgba(128, 128, 128, 0.5)"}}},emphasis:{borderWidth:1,borderColor:"rgba(128, 128, 128, 0.5)",chordStyle:{lineStyle:{color:"rgba(128, 128, 128, 0.5)"}}}}},gauge:{center:["50%","80%"],radius:"100%",startAngle:180,endAngle:0,axisLine:{show:!0,lineStyle:{color:[[.2,"#B5C334"],[.8,"#27727B"],[1,"#C1232B"]],width:"40%"}},axisTick:{splitNumber:2,length:5,lineStyle:{color:"#fff"}},axisLabel:{textStyle:{color:"#fff",fontWeight:"bolder"}},splitLine:{length:"5%",lineStyle:{color:"#fff"}},pointer:{width:"40%",length:"80%",color:"#fff"},title:{offsetCenter:[0,-20],textStyle:{color:"auto",fontSize:20}},detail:{offsetCenter:[0,0],textStyle:{color:"auto",fontSize:40}}},textStyle:{fontFamily:"微软雅黑, Arial, Verdana, sans-serif"}};return e}),define("zrender/dep/excanvas",["require"],function(){return document.createElement("canvas").getContext?G_vmlCanvasManager=!1:!function(){function e(){return this.context_||(this.context_=new b(this))}function t(e,t){var i=O.call(arguments,2);return function(){return e.apply(t,i.concat(O.call(arguments)))}}function i(e){return String(e).replace(/&/g,"&").replace(/"/g,""")}function n(e,t,i){e.namespaces[t]||e.namespaces.add(t,i,"#default#VML")}function a(e){if(n(e,"g_vml_","urn:schemas-microsoft-com:vml"),n(e,"g_o_","urn:schemas-microsoft-com:office:office"),!e.styleSheets.ex_canvas_){var t=e.createStyleSheet();t.owningElement.id="ex_canvas_",t.cssText="canvas{display:inline-block;overflow:hidden;text-align:left;width:300px;height:150px}"}}function o(e){var 
t=e.srcElement;switch(e.propertyName){case"width":t.getContext().clearRect(),t.style.width=t.attributes.width.nodeValue+"px",t.firstChild.style.width=t.clientWidth+"px";break;case"height":t.getContext().clearRect(),t.style.height=t.attributes.height.nodeValue+"px",t.firstChild.style.height=t.clientHeight+"px"}}function r(e){var t=e.srcElement;t.firstChild&&(t.firstChild.style.width=t.clientWidth+"px",t.firstChild.style.height=t.clientHeight+"px")}function s(){return[[1,0,0],[0,1,0],[0,0,1]]}function l(e,t){for(var i=s(),n=0;3>n;n++)for(var a=0;3>a;a++){for(var o=0,r=0;3>r;r++)o+=e[n][r]*t[r][a];i[n][a]=o}return i}function h(e,t){t.fillStyle=e.fillStyle,t.lineCap=e.lineCap,t.lineJoin=e.lineJoin,t.lineWidth=e.lineWidth,t.miterLimit=e.miterLimit,t.shadowBlur=e.shadowBlur,t.shadowColor=e.shadowColor,t.shadowOffsetX=e.shadowOffsetX,t.shadowOffsetY=e.shadowOffsetY,t.strokeStyle=e.strokeStyle,t.globalAlpha=e.globalAlpha,t.font=e.font,t.textAlign=e.textAlign,t.textBaseline=e.textBaseline,t.scaleX_=e.scaleX_,t.scaleY_=e.scaleY_,t.lineScale_=e.lineScale_}function d(e){var t=e.indexOf("(",3),i=e.indexOf(")",t+1),n=e.substring(t+1,i).split(",");return(4!=n.length||"a"!=e.charAt(3))&&(n[3]=1),n}function c(e){return parseFloat(e)/100}function m(e,t,i){return Math.min(i,Math.max(t,e))}function p(e){var t,i,n,a,o,r;if(a=parseFloat(e[0])/360%360,0>a&&a++,o=m(c(e[1]),0,1),r=m(c(e[2]),0,1),0==o)t=i=n=r;else{var s=.5>r?r*(1+o):r+o-r*o,l=2*r-s;t=u(l,s,a+1/3),i=u(l,s,a),n=u(l,s,a-1/3)}return"#"+D[Math.floor(255*t)]+D[Math.floor(255*i)]+D[Math.floor(255*n)]}function u(e,t,i){return 0>i&&i++,i>1&&i--,1>6*i?e+6*(t-e)*i:1>2*i?t:2>3*i?e+(t-e)*(2/3-i)*6:e}function V(e){if(e in R)return R[e];var t,i=1;if(e=String(e),"#"==e.charAt(0))t=e;else if(/^rgb/.test(e)){for(var n,a=d(e),t="#",o=0;3>o;o++)n=-1!=a[o].indexOf("%")?Math.floor(255*c(a[o])):+a[o],t+=D[m(n,0,255)];i=+a[3]}else if(/^hsl/.test(e)){var a=d(e);t=p(a),i=a[3]}else t=H[e]||e;return R[e]={color:t,alpha:i}}function U(e){if(Y[e])return 
Y[e];var t,i=document.createElement("div"),n=i.style;try{n.font=e,t=n.fontFamily.split(",")[0]}catch(a){}return Y[e]={style:n.fontStyle||G.style,variant:n.fontVariant||G.variant,weight:n.fontWeight||G.weight,size:n.fontSize||G.size,family:t||G.family}}function g(e,t){var i={};for(var n in e)i[n]=e[n];var a=parseFloat(t.currentStyle.fontSize),o=parseFloat(e.size);return i.size="number"==typeof e.size?e.size:-1!=e.size.indexOf("px")?o:-1!=e.size.indexOf("em")?a*o:-1!=e.size.indexOf("%")?a/100*o:-1!=e.size.indexOf("pt")?o/.75:a,i}function f(e){return e.style+" "+e.variant+" "+e.weight+" "+e.size+"px '"+e.family+"'"}function y(e){return Z[e]||"square"}function b(e){this.m_=s(),this.mStack_=[],this.aStack_=[],this.currentPath_=[],this.strokeStyle="#000",this.fillStyle="#000",this.lineWidth=1,this.lineJoin="miter",this.lineCap="butt",this.miterLimit=1*F,this.globalAlpha=1,this.font="12px 微软雅黑",this.textAlign="left",this.textBaseline="alphabetic",this.canvas=e;var t="width:"+e.clientWidth+"px;height:"+e.clientHeight+"px;overflow:hidden;position:absolute",i=e.ownerDocument.createElement("div");i.style.cssText=t,e.appendChild(i);var n=i.cloneNode(!1);n.style.backgroundColor="#fff",n.style.filter="alpha(opacity=0)",e.appendChild(n),this.element_=i,this.scaleX_=1,this.scaleY_=1,this.lineScale_=1}function _(e,t,i,n){e.currentPath_.push({type:"bezierCurveTo",cp1x:t.x,cp1y:t.y,cp2x:i.x,cp2y:i.y,x:n.x,y:n.y}),e.currentX_=n.x,e.currentY_=n.y}function x(e,t){var i=V(e.strokeStyle),n=i.color,a=i.alpha*e.globalAlpha,o=e.lineScale_*e.lineWidth;1>o&&(a*=o),t.push("<g_vml_:stroke",' opacity="',a,'"',' joinstyle="',e.lineJoin,'"',' miterlimit="',e.miterLimit,'"',' endcap="',y(e.lineCap),'"',' weight="',o,'px"',' color="',n,'" />')}function k(e,t,i,n){var a=e.fillStyle,o=e.scaleX_,r=e.scaleY_,s=n.x-i.x,l=n.y-i.y;if(a instanceof W){var h=0,d={x:0,y:0},c=0,m=1;if("gradient"==a.type_){var 
p=a.x0_/o,u=a.y0_/r,U=a.x1_/o,g=a.y1_/r,f=v(e,p,u),y=v(e,U,g),b=y.x-f.x,_=y.y-f.y;h=180*Math.atan2(b,_)/Math.PI,0>h&&(h+=360),1e-6>h&&(h=0)}else{var f=v(e,a.x0_,a.y0_);d={x:(f.x-i.x)/s,y:(f.y-i.y)/l},s/=o*F,l/=r*F;var x=C.max(s,l);c=2*a.r0_/x,m=2*a.r1_/x-c}var k=a.colors_;k.sort(function(e,t){return e.offset-t.offset});for(var L=k.length,w=k[0].color,I=k[L-1].color,S=k[0].alpha*e.globalAlpha,K=k[L-1].alpha*e.globalAlpha,T=[],E=0;L>E;E++){var z=k[E];T.push(z.offset*m+c+" "+z.color)}t.push('<g_vml_:fill type="',a.type_,'"',' method="none" focus="100%"',' color="',w,'"',' color2="',I,'"',' colors="',T.join(","),'"',' opacity="',K,'"',' g_o_:opacity2="',S,'"',' angle="',h,'"',' focusposition="',d.x,",",d.y,'" />')}else if(a instanceof X){if(s&&l){var A=-i.x,M=-i.y;t.push("<g_vml_:fill",' position="',A/s*o*o,",",M/l*r*r,'"',' type="tile"',' src="',a.src_,'" />')}}else{var J=V(e.fillStyle),O=J.color,P=J.alpha*e.globalAlpha;t.push('<g_vml_:fill color="',O,'" opacity="',P,'" />')}}function v(e,t,i){var n=e.m_;return{x:F*(t*n[0][0]+i*n[1][0]+n[2][0])-J,y:F*(t*n[0][1]+i*n[1][1]+n[2][1])-J}}function L(e){return isFinite(e[0][0])&&isFinite(e[0][1])&&isFinite(e[1][0])&&isFinite(e[1][1])&&isFinite(e[2][0])&&isFinite(e[2][1])}function w(e,t,i){if(L(t)&&(e.m_=t,e.scaleX_=Math.sqrt(t[0][0]*t[0][0]+t[0][1]*t[0][1]),e.scaleY_=Math.sqrt(t[1][0]*t[1][0]+t[1][1]*t[1][1]),i)){var n=t[0][0]*t[1][1]-t[0][1]*t[1][0];e.lineScale_=M(A(n))}}function W(e){this.type_=e,this.x0_=0,this.y0_=0,this.r0_=0,this.x1_=0,this.y1_=0,this.r1_=0,this.colors_=[]}function X(e,t){switch(S(e),t){case"repeat":case null:case"":this.repetition_="repeat";break;case"repeat-x":case"repeat-y":case"no-repeat":this.repetition_=t;break;default:I("SYNTAX_ERR")}this.src_=e.src,this.width_=e.width,this.height_=e.height}function I(e){throw new K(e)}function S(e){e&&1==e.nodeType&&"IMG"==e.tagName||I("TYPE_MISMATCH_ERR"),"complete"!=e.readyState&&I("INVALID_STATE_ERR")}function K(e){this.code=this[e],this.message=e+": DOM 
Exception "+this.code}var C=Math,T=C.round,E=C.sin,z=C.cos,A=C.abs,M=C.sqrt,F=10,J=F/2,O=(+navigator.userAgent.match(/MSIE ([\d.]+)?/)[1],Array.prototype.slice);a(document);var P={init:function(e){var i=e||document;i.createElement("canvas"),i.attachEvent("onreadystatechange",t(this.init_,this,i))},init_:function(e){for(var t=e.getElementsByTagName("canvas"),i=0;i<t.length;i++)this.initElement(t[i])},initElement:function(t){if(!t.getContext){t.getContext=e,a(t.ownerDocument),t.innerHTML="",t.attachEvent("onpropertychange",o),t.attachEvent("onresize",r);var i=t.attributes;i.width&&i.width.specified?t.style.width=i.width.nodeValue+"px":t.width=t.clientWidth,i.height&&i.height.specified?t.style.height=i.height.nodeValue+"px":t.height=t.clientHeight}return t}};P.init();for(var D=[],N=0;16>N;N++)for(var B=0;16>B;B++)D[16*N+B]=N.toString(16)+B.toString(16);var H={aliceblue:"#F0F8FF",antiquewhite:"#FAEBD7",aquamarine:"#7FFFD4",azure:"#F0FFFF",beige:"#F5F5DC",bisque:"#FFE4C4",black:"#000000",blanchedalmond:"#FFEBCD",blueviolet:"#8A2BE2",brown:"#A52A2A",burlywood:"#DEB887",cadetblue:"#5F9EA0",chartreuse:"#7FFF00",chocolate:"#D2691E",coral:"#FF7F50",cornflowerblue:"#6495ED",cornsilk:"#FFF8DC",crimson:"#DC143C",cyan:"#00FFFF",darkblue:"#00008B",darkcyan:"#008B8B",darkgoldenrod:"#B8860B",darkgray:"#A9A9A9",darkgreen:"#006400",darkgrey:"#A9A9A9",darkkhaki:"#BDB76B",darkmagenta:"#8B008B",darkolivegreen:"#556B2F",darkorange:"#FF8C00",darkorchid:"#9932CC",darkred:"#8B0000",darksalmon:"#E9967A",darkseagreen:"#8FBC8F",darkslateblue:"#483D8B",darkslategray:"#2F4F4F",darkslategrey:"#2F4F4F",darkturquoise:"#00CED1",darkviolet:"#9400D3",deeppink:"#FF1493",deepskyblue:"#00BFFF",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1E90FF",firebrick:"#B22222",floralwhite:"#FFFAF0",forestgreen:"#228B22",gainsboro:"#DCDCDC",ghostwhite:"#F8F8FF",gold:"#FFD700",goldenrod:"#DAA520",grey:"#808080",greenyellow:"#ADFF2F",honeydew:"#F0FFF0",hotpink:"#FF69B4",indianred:"#CD5C5C",indigo:"#4B0082",ivory:"#F
FFFF0",khaki:"#F0E68C",lavender:"#E6E6FA",lavenderblush:"#FFF0F5",lawngreen:"#7CFC00",lemonchiffon:"#FFFACD",lightblue:"#ADD8E6",lightcoral:"#F08080",lightcyan:"#E0FFFF",lightgoldenrodyellow:"#FAFAD2",lightgreen:"#90EE90",lightgrey:"#D3D3D3",lightpink:"#FFB6C1",lightsalmon:"#FFA07A",lightseagreen:"#20B2AA",lightskyblue:"#87CEFA",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#B0C4DE",lightyellow:"#FFFFE0",limegreen:"#32CD32",linen:"#FAF0E6",magenta:"#FF00FF",mediumaquamarine:"#66CDAA",mediumblue:"#0000CD",mediumorchid:"#BA55D3",mediumpurple:"#9370DB",mediumseagreen:"#3CB371",mediumslateblue:"#7B68EE",mediumspringgreen:"#00FA9A",mediumturquoise:"#48D1CC",mediumvioletred:"#C71585",midnightblue:"#191970",mintcream:"#F5FFFA",mistyrose:"#FFE4E1",moccasin:"#FFE4B5",navajowhite:"#FFDEAD",oldlace:"#FDF5E6",olivedrab:"#6B8E23",orange:"#FFA500",orangered:"#FF4500",orchid:"#DA70D6",palegoldenrod:"#EEE8AA",palegreen:"#98FB98",paleturquoise:"#AFEEEE",palevioletred:"#DB7093",papayawhip:"#FFEFD5",peachpuff:"#FFDAB9",peru:"#CD853F",pink:"#FFC0CB",plum:"#DDA0DD",powderblue:"#B0E0E6",rosybrown:"#BC8F8F",royalblue:"#4169E1",saddlebrown:"#8B4513",salmon:"#FA8072",sandybrown:"#F4A460",seagreen:"#2E8B57",seashell:"#FFF5EE",sienna:"#A0522D",skyblue:"#87CEEB",slateblue:"#6A5ACD",slategray:"#708090",slategrey:"#708090",snow:"#FFFAFA",springgreen:"#00FF7F",steelblue:"#4682B4",tan:"#D2B48C",thistle:"#D8BFD8",tomato:"#FF6347",turquoise:"#40E0D0",violet:"#EE82EE",wheat:"#F5DEB3",whitesmoke:"#F5F5F5",yellowgreen:"#9ACD32"},R={},G={style:"normal",variant:"normal",weight:"normal",size:12,family:"微软雅黑"},Y={},Z={butt:"flat",round:"round"},Q=b.prototype;Q.clearRect=function(){this.textMeasureEl_&&(this.textMeasureEl_.removeNode(!0),this.textMeasureEl_=null),this.element_.innerHTML=""},Q.beginPath=function(){this.currentPath_=[]},Q.moveTo=function(e,t){var 
i=v(this,e,t);this.currentPath_.push({type:"moveTo",x:i.x,y:i.y}),this.currentX_=i.x,this.currentY_=i.y},Q.lineTo=function(e,t){var i=v(this,e,t);this.currentPath_.push({type:"lineTo",x:i.x,y:i.y}),this.currentX_=i.x,this.currentY_=i.y},Q.bezierCurveTo=function(e,t,i,n,a,o){var r=v(this,a,o),s=v(this,e,t),l=v(this,i,n);_(this,s,l,r)},Q.quadraticCurveTo=function(e,t,i,n){var a=v(this,e,t),o=v(this,i,n),r={x:this.currentX_+2/3*(a.x-this.currentX_),y:this.currentY_+2/3*(a.y-this.currentY_)},s={x:r.x+(o.x-this.currentX_)/3,y:r.y+(o.y-this.currentY_)/3};_(this,r,s,o)},Q.arc=function(e,t,i,n,a,o){i*=F;var r=o?"at":"wa",s=e+z(n)*i-J,l=t+E(n)*i-J,h=e+z(a)*i-J,d=t+E(a)*i-J;s!=h||o||(s+=.125);var c=v(this,e,t),m=v(this,s,l),p=v(this,h,d);this.currentPath_.push({type:r,x:c.x,y:c.y,radius:i,xStart:m.x,yStart:m.y,xEnd:p.x,yEnd:p.y})},Q.rect=function(e,t,i,n){this.moveTo(e,t),this.lineTo(e+i,t),this.lineTo(e+i,t+n),this.lineTo(e,t+n),this.closePath()},Q.strokeRect=function(e,t,i,n){var a=this.currentPath_;this.beginPath(),this.moveTo(e,t),this.lineTo(e+i,t),this.lineTo(e+i,t+n),this.lineTo(e,t+n),this.closePath(),this.stroke(),this.currentPath_=a},Q.fillRect=function(e,t,i,n){var a=this.currentPath_;this.beginPath(),this.moveTo(e,t),this.lineTo(e+i,t),this.lineTo(e+i,t+n),this.lineTo(e,t+n),this.closePath(),this.fill(),this.currentPath_=a},Q.createLinearGradient=function(e,t,i,n){var a=new W("gradient");return a.x0_=e,a.y0_=t,a.x1_=i,a.y1_=n,a},Q.createRadialGradient=function(e,t,i,n,a,o){var r=new W("gradientradial");return r.x0_=e,r.y0_=t,r.r0_=i,r.x1_=n,r.y1_=a,r.r1_=o,r},Q.drawImage=function(e){var t,i,n,a,o,r,s,l,h=e.runtimeStyle.width,d=e.runtimeStyle.height;e.runtimeStyle.width="auto",e.runtimeStyle.height="auto";var c=e.width,m=e.height;if(e.runtimeStyle.width=h,e.runtimeStyle.height=d,3==arguments.length)t=arguments[1],i=arguments[2],o=r=0,s=n=c,l=a=m;else 
if(5==arguments.length)t=arguments[1],i=arguments[2],n=arguments[3],a=arguments[4],o=r=0,s=c,l=m;else{if(9!=arguments.length)throw Error("Invalid number of arguments");o=arguments[1],r=arguments[2],s=arguments[3],l=arguments[4],t=arguments[5],i=arguments[6],n=arguments[7],a=arguments[8]}var p=v(this,t,i),u=[],V=10,U=10,g=y=1;if(u.push(" <g_vml_:group",' coordsize="',F*V,",",F*U,'"',' coordorigin="0,0"',' style="width:',V,"px;height:",U,"px;position:absolute;"),1!=this.m_[0][0]||this.m_[0][1]||1!=this.m_[1][1]||this.m_[1][0]){var f=[],g=this.scaleX_,y=this.scaleY_;f.push("M11=",this.m_[0][0]/g,",","M12=",this.m_[1][0]/y,",","M21=",this.m_[0][1]/g,",","M22=",this.m_[1][1]/y,",","Dx=",T(p.x/F),",","Dy=",T(p.y/F),"");var b=p,_=v(this,t+n,i),x=v(this,t,i+a),k=v(this,t+n,i+a);b.x=C.max(b.x,_.x,x.x,k.x),b.y=C.max(b.y,_.y,x.y,k.y),u.push("padding:0 ",T(b.x/F),"px ",T(b.y/F),"px 0;filter:progid:DXImageTransform.Microsoft.Matrix(",f.join(""),", SizingMethod='clip');")}else u.push("top:",T(p.y/F),"px;left:",T(p.x/F),"px;");u.push(' ">'),(o||r)&&u.push('<div style="overflow: hidden; width:',Math.ceil((n+o*n/s)*g),"px;"," height:",Math.ceil((a+r*a/l)*y),"px;"," filter:progid:DxImageTransform.Microsoft.Matrix(Dx=",-o*n/s*g,",Dy=",-r*a/l*y,');">'),u.push('<div style="width:',Math.round(g*c*n/s),"px;"," height:",Math.round(y*m*a/l),"px;"," filter:"),this.globalAlpha<1&&u.push(" progid:DXImageTransform.Microsoft.Alpha(opacity="+100*this.globalAlpha+")"),u.push(" progid:DXImageTransform.Microsoft.AlphaImageLoader(src=",e.src,',sizingMethod=scale)">'),(o||r)&&u.push("</div>"),u.push("</div></div>"),this.element_.insertAdjacentHTML("BeforeEnd",u.join(""))},Q.stroke=function(e){var t=[],i=10,n=10;t.push("<g_vml_:shape",' filled="',!!e,'"',' style="position:absolute;width:',i,"px;height:",n,'px;"',' coordorigin="0,0"',' coordsize="',F*i,",",F*n,'"',' stroked="',!e,'"',' path="');for(var a={x:null,y:null},o={x:null,y:null},r=0;r<this.currentPath_.length;r++){var 
s,l=this.currentPath_[r];switch(l.type){case"moveTo":s=l,t.push(" m ",T(l.x),",",T(l.y));break;case"lineTo":t.push(" l ",T(l.x),",",T(l.y));break;case"close":t.push(" x "),l=null;break;case"bezierCurveTo":t.push(" c ",T(l.cp1x),",",T(l.cp1y),",",T(l.cp2x),",",T(l.cp2y),",",T(l.x),",",T(l.y));break;case"at":case"wa":t.push(" ",l.type," ",T(l.x-this.scaleX_*l.radius),",",T(l.y-this.scaleY_*l.radius)," ",T(l.x+this.scaleX_*l.radius),",",T(l.y+this.scaleY_*l.radius)," ",T(l.xStart),",",T(l.yStart)," ",T(l.xEnd),",",T(l.yEnd))}l&&((null==a.x||l.x<a.x)&&(a.x=l.x),(null==o.x||l.x>o.x)&&(o.x=l.x),(null==a.y||l.y<a.y)&&(a.y=l.y),(null==o.y||l.y>o.y)&&(o.y=l.y))}t.push(' ">'),e?k(this,t,a,o):x(this,t),t.push("</g_vml_:shape>"),this.element_.insertAdjacentHTML("beforeEnd",t.join(""))},Q.fill=function(){this.stroke(!0)},Q.closePath=function(){this.currentPath_.push({type:"close"})},Q.save=function(){var e={};h(this,e),this.aStack_.push(e),this.mStack_.push(this.m_),this.m_=l(s(),this.m_)},Q.restore=function(){this.aStack_.length&&(h(this.aStack_.pop(),this),this.m_=this.mStack_.pop())},Q.translate=function(e,t){var i=[[1,0,0],[0,1,0],[e,t,1]];w(this,l(i,this.m_),!1)},Q.rotate=function(e){var t=z(e),i=E(e),n=[[t,i,0],[-i,t,0],[0,0,1]];w(this,l(n,this.m_),!1)},Q.scale=function(e,t){var i=[[e,0,0],[0,t,0],[0,0,1]];w(this,l(i,this.m_),!0)},Q.transform=function(e,t,i,n,a,o){var r=[[e,t,0],[i,n,0],[a,o,1]];w(this,l(r,this.m_),!0)},Q.setTransform=function(e,t,i,n,a,o){var r=[[e,t,0],[i,n,0],[a,o,1]];w(this,r,!0)},Q.drawText_=function(e,t,n,a,o){var r=this.m_,s=1e3,l=0,h=s,d={x:0,y:0},c=[],m=g(U(this.font),this.element_),p=f(m),u=this.element_.currentStyle,V=this.textAlign.toLowerCase();
|
evaluate.py
|
import tensorflow as tf
from baselines.ppo2 import ppo2
from baselines.common.models import build_impala_cnn
from baselines.common.mpi_util import setup_mpi_gpus
from procgen import ProcgenEnv
from baselines.common.vec_env import (
VecExtractDictObs,
VecMonitor,
VecFrameStack,
VecNormalize
)
from baselines import logger
from mpi4py import MPI
import argparse
from .alternate_ppo2 import alt_ppo2
import os
from baselines.common import set_global_seeds
from baselines.common.policies import build_policy
def eval_fn(load_path, args, env_name='fruitbot', distribution_mode='easy', num_levels=500, start_level=500, log_dir='./tmp/procgen', comm=None, num_trials=3, gui=False):
    """Evaluate a trained PPO2 model on Procgen.

    load_path may be a single checkpoint file or a directory of checkpoints;
    for a directory, every file inside is evaluated and logged to its own
    subdirectory of log_dir.

    NOTE(review): comm must be a live MPI communicator (e.g. MPI.COMM_WORLD);
    the default of None would fail at comm.Split below.
    """
    # PPO2 hyperparameters. These must match the values used during training
    # so the rebuilt graph is compatible with the stored checkpoint weights.
    learning_rate = 5e-4
    ent_coef = .01
    gamma = .999
    lam = .95
    nsteps = 256
    nminibatches = 8
    ppo_epochs = 3
    clip_range = .2
    use_vf_clipping = True
    vf_coef = 0.5
    max_grad_norm = 0.5
    mpi_rank_weight = 1
    log_interval = 1
    seed = None

    # Only MPI rank 0 writes csv/stdout logs; all other ranks log nothing.
    log_comm = comm.Split(0, 0)
    format_strs = ['csv', 'stdout'] if log_comm.Get_rank() == 0 else []
    logger.configure(comm=log_comm, dir=log_dir, format_strs=format_strs)

    logger.info("creating environment")
    venv = ProcgenEnv(num_envs=1, env_name=env_name, num_levels=num_levels, start_level=start_level, distribution_mode=distribution_mode)
    venv = VecExtractDictObs(venv, "rgb")
    venv = VecMonitor(
        venv=venv, filename=None, keep_buf=100,
    )
    venv = VecNormalize(venv=venv, ob=False)

    logger.info("creating tf session")
    setup_mpi_gpus()
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True  # pylint: disable=E1101
    sess = tf.Session(config=config)
    sess.__enter__()

    conv_fn = lambda x: build_impala_cnn(x, depths=[16, 32, 32], emb_size=256)

    logger.info("evaluating")
    set_global_seeds(seed)
    policy = build_policy(venv, conv_fn)

    # Get the nb of env
    nenvs = venv.num_envs
    # Get state_space and action_space
    ob_space = venv.observation_space
    ac_space = venv.action_space
    # Calculate the batch_size
    nbatch = nenvs * nsteps
    nbatch_train = nbatch // nminibatches

    # Instantiate the model object (that creates act_model and train_model)
    from .alternate_ppo2.model import Model
    model_fn = Model
    model = model_fn(policy=policy, ob_space=ob_space, ac_space=ac_space, nbatch_act=nenvs, nbatch_train=nbatch_train,
                     nsteps=nsteps, ent_coef=ent_coef, vf_coef=vf_coef,
                     max_grad_norm=max_grad_norm, comm=comm, mpi_rank_weight=mpi_rank_weight)

    def _evaluate_checkpoint(checkpoint_path):
        # Shared alt_ppo2.eval invocation; only the checkpoint path differs
        # between the single-file and directory cases below.
        alt_ppo2.eval(
            network=conv_fn,
            nsteps=nsteps,
            ent_coef=ent_coef,
            vf_coef=vf_coef,
            max_grad_norm=max_grad_norm,
            gamma=gamma,
            lam=lam,
            log_interval=log_interval,
            nminibatches=nminibatches,
            noptepochs=ppo_epochs,
            load_path=checkpoint_path,
            mpi_rank_weight=mpi_rank_weight,
            comm=comm,
            clip_vf=use_vf_clipping,
            lr=learning_rate,
            cliprange=clip_range,
            policy=policy,
            nenvs=nenvs,
            ob_space=ob_space,
            ac_space=ac_space,
            nbatch=nbatch,
            nbatch_train=nbatch_train,
            model_fn=model_fn,
            model=model,
            num_trials=num_trials,
            num_levels=num_levels,
            start_level=start_level,
            gui=gui,
            args=args
        )

    if os.path.isfile(load_path):
        _evaluate_checkpoint(load_path)
    elif os.path.isdir(load_path):
        for file in os.listdir(load_path):
            # Re-point the logger so each checkpoint gets its own log subdir.
            log_comm = comm.Split(0, 0)
            format_strs = ['csv', 'stdout'] if log_comm.Get_rank() == 0 else []
            logger.configure(comm=log_comm, dir=log_dir + '/' + file, format_strs=format_strs)
            _evaluate_checkpoint(load_path + '/' + file)
    else:
        print('Model path does not exist.')
        return
def main():
    """Entry point: parse CLI flags and launch procgen model evaluation."""
    cli = argparse.ArgumentParser(
        description='Process procgen evaluation arguments.')
    cli.add_argument('--load_model', type=str, required=True)
    cli.add_argument('--log_dir', type=str, default='./logs/eval')
    cli.add_argument('--env_name', type=str, default='fruitbot')
    cli.add_argument('--distribution_mode', type=str, default='easy',
                     choices=['easy', 'hard', 'exploration', 'memory',
                              'extreme'])
    cli.add_argument('--num_levels', type=int, default=500)
    cli.add_argument('--start_level', type=int, default=0)
    cli.add_argument('--num_trials', type=int, default=3)
    cli.add_argument('--gui', action='store_true')
    parsed = cli.parse_args()

    # Every MPI rank participates; eval_fn splits the communicator
    # internally for logging.
    eval_fn(parsed.load_model,
            log_dir=parsed.log_dir,
            env_name=parsed.env_name,
            distribution_mode=parsed.distribution_mode,
            num_levels=parsed.num_levels,
            start_level=parsed.start_level,
            num_trials=parsed.num_trials,
            comm=MPI.COMM_WORLD,
            gui=parsed.gui,
            args=parsed)


if __name__ == '__main__':
    main()
|
nsteps=nsteps, ent_coef=ent_coef, vf_coef=vf_coef,
max_grad_norm=max_grad_norm, comm=comm, mpi_rank_weight=mpi_rank_weight)
|
test_qz.py
|
# Copyright 2019 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import condensa
from condensa import schemes
def test_float16(device):
    """Round-trip a Linear layer through float16 quantization and back."""
    quantizer = schemes.Quantize(condensa.float16)
    layer = torch.nn.Linear(100, 10).float().to(device)
    quantizer.pi(layer)  # compress: weights become float16
    assert layer.weight.dtype == torch.float16
    quantizer.delta(layer)  # decompress: weights restored to float32
    assert layer.weight.dtype == torch.float32
|
# BUG FIX: the guard checked for CUDA but then ran on 'cpu', making the
# availability check pointless. Exercise the GPU path when CUDA is present.
if torch.cuda.is_available():
    test_float16('cuda')
|
# Allow running this test file directly, without a pytest runner.
if __name__ == '__main__':
    test_float16('cpu')
|
task.go
|
package tasks
import (
"archive/tar"
"crypto/tls"
"fmt"
docker "github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/mount"
"github.com/determined-ai/determined/master/pkg/archive"
"github.com/determined-ai/determined/master/pkg/container"
"github.com/determined-ai/determined/master/pkg/device"
"github.com/determined-ai/determined/master/pkg/model"
"github.com/determined-ai/determined/master/pkg/schemas/expconf"
)
const (
	// ContainerWorkDir is the working directory for tasks.
	ContainerWorkDir = "/run/determined/workdir"
	// userPythonBaseDir is the PYTHONUSERBASE target for `pip install --user`.
	userPythonBaseDir = "/run/determined/pythonuserbase"
	runDir = "/run/determined"
	trainDir = "/run/determined/train"
	modelCopy = "/run/determined/train/model"
	rootDir = "/"
	// passwd/shadow/group entries staged for libnss_determined (see
	// injectUserArchive below).
	passwdPath = "/run/determined/etc/passwd"
	shadowPath = "/run/determined/etc/shadow"
	groupPath = "/run/determined/etc/group"
	certPath = "/run/determined/etc/ssl/master.crt"
)
const (
	// Container runtimes.
	runc = "runc"
)
// TaskSpec defines the spec of a task.
type TaskSpec struct {
	// Fields that are only for task logics.
	Description string
	// LoggingFields are fields to include in each record of structured (i.e., Fluent Bit) logging.
	LoggingFields map[string]string
	// UseFluentLogging is whether to use Fluent Bit logging (as opposed to directly streaming).
	UseFluentLogging bool
	// Fields that are set on the cluster level.
	ClusterID string
	HarnessPath string
	MasterCert *tls.Certificate
	// Fields that are set on the per-request basis.
	TaskContainerDefaults model.TaskContainerDefaultsConfig
	Environment expconf.EnvironmentConfig
	ResourcesConfig expconf.ResourcesConfig
	Owner *model.User
	AgentUserGroup *model.AgentUserGroup
	ExtraArchives []container.RunArchive
	// ExtraEnvVars are merged last in EnvVars, so they override the defaults.
	ExtraEnvVars map[string]string
	Entrypoint []string
	Mounts []mount.Mount
	// UseHostMode is whether host mode networking would be desirable for this task.
	// This is used by Docker only.
	UseHostMode bool
	ShmSize int64
	// The parent task of an allocation.
	TaskID string
	// Fields that are set on the per-allocation basis.
	AllocationID string
	AllocationSessionToken string
	ContainerID string
	Devices []device.Device
}
// Archives returns all the archives.
func (t *TaskSpec) Archives() []container.RunArchive {
	// Base archives first (workdir, user entries, harness, master cert),
	// then any request-specific extras.
	archives := []container.RunArchive{
		workDirArchive(t.AgentUserGroup),
		injectUserArchive(t.AgentUserGroup),
		harnessArchive(t.HarnessPath, t.AgentUserGroup),
		masterCertArchive(t.MasterCert),
	}
	return append(archives, t.ExtraArchives...)
}
// EnvVars returns all the environment variables.
func (t TaskSpec) EnvVars() map[string]string {
	e := map[string]string{
		// PYTHONUSERBASE allows us to `pip install --user` into a location
		// guaranteed to be owned by the user inside the container.
		"PYTHONUSERBASE":               userPythonBaseDir,
		"DET_TASK_ID":                  t.TaskID,
		"DET_ALLOCATION_ID":            t.AllocationID,
		"DET_ALLOCATION_SESSION_TOKEN": t.AllocationSessionToken,
	}

	if t.TaskContainerDefaults.NCCLPortRange != "" {
		e["NCCL_PORT_RANGE"] = t.TaskContainerDefaults.NCCLPortRange
	}
	// BUG FIX: this previously re-tested NCCLPortRange and exported the NCCL
	// range as GLOO_PORT_RANGE; Gloo has its own configured port range.
	if t.TaskContainerDefaults.GLOOPortRange != "" {
		e["GLOO_PORT_RANGE"] = t.TaskContainerDefaults.GLOOPortRange
	}

	networkInterface := t.TaskContainerDefaults.DtrainNetworkInterface
	if networkInterface == "" {
		// Sentinel telling the harness to auto-detect the interface.
		networkInterface = "DET_AUTO_DETECT_NETWORK_INTERFACE"
	}
	e["DET_TRIAL_RUNNER_NETWORK_INTERFACE"] = networkInterface

	if t.MasterCert != nil {
		e["DET_USE_TLS"] = "true"
		e["DET_MASTER_CERT_FILE"] = certPath
	} else {
		e["DET_USE_TLS"] = "false"
	}

	// Per-request extras are applied last so they override the defaults.
	for k, v := range t.ExtraEnvVars {
		e[k] = v
	}
	return e
}
// ToDockerSpec converts a task spec to a docker container spec.
func (t *TaskSpec) ToDockerSpec() container.Spec {
	var envVars []string
	for k, v := range t.EnvVars() {
		envVars = append(envVars, fmt.Sprintf("%s=%s", k, v))
	}
	env := t.Environment
	// Pick the device type from the first assigned device; CPU otherwise.
	deviceType := device.CPU
	if len(t.Devices) > 0 {
		deviceType = t.Devices[0].Type
	}
	envVars = append(envVars, env.EnvironmentVariables().For(deviceType)...)
	// CPU/zero-slot containers run under plain runc; other device types keep
	// Docker's default runtime ("" = unset).
	containerRuntime := ""
	switch deviceType {
	case device.CPU, device.ZeroSlot:
		containerRuntime = runc
	}
	network := t.TaskContainerDefaults.NetworkMode
	if t.UseHostMode {
		network = hostMode
	}
	// Explicit per-task shm size wins; fall back to the cluster default.
	shmSize := t.ShmSize
	if shmSize == 0 {
		shmSize = t.TaskContainerDefaults.ShmSizeBytes
	}
	resources := t.ResourcesConfig
	var devices []docker.DeviceMapping
	// NOTE: the loop variable shadows the imported `device` package inside
	// this loop body.
	for _, device := range resources.Devices() {
		devices = append(devices, docker.DeviceMapping{
			PathOnHost: device.HostPath(),
			PathInContainer: device.ContainerPath(),
			CgroupPermissions: device.Mode(),
		})
	}
	spec := container.Spec{
		PullSpec: container.PullSpec{
			Registry: env.RegistryAuth(),
			ForcePull: env.ForcePullImage(),
		},
		RunSpec: container.RunSpec{
			ContainerConfig: docker.Config{
				User: getUser(t.AgentUserGroup),
				ExposedPorts: toPortSet(env.Ports()),
				Env: envVars,
				Cmd: t.Entrypoint,
				Image: env.Image().For(deviceType),
				WorkingDir: ContainerWorkDir,
			},
			HostConfig: docker.HostConfig{
				NetworkMode: network,
				Mounts: t.Mounts,
				PublishAllPorts: true,
				ShmSize: shmSize,
				CapAdd: env.AddCapabilities(),
				CapDrop: env.DropCapabilities(),
				Runtime: containerRuntime,
				Resources: docker.Resources{
					Devices: devices,
				},
			},
			Archives: t.Archives(),
			UseFluentLogging: t.UseFluentLogging,
		},
	}
	return spec
}
// workDirArchive ensures that the workdir is created and owned by the user.
func workDirArchive(aug *model.AgentUserGroup) container.RunArchive {
	return wrapArchive(
		// All three directories are private (0700) to the task user.
		archive.Archive{
			aug.OwnedArchiveItem(runDir, nil, 0700, tar.TypeDir),
			aug.OwnedArchiveItem(ContainerWorkDir, nil, 0700, tar.TypeDir),
			aug.OwnedArchiveItem(userPythonBaseDir, nil, 0700, tar.TypeDir),
		},
		rootDir,
	)
}
// injectUserArchive creates the user/UID/group/GID for a user by adding passwd/shadow/group files
// to /run/determined/etc, which will be read by libnss_determined inside the container. If
// libnss_determined is not present in the container, these files will be simply ignored and some
// non-root container features will not work properly.
func
|
(aug *model.AgentUserGroup) container.RunArchive {
passwdBytes := []byte(
fmt.Sprintf("%v:x:%v:%v::%v:/bin/bash\n", aug.User, aug.UID, aug.GID, ContainerWorkDir),
)
shadowBytes := []byte(fmt.Sprintf("%v:!!:::::::\n", aug.User))
groupBytes := []byte(fmt.Sprintf("%v:x:%v:\n", aug.Group, aug.GID))
return wrapArchive(
archive.Archive{
archive.RootItem(passwdPath, passwdBytes, 0644, tar.TypeReg),
archive.RootItem(shadowPath, shadowBytes, 0600, tar.TypeReg),
archive.RootItem(groupPath, groupBytes, 0644, tar.TypeReg),
},
rootDir,
)
}
// getUser renders the "UID:GID" pair Docker expects for the container user,
// or "" when no agent user group is configured.
func getUser(aug *model.AgentUserGroup) string {
	if aug != nil {
		return fmt.Sprintf("%d:%d", aug.UID, aug.GID)
	}
	return ""
}
|
injectUserArchive
|
dart2js_nobuild.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Recipe module dependencies resolved by the recipe engine.
DEPS = [
  'depot_tools/bot_update',
  'depot_tools/gclient',
  'file',
  'depot_tools/gsutil',
  'recipe_engine/context',
  'recipe_engine/path',
  'recipe_engine/platform',
  'recipe_engine/properties',
  'recipe_engine/python',
  'recipe_engine/step',
  'test_utils',
  'zip',
]
# Runtime names that may appear in a builder name; entries in
# multiple_runtimes expand to several concrete runtimes.
all_runtimes = ['d8', 'jsshell', 'ie9', 'ie10', 'ie11', 'ff',
                'safari', 'chrome', 'safarimobilesim', 'drt', 'chromeff',
                'ie10chrome', 'ie11ff']
multiple_runtimes = {'chromeff': ['chrome', 'ff'],
                     'ie10chrome': ['ie10', 'chrome'],
                     'ie11ff': ['ie11', 'ff']}
# Builder-name option fragment -> test.py flag.
all_options = {'hostchecked': '--host-checked',
               'minified': '--minified',
               'cps': '--cps-ir',
               'csp': '--csp'}
build_directories = {'linux': 'out/ReleaseX64',
                     'win': 'out/ReleaseX64',
                     'mac': 'xcodebuild/ReleaseX64'}
# Module-level flag: only the first test step omits --append_logs so the
# shared debug log starts fresh (mutated by RunTests).
IsFirstTestStep = True
def RunTests(api, test_args, test_specs, use_xvfb=False):
  """Run every spec in test_specs with the shared test_args.

  The first test step of the build runs without --append_logs so the debug
  log is truncated; every subsequent step appends to it.
  """
  global IsFirstTestStep
  for spec in test_specs:
    args = list(test_args)
    if IsFirstTestStep:
      IsFirstTestStep = False
    else:
      args.append('--append_logs')
    args.extend(spec['tests'])
    with api.context(cwd=api.path['checkout']):
      if use_xvfb:
        # Browsers need a display; wrap the test runner in xvfb.
        cmd = ['xvfb-run', '-a', '--server-args=-screen 0 1024x768x24',
               'python', '-u', './tools/test.py'] + args
        api.step(spec['name'], cmd)
      else:
        api.python(spec['name'],
                   api.path['checkout'].join('tools', 'test.py'),
                   args=args)
def sdk_url(channel, platform, arch, mode, revision):
  """Return the Google Storage URL of a prebuilt SDK archive.

  The paths here come from dart-lang/sdk/tools/bots/bot_utils.py.
  """
  # Map recipe platform names onto the archive's platform names.
  platform_names = {
    'linux': 'linux',
    'win': 'windows',
    'mac': 'macos',
  }
  return ('gs://dart-archive/channels/%s/raw/hash/%s/sdk/dartsdk-%s-%s-%s.zip'
          % (channel, revision, platform_names[platform], arch, mode))
def RunSteps(api):
  """Parse the builder name, check out the SDK, download the prebuilt SDK
  archive, and run the dart2js test suites the builder name describes."""
  builder_name = str(api.properties.get('buildername')) # Convert from unicode.
  # Builder names look like: dart2js-<system>-<runtime>[-options][-N-M]-<channel>
  builder_fragments = builder_name.split('-')
  assert len(builder_fragments) > 3
  assert builder_fragments[0] == 'dart2js'
  system = builder_fragments[1]
  assert system in ['linux', 'mac10.11', 'win7', 'win8', 'win10']
  runtime = builder_fragments[2]
  assert runtime in all_runtimes
  channel = builder_fragments[-1]
  assert channel in ['be', 'dev', 'stable', 'integration']
  try:
    # Sharded builders carry "<shard>-<num_shards>" before the channel.
    num_shards = int(builder_fragments[-2])
    shard = int(builder_fragments[-3])
    sharded = True
    options_end = -3
  except ValueError:
    sharded = False
    options_end = -1
  options = builder_fragments[3:options_end]
  for option in options:
    # BUG FIX: dict.has_key() was removed in Python 3; `in` is the portable
    # and idiomatic membership test.
    assert option in all_options
  api.gclient.set_config('dart')
  api.path.c.dynamic_paths['tools'] = None
  api.bot_update.ensure_checkout()
  api.path['tools'] = api.path['checkout'].join('tools')
  revision = api.properties['revision']
  api.gclient.runhooks()
  with api.step.defer_results():
    with api.context(cwd=api.path['checkout']):
      api.python('taskkill before building',
                 api.path['checkout'].join('tools', 'task_kill.py'),
                 args=['--kill_browsers=True'],
                 ok_ret='any')
    # Download and unpack the prebuilt SDK instead of building it.
    zipfile = api.path.abspath(api.path['checkout'].join('sdk.zip'))
    url = sdk_url(channel, api.platform.name, 'x64', 'release', revision)
    api.gsutil(['cp', url, zipfile], name='Download sdk')
    build_dir = api.path['checkout'].join(build_directories[api.platform.name])
    build_dir = api.path.abspath(build_dir)
    api.file.makedirs('Create build directory', build_dir)
    api.file.rmtree('Clean build directory', build_dir)
    api.zip.unzip('Unzip sdk', zipfile, build_dir)
  with api.step.defer_results():
    runtimes = multiple_runtimes.get(runtime, [runtime])
    for runtime in runtimes:
      # TODO(whesse): Call a script that prints the runtime version.
      test_args = ['--mode=release',
                   '--arch=x64',
                   '--use-sdk',
                   '--compiler=dart2js',
                   '--dart2js-batch',
                   '--runtime=%s' % runtime,
                   '--progress=buildbot',
                   '-v',
                   '--reset-browser-configuration',
                   '--report',
                   '--time',
                   '--write-debug-log',
                   '--write-test-outcome-log']
      for option in options:
        test_args.append(all_options[option])
      if sharded:
        test_args.extend(['--shards=%s' % num_shards, '--shard=%s' % shard])
      if system in ['win7', 'win8', 'win10']:
        test_args.append('--builder-tag=%s' % system)
      if runtime in ['ie10', 'ie11']:
        test_args.extend(['-j6', '--timeout=120'])  # Issue 28955, IE is slow.
        # IE runs a reduced suite set.
        test_specs = [{'name': 'dart2js %s tests' % runtime,
                       'tests': ['html', 'pkg', 'samples']},
                      {'name': 'dart2js %s co19 tests' % runtime,
                       'tests': ['co19']}]
      else:
        test_specs = [
          {'name': 'dart2js-%s tests' % runtime,
           'tests': ['--exclude-suite=observatory_ui,co19']},
          {'name': 'dart2js-%s-package tests' % runtime,
           'tests': ['pkg']},
          {'name': 'dart2js-%s-observatory_ui tests' % runtime,
           'tests': ['observatory_ui']},
          {'name': 'dart2js-%s-extra tests' % runtime,
           'tests': ['dart2js_extra', 'dart2js_native']},
          {'name': 'dart2js-%s-co19 tests' % runtime,
           'tests': ['co19']},
        ]
      needs_xvfb = (runtime in ['drt', 'dartium', 'chrome', 'ff'] and
                    system == 'linux')
      RunTests(api, test_args, test_specs, use_xvfb=needs_xvfb)
      if runtime == 'd8':
        kernel_test_args = test_args + ['--dart2js-with-kernel']
        kernel_test_specs = [{
          'name': 'dart2js-with-kernel-d8 tests',
          'tests': ['language', 'corelib', 'dart2js_extra', 'dart2js_native']
        }]
        RunTests(api, kernel_test_args, kernel_test_specs, use_xvfb=needs_xvfb)
      # Re-run everything with --fast-startup, renaming the steps to match.
      test_args.append('--fast-startup')
      for spec in test_specs:
        spec['name'] = spec['name'].replace(' tests', '-fast-startup tests')
      RunTests(api, test_args, test_specs, use_xvfb=needs_xvfb)
      if runtime in ['d8', 'drt']:
        # d8/drt additionally run in checked mode (on top of --fast-startup).
        test_args.append('--checked')
        for spec in test_specs:
          spec['name'] = spec['name'].replace(' tests', '-checked tests')
        RunTests(api, test_args, test_specs, use_xvfb=needs_xvfb)
  with api.context(cwd=api.path['checkout']):
    # TODO(whesse): Add archive coredumps step from dart_factory.py.
    api.python('taskkill after testing',
               api.path['checkout'].join('tools', 'task_kill.py'),
               args=['--kill_browsers=True'],
               ok_ret='any')
    if api.platform.name == 'win':
      api.step('debug log',
               ['cmd.exe', '/c', 'type', '.debug.log'])
    else:
      api.step('debug log',
               ['cat', '.debug.log'])
def GenTests(api):
  """Recipe simulation cases: sharded, windows/IE, drt, d8 and safari
  builders."""
  yield (
    api.test('dart2js-linux-jsshell-hostchecked-csp-3-5-be') +
    api.platform('linux', 64) +
    api.properties.generic(
      mastername='client.dart',
      buildername='dart2js-linux-jsshell-hostchecked-csp-3-5-be',
      revision='hash_of_revision'))
  yield (
    api.test('dart2js-win7-ie10-dev') + api.platform('win', 32) +
    api.properties.generic(
      mastername='client.dart',
      buildername='dart2js-win7-ie10-dev',
      revision='hash_of_revision'))
  # NOTE(review): the test name says "-be" but the builder name is a sharded
  # "-dev" builder — confirm this mismatch is intentional.
  yield (
    api.test('dart2js-linux-drt-be') + api.platform('linux', 64) +
    api.properties.generic(
      mastername='client.dart',
      buildername='dart2js-linux-drt-93-105-dev',
      revision='hash_of_revision'))
  yield (
    api.test('dart2js-linux-d8-be') + api.platform('linux', 64) +
    api.properties.generic(
      mastername='client.dart',
      buildername='dart2js-linux-d8-1-4-be',
      revision='hash_of_revision'))
  yield (
    api.test('dart2js-mac10.11-safari-1-3-be') + api.platform('mac', 64) +
    api.properties.generic(
      mastername='client.dart',
      buildername='dart2js-mac10.11-safari-1-3-be',
      revision='hash_of_revision'))
|
|
pe8.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Discover the largest product of five consecutive digits
in the 1000-digit number.
"""
def pe8(fname="../../res/pe8.txt", n=5):
    """
    Discover the largest product of n consecutive digits in the digit
    string stored in fname (newlines are ignored).

    >>> pe8()
    40824
    """
    with open(fname, 'r') as f:
        s = f.read()
    s = s.replace('\n', '')
    ls = len(s)
    if ls < n:
        # Not enough digits for even one window of length n.
        raise ValueError("need at least %d digits, got %d" % (n, ls))
    m = 0
    # Slide a window of length n over the digit string, tracking the
    # maximum product seen.
    for x in range(ls - n + 1):
        t = 1
        for y in range(n):
            t *= int(s[x + y])
        if m < t:
            m = t
    return m
if __name__ == "__main__":
    # Run the docstring examples first.
    import doctest
    doctest.testmod()
    # Simple REPL: read a window size n from stdin and print the answer for
    # that n; exit cleanly on EOF/Ctrl-C or bad input.
    try:
        while True:
            s = input('> ')
            n = int(s)
            print(pe8(n=n))
    except (SyntaxError, EOFError, KeyboardInterrupt, NameError):
        pass
|
pe8
|
expires.rs
|
use crate::header::{HttpDate, EXPIRES};
header! {
/// `Expires` header, defined in [RFC7234](http://tools.ietf.org/html/rfc7234#section-5.3)
///
/// The `Expires` header field gives the date/time after which the
/// response is considered stale.
///
/// The presence of an Expires field does not imply that the original
/// resource will change or cease to exist at, before, or after that
/// time.
///
/// # ABNF
///
/// ```text
/// Expires = HTTP-date
/// ```
///
/// # Example values
/// * `Thu, 01 Dec 1994 16:00:00 GMT`
///
/// # Example
///
/// ```
/// use std::time::{SystemTime, Duration};
/// use actix_http::Response;
/// use actix_http::http::header::Expires;
///
/// let mut builder = Response::Ok();
/// let expiration = SystemTime::now() + Duration::from_secs(60 * 60 * 24);
/// builder.insert_header(
/// Expires(expiration.into())
/// );
/// ```
(Expires, EXPIRES) => [HttpDate]
test_expires {
// Test case from RFC
|
test_header!(test1, vec![b"Thu, 01 Dec 1994 16:00:00 GMT"]);
}
}
| |
GoogleSheetUpdateOne.js
|
/*
Copyright 2020-2021 Lowdefy, Inc
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import schema from './GoogleSheetUpdateOneSchema.json';
import cleanRows from '../cleanRows';
import getSheet from '../getSheet';
import { transformRead, transformWrite } from '../transformTypes';
import mingoFilter from '../../../utils/mingoFilter';
// Update the first row matching a mongo-style filter; optionally insert a new
// row (upsert) when nothing matches.
async function googleSheetUpdateOne({ request, connection }) {
  const { filter, update, options = {} } = request;
  const { limit, skip, upsert, raw } = options;
  const sheet = await getSheet({ connection });
  // Read candidate rows, convert sheet strings to typed values, then apply
  // the filter in memory.
  let rows = await sheet.getRows({ limit, offset: skip });
  rows = transformRead({ input: rows, types: connection.columnTypes });
  rows = mingoFilter({ input: rows, filter });
  const transformedUpdate = transformWrite({ input: update, types: connection.columnTypes });
  if (rows.length === 0) {
    if (upsert) {
      const insertedRow = await sheet.addRow(transformedUpdate, { raw });
      return {
        modifiedCount: 1,
        upserted: true,
        row: cleanRows(insertedRow),
      };
    }
    return {
      modifiedCount: 0,
      upserted: false,
    };
  }
  // Only the first matched row is updated ("UpdateOne" semantics).
  const row = rows[0];
  Object.assign(row, transformedUpdate);
  await row.save({ raw });
  return {
    modifiedCount: 1,
    upserted: false,
    row: cleanRows(row),
  };
}
export default { resolver: googleSheetUpdateOne, schema, checkRead: false, checkWrite: true };
|
googleSheetUpdateOne
|
random-bytes.browser.js
|
import * as utils from "./utils"
var randomSource /* RandomUint8ArrayProvider */
// Generate `length` cryptographically-random bytes as a Promise<Uint8Array>.
export function randomBytes(length) {
  // A custom source registered via setRandomSource takes precedence
  // (e.g. Node.js or tests).
  if (utils.isDefined(randomSource))
    return randomSource.generate(length)
  // Relies on https://developer.mozilla.org/en-US/docs/Web/API/RandomSource
  // https://www.w3.org/TR/WebCryptoAPI/#RandomSource-method-getRandomValues
  // FIX: dropped the unused `reject` parameter; a throw inside the executor
  // (e.g. getRandomValues on length > 65536) already rejects the promise.
  return new Promise((resolve) => {
    let array = new Uint8Array(length)
    resolve(window.crypto.getRandomValues(array))
  })
}
// Override the default window.crypto-based randomness, e.g. for tests or
// non-browser environments.
export function setRandomSource(source /* RandomUint8ArrayProvider */) {
  randomSource = source
}
|
randomBytes
|
__init__.py
|
"""
{{ cookiecutter.application_name }} Application
|
default_app_config = '{{ cookiecutter.application_module }}.apps.Config'
|
"""
|
commands_manual.go
|
// Copyright 2013 The lime Authors.
// Use of this source code is governed by a 2-clause
// BSD-style license that can be found in the LICENSE file.
package api
import (
"fmt"
"github.com/jxo/lime"
"github.com/jxo/lime/log"
"github.com/jxo/lime/text"
"github.com/jxo/lime/util"
"github.com/limetext/gopy"
)
var (
	// Referenced so the lime and text imports stay live even when not used
	// directly in this file.
	_ = lime.View{}
	_ = text.Region{}
)
var (
	// Python class descriptors exposing the glue types below to Python as
	// sublime.*CommandGlue.
	_windowCommandGlueClass = py.Class{
		Name: "sublime.WindowCommandGlue",
		Pointer: (*WindowCommandGlue)(nil),
	}
	_textCommandGlueClass = py.Class{
		Name: "sublime.TextCommandGlue",
		Pointer: (*TextCommandGlue)(nil),
	}
	_applicationCommandGlueClass = py.Class{
		Name: "sublime.ApplicationCommandGlue",
		Pointer: (*ApplicationCommandGlue)(nil),
	}
)
type (
	// CommandGlue holds a Python command object (inner) together with the
	// arguments it was invoked with.
	CommandGlue struct {
		py.BaseObject
		inner py.Object
		args lime.Args
	}
	// Window/Text/ApplicationCommandGlue specialize CommandGlue per command
	// category.
	WindowCommandGlue struct {
		py.BaseObject
		CommandGlue
	}
	TextCommandGlue struct {
		py.BaseObject
		CommandGlue
	}
	ApplicationCommandGlue struct {
		py.BaseObject
		CommandGlue
	}
)
// Init stores the arguments the command was invoked with, for later
// conversion to Python kwargs when the wrapped command runs.
func (c *CommandGlue) Init(args lime.Args) error {
	c.args = args
	return nil
}
// BypassUndo reports whether the command should skip the undo stack.
// Glue commands never do by default.
func (c *CommandGlue) BypassUndo() bool {
	return false
}
// PyInit is the Python-side constructor for the glue object. It expects
// exactly one positional argument — the Python command class/object to
// wrap — and takes a strong reference to it.
func (c *CommandGlue) PyInit(args *py.Tuple, kwds *py.Dict) error {
	if args.Size() != 1 {
		return fmt.Errorf("Expected only 1 argument not %d", args.Size())
	}
	if v, err := args.GetItem(0); err != nil {
		return pyError(err)
	} else {
		c.inner = v
	}
	// Keep the wrapped Python object alive for the lifetime of the glue.
	c.inner.Incref()
	return nil
}
// CreatePyArgs converts a lime.Args map into a Python dict suitable for
// passing to the wrapped command's methods. The caller owns the returned
// reference and must Decref it.
func (c *CommandGlue) CreatePyArgs(args lime.Args) (ret *py.Dict, err error) {
	if r, err := toPython(args); err != nil {
		return nil, err
	} else {
		return r.(*py.Dict), nil
	}
}
// callBool invokes the named boolean predicate method (e.g. is_enabled)
// on the wrapped Python command, passing args converted to a Python dict.
//
// Error behavior is deliberately asymmetric: a failure to convert args
// yields false, while a failure of the call itself (or a non-Bool result)
// yields true — NOTE(review): presumably so a broken predicate does not
// hide the command; confirm this is intentional.
func (c *CommandGlue) callBool(name string, args lime.Args) bool {
	// Hold the GIL for all Python API calls below.
	gs := py.GilState_Ensure()
	defer gs.Release()
	var (
		pyargs, r py.Object
		err       error
	)
	if pyargs, err = c.CreatePyArgs(args); err != nil {
		log.Error(err)
		return false
	}
	defer pyargs.Decref()
	if r, err = c.CallMethodObjArgs(name, pyargs); err != nil {
		log.Error(err)
		return true
	}
	defer r.Decref()
	if r, ok := r.(*py.Bool); ok {
		return r.Bool()
	}
	return true
}
// IsEnabled asks the wrapped Python command's is_enabled() predicate.
func (c *CommandGlue) IsEnabled() bool {
	return c.callBool("is_enabled", c.args)
}
// IsVisible asks the wrapped Python command's is_visible() predicate.
func (c *CommandGlue) IsVisible() bool {
	return c.callBool("is_visible", c.args)
}
// Description returns the string produced by the wrapped command's
// description(args) method. It returns "" when the args conversion fails,
// when the call fails, or when the result is not a unicode string.
func (c *CommandGlue) Description() string {
	gs := py.GilState_Ensure()
	defer gs.Release()
	var (
		pyargs, r py.Object
		err       error
	)
	if pyargs, err = c.CreatePyArgs(c.args); err != nil {
		log.Error(err)
		return ""
	}
	defer pyargs.Decref()
	if r, err = c.CallMethodObjArgs("description", pyargs); err != nil {
		log.Error(err)
		return ""
	}
	defer r.Decref()
	// Only a unicode result is meaningful; anything else maps to "".
	if r, ok := r.(*py.Unicode); ok {
		return r.String()
	}
	return ""
}
func pyError(err error) error
|
// Run executes the wrapped Python TextCommand against view v and edit e.
// The view, edit and stored args are converted to Python objects, the
// command class is instantiated with the view, and its run__ method is
// invoked (or run_, for plugins that bypass the undo stack). Profiling
// probes time the instantiation ("tc.init") and execution ("tc.exec").
func (c *TextCommandGlue) Run(v *lime.View, e *lime.Edit) error {
	l := py.NewLock()
	defer l.Unlock()
	p0 := util.Prof.Enter("tc.run")
	defer p0.Exit()
	var (
		pyv, pye, pyargs, obj py.Object
		err                   error
	)
	if pyv, err = toPython(v); err != nil {
		return pyError(err)
	}
	defer pyv.Decref()
	if pye, err = toPython(e); err != nil {
		return pyError(err)
	}
	defer pye.Decref()
	if pyargs, err = c.CreatePyArgs(c.args); err != nil {
		return pyError(err)
	}
	defer pyargs.Decref()
	init := util.Prof.Enter("tc.init")
	// Instantiate the Python command class with the view.
	if obj, err = c.inner.Base().CallFunctionObjArgs(pyv); err != nil {
		return pyError(err)
	}
	defer obj.Decref()
	init.Exit()
	exec := util.Prof.Enter("tc.exec")
	defer exec.Exit()
	if obj.Base().HasAttrString("run_") {
		// The plugin is probably trying to bypass the undostack...
		// Temporarily mark the view scratch and end the edit so the
		// command's changes are not recorded.
		old := v.IsScratch()
		v.SetScratch(true)
		log.Finest("Discarded: %s", e)
		v.EndEdit(e)
		v.SetScratch(old)
		ret, err := obj.Base().CallMethodObjArgs("run_", pye, pyargs)
		// Release the returned object even when the call errored.
		if ret != nil {
			ret.Decref()
		}
		if err != nil {
			return pyError(err)
		}
		return nil
	}
	ret, err := obj.Base().CallMethodObjArgs("run__", pye, pyargs)
	if ret != nil {
		ret.Decref()
	}
	if err != nil {
		return pyError(err)
	}
	return nil
}
// Run executes the wrapped Python WindowCommand against window w. The
// window and the stored args are converted to Python objects, the command
// class is instantiated with the window, and its run_ method is invoked.
func (c *WindowCommandGlue) Run(w *lime.Window) error {
	l := py.NewLock()
	defer l.Unlock()
	var (
		pyw, pyargs, obj py.Object
		err              error
	)
	log.Debug("WindowCommand: %v", c.args)
	if pyw, err = toPython(w); err != nil {
		return pyError(err)
	}
	defer pyw.Decref()
	if pyargs, err = c.CreatePyArgs(c.args); err != nil {
		return pyError(err)
	}
	defer pyargs.Decref()
	if obj, err = c.inner.Base().CallFunctionObjArgs(pyw); err != nil {
		return pyError(err)
	}
	defer obj.Decref()
	// Match TextCommandGlue.Run: release any returned object even when the
	// call failed, so a non-nil ret accompanying an error is not leaked
	// (the previous if/else only Decref'd on the success path).
	ret, err := obj.Base().CallMethodObjArgs("run_", pyargs)
	if ret != nil {
		ret.Decref()
	}
	if err != nil {
		return pyError(err)
	}
	return nil
}
// Run executes the wrapped Python ApplicationCommand. The stored args are
// converted to a Python dict, the command class is instantiated with no
// arguments, and its run method is invoked.
func (c *ApplicationCommandGlue) Run() error {
	l := py.NewLock()
	defer l.Unlock()
	var (
		pyargs py.Object
		err    error
	)
	if pyargs, err = c.CreatePyArgs(c.args); err != nil {
		return pyError(err)
	}
	defer pyargs.Decref()
	obj, err := c.inner.Base().CallFunctionObjArgs()
	if err != nil {
		return pyError(err)
	}
	defer obj.Decref()
	// Match TextCommandGlue.Run: release any returned object even when the
	// call failed, so a non-nil ret accompanying an error is not leaked
	// (the previous if/else only Decref'd on the success path).
	ret, err := obj.Base().CallMethodObjArgs("run", pyargs)
	if ret != nil {
		ret.Decref()
	}
	if err != nil {
		return pyError(err)
	}
	return nil
}
// IsChecked asks the wrapped Python command's is_checked(args) predicate
// (used for checkbox-style menu entries).
func (c *ApplicationCommandGlue) IsChecked(args lime.Args) bool {
	return c.callBool("is_checked", args)
}
|
{
return fmt.Errorf("%v", err)
}
|
user.controller.ts
|
import { Body, Controller, Delete, Get, Inject, Patch, Post, Res, UploadedFile, UseGuards } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { AuthService } from 'src/auth/auth.service';
import { AuthLoginDto } from 'src/auth/dto/AuthLoginDto.dto';
import { UserObj } from 'src/decorators/userobj.decorator';
import { PrivacyService } from 'src/privacy/privacy.service';
import ChangePrivacyOptionsInterface from './dto/ChangePrivacyOptionsInterface.dto';
import InputUserRole from './dto/InputUserRole.dto';
import UserRegistration from './dto/UserRegistration.dto';
import { UserService } from './user.service';
import { UserData } from './userData.entity';
import { Response } from 'express';
import { UserInformationService } from 'src/user-information/user-information.service';
import IUpdateUserInformation from './dto/IUpdateUserInformation';
import IUsername from './Types/IUsername';
import IPassword from './Types/IPassword';
import IDeactivateData from './Types/IDeactivateData';
import IGetOtherUserInfo from 'src/user-information/Types/IGetOtherUserInfo';
import { FriendListService } from 'src/friend-list/friend-list.service';
import INewFriend from 'src/friend-list/Types/INewFriend';
import { UseInterceptors } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { diskStorage } from 'multer'
import { v4 as uuidv4 } from 'uuid';
import * as path from 'path';
import { LocalAuthGuard } from 'src/auth/local-auth.guard';
import { JwtAuthGuard } from 'src/auth/jwt-auth.guard';
// Multer disk-storage configuration for uploaded profile pictures: each
// file is saved under src/public/images with a freshly generated UUID as
// its name, keeping the original file extension.
export const storage = {
    storage: diskStorage({
        destination: 'src/public/images',
        filename: (req, file, cb) => {
            // Random name avoids collisions and ignores the (untrusted)
            // client-supplied original filename except for its extension.
            const filename: string = uuidv4();
            const extension: string = path.parse(file.originalname).ext;
            // BUG FIX: the template previously interpolated a garbled
            // placeholder instead of the generated uuid, so the computed
            // `filename` was never used.
            cb(null, `${filename}${extension}`)
        }
    })
}
@Controller('user')
export class
|
{
constructor(
@Inject(UserService) private userService: UserService,
@Inject(PrivacyService) private privacyService: PrivacyService,
@Inject(UserInformationService) private userInformationService: UserInformationService,
@Inject(AuthService) private authService: AuthService,
@Inject(FriendListService) private friendListService: FriendListService
) { }
@Post('/register')
RegisterUser(
@Body() newUser: UserRegistration,
) {
return this.userService.createUser(newUser);
}
@Post('/login')
async phoneRegister(
@Body() req: AuthLoginDto,
@Res() res: Response,
): Promise<any> {
return this.authService.login(req, res);
}
@Get('/logout')
@UseGuards(JwtAuthGuard)
async logout(@UserObj() user: UserData, @Res() res: Response) {
return this.authService.logout(user, res);
}
@Get('/self')
@UseGuards(JwtAuthGuard)
GetSmallUserData(
@UserObj() user: UserData,
) {
return this.userService.getSmallUserData(user)
}
@Get('/role')
// @UseGuards(JwtAuthGuard)
// @UseGuards(LocalAuthGuard)
@UseGuards(JwtAuthGuard)
GetUserData(
@UserObj() user: UserData,
@Res() res: Response
) {
return this.userService.getUserData(user);
}
@Post('/invite')
@UseGuards(JwtAuthGuard)
InviteUserToTheFriendList(
@UserObj() user: UserData,
@Body() username: INewFriend
) {
return this.friendListService.inviteUserToTheFriendList(user, username);
}
@Get('/invite-list')
@UseGuards(JwtAuthGuard)
GetUserInviteList(
@UserObj() user: UserData
) {
return this.friendListService.getInviteList(user);
}
@Delete('/invite')
@UseGuards(JwtAuthGuard)
RemoveUserFromInviteList(
@UserObj() user: UserData,
@Body() username: INewFriend
) {
return this.friendListService.deleteUserFromInviteList(user, username);
}
@Get('/friend-list')
// @UseGuards(JwtAuthGuard)
@UseGuards(JwtAuthGuard)
GetUserFriendList(
@UserObj() user: UserData
) {
return this.friendListService.getFriendList(user);
}
@Post('/user-friend-list')
@UseGuards(JwtAuthGuard)
async GetFriendListByUsername(
@UserObj() user: UserData,
@Body() username: INewFriend
) {
const doesHaveAccess = await this.privacyService.checkIfUserHaveAccessToThisData(user, username, 'friendList');
if (doesHaveAccess)
return this.friendListService.getFriendListByUsername(username);
return []
}
@Post('/new-friend')
@UseGuards(JwtAuthGuard)
AddNewFriend(
@UserObj() user: UserData,
@Body() username: INewFriend
) {
return this.friendListService.addNewFriend(user, username);
}
@Delete('/friend')
@UseGuards(JwtAuthGuard)
RemoveUserFromFriendList(
@UserObj() user: UserData,
@Body() username: INewFriend
) {
return this.friendListService.removeUserFromFriendList(user, username);
}
@Get('/privacy')
@UseGuards(JwtAuthGuard)
GetUserPrivacySettings(
@UserObj() user: UserData
) {
return this.privacyService.getUserPrivacySettings(user);
}
@Patch('/privacy')
@UseGuards(JwtAuthGuard)
ChangeUserPrivacySettings(
@UserObj() user: UserData,
@Body() options: ChangePrivacyOptionsInterface
) {
return this.privacyService.changeUserPrivacySettings({ user, options });
}
@Get('/account-info')
@UseGuards(JwtAuthGuard)
GetUserAccountInfo(
@UserObj() user: UserData
) {
return this.userInformationService.getAccountInfo(user);
}
@Post('/profile')
@UseGuards(JwtAuthGuard)
GetOtherUserInfo(
@UserObj() user: UserData,
@Body() username: IGetOtherUserInfo
) {
return this.userInformationService.getOtherUserInfo(username, user);
}
@Get('/information')
@UseGuards(JwtAuthGuard)
GetUserInformation(
@UserObj() user: UserData
) {
return this.userInformationService.getValidatedUserInformation(user);
}
@Patch('/information')
@UseGuards(JwtAuthGuard)
ChangeUserInformation(
@UserObj() user: UserData,
@Body() options: IUpdateUserInformation
) {
return this.userInformationService.changeUserInformation({ user, options });
}
//dodać rolę admina (guard)
@Patch('/user-role')
@UseGuards(JwtAuthGuard)
ChangeUserRole(
@UserObj() user: UserData,
@Body() role: InputUserRole,
) {
return this.userService.changeUserRole(user, role);
}
@Patch('/username')
@UseGuards(JwtAuthGuard)
ChangeUsername(
@UserObj() user: UserData,
@Body() username: IUsername,
) {
return this.userService.changeUsername(user, username);
}
@Patch('/password')
@UseGuards(JwtAuthGuard)
ChangePassword(
@UserObj() user: UserData,
@Body() password: IPassword,
) {
return this.userService.changePassword(user, password);
}
@Post('/deactivate')
@UseGuards(JwtAuthGuard)
DeactivateUser(
@UserObj() user: UserData,
@Body() deactivateData: IDeactivateData,
) {
return this.userService.deactivateUser(user, deactivateData);
}
@Post('/profile-picture')
@UseGuards(JwtAuthGuard)
@UseInterceptors(FileInterceptor('file', storage))
UpdateProfilePicture(
@UploadedFile() file,
@UserObj() user: UserData,
) {
return this.userService.updateProfilePicture(user, file.filename);
}
@Get('/profile-picture')
@UseGuards(JwtAuthGuard)
GetProfilePicture(
@UserObj() user: UserData
) {
return this.userInformationService.getProfilePicture(user)
}
@Delete('/profile-picture')
@UseGuards(JwtAuthGuard)
DeleteProfilePicture(
@UserObj() user: UserData
) {
return this.userInformationService.deleteProfilePicture(user);
}
}
|
UserController
|
index.tsx
|
import React, { Component } from 'react'
import { observer, inject } from 'mobx-react'
import '../../assets/App.css'
import { Link } from '@reach/router'
interface Props {
|
mbx_appstore: {
getAppName: string
setAppName: Function
}
}
// About page component: reads the application name from the injected MobX
// app store (`mbx_appstore`) and renders it together with a link back to
// the parent route.
@inject('mbx_appstore')
@observer
export default class About extends Component<Props> {
  render() {
    return (
      <div className="App">
        <h1>App Name: {this.props.mbx_appstore.getAppName}</h1>
        <h1>About Page</h1>
        <Link to="..">Back</Link>
      </div>
    )
  }
}
| |
structured.py
|
from .imports import *
from sklearn_pandas import DataFrameMapper
from sklearn.preprocessing import LabelEncoder, Imputer, StandardScaler
from pandas.api.types import is_string_dtype, is_numeric_dtype
from sklearn.ensemble import forest
from sklearn.tree import export_graphviz
def set_plot_sizes(sml, med, big):
    """Configure matplotlib's default font sizes.

    sml is used for general text, axes titles, tick labels and the legend;
    med for the x/y axis labels; big for the figure title.
    """
    settings = (
        ('font', 'size', sml),          # default text
        ('axes', 'titlesize', sml),     # axes title
        ('axes', 'labelsize', med),     # x and y labels
        ('xtick', 'labelsize', sml),    # x tick labels
        ('ytick', 'labelsize', sml),    # y tick labels
        ('legend', 'fontsize', sml),    # legend
        ('figure', 'titlesize', big),   # figure title
    )
    for group, key, value in settings:
        plt.rc(group, **{key: value})
def parallel_trees(m, fn, n_jobs=8):
    """Apply fn to every estimator of the fitted ensemble m, in parallel.

    Parameters:
    -----------
    m: A fitted ensemble — anything exposing an `estimators_` sequence.
    fn: A picklable callable applied to each individual tree.
    n_jobs: Number of worker processes.

    Returns:
    --------
    A list with fn's result for each tree, in estimator order.
    """
    # Use the executor as a context manager so the worker processes are
    # always shut down; the original version leaked the pool.
    with ProcessPoolExecutor(n_jobs) as ex:
        return list(ex.map(fn, m.estimators_))
def draw_tree(t, df, size=10, ratio=0.6, precision=0):
    """ Draws a representation of a random forest in IPython.

    Parameters:
    -----------
    t: The tree you wish to draw
    df: The data used to train the tree. This is used to get the names of the features.
    size: Graphviz `size` attribute controlling the overall drawing size.
    ratio: Graphviz `ratio` attribute (height/width of the drawing).
    precision: Decimal places shown for thresholds/impurities in the nodes.
    """
    s=export_graphviz(t, out_file=None, feature_names=df.columns, filled=True,
                      special_characters=True, rotate=True, precision=precision)
    # Inject size/ratio into the generated dot source before rendering.
    IPython.display.display(graphviz.Source(re.sub('Tree {',
       f'Tree {{ size={size}; ratio={ratio}', s)))
def combine_date(years, months=1, days=1, weeks=None, hours=None, minutes=None,
                 seconds=None, milliseconds=None, microseconds=None, nanoseconds=None):
    """Build numpy datetime64 values from per-component arrays or scalars.

    years/months/days are calendar values (months and days are 1-based);
    the remaining components are optional offsets. Components left as None
    are skipped. Returns the element-wise sum as a numpy datetime64.
    """
    # Shift the calendar components to numpy's zero-based epoch offsets.
    year_off = np.asarray(years) - 1970
    month_off = np.asarray(months) - 1
    day_off = np.asarray(days) - 1
    dtype_codes = ('<M8[Y]', '<m8[M]', '<m8[D]', '<m8[W]', '<m8[h]',
                   '<m8[m]', '<m8[s]', '<m8[ms]', '<m8[us]', '<m8[ns]')
    components = (year_off, month_off, day_off, weeks, hours, minutes,
                  seconds, milliseconds, microseconds, nanoseconds)
    # Cast each supplied component to its datetime64/timedelta64 dtype and sum.
    return sum(np.asarray(value, dtype=code)
               for code, value in zip(dtype_codes, components)
               if value is not None)
def get_sample(df,n):
""" Gets a random sample of n rows from df, without replacement.
Parameters:
-----------
df: A pandas data frame, that you wish to sample from.
n: The number of rows you wish to sample.
Returns:
--------
return value: A random sample of n rows of df.
Examples:
---------
>>> df = pd.DataFrame({'col1' : [1, 2, 3], 'col2' : ['a', 'b', 'a']})
|
col1 col2
0 1 a
1 2 b
2 3 a
>>> get_sample(df, 2)
col1 col2
1 2 b
2 3 a
"""
idxs = sorted(np.random.permutation(len(df))[:n])
return df.iloc[idxs].copy()
def add_datepart(df, fldname, drop=True, time=False, errors="raise"):
    """add_datepart converts a column of df from a datetime64 to many columns containing
    the information from the date. This applies changes inplace.

    Parameters:
    -----------
    df: A pandas data frame. df gain several new columns.
    fldname: A string that is the name of the date column you wish to expand.
        If it is not a datetime64 series, it will be converted to one with pd.to_datetime.
    drop: If true then the original date column will be removed.
    time: If true time features: Hour, Minute, Second will be added.
    errors: Passed through to pd.to_datetime when conversion is needed.

    Examples:
    ---------

    >>> df = pd.DataFrame({ 'A' : pd.to_datetime(['3/11/2000', '3/12/2000', '3/13/2000'], infer_datetime_format=False) })
    >>> df

        A
    0   2000-03-11
    1   2000-03-12
    2   2000-03-13

    >>> add_datepart(df, 'A')
    >>> df

        AYear AMonth AWeek ADay ADayofweek ADayofyear AIs_month_end AIs_month_start AIs_quarter_end AIs_quarter_start AIs_year_end AIs_year_start AElapsed
    0   2000  3      10    11   5          71         False         False           False           False             False        False          952732800
    1   2000  3      10    12   6          72         False         False           False           False             False        False          952819200
    2   2000  3      11    13   0          73         False         False           False           False             False        False          952905600
    """
    fld = df[fldname]
    fld_dtype = fld.dtype
    # Treat timezone-aware datetimes like plain datetime64 for the dtype check.
    if isinstance(fld_dtype, pd.core.dtypes.dtypes.DatetimeTZDtype):
        fld_dtype = np.datetime64

    if not np.issubdtype(fld_dtype, np.datetime64):
        df[fldname] = fld = pd.to_datetime(fld, infer_datetime_format=True, errors=errors)
    # Column prefix: the field name with a trailing "date"/"Date" stripped.
    targ_pre = re.sub('[Dd]ate$', '', fldname)
    attr = ['Year', 'Month', 'Week', 'Day', 'Dayofweek', 'Dayofyear',
            'Is_month_end', 'Is_month_start', 'Is_quarter_end', 'Is_quarter_start', 'Is_year_end', 'Is_year_start']
    if time: attr = attr + ['Hour', 'Minute', 'Second']
    for n in attr: df[targ_pre + n] = getattr(fld.dt, n.lower())
    # Seconds since the Unix epoch (int64 view of the datetime64[ns] values).
    df[targ_pre + 'Elapsed'] = fld.astype(np.int64) // 10 ** 9
    if drop: df.drop(fldname, axis=1, inplace=True)
def is_date(x): return np.issubdtype(x.dtype, np.datetime64)  # True when x (array/Series) holds datetime64 values
def train_cats(df):
    """Convert every string column of df into an ordered categorical, in place.

    Numeric (and other non-string) columns are left untouched. After this
    call, `apply_cats` can replicate the learned category codes on another
    frame.

    Examples:
    ---------
    >>> df = pd.DataFrame({'col1' : [1, 2, 3], 'col2' : ['a', 'b', 'a']})
    >>> train_cats(df)
    # df['col2'] is now an ordered category dtype; df['col1'] is unchanged.
    """
    for name, column in df.items():
        if not is_string_dtype(column):
            continue
        df[name] = column.astype('category').cat.as_ordered()
def apply_cats(df, trn):
    """Changes any columns of strings in df into categorical variables using trn as
    a template for the category codes.

    Parameters:
    -----------
    df: A pandas dataframe. Any columns of strings will be changed to
        categorical values. The category codes are determined by trn.

    trn: A pandas dataframe. When creating a category for df, it looks up the
        what the category's code were in trn and makes those the category codes
        for df.

    Examples:
    ---------
    >>> df = pd.DataFrame({'col1' : [1, 2, 3], 'col2' : ['a', 'b', 'a']})
    >>> df

       col1 col2
    0     1    a
    1     2    b
    2     3    a

    note the type of col2 is string

    >>> train_cats(df)
    >>> df

       col1 col2
    0     1    a
    1     2    b
    2     3    a

    now the type of col2 is category {a : 1, b : 2}

    >>> df2 = pd.DataFrame({'col1' : [1, 2, 3], 'col2' : ['b', 'a', 'a']})
    >>> apply_cats(df2, df)

       col1 col2
    0     1    b
    1     2    a
    2     3    a

    now the type of col is category {a : 1, b : 2}
    """
    # Only convert columns that exist in trn AND were categorical there.
    for n,c in df.items():
        if (n in trn.columns) and (trn[n].dtype.name=='category'):
            df[n] = c.astype('category').cat.as_ordered()
            # NOTE(review): set_categories(..., inplace=True) is deprecated and
            # removed in newer pandas — confirm against the pinned version.
            df[n].cat.set_categories(trn[n].cat.categories, ordered=True, inplace=True)
def fix_missing(df, col, name, na_dict):
    """Fill missing values in a numeric column and flag the affected rows.

    For a numeric column that contains NaNs (or whose name already appears in
    na_dict), this adds a boolean companion column ``{name}_na`` marking the
    originally-missing rows, replaces the NaNs with ``na_dict[name]`` when
    available and with the column median otherwise, and records the filler
    used back into na_dict. Non-numeric columns are left untouched, and a
    numeric column with no NaNs (and no na_dict entry) gets no ``_na`` column.

    Parameters:
    -----------
    df: The data frame that will be changed.
    col: The column of data to fix by filling in missing data.
    name: The name of the new filled column in df.
    na_dict: A dictionary mapping column names to the value to fill with.

    Returns:
    --------
    na_dict, updated with the filler used for this column (when any filling
    happened).
    """
    if not is_numeric_dtype(col):
        return na_dict
    missing = pd.isnull(col)
    if missing.sum() or (name in na_dict):
        df[name + '_na'] = missing
        fill_value = na_dict[name] if name in na_dict else col.median()
        df[name] = col.fillna(fill_value)
        na_dict[name] = fill_value
    return na_dict
def numericalize(df, col, name, max_n_cat):
    """ Changes the column col from a categorical type to its integer codes.

    The conversion only happens for non-numeric columns, and only when the
    column has MORE categories than max_n_cat (or when max_n_cat is None).
    Columns with at most max_n_cat categories are deliberately left as
    categoricals so that a later pd.get_dummies call can expand them into
    dummy variables instead.

    Parameters:
    -----------
    df: A pandas dataframe. df[name] will be filled with the integer codes from
        col.

    col: The column you wish to change into the categories.
    name: The column name you wish to insert into df. This column will hold the
        integer codes.

    max_n_cat: If col has at most max_n_cat categories it is left unchanged
        (to be one-hot encoded later). If max_n_cat is None, then col will
        always be converted.

    Examples:
    ---------
    >>> df = pd.DataFrame({'col1' : [1, 2, 3], 'col2' : ['a', 'b', 'a']})
    >>> df

       col1 col2
    0     1    a
    1     2    b
    2     3    a

    note the type of col2 is string

    >>> train_cats(df)
    >>> df

       col1 col2
    0     1    a
    1     2    b
    2     3    a

    now the type of col2 is category { a : 1, b : 2}

    >>> numericalize(df, df['col2'], 'col3', None)

       col1 col2 col3
    0     1    a    1
    1     2    b    2
    2     3    a    1
    """
    # Codes are shifted by +1 so that missing values (code -1) map to 0.
    if not is_numeric_dtype(col) and ( max_n_cat is None or len(col.cat.categories)>max_n_cat):
        df[name] = col.cat.codes+1
def scale_vars(df, mapper):
    """Standardize (zero mean, unit variance) every numeric column of df in place.

    If mapper is None a new DataFrameMapper is fitted on df; otherwise the
    given mapper (e.g. one fitted on the training set) is reused so test
    data is scaled with the training statistics. Returns the mapper.
    """
    warnings.filterwarnings('ignore', category=sklearn.exceptions.DataConversionWarning)
    if mapper is None:
        map_f = [([n],StandardScaler()) for n in df.columns if is_numeric_dtype(df[n])]
        mapper = DataFrameMapper(map_f).fit(df)
    df[mapper.transformed_names_] = mapper.transform(df)
    return mapper
def proc_df(df, y_fld=None, skip_flds=None, ignore_flds=None, do_scale=False, na_dict=None,
            preproc_fn=None, max_n_cat=None, subset=None, mapper=None):
    """ proc_df takes a data frame df and splits off the response variable, and
    changes the df into an entirely numeric dataframe. For each column of df
    which is not in skip_flds nor in ignore_flds, na values are replaced by the
    median value of the column.

    Parameters:
    -----------
    df: The data frame you wish to process.

    y_fld: The name of the response variable

    skip_flds: A list of fields that dropped from df.

    ignore_flds: A list of fields that are ignored during processing.

    do_scale: Standardizes each column in df. Takes Boolean Values(True,False)

    na_dict: a dictionary of na columns to add. Na columns are also added if there
        are any missing values.

    preproc_fn: A function that gets applied to df.

    max_n_cat: The maximum number of categories to break into dummy values, instead
        of integer codes.

    subset: Takes a random subset of size subset from df.

    mapper: If do_scale is set as True, the mapper variable
        calculates the values used for scaling of variables during training time (mean and standard deviation).

    Returns:
    --------
    [x, y, nas, mapper(optional)]:

        x: x is the transformed version of df. x will not have the response variable
            and is entirely numeric.

        y: y is the response variable

        nas: returns a dictionary of which nas it created, and the associated median.

        mapper: A DataFrameMapper which stores the mean and standard deviation of the corresponding continuous
        variables which is then used for scaling of during test-time.

    Examples:
    ---------
    >>> df = pd.DataFrame({'col1' : [1, 2, 3], 'col2' : ['a', 'b', 'a']})
    >>> df

       col1 col2
    0     1    a
    1     2    b
    2     3    a

    note the type of col2 is string

    >>> train_cats(df)
    >>> df

       col1 col2
    0     1    a
    1     2    b
    2     3    a

    now the type of col2 is category { a : 1, b : 2}

    >>> x, y, nas = proc_df(df, 'col1')
    >>> x

       col2
    0     1
    1     2
    2     1

    >>> data = DataFrame(pet=["cat", "dog", "dog", "fish", "cat", "dog", "cat", "fish"],
                         children=[4., 6, 3, 3, 2, 3, 5, 4],
                         salary=[90, 24, 44, 27, 32, 59, 36, 27])

    >>> mapper = DataFrameMapper([(:pet, LabelBinarizer()),
                                  ([:children], StandardScaler())])

    >>>round(fit_transform!(mapper, copy(data)), 2)

    8x4 Array{Float64,2}:
    1.0  0.0  0.0   0.21
    0.0  1.0  0.0   1.88
    0.0  1.0  0.0  -0.63
    0.0  0.0  1.0  -0.63
    1.0  0.0  0.0  -1.46
    0.0  1.0  0.0  -0.63
    1.0  0.0  0.0   1.04
    0.0  0.0  1.0   0.21
    """
    if not ignore_flds: ignore_flds=[]
    if not skip_flds: skip_flds=[]
    # Work on a copy (or a random subset) so the caller's frame is untouched.
    if subset: df = get_sample(df,subset)
    else: df = df.copy()
    # Set the ignored columns aside; they are re-attached unchanged at the end.
    ignored_flds = df.loc[:, ignore_flds]
    df.drop(ignore_flds, axis=1, inplace=True)
    if preproc_fn: preproc_fn(df)
    if y_fld is None: y = None
    else:
        # A categorical target is replaced by its integer codes.
        if not is_numeric_dtype(df[y_fld]): df[y_fld] = df[y_fld].cat.codes
        y = df[y_fld].values
        skip_flds += [y_fld]
    df.drop(skip_flds, axis=1, inplace=True)

    if na_dict is None: na_dict = {}
    else: na_dict = na_dict.copy()
    na_dict_initial = na_dict.copy()
    # Fill missing numeric values, collecting the fillers used per column.
    for n,c in df.items(): na_dict = fix_missing(df, c, n, na_dict)
    # Drop _na indicator columns that the caller's na_dict did not ask for.
    if len(na_dict_initial.keys()) > 0:
        df.drop([a + '_na' for a in list(set(na_dict.keys()) - set(na_dict_initial.keys()))], axis=1, inplace=True)
    if do_scale: mapper = scale_vars(df, mapper)
    # High-cardinality categoricals become integer codes; the rest get dummified.
    for n,c in df.items(): numericalize(df, c, n, max_n_cat)
    df = pd.get_dummies(df, dummy_na=True)
    df = pd.concat([ignored_flds, df], axis=1)
    res = [df, y, na_dict]
    if do_scale: res = res + [mapper]
    return res
def rf_feat_importance(m, df):
    """Return a DataFrame of feature importances for the fitted model m.

    The result has columns 'cols' (feature names, taken from df) and 'imp'
    (m.feature_importances_), sorted by importance, most important first.
    """
    importance = pd.DataFrame({'cols': df.columns, 'imp': m.feature_importances_})
    return importance.sort_values('imp', ascending=False)
def set_rf_samples(n):
    """ Changes Scikit learn's random forests to give each tree a random sample of
    n random rows.
    """
    # NOTE(review): monkey-patches sklearn's private forest._generate_sample_indices,
    # so it is tied to a specific sklearn version — confirm against the pinned release.
    forest._generate_sample_indices = (lambda rs, n_samples:
        forest.check_random_state(rs).randint(0, n_samples, n))
def reset_rf_samples():
    """ Undoes the changes produced by set_rf_samples.
    """
    # Restores the default bootstrap behavior (sample size == n_samples).
    forest._generate_sample_indices = (lambda rs, n_samples:
        forest.check_random_state(rs).randint(0, n_samples, n_samples))
def get_nn_mappers(df, cat_vars, contin_vars):
    """Build fitted DataFrameMappers for neural-net preprocessing.

    Mutates df in place by filling nulls (continuous columns with
    max+100 as an out-of-range sentinel, categorical columns with '#NA#'),
    then returns a pair of fitted mappers: (categorical mapper using
    LabelEncoder, continuous mapper using StandardScaler).
    """
    # Fill nulls: continuous -> column max + 100 (sentinel), categorical -> '#NA#'.
    for v in contin_vars: df[v] = df[v].fillna(df[v].max()+100,)
    for v in cat_vars: df[v].fillna('#NA#', inplace=True)

    # list of tuples, containing variable and instance of a transformer for that variable
    # for categoricals, use LabelEncoder to map to integers. For continuous, standardize
    cat_maps = [(o, LabelEncoder()) for o in cat_vars]
    contin_maps = [([o], StandardScaler()) for o in contin_vars]
    return DataFrameMapper(cat_maps).fit(df), DataFrameMapper(contin_maps).fit(df)
|
>>> df
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.