| file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–39k) | suffix (large_string, lengths 0–36.1k) | middle (large_string, lengths 0–29.4k) | fim_type (large_string, 4 classes) |
|---|---|---|---|---|
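Each row below pairs a source file with a fill-in-the-middle (FIM) split of its contents: `prefix` and `suffix` hold the text before and after the removed span, `middle` holds the removed span itself, and `fim_type` records which of the four split strategies observed below produced it (`conditional_block`, `identifier_body`, `identifier_name`, `random_line_split`). As a minimal sketch, concatenating `prefix + middle + suffix` recovers the original file text; the column names come from the header above, while the helper function and example row here are hypothetical, not taken from the dataset.

```python
# Minimal sketch: rebuild the original file text from one FIM row.
# Column names follow the table header above; the example row is made up
# for illustration and is not an actual dataset record.

def reassemble(row: dict) -> str:
    """Concatenate the three text columns back into the full source file."""
    return row["prefix"] + row["middle"] + row["suffix"]


example_row = {
    "file_name": "example.py",
    "prefix": "import itertools\nimport operator\n",
    "middle": "import string\n",
    "suffix": "import subprocess\nimport unittest\n",
    "fim_type": "random_line_split",
}

# The reassembled text contains all five import lines in order.
assert reassemble(example_row).count("\n") == 5
print(reassemble(example_row))
```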
__init__.py
|
# Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
|
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
|
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
|
conditional_block
|
__init__.py
|
# Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
|
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
|
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
|
identifier_body
|
__init__.py
|
# Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
|
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def __init__(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
|
import unittest
|
random_line_split
|
__init__.py
|
# Copyright (c) 2013 - The pycangjie authors
#
# This file is part of pycangjie, the Python bindings to libcangjie.
#
# pycangjie is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pycangjie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with pycangjie. If not, see <http://www.gnu.org/licenses/>.
import itertools
import operator
import string
import subprocess
import unittest
import cangjie
class MetaTest(type):
"""Metaclass for our test cases
The goal is to provide every TestCase class with methods like test_a(),
test_b(), etc..., in other words, one method per potential Cangjie input
code.
Well, not quite, because that would be 12356630 methods (the number of
strings composed of 1 to 5 lowercase ascii letters), and even though my
laptop has 8Go of RAM, the test process gets killed by the OOM killer. :)
So we cheat, and use libcangjie's wildcard support, so that we only
generate 26 + 26^2 = 702 methods.
"""
def
|
(cls, name, bases, dct):
super(MetaTest, cls).__init__(name, bases, dct)
def gen_codes():
"""Generate the 702 possible input codes"""
# First, the 1-character codes
for c in string.ascii_lowercase:
yield c
# Next, the 2-characters-with-wildcard codes
for t in itertools.product(string.ascii_lowercase, repeat=2):
yield '*'.join(t)
def tester(code):
def func(cls):
return cls.run_test(code)
return func
# Generate the test_* methods
for code in gen_codes():
setattr(cls, "test_%s" % code.replace("*", ""), tester(code))
class BaseTestCase(unittest.TestCase):
"""Base test class, grouping the common stuff for all our unit tests"""
def __init__(self, name):
super().__init__(name)
self.cli_cmd = ["/usr/bin/libcangjie_cli"] + self.cli_options
self.language = (cangjie.filters.BIG5 | cangjie.filters.HKSCS |
cangjie.filters.PUNCTUATION |
cangjie.filters.CHINESE |
cangjie.filters.ZHUYIN | cangjie.filters.KANJI |
cangjie.filters.KATAKANA |
cangjie.filters.HIRAGANA |
cangjie.filters.SYMBOLS)
def setUp(self):
self.cj = cangjie.Cangjie(self.version, self.language)
def tearDown(self):
del self.cj
def run_command(self, cmd):
"""Run a command, deal with errors, and return its stdout"""
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = proc.communicate()
try:
cangjie.errors.handle_error_code(proc.returncode,
msg="Unknown error while running"
" libcangjie_cli (%d)"
% proc.returncode)
except cangjie.errors.CangjieNoCharsError:
return ""
try:
return out.decode("utf-8")
except UnicodeDecodeError:
# Python's 'utf-8' codec trips over b"\xed\xa1\x9d\xed\xbc\xb2",
# but according to [1] and [2], it is a valid sequence of 2 chars:
# U+D85D \xed\xa1\x9d
# U+DF32 \xed\xbc\xb2
# [1] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=55389&utf8=string-literal
# [2] http://www.utf8-chartable.de/unicode-utf8-table.pl?start=57138&utf8=string-literal
# TODO: Investigate this further, and eventually open a bug report
out2 = []
for line in out.split("\n".encode("utf-8")):
try:
out2.append(line.decode("utf-8"))
except UnicodeDecodeError:
pass
return "\n".join(out2)
def run_test(self, input_code):
"""Run the actual test
This compares the output of the libcangjie_cli tool with the output
from pycangjie.
The idea is that if pycangjie produces the same results as a C++ tool
compiled against libcangjie, then pycangjie properly wraps libcangjie.
We do not try to verify that pycangjie produces valid results here,
validity is to be checked in libcangjie.
Note that this whole test is based on scraping the output of
libcangjie_cli, which is quite fragile.
"""
# Get a list of CangjieChar from libcangjie_cli as a reference
tmp_expected = self.run_command(self.cli_cmd+[input_code]).split("\n")
tmp_expected = map(lambda x: x.strip(" \n"), tmp_expected)
tmp_expected = filter(lambda x: len(x) > 0, tmp_expected)
expected = []
for item in tmp_expected:
chchar, simpchar, code, frequency = item.split(", ")
chchar = chchar.split(": ")[-1].strip("'")
simpchar = simpchar.split(": ")[-1].strip("'")
code = code.split(": ")[-1].strip("'")
frequency = int(frequency.split(" ")[-1])
expected.append(cangjie._core.CangjieChar(chchar.encode("utf-8"),
simpchar.encode("utf-8"),
code.encode("utf-8"),
frequency))
expected = sorted(expected, key=operator.attrgetter('chchar', 'code'))
try:
# And compare with what pycangjie produces
results = sorted(self.cj.get_characters(input_code),
key=operator.attrgetter('chchar', 'code'))
self.assertEqual(results, expected)
except cangjie.errors.CangjieNoCharsError:
self.assertEqual(len(expected), 0)
|
__init__
|
identifier_name
|
java_protobuf_library.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def
|
(self, payload=None, buildflags=None, imports=None, **kwargs):
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-version
# and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingeprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
logger.warn(" Target definition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
self.add_labels('codegen')
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""
return self.payload.import_specs
|
__init__
|
identifier_name
|
java_protobuf_library.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def __init__(self, payload=None, buildflags=None, imports=None, **kwargs):
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-version
# and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingeprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
|
self.add_labels('codegen')
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""
return self.payload.import_specs
|
logger.warn(" Target definition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
|
conditional_block
|
java_protobuf_library.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
|
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def __init__(self, payload=None, buildflags=None, imports=None, **kwargs):
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-version
# and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingeprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
logger.warn(" Target definition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
self.add_labels('codegen')
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""
return self.payload.import_specs
|
random_line_split
|
|
java_protobuf_library.py
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.backend.jvm.targets.import_jars_mixin import ImportJarsMixin
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.base.payload import Payload
from pants.base.payload_field import PrimitiveField
logger = logging.getLogger(__name__)
class JavaProtobufLibrary(ImportJarsMixin, JvmTarget):
"""Generates a stub Java library from protobuf IDL files."""
def __init__(self, payload=None, buildflags=None, imports=None, **kwargs):
|
@property
def imported_jar_library_specs(self):
"""List of JarLibrary specs to import.
Required to implement the ImportJarsMixin.
"""
return self.payload.import_specs
|
"""
:param buildflags: Unused, and will be removed in a future release.
:param list imports: List of addresses of `jar_library <#jar_library>`_
targets which contain .proto definitions.
"""
payload = payload or Payload()
# TODO(Eric Ayers): The target needs to incorporate the settings of --gen-protoc-version
# and --gen-protoc-plugins into the fingerprint. Consider adding a custom FingeprintStrategy
# into ProtobufGen to get it.
payload.add_fields({
'import_specs': PrimitiveField(imports or ())
})
super(JavaProtobufLibrary, self).__init__(payload=payload, **kwargs)
if buildflags is not None:
logger.warn(" Target definition at {address} sets attribute 'buildflags' which is "
"ignored and will be removed in a future release"
.format(address=self.address.spec))
self.add_labels('codegen')
|
identifier_body
|
html_fragment.py
|
#!/usr/bin/python
#
# Urwid html fragment output wrapper for "screen shots"
# Copyright (C) 2004-2007 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
HTML PRE-based UI implementation
"""
from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen
# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])
_default_foreground = 'black'
_default_background = 'light gray'
class HtmlGeneratorSimulationError(Exception):
pass
class HtmlGenerator(BaseScreen):
# class variables
fragments = []
sizes = []
keys = []
started = True
def
|
(self):
super(HtmlGenerator, self).__init__()
self.colors = 16
self.bright_is_bold = False # ignored
self.has_underline = True # ignored
self.register_palette_entry(None,
_default_foreground, _default_background)
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
if colors is None:
colors = self.colors
if bright_is_bold is None:
bright_is_bold = self.bright_is_bold
if has_underline is None:
has_underline = self.has_underline
self.colors = colors
self.bright_is_bold = bright_is_bold
self.has_underline = has_underline
def set_mouse_tracking(self, enable=True):
"""Not yet implemented"""
pass
def start(self):
pass
def stop(self):
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def run_wrapper(self,fn):
"""Call fn."""
return fn()
def draw_screen(self, (cols, rows), r ):
"""Create an html fragment from the render object.
Append it to HtmlGenerator.fragments list.
"""
# collect output in l
l = []
assert r.rows() == rows
if r.cursor is not None:
cx, cy = r.cursor
else:
cx = cy = None
y = -1
for row in r.content():
y += 1
col = 0
for a, cs, run in row:
run = run.translate(_trans_table)
if isinstance(a, AttrSpec):
aspec = a
else:
aspec = self._palette[a][
{1: 1, 16: 0, 88:2, 256:3}[self.colors]]
if y == cy and col <= cx:
run_width = util.calc_width(run, 0,
len(run))
if col+run_width > cx:
l.append(html_span(run,
aspec, cx-col))
else:
l.append(html_span(run, aspec))
col += run_width
else:
l.append(html_span(run, aspec))
l.append("\n")
# add the fragment to the list
self.fragments.append( "<pre>%s</pre>" % "".join(l) )
def clear(self):
"""
Force the screen to be completely repainted on the next
call to draw_screen().
(does nothing for html_fragment)
"""
pass
def get_cols_rows(self):
"""Return the next screen size in HtmlGenerator.sizes."""
if not self.sizes:
raise HtmlGeneratorSimulationError, "Ran out of screen sizes to return!"
return self.sizes.pop(0)
def get_input(self, raw_keys=False):
"""Return the next list of keypresses in HtmlGenerator.keys."""
if not self.keys:
raise ExitMainLoop()
if raw_keys:
return (self.keys.pop(0), [])
return self.keys.pop(0)
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
_default_aspec.get_rgb_values())
def html_span(s, aspec, cursor = -1):
fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
# use real colours instead of default fg/bg
if fg_r is None:
fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
if bg_r is None:
bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
if aspec.standout:
html_fg, html_bg = html_bg, html_fg
extra = (";text-decoration:underline" * aspec.underline +
";font-weight:bold" * aspec.bold)
def html_span(fg, bg, s):
if not s: return ""
return ('<span style="color:%s;'
'background:%s%s">%s</span>' %
(fg, bg, extra, html_escape(s)))
if cursor >= 0:
c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
c2_off = util.move_next_char(s, c_off, len(s))
return (html_span(html_fg, html_bg, s[:c_off]) +
html_span(html_bg, html_fg, s[c_off:c2_off]) +
html_span(html_fg, html_bg, s[c2_off:]))
else:
return html_span(html_fg, html_bg, s)
def html_escape(text):
"""Escape text so that it will be displayed safely within HTML"""
text = text.replace('&','&amp;')
text = text.replace('<','&lt;')
text = text.replace('>','&gt;')
return text
def screenshot_init( sizes, keys ):
"""
Replace curses_display.Screen and raw_display.Screen class with
HtmlGenerator.
Call this function before executing an application that uses
curses_display.Screen to have that code use HtmlGenerator instead.
sizes -- list of ( columns, rows ) tuples to be returned by each call
to HtmlGenerator.get_cols_rows()
keys -- list of lists of keys to be returned by each call to
HtmlGenerator.get_input()
Lists of keys may include "window resize" to force the application to
call get_cols_rows and read a new screen size.
For example, the following call will prepare an application to:
1. start in 80x25 with its first call to get_cols_rows()
2. take a screenshot when it calls draw_screen(..)
3. simulate 5 "down" keys from get_input()
4. take a screenshot when it calls draw_screen(..)
5. simulate keys "a", "b", "c" and a "window resize"
6. resize to 20x10 on its second call to get_cols_rows()
7. take a screenshot when it calls draw_screen(..)
8. simulate a "Q" keypress to quit the application
screenshot_init( [ (80,25), (20,10) ],
[ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
"""
try:
for (row,col) in sizes:
assert type(row) == int
assert row>0 and col>0
except (AssertionError, ValueError):
raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"
try:
for l in keys:
assert type(l) == list
for k in l:
assert type(k) == str
except (AssertionError, ValueError):
raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"
import curses_display
curses_display.Screen = HtmlGenerator
import raw_display
raw_display.Screen = HtmlGenerator
HtmlGenerator.sizes = sizes
HtmlGenerator.keys = keys
def screenshot_collect():
"""Return screenshots as a list of HTML fragments."""
l = HtmlGenerator.fragments
HtmlGenerator.fragments = []
return l
|
__init__
|
identifier_name
|
html_fragment.py
|
#!/usr/bin/python
#
# Urwid html fragment output wrapper for "screen shots"
# Copyright (C) 2004-2007 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
HTML PRE-based UI implementation
"""
from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen
# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])
_default_foreground = 'black'
_default_background = 'light gray'
class HtmlGeneratorSimulationError(Exception):
pass
class HtmlGenerator(BaseScreen):
# class variables
fragments = []
sizes = []
keys = []
started = True
def __init__(self):
super(HtmlGenerator, self).__init__()
self.colors = 16
self.bright_is_bold = False # ignored
self.has_underline = True # ignored
self.register_palette_entry(None,
_default_foreground, _default_background)
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
if colors is None:
colors = self.colors
if bright_is_bold is None:
bright_is_bold = self.bright_is_bold
if has_underline is None:
has_underline = self.has_underline
self.colors = colors
self.bright_is_bold = bright_is_bold
self.has_underline = has_underline
def set_mouse_tracking(self, enable=True):
"""Not yet implemented"""
pass
def start(self):
pass
def stop(self):
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def run_wrapper(self,fn):
"""Call fn."""
return fn()
def draw_screen(self, (cols, rows), r ):
"""Create an html fragment from the render object.
Append it to HtmlGenerator.fragments list.
"""
# collect output in l
l = []
assert r.rows() == rows
if r.cursor is not None:
cx, cy = r.cursor
else:
cx = cy = None
y = -1
for row in r.content():
y += 1
col = 0
for a, cs, run in row:
run = run.translate(_trans_table)
if isinstance(a, AttrSpec):
aspec = a
else:
aspec = self._palette[a][
{1: 1, 16: 0, 88:2, 256:3}[self.colors]]
if y == cy and col <= cx:
run_width = util.calc_width(run, 0,
len(run))
if col+run_width > cx:
l.append(html_span(run,
aspec, cx-col))
else:
l.append(html_span(run, aspec))
col += run_width
else:
l.append(html_span(run, aspec))
l.append("\n")
# add the fragment to the list
self.fragments.append( "<pre>%s</pre>" % "".join(l) )
def clear(self):
"""
Force the screen to be completely repainted on the next
call to draw_screen().
(does nothing for html_fragment)
"""
pass
def get_cols_rows(self):
"""Return the next screen size in HtmlGenerator.sizes."""
if not self.sizes:
raise HtmlGeneratorSimulationError, "Ran out of screen sizes to return!"
return self.sizes.pop(0)
def get_input(self, raw_keys=False):
"""Return the next list of keypresses in HtmlGenerator.keys."""
if not self.keys:
raise ExitMainLoop()
if raw_keys:
return (self.keys.pop(0), [])
return self.keys.pop(0)
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
_default_aspec.get_rgb_values())
def html_span(s, aspec, cursor = -1):
fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
# use real colours instead of default fg/bg
if fg_r is None:
fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
if bg_r is None:
bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
if aspec.standout:
html_fg, html_bg = html_bg, html_fg
extra = (";text-decoration:underline" * aspec.underline +
";font-weight:bold" * aspec.bold)
def html_span(fg, bg, s):
|
if cursor >= 0:
c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
c2_off = util.move_next_char(s, c_off, len(s))
return (html_span(html_fg, html_bg, s[:c_off]) +
html_span(html_bg, html_fg, s[c_off:c2_off]) +
html_span(html_fg, html_bg, s[c2_off:]))
else:
return html_span(html_fg, html_bg, s)
def html_escape(text):
"""Escape text so that it will be displayed safely within HTML"""
text = text.replace('&','&amp;')
text = text.replace('<','&lt;')
text = text.replace('>','&gt;')
return text
def screenshot_init( sizes, keys ):
"""
Replace curses_display.Screen and raw_display.Screen class with
HtmlGenerator.
Call this function before executing an application that uses
curses_display.Screen to have that code use HtmlGenerator instead.
sizes -- list of ( columns, rows ) tuples to be returned by each call
to HtmlGenerator.get_cols_rows()
keys -- list of lists of keys to be returned by each call to
HtmlGenerator.get_input()
Lists of keys may include "window resize" to force the application to
call get_cols_rows and read a new screen size.
For example, the following call will prepare an application to:
1. start in 80x25 with its first call to get_cols_rows()
2. take a screenshot when it calls draw_screen(..)
3. simulate 5 "down" keys from get_input()
4. take a screenshot when it calls draw_screen(..)
5. simulate keys "a", "b", "c" and a "window resize"
6. resize to 20x10 on its second call to get_cols_rows()
7. take a screenshot when it calls draw_screen(..)
8. simulate a "Q" keypress to quit the application
screenshot_init( [ (80,25), (20,10) ],
[ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
"""
try:
for (row,col) in sizes:
assert type(row) == int
assert row>0 and col>0
except (AssertionError, ValueError):
raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"
try:
for l in keys:
assert type(l) == list
for k in l:
assert type(k) == str
except (AssertionError, ValueError):
raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"
import curses_display
curses_display.Screen = HtmlGenerator
import raw_display
raw_display.Screen = HtmlGenerator
HtmlGenerator.sizes = sizes
HtmlGenerator.keys = keys
def screenshot_collect():
"""Return screenshots as a list of HTML fragments."""
l = HtmlGenerator.fragments
HtmlGenerator.fragments = []
return l
|
if not s: return ""
return ('<span style="color:%s;'
'background:%s%s">%s</span>' %
(fg, bg, extra, html_escape(s)))
|
identifier_body
|
html_fragment.py
|
#!/usr/bin/python
#
# Urwid html fragment output wrapper for "screen shots"
# Copyright (C) 2004-2007 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
HTML PRE-based UI implementation
"""
from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen
# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])
_default_foreground = 'black'
_default_background = 'light gray'
class HtmlGeneratorSimulationError(Exception):
pass
class HtmlGenerator(BaseScreen):
# class variables
fragments = []
sizes = []
keys = []
started = True
def __init__(self):
super(HtmlGenerator, self).__init__()
self.colors = 16
self.bright_is_bold = False # ignored
self.has_underline = True # ignored
self.register_palette_entry(None,
_default_foreground, _default_background)
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
if colors is None:
colors = self.colors
if bright_is_bold is None:
bright_is_bold = self.bright_is_bold
if has_underline is None:
has_underline = self.has_underline
self.colors = colors
self.bright_is_bold = bright_is_bold
self.has_underline = has_underline
def set_mouse_tracking(self, enable=True):
"""Not yet implemented"""
pass
def start(self):
pass
def stop(self):
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def run_wrapper(self,fn):
"""Call fn."""
return fn()
def draw_screen(self, (cols, rows), r ):
"""Create an html fragment from the render object.
Append it to HtmlGenerator.fragments list.
"""
# collect output in l
l = []
assert r.rows() == rows
if r.cursor is not None:
cx, cy = r.cursor
else:
cx = cy = None
y = -1
for row in r.content():
y += 1
col = 0
for a, cs, run in row:
run = run.translate(_trans_table)
if isinstance(a, AttrSpec):
aspec = a
else:
|
{1: 1, 16: 0, 88:2, 256:3}[self.colors]]
if y == cy and col <= cx:
run_width = util.calc_width(run, 0,
len(run))
if col+run_width > cx:
l.append(html_span(run,
aspec, cx-col))
else:
l.append(html_span(run, aspec))
col += run_width
else:
l.append(html_span(run, aspec))
l.append("\n")
# add the fragment to the list
self.fragments.append( "<pre>%s</pre>" % "".join(l) )
def clear(self):
"""
Force the screen to be completely repainted on the next
call to draw_screen().
(does nothing for html_fragment)
"""
pass
def get_cols_rows(self):
"""Return the next screen size in HtmlGenerator.sizes."""
if not self.sizes:
raise HtmlGeneratorSimulationError, "Ran out of screen sizes to return!"
return self.sizes.pop(0)
def get_input(self, raw_keys=False):
"""Return the next list of keypresses in HtmlGenerator.keys."""
if not self.keys:
raise ExitMainLoop()
if raw_keys:
return (self.keys.pop(0), [])
return self.keys.pop(0)
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
_default_aspec.get_rgb_values())
def html_span(s, aspec, cursor = -1):
fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
# use real colours instead of default fg/bg
if fg_r is None:
fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
if bg_r is None:
bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
if aspec.standout:
html_fg, html_bg = html_bg, html_fg
extra = (";text-decoration:underline" * aspec.underline +
";font-weight:bold" * aspec.bold)
def html_span(fg, bg, s):
if not s: return ""
return ('<span style="color:%s;'
'background:%s%s">%s</span>' %
(fg, bg, extra, html_escape(s)))
if cursor >= 0:
c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
c2_off = util.move_next_char(s, c_off, len(s))
return (html_span(html_fg, html_bg, s[:c_off]) +
html_span(html_bg, html_fg, s[c_off:c2_off]) +
html_span(html_fg, html_bg, s[c2_off:]))
else:
return html_span(html_fg, html_bg, s)
def html_escape(text):
"""Escape text so that it will be displayed safely within HTML"""
text = text.replace('&','&amp;')
text = text.replace('<','&lt;')
text = text.replace('>','&gt;')
return text
def screenshot_init( sizes, keys ):
"""
Replace curses_display.Screen and raw_display.Screen class with
HtmlGenerator.
Call this function before executing an application that uses
curses_display.Screen to have that code use HtmlGenerator instead.
sizes -- list of ( columns, rows ) tuples to be returned by each call
to HtmlGenerator.get_cols_rows()
keys -- list of lists of keys to be returned by each call to
HtmlGenerator.get_input()
Lists of keys may include "window resize" to force the application to
call get_cols_rows and read a new screen size.
For example, the following call will prepare an application to:
1. start in 80x25 with its first call to get_cols_rows()
2. take a screenshot when it calls draw_screen(..)
3. simulate 5 "down" keys from get_input()
4. take a screenshot when it calls draw_screen(..)
5. simulate keys "a", "b", "c" and a "window resize"
6. resize to 20x10 on its second call to get_cols_rows()
7. take a screenshot when it calls draw_screen(..)
8. simulate a "Q" keypress to quit the application
screenshot_init( [ (80,25), (20,10) ],
[ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
"""
try:
for (row,col) in sizes:
assert type(row) == int
assert row>0 and col>0
except (AssertionError, ValueError):
raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"
try:
for l in keys:
assert type(l) == list
for k in l:
assert type(k) == str
except (AssertionError, ValueError):
raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"
import curses_display
curses_display.Screen = HtmlGenerator
import raw_display
raw_display.Screen = HtmlGenerator
HtmlGenerator.sizes = sizes
HtmlGenerator.keys = keys
def screenshot_collect():
"""Return screenshots as a list of HTML fragments."""
l = HtmlGenerator.fragments
HtmlGenerator.fragments = []
return l
|
aspec = self._palette[a][
|
random_line_split
|
html_fragment.py
|
#!/usr/bin/python
#
# Urwid html fragment output wrapper for "screen shots"
# Copyright (C) 2004-2007 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
"""
HTML PRE-based UI implementation
"""
from urwid import util
from urwid.main_loop import ExitMainLoop
from urwid.display_common import AttrSpec, BaseScreen
# replace control characters with ?'s
_trans_table = "?" * 32 + "".join([chr(x) for x in range(32, 256)])
_default_foreground = 'black'
_default_background = 'light gray'
class HtmlGeneratorSimulationError(Exception):
pass
class HtmlGenerator(BaseScreen):
# class variables
fragments = []
sizes = []
keys = []
started = True
def __init__(self):
super(HtmlGenerator, self).__init__()
self.colors = 16
self.bright_is_bold = False # ignored
self.has_underline = True # ignored
self.register_palette_entry(None,
_default_foreground, _default_background)
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
if colors is None:
colors = self.colors
if bright_is_bold is None:
bright_is_bold = self.bright_is_bold
if has_underline is None:
has_underline = self.has_underline
self.colors = colors
self.bright_is_bold = bright_is_bold
self.has_underline = has_underline
def set_mouse_tracking(self, enable=True):
"""Not yet implemented"""
pass
def start(self):
pass
def stop(self):
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def run_wrapper(self,fn):
"""Call fn."""
return fn()
def draw_screen(self, (cols, rows), r ):
"""Create an html fragment from the render object.
Append it to HtmlGenerator.fragments list.
"""
# collect output in l
l = []
assert r.rows() == rows
if r.cursor is not None:
cx, cy = r.cursor
else:
cx = cy = None
y = -1
for row in r.content():
y += 1
col = 0
for a, cs, run in row:
run = run.translate(_trans_table)
if isinstance(a, AttrSpec):
aspec = a
else:
aspec = self._palette[a][
{1: 1, 16: 0, 88:2, 256:3}[self.colors]]
if y == cy and col <= cx:
run_width = util.calc_width(run, 0,
len(run))
if col+run_width > cx:
l.append(html_span(run,
aspec, cx-col))
else:
l.append(html_span(run, aspec))
col += run_width
else:
l.append(html_span(run, aspec))
l.append("\n")
# add the fragment to the list
self.fragments.append( "<pre>%s</pre>" % "".join(l) )
def clear(self):
"""
Force the screen to be completely repainted on the next
call to draw_screen().
(does nothing for html_fragment)
"""
pass
def get_cols_rows(self):
"""Return the next screen size in HtmlGenerator.sizes."""
if not self.sizes:
raise HtmlGeneratorSimulationError, "Ran out of screen sizes to return!"
return self.sizes.pop(0)
def get_input(self, raw_keys=False):
"""Return the next list of keypresses in HtmlGenerator.keys."""
if not self.keys:
raise ExitMainLoop()
if raw_keys:
return (self.keys.pop(0), [])
return self.keys.pop(0)
_default_aspec = AttrSpec(_default_foreground, _default_background)
(_d_fg_r, _d_fg_g, _d_fg_b, _d_bg_r, _d_bg_g, _d_bg_b) = (
_default_aspec.get_rgb_values())
def html_span(s, aspec, cursor = -1):
fg_r, fg_g, fg_b, bg_r, bg_g, bg_b = aspec.get_rgb_values()
# use real colours instead of default fg/bg
if fg_r is None:
fg_r, fg_g, fg_b = _d_fg_r, _d_fg_g, _d_fg_b
if bg_r is None:
|
html_fg = "#%02x%02x%02x" % (fg_r, fg_g, fg_b)
html_bg = "#%02x%02x%02x" % (bg_r, bg_g, bg_b)
if aspec.standout:
html_fg, html_bg = html_bg, html_fg
extra = (";text-decoration:underline" * aspec.underline +
";font-weight:bold" * aspec.bold)
def html_span(fg, bg, s):
if not s: return ""
return ('<span style="color:%s;'
'background:%s%s">%s</span>' %
(fg, bg, extra, html_escape(s)))
if cursor >= 0:
c_off, _ign = util.calc_text_pos(s, 0, len(s), cursor)
c2_off = util.move_next_char(s, c_off, len(s))
return (html_span(html_fg, html_bg, s[:c_off]) +
html_span(html_bg, html_fg, s[c_off:c2_off]) +
html_span(html_fg, html_bg, s[c2_off:]))
else:
return html_span(html_fg, html_bg, s)
def html_escape(text):
"""Escape text so that it will be displayed safely within HTML"""
text = text.replace('&','&amp;')
text = text.replace('<','&lt;')
text = text.replace('>','&gt;')
return text
def screenshot_init( sizes, keys ):
"""
Replace curses_display.Screen and raw_display.Screen class with
HtmlGenerator.
Call this function before executing an application that uses
curses_display.Screen to have that code use HtmlGenerator instead.
sizes -- list of ( columns, rows ) tuples to be returned by each call
to HtmlGenerator.get_cols_rows()
keys -- list of lists of keys to be returned by each call to
HtmlGenerator.get_input()
Lists of keys may include "window resize" to force the application to
call get_cols_rows and read a new screen size.
For example, the following call will prepare an application to:
1. start in 80x25 with its first call to get_cols_rows()
2. take a screenshot when it calls draw_screen(..)
3. simulate 5 "down" keys from get_input()
4. take a screenshot when it calls draw_screen(..)
5. simulate keys "a", "b", "c" and a "window resize"
6. resize to 20x10 on its second call to get_cols_rows()
7. take a screenshot when it calls draw_screen(..)
8. simulate a "Q" keypress to quit the application
screenshot_init( [ (80,25), (20,10) ],
[ ["down"]*5, ["a","b","c","window resize"], ["Q"] ] )
"""
try:
for (row,col) in sizes:
assert type(row) == int
assert row>0 and col>0
except (AssertionError, ValueError):
raise Exception, "sizes must be in the form [ (col1,row1), (col2,row2), ...]"
try:
for l in keys:
assert type(l) == list
for k in l:
assert type(k) == str
except (AssertionError, ValueError):
raise Exception, "keys must be in the form [ [keyA1, keyA2, ..], [keyB1, ..], ...]"
import curses_display
curses_display.Screen = HtmlGenerator
import raw_display
raw_display.Screen = HtmlGenerator
HtmlGenerator.sizes = sizes
HtmlGenerator.keys = keys
def screenshot_collect():
"""Return screenshots as a list of HTML fragments."""
l = HtmlGenerator.fragments
HtmlGenerator.fragments = []
return l
|
bg_r, bg_g, bg_b = _d_bg_r, _d_bg_g, _d_bg_b
|
conditional_block
|
userscripts.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlheadelement::HTMLHeadElement;
use crate::dom::node::document_from_node;
use js::jsval::UndefinedValue;
use servo_config::opts;
use std::fs::{read_dir, File};
use std::io::Read;
use std::path::PathBuf;
pub fn load_script(head: &HTMLHeadElement)
|
{
let path_str = match opts::get().userscripts.clone() {
Some(p) => p,
None => return,
};
let doc = document_from_node(head);
let win = Trusted::new(doc.window());
doc.add_delayed_task(task!(UserScriptExecute: move || {
let win = win.root();
let cx = win.get_cx();
rooted!(in(cx) let mut rval = UndefinedValue());
let path = PathBuf::from(&path_str);
let mut files = read_dir(&path)
.expect("Bad path passed to --userscripts")
.filter_map(|e| e.ok())
.map(|e| e.path())
.collect::<Vec<_>>();
files.sort();
for file in files {
let mut f = File::open(&file).unwrap();
let mut contents = vec![];
f.read_to_end(&mut contents).unwrap();
let script_text = String::from_utf8_lossy(&contents);
win.upcast::<GlobalScope>()
.evaluate_script_on_global_with_result(
&script_text,
&file.to_string_lossy(),
rval.handle_mut(),
1,
);
}
}));
}
|
identifier_body
|
|
userscripts.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlheadelement::HTMLHeadElement;
use crate::dom::node::document_from_node;
use js::jsval::UndefinedValue;
use servo_config::opts;
use std::fs::{read_dir, File};
use std::io::Read;
use std::path::PathBuf;
pub fn load_script(head: &HTMLHeadElement) {
let path_str = match opts::get().userscripts.clone() {
Some(p) => p,
None => return,
};
let doc = document_from_node(head);
let win = Trusted::new(doc.window());
doc.add_delayed_task(task!(UserScriptExecute: move || {
let win = win.root();
let cx = win.get_cx();
rooted!(in(cx) let mut rval = UndefinedValue());
let path = PathBuf::from(&path_str);
let mut files = read_dir(&path)
.expect("Bad path passed to --userscripts")
.filter_map(|e| e.ok())
.map(|e| e.path())
.collect::<Vec<_>>();
files.sort();
for file in files {
let mut f = File::open(&file).unwrap();
let mut contents = vec![];
f.read_to_end(&mut contents).unwrap();
let script_text = String::from_utf8_lossy(&contents);
win.upcast::<GlobalScope>()
|
&file.to_string_lossy(),
rval.handle_mut(),
1,
);
}
}));
}
|
.evaluate_script_on_global_with_result(
&script_text,
|
random_line_split
|
userscripts.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlheadelement::HTMLHeadElement;
use crate::dom::node::document_from_node;
use js::jsval::UndefinedValue;
use servo_config::opts;
use std::fs::{read_dir, File};
use std::io::Read;
use std::path::PathBuf;
pub fn
|
(head: &HTMLHeadElement) {
let path_str = match opts::get().userscripts.clone() {
Some(p) => p,
None => return,
};
let doc = document_from_node(head);
let win = Trusted::new(doc.window());
doc.add_delayed_task(task!(UserScriptExecute: move || {
let win = win.root();
let cx = win.get_cx();
rooted!(in(cx) let mut rval = UndefinedValue());
let path = PathBuf::from(&path_str);
let mut files = read_dir(&path)
.expect("Bad path passed to --userscripts")
.filter_map(|e| e.ok())
.map(|e| e.path())
.collect::<Vec<_>>();
files.sort();
for file in files {
let mut f = File::open(&file).unwrap();
let mut contents = vec![];
f.read_to_end(&mut contents).unwrap();
let script_text = String::from_utf8_lossy(&contents);
win.upcast::<GlobalScope>()
.evaluate_script_on_global_with_result(
&script_text,
&file.to_string_lossy(),
rval.handle_mut(),
1,
);
}
}));
}
|
load_script
|
identifier_name
|
util.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimateTimings, AnimationMetadata, AnimationMetadataType, AnimationOptions, sequence, ɵStyleData} from '@angular/animations';
import {Ast as AnimationAst, AstVisitor as AnimationAstVisitor} from './dsl/animation_ast';
import {AnimationDslVisitor} from './dsl/animation_dsl_visitor';
import {isNode} from './render/shared';
export const ONE_SECOND = 1000;
export const SUBSTITUTION_EXPR_START = '{{';
export const SUBSTITUTION_EXPR_END = '}}';
export const ENTER_CLASSNAME = 'ng-enter';
export const LEAVE_CLASSNAME = 'ng-leave';
export const ENTER_SELECTOR = '.ng-enter';
export const LEAVE_SELECTOR = '.ng-leave';
export const NG_TRIGGER_CLASSNAME = 'ng-trigger';
export const NG_TRIGGER_SELECTOR = '.ng-trigger';
export const NG_ANIMATING_CLASSNAME = 'ng-animating';
export const NG_ANIMATING_SELECTOR = '.ng-animating';
export function resolveTimingValue(value: string|number) {
if (typeof value == 'number') return value;
const matches = value.match(/^(-?[\.\d]+)(m?s)/);
if (!matches || matches.length < 2) return 0;
return _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
}
function _convertTimeValueToMS(value: number, unit: string): number {
switch (unit) {
case 's':
return value * ONE_SECOND;
default: // ms or something else
return value;
}
}
export function resolveTiming(
timings: string|number|AnimateTimings, errors: any[], allowNegativeValues?: boolean) {
return timings.hasOwnProperty('duration') ?
<AnimateTimings>timings :
parseTimeExpression(<string|number>timings, errors, allowNegativeValues);
}
function parseTimeExpression(
exp: string|number, errors: string[], allowNegativeValues?: boolean): AnimateTimings {
const regex = /^(-?[\.\d]+)(m?s)(?:\s+(-?[\.\d]+)(m?s))?(?:\s+([-a-z]+(?:\(.+?\))?))?$/i;
let duration: number;
let delay: number = 0;
let easing: string = '';
if (typeof exp === 'string') {
const matches = exp.match(regex);
if (matches === null) {
errors.push(`The provided timing value "${exp}" is invalid.`);
return {duration: 0, delay: 0, easing: ''};
}
duration = _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
const delayMatch = matches[3];
if (delayMatch != null) {
delay = _convertTimeValueToMS(parseFloat(delayMatch), matches[4]);
}
const easingVal = matches[5];
if (easingVal) {
easing = easingVal;
}
} else {
duration = exp;
}
if (!allowNegativeValues) {
let containsErrors = false;
let startIndex = errors.length;
if (duration < 0) {
errors.push(`Duration values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (delay < 0) {
errors.push(`Delay values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (containsErrors) {
errors.splice(startIndex, 0, `The provided timing value "${exp}" is invalid.`);
}
}
return {duration, delay, easing};
}
export function copyObj(
obj: {[key: string]: any}, destination: {[key: string]: any} = {}): {[key: string]: any} {
Object.keys(obj).forEach(prop => {
destination[prop] = obj[prop];
});
return destination;
}
export function normalizeStyles(styles: ɵStyleData|ɵStyleData[]): ɵStyleData {
const normalizedStyles: ɵStyleData = {};
if (Array.isArray(styles)) {
styles.forEach(data => copyStyles(data, false, normalizedStyles));
} else {
copyStyles(styles, false, normalizedStyles);
}
return normalizedStyles;
}
export function copyStyles(
styles: ɵStyleData, readPrototype: boolean, destination: ɵStyleData = {}): ɵStyleData {
if (readPrototype) {
// we make use of a for-in loop so that the
// prototypically inherited properties are
// revealed from the backFill map
for (let prop in styles) {
destination[prop] = styles[prop];
}
} else {
copyObj(styles, destination);
}
return destination;
}
function getStyleAttributeString(element: any, key: string, value: string) {
// Return the key-value pair string to be added to the style attribute for the
// given CSS style key.
if (value) {
return key + ':' + value + ';';
} else {
return '';
}
}
function writeStyleAttribute(element: any) {
// Read the style property of the element and manually reflect it to the
// style attribute. This is needed because Domino on platform-server doesn't
// understand the full set of allowed CSS properties and doesn't reflect some
// of them automatically.
let styleAttrValue = '';
for (let i = 0; i < element.style.length; i++) {
const key = element.style.item(i);
styleAttrValue += getStyleAttributeString(element, key, element.style.getPropertyValue(key));
}
for (const key in element.style) {
// Skip internal Domino properties that don't need to be reflected.
if (!element.style.hasOwnProperty(key) || key.startsWith('_')) {
continue;
}
const dashKey = camelCaseToDashCase(key);
styleAttrValue += getStyleAttributeString(element, dashKey, element.style[key]);
}
element.setAttribute('style', styleAttrValue);
}
export function setStyles(element: any, styles: ɵStyleData, formerStyles?: {[key: string]: any}) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
if (formerStyles && !formerStyles.hasOwnProperty(prop)) {
formerStyles[prop] = element.style[camelProp];
}
element.style[camelProp] = styles[prop];
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function eraseStyles(element: any, styles: ɵStyleData) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
element.style[camelProp] = '';
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function normalizeAnimationEntry(steps: AnimationMetadata|
AnimationMetadata[]): AnimationMetadata {
if (Ar
|
unction validateStyleParams(
value: string|number, options: AnimationOptions, errors: any[]) {
const params = options.params || {};
const matches = extractStyleParams(value);
if (matches.length) {
matches.forEach(varName => {
if (!params.hasOwnProperty(varName)) {
errors.push(
`Unable to resolve the local animation param ${varName} in the given list of values`);
}
});
}
}
const PARAM_REGEX =
new RegExp(`${SUBSTITUTION_EXPR_START}\\s*(.+?)\\s*${SUBSTITUTION_EXPR_END}`, 'g');
export function extractStyleParams(value: string|number): string[] {
let params: string[] = [];
if (typeof value === 'string') {
let match: any;
while (match = PARAM_REGEX.exec(value)) {
params.push(match[1] as string);
}
PARAM_REGEX.lastIndex = 0;
}
return params;
}
export function interpolateParams(
value: string|number, params: {[name: string]: any}, errors: any[]): string|number {
const original = value.toString();
const str = original.replace(PARAM_REGEX, (_, varName) => {
let localVal = params[varName];
// this means that the value was never overridden by the data passed in by the user
if (!params.hasOwnProperty(varName)) {
errors.push(`Please provide a value for the animation param ${varName}`);
localVal = '';
}
return localVal.toString();
});
// we do this to assert that numeric values stay as they are
return str == original ? value : str;
}
export function iteratorToArray(iterator: any): any[] {
const arr: any[] = [];
let item = iterator.next();
while (!item.done) {
arr.push(item.value);
item = iterator.next();
}
return arr;
}
export function mergeAnimationOptions(
source: AnimationOptions, destination: AnimationOptions): AnimationOptions {
if (source.params) {
const p0 = source.params;
if (!destination.params) {
destination.params = {};
}
const p1 = destination.params;
Object.keys(p0).forEach(param => {
if (!p1.hasOwnProperty(param)) {
p1[param] = p0[param];
}
});
}
return destination;
}
const DASH_CASE_REGEXP = /-+([a-z0-9])/g;
export function dashCaseToCamelCase(input: string): string {
return input.replace(DASH_CASE_REGEXP, (...m: any[]) => m[1].toUpperCase());
}
function camelCaseToDashCase(input: string): string {
return input.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
}
export function allowPreviousPlayerStylesMerge(duration: number, delay: number) {
return duration === 0 || delay === 0;
}
export function balancePreviousStylesIntoKeyframes(
element: any, keyframes: {[key: string]: any}[], previousStyles: {[key: string]: any}) {
const previousStyleProps = Object.keys(previousStyles);
if (previousStyleProps.length && keyframes.length) {
let startingKeyframe = keyframes[0];
let missingStyleProps: string[] = [];
previousStyleProps.forEach(prop => {
if (!startingKeyframe.hasOwnProperty(prop)) {
missingStyleProps.push(prop);
}
startingKeyframe[prop] = previousStyles[prop];
});
if (missingStyleProps.length) {
// tslint:disable-next-line
for (var i = 1; i < keyframes.length; i++) {
let kf = keyframes[i];
missingStyleProps.forEach(function(prop) {
kf[prop] = computeStyle(element, prop);
});
}
}
}
return keyframes;
}
export function visitDslNode(
visitor: AnimationDslVisitor, node: AnimationMetadata, context: any): any;
export function visitDslNode(
visitor: AnimationAstVisitor, node: AnimationAst<AnimationMetadataType>, context: any): any;
export function visitDslNode(visitor: any, node: any, context: any): any {
switch (node.type) {
case AnimationMetadataType.Trigger:
return visitor.visitTrigger(node, context);
case AnimationMetadataType.State:
return visitor.visitState(node, context);
case AnimationMetadataType.Transition:
return visitor.visitTransition(node, context);
case AnimationMetadataType.Sequence:
return visitor.visitSequence(node, context);
case AnimationMetadataType.Group:
return visitor.visitGroup(node, context);
case AnimationMetadataType.Animate:
return visitor.visitAnimate(node, context);
case AnimationMetadataType.Keyframes:
return visitor.visitKeyframes(node, context);
case AnimationMetadataType.Style:
return visitor.visitStyle(node, context);
case AnimationMetadataType.Reference:
return visitor.visitReference(node, context);
case AnimationMetadataType.AnimateChild:
return visitor.visitAnimateChild(node, context);
case AnimationMetadataType.AnimateRef:
return visitor.visitAnimateRef(node, context);
case AnimationMetadataType.Query:
return visitor.visitQuery(node, context);
case AnimationMetadataType.Stagger:
return visitor.visitStagger(node, context);
default:
throw new Error(`Unable to resolve animation metadata node #${node.type}`);
}
}
export function computeStyle(element: any, prop: string): string {
return (<any>window.getComputedStyle(element))[prop];
}
|
ray.isArray(steps)) {
if (steps.length == 1) return steps[0];
return sequence(steps);
}
return steps as AnimationMetadata;
}
export f
|
identifier_body
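The timing helpers in util.ts above are easiest to follow from concrete inputs, so here is a small, hypothetical usage sketch (not part of the Angular source). It assumes the functions can be imported from './util'; the expected values follow directly from the parsing logic shown above.

import { resolveTimingValue, resolveTiming } from './util';

const errors: any[] = [];

// Bare values: '1.5s' -> 1500, '250ms' -> 250, numbers pass through unchanged.
console.log(resolveTimingValue('1.5s'));   // 1500
console.log(resolveTimingValue('250ms'));  // 250
console.log(resolveTimingValue(400));      // 400

// Full timing expressions resolve to {duration, delay, easing}.
console.log(resolveTiming('1s 100ms ease-out', errors));
// -> { duration: 1000, delay: 100, easing: 'ease-out' }

// Negative durations are rejected unless allowNegativeValues is set.
resolveTiming('-2s', errors);
console.log(errors.length > 0); // true: error messages were pushed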
|
util.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimateTimings, AnimationMetadata, AnimationMetadataType, AnimationOptions, sequence, ɵStyleData} from '@angular/animations';
import {Ast as AnimationAst, AstVisitor as AnimationAstVisitor} from './dsl/animation_ast';
import {AnimationDslVisitor} from './dsl/animation_dsl_visitor';
import {isNode} from './render/shared';
export const ONE_SECOND = 1000;
export const SUBSTITUTION_EXPR_START = '{{';
export const SUBSTITUTION_EXPR_END = '}}';
export const ENTER_CLASSNAME = 'ng-enter';
export const LEAVE_CLASSNAME = 'ng-leave';
export const ENTER_SELECTOR = '.ng-enter';
export const LEAVE_SELECTOR = '.ng-leave';
export const NG_TRIGGER_CLASSNAME = 'ng-trigger';
export const NG_TRIGGER_SELECTOR = '.ng-trigger';
export const NG_ANIMATING_CLASSNAME = 'ng-animating';
export const NG_ANIMATING_SELECTOR = '.ng-animating';
export function resolveTimingValue(value: string|number) {
if (typeof value == 'number') return value;
const matches = value.match(/^(-?[\.\d]+)(m?s)/);
if (!matches || matches.length < 2) return 0;
return _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
}
function _convertTimeValueToMS(value: number, unit: string): number {
switch (unit) {
case 's':
return value * ONE_SECOND;
default: // ms or something else
return value;
}
}
export function resolveTiming(
timings: string|number|AnimateTimings, errors: any[], allowNegativeValues?: boolean) {
return timings.hasOwnProperty('duration') ?
<AnimateTimings>timings :
parseTimeExpression(<string|number>timings, errors, allowNegativeValues);
}
function parseTimeExpression(
exp: string|number, errors: string[], allowNegativeValues?: boolean): AnimateTimings {
const regex = /^(-?[\.\d]+)(m?s)(?:\s+(-?[\.\d]+)(m?s))?(?:\s+([-a-z]+(?:\(.+?\))?))?$/i;
let duration: number;
let delay: number = 0;
let easing: string = '';
if (typeof exp === 'string') {
const matches = exp.match(regex);
if (matches === null) {
errors.push(`The provided timing value "${exp}" is invalid.`);
return {duration: 0, delay: 0, easing: ''};
}
duration = _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
const delayMatch = matches[3];
if (delayMatch != null) {
delay = _convertTimeValueToMS(parseFloat(delayMatch), matches[4]);
}
const easingVal = matches[5];
if (easingVal) {
easing = easingVal;
}
} else {
duration = exp;
}
if (!allowNegativeValues) {
let containsErrors = false;
let startIndex = errors.length;
if (duration < 0) {
errors.push(`Duration values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (delay < 0) {
errors.push(`Delay values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (containsErrors) {
errors.splice(startIndex, 0, `The provided timing value "${exp}" is invalid.`);
}
}
return {duration, delay, easing};
}
export function copyObj(
obj: {[key: string]: any}, destination: {[key: string]: any} = {}): {[key: string]: any} {
Object.keys(obj).forEach(prop => {
destination[prop] = obj[prop];
});
return destination;
}
export function normalizeStyles(styles: ɵStyleData|ɵStyleData[]): ɵStyleData {
const normalizedStyles: ɵStyleData = {};
if (Array.isArray(styles)) {
styles.forEach(data => copyStyles(data, false, normalizedStyles));
} else {
copyStyles(styles, false, normalizedStyles);
}
return normalizedStyles;
}
export function copyStyles(
styles: ɵStyleData, readPrototype: boolean, destination: ɵStyleData = {}): ɵStyleData {
if (readPrototype) {
// we make use of a for-in loop so that the
// prototypically inherited properties are
// revealed from the backFill map
for (let prop in styles) {
destination[prop] = styles[prop];
}
} else {
copyObj(styles, destination);
}
return destination;
}
function getStyleAttributeString(element: any, key: string, value: string) {
// Return the key-value pair string to be added to the style attribute for the
// given CSS style key.
if (value) {
return key + ':' + value + ';';
} else {
return '';
}
}
function writeStyleAttribute(element: any) {
// Read the style property of the element and manually reflect it to the
// style attribute. This is needed because Domino on platform-server doesn't
// understand the full set of allowed CSS properties and doesn't reflect some
// of them automatically.
let styleAttrValue = '';
for (let i = 0; i < element.style.length; i++) {
const key = element.style.item(i);
styleAttrValue += getStyleAttributeString(element, key, element.style.getPropertyValue(key));
}
for (const key in element.style) {
// Skip internal Domino properties that don't need to be reflected.
if (!element.style.hasOwnProperty(key) || key.startsWith('_')) {
continue;
}
const dashKey = camelCaseToDashCase(key);
styleAttrValue += getStyleAttributeString(element, dashKey, element.style[key]);
}
element.setAttribute('style', styleAttrValue);
}
export function setStyles(element: any, styles: ɵStyleData, formerStyles?: {[key: string]: any}) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
if (formerStyles && !formerStyles.hasOwnProperty(prop)) {
formerStyles[prop] = element.style[camelProp];
}
element.style[camelProp] = styles[prop];
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function eraseStyles(element: any, styles: ɵStyleData) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
element.style[camelProp] = '';
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function normalizeAnimationEntry(steps: AnimationMetadata|
AnimationMetadata[]): AnimationMetadata {
if (Array.isArray(steps)) {
if (steps.length == 1) return steps[0];
return sequence(steps);
}
return steps as AnimationMetadata;
}
export function validateStyleParams(
value: string|number, options: AnimationOptions, errors: any[]) {
const params = options.params || {};
const matches = extractStyleParams(value);
if (matches.length) {
matches.forEach(varName => {
if (!params.hasOwnProperty(varName)) {
errors.push(
`Unable to resolve the local animation param ${varName} in the given list of values`);
}
});
}
}
const PARAM_REGEX =
new RegExp(`${SUBSTITUTION_EXPR_START}\\s*(.+?)\\s*${SUBSTITUTION_EXPR_END}`, 'g');
export function extractStyleParams(value: string|number): string[] {
let params: string[] = [];
if (typeof value === 'string') {
let match: any;
while (match = PARAM_REGEX.exec(value)) {
params.push(match[1] as string);
}
PARAM_REGEX.lastIndex = 0;
}
return params;
}
export function interpolateParams(
value: string|number, params: {[name: string]: any}, errors: any[]): string|number {
const original = value.toString();
const str = original.replace(PARAM_REGEX, (_, varName) => {
let localVal = params[varName];
// this means that the value was never overridden by the data passed in by the user
if (!params.hasOwnProperty(varName)) {
er
|
n localVal.toString();
});
// we do this to assert that numeric values stay as they are
return str == original ? value : str;
}
export function iteratorToArray(iterator: any): any[] {
const arr: any[] = [];
let item = iterator.next();
while (!item.done) {
arr.push(item.value);
item = iterator.next();
}
return arr;
}
export function mergeAnimationOptions(
source: AnimationOptions, destination: AnimationOptions): AnimationOptions {
if (source.params) {
const p0 = source.params;
if (!destination.params) {
destination.params = {};
}
const p1 = destination.params;
Object.keys(p0).forEach(param => {
if (!p1.hasOwnProperty(param)) {
p1[param] = p0[param];
}
});
}
return destination;
}
const DASH_CASE_REGEXP = /-+([a-z0-9])/g;
export function dashCaseToCamelCase(input: string): string {
return input.replace(DASH_CASE_REGEXP, (...m: any[]) => m[1].toUpperCase());
}
function camelCaseToDashCase(input: string): string {
return input.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
}
export function allowPreviousPlayerStylesMerge(duration: number, delay: number) {
return duration === 0 || delay === 0;
}
export function balancePreviousStylesIntoKeyframes(
element: any, keyframes: {[key: string]: any}[], previousStyles: {[key: string]: any}) {
const previousStyleProps = Object.keys(previousStyles);
if (previousStyleProps.length && keyframes.length) {
let startingKeyframe = keyframes[0];
let missingStyleProps: string[] = [];
previousStyleProps.forEach(prop => {
if (!startingKeyframe.hasOwnProperty(prop)) {
missingStyleProps.push(prop);
}
startingKeyframe[prop] = previousStyles[prop];
});
if (missingStyleProps.length) {
// tslint:disable-next-line
for (var i = 1; i < keyframes.length; i++) {
let kf = keyframes[i];
missingStyleProps.forEach(function(prop) {
kf[prop] = computeStyle(element, prop);
});
}
}
}
return keyframes;
}
export function visitDslNode(
visitor: AnimationDslVisitor, node: AnimationMetadata, context: any): any;
export function visitDslNode(
visitor: AnimationAstVisitor, node: AnimationAst<AnimationMetadataType>, context: any): any;
export function visitDslNode(visitor: any, node: any, context: any): any {
switch (node.type) {
case AnimationMetadataType.Trigger:
return visitor.visitTrigger(node, context);
case AnimationMetadataType.State:
return visitor.visitState(node, context);
case AnimationMetadataType.Transition:
return visitor.visitTransition(node, context);
case AnimationMetadataType.Sequence:
return visitor.visitSequence(node, context);
case AnimationMetadataType.Group:
return visitor.visitGroup(node, context);
case AnimationMetadataType.Animate:
return visitor.visitAnimate(node, context);
case AnimationMetadataType.Keyframes:
return visitor.visitKeyframes(node, context);
case AnimationMetadataType.Style:
return visitor.visitStyle(node, context);
case AnimationMetadataType.Reference:
return visitor.visitReference(node, context);
case AnimationMetadataType.AnimateChild:
return visitor.visitAnimateChild(node, context);
case AnimationMetadataType.AnimateRef:
return visitor.visitAnimateRef(node, context);
case AnimationMetadataType.Query:
return visitor.visitQuery(node, context);
case AnimationMetadataType.Stagger:
return visitor.visitStagger(node, context);
default:
throw new Error(`Unable to resolve animation metadata node #${node.type}`);
}
}
export function computeStyle(element: any, prop: string): string {
return (<any>window.getComputedStyle(element))[prop];
}
|
rors.push(`Please provide a value for the animation param ${varName}`);
localVal = '';
}
retur
|
conditional_block
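Since this sample's middle falls inside interpolateParams, a short, hypothetical usage sketch of the substitution helpers may help (again assuming imports from './util'; not part of the Angular source). The outputs follow from PARAM_REGEX and the hasOwnProperty check shown above.

import { extractStyleParams, interpolateParams } from './util';

const errors: any[] = [];

// extractStyleParams pulls out every {{param}} name found in a string value.
console.log(extractStyleParams('translate({{x}}px, {{y}}px)')); // ['x', 'y']
console.log(extractStyleParams(100));                           // [] (numbers carry no params)

// interpolateParams substitutes params; a missing one produces an error and ''.
console.log(interpolateParams('{{x}}px', { x: 10 }, errors));   // '10px'
console.log(interpolateParams(5, {}, errors));                  // 5 (numbers come back untouched)
interpolateParams('{{missing}}px', {}, errors);
console.log(errors); // ['Please provide a value for the animation param missing']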
|
util.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimateTimings, AnimationMetadata, AnimationMetadataType, AnimationOptions, sequence, ɵStyleData} from '@angular/animations';
import {Ast as AnimationAst, AstVisitor as AnimationAstVisitor} from './dsl/animation_ast';
import {AnimationDslVisitor} from './dsl/animation_dsl_visitor';
import {isNode} from './render/shared';
export const ONE_SECOND = 1000;
export const SUBSTITUTION_EXPR_START = '{{';
export const SUBSTITUTION_EXPR_END = '}}';
export const ENTER_CLASSNAME = 'ng-enter';
export const LEAVE_CLASSNAME = 'ng-leave';
export const ENTER_SELECTOR = '.ng-enter';
export const LEAVE_SELECTOR = '.ng-leave';
export const NG_TRIGGER_CLASSNAME = 'ng-trigger';
export const NG_TRIGGER_SELECTOR = '.ng-trigger';
export const NG_ANIMATING_CLASSNAME = 'ng-animating';
export const NG_ANIMATING_SELECTOR = '.ng-animating';
export function resolveTimingValue(value: string|number) {
if (typeof value == 'number') return value;
const matches = value.match(/^(-?[\.\d]+)(m?s)/);
if (!matches || matches.length < 2) return 0;
return _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
}
function _convertTimeValueToMS(value: number, unit: string): number {
switch (unit) {
case 's':
return value * ONE_SECOND;
default: // ms or something else
return value;
}
}
export function resolveTiming(
timings: string|number|AnimateTimings, errors: any[], allowNegativeValues?: boolean) {
return timings.hasOwnProperty('duration') ?
<AnimateTimings>timings :
parseTimeExpression(<string|number>timings, errors, allowNegativeValues);
}
function parseTimeExpression(
exp: string|number, errors: string[], allowNegativeValues?: boolean): AnimateTimings {
const regex = /^(-?[\.\d]+)(m?s)(?:\s+(-?[\.\d]+)(m?s))?(?:\s+([-a-z]+(?:\(.+?\))?))?$/i;
let duration: number;
let delay: number = 0;
let easing: string = '';
if (typeof exp === 'string') {
const matches = exp.match(regex);
if (matches === null) {
errors.push(`The provided timing value "${exp}" is invalid.`);
return {duration: 0, delay: 0, easing: ''};
}
duration = _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
const delayMatch = matches[3];
if (delayMatch != null) {
delay = _convertTimeValueToMS(parseFloat(delayMatch), matches[4]);
}
const easingVal = matches[5];
if (easingVal) {
easing = easingVal;
}
} else {
duration = exp;
}
if (!allowNegativeValues) {
let containsErrors = false;
let startIndex = errors.length;
if (duration < 0) {
errors.push(`Duration values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (delay < 0) {
errors.push(`Delay values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (containsErrors) {
errors.splice(startIndex, 0, `The provided timing value "${exp}" is invalid.`);
}
}
return {duration, delay, easing};
}
export function copyObj(
obj: {[key: string]: any}, destination: {[key: string]: any} = {}): {[key: string]: any} {
Object.keys(obj).forEach(prop => {
destination[prop] = obj[prop];
});
return destination;
}
export function normalizeStyles(styles: ɵStyleData|ɵStyleData[]): ɵStyleData {
const normalizedStyles: ɵStyleData = {};
if (Array.isArray(styles)) {
styles.forEach(data => copyStyles(data, false, normalizedStyles));
} else {
copyStyles(styles, false, normalizedStyles);
}
return normalizedStyles;
}
export function copyStyles(
styles: ɵStyleData, readPrototype: boolean, destination: ɵStyleData = {}): ɵStyleData {
if (readPrototype) {
// we make use of a for-in loop so that the
// prototypically inherited properties are
// revealed from the backFill map
for (let prop in styles) {
destination[prop] = styles[prop];
}
} else {
copyObj(styles, destination);
}
return destination;
}
function getStyleAttributeString(element: any, key: string, value: string) {
// Return the key-value pair string to be added to the style attribute for the
// given CSS style key.
if (value) {
return key + ':' + value + ';';
} else {
return '';
}
}
function writeStyleAttribute(element: any) {
// Read the style property of the element and manually reflect it to the
// style attribute. This is needed because Domino on platform-server doesn't
// understand the full set of allowed CSS properties and doesn't reflect some
// of them automatically.
let styleAttrValue = '';
for (let i = 0; i < element.style.length; i++) {
const key = element.style.item(i);
styleAttrValue += getStyleAttributeString(element, key, element.style.getPropertyValue(key));
}
for (const key in element.style) {
// Skip internal Domino properties that don't need to be reflected.
if (!element.style.hasOwnProperty(key) || key.startsWith('_')) {
continue;
}
const dashKey = camelCaseToDashCase(key);
styleAttrValue += getStyleAttributeString(element, dashKey, element.style[key]);
}
element.setAttribute('style', styleAttrValue);
}
export function setStyles(element: any, styles: ɵStyleData, formerStyles?: {[key: string]: any}) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
if (formerStyles && !formerStyles.hasOwnProperty(prop)) {
formerStyles[prop] = element.style[camelProp];
}
element.style[camelProp] = styles[prop];
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function eraseStyles(element: any, styles: ɵStyleData) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
element.style[camelProp] = '';
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function normalizeAnimationEntry(steps: AnimationMetadata|
AnimationMetadata[]): AnimationMetadata {
if (Array.isArray(steps)) {
if (steps.length == 1) return steps[0];
return sequence(steps);
}
return steps as AnimationMetadata;
}
export function validateStyleParams(
value: string|number, options: AnimationOptions, errors: any[]) {
const params = options.params || {};
const matches = extractStyleParams(value);
if (matches.length) {
matches.forEach(varName => {
if (!params.hasOwnProperty(varName)) {
errors.push(
`Unable to resolve the local animation param ${varName} in the given list of values`);
}
});
}
}
const PARAM_REGEX =
new RegExp(`${SUBSTITUTION_EXPR_START}\\s*(.+?)\\s*${SUBSTITUTION_EXPR_END}`, 'g');
export function extractStyleParams(value: string|number): string[] {
|
while (match = PARAM_REGEX.exec(value)) {
params.push(match[1] as string);
}
PARAM_REGEX.lastIndex = 0;
}
return params;
}
export function interpolateParams(
value: string|number, params: {[name: string]: any}, errors: any[]): string|number {
const original = value.toString();
const str = original.replace(PARAM_REGEX, (_, varName) => {
let localVal = params[varName];
// this means that the value was never overridden by the data passed in by the user
if (!params.hasOwnProperty(varName)) {
errors.push(`Please provide a value for the animation param ${varName}`);
localVal = '';
}
return localVal.toString();
});
// we do this to assert that numeric values stay as they are
return str == original ? value : str;
}
export function iteratorToArray(iterator: any): any[] {
const arr: any[] = [];
let item = iterator.next();
while (!item.done) {
arr.push(item.value);
item = iterator.next();
}
return arr;
}
export function mergeAnimationOptions(
source: AnimationOptions, destination: AnimationOptions): AnimationOptions {
if (source.params) {
const p0 = source.params;
if (!destination.params) {
destination.params = {};
}
const p1 = destination.params;
Object.keys(p0).forEach(param => {
if (!p1.hasOwnProperty(param)) {
p1[param] = p0[param];
}
});
}
return destination;
}
const DASH_CASE_REGEXP = /-+([a-z0-9])/g;
export function dashCaseToCamelCase(input: string): string {
return input.replace(DASH_CASE_REGEXP, (...m: any[]) => m[1].toUpperCase());
}
function camelCaseToDashCase(input: string): string {
return input.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
}
export function allowPreviousPlayerStylesMerge(duration: number, delay: number) {
return duration === 0 || delay === 0;
}
export function balancePreviousStylesIntoKeyframes(
element: any, keyframes: {[key: string]: any}[], previousStyles: {[key: string]: any}) {
const previousStyleProps = Object.keys(previousStyles);
if (previousStyleProps.length && keyframes.length) {
let startingKeyframe = keyframes[0];
let missingStyleProps: string[] = [];
previousStyleProps.forEach(prop => {
if (!startingKeyframe.hasOwnProperty(prop)) {
missingStyleProps.push(prop);
}
startingKeyframe[prop] = previousStyles[prop];
});
if (missingStyleProps.length) {
// tslint:disable-next-line
for (var i = 1; i < keyframes.length; i++) {
let kf = keyframes[i];
missingStyleProps.forEach(function(prop) {
kf[prop] = computeStyle(element, prop);
});
}
}
}
return keyframes;
}
export function visitDslNode(
visitor: AnimationDslVisitor, node: AnimationMetadata, context: any): any;
export function visitDslNode(
visitor: AnimationAstVisitor, node: AnimationAst<AnimationMetadataType>, context: any): any;
export function visitDslNode(visitor: any, node: any, context: any): any {
switch (node.type) {
case AnimationMetadataType.Trigger:
return visitor.visitTrigger(node, context);
case AnimationMetadataType.State:
return visitor.visitState(node, context);
case AnimationMetadataType.Transition:
return visitor.visitTransition(node, context);
case AnimationMetadataType.Sequence:
return visitor.visitSequence(node, context);
case AnimationMetadataType.Group:
return visitor.visitGroup(node, context);
case AnimationMetadataType.Animate:
return visitor.visitAnimate(node, context);
case AnimationMetadataType.Keyframes:
return visitor.visitKeyframes(node, context);
case AnimationMetadataType.Style:
return visitor.visitStyle(node, context);
case AnimationMetadataType.Reference:
return visitor.visitReference(node, context);
case AnimationMetadataType.AnimateChild:
return visitor.visitAnimateChild(node, context);
case AnimationMetadataType.AnimateRef:
return visitor.visitAnimateRef(node, context);
case AnimationMetadataType.Query:
return visitor.visitQuery(node, context);
case AnimationMetadataType.Stagger:
return visitor.visitStagger(node, context);
default:
throw new Error(`Unable to resolve animation metadata node #${node.type}`);
}
}
export function computeStyle(element: any, prop: string): string {
return (<any>window.getComputedStyle(element))[prop];
}
|
let params: string[] = [];
if (typeof value === 'string') {
let match: any;
|
random_line_split
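The style- and case-conversion helpers in this copy of util.ts are also clearer with concrete values; the following hypothetical sketch assumes the exported functions are imported from './util' and is not part of the Angular source.

import { dashCaseToCamelCase, copyObj, normalizeStyles } from './util';

// dash-case CSS property names become camelCase for element.style access.
console.log(dashCaseToCamelCase('background-color')); // 'backgroundColor'
console.log(dashCaseToCamelCase('border-top-width')); // 'borderTopWidth'

// copyObj shallow-copies into an (optional) destination object.
const dest = copyObj({ opacity: 0 }, { width: '10px' });
console.log(dest); // { width: '10px', opacity: 0 }

// normalizeStyles flattens an array of style maps into a single map;
// later entries win when keys collide.
console.log(normalizeStyles([{ opacity: 0 }, { opacity: 1, color: 'red' }]));
// -> { opacity: 1, color: 'red' }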
|
util.ts
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {AnimateTimings, AnimationMetadata, AnimationMetadataType, AnimationOptions, sequence, ɵStyleData} from '@angular/animations';
import {Ast as AnimationAst, AstVisitor as AnimationAstVisitor} from './dsl/animation_ast';
import {AnimationDslVisitor} from './dsl/animation_dsl_visitor';
import {isNode} from './render/shared';
export const ONE_SECOND = 1000;
export const SUBSTITUTION_EXPR_START = '{{';
export const SUBSTITUTION_EXPR_END = '}}';
export const ENTER_CLASSNAME = 'ng-enter';
export const LEAVE_CLASSNAME = 'ng-leave';
export const ENTER_SELECTOR = '.ng-enter';
export const LEAVE_SELECTOR = '.ng-leave';
export const NG_TRIGGER_CLASSNAME = 'ng-trigger';
export const NG_TRIGGER_SELECTOR = '.ng-trigger';
export const NG_ANIMATING_CLASSNAME = 'ng-animating';
export const NG_ANIMATING_SELECTOR = '.ng-animating';
export function resolveTimingValue(value: string|number) {
if (typeof value == 'number') return value;
const matches = value.match(/^(-?[\.\d]+)(m?s)/);
if (!matches || matches.length < 2) return 0;
return _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
}
function _convertTimeValueToMS(value: number, unit: string): number {
switch (unit) {
case 's':
return value * ONE_SECOND;
default: // ms or something else
return value;
}
}
export function resolveTiming(
timings: string|number|AnimateTimings, errors: any[], allowNegativeValues?: boolean) {
return timings.hasOwnProperty('duration') ?
<AnimateTimings>timings :
parseTimeExpression(<string|number>timings, errors, allowNegativeValues);
}
function parseTimeExpression(
exp: string|number, errors: string[], allowNegativeValues?: boolean): AnimateTimings {
const regex = /^(-?[\.\d]+)(m?s)(?:\s+(-?[\.\d]+)(m?s))?(?:\s+([-a-z]+(?:\(.+?\))?))?$/i;
let duration: number;
let delay: number = 0;
let easing: string = '';
if (typeof exp === 'string') {
const matches = exp.match(regex);
if (matches === null) {
errors.push(`The provided timing value "${exp}" is invalid.`);
return {duration: 0, delay: 0, easing: ''};
}
duration = _convertTimeValueToMS(parseFloat(matches[1]), matches[2]);
const delayMatch = matches[3];
if (delayMatch != null) {
delay = _convertTimeValueToMS(parseFloat(delayMatch), matches[4]);
}
const easingVal = matches[5];
if (easingVal) {
easing = easingVal;
}
} else {
duration = exp;
}
if (!allowNegativeValues) {
let containsErrors = false;
let startIndex = errors.length;
if (duration < 0) {
errors.push(`Duration values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (delay < 0) {
errors.push(`Delay values below 0 are not allowed for this animation step.`);
containsErrors = true;
}
if (containsErrors) {
errors.splice(startIndex, 0, `The provided timing value "${exp}" is invalid.`);
}
}
return {duration, delay, easing};
}
export function copyObj(
obj: {[key: string]: any}, destination: {[key: string]: any} = {}): {[key: string]: any} {
Object.keys(obj).forEach(prop => {
destination[prop] = obj[prop];
});
return destination;
}
export function normalizeStyles(styles: ɵStyleData|ɵStyleData[]): ɵStyleData {
const normalizedStyles: ɵStyleData = {};
if (Array.isArray(styles)) {
styles.forEach(data => copyStyles(data, false, normalizedStyles));
} else {
copyStyles(styles, false, normalizedStyles);
}
return normalizedStyles;
}
export function copyStyles(
styles: ɵStyleData, readPrototype: boolean, destination: ɵStyleData = {}): ɵStyleData {
if (readPrototype) {
// we make use of a for-in loop so that the
// prototypically inherited properties are
// revealed from the backFill map
for (let prop in styles) {
destination[prop] = styles[prop];
}
} else {
copyObj(styles, destination);
}
return destination;
}
function getStyleAttributeString(element: any, key: string, value: string) {
// Return the key-value pair string to be added to the style attribute for the
// given CSS style key.
if (value) {
return key + ':' + value + ';';
} else {
return '';
}
}
function writeStyleAttribute(element: any) {
// Read the style property of the element and manually reflect it to the
// style attribute. This is needed because Domino on platform-server doesn't
// understand the full set of allowed CSS properties and doesn't reflect some
// of them automatically.
let styleAttrValue = '';
for (let i = 0; i < element.style.length; i++) {
const key = element.style.item(i);
styleAttrValue += getStyleAttributeString(element, key, element.style.getPropertyValue(key));
}
for (const key in element.style) {
// Skip internal Domino properties that don't need to be reflected.
if (!element.style.hasOwnProperty(key) || key.startsWith('_')) {
continue;
}
const dashKey = camelCaseToDashCase(key);
styleAttrValue += getStyleAttributeString(element, dashKey, element.style[key]);
}
element.setAttribute('style', styleAttrValue);
}
export function setStyles(element: any, styles: ɵStyleData, formerStyles?: {[key: string]: any}) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
if (formerStyles && !formerStyles.hasOwnProperty(prop)) {
formerStyles[prop] = element.style[camelProp];
}
element.style[camelProp] = styles[prop];
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function eraseStyles(element: any, styles: ɵStyleData) {
if (element['style']) {
Object.keys(styles).forEach(prop => {
const camelProp = dashCaseToCamelCase(prop);
element.style[camelProp] = '';
});
// On the server set the 'style' attribute since it's not automatically reflected.
if (isNode()) {
writeStyleAttribute(element);
}
}
}
export function normalizeAnimationEntry(steps: AnimationMetadata|
AnimationMetadata[]): AnimationMetadata {
if (Array.isArray(steps)) {
if (steps.length == 1) return steps[0];
return sequence(steps);
}
return steps as AnimationMetadata;
}
export function validateStyleParams(
value: string|number, options: AnimationOptions, errors: any[]) {
const params = options.params || {};
const matches = extractStyleParams(value);
if (matches.length) {
matches.forEach(varName => {
if (!params.hasOwnProperty(varName)) {
errors.push(
`Unable to resolve the local animation param ${varName} in the given list of values`);
}
});
}
}
const PARAM_REGEX =
new RegExp(`${SUBSTITUTION_EXPR_START}\\s*(.+?)\\s*${SUBSTITUTION_EXPR_END}`, 'g');
export function extractSty
|
ring|number): string[] {
let params: string[] = [];
if (typeof value === 'string') {
let match: any;
while (match = PARAM_REGEX.exec(value)) {
params.push(match[1] as string);
}
PARAM_REGEX.lastIndex = 0;
}
return params;
}
export function interpolateParams(
value: string|number, params: {[name: string]: any}, errors: any[]): string|number {
const original = value.toString();
const str = original.replace(PARAM_REGEX, (_, varName) => {
let localVal = params[varName];
// this means that the value was never overridden by the data passed in by the user
if (!params.hasOwnProperty(varName)) {
errors.push(`Please provide a value for the animation param ${varName}`);
localVal = '';
}
return localVal.toString();
});
// we do this to assert that numeric values stay as they are
return str == original ? value : str;
}
export function iteratorToArray(iterator: any): any[] {
const arr: any[] = [];
let item = iterator.next();
while (!item.done) {
arr.push(item.value);
item = iterator.next();
}
return arr;
}
export function mergeAnimationOptions(
source: AnimationOptions, destination: AnimationOptions): AnimationOptions {
if (source.params) {
const p0 = source.params;
if (!destination.params) {
destination.params = {};
}
const p1 = destination.params;
Object.keys(p0).forEach(param => {
if (!p1.hasOwnProperty(param)) {
p1[param] = p0[param];
}
});
}
return destination;
}
const DASH_CASE_REGEXP = /-+([a-z0-9])/g;
export function dashCaseToCamelCase(input: string): string {
return input.replace(DASH_CASE_REGEXP, (...m: any[]) => m[1].toUpperCase());
}
function camelCaseToDashCase(input: string): string {
return input.replace(/([a-z])([A-Z])/g, '$1-$2').toLowerCase();
}
export function allowPreviousPlayerStylesMerge(duration: number, delay: number) {
return duration === 0 || delay === 0;
}
export function balancePreviousStylesIntoKeyframes(
element: any, keyframes: {[key: string]: any}[], previousStyles: {[key: string]: any}) {
const previousStyleProps = Object.keys(previousStyles);
if (previousStyleProps.length && keyframes.length) {
let startingKeyframe = keyframes[0];
let missingStyleProps: string[] = [];
previousStyleProps.forEach(prop => {
if (!startingKeyframe.hasOwnProperty(prop)) {
missingStyleProps.push(prop);
}
startingKeyframe[prop] = previousStyles[prop];
});
if (missingStyleProps.length) {
// tslint:disable-next-line
for (var i = 1; i < keyframes.length; i++) {
let kf = keyframes[i];
missingStyleProps.forEach(function(prop) {
kf[prop] = computeStyle(element, prop);
});
}
}
}
return keyframes;
}
export function visitDslNode(
visitor: AnimationDslVisitor, node: AnimationMetadata, context: any): any;
export function visitDslNode(
visitor: AnimationAstVisitor, node: AnimationAst<AnimationMetadataType>, context: any): any;
export function visitDslNode(visitor: any, node: any, context: any): any {
switch (node.type) {
case AnimationMetadataType.Trigger:
return visitor.visitTrigger(node, context);
case AnimationMetadataType.State:
return visitor.visitState(node, context);
case AnimationMetadataType.Transition:
return visitor.visitTransition(node, context);
case AnimationMetadataType.Sequence:
return visitor.visitSequence(node, context);
case AnimationMetadataType.Group:
return visitor.visitGroup(node, context);
case AnimationMetadataType.Animate:
return visitor.visitAnimate(node, context);
case AnimationMetadataType.Keyframes:
return visitor.visitKeyframes(node, context);
case AnimationMetadataType.Style:
return visitor.visitStyle(node, context);
case AnimationMetadataType.Reference:
return visitor.visitReference(node, context);
case AnimationMetadataType.AnimateChild:
return visitor.visitAnimateChild(node, context);
case AnimationMetadataType.AnimateRef:
return visitor.visitAnimateRef(node, context);
case AnimationMetadataType.Query:
return visitor.visitQuery(node, context);
case AnimationMetadataType.Stagger:
return visitor.visitStagger(node, context);
default:
throw new Error(`Unable to resolve animation metadata node #${node.type}`);
}
}
export function computeStyle(element: any, prop: string): string {
return (<any>window.getComputedStyle(element))[prop];
}
|
leParams(value: st
|
identifier_name
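To round out the util.ts samples, here is one more hypothetical sketch covering the option-merging and iterator helpers (imports from './util' are assumed; this is illustrative only, not Angular source code).

import { mergeAnimationOptions, allowPreviousPlayerStylesMerge, iteratorToArray } from './util';

// Params from `source` fill in gaps in `destination` without overriding it.
const merged = mergeAnimationOptions(
  { params: { x: 1, y: 2 } },
  { params: { y: 99 } },
);
console.log(merged.params); // { y: 99, x: 1 }

// Previous player styles may only be merged when either duration or delay is zero.
console.log(allowPreviousPlayerStylesMerge(0, 500));   // true
console.log(allowPreviousPlayerStylesMerge(300, 500)); // false

// iteratorToArray drains any ES iterator into a plain array.
console.log(iteratorToArray(new Map([['a', 1]]).keys())); // ['a']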
|
walker.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::fs::{self, DirEntry, Metadata};
use std::io;
use std::path::{Path, PathBuf};
use anyhow::Result;
use thiserror::Error;
use pathmatcher::{DirectoryMatch, Matcher};
use types::path::ParseError;
use types::{RepoPath, RepoPathBuf};
#[derive(Error, Debug)]
pub enum WalkError {
#[error("invalid file name encoding '{0}'")]
FsUtf8Error(String),
#[error("IO error at '{0}': {1}")]
IOError(RepoPathBuf, #[source] io::Error),
#[error("path error at '{0}': {1}")]
RepoPathError(String, #[source] ParseError),
#[error("invalid file type at '{0}'")]
InvalidFileType(RepoPathBuf),
#[error("invalid mtime for '{0}': {1}")]
InvalidMTime(RepoPathBuf, #[source] anyhow::Error),
}
impl WalkError {
pub fn filename(&self) -> String {
match self {
WalkError::FsUtf8Error(path) => path.to_string(),
WalkError::IOError(path, _) => path.to_string(),
WalkError::RepoPathError(path, _) => path.to_string(),
WalkError::InvalidFileType(path) => path.to_string(),
WalkError::InvalidMTime(path, _) => path.to_string(),
}
}
pub fn message(&self) -> String {
match self {
WalkError::FsUtf8Error(_) => "invalid file name encoding".to_string(),
WalkError::IOError(_, error) => error.to_string(),
WalkError::RepoPathError(_, error) => error.to_string(),
WalkError::InvalidFileType(_) => "invalid file type".to_string(),
WalkError::InvalidMTime(_, error) => format!("invalid mtime - {}", error.to_string()),
}
}
}
pub enum WalkEntry {
File(RepoPathBuf, Metadata),
Directory(RepoPathBuf),
}
impl AsRef<RepoPath> for WalkEntry {
fn as_ref(&self) -> &RepoPath {
match self {
WalkEntry::File(f, _) => f,
WalkEntry::Directory(d) => d,
}
}
}
/// Walker traverses the working copy, starting at the root of the repo,
|
dir_matches: Vec<RepoPathBuf>,
results: Vec<Result<WalkEntry>>,
matcher: M,
include_directories: bool,
}
impl<M> Walker<M>
where
M: Matcher,
{
pub fn new(root: PathBuf, matcher: M, include_directories: bool) -> Result<Self> {
let mut dir_matches = vec![];
if matcher.matches_directory(&RepoPathBuf::new())? != DirectoryMatch::Nothing {
dir_matches.push(RepoPathBuf::new());
}
let walker = Walker {
root,
dir_matches,
results: Vec::new(),
matcher,
include_directories,
};
Ok(walker)
}
fn match_entry(&mut self, next_dir: &RepoPathBuf, entry: DirEntry) -> Result<()> {
// It'd be nice to move all this conversion noise to a function, but having it here saves
// us from allocating filename repeatedly.
let filename = entry.file_name();
let filename = filename.to_str().ok_or(WalkError::FsUtf8Error(
filename.to_string_lossy().into_owned(),
))?;
let filename = RepoPath::from_str(filename)
.map_err(|e| WalkError::RepoPathError(filename.to_owned(), e))?;
let filetype = entry
.file_type()
.map_err(|e| WalkError::IOError(filename.to_owned(), e))?;
let mut candidate_path = next_dir.clone();
candidate_path.push(filename);
if filetype.is_file() || filetype.is_symlink() {
if self.matcher.matches_file(candidate_path.as_repo_path())? {
self.results
.push(Ok(WalkEntry::File(candidate_path, entry.metadata()?)));
}
} else if filetype.is_dir() {
if filename.as_str() != ".hg"
&& self
.matcher
.matches_directory(candidate_path.as_repo_path())?
!= DirectoryMatch::Nothing
{
self.dir_matches.push(candidate_path);
}
} else if self.matcher.matches_file(candidate_path.as_repo_path())? {
return Err(WalkError::InvalidFileType(filename.to_owned()).into());
}
Ok(())
}
/// Lazy traversal to find matching files
fn walk(&mut self) -> Result<()> {
while self.results.is_empty() && !self.dir_matches.is_empty() {
let next_dir = self.dir_matches.pop().unwrap();
if self.include_directories {
self.results
.push(Ok(WalkEntry::Directory(next_dir.clone())));
}
let abs_next_dir = self.root.join(next_dir.as_str());
// Don't process the directory if it contains a .hg directory, unless it's the root.
if next_dir.is_empty() || !Path::exists(&abs_next_dir.join(".hg")) {
for entry in fs::read_dir(abs_next_dir)
.map_err(|e| WalkError::IOError(next_dir.clone(), e))?
{
let entry = entry.map_err(|e| WalkError::IOError(next_dir.clone(), e))?;
if let Err(e) = self.match_entry(&next_dir, entry) {
self.results.push(Err(e));
}
}
}
}
Ok(())
}
}
impl<M> Iterator for Walker<M>
where
M: Matcher,
{
type Item = Result<WalkEntry>;
fn next(&mut self) -> Option<Self::Item> {
match self.walk() {
Err(e) => Some(Err(e)),
Ok(()) => self.results.pop(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::{create_dir_all, OpenOptions};
use std::path::PathBuf;
use tempfile::tempdir;
use pathmatcher::{AlwaysMatcher, NeverMatcher};
fn create_directory(
directories: &std::vec::Vec<&str>,
files: &std::vec::Vec<&str>,
) -> Result<tempfile::TempDir> {
let root = tempdir()?;
for dir in directories {
create_dir_all(root.path().join(dir))?;
}
for file in files {
let path = root.path().join(file);
OpenOptions::new()
.create(true)
.write(true)
.open(path.as_path())?;
}
Ok(root)
}
#[test]
fn test_walker() -> Result<()> {
let directories = vec!["dirA", "dirB/dirC/dirD"];
let files = vec!["dirA/a.txt", "dirA/b.txt", "dirB/dirC/dirD/c.txt"];
let root_dir = create_directory(&directories, &files)?;
let root_path = PathBuf::from(root_dir.path());
let walker = Walker::new(root_path, AlwaysMatcher::new(), false)?;
let walked_files: Result<Vec<_>> = walker.collect();
let walked_files = walked_files?;
assert_eq!(walked_files.len(), 3);
for file in walked_files {
assert!(files.contains(&file.as_ref().to_string().as_str()));
}
Ok(())
}
#[test]
fn test_match_nothing() -> Result<()> {
let directories = vec!["dirA"];
let files = vec!["dirA/a.txt", "b.txt"];
let root_dir = create_directory(&directories, &files)?;
let root_path = PathBuf::from(root_dir.path());
let walker = Walker::new(root_path, NeverMatcher::new(), false)?;
let walked_files: Vec<_> = walker.collect();
assert!(walked_files.is_empty());
Ok(())
}
}
|
/// finding files matched by matcher
pub struct Walker<M> {
root: PathBuf,
|
random_line_split
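The Walker above is Rust, but the traversal idea it implements is compact enough to restate. The following is a hypothetical TypeScript/Node analogue (not EdenSCM/Mercurial code): walk lazily from a root, skip directories that contain a nested '.hg' marker except the root itself, never descend into '.hg', and yield only relative paths accepted by a matcher. The function name walk and its matcher signature are assumptions for illustration.

import { existsSync, readdirSync, statSync } from 'fs';
import { join, relative } from 'path';

// Lazily yield matching files under `root`, analogous to Walker::walk above.
export function* walk(root: string, matches: (relPath: string) => boolean): Generator<string> {
  const pending: string[] = [root];                               // plays the role of dir_matches
  while (pending.length > 0) {
    const dir = pending.pop()!;
    if (dir !== root && existsSync(join(dir, '.hg'))) continue;   // nested repo: skip, like walk()
    for (const name of readdirSync(dir)) {
      if (name === '.hg') continue;                               // never descend into .hg, like match_entry()
      const full = join(dir, name);
      const stat = statSync(full);
      if (stat.isDirectory()) {
        pending.push(full);
      } else if (stat.isFile() && matches(relative(root, full))) {
        yield relative(root, full);
      }
    }
  }
}

// Usage: const files = [...walk('.', p => p.endsWith('.txt'))];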
|
walker.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::fs::{self, DirEntry, Metadata};
use std::io;
use std::path::{Path, PathBuf};
use anyhow::Result;
use thiserror::Error;
use pathmatcher::{DirectoryMatch, Matcher};
use types::path::ParseError;
use types::{RepoPath, RepoPathBuf};
#[derive(Error, Debug)]
pub enum WalkError {
#[error("invalid file name encoding '{0}'")]
FsUtf8Error(String),
#[error("IO error at '{0}': {1}")]
IOError(RepoPathBuf, #[source] io::Error),
#[error("path error at '{0}': {1}")]
RepoPathError(String, #[source] ParseError),
#[error("invalid file type at '{0}'")]
InvalidFileType(RepoPathBuf),
#[error("invalid mtime for '{0}': {1}")]
InvalidMTime(RepoPathBuf, #[source] anyhow::Error),
}
impl WalkError {
pub fn filename(&self) -> String {
match self {
WalkError::FsUtf8Error(path) => path.to_string(),
WalkError::IOError(path, _) => path.to_string(),
WalkError::RepoPathError(path, _) => path.to_string(),
WalkError::InvalidFileType(path) => path.to_string(),
WalkError::InvalidMTime(path, _) => path.to_string(),
}
}
pub fn message(&self) -> String {
match self {
WalkError::FsUtf8Error(_) => "invalid file name encoding".to_string(),
WalkError::IOError(_, error) => error.to_string(),
WalkError::RepoPathError(_, error) => error.to_string(),
WalkError::InvalidFileType(_) => "invalid file type".to_string(),
WalkError::InvalidMTime(_, error) => format!("invalid mtime - {}", error.to_string()),
}
}
}
pub enum WalkEntry {
File(RepoPathBuf, Metadata),
Directory(RepoPathBuf),
}
impl AsRef<RepoPath> for WalkEntry {
fn as_ref(&self) -> &RepoPath {
match self {
WalkEntry::File(f, _) => f,
WalkEntry::Directory(d) => d,
}
}
}
/// Walker traverses the working copy, starting at the root of the repo,
/// finding files matched by matcher
pub struct Walker<M> {
root: PathBuf,
dir_matches: Vec<RepoPathBuf>,
results: Vec<Result<WalkEntry>>,
matcher: M,
include_directories: bool,
}
impl<M> Walker<M>
where
M: Matcher,
{
pub fn new(root: PathBuf, matcher: M, include_directories: bool) -> Result<Self> {
let mut dir_matches = vec![];
if matcher.matches_directory(&RepoPathBuf::new())? != DirectoryMatch::Nothing {
dir_matches.push(RepoPathBuf::new());
}
let walker = Walker {
root,
dir_matches,
results: Vec::new(),
matcher,
include_directories,
};
Ok(walker)
}
fn match_entry(&mut self, next_dir: &RepoPathBuf, entry: DirEntry) -> Result<()> {
// It'd be nice to move all this conversion noise to a function, but having it here saves
// us from allocating filename repeatedly.
let filename = entry.file_name();
let filename = filename.to_str().ok_or(WalkError::FsUtf8Error(
filename.to_string_lossy().into_owned(),
))?;
let filename = RepoPath::from_str(filename)
.map_err(|e| WalkError::RepoPathError(filename.to_owned(), e))?;
let filetype = entry
.file_type()
.map_err(|e| WalkError::IOError(filename.to_owned(), e))?;
let mut candidate_path = next_dir.clone();
candidate_path.push(filename);
if filetype.is_file() || filetype.is_symlink() {
if self.matcher.matches_file(candidate_path.as_repo_path())? {
self.results
.push(Ok(WalkEntry::File(candidate_path, entry.metadata()?)));
}
} else if filetype.is_dir() {
if filename.as_str() != ".hg"
&& self
.matcher
.matches_directory(candidate_path.as_repo_path())?
!= DirectoryMatch::Nothing
{
self.dir_matches.push(candidate_path);
}
} else if self.matcher.matches_file(candidate_path.as_repo_path())? {
return Err(WalkError::InvalidFileType(filename.to_owned()).into());
}
Ok(())
}
/// Lazy traversal to find matching files
fn walk(&mut self) -> Result<()> {
while self.results.is_empty() && !self.dir_matches.is_empty() {
let next_dir = self.dir_matches.pop().unwrap();
if self.include_directories {
self.results
.push(Ok(WalkEntry::Directory(next_dir.clone())));
}
let abs_next_dir = self.root.join(next_dir.as_str());
// Don't process the directory if it contains a .hg directory, unless it's the root.
if next_dir.is_empty() || !Path::exists(&abs_next_dir.join(".hg")) {
for entry in fs::read_dir(abs_next_dir)
.map_err(|e| WalkError::IOError(next_dir.clone(), e))?
{
let entry = entry.map_err(|e| WalkError::IOError(next_dir.clone(), e))?;
if let Err(e) = self.match_entry(&next_dir, entry) {
self.results.push(Err(e));
}
}
}
}
Ok(())
}
}
impl<M> Iterator for Walker<M>
where
M: Matcher,
{
type Item = Result<WalkEntry>;
fn
|
(&mut self) -> Option<Self::Item> {
match self.walk() {
Err(e) => Some(Err(e)),
Ok(()) => self.results.pop(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::{create_dir_all, OpenOptions};
use std::path::PathBuf;
use tempfile::tempdir;
use pathmatcher::{AlwaysMatcher, NeverMatcher};
fn create_directory(
directories: &std::vec::Vec<&str>,
files: &std::vec::Vec<&str>,
) -> Result<tempfile::TempDir> {
let root = tempdir()?;
for dir in directories {
create_dir_all(root.path().join(dir))?;
}
for file in files {
let path = root.path().join(file);
OpenOptions::new()
.create(true)
.write(true)
.open(path.as_path())?;
}
Ok(root)
}
#[test]
fn test_walker() -> Result<()> {
let directories = vec!["dirA", "dirB/dirC/dirD"];
let files = vec!["dirA/a.txt", "dirA/b.txt", "dirB/dirC/dirD/c.txt"];
let root_dir = create_directory(&directories, &files)?;
let root_path = PathBuf::from(root_dir.path());
let walker = Walker::new(root_path, AlwaysMatcher::new(), false)?;
let walked_files: Result<Vec<_>> = walker.collect();
let walked_files = walked_files?;
assert_eq!(walked_files.len(), 3);
for file in walked_files {
assert!(files.contains(&file.as_ref().to_string().as_str()));
}
Ok(())
}
#[test]
fn test_match_nothing() -> Result<()> {
let directories = vec!["dirA"];
let files = vec!["dirA/a.txt", "b.txt"];
let root_dir = create_directory(&directories, &files)?;
let root_path = PathBuf::from(root_dir.path());
let walker = Walker::new(root_path, NeverMatcher::new(), false)?;
let walked_files: Vec<_> = walker.collect();
assert!(walked_files.is_empty());
Ok(())
}
}
|
next
|
identifier_name
|
walker.rs
|
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::fs::{self, DirEntry, Metadata};
use std::io;
use std::path::{Path, PathBuf};
use anyhow::Result;
use thiserror::Error;
use pathmatcher::{DirectoryMatch, Matcher};
use types::path::ParseError;
use types::{RepoPath, RepoPathBuf};
#[derive(Error, Debug)]
pub enum WalkError {
#[error("invalid file name encoding '{0}'")]
FsUtf8Error(String),
#[error("IO error at '{0}': {1}")]
IOError(RepoPathBuf, #[source] io::Error),
#[error("path error at '{0}': {1}")]
RepoPathError(String, #[source] ParseError),
#[error("invalid file type at '{0}'")]
InvalidFileType(RepoPathBuf),
#[error("invalid mtime for '{0}': {1}")]
InvalidMTime(RepoPathBuf, #[source] anyhow::Error),
}
impl WalkError {
pub fn filename(&self) -> String {
match self {
WalkError::FsUtf8Error(path) => path.to_string(),
WalkError::IOError(path, _) => path.to_string(),
WalkError::RepoPathError(path, _) => path.to_string(),
WalkError::InvalidFileType(path) => path.to_string(),
WalkError::InvalidMTime(path, _) => path.to_string(),
}
}
pub fn message(&self) -> String {
match self {
WalkError::FsUtf8Error(_) => "invalid file name encoding".to_string(),
WalkError::IOError(_, error) => error.to_string(),
WalkError::RepoPathError(_, error) => error.to_string(),
WalkError::InvalidFileType(_) => "invalid file type".to_string(),
WalkError::InvalidMTime(_, error) => format!("invalid mtime - {}", error.to_string()),
}
}
}
pub enum WalkEntry {
File(RepoPathBuf, Metadata),
Directory(RepoPathBuf),
}
impl AsRef<RepoPath> for WalkEntry {
fn as_ref(&self) -> &RepoPath {
match self {
WalkEntry::File(f, _) => f,
WalkEntry::Directory(d) => d,
}
}
}
/// Walker traverses the working copy, starting at the root of the repo,
/// finding files matched by matcher
pub struct Walker<M> {
root: PathBuf,
dir_matches: Vec<RepoPathBuf>,
results: Vec<Result<WalkEntry>>,
matcher: M,
include_directories: bool,
}
impl<M> Walker<M>
where
M: Matcher,
{
pub fn new(root: PathBuf, matcher: M, include_directories: bool) -> Result<Self>
|
fn match_entry(&mut self, next_dir: &RepoPathBuf, entry: DirEntry) -> Result<()> {
// It'd be nice to move all this conversion noise to a function, but having it here saves
// us from allocating filename repeatedly.
let filename = entry.file_name();
let filename = filename.to_str().ok_or(WalkError::FsUtf8Error(
filename.to_string_lossy().into_owned(),
))?;
let filename = RepoPath::from_str(filename)
.map_err(|e| WalkError::RepoPathError(filename.to_owned(), e))?;
let filetype = entry
.file_type()
.map_err(|e| WalkError::IOError(filename.to_owned(), e))?;
let mut candidate_path = next_dir.clone();
candidate_path.push(filename);
if filetype.is_file() || filetype.is_symlink() {
if self.matcher.matches_file(candidate_path.as_repo_path())? {
self.results
.push(Ok(WalkEntry::File(candidate_path, entry.metadata()?)));
}
} else if filetype.is_dir() {
if filename.as_str() != ".hg"
&& self
.matcher
.matches_directory(candidate_path.as_repo_path())?
!= DirectoryMatch::Nothing
{
self.dir_matches.push(candidate_path);
}
} else if self.matcher.matches_file(candidate_path.as_repo_path())? {
return Err(WalkError::InvalidFileType(filename.to_owned()).into());
}
Ok(())
}
/// Lazy traversal to find matching files
fn walk(&mut self) -> Result<()> {
while self.results.is_empty() && !self.dir_matches.is_empty() {
let next_dir = self.dir_matches.pop().unwrap();
if self.include_directories {
self.results
.push(Ok(WalkEntry::Directory(next_dir.clone())));
}
let abs_next_dir = self.root.join(next_dir.as_str());
// Don't process the directory if it contains a .hg directory, unless it's the root.
if next_dir.is_empty() || !Path::exists(&abs_next_dir.join(".hg")) {
for entry in fs::read_dir(abs_next_dir)
.map_err(|e| WalkError::IOError(next_dir.clone(), e))?
{
let entry = entry.map_err(|e| WalkError::IOError(next_dir.clone(), e))?;
if let Err(e) = self.match_entry(&next_dir, entry) {
self.results.push(Err(e));
}
}
}
}
Ok(())
}
}
impl<M> Iterator for Walker<M>
where
M: Matcher,
{
type Item = Result<WalkEntry>;
fn next(&mut self) -> Option<Self::Item> {
match self.walk() {
Err(e) => Some(Err(e)),
Ok(()) => self.results.pop(),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::fs::{create_dir_all, OpenOptions};
use std::path::PathBuf;
use tempfile::tempdir;
use pathmatcher::{AlwaysMatcher, NeverMatcher};
fn create_directory(
directories: &std::vec::Vec<&str>,
files: &std::vec::Vec<&str>,
) -> Result<tempfile::TempDir> {
let root = tempdir()?;
for dir in directories {
create_dir_all(root.path().join(dir))?;
}
for file in files {
let path = root.path().join(file);
OpenOptions::new()
.create(true)
.write(true)
.open(path.as_path())?;
}
Ok(root)
}
#[test]
fn test_walker() -> Result<()> {
let directories = vec!["dirA", "dirB/dirC/dirD"];
let files = vec!["dirA/a.txt", "dirA/b.txt", "dirB/dirC/dirD/c.txt"];
let root_dir = create_directory(&directories, &files)?;
let root_path = PathBuf::from(root_dir.path());
let walker = Walker::new(root_path, AlwaysMatcher::new(), false)?;
let walked_files: Result<Vec<_>> = walker.collect();
let walked_files = walked_files?;
assert_eq!(walked_files.len(), 3);
for file in walked_files {
assert!(files.contains(&file.as_ref().to_string().as_str()));
}
Ok(())
}
#[test]
fn test_match_nothing() -> Result<()> {
let directories = vec!["dirA"];
let files = vec!["dirA/a.txt", "b.txt"];
let root_dir = create_directory(&directories, &files)?;
let root_path = PathBuf::from(root_dir.path());
let walker = Walker::new(root_path, NeverMatcher::new(), false)?;
let walked_files: Vec<_> = walker.collect();
assert!(walked_files.is_empty());
Ok(())
}
}
|
{
let mut dir_matches = vec![];
if matcher.matches_directory(&RepoPathBuf::new())? != DirectoryMatch::Nothing {
dir_matches.push(RepoPathBuf::new());
}
let walker = Walker {
root,
dir_matches,
results: Vec::new(),
matcher,
include_directories,
};
Ok(walker)
}
|
identifier_body
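The Walker above is deliberately lazy: directories the matcher might care about are queued in dir_matches, and results are only produced as the iterator is driven, with .hg directories skipped below the root. Purely as an illustration of that queue-driven pattern, here is a minimal Python sketch (not the Rust crate's API; the matches_dir/matches_file callables are hypothetical stand-ins for the Matcher trait):

import os
from typing import Callable, Iterator

def lazy_walk(root: str,
              matches_dir: Callable[[str], bool],
              matches_file: Callable[[str], bool]) -> Iterator[str]:
    """Yield repo-relative paths of matching files, descending only into allowed directories."""
    pending = ['']  # relative directory paths still to visit
    while pending:
        rel_dir = pending.pop()
        for entry in os.scandir(os.path.join(root, rel_dir)):
            rel_path = os.path.join(rel_dir, entry.name)
            if entry.is_dir(follow_symlinks=False):
                # Mirror the Rust code: never descend into .hg, and ask the matcher first.
                if entry.name != '.hg' and matches_dir(rel_path):
                    pending.append(rel_path)
            elif matches_file(rel_path):
                yield rel_path

# Usage sketch: find .txt files anywhere under /tmp/repo (path is illustrative).
# for path in lazy_walk('/tmp/repo', lambda d: True, lambda f: f.endswith('.txt')):
#     print(path)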
|
ctdav_n_auv.py
|
"""
@package mi.dataset.parser
@file marine-integrations/mi/dataset/parser/ctdav_n_auv.py
@author Jeff Roy
@brief Parser and particle Classes and tools for the ctdav_n_auv data
Release notes:
initial release
"""
__author__ = 'Jeff Roy'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger
log = get_logger()
from mi.dataset.parser.auv_common import \
AuvCommonParticle, \
AuvCommonParser, \
compute_timestamp
# The structure below is a list of tuples
# Each tuple consists of
# parameter name, index into raw data parts list, encoding function
CTDAV_N_AUV_PARAM_MAP = [
# message ID is typically index 0
('mission_epoch', 1, int),
('auv_latitude', 2, float),
('auv_longitude', 3, float),
('mission_time', 4, int),
('m_depth', 5, float),
('ctdav_n_auv_conductivity', 6, float),
('temperature', 7, float),
('salinity', 8, float),
('speed_of_sound', 9, float),
('dissolved_oxygen', 10, float),
('powered_on', 11, int)
]
class CtdavNAuvInstrumentParticle(AuvCommonParticle):
_auv_param_map = CTDAV_N_AUV_PARAM_MAP
# must provide a parameter map for _build_parsed_values
class CtdavNAuvTelemeteredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument"
class
|
(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument_recovered"
CTDAV_N_AUV_ID = '1181' # message ID of ctdav_n records
CTDAV_N_AUV_FIELD_COUNT = 12  # number of expected fields in a ctdav_n record
CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvTelemeteredParticle)]
CTDAV_N_AUV_RECOVERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvRecoveredParticle)]
class CtdavNAuvParser(AuvCommonParser):
def __init__(self,
stream_handle,
exception_callback,
is_telemetered):
if is_telemetered:
message_map = CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP
else:
message_map = CTDAV_N_AUV_RECOVERED_MESSAGE_MAP
# provide message ID and # of fields to parent class
super(CtdavNAuvParser, self).__init__(stream_handle,
exception_callback,
message_map)
|
CtdavNAuvRecoveredParticle
|
identifier_name
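CTDAV_N_AUV_PARAM_MAP above is a plain decode table: each tuple gives the output parameter name, the index of its value in the split raw record, and the encoding function to apply. The framework applies this map inside the particle's _build_parsed_values; the snippet below is only a hypothetical illustration of the idea, and the field values are made up rather than real instrument data.

# Hypothetical illustration of applying a (name, index, encoder) map to one split record.
PARAM_MAP = [
    ('mission_epoch', 1, int),
    ('m_depth', 5, float),
    ('temperature', 7, float),
]

def decode_record(raw_parts, param_map):
    """Return {parameter_name: encoded_value} for one record split into fields."""
    return {name: encoder(raw_parts[index]) for name, index, encoder in param_map}

raw_parts = ['1181', '1504619340', '44.65', '-124.10', '600', '35.2', '3.1', '10.5']
print(decode_record(raw_parts, PARAM_MAP))
# {'mission_epoch': 1504619340, 'm_depth': 35.2, 'temperature': 10.5}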
|
ctdav_n_auv.py
|
"""
@package mi.dataset.parser
@file marine-integrations/mi/dataset/parser/ctdav_n_auv.py
@author Jeff Roy
@brief Parser and particle Classes and tools for the ctdav_n_auv data
Release notes:
initial release
"""
__author__ = 'Jeff Roy'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger
log = get_logger()
from mi.dataset.parser.auv_common import \
AuvCommonParticle, \
AuvCommonParser, \
compute_timestamp
# The structure below is a list of tuples
# Each tuple consists of
# parameter name, index into raw data parts list, encoding function
CTDAV_N_AUV_PARAM_MAP = [
# message ID is typically index 0
('mission_epoch', 1, int),
('auv_latitude', 2, float),
('auv_longitude', 3, float),
('mission_time', 4, int),
('m_depth', 5, float),
('ctdav_n_auv_conductivity', 6, float),
('temperature', 7, float),
('salinity', 8, float),
('speed_of_sound', 9, float),
('dissolved_oxygen', 10, float),
('powered_on', 11, int)
]
class CtdavNAuvInstrumentParticle(AuvCommonParticle):
_auv_param_map = CTDAV_N_AUV_PARAM_MAP
# must provide a parameter map for _build_parsed_values
class CtdavNAuvTelemeteredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument"
class CtdavNAuvRecoveredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument_recovered"
CTDAV_N_AUV_ID = '1181' # message ID of ctdav_n records
CTDAV_N_AUV_FIELD_COUNT = 12  # number of expected fields in a ctdav_n record
CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvTelemeteredParticle)]
CTDAV_N_AUV_RECOVERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvRecoveredParticle)]
class CtdavNAuvParser(AuvCommonParser):
def __init__(self,
stream_handle,
exception_callback,
is_telemetered):
if is_telemetered:
message_map = CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP
else:
|
# provide message ID and # of fields to parent class
super(CtdavNAuvParser, self).__init__(stream_handle,
exception_callback,
message_map)
|
message_map = CTDAV_N_AUV_RECOVERED_MESSAGE_MAP
|
conditional_block
|
ctdav_n_auv.py
|
"""
@package mi.dataset.parser
@file marine-integrations/mi/dataset/parser/ctdav_n_auv.py
@author Jeff Roy
@brief Parser and particle Classes and tools for the ctdav_n_auv data
Release notes:
initial release
"""
__author__ = 'Jeff Roy'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger
|
compute_timestamp
# The structure below is a list of tuples
# Each tuple consists of
# parameter name, index into raw data parts list, encoding function
CTDAV_N_AUV_PARAM_MAP = [
# message ID is typically index 0
('mission_epoch', 1, int),
('auv_latitude', 2, float),
('auv_longitude', 3, float),
('mission_time', 4, int),
('m_depth', 5, float),
('ctdav_n_auv_conductivity', 6, float),
('temperature', 7, float),
('salinity', 8, float),
('speed_of_sound', 9, float),
('dissolved_oxygen', 10, float),
('powered_on', 11, int)
]
class CtdavNAuvInstrumentParticle(AuvCommonParticle):
_auv_param_map = CTDAV_N_AUV_PARAM_MAP
# must provide a parameter map for _build_parsed_values
class CtdavNAuvTelemeteredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument"
class CtdavNAuvRecoveredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument_recovered"
CTDAV_N_AUV_ID = '1181' # message ID of ctdav_n records
CTDAV_N_AUV_FIELD_COUNT = 12  # number of expected fields in a ctdav_n record
CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvTelemeteredParticle)]
CTDAV_N_AUV_RECOVERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvRecoveredParticle)]
class CtdavNAuvParser(AuvCommonParser):
def __init__(self,
stream_handle,
exception_callback,
is_telemetered):
if is_telemetered:
message_map = CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP
else:
message_map = CTDAV_N_AUV_RECOVERED_MESSAGE_MAP
# provide message ID and # of fields to parent class
super(CtdavNAuvParser, self).__init__(stream_handle,
exception_callback,
message_map)
|
log = get_logger()
from mi.dataset.parser.auv_common import \
AuvCommonParticle, \
AuvCommonParser, \
|
random_line_split
|
ctdav_n_auv.py
|
"""
@package mi.dataset.parser
@file marine-integrations/mi/dataset/parser/ctdav_n_auv.py
@author Jeff Roy
@brief Parser and particle Classes and tools for the ctdav_n_auv data
Release notes:
initial release
"""
__author__ = 'Jeff Roy'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger
log = get_logger()
from mi.dataset.parser.auv_common import \
AuvCommonParticle, \
AuvCommonParser, \
compute_timestamp
# The structure below is a list of tuples
# Each tuple consists of
# parameter name, index into raw data parts list, encoding function
CTDAV_N_AUV_PARAM_MAP = [
# message ID is typically index 0
('mission_epoch', 1, int),
('auv_latitude', 2, float),
('auv_longitude', 3, float),
('mission_time', 4, int),
('m_depth', 5, float),
('ctdav_n_auv_conductivity', 6, float),
('temperature', 7, float),
('salinity', 8, float),
('speed_of_sound', 9, float),
('dissolved_oxygen', 10, float),
('powered_on', 11, int)
]
class CtdavNAuvInstrumentParticle(AuvCommonParticle):
|
class CtdavNAuvTelemeteredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument"
class CtdavNAuvRecoveredParticle(CtdavNAuvInstrumentParticle):
# set the data_particle_type for the DataParticle class
_data_particle_type = "ctdav_n_auv_instrument_recovered"
CTDAV_N_AUV_ID = '1181' # message ID of ctdav_n records
CTDAV_N_AUV_FIELD_COUNT = 12  # number of expected fields in a ctdav_n record
CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvTelemeteredParticle)]
CTDAV_N_AUV_RECOVERED_MESSAGE_MAP = [(CTDAV_N_AUV_ID,
CTDAV_N_AUV_FIELD_COUNT,
compute_timestamp,
CtdavNAuvRecoveredParticle)]
class CtdavNAuvParser(AuvCommonParser):
def __init__(self,
stream_handle,
exception_callback,
is_telemetered):
if is_telemetered:
message_map = CTDAV_N_AUV_TELEMETERED_MESSAGE_MAP
else:
message_map = CTDAV_N_AUV_RECOVERED_MESSAGE_MAP
# provide message ID and # of fields to parent class
super(CtdavNAuvParser, self).__init__(stream_handle,
exception_callback,
message_map)
|
_auv_param_map = CTDAV_N_AUV_PARAM_MAP
# must provide a parameter map for _build_parsed_values
|
identifier_body
|
create_table_test.py
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Object, ObjectArray
from crate.client.cursor import Cursor
from unittest import TestCase
from unittest.mock import patch, MagicMock
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
@patch('crate.client.connection.Cursor', FakeCursor)
class CreateTableTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
self.Base = declarative_base(bind=self.engine)
def test_create_table_with_basic_types(self):
class User(self.Base):
__tablename__ = 'users'
string_col = sa.Column(sa.String, primary_key=True)
unicode_col = sa.Column(sa.Unicode)
text_col = sa.Column(sa.Text)
int_col = sa.Column(sa.Integer)
long_col1 = sa.Column(sa.BigInteger)
long_col2 = sa.Column(sa.NUMERIC)
bool_col = sa.Column(sa.Boolean)
short_col = sa.Column(sa.SmallInteger)
datetime_col = sa.Column(sa.DateTime)
date_col = sa.Column(sa.Date)
float_col = sa.Column(sa.Float)
double_col = sa.Column(sa.DECIMAL)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE users (\n\tstring_col STRING, '
'\n\tunicode_col STRING, \n\ttext_col STRING, \n\tint_col INT, '
'\n\tlong_col1 LONG, \n\tlong_col2 LONG, '
'\n\tbool_col BOOLEAN, '
'\n\tshort_col SHORT, '
'\n\tdatetime_col TIMESTAMP, \n\tdate_col TIMESTAMP, '
'\n\tfloat_col FLOAT, \n\tdouble_col DOUBLE, '
'\n\tPRIMARY KEY (string_col)\n)\n\n'),
())
def test_with_obj_column(self):
class DummyTable(self.Base):
__tablename__ = 'dummy'
pk = sa.Column(sa.String, primary_key=True)
obj_col = sa.Column(Object)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE dummy (\n\tpk STRING, \n\tobj_col OBJECT, '
'\n\tPRIMARY KEY (pk)\n)\n\n'),
())
def test_with_clustered_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p'
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED BY (p)\n\n'),
())
def test_with_partitioned_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_partitioned_by': 'p',
'invalid_option': 1
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') PARTITIONED BY (p)\n\n'),
())
def test_with_number_of_shards_and_replicas(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_number_of_replicas': '2',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED INTO 3 SHARDS WITH (NUMBER_OF_REPLICAS = 2)\n\n'),
())
def test_with_clustered_by_and_number_of_shards(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
|
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk, p)\n'
') CLUSTERED BY (p) INTO 3 SHARDS\n\n'),
())
def test_table_with_object_array(self):
class DummyTable(self.Base):
__tablename__ = 't'
pk = sa.Column(sa.String, primary_key=True)
tags = sa.Column(ObjectArray)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'tags ARRAY(OBJECT), \n\t'
'PRIMARY KEY (pk)\n)\n\n'), ())
|
'crate_clustered_by': 'p',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String, primary_key=True)
|
random_line_split
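These tests never open a connection to CrateDB: the client's Cursor is replaced by a MagicMock, so create_all() only records the DDL string it would have sent, and assert_called_with checks that string. The same pattern, stripped of SQLAlchemy and shown with a hypothetical run_ddl helper, looks like this:

from unittest import TestCase
from unittest.mock import MagicMock

def run_ddl(cursor, table_name, columns):
    """Hypothetical helper: render a CREATE TABLE statement and hand it to the cursor."""
    cols = ', '.join('{} {}'.format(name, sqltype) for name, sqltype in columns)
    cursor.execute('CREATE TABLE {} ({})'.format(table_name, cols), ())

class RunDdlTest(TestCase):
    def test_create_table_sql(self):
        fake_cursor = MagicMock(name='fake_cursor')
        run_ddl(fake_cursor, 'users', [('name', 'STRING'), ('age', 'INT')])
        # The mock records the call, so the generated SQL can be asserted without a database.
        fake_cursor.execute.assert_called_with(
            'CREATE TABLE users (name STRING, age INT)', ())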
|
create_table_test.py
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Object, ObjectArray
from crate.client.cursor import Cursor
from unittest import TestCase
from unittest.mock import patch, MagicMock
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
@patch('crate.client.connection.Cursor', FakeCursor)
class CreateTableTest(TestCase):
def setUp(self):
self.engine = sa.create_engine('crate://')
self.Base = declarative_base(bind=self.engine)
def test_create_table_with_basic_types(self):
class User(self.Base):
__tablename__ = 'users'
string_col = sa.Column(sa.String, primary_key=True)
unicode_col = sa.Column(sa.Unicode)
text_col = sa.Column(sa.Text)
int_col = sa.Column(sa.Integer)
long_col1 = sa.Column(sa.BigInteger)
long_col2 = sa.Column(sa.NUMERIC)
bool_col = sa.Column(sa.Boolean)
short_col = sa.Column(sa.SmallInteger)
datetime_col = sa.Column(sa.DateTime)
date_col = sa.Column(sa.Date)
float_col = sa.Column(sa.Float)
double_col = sa.Column(sa.DECIMAL)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE users (\n\tstring_col STRING, '
'\n\tunicode_col STRING, \n\ttext_col STRING, \n\tint_col INT, '
'\n\tlong_col1 LONG, \n\tlong_col2 LONG, '
'\n\tbool_col BOOLEAN, '
'\n\tshort_col SHORT, '
'\n\tdatetime_col TIMESTAMP, \n\tdate_col TIMESTAMP, '
'\n\tfloat_col FLOAT, \n\tdouble_col DOUBLE, '
'\n\tPRIMARY KEY (string_col)\n)\n\n'),
())
def test_with_obj_column(self):
class DummyTable(self.Base):
__tablename__ = 'dummy'
pk = sa.Column(sa.String, primary_key=True)
obj_col = sa.Column(Object)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE dummy (\n\tpk STRING, \n\tobj_col OBJECT, '
'\n\tPRIMARY KEY (pk)\n)\n\n'),
())
def test_with_clustered_by(self):
|
def test_with_partitioned_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_partitioned_by': 'p',
'invalid_option': 1
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') PARTITIONED BY (p)\n\n'),
())
def test_with_number_of_shards_and_replicas(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_number_of_replicas': '2',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED INTO 3 SHARDS WITH (NUMBER_OF_REPLICAS = 2)\n\n'),
())
def test_with_clustered_by_and_number_of_shards(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk, p)\n'
') CLUSTERED BY (p) INTO 3 SHARDS\n\n'),
())
def test_table_with_object_array(self):
class DummyTable(self.Base):
__tablename__ = 't'
pk = sa.Column(sa.String, primary_key=True)
tags = sa.Column(ObjectArray)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'tags ARRAY(OBJECT), \n\t'
'PRIMARY KEY (pk)\n)\n\n'), ())
|
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p'
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED BY (p)\n\n'),
())
|
identifier_body
|
create_table_test.py
|
# -*- coding: utf-8; -*-
#
# Licensed to CRATE Technology GmbH ("Crate") under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from crate.client.sqlalchemy.types import Object, ObjectArray
from crate.client.cursor import Cursor
from unittest import TestCase
from unittest.mock import patch, MagicMock
fake_cursor = MagicMock(name='fake_cursor')
FakeCursor = MagicMock(name='FakeCursor', spec=Cursor)
FakeCursor.return_value = fake_cursor
@patch('crate.client.connection.Cursor', FakeCursor)
class CreateTableTest(TestCase):
def
|
(self):
self.engine = sa.create_engine('crate://')
self.Base = declarative_base(bind=self.engine)
def test_create_table_with_basic_types(self):
class User(self.Base):
__tablename__ = 'users'
string_col = sa.Column(sa.String, primary_key=True)
unicode_col = sa.Column(sa.Unicode)
text_col = sa.Column(sa.Text)
int_col = sa.Column(sa.Integer)
long_col1 = sa.Column(sa.BigInteger)
long_col2 = sa.Column(sa.NUMERIC)
bool_col = sa.Column(sa.Boolean)
short_col = sa.Column(sa.SmallInteger)
datetime_col = sa.Column(sa.DateTime)
date_col = sa.Column(sa.Date)
float_col = sa.Column(sa.Float)
double_col = sa.Column(sa.DECIMAL)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE users (\n\tstring_col STRING, '
'\n\tunicode_col STRING, \n\ttext_col STRING, \n\tint_col INT, '
'\n\tlong_col1 LONG, \n\tlong_col2 LONG, '
'\n\tbool_col BOOLEAN, '
'\n\tshort_col SHORT, '
'\n\tdatetime_col TIMESTAMP, \n\tdate_col TIMESTAMP, '
'\n\tfloat_col FLOAT, \n\tdouble_col DOUBLE, '
'\n\tPRIMARY KEY (string_col)\n)\n\n'),
())
def test_with_obj_column(self):
class DummyTable(self.Base):
__tablename__ = 'dummy'
pk = sa.Column(sa.String, primary_key=True)
obj_col = sa.Column(Object)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE dummy (\n\tpk STRING, \n\tobj_col OBJECT, '
'\n\tPRIMARY KEY (pk)\n)\n\n'),
())
def test_with_clustered_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p'
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED BY (p)\n\n'),
())
def test_with_partitioned_by(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_partitioned_by': 'p',
'invalid_option': 1
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk)\n'
') PARTITIONED BY (p)\n\n'),
())
def test_with_number_of_shards_and_replicas(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_number_of_replicas': '2',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'PRIMARY KEY (pk)\n'
') CLUSTERED INTO 3 SHARDS WITH (NUMBER_OF_REPLICAS = 2)\n\n'),
())
def test_with_clustered_by_and_number_of_shards(self):
class DummyTable(self.Base):
__tablename__ = 't'
__table_args__ = {
'crate_clustered_by': 'p',
'crate_number_of_shards': 3
}
pk = sa.Column(sa.String, primary_key=True)
p = sa.Column(sa.String, primary_key=True)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'p STRING, \n\t'
'PRIMARY KEY (pk, p)\n'
') CLUSTERED BY (p) INTO 3 SHARDS\n\n'),
())
def test_table_with_object_array(self):
class DummyTable(self.Base):
__tablename__ = 't'
pk = sa.Column(sa.String, primary_key=True)
tags = sa.Column(ObjectArray)
self.Base.metadata.create_all()
fake_cursor.execute.assert_called_with(
('\nCREATE TABLE t (\n\t'
'pk STRING, \n\t'
'tags ARRAY(OBJECT), \n\t'
'PRIMARY KEY (pk)\n)\n\n'), ())
|
setUp
|
identifier_name
|
ast_spec_utils.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as html from '../../src/ml_parser/ast';
import {ParseTreeResult} from '../../src/ml_parser/html_parser';
import {ParseLocation} from '../../src/parse_util';
export function humanizeDom(parseResult: ParseTreeResult, addSourceSpan: boolean = false): any[] {
if (parseResult.errors.length > 0)
|
return humanizeNodes(parseResult.rootNodes, addSourceSpan);
}
export function humanizeDomSourceSpans(parseResult: ParseTreeResult): any[] {
return humanizeDom(parseResult, true);
}
export function humanizeNodes(nodes: html.Node[], addSourceSpan: boolean = false): any[] {
const humanizer = new _Humanizer(addSourceSpan);
html.visitAll(humanizer, nodes);
return humanizer.result;
}
export function humanizeLineColumn(location: ParseLocation): string {
return `${location.line}:${location.col}`;
}
class _Humanizer implements html.Visitor {
result: any[] = [];
elDepth: number = 0;
constructor(private includeSourceSpan: boolean) {}
visitElement(element: html.Element, context: any): any {
const res = this._appendContext(element, [html.Element, element.name, this.elDepth++]);
if (this.includeSourceSpan) {
res.push(element.startSourceSpan?.toString() ?? null);
res.push(element.endSourceSpan?.toString() ?? null);
}
this.result.push(res);
html.visitAll(this, element.attrs);
html.visitAll(this, element.children);
this.elDepth--;
}
visitAttribute(attribute: html.Attribute, context: any): any {
const res = this._appendContext(attribute, [html.Attribute, attribute.name, attribute.value]);
this.result.push(res);
}
visitText(text: html.Text, context: any): any {
const res = this._appendContext(text, [html.Text, text.value, this.elDepth]);
this.result.push(res);
}
visitComment(comment: html.Comment, context: any): any {
const res = this._appendContext(comment, [html.Comment, comment.value, this.elDepth]);
this.result.push(res);
}
visitExpansion(expansion: html.Expansion, context: any): any {
const res = this._appendContext(
expansion, [html.Expansion, expansion.switchValue, expansion.type, this.elDepth++]);
this.result.push(res);
html.visitAll(this, expansion.cases);
this.elDepth--;
}
visitExpansionCase(expansionCase: html.ExpansionCase, context: any): any {
const res =
this._appendContext(expansionCase, [html.ExpansionCase, expansionCase.value, this.elDepth]);
this.result.push(res);
}
private _appendContext(ast: html.Node, input: any[]): any[] {
if (!this.includeSourceSpan) return input;
input.push(ast.sourceSpan!.toString());
return input;
}
}
|
{
const errorString = parseResult.errors.join('\n');
throw new Error(`Unexpected parse errors:\n${errorString}`);
}
|
conditional_block
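_Humanizer walks the parsed HTML tree through the visitor interface and flattens every node into a plain array ([node class, name or value, depth, optional source spans]) so tests can compare whole trees with deep equality. As a rough analogue only, written in Python rather than the TypeScript above and using a hypothetical (kind, value, children) node shape, the flattening step looks like this:

def humanize(nodes, depth=0, out=None):
    """Flatten a (kind, value, children) tree into [kind, value, depth] rows, depth-first."""
    if out is None:
        out = []
    for kind, value, children in nodes:
        out.append([kind, value, depth])
        humanize(children, depth + 1, out)
    return out

tree = [('element', 'div', [('text', 'hello', []), ('element', 'span', [])])]
print(humanize(tree))
# [['element', 'div', 0], ['text', 'hello', 1], ['element', 'span', 1]]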
|
ast_spec_utils.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as html from '../../src/ml_parser/ast';
import {ParseTreeResult} from '../../src/ml_parser/html_parser';
import {ParseLocation} from '../../src/parse_util';
export function humanizeDom(parseResult: ParseTreeResult, addSourceSpan: boolean = false): any[] {
if (parseResult.errors.length > 0) {
const errorString = parseResult.errors.join('\n');
throw new Error(`Unexpected parse errors:\n${errorString}`);
}
return humanizeNodes(parseResult.rootNodes, addSourceSpan);
}
export function humanizeDomSourceSpans(parseResult: ParseTreeResult): any[] {
return humanizeDom(parseResult, true);
}
export function humanizeNodes(nodes: html.Node[], addSourceSpan: boolean = false): any[] {
const humanizer = new _Humanizer(addSourceSpan);
html.visitAll(humanizer, nodes);
return humanizer.result;
}
export function humanizeLineColumn(location: ParseLocation): string {
return `${location.line}:${location.col}`;
}
class _Humanizer implements html.Visitor {
result: any[] = [];
elDepth: number = 0;
constructor(private includeSourceSpan: boolean) {}
visitElement(element: html.Element, context: any): any {
const res = this._appendContext(element, [html.Element, element.name, this.elDepth++]);
if (this.includeSourceSpan) {
res.push(element.startSourceSpan?.toString() ?? null);
res.push(element.endSourceSpan?.toString() ?? null);
}
this.result.push(res);
|
html.visitAll(this, element.children);
this.elDepth--;
}
visitAttribute(attribute: html.Attribute, context: any): any {
const res = this._appendContext(attribute, [html.Attribute, attribute.name, attribute.value]);
this.result.push(res);
}
visitText(text: html.Text, context: any): any {
const res = this._appendContext(text, [html.Text, text.value, this.elDepth]);
this.result.push(res);
}
visitComment(comment: html.Comment, context: any): any {
const res = this._appendContext(comment, [html.Comment, comment.value, this.elDepth]);
this.result.push(res);
}
visitExpansion(expansion: html.Expansion, context: any): any {
const res = this._appendContext(
expansion, [html.Expansion, expansion.switchValue, expansion.type, this.elDepth++]);
this.result.push(res);
html.visitAll(this, expansion.cases);
this.elDepth--;
}
visitExpansionCase(expansionCase: html.ExpansionCase, context: any): any {
const res =
this._appendContext(expansionCase, [html.ExpansionCase, expansionCase.value, this.elDepth]);
this.result.push(res);
}
private _appendContext(ast: html.Node, input: any[]): any[] {
if (!this.includeSourceSpan) return input;
input.push(ast.sourceSpan!.toString());
return input;
}
}
|
html.visitAll(this, element.attrs);
|
random_line_split
|
ast_spec_utils.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as html from '../../src/ml_parser/ast';
import {ParseTreeResult} from '../../src/ml_parser/html_parser';
import {ParseLocation} from '../../src/parse_util';
export function humanizeDom(parseResult: ParseTreeResult, addSourceSpan: boolean = false): any[] {
if (parseResult.errors.length > 0) {
const errorString = parseResult.errors.join('\n');
throw new Error(`Unexpected parse errors:\n${errorString}`);
}
return humanizeNodes(parseResult.rootNodes, addSourceSpan);
}
export function humanizeDomSourceSpans(parseResult: ParseTreeResult): any[] {
return humanizeDom(parseResult, true);
}
export function humanizeNodes(nodes: html.Node[], addSourceSpan: boolean = false): any[] {
const humanizer = new _Humanizer(addSourceSpan);
html.visitAll(humanizer, nodes);
return humanizer.result;
}
export function humanizeLineColumn(location: ParseLocation): string {
return `${location.line}:${location.col}`;
}
class _Humanizer implements html.Visitor {
result: any[] = [];
elDepth: number = 0;
constructor(private includeSourceSpan: boolean) {}
visitElement(element: html.Element, context: any): any {
const res = this._appendContext(element, [html.Element, element.name, this.elDepth++]);
if (this.includeSourceSpan) {
res.push(element.startSourceSpan?.toString() ?? null);
res.push(element.endSourceSpan?.toString() ?? null);
}
this.result.push(res);
html.visitAll(this, element.attrs);
html.visitAll(this, element.children);
this.elDepth--;
}
visitAttribute(attribute: html.Attribute, context: any): any {
const res = this._appendContext(attribute, [html.Attribute, attribute.name, attribute.value]);
this.result.push(res);
}
visitText(text: html.Text, context: any): any {
const res = this._appendContext(text, [html.Text, text.value, this.elDepth]);
this.result.push(res);
}
visitComment(comment: html.Comment, context: any): any {
const res = this._appendContext(comment, [html.Comment, comment.value, this.elDepth]);
this.result.push(res);
}
|
(expansion: html.Expansion, context: any): any {
const res = this._appendContext(
expansion, [html.Expansion, expansion.switchValue, expansion.type, this.elDepth++]);
this.result.push(res);
html.visitAll(this, expansion.cases);
this.elDepth--;
}
visitExpansionCase(expansionCase: html.ExpansionCase, context: any): any {
const res =
this._appendContext(expansionCase, [html.ExpansionCase, expansionCase.value, this.elDepth]);
this.result.push(res);
}
private _appendContext(ast: html.Node, input: any[]): any[] {
if (!this.includeSourceSpan) return input;
input.push(ast.sourceSpan!.toString());
return input;
}
}
|
visitExpansion
|
identifier_name
|
config.ts
|
/* tslint:disable:no-console */
import Chalk from 'chalk'
import * as fs from 'fs-extra'
import * as _ from 'lodash'
import * as meow from 'meow'
import * as path from 'path'
export type ILogLevel = 'error'|'warning'|'log'
export type IEnv = 'development'|'testing'|'production'
interface IFlags {
help: boolean
version: boolean
port: number
key: string
secret: string
callback: string
logLevel: string
dev: boolean
config: string
}
export interface IConfig {
port: number
key: string
secret: string
callback: string
logLevel: ILogLevel
env: IEnv
isDev(): boolean
}
const DEFAULT_PORT = 3000
const DEFAULT_KEY = '5cdc0f5ec9c28202f1098f615edba5cd'
const DEFAULT_SECRET = 'e3b842e3b923b0fb'
const DEFAULT_CALLBACK = 'http://localhost:8000/#/login'
const DEFAULT_LOG_LEVEL: ILogLevel = 'error'
const DEFAULT_ENV: IEnv = 'production'
const AVAILABLE_LOG_LEVEL: ILogLevel[] = ['error', 'warning', 'log']
const AVAILABLE_ENV: IEnv[] = ['production', 'testing', 'development']
const { gray, green, yellow } = Chalk
const cli = meow<IFlags>(
`
Options [${gray('default value')}]
-h, --help Show this help message then exit
-v, --version Show version number
-p, --port Port of this server [${yellow('3000')}]
-k, --key Flickr API consumer key [${green('"a test key"')}]
-s, --secret Flickr API consumer key secret [${green('"a test secret"')}]
The test key will redirect to
http://localhost:3000
-b, --callback The URL Flickr login page will redirect to [${green('"a test URL"')}]
-l, --logLevel The lowest level to log [${green('"error"')}]
Can be: "error", "warning", "log"
-d, --dev Set environment to "development" [${yellow('false')}]
-c, --config Specify the location of config file [${green('"config.json"')}]
                          If the file doesn't exist,
                          it will create a template
Environment variables
NODE_ENV The running environment [${green('"production"')}]
will override "-d" option
Can be: "development", "production"
DEBUG Print debug info [${green('""')}]
Set to "*" to show all
https://github.com/visionmedia/debug
Priority
EnvVars > Options > ConfigFile > Defaults
`,
{
flags: {
help : { alias: 'h', type: 'boolean' },
version : { alias: 'v', type: 'boolean' },
port : { alias: 'p' },
key : { alias: 'k', type: 'string' },
secret : { alias: 's', type: 'string' },
callback: { alias: 'b', type: 'string' },
logLevel: { alias: 'l', type: 'string' },
dev : { alias: 'd', default: false, type: 'boolean' },
config : { alias: 'c', default: 'config.json', type: 'string' },
},
},
)
const configFileLocation = path.resolve(cli.flags.config)
let configFromFile
try {
console.log(`Reading config file from "${configFileLocation}".`)
configFromFile = fs.readJsonSync(configFileLocation)
} catch (e) {
if (e.code === 'ENOENT') {
console.log(`Config file not found. Generating example config file at "${configFileLocation}".`)
try {
fs.copySync(path.join(__dirname, '../../config.default.json'), configFileLocation)
console.log('Done generating. Please modify the config file then run again. Exiting...')
process.exit(1)
} catch (e2) {
console.error(`Cannot generate example config file at "${configFileLocation}". Exiting...`)
process.exit(-1)
}
}
}
const config: IConfig = {
port : _.isFinite(cli.flags.port) ? cli.flags.port : (configFromFile.port || DEFAULT_PORT),
key : cli.flags.key || configFromFile.key || DEFAULT_KEY,
secret : cli.flags.secret || configFromFile.secret || DEFAULT_SECRET,
callback: cli.flags.callback || configFromFile.callback || DEFAULT_CALLBACK,
logLevel: null,
env : null,
isDev : () => config.env === 'development',
}
if (_.includes(AVAILABLE_LOG_LEVEL, cli.flags.logLevel)) {
config.logLevel = cli.flags.logLevel
} else if (_.includes(AVAILABLE_LOG_LEVEL, configFromFile.logLevel)) {
config.logLevel = configFromFile.logLevel
} else {
config.logLevel = DEFAULT_LOG_LEVEL
}
if (_.includes(AVAILABLE_ENV, process.env.NODE_ENV)) {
config.env = process.env.NODE_ENV as IEnv
} else if (cli.flags.dev) {
config.env = 'development'
} else if (_.includes(AVAILABLE_ENV, configFromFile.env)) {
config.env = configFromFile.env
} else
|
process.env.NODE_ENV = config.env
export default config
|
{
config.env = DEFAULT_ENV
}
|
conditional_block
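The help text spells out the resolution order (EnvVars > Options > ConfigFile > Defaults), and config.ts implements it with one if/else chain per setting. A generic version of that layered lookup, sketched in Python rather than the project's TypeScript and with illustrative setting names and values, could look like this:

def resolve_setting(name, env_vars, cli_flags, config_file, defaults):
    """Return the value of `name` from the highest-priority source that defines it."""
    for source in (env_vars, cli_flags, config_file, defaults):
        if source.get(name) is not None:
            return source[name]
    return None

defaults = {'port': 3000, 'logLevel': 'error', 'env': 'production'}
env_vars = {'logLevel': 'warning'}
cli_flags = {'port': 8080}
config_file = {'port': 4000, 'logLevel': 'log'}

print(resolve_setting('port', env_vars, cli_flags, config_file, defaults))      # 8080 (CLI flag beats config file)
print(resolve_setting('logLevel', env_vars, cli_flags, config_file, defaults))  # 'warning' (environment beats everything)
print(resolve_setting('env', env_vars, cli_flags, config_file, defaults))       # 'production' (nothing set, default used)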
|
config.ts
|
/* tslint:disable:no-console */
import Chalk from 'chalk'
import * as fs from 'fs-extra'
import * as _ from 'lodash'
import * as meow from 'meow'
import * as path from 'path'
export type ILogLevel = 'error'|'warning'|'log'
export type IEnv = 'development'|'testing'|'production'
interface IFlags {
help: boolean
version: boolean
port: number
key: string
secret: string
callback: string
logLevel: string
dev: boolean
config: string
}
export interface IConfig {
port: number
key: string
secret: string
|
isDev(): boolean
}
const DEFAULT_PORT = 3000
const DEFAULT_KEY = '5cdc0f5ec9c28202f1098f615edba5cd'
const DEFAULT_SECRET = 'e3b842e3b923b0fb'
const DEFAULT_CALLBACK = 'http://localhost:8000/#/login'
const DEFAULT_LOG_LEVEL: ILogLevel = 'error'
const DEFAULT_ENV: IEnv = 'production'
const AVAILABLE_LOG_LEVEL: ILogLevel[] = ['error', 'warning', 'log']
const AVAILABLE_ENV: IEnv[] = ['production', 'testing', 'development']
const { gray, green, yellow } = Chalk
const cli = meow<IFlags>(
`
Options [${gray('default value')}]
-h, --help Show this help message then exit
-v, --version Show version number
-p, --port Port of this server [${yellow('3000')}]
-k, --key Flickr API consumer key [${green('"a test key"')}]
-s, --secret Flickr API consumer key secret [${green('"a test secret"')}]
The test key will redirect to
http://localhost:3000
-b, --callback The URL Flickr login page will redirect to [${green('"a test URL"')}]
-l, --logLevel The lowest level to log [${green('"error"')}]
Can be: "error", "warning", "log"
-d, --dev Set environment to "development" [${yellow('false')}]
-c, --config Specify the location of config file [${green('"config.json"')}]
                          If the file doesn't exist,
                          it will create a template
Environment variables
NODE_ENV The running environment [${green('"production"')}]
will override "-d" option
Can be: "development", "production"
DEBUG Print debug info [${green('""')}]
Set to "*" to show all
https://github.com/visionmedia/debug
Priority
EnvVars > Options > ConfigFile > Defaults
`,
{
flags: {
help : { alias: 'h', type: 'boolean' },
version : { alias: 'v', type: 'boolean' },
port : { alias: 'p' },
key : { alias: 'k', type: 'string' },
secret : { alias: 's', type: 'string' },
callback: { alias: 'b', type: 'string' },
logLevel: { alias: 'l', type: 'string' },
dev : { alias: 'd', default: false, type: 'boolean' },
config : { alias: 'c', default: 'config.json', type: 'string' },
},
},
)
const configFileLocation = path.resolve(cli.flags.config)
let configFromFile
try {
console.log(`Reading config file from "${configFileLocation}".`)
configFromFile = fs.readJsonSync(configFileLocation)
} catch (e) {
if (e.code === 'ENOENT') {
console.log(`Config file not found. Generating example config file at "${configFileLocation}".`)
try {
fs.copySync(path.join(__dirname, '../../config.default.json'), configFileLocation)
console.log('Done generating. Please modify the config file then run again. Exiting...')
process.exit(1)
} catch (e2) {
console.error(`Cannot generate example config file at "${configFileLocation}". Exiting...`)
process.exit(-1)
}
}
}
const config: IConfig = {
port : _.isFinite(cli.flags.port) ? cli.flags.port : (configFromFile.port || DEFAULT_PORT),
key : cli.flags.key || configFromFile.key || DEFAULT_KEY,
secret : cli.flags.secret || configFromFile.secret || DEFAULT_SECRET,
callback: cli.flags.callback || configFromFile.callback || DEFAULT_CALLBACK,
logLevel: null,
env : null,
isDev : () => config.env === 'development',
}
if (_.includes(AVAILABLE_LOG_LEVEL, cli.flags.logLevel)) {
config.logLevel = cli.flags.logLevel
} else if (_.includes(AVAILABLE_LOG_LEVEL, configFromFile.logLevel)) {
config.logLevel = configFromFile.logLevel
} else {
config.logLevel = DEFAULT_LOG_LEVEL
}
if (_.includes(AVAILABLE_ENV, process.env.NODE_ENV)) {
config.env = process.env.NODE_ENV as IEnv
} else if (cli.flags.dev) {
config.env = 'development'
} else if (_.includes(AVAILABLE_ENV, configFromFile.env)) {
config.env = configFromFile.env
} else {
config.env = DEFAULT_ENV
}
process.env.NODE_ENV = config.env
export default config
|
callback: string
logLevel: ILogLevel
env: IEnv
|
random_line_split
|
NullPoseHandler.py
|
#!/usr/bin/env python
"""
==========================================================
NullPose.py - Pose Handler for single region without Vicon
==========================================================
"""
import sys, time
from numpy import *
from lib.regions import *
import lib.handlers.handlerTemplates as handlerTemplates
class NullPoseHandler(handlerTemplates.PoseHandler):
def __init__(self, executor, shared_data, initial_region):
"""
Null pose handler - used for single region operation without Vicon
initial_region (region): Starting position for robot
"""
r = executor.proj.rfiold.indexOfRegionWithName(initial_region)
center = executor.proj.rfiold.regions[r].getCenter()
self.x = center[0]
self.y = center[1]
self.theta = 0
def getPose(self, cached=False):
x=self.x
y=self.y
o=self.theta
|
return array([x, y, o])
def setPose(self, x, y, theta):
self.x=x
self.y=y
self.theta=theta
|
random_line_split
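NullPoseHandler simply caches one (x, y, theta) pose, seeded from the centre of the initial region, and getPose() keeps returning it until setPose() overwrites it. A self-contained sketch of that contract, without the LTLMoP executor plumbing the real handler receives, might look like this:

from numpy import array

class FixedPose(object):
    """Minimal stand-in: holds a single pose and returns it as [x, y, theta]."""
    def __init__(self, x, y, theta=0.0):
        self.x, self.y, self.theta = x, y, theta

    def getPose(self, cached=False):
        # `cached` is accepted for interface compatibility; the pose is always cached here.
        return array([self.x, self.y, self.theta])

    def setPose(self, x, y, theta):
        self.x, self.y, self.theta = x, y, theta

pose = FixedPose(1.5, 2.0)
print(pose.getPose())        # prints the seeded pose [x, y, theta]
pose.setPose(3.0, 4.0, 0.5)
print(pose.getPose())        # prints the updated pose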
|
|
NullPoseHandler.py
|
#!/usr/bin/env python
"""
==========================================================
NullPose.py - Pose Handler for single region without Vicon
==========================================================
"""
import sys, time
from numpy import *
from lib.regions import *
import lib.handlers.handlerTemplates as handlerTemplates
class NullPoseHandler(handlerTemplates.PoseHandler):
def
|
(self, executor, shared_data, initial_region):
"""
Null pose handler - used for single region operation without Vicon
initial_region (region): Starting position for robot
"""
r = executor.proj.rfiold.indexOfRegionWithName(initial_region)
center = executor.proj.rfiold.regions[r].getCenter()
self.x = center[0]
self.y = center[1]
self.theta = 0
def getPose(self, cached=False):
x=self.x
y=self.y
o=self.theta
return array([x, y, o])
def setPose(self, x, y, theta):
self.x=x
self.y=y
self.theta=theta
|
__init__
|
identifier_name
|
NullPoseHandler.py
|
#!/usr/bin/env python
"""
==========================================================
NullPose.py - Pose Handler for single region without Vicon
==========================================================
"""
import sys, time
from numpy import *
from lib.regions import *
import lib.handlers.handlerTemplates as handlerTemplates
class NullPoseHandler(handlerTemplates.PoseHandler):
|
def __init__(self, executor, shared_data, initial_region):
"""
Null pose handler - used for single region operation without Vicon
initial_region (region): Starting position for robot
"""
r = executor.proj.rfiold.indexOfRegionWithName(initial_region)
center = executor.proj.rfiold.regions[r].getCenter()
self.x = center[0]
self.y = center[1]
self.theta = 0
def getPose(self, cached=False):
x=self.x
y=self.y
o=self.theta
return array([x, y, o])
def setPose(self, x, y, theta):
self.x=x
self.y=y
self.theta=theta
|
identifier_body
|
|
behaviors.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import re
from utils import CDNEngine
from utils import request
if sys.version_info >= (3, 0):
import subprocess as commands
import urllib.parse as urlparse
else:
import commands
import urlparse
def detect(hostname):
|
"""
    Performs CDN detection using information disclosed by server error pages.
Parameters
----------
hostname : str
Hostname to assess
"""
print('[+] Error server detection\n')
hostname = urlparse.urlparse(hostname).netloc
    regexp = re.compile(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b')
out = commands.getoutput("host " + hostname)
addresses = regexp.finditer(out)
for addr in addresses:
res = request.do('http://' + addr.group())
if res is not None and res.status_code == 500:
CDNEngine.find(res.text.lower())
|
identifier_body
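detect() shells out to the `host` command, pulls every IPv4 address out of its output with the regular expression above, and then probes each address directly, looking for an HTTP 500 page whose body identifies the CDN. The extraction step can be exercised on its own like this (the sample `host` output is made up for illustration):

import re

# Same pattern as in detect(), written as a raw string.
IPV4_RE = re.compile(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b')

sample_output = (
    'example.com has address 93.184.216.34\n'
    'example.com mail is handled by 0 .\n'
)
addresses = [match.group() for match in IPV4_RE.finditer(sample_output)]
print(addresses)  # ['93.184.216.34']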
|
|
behaviors.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import re
from utils import CDNEngine
from utils import request
if sys.version_info >= (3, 0):
import subprocess as commands
import urllib.parse as urlparse
else:
import commands
import urlparse
def detect(hostname):
"""
    Performs CDN detection using information disclosed by server error pages.
|
Parameters
----------
hostname : str
Hostname to assess
"""
print('[+] Error server detection\n')
hostname = urlparse.urlparse(hostname).netloc
    regexp = re.compile(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b')
out = commands.getoutput("host " + hostname)
addresses = regexp.finditer(out)
for addr in addresses:
res = request.do('http://' + addr.group())
if res is not None and res.status_code == 500:
CDNEngine.find(res.text.lower())
|
random_line_split
|
|
behaviors.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import re
from utils import CDNEngine
from utils import request
if sys.version_info >= (3, 0):
import subprocess as commands
import urllib.parse as urlparse
else:
import commands
import urlparse
def detect(hostname):
"""
    Performs CDN detection using information disclosed by server error pages.
Parameters
----------
hostname : str
Hostname to assess
"""
print('[+] Error server detection\n')
hostname = urlparse.urlparse(hostname).netloc
    regexp = re.compile(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b')
out = commands.getoutput("host " + hostname)
addresses = regexp.finditer(out)
for addr in addresses:
|
res = request.do('http://' + addr.group())
if res is not None and res.status_code == 500:
CDNEngine.find(res.text.lower())
|
conditional_block
|
|
behaviors.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import re
from utils import CDNEngine
from utils import request
if sys.version_info >= (3, 0):
import subprocess as commands
import urllib.parse as urlparse
else:
import commands
import urlparse
def
|
(hostname):
"""
    Performs CDN detection using information disclosed by server error pages.
Parameters
----------
hostname : str
Hostname to assess
"""
print('[+] Error server detection\n')
hostname = urlparse.urlparse(hostname).netloc
    regexp = re.compile(r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b')
out = commands.getoutput("host " + hostname)
addresses = regexp.finditer(out)
for addr in addresses:
res = request.do('http://' + addr.group())
if res is not None and res.status_code == 500:
CDNEngine.find(res.text.lower())
|
detect
|
identifier_name
|
webpack.config.ts
|
import { isAbsolute, join, resolve } from 'path';
import * as dotenv from 'dotenv';
import MiniCssExtractPlugin from 'mini-css-extract-plugin';
import webpack, { Configuration, RuleSetRule } from 'webpack';
import FriendlyErrorsPlugin, { Options as FriendlyErrorsOptions } from 'friendly-errors-webpack-plugin';
import BarPlugin, { Options as BarOptions } from 'webpackbar';
import CleanWebpackPlugin from 'clean-webpack-plugin';
import OptimizeCssAssetsPlugin from 'optimize-css-assets-webpack-plugin';
import { BundleAnalyzerPlugin } from 'webpack-bundle-analyzer';
import CopyPlugin from 'copy-webpack-plugin';
import HtmlPlugin from 'html-webpack-plugin';
import { BabelLoaderOptions, Chain } from './build/chain';
import AntdScssThemePlugin from './build/antd-scss-theme-plugin';
import { Options as TypescriptLoaderOptions } from 'ts-loader';
import tsImport from 'ts-import-plugin';
import { colorPaletteFunction, colorPaletteFunctionSignature } from './build/antdScssColorPalette';
import WebappPlugin from 'webapp-webpack-plugin';
import EntrypointPathPlugin from './build/EntrypointPathPlugin';
import TerserPlugin from 'terser-webpack-plugin';
import { IgnoreNotFoundExportPlugin, IgnoreNotFoundExportPluginOptions } from './build/out/plugins/ts-webpack-ignore-not-found-export';
import ForkTsCheckerPlugin from 'fork-ts-checker-webpack-plugin';
import { NormalizedMessage } from 'fork-ts-checker-webpack-plugin/lib/NormalizedMessage';
import Dashboard from 'webpack-dashboard/plugin';
import { DuplicatesPlugin } from 'inspectpack/plugin';
const chain = new Chain({
mode : process.env.NODE_ENV as any,
sourceDir: resolve(__dirname, 'src'),
outputDir: resolve(__dirname, process.env.NODE_ENV === 'development' ? 'dev' : 'dist'),
});
const { isDev, isProd } = chain;
const cache = true;
const assetsByEntry = isProd;
const _assetPath = assetsByEntry ? 'vendor/codex_[entrypoint]' : 'vendor';
const minimize = isProd;
const assetPath = (...parts: string[]) => join(_assetPath, ...parts);
const rootPath = (...parts: string[]) => resolve(__dirname, '..', ...parts);
const tsconfig = resolve(__dirname, 'tsconfig.webpack.json_');
const tschecker = false;
const dashboardPort = 23345;
const backendURL = process.env.BACKEND_URL || 'http://codex.local';
// defineVars are added to DefinePlugin and HtmlPlugin
const defineVars = {
DEV : isDev,
PROD : isProd,
TEST : chain.get('mode') === 'testing',
ENV : dotenv.load({ path: resolve('.env') }).parsed,
BACKEND_URL: backendURL,
};
//region: Helper Functions
const babelImportPlugins = [
[ 'import', { libraryName: 'antd', style: true }, 'import-antd' ],
[ 'import', { libraryName: 'lodash', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash' ],
[ 'import', { libraryName: 'lodash-es', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash-es' ],
[ 'import', { libraryName: 'jquery', libraryDirectory: 'src' }, 'jquery' ],
];
export function addBabelToRule(chain: Chain, ruleName: string, options: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
rule.use('babel-loader')
.loader('babel-loader')
.options(<BabelLoaderOptions>{
babelrc : false,
configFile : false,
presets : [
[ 'react-app' ],
],
plugins : [
'jsx-control-statements',
[ 'react-css-modules', {
'context' : chain.srcPath(),
'filetypes' : {
'.mscss': {
'syntax' : 'postcss-scss',
'plugins': [
'postcss-nested',
],
},
},
'handleMissingStyleName': 'warn',
'generateScopedName' : '[name]__[local]',
} ],
...babelImportPlugins,
].filter(Boolean),
cacheDirectory: cache,
compact : minimize,
...options,
} as any);
}
export function addTsToRule(chain: Chain, ruleName: string, options: Partial<TypescriptLoaderOptions> = {}, babelOptions: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
if ( !rule.has('babel-loader') ) {
addBabelToRule(chain, ruleName, babelOptions);
}
rule
.use('ts-loader')
.loader('ts-loader')
.options(<Partial<TypescriptLoaderOptions>>{
transpileOnly : true,
configFile : tsconfig,
// happyPackMode : true,
getCustomTransformers: () => ({
before: [
tsImport([
{ libraryName: 'antd', style: true },
{ libraryName: 'semantic-ui-react', libraryDirectory: (importName) => Object.keys(require('./build/semantic-data').nameLocations).includes(importName) ? join('dist/es', require('./build/semantic-data').nameLocations[ importName ]) : 'dist/es' },
{ libraryName: 'neo-async', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash-es', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'jquery', libraryDirectory: 'src', camel2DashComponentName: false },
]) as any,
],
}),
...options,
} as any);
}
let assetLoader = isDev ? 'file-loader' : 'file-loader';
export function addAssetsLoaderForEntry(chain: Chain, name: string, path: string) {
let assetPath = _assetPath.replace('[entrypoint]', name);
chain.module.rule('fonts-' + name)
.test(/\.*\.(woff2?|woff|eot|ttf|otf)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/fonts/',
outputPath: assetPath + '/fonts/',
});
chain.module.rule('images-' + name)
.test(/\.*\.(png|jpe?g|gif|svg)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/img/',
outputPath: assetPath + '/img/',
});
}
export function addPluginEntry(chain: Chain, name: string, dirPath: string, entryFile: string = 'index.ts') {
let umdName = `@codex/${name}`;
chain.entry(name)
.add(chain.srcPath(`pre-path.js?entryName=${name}`))
.add(isAbsolute(entryFile) ? entryFile : join(dirPath, entryFile));
chain.externals({
...chain.get('externals') || {},
[ umdName ]: [ 'codex', name ],
});
// chain.resolve.alias.set(umdName, dirPath);
addAssetsLoaderForEntry(chain, name, dirPath);
chain.module.rule('ts').include.add(dirPath);
chain.module.rule('js').include.add(dirPath);
}
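// Editorial note (assumption about intent): the externals entry above makes sibling entrypoints
// resolve `import ... from '@codex/<name>'` to the runtime global `window.codex[<name>]`, which
// matches the `library(['codex', '[name]'])` / `libraryTarget('window')` output settings below.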
export function addHMR(chain: Chain, reactHotLoader: boolean = true) {
chain.plugin('hmr').use(webpack.HotModuleReplacementPlugin, [ {} ]);
chain.resolve.alias.set('react-dom', '@hot-loader/react-dom');
return;
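    // NOTE (editorial): the early return above makes everything below unreachable; the
    // babel/react-hot-loader option patching appears to be intentionally disabled for now.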
const modifyOptions = (o: BabelLoaderOptions) => {
if ( reactHotLoader ) {
o.plugins.push('react-hot-loader/babel');
}
let reactCssModulesIndex = o.plugins.findIndex(plugin => Array.isArray(plugin) && plugin[ 0 ] === 'react-css-modules');
if ( reactCssModulesIndex !== - 1 ) {
o.plugins[ reactCssModulesIndex ][ 1 ].webpackHotModuleReloading = true;
}
return o;
};
chain.module.rule('js').use('babel-loader').tap(modifyOptions);
chain.module.rule('ts').use('babel-loader').tap(modifyOptions);
}
export function addAnalyzerPlugins(chain: Chain, when: boolean = true) {
chain.when(when, chain => chain.plugin('bundle-analyzer').use(BundleAnalyzerPlugin, [ <BundleAnalyzerPlugin.Options>{
analyzerMode : 'static',
openAnalyzer : false,
reportFilename: 'bundle-analyzer.html',
} ]));
}
export function addPackage(chain: Chain, name: string, umdName?: string) {
umdName = umdName || `@codex/${name}`;
chain.when(isDev, chain => {
let path = rootPath('packages', name, 'src');
chain.resolve.alias.set(umdName, path);
chain.module.rule('ts').include.add(path);
}, chain => {
chain.resolve.alias.set(umdName, rootPath('packages', name, 'es'));
});
}
export function addDashboardPlugin(chain: Chain, port: number = dashboardPort)
|
export function addDuplicatesPlugin(chain: Chain) {
chain.plugin('duplicates').use(DuplicatesPlugin, [ {
verbose : true,
emitErrors: false,
} ]);
}
//endregion
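// Usage sketch (assumption: these helpers are exported above but never invoked in this file,
// presumably they are called from a consuming config that imports `chain`):
//   addHMR(chain);                      // dev-server builds only
//   addAnalyzerPlugins(chain, isProd);
//   addDashboardPlugin(chain);
//   addDuplicatesPlugin(chain);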
//region: Plugins
chain.plugin('clean').use(CleanWebpackPlugin, [
[ 'js/', 'css/', '*.hot-update.*', 'assets/', 'vendor/' ],
<CleanWebpackPlugin.Options>{ root: chain.outPath(), verbose: false },
]);
chain.plugin('define').use(webpack.DefinePlugin, [ {
'process.env': {
NODE_ENV: `"${chain.get('mode')}"`,
},
...defineVars
} ]);
chain.plugin('bar').use(BarPlugin, [ <BarOptions>{
profile : true,
compiledIn: true,
minimal : false,
} ]);
chain.plugin('loader-options').use(webpack.LoaderOptionsPlugin, [ { options: {} } ]);
chain.plugin('friendly-errors').use(FriendlyErrorsPlugin, [ <FriendlyErrorsOptions>{
compilationSuccessInfo: { messages: [ 'Build success' ], notes: [] },
onErrors : function (severity, errors) { console.error(severity, errors); },
clearConsole : false,
logLevel : true,
additionalFormatters : [],
additionalTransformers: [],
} ]);
chain.plugin('copy').use(CopyPlugin, [ [
assetsByEntry ? { from: chain.srcPath('core/assets'), to: chain.outPath('vendor/codex_core') } : { from: chain.srcPath('core/assets'), to: chain.outPath('vendor') },
].filter(Boolean) ]);
chain.plugin('html').use(HtmlPlugin, [ <HtmlPlugin.Options>{
filename : 'index.html',
template : resolve(__dirname, 'index.html'),
inject : 'head',
chunksSortMode : isDev ? 'dependency' : 'auto',
templateParameters: {
assetPath: {
core : _assetPath.replace('[entrypoint]', 'core'),
phpdoc: _assetPath.replace('[entrypoint]', 'phpdoc'),
},
...defineVars
},
} ]);
chain.plugin('favicon').use(WebappPlugin, [ {
logo : rootPath('node_modules/@fortawesome/fontawesome-free/svgs/solid/book.svg'),
cache,
prefix: assetsByEntry ? 'vendor/codex_core/img' : 'vendor/img',
inject: true,
} ]).after('html');
chain.plugin('ignore-not-found-export').use(IgnoreNotFoundExportPlugin, [ <IgnoreNotFoundExportPluginOptions>{
exportsToIgnore: [
// 'Toolbar.*Props',
// 'Layout.*Props',
'.*Props',
],
} ]);
chain.when(tschecker, chain => {
chain.plugin('ts-checker-core').use(ForkTsCheckerPlugin, [ <ForkTsCheckerPluginOptions>{
tsconfig : chain.srcPath('core/tsconfig.json'),
ignoreDiagnostics: [
// ERROR in /home/radic/theme/node_modules/mobx/lib/types/observableset.d.ts(21,22):
//TS2420: Class 'ObservableSet<T>' incorrectly implements interface 'Set<T>'.
2420,
],
} ]);
});
chain.when(isProd, chain => {
// chain.plugin('write-file').use(require('write-file-webpack-plugin'), [ { useHashIndex: false } ]);
chain.plugin('css-extract').use(MiniCssExtractPlugin, [ {
filename : assetPath('css/[name].css?[hash]'),
chunkFilename: assetPath('css/[name].chunk.css?[chunkhash]'),
} ]);
chain.plugin('css-optimize').use(OptimizeCssAssetsPlugin, [ <OptimizeCssAssetsPlugin.Options>{
assetNameRegExp : /\.css$/g,
cssProcessor : require('cssnano'),
cssProcessorOptions: { discardComments: { removeAll: true } },
canPrint : true,
} ]);
});
chain.when(assetsByEntry, chain => chain.plugin('path').use(EntrypointPathPlugin));
//endregion
//region: Style Loaders
export function addStyleLoaders(config: Configuration) {
AntdScssThemePlugin.SCSS_THEME_PATH = chain.srcPath('core/styling/antd/theme.scss');
let antdScssLoader = AntdScssThemePlugin.themify({
loader : 'sass-loader',
options: {
scssThemePath: AntdScssThemePlugin.SCSS_THEME_PATH,
functions : { [ colorPaletteFunctionSignature ]: colorPaletteFunction },
},
});
let scssLoader = { loader: 'sass-loader', options: {} };
let antdLessLoader = AntdScssThemePlugin.themify('less-loader');
let postCssLoader = { loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('postcss-clean'), require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } };
config.module.rules.push(...[ {
test: /\.module.css$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev, modules: true, localIdentName: '[name]__[local]' } },
{ loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } },
].filter(Boolean),
}, {
test : /\.css$/,
exclude: [ /\.module.css$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev } },
isProd && postCssLoader,
].filter(Boolean),
}, {
oneOf: [
{
test: /\.(module\.scss|mscss)$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: false, modules: true, localIdentName: '[name]__[local]' } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
}, {
test : /\.scss$/,
exclude: [ /\.module\.scss$/, /\.mscss$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: true } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
} ],
}, {
test: /\.less$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev } },
isProd && postCssLoader,
{ loader: antdLessLoader.loader, options: { ...antdLessLoader.options, ...{ javascriptEnabled: true, sourceMap: isDev } } },
].filter(Boolean),
} ] as RuleSetRule[]);
config.plugins.push(new AntdScssThemePlugin(AntdScssThemePlugin.SCSS_THEME_PATH));
return config;
}
chain.onToConfig(config => addStyleLoaders(config));
//endregion
//region: Optimization
chain.optimization
.namedChunks(true)
.namedModules(true)
.splitChunks(<webpack.Options.SplitChunksOptions>{
maxInitialRequests: Infinity,
maxAsyncRequests : Infinity,
// maxSize : Infinity,
// name : true,
})
.minimize(minimize)
;
chain.optimization.minimizer('terser').use(TerserPlugin, [ <TerserPlugin.TerserPluginOptions>{
terserOptions: {
parse : { ecma: 8 },
mangle : { safari10: true },
compress: {
ecma : 5,
warnings : false,
comparisons: false,
inline : 2,
},
output : {
ecma : 5,
comments : false,
ascii_only: true,
},
},
parallel : true,
cache : true,
sourceMap : false,
} ]);
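// (Editorial) These terser settings follow the familiar create-react-app defaults: parse modern
// (ECMA 2017) input but emit ES5-compatible output, `safari10: true` to work around Safari 10/11
// loop-scoping bugs, and `ascii_only` to avoid corrupting emoji and certain regexes.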
chain.when(isDev, chain => {}, chain => {});
//endregion
//region: Init
chain
.target('web')
.cache(cache)
.devtool(isDev ? 'cheap-module-source-map' : false as any)
;
chain.output
.path(chain.outPath())
.pathinfo(isDev)
.publicPath('/')
.library([ 'codex', '[name]' ] as any)
.libraryTarget('window')
.filename(assetPath('js/[name].js'))
.chunkFilename(assetPath('js/chunk.[name].js'));
chain.output.when(isDev, chain => chain,
// .sourceMapFilename('[file].map')
// .devtoolModuleFilenameTemplate((info: DevtoolModuleFilenameTemplateInfo) => {
// return 'file://' + resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// return resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// }),
);
chain.resolve
.symlinks(true)
.extensions.merge([ '.js', '.vue', '.json', '.web.ts', '.ts', '.web.tsx', '.tsx', '.styl', '.less', '.scss', '.stylus', '.css', '.mjs', '.web.js', '.json', '.web.jsx', '.jsx' ]).end()
.mainFields.merge([ 'module', 'browser', 'main' ]).end() // 'jsnext:main',
.mainFiles.merge([ 'index', 'index.ts', 'index.tsx' ]).end()
.modules.merge([ 'node_modules' ]).end()
.alias.merge({
'mobx$' : chain.srcPath('mobx.js'),
'lodash-es$' : 'lodash',
'async$' : 'neo-async',
// 'react-dom' : '@hot-loader/react-dom',
'@ant-design/icons': 'purched-antd-icons', /** @see https://github.com/ant-design/ant-design/issues/12011 */
}).end();
chain.resolveLoader
.modules.merge([ 'node_modules' ]).end()
.extensions.merge([ '.js', '.json', '.ts' ]).end();
chain.externals({});
chain.stats({
warningsFilter: /export .* was not found in/,
});
chain.node.merge({
dgram : 'empty',
fs : 'empty',
net : 'empty',
tls : 'empty',
child_process: 'empty',
// module : 'empty',
// dns : 'mock',
});
chain.performance
.hints(false)
.maxEntrypointSize(999999999)
.maxAssetSize(999999999)
.assetFilter(as => false);
chain.module.set('strictExportPresence', true);
chain.module.rule('ts').test(/\.(ts|tsx)$/);
chain.module.rule('js').test(/\.(js|mjs|jsx)$/);
chain.module.rule('vendor-js').test(/\.(js|mjs)$/).exclude.add(/@babel(?:\/|\\{1,2})runtime/);
addTsToRule(chain, 'ts', {});
addBabelToRule(chain, 'js', {
customize: require.resolve('babel-preset-react-app/webpack-overrides'),
});
addBabelToRule(chain, 'vendor-js', {
presets : [ [ require.resolve('babel-preset-react-app/dependencies'), { helpers: true } ] ],
plugins : [
...babelImportPlugins,
],
sourceMaps: false,
});
addPackage(chain, 'api', '@codex/api');
// addPluginEntry(chain, 'router', chain.srcPath('router'), 'index.tsx')
// addPluginEntry(chain, 'core', chain.srcPath('core'), '_small.tsx');
addPluginEntry(chain, 'core', chain.srcPath('core'), 'index.tsx');
addPluginEntry(chain, 'phpdoc', chain.srcPath('phpdoc'), 'index.tsx');
addPluginEntry(chain, 'comments', chain.srcPath('comments'), 'index.tsx');
chain.resolve.modules.merge([ chain.srcPath('core') ]).end();
chain.resolve.alias.merge({
'heading' : chain.srcPath('core/styling/heading.less'),
'../../theme.config$': chain.srcPath('core/styling/theme.config'),
'./core/index.less$' : chain.srcPath('core/styling/antd/core.less'),
});
//endregion
const config = chain.toConfig();
export default config;
export { chain, config };
//region: interfaces & types
declare type ForkTsCheckerPluginFormatter = (message: NormalizedMessage, useColors: boolean) => string;
interface ForkTsCheckerPluginLogger {
error(message?: any): void;
warn(message?: any): void;
info(message?: any): void;
}
interface ForkTsCheckerPluginOptions {
typescript: string;
tsconfig: string;
compilerOptions: object;
tslint: string | true;
tslintAutoFix: boolean;
watch: string | string[];
async: boolean;
ignoreDiagnostics: number[];
ignoreLints: string[];
ignoreLintWarnings: boolean;
reportFiles: string[];
colors: boolean;
logger: ForkTsCheckerPluginLogger;
formatter: 'default' | 'codeframe' | ForkTsCheckerPluginFormatter;
formatterOptions: any;
silent: boolean;
checkSyntacticErrors: boolean;
memoryLimit: number;
workers: number;
vue: boolean;
useTypescriptIncrementalApi: boolean;
measureCompilationTime: boolean;
}
//endregion
|
{
chain.plugin('dashboard').use(Dashboard, [ {
        port,
} ]);
}
|
identifier_body
|
webpack.config.ts
|
import { isAbsolute, join, resolve } from 'path';
import * as dotenv from 'dotenv';
import MiniCssExtractPlugin from 'mini-css-extract-plugin';
import webpack, { Configuration, RuleSetRule } from 'webpack';
import FriendlyErrorsPlugin, { Options as FriendlyErrorsOptions } from 'friendly-errors-webpack-plugin';
import BarPlugin, { Options as BarOptions } from 'webpackbar';
import CleanWebpackPlugin from 'clean-webpack-plugin';
import OptimizeCssAssetsPlugin from 'optimize-css-assets-webpack-plugin';
import { BundleAnalyzerPlugin } from 'webpack-bundle-analyzer';
import CopyPlugin from 'copy-webpack-plugin';
import HtmlPlugin from 'html-webpack-plugin';
import { BabelLoaderOptions, Chain } from './build/chain';
import AntdScssThemePlugin from './build/antd-scss-theme-plugin';
import { Options as TypescriptLoaderOptions } from 'ts-loader';
import tsImport from 'ts-import-plugin';
import { colorPaletteFunction, colorPaletteFunctionSignature } from './build/antdScssColorPalette';
import WebappPlugin from 'webapp-webpack-plugin';
import EntrypointPathPlugin from './build/EntrypointPathPlugin';
import TerserPlugin from 'terser-webpack-plugin';
import { IgnoreNotFoundExportPlugin, IgnoreNotFoundExportPluginOptions } from './build/out/plugins/ts-webpack-ignore-not-found-export';
import ForkTsCheckerPlugin from 'fork-ts-checker-webpack-plugin';
import { NormalizedMessage } from 'fork-ts-checker-webpack-plugin/lib/NormalizedMessage';
import Dashboard from 'webpack-dashboard/plugin';
import { DuplicatesPlugin } from 'inspectpack/plugin';
const chain = new Chain({
mode : process.env.NODE_ENV as any,
sourceDir: resolve(__dirname, 'src'),
outputDir: resolve(__dirname, process.env.NODE_ENV === 'development' ? 'dev' : 'dist'),
});
const { isDev, isProd } = chain;
const cache = true;
const assetsByEntry = isProd;
const _assetPath = assetsByEntry ? 'vendor/codex_[entrypoint]' : 'vendor';
const minimize = isProd;
const assetPath = (...parts: string[]) => join(_assetPath, ...parts);
const rootPath = (...parts: string[]) => resolve(__dirname, '..', ...parts);
const tsconfig = resolve(__dirname, 'tsconfig.webpack.json_');
const tschecker = false;
const dashboardPort = 23345;
const backendURL = process.env.BACKEND_URL || 'http://codex.local';
// defineVars are added to DefinePlugin and HtmlPlugin
const defineVars = {
DEV : isDev,
PROD : isProd,
TEST : chain.get('mode') === 'testing',
ENV : dotenv.load({ path: resolve('.env') }).parsed,
BACKEND_URL: backendURL,
};
//region: Helper Functions
const babelImportPlugins = [
[ 'import', { libraryName: 'antd', style: true }, 'import-antd' ],
[ 'import', { libraryName: 'lodash', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash' ],
[ 'import', { libraryName: 'lodash-es', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash-es' ],
[ 'import', { libraryName: 'jquery', libraryDirectory: 'src' }, 'jquery' ],
];
export function addBabelToRule(chain: Chain, ruleName: string, options: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
rule.use('babel-loader')
.loader('babel-loader')
.options(<BabelLoaderOptions>{
babelrc : false,
configFile : false,
presets : [
[ 'react-app' ],
],
plugins : [
'jsx-control-statements',
[ 'react-css-modules', {
'context' : chain.srcPath(),
'filetypes' : {
'.mscss': {
'syntax' : 'postcss-scss',
'plugins': [
'postcss-nested',
],
},
},
'handleMissingStyleName': 'warn',
'generateScopedName' : '[name]__[local]',
} ],
...babelImportPlugins,
].filter(Boolean),
cacheDirectory: cache,
compact : minimize,
...options,
} as any);
}
export function addTsToRule(chain: Chain, ruleName: string, options: Partial<TypescriptLoaderOptions> = {}, babelOptions: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
if ( !rule.has('babel-loader') ) {
addBabelToRule(chain, ruleName, babelOptions);
}
rule
.use('ts-loader')
.loader('ts-loader')
.options(<Partial<TypescriptLoaderOptions>>{
transpileOnly : true,
configFile : tsconfig,
// happyPackMode : true,
getCustomTransformers: () => ({
before: [
tsImport([
{ libraryName: 'antd', style: true },
{ libraryName: 'semantic-ui-react', libraryDirectory: (importName) => Object.keys(require('./build/semantic-data').nameLocations).includes(importName) ? join('dist/es', require('./build/semantic-data').nameLocations[ importName ]) : 'dist/es' },
{ libraryName: 'neo-async', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash-es', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'jquery', libraryDirectory: 'src', camel2DashComponentName: false },
]) as any,
],
}),
...options,
} as any);
}
let assetLoader = isDev ? 'file-loader' : 'file-loader';
export function addAssetsLoaderForEntry(chain: Chain, name: string, path: string) {
let assetPath = _assetPath.replace('[entrypoint]', name);
chain.module.rule('fonts-' + name)
.test(/\.*\.(woff2?|woff|eot|ttf|otf)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/fonts/',
outputPath: assetPath + '/fonts/',
});
chain.module.rule('images-' + name)
.test(/\.*\.(png|jpe?g|gif|svg)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/img/',
outputPath: assetPath + '/img/',
});
}
export function addPluginEntry(chain: Chain, name: string, dirPath: string, entryFile: string = 'index.ts') {
let umdName = `@codex/${name}`;
chain.entry(name)
.add(chain.srcPath(`pre-path.js?entryName=${name}`))
.add(isAbsolute(entryFile) ? entryFile : join(dirPath, entryFile));
chain.externals({
...chain.get('externals') || {},
|
// chain.resolve.alias.set(umdName, dirPath);
addAssetsLoaderForEntry(chain, name, dirPath);
chain.module.rule('ts').include.add(dirPath);
chain.module.rule('js').include.add(dirPath);
}
export function addHMR(chain: Chain, reactHotLoader: boolean = true) {
chain.plugin('hmr').use(webpack.HotModuleReplacementPlugin, [ {} ]);
chain.resolve.alias.set('react-dom', '@hot-loader/react-dom');
return;
const modifyOptions = (o: BabelLoaderOptions) => {
if ( reactHotLoader ) {
o.plugins.push('react-hot-loader/babel');
}
let reactCssModulesIndex = o.plugins.findIndex(plugin => Array.isArray(plugin) && plugin[ 0 ] === 'react-css-modules');
if ( reactCssModulesIndex !== - 1 ) {
o.plugins[ reactCssModulesIndex ][ 1 ].webpackHotModuleReloading = true;
}
return o;
};
chain.module.rule('js').use('babel-loader').tap(modifyOptions);
chain.module.rule('ts').use('babel-loader').tap(modifyOptions);
}
export function addAnalyzerPlugins(chain: Chain, when: boolean = true) {
chain.when(when, chain => chain.plugin('bundle-analyzer').use(BundleAnalyzerPlugin, [ <BundleAnalyzerPlugin.Options>{
analyzerMode : 'static',
openAnalyzer : false,
reportFilename: 'bundle-analyzer.html',
} ]));
}
export function addPackage(chain: Chain, name: string, umdName?: string) {
umdName = umdName || `@codex/${name}`;
chain.when(isDev, chain => {
let path = rootPath('packages', name, 'src');
chain.resolve.alias.set(umdName, path);
chain.module.rule('ts').include.add(path);
}, chain => {
chain.resolve.alias.set(umdName, rootPath('packages', name, 'es'));
});
}
export function addDashboardPlugin(chain: Chain, port: number = dashboardPort) {
chain.plugin('dashboard').use(Dashboard, [ {
        port,
} ]);
}
export function addDuplicatesPlugin(chain: Chain) {
chain.plugin('duplicates').use(DuplicatesPlugin, [ {
verbose : true,
emitErrors: false,
} ]);
}
//endregion
//region: Plugins
chain.plugin('clean').use(CleanWebpackPlugin, [
[ 'js/', 'css/', '*.hot-update.*', 'assets/', 'vendor/' ],
<CleanWebpackPlugin.Options>{ root: chain.outPath(), verbose: false },
]);
chain.plugin('define').use(webpack.DefinePlugin, [ {
'process.env': {
NODE_ENV: `"${chain.get('mode')}"`,
},
...defineVars
} ]);
chain.plugin('bar').use(BarPlugin, [ <BarOptions>{
profile : true,
compiledIn: true,
minimal : false,
} ]);
chain.plugin('loader-options').use(webpack.LoaderOptionsPlugin, [ { options: {} } ]);
chain.plugin('friendly-errors').use(FriendlyErrorsPlugin, [ <FriendlyErrorsOptions>{
compilationSuccessInfo: { messages: [ 'Build success' ], notes: [] },
onErrors : function (severity, errors) { console.error(severity, errors); },
clearConsole : false,
logLevel : true,
additionalFormatters : [],
additionalTransformers: [],
} ]);
chain.plugin('copy').use(CopyPlugin, [ [
assetsByEntry ? { from: chain.srcPath('core/assets'), to: chain.outPath('vendor/codex_core') } : { from: chain.srcPath('core/assets'), to: chain.outPath('vendor') },
].filter(Boolean) ]);
chain.plugin('html').use(HtmlPlugin, [ <HtmlPlugin.Options>{
filename : 'index.html',
template : resolve(__dirname, 'index.html'),
inject : 'head',
chunksSortMode : isDev ? 'dependency' : 'auto',
templateParameters: {
assetPath: {
core : _assetPath.replace('[entrypoint]', 'core'),
phpdoc: _assetPath.replace('[entrypoint]', 'phpdoc'),
},
...defineVars
},
} ]);
chain.plugin('favicon').use(WebappPlugin, [ {
logo : rootPath('node_modules/@fortawesome/fontawesome-free/svgs/solid/book.svg'),
cache,
prefix: assetsByEntry ? 'vendor/codex_core/img' : 'vendor/img',
inject: true,
} ]).after('html');
chain.plugin('ignore-not-found-export').use(IgnoreNotFoundExportPlugin, [ <IgnoreNotFoundExportPluginOptions>{
exportsToIgnore: [
// 'Toolbar.*Props',
// 'Layout.*Props',
'.*Props',
],
} ]);
chain.when(tschecker, chain => {
chain.plugin('ts-checker-core').use(ForkTsCheckerPlugin, [ <ForkTsCheckerPluginOptions>{
tsconfig : chain.srcPath('core/tsconfig.json'),
ignoreDiagnostics: [
// ERROR in /home/radic/theme/node_modules/mobx/lib/types/observableset.d.ts(21,22):
//TS2420: Class 'ObservableSet<T>' incorrectly implements interface 'Set<T>'.
2420,
],
} ]);
});
chain.when(isProd, chain => {
// chain.plugin('write-file').use(require('write-file-webpack-plugin'), [ { useHashIndex: false } ]);
chain.plugin('css-extract').use(MiniCssExtractPlugin, [ {
filename : assetPath('css/[name].css?[hash]'),
chunkFilename: assetPath('css/[name].chunk.css?[chunkhash]'),
} ]);
chain.plugin('css-optimize').use(OptimizeCssAssetsPlugin, [ <OptimizeCssAssetsPlugin.Options>{
assetNameRegExp : /\.css$/g,
cssProcessor : require('cssnano'),
cssProcessorOptions: { discardComments: { removeAll: true } },
canPrint : true,
} ]);
});
chain.when(assetsByEntry, chain => chain.plugin('path').use(EntrypointPathPlugin));
//endregion
//region: Style Loaders
export function addStyleLoaders(config: Configuration) {
AntdScssThemePlugin.SCSS_THEME_PATH = chain.srcPath('core/styling/antd/theme.scss');
let antdScssLoader = AntdScssThemePlugin.themify({
loader : 'sass-loader',
options: {
scssThemePath: AntdScssThemePlugin.SCSS_THEME_PATH,
functions : { [ colorPaletteFunctionSignature ]: colorPaletteFunction },
},
});
let scssLoader = { loader: 'sass-loader', options: {} };
let antdLessLoader = AntdScssThemePlugin.themify('less-loader');
let postCssLoader = { loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('postcss-clean'), require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } };
config.module.rules.push(...[ {
test: /\.module.css$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev, modules: true, localIdentName: '[name]__[local]' } },
{ loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } },
].filter(Boolean),
}, {
test : /\.css$/,
exclude: [ /\.module.css$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev } },
isProd && postCssLoader,
].filter(Boolean),
}, {
oneOf: [
{
test: /\.(module\.scss|mscss)$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: false, modules: true, localIdentName: '[name]__[local]' } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
}, {
test : /\.scss$/,
exclude: [ /\.module\.scss$/, /\.mscss$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: true } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
} ],
}, {
test: /\.less$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev } },
isProd && postCssLoader,
{ loader: antdLessLoader.loader, options: { ...antdLessLoader.options, ...{ javascriptEnabled: true, sourceMap: isDev } } },
].filter(Boolean),
} ] as RuleSetRule[]);
config.plugins.push(new AntdScssThemePlugin(AntdScssThemePlugin.SCSS_THEME_PATH));
return config;
}
chain.onToConfig(config => addStyleLoaders(config));
//endregion
//region: Optimization
chain.optimization
.namedChunks(true)
.namedModules(true)
.splitChunks(<webpack.Options.SplitChunksOptions>{
maxInitialRequests: Infinity,
maxAsyncRequests : Infinity,
// maxSize : Infinity,
// name : true,
})
.minimize(minimize)
;
chain.optimization.minimizer('terser').use(TerserPlugin, [ <TerserPlugin.TerserPluginOptions>{
terserOptions: {
parse : { ecma: 8 },
mangle : { safari10: true },
compress: {
ecma : 5,
warnings : false,
comparisons: false,
inline : 2,
},
output : {
ecma : 5,
comments : false,
ascii_only: true,
},
},
parallel : true,
cache : true,
sourceMap : false,
} ]);
chain.when(isDev, chain => {}, chain => {});
//endregion
//region: Init
chain
.target('web')
.cache(cache)
.devtool(isDev ? 'cheap-module-source-map' : false as any)
;
chain.output
.path(chain.outPath())
.pathinfo(isDev)
.publicPath('/')
.library([ 'codex', '[name]' ] as any)
.libraryTarget('window')
.filename(assetPath('js/[name].js'))
.chunkFilename(assetPath('js/chunk.[name].js'));
chain.output.when(isDev, chain => chain,
// .sourceMapFilename('[file].map')
// .devtoolModuleFilenameTemplate((info: DevtoolModuleFilenameTemplateInfo) => {
// return 'file://' + resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// return resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// }),
);
chain.resolve
.symlinks(true)
.extensions.merge([ '.js', '.vue', '.json', '.web.ts', '.ts', '.web.tsx', '.tsx', '.styl', '.less', '.scss', '.stylus', '.css', '.mjs', '.web.js', '.json', '.web.jsx', '.jsx' ]).end()
.mainFields.merge([ 'module', 'browser', 'main' ]).end() // 'jsnext:main',
.mainFiles.merge([ 'index', 'index.ts', 'index.tsx' ]).end()
.modules.merge([ 'node_modules' ]).end()
.alias.merge({
'mobx$' : chain.srcPath('mobx.js'),
'lodash-es$' : 'lodash',
'async$' : 'neo-async',
// 'react-dom' : '@hot-loader/react-dom',
'@ant-design/icons': 'purched-antd-icons', /** @see https://github.com/ant-design/ant-design/issues/12011 */
}).end();
chain.resolveLoader
.modules.merge([ 'node_modules' ]).end()
.extensions.merge([ '.js', '.json', '.ts' ]).end();
chain.externals({});
chain.stats({
warningsFilter: /export .* was not found in/,
});
chain.node.merge({
dgram : 'empty',
fs : 'empty',
net : 'empty',
tls : 'empty',
child_process: 'empty',
// module : 'empty',
// dns : 'mock',
});
chain.performance
.hints(false)
.maxEntrypointSize(999999999)
.maxAssetSize(999999999)
.assetFilter(as => false);
chain.module.set('strictExportPresence', true);
chain.module.rule('ts').test(/\.(ts|tsx)$/);
chain.module.rule('js').test(/\.(js|mjs|jsx)$/);
chain.module.rule('vendor-js').test(/\.(js|mjs)$/).exclude.add(/@babel(?:\/|\\{1,2})runtime/);
addTsToRule(chain, 'ts', {});
addBabelToRule(chain, 'js', {
customize: require.resolve('babel-preset-react-app/webpack-overrides'),
});
addBabelToRule(chain, 'vendor-js', {
presets : [ [ require.resolve('babel-preset-react-app/dependencies'), { helpers: true } ] ],
plugins : [
...babelImportPlugins,
],
sourceMaps: false,
});
addPackage(chain, 'api', '@codex/api');
// addPluginEntry(chain, 'router', chain.srcPath('router'), 'index.tsx')
// addPluginEntry(chain, 'core', chain.srcPath('core'), '_small.tsx');
addPluginEntry(chain, 'core', chain.srcPath('core'), 'index.tsx');
addPluginEntry(chain, 'phpdoc', chain.srcPath('phpdoc'), 'index.tsx');
addPluginEntry(chain, 'comments', chain.srcPath('comments'), 'index.tsx');
chain.resolve.modules.merge([ chain.srcPath('core') ]).end();
chain.resolve.alias.merge({
'heading' : chain.srcPath('core/styling/heading.less'),
'../../theme.config$': chain.srcPath('core/styling/theme.config'),
'./core/index.less$' : chain.srcPath('core/styling/antd/core.less'),
});
//endregion
const config = chain.toConfig();
export default config;
export { chain, config };
//region: interfaces & types
declare type ForkTsCheckerPluginFormatter = (message: NormalizedMessage, useColors: boolean) => string;
interface ForkTsCheckerPluginLogger {
error(message?: any): void;
warn(message?: any): void;
info(message?: any): void;
}
interface ForkTsCheckerPluginOptions {
typescript: string;
tsconfig: string;
compilerOptions: object;
tslint: string | true;
tslintAutoFix: boolean;
watch: string | string[];
async: boolean;
ignoreDiagnostics: number[];
ignoreLints: string[];
ignoreLintWarnings: boolean;
reportFiles: string[];
colors: boolean;
logger: ForkTsCheckerPluginLogger;
formatter: 'default' | 'codeframe' | ForkTsCheckerPluginFormatter;
formatterOptions: any;
silent: boolean;
checkSyntacticErrors: boolean;
memoryLimit: number;
workers: number;
vue: boolean;
useTypescriptIncrementalApi: boolean;
measureCompilationTime: boolean;
}
//endregion
|
[ umdName ]: [ 'codex', name ],
});
|
random_line_split
|
webpack.config.ts
|
import { isAbsolute, join, resolve } from 'path';
import * as dotenv from 'dotenv';
import MiniCssExtractPlugin from 'mini-css-extract-plugin';
import webpack, { Configuration, RuleSetRule } from 'webpack';
import FriendlyErrorsPlugin, { Options as FriendlyErrorsOptions } from 'friendly-errors-webpack-plugin';
import BarPlugin, { Options as BarOptions } from 'webpackbar';
import CleanWebpackPlugin from 'clean-webpack-plugin';
import OptimizeCssAssetsPlugin from 'optimize-css-assets-webpack-plugin';
import { BundleAnalyzerPlugin } from 'webpack-bundle-analyzer';
import CopyPlugin from 'copy-webpack-plugin';
import HtmlPlugin from 'html-webpack-plugin';
import { BabelLoaderOptions, Chain } from './build/chain';
import AntdScssThemePlugin from './build/antd-scss-theme-plugin';
import { Options as TypescriptLoaderOptions } from 'ts-loader';
import tsImport from 'ts-import-plugin';
import { colorPaletteFunction, colorPaletteFunctionSignature } from './build/antdScssColorPalette';
import WebappPlugin from 'webapp-webpack-plugin';
import EntrypointPathPlugin from './build/EntrypointPathPlugin';
import TerserPlugin from 'terser-webpack-plugin';
import { IgnoreNotFoundExportPlugin, IgnoreNotFoundExportPluginOptions } from './build/out/plugins/ts-webpack-ignore-not-found-export';
import ForkTsCheckerPlugin from 'fork-ts-checker-webpack-plugin';
import { NormalizedMessage } from 'fork-ts-checker-webpack-plugin/lib/NormalizedMessage';
import Dashboard from 'webpack-dashboard/plugin';
import { DuplicatesPlugin } from 'inspectpack/plugin';
const chain = new Chain({
mode : process.env.NODE_ENV as any,
sourceDir: resolve(__dirname, 'src'),
outputDir: resolve(__dirname, process.env.NODE_ENV === 'development' ? 'dev' : 'dist'),
});
const { isDev, isProd } = chain;
const cache = true;
const assetsByEntry = isProd;
const _assetPath = assetsByEntry ? 'vendor/codex_[entrypoint]' : 'vendor';
const minimize = isProd;
const assetPath = (...parts: string[]) => join(_assetPath, ...parts);
const rootPath = (...parts: string[]) => resolve(__dirname, '..', ...parts);
const tsconfig = resolve(__dirname, 'tsconfig.webpack.json_');
const tschecker = false;
const dashboardPort = 23345;
const backendURL = process.env.BACKEND_URL || 'http://codex.local';
// defineVars are added to DefinePlugin and HtmlPlugin
const defineVars = {
DEV : isDev,
PROD : isProd,
TEST : chain.get('mode') === 'testing',
ENV : dotenv.load({ path: resolve('.env') }).parsed,
BACKEND_URL: backendURL,
};
//region: Helper Functions
const babelImportPlugins = [
[ 'import', { libraryName: 'antd', style: true }, 'import-antd' ],
[ 'import', { libraryName: 'lodash', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash' ],
[ 'import', { libraryName: 'lodash-es', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash-es' ],
[ 'import', { libraryName: 'jquery', libraryDirectory: 'src' }, 'jquery' ],
];
export function addBabelToRule(chain: Chain, ruleName: string, options: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
rule.use('babel-loader')
.loader('babel-loader')
.options(<BabelLoaderOptions>{
babelrc : false,
configFile : false,
presets : [
[ 'react-app' ],
],
plugins : [
'jsx-control-statements',
[ 'react-css-modules', {
'context' : chain.srcPath(),
'filetypes' : {
'.mscss': {
'syntax' : 'postcss-scss',
'plugins': [
'postcss-nested',
],
},
},
'handleMissingStyleName': 'warn',
'generateScopedName' : '[name]__[local]',
} ],
...babelImportPlugins,
].filter(Boolean),
cacheDirectory: cache,
compact : minimize,
...options,
} as any);
}
export function addTsToRule(chain: Chain, ruleName: string, options: Partial<TypescriptLoaderOptions> = {}, babelOptions: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
if ( !rule.has('babel-loader') )
|
rule
.use('ts-loader')
.loader('ts-loader')
.options(<Partial<TypescriptLoaderOptions>>{
transpileOnly : true,
configFile : tsconfig,
// happyPackMode : true,
getCustomTransformers: () => ({
before: [
tsImport([
{ libraryName: 'antd', style: true },
{ libraryName: 'semantic-ui-react', libraryDirectory: (importName) => Object.keys(require('./build/semantic-data').nameLocations).includes(importName) ? join('dist/es', require('./build/semantic-data').nameLocations[ importName ]) : 'dist/es' },
{ libraryName: 'neo-async', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash-es', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'jquery', libraryDirectory: 'src', camel2DashComponentName: false },
]) as any,
],
}),
...options,
} as any);
}
let assetLoader = isDev ? 'file-loader' : 'file-loader';
export function addAssetsLoaderForEntry(chain: Chain, name: string, path: string) {
let assetPath = _assetPath.replace('[entrypoint]', name);
chain.module.rule('fonts-' + name)
.test(/\.*\.(woff2?|woff|eot|ttf|otf)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/fonts/',
outputPath: assetPath + '/fonts/',
});
chain.module.rule('images-' + name)
.test(/\.*\.(png|jpe?g|gif|svg)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/img/',
outputPath: assetPath + '/img/',
});
}
export function addPluginEntry(chain: Chain, name: string, dirPath: string, entryFile: string = 'index.ts') {
let umdName = `@codex/${name}`;
chain.entry(name)
.add(chain.srcPath(`pre-path.js?entryName=${name}`))
.add(isAbsolute(entryFile) ? entryFile : join(dirPath, entryFile));
chain.externals({
...chain.get('externals') || {},
[ umdName ]: [ 'codex', name ],
});
// chain.resolve.alias.set(umdName, dirPath);
addAssetsLoaderForEntry(chain, name, dirPath);
chain.module.rule('ts').include.add(dirPath);
chain.module.rule('js').include.add(dirPath);
}
export function addHMR(chain: Chain, reactHotLoader: boolean = true) {
chain.plugin('hmr').use(webpack.HotModuleReplacementPlugin, [ {} ]);
chain.resolve.alias.set('react-dom', '@hot-loader/react-dom');
return;
const modifyOptions = (o: BabelLoaderOptions) => {
if ( reactHotLoader ) {
o.plugins.push('react-hot-loader/babel');
}
let reactCssModulesIndex = o.plugins.findIndex(plugin => Array.isArray(plugin) && plugin[ 0 ] === 'react-css-modules');
if ( reactCssModulesIndex !== - 1 ) {
o.plugins[ reactCssModulesIndex ][ 1 ].webpackHotModuleReloading = true;
}
return o;
};
chain.module.rule('js').use('babel-loader').tap(modifyOptions);
chain.module.rule('ts').use('babel-loader').tap(modifyOptions);
}
export function addAnalyzerPlugins(chain: Chain, when: boolean = true) {
chain.when(when, chain => chain.plugin('bundle-analyzer').use(BundleAnalyzerPlugin, [ <BundleAnalyzerPlugin.Options>{
analyzerMode : 'static',
openAnalyzer : false,
reportFilename: 'bundle-analyzer.html',
} ]));
}
export function addPackage(chain: Chain, name: string, umdName?: string) {
umdName = umdName || `@codex/${name}`;
chain.when(isDev, chain => {
let path = rootPath('packages', name, 'src');
chain.resolve.alias.set(umdName, path);
chain.module.rule('ts').include.add(path);
}, chain => {
chain.resolve.alias.set(umdName, rootPath('packages', name, 'es'));
});
}
export function addDashboardPlugin(chain: Chain, port: number = dashboardPort) {
chain.plugin('dashboard').use(Dashboard, [ {
        port,
} ]);
}
export function addDuplicatesPlugin(chain: Chain) {
chain.plugin('duplicates').use(DuplicatesPlugin, [ {
verbose : true,
emitErrors: false,
} ]);
}
//endregion
//region: Plugins
chain.plugin('clean').use(CleanWebpackPlugin, [
[ 'js/', 'css/', '*.hot-update.*', 'assets/', 'vendor/' ],
<CleanWebpackPlugin.Options>{ root: chain.outPath(), verbose: false },
]);
chain.plugin('define').use(webpack.DefinePlugin, [ {
'process.env': {
NODE_ENV: `"${chain.get('mode')}"`,
},
...defineVars
} ]);
chain.plugin('bar').use(BarPlugin, [ <BarOptions>{
profile : true,
compiledIn: true,
minimal : false,
} ]);
chain.plugin('loader-options').use(webpack.LoaderOptionsPlugin, [ { options: {} } ]);
chain.plugin('friendly-errors').use(FriendlyErrorsPlugin, [ <FriendlyErrorsOptions>{
compilationSuccessInfo: { messages: [ 'Build success' ], notes: [] },
onErrors : function (severity, errors) { console.error(severity, errors); },
clearConsole : false,
logLevel : true,
additionalFormatters : [],
additionalTransformers: [],
} ]);
chain.plugin('copy').use(CopyPlugin, [ [
assetsByEntry ? { from: chain.srcPath('core/assets'), to: chain.outPath('vendor/codex_core') } : { from: chain.srcPath('core/assets'), to: chain.outPath('vendor') },
].filter(Boolean) ]);
chain.plugin('html').use(HtmlPlugin, [ <HtmlPlugin.Options>{
filename : 'index.html',
template : resolve(__dirname, 'index.html'),
inject : 'head',
chunksSortMode : isDev ? 'dependency' : 'auto',
templateParameters: {
assetPath: {
core : _assetPath.replace('[entrypoint]', 'core'),
phpdoc: _assetPath.replace('[entrypoint]', 'phpdoc'),
},
...defineVars
},
} ]);
chain.plugin('favicon').use(WebappPlugin, [ {
logo : rootPath('node_modules/@fortawesome/fontawesome-free/svgs/solid/book.svg'),
cache,
prefix: assetsByEntry ? 'vendor/codex_core/img' : 'vendor/img',
inject: true,
} ]).after('html');
chain.plugin('ignore-not-found-export').use(IgnoreNotFoundExportPlugin, [ <IgnoreNotFoundExportPluginOptions>{
exportsToIgnore: [
// 'Toolbar.*Props',
// 'Layout.*Props',
'.*Props',
],
} ]);
chain.when(tschecker, chain => {
chain.plugin('ts-checker-core').use(ForkTsCheckerPlugin, [ <ForkTsCheckerPluginOptions>{
tsconfig : chain.srcPath('core/tsconfig.json'),
ignoreDiagnostics: [
// ERROR in /home/radic/theme/node_modules/mobx/lib/types/observableset.d.ts(21,22):
//TS2420: Class 'ObservableSet<T>' incorrectly implements interface 'Set<T>'.
2420,
],
} ]);
});
chain.when(isProd, chain => {
// chain.plugin('write-file').use(require('write-file-webpack-plugin'), [ { useHashIndex: false } ]);
chain.plugin('css-extract').use(MiniCssExtractPlugin, [ {
filename : assetPath('css/[name].css?[hash]'),
chunkFilename: assetPath('css/[name].chunk.css?[chunkhash]'),
} ]);
chain.plugin('css-optimize').use(OptimizeCssAssetsPlugin, [ <OptimizeCssAssetsPlugin.Options>{
assetNameRegExp : /\.css$/g,
cssProcessor : require('cssnano'),
cssProcessorOptions: { discardComments: { removeAll: true } },
canPrint : true,
} ]);
});
chain.when(assetsByEntry, chain => chain.plugin('path').use(EntrypointPathPlugin));
//endregion
//region: Style Loaders
export function addStyleLoaders(config: Configuration) {
AntdScssThemePlugin.SCSS_THEME_PATH = chain.srcPath('core/styling/antd/theme.scss');
let antdScssLoader = AntdScssThemePlugin.themify({
loader : 'sass-loader',
options: {
scssThemePath: AntdScssThemePlugin.SCSS_THEME_PATH,
functions : { [ colorPaletteFunctionSignature ]: colorPaletteFunction },
},
});
let scssLoader = { loader: 'sass-loader', options: {} };
let antdLessLoader = AntdScssThemePlugin.themify('less-loader');
let postCssLoader = { loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('postcss-clean'), require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } };
config.module.rules.push(...[ {
test: /\.module.css$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev, modules: true, localIdentName: '[name]__[local]' } },
{ loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } },
].filter(Boolean),
}, {
test : /\.css$/,
exclude: [ /\.module.css$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev } },
isProd && postCssLoader,
].filter(Boolean),
}, {
oneOf: [
{
test: /\.(module\.scss|mscss)$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: false, modules: true, localIdentName: '[name]__[local]' } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
}, {
test : /\.scss$/,
exclude: [ /\.module\.scss$/, /\.mscss$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: true } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
} ],
}, {
test: /\.less$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev } },
isProd && postCssLoader,
{ loader: antdLessLoader.loader, options: { ...antdLessLoader.options, ...{ javascriptEnabled: true, sourceMap: isDev } } },
].filter(Boolean),
} ] as RuleSetRule[]);
config.plugins.push(new AntdScssThemePlugin(AntdScssThemePlugin.SCSS_THEME_PATH));
return config;
}
chain.onToConfig(config => addStyleLoaders(config));
//endregion
//region: Optimization
chain.optimization
.namedChunks(true)
.namedModules(true)
.splitChunks(<webpack.Options.SplitChunksOptions>{
maxInitialRequests: Infinity,
maxAsyncRequests : Infinity,
// maxSize : Infinity,
// name : true,
})
.minimize(minimize)
;
chain.optimization.minimizer('terser').use(TerserPlugin, [ <TerserPlugin.TerserPluginOptions>{
terserOptions: {
parse : { ecma: 8 },
mangle : { safari10: true },
compress: {
ecma : 5,
warnings : false,
comparisons: false,
inline : 2,
},
output : {
ecma : 5,
comments : false,
ascii_only: true,
},
},
parallel : true,
cache : true,
sourceMap : false,
} ]);
chain.when(isDev, chain => {}, chain => {});
//endregion
//region: Init
chain
.target('web')
.cache(cache)
.devtool(isDev ? 'cheap-module-source-map' : false as any)
;
chain.output
.path(chain.outPath())
.pathinfo(isDev)
.publicPath('/')
.library([ 'codex', '[name]' ] as any)
.libraryTarget('window')
.filename(assetPath('js/[name].js'))
.chunkFilename(assetPath('js/chunk.[name].js'));
chain.output.when(isDev, chain => chain,
// .sourceMapFilename('[file].map')
// .devtoolModuleFilenameTemplate((info: DevtoolModuleFilenameTemplateInfo) => {
// return 'file://' + resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// return resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// }),
);
chain.resolve
.symlinks(true)
.extensions.merge([ '.js', '.vue', '.json', '.web.ts', '.ts', '.web.tsx', '.tsx', '.styl', '.less', '.scss', '.stylus', '.css', '.mjs', '.web.js', '.json', '.web.jsx', '.jsx' ]).end()
.mainFields.merge([ 'module', 'browser', 'main' ]).end() // 'jsnext:main',
.mainFiles.merge([ 'index', 'index.ts', 'index.tsx' ]).end()
.modules.merge([ 'node_modules' ]).end()
.alias.merge({
'mobx$' : chain.srcPath('mobx.js'),
'lodash-es$' : 'lodash',
'async$' : 'neo-async',
// 'react-dom' : '@hot-loader/react-dom',
'@ant-design/icons': 'purched-antd-icons', /** @see https://github.com/ant-design/ant-design/issues/12011 */
}).end();
chain.resolveLoader
.modules.merge([ 'node_modules' ]).end()
.extensions.merge([ '.js', '.json', '.ts' ]).end();
chain.externals({});
chain.stats({
warningsFilter: /export .* was not found in/,
});
chain.node.merge({
dgram : 'empty',
fs : 'empty',
net : 'empty',
tls : 'empty',
child_process: 'empty',
// module : 'empty',
// dns : 'mock',
});
chain.performance
.hints(false)
.maxEntrypointSize(999999999)
.maxAssetSize(999999999)
.assetFilter(as => false);
chain.module.set('strictExportPresence', true);
chain.module.rule('ts').test(/\.(ts|tsx)$/);
chain.module.rule('js').test(/\.(js|mjs|jsx)$/);
chain.module.rule('vendor-js').test(/\.(js|mjs)$/).exclude.add(/@babel(?:\/|\\{1,2})runtime/);
addTsToRule(chain, 'ts', {});
addBabelToRule(chain, 'js', {
customize: require.resolve('babel-preset-react-app/webpack-overrides'),
});
addBabelToRule(chain, 'vendor-js', {
presets : [ [ require.resolve('babel-preset-react-app/dependencies'), { helpers: true } ] ],
plugins : [
...babelImportPlugins,
],
sourceMaps: false,
});
addPackage(chain, 'api', '@codex/api');
// addPluginEntry(chain, 'router', chain.srcPath('router'), 'index.tsx')
// addPluginEntry(chain, 'core', chain.srcPath('core'), '_small.tsx');
addPluginEntry(chain, 'core', chain.srcPath('core'), 'index.tsx');
addPluginEntry(chain, 'phpdoc', chain.srcPath('phpdoc'), 'index.tsx');
addPluginEntry(chain, 'comments', chain.srcPath('comments'), 'index.tsx');
chain.resolve.modules.merge([ chain.srcPath('core') ]).end();
chain.resolve.alias.merge({
'heading' : chain.srcPath('core/styling/heading.less'),
'../../theme.config$': chain.srcPath('core/styling/theme.config'),
'./core/index.less$' : chain.srcPath('core/styling/antd/core.less'),
});
//endregion
const config = chain.toConfig();
export default config;
export { chain, config };
//region: interfaces & types
declare type ForkTsCheckerPluginFormatter = (message: NormalizedMessage, useColors: boolean) => string;
interface ForkTsCheckerPluginLogger {
error(message?: any): void;
warn(message?: any): void;
info(message?: any): void;
}
interface ForkTsCheckerPluginOptions {
typescript: string;
tsconfig: string;
compilerOptions: object;
tslint: string | true;
tslintAutoFix: boolean;
watch: string | string[];
async: boolean;
ignoreDiagnostics: number[];
ignoreLints: string[];
ignoreLintWarnings: boolean;
reportFiles: string[];
colors: boolean;
logger: ForkTsCheckerPluginLogger;
formatter: 'default' | 'codeframe' | ForkTsCheckerPluginFormatter;
formatterOptions: any;
silent: boolean;
checkSyntacticErrors: boolean;
memoryLimit: number;
workers: number;
vue: boolean;
useTypescriptIncrementalApi: boolean;
measureCompilationTime: boolean;
}
//endregion
|
{
addBabelToRule(chain, ruleName, babelOptions);
}
|
conditional_block
|
webpack.config.ts
|
import { isAbsolute, join, resolve } from 'path';
import * as dotenv from 'dotenv';
import MiniCssExtractPlugin from 'mini-css-extract-plugin';
import webpack, { Configuration, RuleSetRule } from 'webpack';
import FriendlyErrorsPlugin, { Options as FriendlyErrorsOptions } from 'friendly-errors-webpack-plugin';
import BarPlugin, { Options as BarOptions } from 'webpackbar';
import CleanWebpackPlugin from 'clean-webpack-plugin';
import OptimizeCssAssetsPlugin from 'optimize-css-assets-webpack-plugin';
import { BundleAnalyzerPlugin } from 'webpack-bundle-analyzer';
import CopyPlugin from 'copy-webpack-plugin';
import HtmlPlugin from 'html-webpack-plugin';
import { BabelLoaderOptions, Chain } from './build/chain';
import AntdScssThemePlugin from './build/antd-scss-theme-plugin';
import { Options as TypescriptLoaderOptions } from 'ts-loader';
import tsImport from 'ts-import-plugin';
import { colorPaletteFunction, colorPaletteFunctionSignature } from './build/antdScssColorPalette';
import WebappPlugin from 'webapp-webpack-plugin';
import EntrypointPathPlugin from './build/EntrypointPathPlugin';
import TerserPlugin from 'terser-webpack-plugin';
import { IgnoreNotFoundExportPlugin, IgnoreNotFoundExportPluginOptions } from './build/out/plugins/ts-webpack-ignore-not-found-export';
import ForkTsCheckerPlugin from 'fork-ts-checker-webpack-plugin';
import { NormalizedMessage } from 'fork-ts-checker-webpack-plugin/lib/NormalizedMessage';
import Dashboard from 'webpack-dashboard/plugin';
import { DuplicatesPlugin } from 'inspectpack/plugin';
const chain = new Chain({
mode : process.env.NODE_ENV as any,
sourceDir: resolve(__dirname, 'src'),
outputDir: resolve(__dirname, process.env.NODE_ENV === 'development' ? 'dev' : 'dist'),
});
const { isDev, isProd } = chain;
const cache = true;
const assetsByEntry = isProd;
const _assetPath = assetsByEntry ? 'vendor/codex_[entrypoint]' : 'vendor';
const minimize = isProd;
const assetPath = (...parts: string[]) => join(_assetPath, ...parts);
const rootPath = (...parts: string[]) => resolve(__dirname, '..', ...parts);
const tsconfig = resolve(__dirname, 'tsconfig.webpack.json_');
const tschecker = false;
const dashboardPort = 23345;
const backendURL = process.env.BACKEND_URL || 'http://codex.local';
// defineVars are added to DefinePlugin and HtmlPlugin
const defineVars = {
DEV : isDev,
PROD : isProd,
TEST : chain.get('mode') === 'testing',
ENV : dotenv.load({ path: resolve('.env') }).parsed,
BACKEND_URL: backendURL,
};
//region: Helper Functions
const babelImportPlugins = [
[ 'import', { libraryName: 'antd', style: true }, 'import-antd' ],
[ 'import', { libraryName: 'lodash', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash' ],
[ 'import', { libraryName: 'lodash-es', libraryDirectory: '', camel2DashComponentName: false }, 'import-lodash-es' ],
[ 'import', { libraryName: 'jquery', libraryDirectory: 'src' }, 'jquery' ],
];
export function
|
(chain: Chain, ruleName: string, options: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
rule.use('babel-loader')
.loader('babel-loader')
.options(<BabelLoaderOptions>{
babelrc : false,
configFile : false,
presets : [
[ 'react-app' ],
],
plugins : [
'jsx-control-statements',
[ 'react-css-modules', {
'context' : chain.srcPath(),
'filetypes' : {
'.mscss': {
'syntax' : 'postcss-scss',
'plugins': [
'postcss-nested',
],
},
},
'handleMissingStyleName': 'warn',
'generateScopedName' : '[name]__[local]',
} ],
...babelImportPlugins,
].filter(Boolean),
cacheDirectory: cache,
compact : minimize,
...options,
} as any);
}
export function addTsToRule(chain: Chain, ruleName: string, options: Partial<TypescriptLoaderOptions> = {}, babelOptions: BabelLoaderOptions = {}) {
let rule = chain.module.rule(ruleName);
if ( !rule.has('babel-loader') ) {
addBabelToRule(chain, ruleName, babelOptions);
}
rule
.use('ts-loader')
.loader('ts-loader')
.options(<Partial<TypescriptLoaderOptions>>{
transpileOnly : true,
configFile : tsconfig,
// happyPackMode : true,
getCustomTransformers: () => ({
before: [
tsImport([
{ libraryName: 'antd', style: true },
{ libraryName: 'semantic-ui-react', libraryDirectory: (importName) => Object.keys(require('./build/semantic-data').nameLocations).includes(importName) ? join('dist/es', require('./build/semantic-data').nameLocations[ importName ]) : 'dist/es' },
{ libraryName: 'neo-async', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'lodash-es', libraryDirectory: null, camel2DashComponentName: false },
{ libraryName: 'jquery', libraryDirectory: 'src', camel2DashComponentName: false },
]) as any,
],
}),
...options,
} as any);
}
let assetLoader = isDev ? 'file-loader' : 'file-loader';
export function addAssetsLoaderForEntry(chain: Chain, name: string, path: string) {
let assetPath = _assetPath.replace('[entrypoint]', name);
chain.module.rule('fonts-' + name)
.test(/\.*\.(woff2?|woff|eot|ttf|otf)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/fonts/',
outputPath: assetPath + '/fonts/',
});
chain.module.rule('images-' + name)
.test(/\.*\.(png|jpe?g|gif|svg)(\?.*)?$/)
.include.add(path).end()
.use(assetLoader)
.loader(assetLoader)
.options({
name : '[name].[ext]',
// publicPath: '/' + assetPath + '/img/',
outputPath: assetPath + '/img/',
});
}
export function addPluginEntry(chain: Chain, name: string, dirPath: string, entryFile: string = 'index.ts') {
let umdName = `@codex/${name}`;
chain.entry(name)
.add(chain.srcPath(`pre-path.js?entryName=${name}`))
.add(isAbsolute(entryFile) ? entryFile : join(dirPath, entryFile));
chain.externals({
...chain.get('externals') || {},
[ umdName ]: [ 'codex', name ],
});
// chain.resolve.alias.set(umdName, dirPath);
addAssetsLoaderForEntry(chain, name, dirPath);
chain.module.rule('ts').include.add(dirPath);
chain.module.rule('js').include.add(dirPath);
}
export function addHMR(chain: Chain, reactHotLoader: boolean = true) {
chain.plugin('hmr').use(webpack.HotModuleReplacementPlugin, [ {} ]);
chain.resolve.alias.set('react-dom', '@hot-loader/react-dom');
return;
const modifyOptions = (o: BabelLoaderOptions) => {
if ( reactHotLoader ) {
o.plugins.push('react-hot-loader/babel');
}
let reactCssModulesIndex = o.plugins.findIndex(plugin => Array.isArray(plugin) && plugin[ 0 ] === 'react-css-modules');
if ( reactCssModulesIndex !== - 1 ) {
o.plugins[ reactCssModulesIndex ][ 1 ].webpackHotModuleReloading = true;
}
return o;
};
chain.module.rule('js').use('babel-loader').tap(modifyOptions);
chain.module.rule('ts').use('babel-loader').tap(modifyOptions);
}
export function addAnalyzerPlugins(chain: Chain, when: boolean = true) {
chain.when(when, chain => chain.plugin('bundle-analyzer').use(BundleAnalyzerPlugin, [ <BundleAnalyzerPlugin.Options>{
analyzerMode : 'static',
openAnalyzer : false,
reportFilename: 'bundle-analyzer.html',
} ]));
}
export function addPackage(chain: Chain, name: string, umdName?: string) {
umdName = umdName || `@codex/${name}`;
chain.when(isDev, chain => {
let path = rootPath('packages', name, 'src');
chain.resolve.alias.set(umdName, path);
chain.module.rule('ts').include.add(path);
}, chain => {
chain.resolve.alias.set(umdName, rootPath('packages', name, 'es'));
});
}
export function addDashboardPlugin(chain: Chain, port: number = dashboardPort) {
chain.plugin('dashboard').use(Dashboard, [ {
        port,
} ]);
}
export function addDuplicatesPlugin(chain: Chain) {
chain.plugin('duplicates').use(DuplicatesPlugin, [ {
verbose : true,
emitErrors: false,
} ]);
}
//endregion
//region: Plugins
chain.plugin('clean').use(CleanWebpackPlugin, [
[ 'js/', 'css/', '*.hot-update.*', 'assets/', 'vendor/' ],
<CleanWebpackPlugin.Options>{ root: chain.outPath(), verbose: false },
]);
chain.plugin('define').use(webpack.DefinePlugin, [ {
'process.env': {
NODE_ENV: `"${chain.get('mode')}"`,
},
...defineVars
} ]);
chain.plugin('bar').use(BarPlugin, [ <BarOptions>{
profile : true,
compiledIn: true,
minimal : false,
} ]);
chain.plugin('loader-options').use(webpack.LoaderOptionsPlugin, [ { options: {} } ]);
chain.plugin('friendly-errors').use(FriendlyErrorsPlugin, [ <FriendlyErrorsOptions>{
compilationSuccessInfo: { messages: [ 'Build success' ], notes: [] },
onErrors : function (severity, errors) { console.error(severity, errors); },
clearConsole : false,
logLevel : true,
additionalFormatters : [],
additionalTransformers: [],
} ]);
chain.plugin('copy').use(CopyPlugin, [ [
assetsByEntry ? { from: chain.srcPath('core/assets'), to: chain.outPath('vendor/codex_core') } : { from: chain.srcPath('core/assets'), to: chain.outPath('vendor') },
].filter(Boolean) ]);
chain.plugin('html').use(HtmlPlugin, [ <HtmlPlugin.Options>{
filename : 'index.html',
template : resolve(__dirname, 'index.html'),
inject : 'head',
chunksSortMode : isDev ? 'dependency' : 'auto',
templateParameters: {
assetPath: {
core : _assetPath.replace('[entrypoint]', 'core'),
phpdoc: _assetPath.replace('[entrypoint]', 'phpdoc'),
},
...defineVars
},
} ]);
chain.plugin('favicon').use(WebappPlugin, [ {
logo : rootPath('node_modules/@fortawesome/fontawesome-free/svgs/solid/book.svg'),
cache,
prefix: assetsByEntry ? 'vendor/codex_core/img' : 'vendor/img',
inject: true,
} ]).after('html');
chain.plugin('ignore-not-found-export').use(IgnoreNotFoundExportPlugin, [ <IgnoreNotFoundExportPluginOptions>{
exportsToIgnore: [
// 'Toolbar.*Props',
// 'Layout.*Props',
'.*Props',
],
} ]);
chain.when(tschecker, chain => {
chain.plugin('ts-checker-core').use(ForkTsCheckerPlugin, [ <ForkTsCheckerPluginOptions>{
tsconfig : chain.srcPath('core/tsconfig.json'),
ignoreDiagnostics: [
// ERROR in /home/radic/theme/node_modules/mobx/lib/types/observableset.d.ts(21,22):
//TS2420: Class 'ObservableSet<T>' incorrectly implements interface 'Set<T>'.
2420,
],
} ]);
});
chain.when(isProd, chain => {
// chain.plugin('write-file').use(require('write-file-webpack-plugin'), [ { useHashIndex: false } ]);
chain.plugin('css-extract').use(MiniCssExtractPlugin, [ {
filename : assetPath('css/[name].css?[hash]'),
chunkFilename: assetPath('css/[name].chunk.css?[chunkhash]'),
} ]);
chain.plugin('css-optimize').use(OptimizeCssAssetsPlugin, [ <OptimizeCssAssetsPlugin.Options>{
assetNameRegExp : /\.css$/g,
cssProcessor : require('cssnano'),
cssProcessorOptions: { discardComments: { removeAll: true } },
canPrint : true,
} ]);
});
chain.when(assetsByEntry, chain => chain.plugin('path').use(EntrypointPathPlugin));
//endregion
//region: Style Loaders
export function addStyleLoaders(config: Configuration) {
AntdScssThemePlugin.SCSS_THEME_PATH = chain.srcPath('core/styling/antd/theme.scss');
let antdScssLoader = AntdScssThemePlugin.themify({
loader : 'sass-loader',
options: {
scssThemePath: AntdScssThemePlugin.SCSS_THEME_PATH,
functions : { [ colorPaletteFunctionSignature ]: colorPaletteFunction },
},
});
let scssLoader = { loader: 'sass-loader', options: {} };
let antdLessLoader = AntdScssThemePlugin.themify('less-loader');
let postCssLoader = { loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('postcss-clean'), require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } };
config.module.rules.push(...[ {
test: /\.module.css$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev, modules: true, localIdentName: '[name]__[local]' } },
{ loader: 'postcss-loader', options: { sourceMap: isDev, plugins: [ require('autoprefixer'), require('cssnext'), require('postcss-nested') ] } },
].filter(Boolean),
}, {
test : /\.css$/,
exclude: [ /\.module.css$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 1, sourceMap: isDev } },
isProd && postCssLoader,
].filter(Boolean),
}, {
oneOf: [
{
test: /\.(module\.scss|mscss)$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: false, modules: true, localIdentName: '[name]__[local]' } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
}, {
test : /\.scss$/,
exclude: [ /\.module\.scss$/, /\.mscss$/ ],
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev, camelCase: true } },
isProd && postCssLoader,
antdScssLoader,
].filter(Boolean),
} ],
}, {
test: /\.less$/,
use : [
isDev ? { loader: 'style-loader', options: { sourceMap: true } } : MiniCssExtractPlugin.loader,
{ loader: 'css-loader', options: { importLoaders: 2, sourceMap: isDev } },
isProd && postCssLoader,
{ loader: antdLessLoader.loader, options: { ...antdLessLoader.options, ...{ javascriptEnabled: true, sourceMap: isDev } } },
].filter(Boolean),
} ] as RuleSetRule[]);
config.plugins.push(new AntdScssThemePlugin(AntdScssThemePlugin.SCSS_THEME_PATH));
return config;
}
chain.onToConfig(config => addStyleLoaders(config));
//endregion
//region: Optimization
chain.optimization
.namedChunks(true)
.namedModules(true)
.splitChunks(<webpack.Options.SplitChunksOptions>{
maxInitialRequests: Infinity,
maxAsyncRequests : Infinity,
// maxSize : Infinity,
// name : true,
})
.minimize(minimize)
;
chain.optimization.minimizer('terser').use(TerserPlugin, [ <TerserPlugin.TerserPluginOptions>{
terserOptions: {
parse : { ecma: 8 },
mangle : { safari10: true },
compress: {
ecma : 5,
warnings : false,
comparisons: false,
inline : 2,
},
output : {
ecma : 5,
comments : false,
ascii_only: true,
},
},
parallel : true,
cache : true,
sourceMap : false,
} ]);
chain.when(isDev, chain => {}, chain => {});
//endregion
//region: Init
chain
.target('web')
.cache(cache)
.devtool(isDev ? 'cheap-module-source-map' : false as any)
;
chain.output
.path(chain.outPath())
.pathinfo(isDev)
.publicPath('/')
.library([ 'codex', '[name]' ] as any)
.libraryTarget('window')
.filename(assetPath('js/[name].js'))
.chunkFilename(assetPath('js/chunk.[name].js'));
chain.output.when(isDev, chain => chain,
// .sourceMapFilename('[file].map')
// .devtoolModuleFilenameTemplate((info: DevtoolModuleFilenameTemplateInfo) => {
// return 'file://' + resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// return resolve(info.absoluteResourcePath.replace(/\\/g, '/'));
// }),
);
chain.resolve
.symlinks(true)
.extensions.merge([ '.js', '.vue', '.json', '.web.ts', '.ts', '.web.tsx', '.tsx', '.styl', '.less', '.scss', '.stylus', '.css', '.mjs', '.web.js', '.json', '.web.jsx', '.jsx' ]).end()
.mainFields.merge([ 'module', 'browser', 'main' ]).end() // 'jsnext:main',
.mainFiles.merge([ 'index', 'index.ts', 'index.tsx' ]).end()
.modules.merge([ 'node_modules' ]).end()
.alias.merge({
'mobx$' : chain.srcPath('mobx.js'),
'lodash-es$' : 'lodash',
'async$' : 'neo-async',
// 'react-dom' : '@hot-loader/react-dom',
'@ant-design/icons': 'purched-antd-icons', /** @see https://github.com/ant-design/ant-design/issues/12011 */
}).end();
chain.resolveLoader
.modules.merge([ 'node_modules' ]).end()
.extensions.merge([ '.js', '.json', '.ts' ]).end();
chain.externals({});
chain.stats({
warningsFilter: /export .* was not found in/,
});
chain.node.merge({
dgram : 'empty',
fs : 'empty',
net : 'empty',
tls : 'empty',
child_process: 'empty',
// module : 'empty',
// dns : 'mock',
});
chain.performance
.hints(false)
.maxEntrypointSize(999999999)
.maxAssetSize(999999999)
.assetFilter(as => false);
chain.module.set('strictExportPresence', true);
chain.module.rule('ts').test(/\.(ts|tsx)$/);
chain.module.rule('js').test(/\.(js|mjs|jsx)$/);
chain.module.rule('vendor-js').test(/\.(js|mjs)$/).exclude.add(/@babel(?:\/|\\{1,2})runtime/);
addTsToRule(chain, 'ts', {});
addBabelToRule(chain, 'js', {
customize: require.resolve('babel-preset-react-app/webpack-overrides'),
});
addBabelToRule(chain, 'vendor-js', {
presets : [ [ require.resolve('babel-preset-react-app/dependencies'), { helpers: true } ] ],
plugins : [
...babelImportPlugins,
],
sourceMaps: false,
});
addPackage(chain, 'api', '@codex/api');
// addPluginEntry(chain, 'router', chain.srcPath('router'), 'index.tsx')
// addPluginEntry(chain, 'core', chain.srcPath('core'), '_small.tsx');
addPluginEntry(chain, 'core', chain.srcPath('core'), 'index.tsx');
addPluginEntry(chain, 'phpdoc', chain.srcPath('phpdoc'), 'index.tsx');
addPluginEntry(chain, 'comments', chain.srcPath('comments'), 'index.tsx');
chain.resolve.modules.merge([ chain.srcPath('core') ]).end();
chain.resolve.alias.merge({
'heading' : chain.srcPath('core/styling/heading.less'),
'../../theme.config$': chain.srcPath('core/styling/theme.config'),
'./core/index.less$' : chain.srcPath('core/styling/antd/core.less'),
});
//endregion
const config = chain.toConfig();
export default config;
export { chain, config };
//region: interfaces & types
declare type ForkTsCheckerPluginFormatter = (message: NormalizedMessage, useColors: boolean) => string;
interface ForkTsCheckerPluginLogger {
error(message?: any): void;
warn(message?: any): void;
info(message?: any): void;
}
interface ForkTsCheckerPluginOptions {
typescript: string;
tsconfig: string;
compilerOptions: object;
tslint: string | true;
tslintAutoFix: boolean;
watch: string | string[];
async: boolean;
ignoreDiagnostics: number[];
ignoreLints: string[];
ignoreLintWarnings: boolean;
reportFiles: string[];
colors: boolean;
logger: ForkTsCheckerPluginLogger;
formatter: 'default' | 'codeframe' | ForkTsCheckerPluginFormatter;
formatterOptions: any;
silent: boolean;
checkSyntacticErrors: boolean;
memoryLimit: number;
workers: number;
vue: boolean;
useTypescriptIncrementalApi: boolean;
measureCompilationTime: boolean;
}
//endregion
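//region: Usage sketch (added for illustration)
// A minimal consumption sketch, not part of the original build setup: the exported
// `config` can be handed straight to webpack's Node API. The import path
// './webpack.config' is an assumption made for this example.
//
// import webpack from 'webpack';
// import { config } from './webpack.config';
//
// const compiler = webpack(config);
// compiler.run((err, stats) => {
//     if (err) { console.error(err); return; }
//     console.log(stats.toString({ colors: true }));
// });
//endregion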
|
addBabelToRule
|
identifier_name
|
gaes_stub.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
|
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Stub API calls used in gaes.py for testing.
Instead of doing real API calls, we return test JSON data.
"""
import json
from gcpdiag.queries import apis_stub
#pylint: disable=unused-argument
#pylint: disable=invalid-name
class AppEngineStandardApiStub(apis_stub.ApiStub):
"""Mock object to simulate App Engine Standard api calls."""
def __init__(self, mock_state='init'):
self.mock_state = mock_state
def apps(self):
return self
def services(self):
return AppEngineStandardApiStub('services')
def versions(self):
return AppEngineStandardApiStub('versions')
def list(self, appsId='appsId', servicesId='servicesId'):
self.json_dir = apis_stub.get_json_dir(appsId)
return self
def execute(self, num_retries=0):
if self.mock_state == 'services':
with open(self.json_dir / 'appengine_services.json',
encoding='utf-8') as json_file:
return json.load(json_file)
else:
with open(self.json_dir / 'versions.json', encoding='utf-8') as json_file:
return json.load(json_file)
|
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
random_line_split
|
gaes_stub.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Stub API calls used in gaes.py for testing.
Instead of doing real API calls, we return test JSON data.
"""
import json
from gcpdiag.queries import apis_stub
#pylint: disable=unused-argument
#pylint: disable=invalid-name
class AppEngineStandardApiStub(apis_stub.ApiStub):
"""Mock object to simulate App Engine Standard api calls."""
def __init__(self, mock_state='init'):
self.mock_state = mock_state
def apps(self):
return self
def services(self):
return AppEngineStandardApiStub('services')
def versions(self):
return AppEngineStandardApiStub('versions')
def list(self, appsId='appsId', servicesId='servicesId'):
self.json_dir = apis_stub.get_json_dir(appsId)
return self
def execute(self, num_retries=0):
if self.mock_state == 'services':
|
else:
with open(self.json_dir / 'versions.json', encoding='utf-8') as json_file:
return json.load(json_file)
|
with open(self.json_dir / 'appengine_services.json',
encoding='utf-8') as json_file:
return json.load(json_file)
|
conditional_block
|
gaes_stub.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Stub API calls used in gaes.py for testing.
Instead of doing real API calls, we return test JSON data.
"""
import json
from gcpdiag.queries import apis_stub
#pylint: disable=unused-argument
#pylint: disable=invalid-name
class AppEngineStandardApiStub(apis_stub.ApiStub):
"""Mock object to simulate App Engine Standard api calls."""
def __init__(self, mock_state='init'):
self.mock_state = mock_state
def apps(self):
return self
def services(self):
return AppEngineStandardApiStub('services')
def
|
(self):
return AppEngineStandardApiStub('versions')
def list(self, appsId='appsId', servicesId='servicesId'):
self.json_dir = apis_stub.get_json_dir(appsId)
return self
def execute(self, num_retries=0):
if self.mock_state == 'services':
with open(self.json_dir / 'appengine_services.json',
encoding='utf-8') as json_file:
return json.load(json_file)
else:
with open(self.json_dir / 'versions.json', encoding='utf-8') as json_file:
return json.load(json_file)
|
versions
|
identifier_name
|
gaes_stub.py
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Stub API calls used in gaes.py for testing.
Instead of doing real API calls, we return test JSON data.
"""
import json
from gcpdiag.queries import apis_stub
#pylint: disable=unused-argument
#pylint: disable=invalid-name
class AppEngineStandardApiStub(apis_stub.ApiStub):
"""Mock object to simulate App Engine Standard api calls."""
def __init__(self, mock_state='init'):
self.mock_state = mock_state
def apps(self):
return self
def services(self):
return AppEngineStandardApiStub('services')
def versions(self):
return AppEngineStandardApiStub('versions')
def list(self, appsId='appsId', servicesId='servicesId'):
|
def execute(self, num_retries=0):
if self.mock_state == 'services':
with open(self.json_dir / 'appengine_services.json',
encoding='utf-8') as json_file:
return json.load(json_file)
else:
with open(self.json_dir / 'versions.json', encoding='utf-8') as json_file:
return json.load(json_file)
|
self.json_dir = apis_stub.get_json_dir(appsId)
return self
|
identifier_body
|
fuel-savings-calculator.ts
|
import {milesDrivenTimeframes} from '../reducers/fuel-savings';
import { roundNumber } from './math-helper';
import NumberFormatter from './number-formatter';
export interface ISavings {
annual: string|number;
monthly: string|number;
threeYear: string|number;
}
export interface ISettings { // TODO: Better interface name
// TODO: Get the right types here.
milesDriven?: number;
milesDrivenTimeframe?: string;
tradePpg?: number;
tradeMpg?: number;
newPpg?: number;
newMpg?: number;
}
function calculateMonthlyCost(milesDrivenPerMonth: number, ppg: number, mpg: number): number {
const gallonsUsedPerMonth = milesDrivenPerMonth / mpg;
return gallonsUsedPerMonth * ppg;
};
function calculateMilesDrivenPerMonth(milesDriven: number, milesDrivenTimeframe: milesDrivenTimeframes): number
|
function calculateSavings(settings): ISavings {
const monthlySavings = this.calculateSavingsPerMonth(settings);
return {
annual: NumberFormatter.getCurrencyFormattedNumber(monthlySavings * 12),
monthly: NumberFormatter.getCurrencyFormattedNumber(monthlySavings),
threeYear: NumberFormatter.getCurrencyFormattedNumber(monthlySavings * 12 * 3)
};
}
function calculateSavingsPerMonth(settings: ISettings): number {
if (!settings.milesDriven) {
return 0;
}
const milesDrivenPerMonth = this.calculateMilesDrivenPerMonth(settings.milesDriven, settings.milesDrivenTimeframe);
const tradeFuelCostPerMonth = calculateMonthlyCost(milesDrivenPerMonth, settings.tradePpg, settings.tradeMpg);
const newFuelCostPerMonth = calculateMonthlyCost(milesDrivenPerMonth, settings.newPpg, settings.newMpg);
const savingsPerMonth = tradeFuelCostPerMonth - newFuelCostPerMonth;
return roundNumber(savingsPerMonth, 2);
}
function necessaryDataIsProvidedToCalculateSavings(settings: ISettings): boolean {
return settings.newMpg > 0
&& settings.tradeMpg > 0
&& settings.newPpg > 0
&& settings.tradePpg > 0
&& settings.milesDriven > 0;
}
const fuelSavingsCalculator = {
calculateMilesDrivenPerMonth,
calculateSavings,
calculateSavingsPerMonth,
necessaryDataIsProvidedToCalculateSavings
};
export default fuelSavingsCalculator;
|
{
const monthsPerYear = 12;
const weeksPerYear = 52;
switch (milesDrivenTimeframe) {
case 'week':
return (milesDriven * weeksPerYear) / monthsPerYear;
case 'month':
return milesDriven;
case 'year':
return milesDriven / monthsPerYear;
default:
      throw new Error('Unknown milesDrivenTimeframe passed: ' + milesDrivenTimeframe);
}
}
|
identifier_body
|
fuel-savings-calculator.ts
|
import {milesDrivenTimeframes} from '../reducers/fuel-savings';
import { roundNumber } from './math-helper';
import NumberFormatter from './number-formatter';
export interface ISavings {
annual: string|number;
monthly: string|number;
threeYear: string|number;
}
export interface ISettings { // TODO: Better interface name
// TODO: Get the right types here.
milesDriven?: number;
milesDrivenTimeframe?: string;
tradePpg?: number;
tradeMpg?: number;
newPpg?: number;
newMpg?: number;
}
function calculateMonthlyCost(milesDrivenPerMonth: number, ppg: number, mpg: number): number {
const gallonsUsedPerMonth = milesDrivenPerMonth / mpg;
return gallonsUsedPerMonth * ppg;
};
function calculateMilesDrivenPerMonth(milesDriven: number, milesDrivenTimeframe: milesDrivenTimeframes): number {
const monthsPerYear = 12;
const weeksPerYear = 52;
switch (milesDrivenTimeframe) {
case 'week':
return (milesDriven * weeksPerYear) / monthsPerYear;
case 'month':
return milesDriven;
case 'year':
return milesDriven / monthsPerYear;
default:
      throw new Error('Unknown milesDrivenTimeframe passed: ' + milesDrivenTimeframe);
}
}
function calculateSavings(settings): ISavings {
const monthlySavings = this.calculateSavingsPerMonth(settings);
return {
annual: NumberFormatter.getCurrencyFormattedNumber(monthlySavings * 12),
monthly: NumberFormatter.getCurrencyFormattedNumber(monthlySavings),
threeYear: NumberFormatter.getCurrencyFormattedNumber(monthlySavings * 12 * 3)
};
}
function calculateSavingsPerMonth(settings: ISettings): number {
if (!settings.milesDriven) {
return 0;
}
const milesDrivenPerMonth = this.calculateMilesDrivenPerMonth(settings.milesDriven, settings.milesDrivenTimeframe);
const tradeFuelCostPerMonth = calculateMonthlyCost(milesDrivenPerMonth, settings.tradePpg, settings.tradeMpg);
const newFuelCostPerMonth = calculateMonthlyCost(milesDrivenPerMonth, settings.newPpg, settings.newMpg);
const savingsPerMonth = tradeFuelCostPerMonth - newFuelCostPerMonth;
return roundNumber(savingsPerMonth, 2);
}
function necessaryDataIsProvidedToCalculateSavings(settings: ISettings): boolean {
return settings.newMpg > 0
&& settings.tradeMpg > 0
&& settings.newPpg > 0
&& settings.tradePpg > 0
&& settings.milesDriven > 0;
}
const fuelSavingsCalculator = {
calculateMilesDrivenPerMonth,
calculateSavings,
|
necessaryDataIsProvidedToCalculateSavings
};
export default fuelSavingsCalculator;
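// Usage sketch (illustrative only; the figures below are invented): because
// calculateSavings and calculateSavingsPerMonth reach their sibling functions via
// `this`, they should be called as methods of the exported object. The import path
// is an assumption.
//
// import fuelSavingsCalculator, { ISettings } from './fuel-savings-calculator';
//
// const settings: ISettings = {
//   milesDriven: 200,
//   milesDrivenTimeframe: 'week',
//   tradePpg: 3.75, tradeMpg: 24,
//   newPpg: 3.75, newMpg: 40,
// };
//
// if (fuelSavingsCalculator.necessaryDataIsProvidedToCalculateSavings(settings)) {
//   const savings = fuelSavingsCalculator.calculateSavings(settings);
//   console.log(savings.monthly, savings.annual, savings.threeYear);
// }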
|
calculateSavingsPerMonth,
|
random_line_split
|
fuel-savings-calculator.ts
|
import {milesDrivenTimeframes} from '../reducers/fuel-savings';
import { roundNumber } from './math-helper';
import NumberFormatter from './number-formatter';
export interface ISavings {
annual: string|number;
monthly: string|number;
threeYear: string|number;
}
export interface ISettings { // TODO: Better interface name
// TODO: Get the right types here.
milesDriven?: number;
milesDrivenTimeframe?: string;
tradePpg?: number;
tradeMpg?: number;
newPpg?: number;
newMpg?: number;
}
function calculateMonthlyCost(milesDrivenPerMonth: number, ppg: number, mpg: number): number {
const gallonsUsedPerMonth = milesDrivenPerMonth / mpg;
return gallonsUsedPerMonth * ppg;
};
function
|
(milesDriven: number, milesDrivenTimeframe: milesDrivenTimeframes): number {
const monthsPerYear = 12;
const weeksPerYear = 52;
switch (milesDrivenTimeframe) {
case 'week':
return (milesDriven * weeksPerYear) / monthsPerYear;
case 'month':
return milesDriven;
case 'year':
return milesDriven / monthsPerYear;
default:
      throw new Error('Unknown milesDrivenTimeframe passed: ' + milesDrivenTimeframe);
}
}
function calculateSavings(settings): ISavings {
const monthlySavings = this.calculateSavingsPerMonth(settings);
return {
annual: NumberFormatter.getCurrencyFormattedNumber(monthlySavings * 12),
monthly: NumberFormatter.getCurrencyFormattedNumber(monthlySavings),
threeYear: NumberFormatter.getCurrencyFormattedNumber(monthlySavings * 12 * 3)
};
}
function calculateSavingsPerMonth(settings: ISettings): number {
if (!settings.milesDriven) {
return 0;
}
const milesDrivenPerMonth = this.calculateMilesDrivenPerMonth(settings.milesDriven, settings.milesDrivenTimeframe);
const tradeFuelCostPerMonth = calculateMonthlyCost(milesDrivenPerMonth, settings.tradePpg, settings.tradeMpg);
const newFuelCostPerMonth = calculateMonthlyCost(milesDrivenPerMonth, settings.newPpg, settings.newMpg);
const savingsPerMonth = tradeFuelCostPerMonth - newFuelCostPerMonth;
return roundNumber(savingsPerMonth, 2);
}
function necessaryDataIsProvidedToCalculateSavings(settings: ISettings): boolean {
return settings.newMpg > 0
&& settings.tradeMpg > 0
&& settings.newPpg > 0
&& settings.tradePpg > 0
&& settings.milesDriven > 0;
}
const fuelSavingsCalculator = {
calculateMilesDrivenPerMonth,
calculateSavings,
calculateSavingsPerMonth,
necessaryDataIsProvidedToCalculateSavings
};
export default fuelSavingsCalculator;
|
calculateMilesDrivenPerMonth
|
identifier_name
|
templates.js
|
angular.module('templateStore.templates',['ngRoute'])
.config(['$routeProvider', function($routeProvider){
$routeProvider.
when('/templates', {
templateUrl: 'templates/templates.html',
controller: 'TemplatesCtrl'
}).
when('/templates/:templateId', {
templateUrl: 'templates/template-details.html',
controller: 'TemplateDetailsCtrl'
})
}])
.controller('TemplatesCtrl', ['$scope', '$http', function($scope, $http){
$http.get('json/templates.json').success(function(data){
$scope.templates = data;
});
}])
.controller('TemplateDetailsCtrl', ['$scope', '$routeParams', '$http', '$filter', function($scope, $routeParams, $http, $filter){
var templateId = $routeParams.templateId;
$http.get('json/templates.json').success(function(data){
$scope.template = $filter('filter')(data, function(d){
return d.id == templateId;
})[0];
$scope.mainImage = $scope.template.images[0].name;
|
}]);
|
});
$scope.setImage = function(image){
$scope.mainImage = image.name;
}
|
random_line_split
|
showMeMore.js
|
jQuery.fn.showMeMore = function (options) {
var options = $.extend({
current: 4, // number to be displayed at start
count: 4, // how many show in one click
fadeSpeed: 300, // animation speed
showButton: '', // show button (false / string)
hideButton: '', // hide button
showButtonText: 'showButton', //text at the showButton
        hideButtonText: 'hideButton', //text at the hideButton
enableHide: false, // allow to hide (true / false)
        generateBtn: true,// auto-generate buttons if they are not added by default
list: 'li' //tile elements
}, options);
var make = function () {
var showButton = $(options.showButton),
hideButton = $(options.hideButton),
enableHide = options.enableHide,
count = options.count,
current = options.current,
fadeSpeed = options.fadeSpeed,
list = $(this).find(options.list),//find all 'list' elements
quantity = list.length;//list elements count
//add SHOW button if it is not installed by the user
if (options.generateBtn && options.showButton == '') {
$(this).append('<button class="showButton">' + options.showButtonText + '</button>');
showButton = $(this).find('.showButton');
}
//add HIDE button if it is not installed by the user and if enableHide is true
if (options.generateBtn && enableHide && options.showButton == '') {
$(this).append('<button class="hideButton">' + options.hideButtonText + '</button>');
hideButton = $(this).find('.hideButton');
}
list.hide();//hide all elements
hideButton.hide()//hide "hideButton"
if (quantity <= current) {
showButton.hide();
}
showItem(0);//show first elements
function switchButtons() {
if (enableHide == false) {
showButton.hide();
} else {
showButton.hide();
hideButton.show();
}
}
//this function show next elements
function showItem(time) {
for (var i = 0; i < current; i++) {
if ($(list[i]).is(':hidden')) {
$(list[i]).fadeIn(time);
}
}
}
//this function hide all elements
function hideAll(time) {
for (var i = current; i < quantity; i++) {
|
}
}
showButton.click(function (event) {
event.preventDefault();
current += count;
showItem(fadeSpeed);
if (current >= quantity) {
switchButtons();
}
});
hideButton.click(function (event) {
event.preventDefault();
current = options.current;
hideAll(fadeSpeed);
hideButton.hide();
showButton.show();
});
};
return this.each(make);
};
|
$(list[i]).fadeOut(time);
|
random_line_split
|
showMeMore.js
|
jQuery.fn.showMeMore = function (options) {
var options = $.extend({
current: 4, // number to be displayed at start
count: 4, // how many show in one click
fadeSpeed: 300, // animation speed
showButton: '', // show button (false / string)
hideButton: '', // hide button
showButtonText: 'showButton', //text at the showButton
        hideButtonText: 'hideButton', //text at the hideButton
enableHide: false, // allow to hide (true / false)
        generateBtn: true,// auto-generate buttons if they are not added by default
list: 'li' //tile elements
}, options);
var make = function () {
var showButton = $(options.showButton),
hideButton = $(options.hideButton),
enableHide = options.enableHide,
count = options.count,
current = options.current,
fadeSpeed = options.fadeSpeed,
list = $(this).find(options.list),//find all 'list' elements
quantity = list.length;//list elements count
//add SHOW button if it is not installed by the user
if (options.generateBtn && options.showButton == '') {
$(this).append('<button class="showButton">' + options.showButtonText + '</button>');
showButton = $(this).find('.showButton');
}
//add HIDE button if it is not installed by the user and if enableHide is true
if (options.generateBtn && enableHide && options.showButton == '')
|
list.hide();//hide all elements
hideButton.hide()//hide "hideButton"
if (quantity <= current) {
showButton.hide();
}
showItem(0);//show first elements
function switchButtons() {
if (enableHide == false) {
showButton.hide();
} else {
showButton.hide();
hideButton.show();
}
}
//this function show next elements
function showItem(time) {
for (var i = 0; i < current; i++) {
if ($(list[i]).is(':hidden')) {
$(list[i]).fadeIn(time);
}
}
}
//this function hide all elements
function hideAll(time) {
for (var i = current; i < quantity; i++) {
$(list[i]).fadeOut(time);
}
}
showButton.click(function (event) {
event.preventDefault();
current += count;
showItem(fadeSpeed);
if (current >= quantity) {
switchButtons();
}
});
hideButton.click(function (event) {
event.preventDefault();
current = options.current;
hideAll(fadeSpeed);
hideButton.hide();
showButton.show();
});
};
return this.each(make);
};
|
{
$(this).append('<button class="hideButton">' + options.hideButtonText + '</button>');
hideButton = $(this).find('.hideButton');
}
|
conditional_block
|
showMeMore.js
|
jQuery.fn.showMeMore = function (options) {
var options = $.extend({
current: 4, // number to be displayed at start
count: 4, // how many show in one click
fadeSpeed: 300, // animation speed
showButton: '', // show button (false / string)
hideButton: '', // hide button
showButtonText: 'showButton', //text at the showButton
        hideButtonText: 'hideButton', //text at the hideButton
enableHide: false, // allow to hide (true / false)
        generateBtn: true,// auto-generate buttons if they are not added by default
list: 'li' //tile elements
}, options);
var make = function () {
var showButton = $(options.showButton),
hideButton = $(options.hideButton),
enableHide = options.enableHide,
count = options.count,
current = options.current,
fadeSpeed = options.fadeSpeed,
list = $(this).find(options.list),//find all 'list' elements
quantity = list.length;//list elements count
//add SHOW button if it is not installed by the user
if (options.generateBtn && options.showButton == '') {
$(this).append('<button class="showButton">' + options.showButtonText + '</button>');
showButton = $(this).find('.showButton');
}
//add HIDE button if it is not installed by the user and if enableHide is true
if (options.generateBtn && enableHide && options.showButton == '') {
$(this).append('<button class="hideButton">' + options.hideButtonText + '</button>');
hideButton = $(this).find('.hideButton');
}
list.hide();//hide all elements
hideButton.hide()//hide "hideButton"
if (quantity <= current) {
showButton.hide();
}
showItem(0);//show first elements
function switchButtons() {
if (enableHide == false) {
showButton.hide();
} else {
showButton.hide();
hideButton.show();
}
}
//this function show next elements
function showItem(time)
|
//this function hide all elements
function hideAll(time) {
for (var i = current; i < quantity; i++) {
$(list[i]).fadeOut(time);
}
}
showButton.click(function (event) {
event.preventDefault();
current += count;
showItem(fadeSpeed);
if (current >= quantity) {
switchButtons();
}
});
hideButton.click(function (event) {
event.preventDefault();
current = options.current;
hideAll(fadeSpeed);
hideButton.hide();
showButton.show();
});
};
return this.each(make);
};
|
{
for (var i = 0; i < current; i++) {
if ($(list[i]).is(':hidden')) {
$(list[i]).fadeIn(time);
}
}
}
|
identifier_body
|
showMeMore.js
|
jQuery.fn.showMeMore = function (options) {
var options = $.extend({
current: 4, // number to be displayed at start
count: 4, // how many show in one click
fadeSpeed: 300, // animation speed
showButton: '', // show button (false / string)
hideButton: '', // hide button
showButtonText: 'showButton', //text at the showButton
        hideButtonText: 'hideButton', //text at the hideButton
enableHide: false, // allow to hide (true / false)
        generateBtn: true,// auto-generate buttons if they are not added by default
list: 'li' //tile elements
}, options);
var make = function () {
var showButton = $(options.showButton),
hideButton = $(options.hideButton),
enableHide = options.enableHide,
count = options.count,
current = options.current,
fadeSpeed = options.fadeSpeed,
list = $(this).find(options.list),//find all 'list' elements
quantity = list.length;//list elements count
//add SHOW button if it is not installed by the user
if (options.generateBtn && options.showButton == '') {
$(this).append('<button class="showButton">' + options.showButtonText + '</button>');
showButton = $(this).find('.showButton');
}
//add HIDE button if it is not installed by the user and if enableHide is true
if (options.generateBtn && enableHide && options.showButton == '') {
$(this).append('<button class="hideButton">' + options.hideButtonText + '</button>');
hideButton = $(this).find('.hideButton');
}
list.hide();//hide all elements
hideButton.hide()//hide "hideButton"
if (quantity <= current) {
showButton.hide();
}
showItem(0);//show first elements
function switchButtons() {
if (enableHide == false) {
showButton.hide();
} else {
showButton.hide();
hideButton.show();
}
}
//this function show next elements
function showItem(time) {
for (var i = 0; i < current; i++) {
if ($(list[i]).is(':hidden')) {
$(list[i]).fadeIn(time);
}
}
}
//this function hide all elements
function
|
(time) {
for (var i = current; i < quantity; i++) {
$(list[i]).fadeOut(time);
}
}
showButton.click(function (event) {
event.preventDefault();
current += count;
showItem(fadeSpeed);
if (current >= quantity) {
switchButtons();
}
});
hideButton.click(function (event) {
event.preventDefault();
current = options.current;
hideAll(fadeSpeed);
hideButton.hide();
showButton.show();
});
};
return this.each(make);
};
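// Usage sketch (illustrative; the selector and option values are invented):
//
// $('.gallery').showMeMore({
//     current: 8,        // tiles visible on load
//     count: 4,          // tiles revealed per click
//     enableHide: true,  // also render a generated hide button
//     list: 'li',
// });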
|
hideAll
|
identifier_name
|
issue-8498.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main()
|
{
match &[(box 5,box 7)] {
ps => {
let (ref y, _) = ps[0];
assert!(**y == 5);
}
}
match Some(&[(box 5,)]) {
Some(ps) => {
let (ref y,) = ps[0];
assert!(**y == 5);
}
None => ()
}
match Some(&[(box 5,box 7)]) {
Some(ps) => {
let (ref y, ref z) = ps[0];
assert!(**y == 5);
assert!(**z == 7);
}
None => ()
}
}
|
identifier_body
|
|
issue-8498.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
match &[(box 5,box 7)] {
ps => {
let (ref y, _) = ps[0];
assert!(**y == 5);
}
}
match Some(&[(box 5,)]) {
Some(ps) => {
let (ref y,) = ps[0];
assert!(**y == 5);
}
None => ()
}
match Some(&[(box 5,box 7)]) {
Some(ps) => {
let (ref y, ref z) = ps[0];
assert!(**y == 5);
|
}
|
assert!(**z == 7);
}
None => ()
}
|
random_line_split
|
issue-8498.rs
|
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn
|
() {
match &[(box 5,box 7)] {
ps => {
let (ref y, _) = ps[0];
assert!(**y == 5);
}
}
match Some(&[(box 5,)]) {
Some(ps) => {
let (ref y,) = ps[0];
assert!(**y == 5);
}
None => ()
}
match Some(&[(box 5,box 7)]) {
Some(ps) => {
let (ref y, ref z) = ps[0];
assert!(**y == 5);
assert!(**z == 7);
}
None => ()
}
}
|
main
|
identifier_name
|
json.rs
|
#![cfg(feature = "alloc")]
#[macro_use]
extern crate nom;
extern crate jemallocator;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
use nom::{Err, IResult, Offset, error::{VerboseError, VerboseErrorKind}};
use nom::{
character::complete::alphanumeric1 as alphanumeric,
bytes::complete::{take_while, tag},
multi::separated_listc,
branch::alt,
sequence::{preceded, terminated}, error::context
};
use nom::character::complete::char;
use nom::number::complete::recognize_float;
use nom::error::{ErrorKind,ParseError};
use std::str;
use std::iter::repeat;
use std::collections::HashMap;
#[derive(Debug, PartialEq)]
pub enum JsonValue {
Str(String),
Boolean(bool),
Num(f64),
Array(Vec<JsonValue>),
Object(HashMap<String, JsonValue>),
}
fn sp<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
let chars = " \t\r\n";
take_while(move |c| chars.contains(c))(i)
}
fn float<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, f64, E> {
flat_map!(i, recognize_float, parse_to!(f64))
}
fn parse_str<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
escaped!(i, call!(alphanumeric), '\\', one_of!("\"n\\"))
}
fn string<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
//delimitedc(i, char('\"'), parse_str, char('\"'))
let (i, _) = char('\"')(i)?;
//context("string", |i| terminatedc(i, parse_str, char('\"')))(i)
context("string", terminated(parse_str, char('\"')))(i)
}
|
|i| tag("false")(i).map(|(i,_)| (i, false)),
|i| tag("true")(i).map(|(i,_)| (i, true))
))(input)
/*
match tag::<&'static str, &'a str, E>("false")(i) {
Ok((i, _)) => Ok((i, false)),
Err(_) => tag("true")(i).map(|(i,_)| (i, true))
}
*/
}
fn array<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, Vec<JsonValue>, E> {
let (i, _) = char('[')(i)?;
/*context(
"array",
|i| terminatedc(i,
|i| separated_listc(i, |i| precededc(i, sp, char(',')), value),
|i| precededc(i, sp, char(']')))
)(i)*/
context(
"array",
terminated(
|i| separated_listc(i, preceded(sp, char(',')), value),
preceded(sp, char(']')))
)(i)
}
fn key_value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, (&'a str, JsonValue), E> {
separated_pair!(i, preceded!(sp, string), preceded!(sp, char!(':')), value)
}
fn hash<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, HashMap<String, JsonValue>, E> {
let (i, _) = char('{')(i)?;
context(
"map",
terminated(
|i| map!(i,
separated_list!(preceded!(sp, char!(',')), key_value),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
),
preceded(sp, char('}')))
)(i)
/*
map!(i,
delimited!(
char!('{'),
separated_list!(preceded!(sp, char!(',')), key_value),
preceded!(sp, char!('}'))
),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
)
*/
}
fn value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
preceded!(i,
sp,
alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) } |
string => { |s| JsonValue::Str(String::from(s)) } |
float => { |f| JsonValue::Num(f) } |
boolean => { |b| JsonValue::Boolean(b) }
))
}
fn root<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
delimited!(i,
sp,
alt( (
|input| hash(input).map(|(i,h)| (i, JsonValue::Object(h))),
|input| array(input).map(|(i,v)| (i, JsonValue::Array(v)))
) ),
/*alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) }
),*/
not!(complete!(sp)))
}
fn convert_error(input: &str, e: VerboseError<&str>) -> String {
let lines: Vec<_> = input.lines().map(String::from).collect();
//println!("lines: {:#?}", lines);
let mut result = String::new();
for (i, (substring, kind)) in e.errors.iter().enumerate() {
let mut offset = input.offset(substring);
let mut line = 0;
let mut column = 0;
for (j,l) in lines.iter().enumerate() {
if offset <= l.len() {
line = j;
column = offset;
break;
} else {
offset = offset - l.len();
}
}
match kind {
VerboseErrorKind::Char(c) => {
result += &format!("{}: at line {}:\n", i, line);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column-1).collect::<String>();
}
result += "^\n";
result += &format!("expected '{}', found {}\n\n", c, substring.chars().next().unwrap());
},
VerboseErrorKind::Context(s) => {
result += &format!("{}: at line {}, in {}:\n", i, line, s);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column -1).collect::<String>();
}
result += "^\n\n";
}
_ => {}
}
}
result
}
fn main() {
let data = " { \"a\"\t: 42,
\"b\": [ \"x\", \"y\", 12 ] ,
\"c\": { 1\"hello\" : \"world\"
}
} ";
println!("will try to parse:\n\n**********\n{}\n**********\n", data);
println!("basic errors - `root::<(&str, ErrorKind)>(data)`:\n{:#?}\n", root::<(&str, ErrorKind)>(data));
println!("parsed verbose: {:#?}", root::<VerboseError<&str>>(data));
match root::<VerboseError<&str>>(data) {
Err(Err::Error(e)) | Err(Err::Failure(e)) => {
println!("verbose errors - `root::<VerboseError>(data)`:\n{}", convert_error(data, e));
},
_ => panic!(),
}
}
|
fn boolean<'a, E: ParseError<&'a str>>(input: &'a str) ->IResult<&'a str, bool, E> {
alt( (
|
random_line_split
|
json.rs
|
#![cfg(feature = "alloc")]
#[macro_use]
extern crate nom;
extern crate jemallocator;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
use nom::{Err, IResult, Offset, error::{VerboseError, VerboseErrorKind}};
use nom::{
character::complete::alphanumeric1 as alphanumeric,
bytes::complete::{take_while, tag},
multi::separated_listc,
branch::alt,
sequence::{preceded, terminated}, error::context
};
use nom::character::complete::char;
use nom::number::complete::recognize_float;
use nom::error::{ErrorKind,ParseError};
use std::str;
use std::iter::repeat;
use std::collections::HashMap;
#[derive(Debug, PartialEq)]
pub enum JsonValue {
Str(String),
Boolean(bool),
Num(f64),
Array(Vec<JsonValue>),
Object(HashMap<String, JsonValue>),
}
fn sp<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
let chars = " \t\r\n";
take_while(move |c| chars.contains(c))(i)
}
fn float<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, f64, E> {
flat_map!(i, recognize_float, parse_to!(f64))
}
fn parse_str<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E>
|
fn string<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
//delimitedc(i, char('\"'), parse_str, char('\"'))
let (i, _) = char('\"')(i)?;
//context("string", |i| terminatedc(i, parse_str, char('\"')))(i)
context("string", terminated(parse_str, char('\"')))(i)
}
fn boolean<'a, E: ParseError<&'a str>>(input: &'a str) ->IResult<&'a str, bool, E> {
alt( (
|i| tag("false")(i).map(|(i,_)| (i, false)),
|i| tag("true")(i).map(|(i,_)| (i, true))
))(input)
/*
match tag::<&'static str, &'a str, E>("false")(i) {
Ok((i, _)) => Ok((i, false)),
Err(_) => tag("true")(i).map(|(i,_)| (i, true))
}
*/
}
fn array<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, Vec<JsonValue>, E> {
let (i, _) = char('[')(i)?;
/*context(
"array",
|i| terminatedc(i,
|i| separated_listc(i, |i| precededc(i, sp, char(',')), value),
|i| precededc(i, sp, char(']')))
)(i)*/
context(
"array",
terminated(
|i| separated_listc(i, preceded(sp, char(',')), value),
preceded(sp, char(']')))
)(i)
}
fn key_value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, (&'a str, JsonValue), E> {
separated_pair!(i, preceded!(sp, string), preceded!(sp, char!(':')), value)
}
fn hash<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, HashMap<String, JsonValue>, E> {
let (i, _) = char('{')(i)?;
context(
"map",
terminated(
|i| map!(i,
separated_list!(preceded!(sp, char!(',')), key_value),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
),
preceded(sp, char('}')))
)(i)
/*
map!(i,
delimited!(
char!('{'),
separated_list!(preceded!(sp, char!(',')), key_value),
preceded!(sp, char!('}'))
),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
)
*/
}
fn value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
preceded!(i,
sp,
alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) } |
string => { |s| JsonValue::Str(String::from(s)) } |
float => { |f| JsonValue::Num(f) } |
boolean => { |b| JsonValue::Boolean(b) }
))
}
fn root<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
delimited!(i,
sp,
alt( (
|input| hash(input).map(|(i,h)| (i, JsonValue::Object(h))),
|input| array(input).map(|(i,v)| (i, JsonValue::Array(v)))
) ),
/*alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) }
),*/
not!(complete!(sp)))
}
fn convert_error(input: &str, e: VerboseError<&str>) -> String {
let lines: Vec<_> = input.lines().map(String::from).collect();
//println!("lines: {:#?}", lines);
let mut result = String::new();
for (i, (substring, kind)) in e.errors.iter().enumerate() {
let mut offset = input.offset(substring);
let mut line = 0;
let mut column = 0;
for (j,l) in lines.iter().enumerate() {
if offset <= l.len() {
line = j;
column = offset;
break;
} else {
offset = offset - l.len();
}
}
match kind {
VerboseErrorKind::Char(c) => {
result += &format!("{}: at line {}:\n", i, line);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column-1).collect::<String>();
}
result += "^\n";
result += &format!("expected '{}', found {}\n\n", c, substring.chars().next().unwrap());
},
VerboseErrorKind::Context(s) => {
result += &format!("{}: at line {}, in {}:\n", i, line, s);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column -1).collect::<String>();
}
result += "^\n\n";
}
_ => {}
}
}
result
}
fn main() {
let data = " { \"a\"\t: 42,
\"b\": [ \"x\", \"y\", 12 ] ,
\"c\": { 1\"hello\" : \"world\"
}
} ";
println!("will try to parse:\n\n**********\n{}\n**********\n", data);
println!("basic errors - `root::<(&str, ErrorKind)>(data)`:\n{:#?}\n", root::<(&str, ErrorKind)>(data));
println!("parsed verbose: {:#?}", root::<VerboseError<&str>>(data));
match root::<VerboseError<&str>>(data) {
Err(Err::Error(e)) | Err(Err::Failure(e)) => {
println!("verbose errors - `root::<VerboseError>(data)`:\n{}", convert_error(data, e));
},
_ => panic!(),
}
}
|
{
escaped!(i, call!(alphanumeric), '\\', one_of!("\"n\\"))
}
|
identifier_body
|
json.rs
|
#![cfg(feature = "alloc")]
#[macro_use]
extern crate nom;
extern crate jemallocator;
#[global_allocator]
static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
use nom::{Err, IResult, Offset, error::{VerboseError, VerboseErrorKind}};
use nom::{
character::complete::alphanumeric1 as alphanumeric,
bytes::complete::{take_while, tag},
multi::separated_listc,
branch::alt,
sequence::{preceded, terminated}, error::context
};
use nom::character::complete::char;
use nom::number::complete::recognize_float;
use nom::error::{ErrorKind,ParseError};
use std::str;
use std::iter::repeat;
use std::collections::HashMap;
#[derive(Debug, PartialEq)]
pub enum JsonValue {
Str(String),
Boolean(bool),
Num(f64),
Array(Vec<JsonValue>),
Object(HashMap<String, JsonValue>),
}
fn sp<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
let chars = " \t\r\n";
take_while(move |c| chars.contains(c))(i)
}
fn
|
<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, f64, E> {
flat_map!(i, recognize_float, parse_to!(f64))
}
fn parse_str<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
escaped!(i, call!(alphanumeric), '\\', one_of!("\"n\\"))
}
fn string<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, &'a str, E> {
//delimitedc(i, char('\"'), parse_str, char('\"'))
let (i, _) = char('\"')(i)?;
//context("string", |i| terminatedc(i, parse_str, char('\"')))(i)
context("string", terminated(parse_str, char('\"')))(i)
}
fn boolean<'a, E: ParseError<&'a str>>(input: &'a str) ->IResult<&'a str, bool, E> {
alt( (
|i| tag("false")(i).map(|(i,_)| (i, false)),
|i| tag("true")(i).map(|(i,_)| (i, true))
))(input)
/*
match tag::<&'static str, &'a str, E>("false")(i) {
Ok((i, _)) => Ok((i, false)),
Err(_) => tag("true")(i).map(|(i,_)| (i, true))
}
*/
}
fn array<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, Vec<JsonValue>, E> {
let (i, _) = char('[')(i)?;
/*context(
"array",
|i| terminatedc(i,
|i| separated_listc(i, |i| precededc(i, sp, char(',')), value),
|i| precededc(i, sp, char(']')))
)(i)*/
context(
"array",
terminated(
|i| separated_listc(i, preceded(sp, char(',')), value),
preceded(sp, char(']')))
)(i)
}
fn key_value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, (&'a str, JsonValue), E> {
separated_pair!(i, preceded!(sp, string), preceded!(sp, char!(':')), value)
}
fn hash<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, HashMap<String, JsonValue>, E> {
let (i, _) = char('{')(i)?;
context(
"map",
terminated(
|i| map!(i,
separated_list!(preceded!(sp, char!(',')), key_value),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
),
preceded(sp, char('}')))
)(i)
/*
map!(i,
delimited!(
char!('{'),
separated_list!(preceded!(sp, char!(',')), key_value),
preceded!(sp, char!('}'))
),
|tuple_vec| tuple_vec
.into_iter()
.map(|(k, v)| (String::from(k), v))
.collect()
)
*/
}
fn value<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
preceded!(i,
sp,
alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) } |
string => { |s| JsonValue::Str(String::from(s)) } |
float => { |f| JsonValue::Num(f) } |
boolean => { |b| JsonValue::Boolean(b) }
))
}
fn root<'a, E: ParseError<&'a str>>(i: &'a str) ->IResult<&'a str, JsonValue, E> {
delimited!(i,
sp,
alt( (
|input| hash(input).map(|(i,h)| (i, JsonValue::Object(h))),
|input| array(input).map(|(i,v)| (i, JsonValue::Array(v)))
) ),
/*alt!(
hash => { |h| JsonValue::Object(h) } |
array => { |v| JsonValue::Array(v) }
),*/
not!(complete!(sp)))
}
fn convert_error(input: &str, e: VerboseError<&str>) -> String {
let lines: Vec<_> = input.lines().map(String::from).collect();
//println!("lines: {:#?}", lines);
let mut result = String::new();
for (i, (substring, kind)) in e.errors.iter().enumerate() {
let mut offset = input.offset(substring);
let mut line = 0;
let mut column = 0;
for (j,l) in lines.iter().enumerate() {
if offset <= l.len() {
line = j;
column = offset;
break;
} else {
offset = offset - l.len();
}
}
match kind {
VerboseErrorKind::Char(c) => {
result += &format!("{}: at line {}:\n", i, line);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column-1).collect::<String>();
}
result += "^\n";
result += &format!("expected '{}', found {}\n\n", c, substring.chars().next().unwrap());
},
VerboseErrorKind::Context(s) => {
result += &format!("{}: at line {}, in {}:\n", i, line, s);
result += &lines[line];
result += "\n";
if column > 0 {
result += &repeat(' ').take(column -1).collect::<String>();
}
result += "^\n\n";
}
_ => {}
}
}
result
}
fn main() {
let data = " { \"a\"\t: 42,
\"b\": [ \"x\", \"y\", 12 ] ,
\"c\": { 1\"hello\" : \"world\"
}
} ";
println!("will try to parse:\n\n**********\n{}\n**********\n", data);
println!("basic errors - `root::<(&str, ErrorKind)>(data)`:\n{:#?}\n", root::<(&str, ErrorKind)>(data));
println!("parsed verbose: {:#?}", root::<VerboseError<&str>>(data));
match root::<VerboseError<&str>>(data) {
Err(Err::Error(e)) | Err(Err::Failure(e)) => {
println!("verbose errors - `root::<VerboseError>(data)`:\n{}", convert_error(data, e));
},
_ => panic!(),
}
}
|
float
|
identifier_name
|
handledisconnected.ts
|
module cola {
var packingOptions = {
PADDING: 10,
GOLDEN_SECTION: (1 + Math.sqrt(5)) / 2,
FLOAT_EPSILON: 0.0001,
        MAX_ITERATIONS: 100
};
// assign x, y to nodes while using box packing algorithm for disconnected graphs
export function applyPacking(graphs:Array<any>, w, h, node_size, desired_ratio = 1) {
var init_x = 0,
init_y = 0,
svg_width = w,
svg_height = h,
desired_ratio = typeof desired_ratio !== 'undefined' ? desired_ratio : 1,
node_size = typeof node_size !== 'undefined' ? node_size : 0,
real_width = 0,
real_height = 0,
min_width = 0,
global_bottom = 0,
line = [];
if (graphs.length == 0)
return;
/// that would take care of single nodes problem
// graphs.forEach(function (g) {
// if (g.array.length == 1) {
// g.array[0].x = 0;
// g.array[0].y = 0;
// }
// });
calculate_bb(graphs);
apply(graphs, desired_ratio);
put_nodes_to_right_positions(graphs);
// get bounding boxes for all separate graphs
function calculate_bb(graphs) {
graphs.forEach(function (g) {
calculate_single_bb(g)
});
function calculate_single_bb(graph) {
var min_x = Number.MAX_VALUE, min_y = Number.MAX_VALUE,
max_x = 0, max_y = 0;
graph.array.forEach(function (v) {
var w = typeof v.width !== 'undefined' ? v.width : node_size;
var h = typeof v.height !== 'undefined' ? v.height : node_size;
w /= 2;
h /= 2;
max_x = Math.max(v.x + w, max_x);
min_x = Math.min(v.x - w, min_x);
max_y = Math.max(v.y + h, max_y);
min_y = Math.min(v.y - h, min_y);
});
graph.width = max_x - min_x;
graph.height = max_y - min_y;
}
}
//function plot(data, left, right, opt_x, opt_y) {
// // plot the cost function
// var plot_svg = d3.select("body").append("svg")
// .attr("width", function () { return 2 * (right - left); })
// .attr("height", 200);
// var x = d3.time.scale().range([0, 2 * (right - left)]);
// var xAxis = d3.svg.axis().scale(x).orient("bottom");
// plot_svg.append("g").attr("class", "x axis")
// .attr("transform", "translate(0, 199)")
// .call(xAxis);
// var lastX = 0;
// var lastY = 0;
// var value = 0;
// for (var r = left; r < right; r += 1) {
// value = step(data, r);
// // value = 1;
// plot_svg.append("line").attr("x1", 2 * (lastX - left))
// .attr("y1", 200 - 30 * lastY)
// .attr("x2", 2 * r - 2 * left)
// .attr("y2", 200 - 30 * value)
// .style("stroke", "rgb(6,120,155)");
// lastX = r;
// lastY = value;
// }
// plot_svg.append("circle").attr("cx", 2 * opt_x - 2 * left).attr("cy", 200 - 30 * opt_y)
// .attr("r", 5).style('fill', "rgba(0,0,0,0.5)");
//}
// actual assigning of position to nodes
function put_nodes_to_right_positions(graphs) {
graphs.forEach(function (g) {
// calculate current graph center:
var center = { x: 0, y: 0 };
g.array.forEach(function (node) {
center.x += node.x;
center.y += node.y;
});
center.x /= g.array.length;
center.y /= g.array.length;
// calculate current top left corner:
var corner = { x: center.x - g.width / 2, y: center.y - g.height / 2 };
var offset = { x: g.x - corner.x + svg_width / 2 - real_width / 2, y: g.y - corner.y + svg_height / 2 - real_height / 2};
// put nodes:
g.array.forEach(function (node) {
node.x += offset.x;
node.y += offset.y;
});
});
}
// starts box packing algorithm
// desired ratio is 1 by default
function apply(data, desired_ratio) {
var curr_best_f = Number.POSITIVE_INFINITY;
var curr_best = 0;
data.sort(function (a, b) { return b.height - a.height; });
min_width = data.reduce(function (a, b) {
return a.width < b.width ? a.width : b.width;
});
var left = x1 = min_width;
var right = x2 = get_entire_width(data);
var iterationCounter = 0;
var f_x1 = Number.MAX_VALUE;
var f_x2 = Number.MAX_VALUE;
var flag = -1; // determines which among f_x1 and f_x2 to recompute
var dx = Number.MAX_VALUE;
var df = Number.MAX_VALUE;
while ((dx > min_width) || df > packingOptions.FLOAT_EPSILON) {
if (flag != 1) {
var x1 = right - (right - left) / packingOptions.GOLDEN_SECTION;
var f_x1 = step(data, x1);
}
if (flag != 0) {
var x2 = left + (right - left) / packingOptions.GOLDEN_SECTION;
var f_x2 = step(data, x2);
}
dx = Math.abs(x1 - x2);
df = Math.abs(f_x1 - f_x2);
if (f_x1 < curr_best_f) {
curr_best_f = f_x1;
curr_best = x1;
}
if (f_x2 < curr_best_f) {
curr_best_f = f_x2;
curr_best = x2;
}
if (f_x1 > f_x2) {
left = x1;
x1 = x2;
f_x1 = f_x2;
flag = 1;
} else {
right = x2;
x2 = x1;
f_x2 = f_x1;
flag = 0;
}
if (iterationCounter++ > 100) {
break;
}
}
// plot(data, min_width, get_entire_width(data), curr_best, curr_best_f);
step(data, curr_best);
}
// one iteration of the optimization method
// (gives a proper, but not necessarily optimal packing)
function step(data, max_width) {
line = [];
real_width = 0;
real_height = 0;
global_bottom = init_y;
for (var i = 0; i < data.length; i++) {
var o = data[i];
put_rect(o, max_width);
}
return Math.abs(get_real_ratio() - desired_ratio);
}
// looks for a position for one box
function put_rect(rect, max_width) {
var parent = undefined;
for (var i = 0; i < line.length; i++) {
if ((line[i].space_left >= rect.height) && (line[i].x + line[i].width + rect.width + packingOptions.PADDING - max_width) <= packingOptions.FLOAT_EPSILON) {
parent = line[i];
break;
}
}
line.push(rect);
if (parent !== undefined) {
rect.x = parent.x + parent.width + packingOptions.PADDING;
rect.y = parent.bottom;
rect.space_left = rect.height;
rect.bottom = rect.y;
parent.space_left -= rect.height + packingOptions.PADDING;
parent.bottom += rect.height + packingOptions.PADDING;
} else {
rect.y = global_bottom;
global_bottom += rect.height + packingOptions.PADDING;
rect.x = init_x;
rect.bottom = rect.y;
rect.space_left = rect.height;
}
if (rect.y + rect.height - real_height > -packingOptions.FLOAT_EPSILON) real_height = rect.y + rect.height - init_y;
if (rect.x + rect.width - real_width > -packingOptions.FLOAT_EPSILON) real_width = rect.x + rect.width - init_x;
};
function get_entire_width(data) {
var width = 0;
data.forEach(function (d) { return width += d.width + packingOptions.PADDING; });
return width;
}
function get_real_ratio() {
|
}
/**
* connected components of graph
* returns an array of components, each of the form { array: [nodes] }
*/
export function separateGraphs(nodes, links) {
var marks = {};
var ways = {};
var graphs = [];
var clusters = 0;
for (var i = 0; i < links.length; i++) {
var link = links[i];
var n1 = link.source;
var n2 = link.target;
if (ways[n1.index])
ways[n1.index].push(n2);
else
ways[n1.index] = [n2];
if (ways[n2.index])
ways[n2.index].push(n1);
else
ways[n2.index] = [n1];
}
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
if (marks[node.index]) continue;
explore_node(node, true);
}
function explore_node(n, is_new) {
if (marks[n.index] !== undefined) return;
if (is_new) {
clusters++;
graphs.push({ array: [] });
}
marks[n.index] = clusters;
graphs[clusters - 1].array.push(n);
var adjacent = ways[n.index];
if (!adjacent) return;
for (var j = 0; j < adjacent.length; j++) {
explore_node(adjacent[j], false);
}
}
return graphs;
}
}
|
return (real_width / real_height);
}
|
identifier_body
|
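The apply function in the handledisconnected.ts row above narrows the candidate maximum packing width with a golden-section search over the cost |real aspect ratio - desired ratio|. Below is a minimal, self-contained sketch of that search on an arbitrary unimodal cost function; it is an illustration only, not the original cola code, and it re-evaluates both probe points each iteration instead of caching one the way apply does. The names goldenSectionSearch, cost and opt are invented for the example.

// Illustrative golden-section search over a unimodal cost function (not the original cola code).
const GOLDEN_SECTION = (1 + Math.sqrt(5)) / 2;
const FLOAT_EPSILON = 0.0001;

function goldenSectionSearch(cost: (x: number) => number, left: number, right: number, maxIterations = 100): number {
    let bestX = left;
    let bestF = Number.POSITIVE_INFINITY;
    for (let i = 0; i < maxIterations && right - left > FLOAT_EPSILON; i++) {
        // Two interior probes that divide the interval by the golden ratio.
        const x1 = right - (right - left) / GOLDEN_SECTION;
        const x2 = left + (right - left) / GOLDEN_SECTION;
        const f1 = cost(x1);
        const f2 = cost(x2);
        if (f1 < bestF) { bestF = f1; bestX = x1; }
        if (f2 < bestF) { bestF = f2; bestX = x2; }
        // Keep only the half of the interval that can still contain the minimum.
        if (f1 > f2) { left = x1; } else { right = x2; }
    }
    return bestX;
}

// Example: minimising |x*x - 2| on [0, 2] converges towards sqrt(2).
const opt = goldenSectionSearch(x => Math.abs(x * x - 2), 0, 2);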
handledisconnected.ts
|
module cola {
var packingOptions = {
PADDING: 10,
GOLDEN_SECTION: (1 + Math.sqrt(5)) / 2,
FLOAT_EPSILON: 0.0001,
MAX_ITERATIONS: 100
};
// assign x, y to nodes while using box packing algorithm for disconnected graphs
export function applyPacking(graphs:Array<any>, w, h, node_size, desired_ratio = 1) {
var init_x = 0,
init_y = 0,
svg_width = w,
svg_height = h,
desired_ratio = typeof desired_ratio !== 'undefined' ? desired_ratio : 1,
node_size = typeof node_size !== 'undefined' ? node_size : 0,
real_width = 0,
real_height = 0,
min_width = 0,
global_bottom = 0,
line = [];
if (graphs.length == 0)
return;
/// this would take care of the single-node problem
// graphs.forEach(function (g) {
// if (g.array.length == 1) {
// g.array[0].x = 0;
// g.array[0].y = 0;
// }
// });
calculate_bb(graphs);
apply(graphs, desired_ratio);
put_nodes_to_right_positions(graphs);
// get bounding boxes for all separate graphs
function ca
|
raphs) {
graphs.forEach(function (g) {
calculate_single_bb(g)
});
function calculate_single_bb(graph) {
var min_x = Number.MAX_VALUE, min_y = Number.MAX_VALUE,
max_x = 0, max_y = 0;
graph.array.forEach(function (v) {
var w = typeof v.width !== 'undefined' ? v.width : node_size;
var h = typeof v.height !== 'undefined' ? v.height : node_size;
w /= 2;
h /= 2;
max_x = Math.max(v.x + w, max_x);
min_x = Math.min(v.x - w, min_x);
max_y = Math.max(v.y + h, max_y);
min_y = Math.min(v.y - h, min_y);
});
graph.width = max_x - min_x;
graph.height = max_y - min_y;
}
}
//function plot(data, left, right, opt_x, opt_y) {
// // plot the cost function
// var plot_svg = d3.select("body").append("svg")
// .attr("width", function () { return 2 * (right - left); })
// .attr("height", 200);
// var x = d3.time.scale().range([0, 2 * (right - left)]);
// var xAxis = d3.svg.axis().scale(x).orient("bottom");
// plot_svg.append("g").attr("class", "x axis")
// .attr("transform", "translate(0, 199)")
// .call(xAxis);
// var lastX = 0;
// var lastY = 0;
// var value = 0;
// for (var r = left; r < right; r += 1) {
// value = step(data, r);
// // value = 1;
// plot_svg.append("line").attr("x1", 2 * (lastX - left))
// .attr("y1", 200 - 30 * lastY)
// .attr("x2", 2 * r - 2 * left)
// .attr("y2", 200 - 30 * value)
// .style("stroke", "rgb(6,120,155)");
// lastX = r;
// lastY = value;
// }
// plot_svg.append("circle").attr("cx", 2 * opt_x - 2 * left).attr("cy", 200 - 30 * opt_y)
// .attr("r", 5).style('fill', "rgba(0,0,0,0.5)");
//}
// actually assigns positions to the nodes
function put_nodes_to_right_positions(graphs) {
graphs.forEach(function (g) {
// calculate current graph center:
var center = { x: 0, y: 0 };
g.array.forEach(function (node) {
center.x += node.x;
center.y += node.y;
});
center.x /= g.array.length;
center.y /= g.array.length;
// calculate current top left corner:
var corner = { x: center.x - g.width / 2, y: center.y - g.height / 2 };
var offset = { x: g.x - corner.x + svg_width / 2 - real_width / 2, y: g.y - corner.y + svg_height / 2 - real_height / 2};
// put nodes:
g.array.forEach(function (node) {
node.x += offset.x;
node.y += offset.y;
});
});
}
// starts box packing algorithm
// desired ratio is 1 by default
function apply(data, desired_ratio) {
var curr_best_f = Number.POSITIVE_INFINITY;
var curr_best = 0;
data.sort(function (a, b) { return b.height - a.height; });
min_width = data.reduce(function (a, b) {
// accumulator is a number (seeded below), so compare it against each box's width
return Math.min(a, b.width);
}, Number.MAX_VALUE);
var left = x1 = min_width;
var right = x2 = get_entire_width(data);
var iterationCounter = 0;
var f_x1 = Number.MAX_VALUE;
var f_x2 = Number.MAX_VALUE;
var flag = -1; // determines which among f_x1 and f_x2 to recompute
var dx = Number.MAX_VALUE;
var df = Number.MAX_VALUE;
while ((dx > min_width) || df > packingOptions.FLOAT_EPSILON) {
if (flag != 1) {
var x1 = right - (right - left) / packingOptions.GOLDEN_SECTION;
var f_x1 = step(data, x1);
}
if (flag != 0) {
var x2 = left + (right - left) / packingOptions.GOLDEN_SECTION;
var f_x2 = step(data, x2);
}
dx = Math.abs(x1 - x2);
df = Math.abs(f_x1 - f_x2);
if (f_x1 < curr_best_f) {
curr_best_f = f_x1;
curr_best = x1;
}
if (f_x2 < curr_best_f) {
curr_best_f = f_x2;
curr_best = x2;
}
if (f_x1 > f_x2) {
left = x1;
x1 = x2;
f_x1 = f_x2;
flag = 1;
} else {
right = x2;
x2 = x1;
f_x2 = f_x1;
flag = 0;
}
if (iterationCounter++ > 100) {
break;
}
}
// plot(data, min_width, get_entire_width(data), curr_best, curr_best_f);
step(data, curr_best);
}
// one iteration of the optimization method
// (gives a proper, but not necessarily optimal packing)
function step(data, max_width) {
line = [];
real_width = 0;
real_height = 0;
global_bottom = init_y;
for (var i = 0; i < data.length; i++) {
var o = data[i];
put_rect(o, max_width);
}
return Math.abs(get_real_ratio() - desired_ratio);
}
// looks for a position for one box
function put_rect(rect, max_width) {
var parent = undefined;
for (var i = 0; i < line.length; i++) {
if ((line[i].space_left >= rect.height) && (line[i].x + line[i].width + rect.width + packingOptions.PADDING - max_width) <= packingOptions.FLOAT_EPSILON) {
parent = line[i];
break;
}
}
line.push(rect);
if (parent !== undefined) {
rect.x = parent.x + parent.width + packingOptions.PADDING;
rect.y = parent.bottom;
rect.space_left = rect.height;
rect.bottom = rect.y;
parent.space_left -= rect.height + packingOptions.PADDING;
parent.bottom += rect.height + packingOptions.PADDING;
} else {
rect.y = global_bottom;
global_bottom += rect.height + packingOptions.PADDING;
rect.x = init_x;
rect.bottom = rect.y;
rect.space_left = rect.height;
}
if (rect.y + rect.height - real_height > -packingOptions.FLOAT_EPSILON) real_height = rect.y + rect.height - init_y;
if (rect.x + rect.width - real_width > -packingOptions.FLOAT_EPSILON) real_width = rect.x + rect.width - init_x;
};
function get_entire_width(data) {
var width = 0;
data.forEach(function (d) { return width += d.width + packingOptions.PADDING; });
return width;
}
function get_real_ratio() {
return (real_width / real_height);
}
}
/**
* connected components of graph
* returns an array of components, each of the form { array: [nodes] }
*/
export function separateGraphs(nodes, links) {
var marks = {};
var ways = {};
var graphs = [];
var clusters = 0;
for (var i = 0; i < links.length; i++) {
var link = links[i];
var n1 = link.source;
var n2 = link.target;
if (ways[n1.index])
ways[n1.index].push(n2);
else
ways[n1.index] = [n2];
if (ways[n2.index])
ways[n2.index].push(n1);
else
ways[n2.index] = [n1];
}
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
if (marks[node.index]) continue;
explore_node(node, true);
}
function explore_node(n, is_new) {
if (marks[n.index] !== undefined) return;
if (is_new) {
clusters++;
graphs.push({ array: [] });
}
marks[n.index] = clusters;
graphs[clusters - 1].array.push(n);
var adjacent = ways[n.index];
if (!adjacent) return;
for (var j = 0; j < adjacent.length; j++) {
explore_node(adjacent[j], false);
}
}
return graphs;
}
}
|
lculate_bb(g
|
identifier_name
|
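separateGraphs in the row above expects node objects that carry an index and link objects whose source/target reference those node objects; it returns one { array: [...] } group per connected component, which applyPacking then decorates with width/height (via calculate_bb) and x/y (via put_rect). A small usage sketch, assuming the cola module is in scope; the node and link literals are invented for illustration:

// Hypothetical input: four nodes forming two components, {0, 1, 2} and {3}.
const nodes = [{ index: 0 }, { index: 1 }, { index: 2 }, { index: 3 }];
const links = [
    { source: nodes[0], target: nodes[1] },
    { source: nodes[1], target: nodes[2] }
];
const components = cola.separateGraphs(nodes, links);
// components.length === 2
// components[0].array -> nodes 0, 1, 2; components[1].array -> node 3

Worth noting that explore_node recurses once per adjacent node, so extremely long node chains could in principle hit stack limits.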
handledisconnected.ts
|
module cola {
var packingOptions = {
PADDING: 10,
GOLDEN_SECTION: (1 + Math.sqrt(5)) / 2,
FLOAT_EPSILON: 0.0001,
MAX_ITERATIONS: 100
};
// assign x, y to nodes while using box packing algorithm for disconnected graphs
export function applyPacking(graphs:Array<any>, w, h, node_size, desired_ratio = 1) {
var init_x = 0,
init_y = 0,
svg_width = w,
svg_height = h,
desired_ratio = typeof desired_ratio !== 'undefined' ? desired_ratio : 1,
node_size = typeof node_size !== 'undefined' ? node_size : 0,
real_width = 0,
real_height = 0,
min_width = 0,
global_bottom = 0,
line = [];
if (graphs.length == 0)
return;
/// this would take care of the single-node problem
// graphs.forEach(function (g) {
// if (g.array.length == 1) {
// g.array[0].x = 0;
// g.array[0].y = 0;
// }
// });
calculate_bb(graphs);
apply(graphs, desired_ratio);
put_nodes_to_right_positions(graphs);
// get bounding boxes for all separate graphs
function calculate_bb(graphs) {
graphs.forEach(function (g) {
calculate_single_bb(g)
});
function calculate_single_bb(graph) {
var min_x = Number.MAX_VALUE, min_y = Number.MAX_VALUE,
max_x = 0, max_y = 0;
graph.array.forEach(function (v) {
var w = typeof v.width !== 'undefined' ? v.width : node_size;
var h = typeof v.height !== 'undefined' ? v.height : node_size;
w /= 2;
h /= 2;
max_x = Math.max(v.x + w, max_x);
min_x = Math.min(v.x - w, min_x);
max_y = Math.max(v.y + h, max_y);
min_y = Math.min(v.y - h, min_y);
});
graph.width = max_x - min_x;
graph.height = max_y - min_y;
}
}
//function plot(data, left, right, opt_x, opt_y) {
// // plot the cost function
// var plot_svg = d3.select("body").append("svg")
// .attr("width", function () { return 2 * (right - left); })
// .attr("height", 200);
// var x = d3.time.scale().range([0, 2 * (right - left)]);
// var xAxis = d3.svg.axis().scale(x).orient("bottom");
// plot_svg.append("g").attr("class", "x axis")
// .attr("transform", "translate(0, 199)")
// .call(xAxis);
// var lastX = 0;
// var lastY = 0;
// var value = 0;
// for (var r = left; r < right; r += 1) {
// value = step(data, r);
// // value = 1;
// plot_svg.append("line").attr("x1", 2 * (lastX - left))
// .attr("y1", 200 - 30 * lastY)
// .attr("x2", 2 * r - 2 * left)
// .attr("y2", 200 - 30 * value)
// .style("stroke", "rgb(6,120,155)");
// lastX = r;
// lastY = value;
// }
// plot_svg.append("circle").attr("cx", 2 * opt_x - 2 * left).attr("cy", 200 - 30 * opt_y)
// .attr("r", 5).style('fill', "rgba(0,0,0,0.5)");
//}
// actually assigns positions to the nodes
function put_nodes_to_right_positions(graphs) {
graphs.forEach(function (g) {
// calculate current graph center:
var center = { x: 0, y: 0 };
g.array.forEach(function (node) {
center.x += node.x;
center.y += node.y;
});
center.x /= g.array.length;
center.y /= g.array.length;
// calculate current top left corner:
var corner = { x: center.x - g.width / 2, y: center.y - g.height / 2 };
var offset = { x: g.x - corner.x + svg_width / 2 - real_width / 2, y: g.y - corner.y + svg_height / 2 - real_height / 2};
// put nodes:
g.array.forEach(function (node) {
node.x += offset.x;
node.y += offset.y;
});
});
}
// starts box packing algorithm
// desired ratio is 1 by default
function apply(data, desired_ratio) {
var curr_best_f = Number.POSITIVE_INFINITY;
var curr_best = 0;
data.sort(function (a, b) { return b.height - a.height; });
min_width = data.reduce(function (a, b) {
// accumulator is a number (seeded below), so compare it against each box's width
return Math.min(a, b.width);
}, Number.MAX_VALUE);
var left = x1 = min_width;
var right = x2 = get_entire_width(data);
var iterationCounter = 0;
var f_x1 = Number.MAX_VALUE;
var f_x2 = Number.MAX_VALUE;
var flag = -1; // determines which among f_x1 and f_x2 to recompute
var dx = Number.MAX_VALUE;
var df = Number.MAX_VALUE;
while ((dx > min_width) || df > packingOptions.FLOAT_EPSILON) {
if (flag != 1) {
var x1 = right - (right - left) / packingOptions.GOLDEN_SECTION;
var f_x1 = step(data, x1);
}
if (flag != 0) {
var x2 = left + (right - left) / packingOptions.GOLDEN_SECTION;
var f_x2 = step(data, x2);
}
dx = Math.abs(x1 - x2);
df = Math.abs(f_x1 - f_x2);
if (f_x1 < curr_best_f) {
curr_best_f = f_x1;
curr_best = x1;
}
if (f_x2 < curr_best_f) {
curr_best_f = f_x2;
curr_best = x2;
}
if (f_x1 > f_x2) {
left = x1;
x1 = x2;
f_x1 = f_x2;
flag = 1;
} else {
right = x2;
x2 = x1;
f_x2 = f_x1;
flag = 0;
}
if (iterationCounter++ > 100) {
break;
}
}
// plot(data, min_width, get_entire_width(data), curr_best, curr_best_f);
step(data, curr_best);
}
// one iteration of the optimization method
// (gives a proper, but not necessarily optimal packing)
function step(data, max_width) {
line = [];
real_width = 0;
real_height = 0;
global_bottom = init_y;
for (var i = 0; i < data.length; i++) {
var o = data[i];
put_rect(o, max_width);
}
return Math.abs(get_real_ratio() - desired_ratio);
}
// looks for a position for one box
function put_rect(rect, max_width) {
var parent = undefined;
for (var i = 0; i < line.length; i++) {
if ((line[i].space_left >= rect.height) && (line[i].x + line[i].width + rect.width + packingOptions.PADDING - max_width) <= packingOptions.FLOAT_EPSILON) {
parent = line[i];
break;
}
}
line.push(rect);
if (parent !== undefined) {
rect.x = parent.x + parent.width + packingOptions.PADDING;
rect.y = parent.bottom;
rect.space_left = rect.height;
rect.bottom = rect.y;
parent.space_left -= rect.height + packingOptions.PADDING;
parent.bottom += rect.height + packingOptions.PADDING;
} else {
rect.y = global_bottom;
global_bottom += rect.height + packingOptions.PADDING;
rect.x = init_x;
rect.bottom = rect.y;
rect.space_left = rect.height;
}
if (rect.y + rect.height - real_height > -packingOptions.FLOAT_EPSILON) real_height = rect.y + rect.height - init_y;
if (rect.x + rect.width - real_width > -packingOptions.FLOAT_EPSILON) real_width = rect.x + rect.width - init_x;
};
function get_entire_width(data) {
var width = 0;
data.forEach(function (d) { return width += d.width + packingOptions.PADDING; });
return width;
}
function get_real_ratio() {
return (real_width / real_height);
}
}
/**
* connected components of graph
* returns an array of components, each of the form { array: [nodes] }
*/
export function separateGraphs(nodes, links) {
var marks = {};
var ways = {};
var graphs = [];
var clusters = 0;
for (var i = 0; i < links.length; i++) {
var link = links[i];
var n1 = link.source;
var n2 = link.target;
if (ways[n1.index])
ways[n1.index].push(n2);
else
ways[n1.index] = [n2];
if (ways[n2.index])
ways[n2.index].push(n1);
else
ways[n2.index] = [n1];
}
for (var i = 0; i < nodes.length; i++) {
var node = nodes[i];
if (marks[node.index]) continue;
explore_node(node, true);
}
function explore_node(n, is_new) {
if (marks[n.index] !== undefined) return;
if (is_new) {
clusters++;
graphs.push({ array: [] });
}
marks[n.index] = clusters;
graphs[clusters - 1].array.push(n);
var adjacent = ways[n.index];
|
}
return graphs;
}
}
|
if (!adjacent) return;
for (var j = 0; j < adjacent.length; j++) {
explore_node(adjacent[j], false);
}
|
random_line_split
|
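The step/put_rect pair in the rows above is essentially shelf packing: boxes are taken tallest-first, appended to an existing "line" when one still has room within max_width, and otherwise start a new line at global_bottom. A stripped-down sketch of that shelf idea follows; it is simpler than the original (which also stacks smaller boxes under a taller neighbour via space_left/bottom), and the Box/shelfPack names are invented:

// Illustrative shelf packing (simplified; not the original cola put_rect).
interface Box { width: number; height: number; x?: number; y?: number; }

function shelfPack(boxes: Box[], maxWidth: number, padding = 10): void {
    // Tallest first, so each shelf's height is fixed by its first box.
    boxes.sort((a, b) => b.height - a.height);
    let shelfX = 0;   // next free x on the current shelf
    let shelfY = 0;   // top of the current shelf
    let shelfH = 0;   // height of the current shelf
    for (const box of boxes) {
        if (shelfX > 0 && shelfX + box.width > maxWidth) {
            // No room left on this shelf: open a new one below it.
            shelfY += shelfH + padding;
            shelfX = 0;
            shelfH = 0;
        }
        box.x = shelfX;
        box.y = shelfY;
        shelfX += box.width + padding;
        shelfH = Math.max(shelfH, box.height);
    }
}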
Toolbar.js
|
(function (enyo, scope) {
/**
* {@link onyx.Toolbar} is a horizontal bar containing controls used to perform
* common UI actions.
*
* A toolbar customizes the styling of the controls it hosts, including buttons,
* icons, and inputs.
*
* ```
* {kind: 'onyx.Toolbar', components: [
* {kind: 'onyx.Button', content: 'Favorites'},
* {kind: 'onyx.InputDecorator', components: [
* {kind: 'onyx.Input', placeholder: 'Enter a search term...'}
* ]},
* {kind: 'onyx.IconButton', src: 'go.png'}
* ]}
* ```
*
* @ui
|
* @public
*/
enyo.kind(
/** @lends onyx.Toolbar.prototype */ {
/**
* @private
*/
name: 'onyx.Toolbar',
/**
* @private
*/
classes: 'onyx onyx-toolbar onyx-toolbar-inline',
/**
* @private
*/
create: function (){
this.inherited(arguments);
//workaround for android 4.0.3 rendering glitch (ENYO-674)
if (this.hasClass('onyx-menu-toolbar') && (enyo.platform.android >= 4)){
this.applyStyle('position', 'static');
}
}
});
})(enyo, this);
|
* @class onyx.Toolbar
* @extends enyo.Control
|
random_line_split
|
Toolbar.js
|
(function (enyo, scope) {
/**
* {@link onyx.Toolbar} is a horizontal bar containing controls used to perform
* common UI actions.
*
* A toolbar customizes the styling of the controls it hosts, including buttons,
* icons, and inputs.
*
* ```
* {kind: 'onyx.Toolbar', components: [
* {kind: 'onyx.Button', content: 'Favorites'},
* {kind: 'onyx.InputDecorator', components: [
* {kind: 'onyx.Input', placeholder: 'Enter a search term...'}
* ]},
* {kind: 'onyx.IconButton', src: 'go.png'}
* ]}
* ```
*
* @ui
* @class onyx.Toolbar
* @extends enyo.Control
* @public
*/
enyo.kind(
/** @lends onyx.Toolbar.prototype */ {
/**
* @private
*/
name: 'onyx.Toolbar',
/**
* @private
*/
classes: 'onyx onyx-toolbar onyx-toolbar-inline',
/**
* @private
*/
create: function (){
this.inherited(arguments);
//workaround for android 4.0.3 rendering glitch (ENYO-674)
if (this.hasClass('onyx-menu-toolbar') && (enyo.platform.android >= 4))
|
}
});
})(enyo, this);
|
{
this.applyStyle('position', 'static');
}
|
conditional_block
|
main.js
|
/*global require*/
'use strict';
require.config({
shim: {
underscore: {
exports: '_'
},
backbone: {
deps: [
'underscore',
'jquery'
],
exports: 'Backbone'
},
foundation: {
deps: ['jquery'],
exports: 'foundation'
},
downloadify: {
deps: ['jquery'],
exports: 'downloadify'
},
swfobject: {
deps: [],
exports: 'swfobject'
}
},
paths: {
jquery: '../vendor/jquery/jquery.min',
backbone: '../vendor/backbone/backbone-min',
underscore: '../vendor/underscore/underscore-min',
foundation: '../vendor/foundation/foundation.min',
text: '../vendor/requirejs-text/text',
d3: '../vendor/d3/d3.v3.min',
sylvester: '../vendor/sylvester/sylvester',
downloadify: '../vendor/downloadify/js/downloadify.min',
swfobject: '../vendor/downloadify/js/swfobject',
detect: '../vendor/detect/detect.min'
}
});
require([
'jquery',
'backbone',
'underscore',
'routes/app'
|
Backbone.history.start();
});
function flashInitialized() {
window.mentalmodeler.appModel.start();
}
|
], function ( $, Backbone, _, App ) {
window.mentalmodeler = new App();
|
random_line_split
|
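The shim entries in the require.config above wrap the non-AMD scripts (underscore, foundation, downloadify, swfobject) so they can be required like regular modules: the listed deps are loaded first and exports names the global that RequireJS hands back. A small hedged sketch of a module consuming two of those shims; the module body and firstThree helper are invented, and the ambient define declaration is only there so the snippet stands alone without @types/requirejs:

// Ambient declaration so this sketch type-checks on its own.
declare function define(deps: string[], factory: (...args: any[]) => any): any;

define(['underscore', 'foundation'], function (_: any, foundation: any) {
    // `_` is the underscore global exposed via `exports: '_'`;
    // foundation only loads after its declared jquery dependency.
    return {
        firstThree: (xs: number[]) => _.first(xs, 3)
    };
});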
main.js
|
/*global require*/
'use strict';
require.config({
shim: {
underscore: {
exports: '_'
},
backbone: {
deps: [
'underscore',
'jquery'
],
exports: 'Backbone'
},
foundation: {
deps: ['jquery'],
exports: 'foundation'
},
downloadify: {
deps: ['jquery'],
exports: 'downloadify'
},
swfobject: {
deps: [],
exports: 'swfobject'
}
},
paths: {
jquery: '../vendor/jquery/jquery.min',
backbone: '../vendor/backbone/backbone-min',
underscore: '../vendor/underscore/underscore-min',
foundation: '../vendor/foundation/foundation.min',
text: '../vendor/requirejs-text/text',
d3: '../vendor/d3/d3.v3.min',
sylvester: '../vendor/sylvester/sylvester',
downloadify: '../vendor/downloadify/js/downloadify.min',
swfobject: '../vendor/downloadify/js/swfobject',
detect: '../vendor/detect/detect.min'
}
});
require([
'jquery',
'backbone',
'underscore',
'routes/app'
], function ( $, Backbone, _, App ) {
window.mentalmodeler = new App();
Backbone.history.start();
});
function flashInitialized()
|
{
window.mentalmodeler.appModel.start();
}
|
identifier_body
|
|
main.js
|
/*global require*/
'use strict';
require.config({
shim: {
underscore: {
exports: '_'
},
backbone: {
deps: [
'underscore',
'jquery'
],
exports: 'Backbone'
},
foundation: {
deps: ['jquery'],
exports: 'foundation'
},
downloadify: {
deps: ['jquery'],
exports: 'downloadify'
},
swfobject: {
deps: [],
exports: 'swfobject'
}
},
paths: {
jquery: '../vendor/jquery/jquery.min',
backbone: '../vendor/backbone/backbone-min',
underscore: '../vendor/underscore/underscore-min',
foundation: '../vendor/foundation/foundation.min',
text: '../vendor/requirejs-text/text',
d3: '../vendor/d3/d3.v3.min',
sylvester: '../vendor/sylvester/sylvester',
downloadify: '../vendor/downloadify/js/downloadify.min',
swfobject: '../vendor/downloadify/js/swfobject',
detect: '../vendor/detect/detect.min'
}
});
require([
'jquery',
'backbone',
'underscore',
'routes/app'
], function ( $, Backbone, _, App ) {
window.mentalmodeler = new App();
Backbone.history.start();
});
function
|
() {
window.mentalmodeler.appModel.start();
}
|
flashInitialized
|
identifier_name
|
cluster_handler.py
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need to download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
|
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Calin Pavel <[email protected]>
"""
import os
import logging
from logging.handlers import MemoryHandler
from tvb.basic.profile import TvbProfile
from tvb.basic.logger.simple_handler import SimpleTimedRotatingFileHandler
class ClusterTimedRotatingFileHandler(MemoryHandler):
"""
This is a custom rotating file handler which computes the name of the file depending on the
execution environment (web node or cluster node)
"""
# Name of the log file where code from Web application will be stored
WEB_LOG_FILE = "web_application.log"
# Name of the file where to write logs from the code executed on cluster nodes
CLUSTER_NODES_LOG_FILE = "operations_executions.log"
# Size of the buffer which stores log entries in memory
# in number of lines
BUFFER_CAPACITY = 20
def __init__(self, when='h', interval=1, backupCount=0):
"""
Constructor for logging formatter.
"""
# Formatting string
format_str = '%(asctime)s - %(levelname)s'
if TvbProfile.current.cluster.IN_OPERATION_EXECUTION_PROCESS:
log_file = self.CLUSTER_NODES_LOG_FILE
if TvbProfile.current.cluster.IS_RUNNING_ON_CLUSTER_NODE:
node_name = TvbProfile.current.cluster.CLUSTER_NODE_NAME
if node_name is not None:
format_str += ' [node:' + str(node_name) + '] '
else:
format_str += ' [proc:' + str(os.getpid()) + '] '
else:
log_file = self.WEB_LOG_FILE
format_str += ' - %(name)s - %(message)s'
rotating_file_handler = SimpleTimedRotatingFileHandler(log_file, when, interval, backupCount)
rotating_file_handler.setFormatter(logging.Formatter(format_str))
MemoryHandler.__init__(self, capacity=self.BUFFER_CAPACITY, target=rotating_file_handler)
|
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
|
random_line_split
|
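ClusterTimedRotatingFileHandler above buffers up to BUFFER_CAPACITY records in memory (via MemoryHandler) and flushes them to a timed rotating file handler whose file name and format depend on whether the code runs as a web process or on a cluster node. A minimal sketch of that buffer-then-flush pattern, written in TypeScript for consistency with the other examples in this section rather than against Python's logging API; BufferedLogger and LogSink are invented names:

// Illustrative buffer-then-flush logger (pattern sketch only).
type LogSink = (lines: string[]) => void;

class BufferedLogger {
    private buffer: string[] = [];
    constructor(private capacity: number, private sink: LogSink) {}

    log(message: string): void {
        this.buffer.push(`${new Date().toISOString()} - ${message}`);
        if (this.buffer.length >= this.capacity) {
            this.flush();   // capacity reached: hand the whole batch to the sink
        }
    }

    flush(): void {
        if (this.buffer.length === 0) return;
        this.sink(this.buffer);
        this.buffer = [];
    }
}

// Example: flush every 20 entries to the console (stand-in for the rotating file target).
const bufferedLogger = new BufferedLogger(20, lines => lines.forEach(l => console.log(l)));
bufferedLogger.log('operation started');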
cluster_handler.py
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need to download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Calin Pavel <[email protected]>
"""
import os
import logging
from logging.handlers import MemoryHandler
from tvb.basic.profile import TvbProfile
from tvb.basic.logger.simple_handler import SimpleTimedRotatingFileHandler
class ClusterTimedRotatingFileHandler(MemoryHandler):
"""
This is a custom rotating file handler which computes the name of the file depending on the
execution environment (web node or cluster node)
"""
# Name of the log file where code from Web application will be stored
WEB_LOG_FILE = "web_application.log"
# Name of the file where to write logs from the code executed on cluster nodes
CLUSTER_NODES_LOG_FILE = "operations_executions.log"
# Size of the buffer which stores log entries in memory
# in number of lines
BUFFER_CAPACITY = 20
def __init__(self, when='h', interval=1, backupCount=0):
"""
Constructor for logging formatter.
"""
# Formatting string
format_str = '%(asctime)s - %(levelname)s'
if TvbProfile.current.cluster.IN_OPERATION_EXECUTION_PROCESS:
log_file = self.CLUSTER_NODES_LOG_FILE
if TvbProfile.current.cluster.IS_RUNNING_ON_CLUSTER_NODE:
node_name = TvbProfile.current.cluster.CLUSTER_NODE_NAME
if node_name is not None:
format_str += ' [node:' + str(node_name) + '] '
else:
|
else:
log_file = self.WEB_LOG_FILE
format_str += ' - %(name)s - %(message)s'
rotating_file_handler = SimpleTimedRotatingFileHandler(log_file, when, interval, backupCount)
rotating_file_handler.setFormatter(logging.Formatter(format_str))
MemoryHandler.__init__(self, capacity=self.BUFFER_CAPACITY, target=rotating_file_handler)
|
format_str += ' [proc:' + str(os.getpid()) + '] '
|
conditional_block
|
cluster_handler.py
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need to download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Calin Pavel <[email protected]>
"""
import os
import logging
from logging.handlers import MemoryHandler
from tvb.basic.profile import TvbProfile
from tvb.basic.logger.simple_handler import SimpleTimedRotatingFileHandler
class ClusterTimedRotatingFileHandler(MemoryHandler):
"""
This is a custom rotating file handler which computes the name of the file depending on the
execution environment (web node or cluster node)
"""
# Name of the log file where code from Web application will be stored
WEB_LOG_FILE = "web_application.log"
# Name of the file where to write logs from the code executed on cluster nodes
CLUSTER_NODES_LOG_FILE = "operations_executions.log"
# Size of the buffer which stores log entries in memory
# in number of lines
BUFFER_CAPACITY = 20
def __init__(self, when='h', interval=1, backupCount=0):
|
"""
Constructor for logging formatter.
"""
# Formatting string
format_str = '%(asctime)s - %(levelname)s'
if TvbProfile.current.cluster.IN_OPERATION_EXECUTION_PROCESS:
log_file = self.CLUSTER_NODES_LOG_FILE
if TvbProfile.current.cluster.IS_RUNNING_ON_CLUSTER_NODE:
node_name = TvbProfile.current.cluster.CLUSTER_NODE_NAME
if node_name is not None:
format_str += ' [node:' + str(node_name) + '] '
else:
format_str += ' [proc:' + str(os.getpid()) + '] '
else:
log_file = self.WEB_LOG_FILE
format_str += ' - %(name)s - %(message)s'
rotating_file_handler = SimpleTimedRotatingFileHandler(log_file, when, interval, backupCount)
rotating_file_handler.setFormatter(logging.Formatter(format_str))
MemoryHandler.__init__(self, capacity=self.BUFFER_CAPACITY, target=rotating_file_handler)
|
identifier_body
|
|
cluster_handler.py
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need to download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Calin Pavel <[email protected]>
"""
import os
import logging
from logging.handlers import MemoryHandler
from tvb.basic.profile import TvbProfile
from tvb.basic.logger.simple_handler import SimpleTimedRotatingFileHandler
class
|
(MemoryHandler):
"""
This is a custom rotating file handler which computes the name of the file depending on the
execution environment (web node or cluster node)
"""
# Name of the log file where code from Web application will be stored
WEB_LOG_FILE = "web_application.log"
# Name of the file where to write logs from the code executed on cluster nodes
CLUSTER_NODES_LOG_FILE = "operations_executions.log"
# Size of the buffer which stores log entries in memory
# in number of lines
BUFFER_CAPACITY = 20
def __init__(self, when='h', interval=1, backupCount=0):
"""
Constructor for logging formatter.
"""
# Formatting string
format_str = '%(asctime)s - %(levelname)s'
if TvbProfile.current.cluster.IN_OPERATION_EXECUTION_PROCESS:
log_file = self.CLUSTER_NODES_LOG_FILE
if TvbProfile.current.cluster.IS_RUNNING_ON_CLUSTER_NODE:
node_name = TvbProfile.current.cluster.CLUSTER_NODE_NAME
if node_name is not None:
format_str += ' [node:' + str(node_name) + '] '
else:
format_str += ' [proc:' + str(os.getpid()) + '] '
else:
log_file = self.WEB_LOG_FILE
format_str += ' - %(name)s - %(message)s'
rotating_file_handler = SimpleTimedRotatingFileHandler(log_file, when, interval, backupCount)
rotating_file_handler.setFormatter(logging.Formatter(format_str))
MemoryHandler.__init__(self, capacity=self.BUFFER_CAPACITY, target=rotating_file_handler)
|
ClusterTimedRotatingFileHandler
|
identifier_name
|
arrays.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
/* eslint-disable */
export namespace vscMockArrays {
/**
* Returns the last element of an array.
* @param array The array.
* @param n Which element from the end (default is zero).
*/
export function tail<T>(array: T[], n: number = 0): T {
return array[array.length - (1 + n)];
}
export function equals<T>(one: T[], other: T[], itemEquals: (a: T, b: T) => boolean = (a, b) => a === b): boolean {
if (one.length !== other.length) {
return false;
}
for (let i = 0, len = one.length; i < len; i++) {
if (!itemEquals(one[i], other[i])) {
return false;
}
}
return true;
}
export function binarySearch<T>(array: T[], key: T, comparator: (op1: T, op2: T) => number): number {
let low = 0,
high = array.length - 1;
while (low <= high) {
let mid = ((low + high) / 2) | 0;
let comp = comparator(array[mid], key);
if (comp < 0) {
low = mid + 1;
} else if (comp > 0) {
high = mid - 1;
} else {
return mid;
}
}
return -(low + 1);
}
/**
* Takes a sorted array and a function p. The array is sorted in such a way that all elements where p(x) is false
* are located before all elements where p(x) is true.
* @returns the least x for which p(x) is true or array.length if no element fulfills the given function.
*/
export function findFirst<T>(array: T[], p: (x: T) => boolean): number {
let low = 0,
high = array.length;
if (high === 0) {
return 0; // no children
}
while (low < high) {
let mid = Math.floor((low + high) / 2);
if (p(array[mid])) {
high = mid;
} else {
low = mid + 1;
}
}
return low;
}
/**
* Like `Array#sort` but always stable. Usually runs a little slower than `Array#sort`
* so only use this when actually needing stable sort.
*/
export function mergeSort<T>(data: T[], compare: (a: T, b: T) => number): T[] {
_divideAndMerge(data, compare);
return data;
}
function _divideAndMerge<T>(data: T[], compare: (a: T, b: T) => number): void {
if (data.length <= 1) {
// sorted
return;
}
const p = (data.length / 2) | 0;
const left = data.slice(0, p);
const right = data.slice(p);
_divideAndMerge(left, compare);
_divideAndMerge(right, compare);
let leftIdx = 0;
let rightIdx = 0;
let i = 0;
while (leftIdx < left.length && rightIdx < right.length) {
let ret = compare(left[leftIdx], right[rightIdx]);
if (ret <= 0) {
// smaller_equal -> take left to preserve order
data[i++] = left[leftIdx++];
} else {
// greater -> take right
data[i++] = right[rightIdx++];
}
}
while (leftIdx < left.length) {
data[i++] = left[leftIdx++];
}
while (rightIdx < right.length) {
data[i++] = right[rightIdx++];
}
}
export function groupBy<T>(data: T[], compare: (a: T, b: T) => number): T[][] {
const result: T[][] = [];
let currentGroup: T[];
for (const element of mergeSort(data.slice(0), compare)) {
// @ts-ignore
if (!currentGroup || compare(currentGroup[0], element) !== 0) {
currentGroup = [element];
result.push(currentGroup);
} else {
currentGroup.push(element);
}
}
return result;
}
type IMutableSplice<T> = Array<T> &
any & {
deleteCount: number;
};
type ISplice<T> = Array<T> & any;
/**
* Diffs two *sorted* arrays and computes the splices which apply the diff.
*/
export function sortedDiff<T>(before: T[], after: T[], compare: (a: T, b: T) => number): ISplice<T>[] {
const result: IMutableSplice<T>[] = [];
function pushSplice(start: number, deleteCount: number, toInsert: T[]): void {
if (deleteCount === 0 && toInsert.length === 0) {
return;
}
const latest = result[result.length - 1];
if (latest && latest.start + latest.deleteCount === start) {
latest.deleteCount += deleteCount;
latest.toInsert.push(...toInsert);
} else {
result.push({ start, deleteCount, toInsert });
}
}
let beforeIdx = 0;
let afterIdx = 0;
while (true) {
if (beforeIdx === before.length) {
pushSplice(beforeIdx, 0, after.slice(afterIdx));
break;
}
if (afterIdx === after.length) {
pushSplice(beforeIdx, before.length - beforeIdx, []);
break;
}
const beforeElement = before[beforeIdx];
const afterElement = after[afterIdx];
const n = compare(beforeElement, afterElement);
if (n === 0) {
// equal
beforeIdx += 1;
afterIdx += 1;
} else if (n < 0) {
// beforeElement is smaller -> before element removed
pushSplice(beforeIdx, 1, []);
beforeIdx += 1;
} else if (n > 0) {
// beforeElement is greater -> after element added
pushSplice(beforeIdx, 0, [afterElement]);
afterIdx += 1;
}
}
return result;
}
/**
* Takes two *sorted* arrays and computes their delta (removed, added elements).
* Finishes in `Math.min(before.length, after.length)` steps.
* @param before
* @param after
* @param compare
*/
export function delta<T>(before: T[], after: T[], compare: (a: T, b: T) => number): { removed: T[]; added: T[] } {
const splices = sortedDiff(before, after, compare);
const removed: T[] = [];
const added: T[] = [];
for (const splice of splices) {
removed.push(...before.slice(splice.start, splice.start + splice.deleteCount));
added.push(...splice.toInsert);
}
return { removed, added };
}
/**
* Returns the top N elements from the array.
*
* Faster than sorting the entire array when the array is a lot larger than N.
*
* @param array The unsorted array.
* @param compare A sort function for the elements.
* @param n The number of elements to return.
* @return The first n elements from array when sorted with compare.
*/
export function top<T>(array: T[], compare: (a: T, b: T) => number, n: number): T[] {
if (n === 0) {
return [];
}
const result = array.slice(0, n).sort(compare);
topStep(array, compare, result, n, array.length);
return result;
}
function topStep<T>(array: T[], compare: (a: T, b: T) => number, result: T[], i: number, m: number): void {
for (const n = result.length; i < m; i++) {
const element = array[i];
if (compare(element, result[n - 1]) < 0) {
result.pop();
const j = findFirst(result, e => compare(element, e) < 0);
|
}
}
/**
* @returns a new array with all undefined or null values removed. The original array is not modified at all.
*/
export function coalesce<T>(array: T[]): T[] {
if (!array) {
return array;
}
return array.filter(e => !!e);
}
/**
* Moves the element in the array for the provided positions.
*/
export function move(array: any[], from: number, to: number): void {
array.splice(to, 0, array.splice(from, 1)[0]);
}
/**
* @returns {{true}} if the provided object is not an array
* or is an empty array.
*/
export function isFalsyOrEmpty(obj: any): boolean {
return !Array.isArray(obj) || (<Array<any>>obj).length === 0;
}
/**
* Removes duplicates from the given array. The optional keyFn allows specifying
* how elements are checked for equality by returning a unique string for each.
*/
export function distinct<T>(array: T[], keyFn?: (t: T) => string): T[] {
if (!keyFn) {
return array.filter((element, position) => {
return array.indexOf(element) === position;
});
}
const seen: Record<string, boolean> = Object.create(null);
return array.filter(elem => {
const key = keyFn(elem);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
});
}
export function uniqueFilter<T>(keyFn: (t: T) => string): (t: T) => boolean {
const seen: Record<string, boolean> = Object.create(null);
return element => {
const key = keyFn(element);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
};
}
export function firstIndex<T>(array: T[], fn: (item: T) => boolean): number {
for (let i = 0; i < array.length; i++) {
const element = array[i];
if (fn(element)) {
return i;
}
}
return -1;
}
// @ts-ignore
export function first<T>(array: T[], fn: (item: T) => boolean, notFoundValue: T = null): T {
const index = firstIndex(array, fn);
return index < 0 ? notFoundValue : array[index];
}
export function commonPrefixLength<T>(
one: T[],
other: T[],
equals: (a: T, b: T) => boolean = (a, b) => a === b
): number {
let result = 0;
for (let i = 0, len = Math.min(one.length, other.length); i < len && equals(one[i], other[i]); i++) {
result++;
}
return result;
}
export function flatten<T>(arr: T[][]): T[] {
// @ts-ignore
return [].concat(...arr);
}
export function range(to: number): number[];
export function range(from: number, to: number): number[];
export function range(arg: number, to?: number): number[] {
let from = typeof to === 'number' ? arg : 0;
if (typeof to === 'number') {
from = arg;
} else {
from = 0;
to = arg;
}
const result: number[] = [];
if (from <= to) {
for (let i = from; i < to; i++) {
result.push(i);
}
} else {
for (let i = from; i > to; i--) {
result.push(i);
}
}
return result;
}
export function fill<T>(num: number, valueFn: () => T, arr: T[] = []): T[] {
for (let i = 0; i < num; i++) {
arr[i] = valueFn();
}
return arr;
}
export function index<T>(array: T[], indexer: (t: T) => string): Record<string, T>;
export function index<T, R>(array: T[], indexer: (t: T) => string, merger?: (t: T, r: R) => R): Record<string, R>;
export function index<T, R>(
array: T[],
indexer: (t: T) => string,
merger: (t: T, r: R) => R = t => t as any
): Record<string, R> {
return array.reduce((r, t) => {
const key = indexer(t);
r[key] = merger(t, r[key]);
return r;
}, Object.create(null));
}
/**
* Inserts an element into an array. Returns a function which, when
* called, will remove that element from the array.
*/
export function insert<T>(array: T[], element: T): () => void {
array.push(element);
return () => {
const index = array.indexOf(element);
if (index > -1) {
array.splice(index, 1);
}
};
}
/**
* Insert `insertArr` inside `target` at `insertIndex`.
* Please don't touch unless you understand https://jsperf.com/inserting-an-array-within-an-array
*/
export function arrayInsert<T>(target: T[], insertIndex: number, insertArr: T[]): T[] {
const before = target.slice(0, insertIndex);
const after = target.slice(insertIndex);
return before.concat(insertArr, after);
}
}
|
result.splice(j, 0, element);
}
|
random_line_split
|
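sortedDiff and delta in the row above assume both inputs are already sorted under the comparator they receive; delta simply flattens the computed splices into removed/added lists. A short usage sketch, assuming the vscMockArrays namespace is in scope; the numeric arrays are invented for illustration:

// Both arrays must be sorted with the comparator passed in.
const before = [1, 3, 5, 7];
const after = [1, 4, 5, 8];
const cmp = (a: number, b: number) => a - b;

const { removed, added } = vscMockArrays.delta(before, after, cmp);
// removed === [3, 7], added === [4, 8]

const splices = vscMockArrays.sortedDiff(before, after, cmp);
// splices === [ { start: 1, deleteCount: 1, toInsert: [4] },
//               { start: 3, deleteCount: 1, toInsert: [8] } ]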
arrays.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
/* eslint-disable */
export namespace vscMockArrays {
/**
* Returns the last element of an array.
* @param array The array.
* @param n Which element from the end (default is zero).
*/
export function tail<T>(array: T[], n: number = 0): T {
return array[array.length - (1 + n)];
}
export function equals<T>(one: T[], other: T[], itemEquals: (a: T, b: T) => boolean = (a, b) => a === b): boolean {
if (one.length !== other.length) {
return false;
}
for (let i = 0, len = one.length; i < len; i++) {
if (!itemEquals(one[i], other[i])) {
return false;
}
}
return true;
}
export function binarySearch<T>(array: T[], key: T, comparator: (op1: T, op2: T) => number): number {
let low = 0,
high = array.length - 1;
while (low <= high) {
let mid = ((low + high) / 2) | 0;
let comp = comparator(array[mid], key);
if (comp < 0) {
low = mid + 1;
} else if (comp > 0) {
high = mid - 1;
} else {
return mid;
}
}
return -(low + 1);
}
/**
* Takes a sorted array and a function p. The array is sorted in such a way that all elements where p(x) is false
* are located before all elements where p(x) is true.
* @returns the least x for which p(x) is true or array.length if no element fulfills the given function.
*/
export function findFirst<T>(array: T[], p: (x: T) => boolean): number {
let low = 0,
high = array.length;
if (high === 0) {
return 0; // no children
}
while (low < high) {
let mid = Math.floor((low + high) / 2);
if (p(array[mid])) {
high = mid;
} else {
low = mid + 1;
}
}
return low;
}
/**
* Like `Array#sort` but always stable. Usually runs a little slower than `Array#sort`
* so only use this when actually needing stable sort.
*/
export function mergeSort<T>(data: T[], compare: (a: T, b: T) => number): T[] {
_divideAndMerge(data, compare);
return data;
}
function _divideAndMerge<T>(data: T[], compare: (a: T, b: T) => number): void {
if (data.length <= 1) {
// sorted
return;
}
const p = (data.length / 2) | 0;
const left = data.slice(0, p);
const right = data.slice(p);
_divideAndMerge(left, compare);
_divideAndMerge(right, compare);
let leftIdx = 0;
let rightIdx = 0;
let i = 0;
while (leftIdx < left.length && rightIdx < right.length) {
let ret = compare(left[leftIdx], right[rightIdx]);
if (ret <= 0) {
// smaller_equal -> take left to preserve order
data[i++] = left[leftIdx++];
} else {
// greater -> take right
data[i++] = right[rightIdx++];
}
}
while (leftIdx < left.length) {
data[i++] = left[leftIdx++];
}
while (rightIdx < right.length) {
data[i++] = right[rightIdx++];
}
}
export function groupBy<T>(data: T[], compare: (a: T, b: T) => number): T[][] {
const result: T[][] = [];
let currentGroup: T[];
for (const element of mergeSort(data.slice(0), compare)) {
// @ts-ignore
if (!currentGroup || compare(currentGroup[0], element) !== 0) {
currentGroup = [element];
result.push(currentGroup);
} else {
currentGroup.push(element);
}
}
return result;
}
type IMutableSplice<T> = Array<T> &
any & {
deleteCount: number;
};
type ISplice<T> = Array<T> & any;
/**
* Diffs two *sorted* arrays and computes the splices which apply the diff.
*/
export function sortedDiff<T>(before: T[], after: T[], compare: (a: T, b: T) => number): ISplice<T>[] {
const result: IMutableSplice<T>[] = [];
function pushSplice(start: number, deleteCount: number, toInsert: T[]): void {
if (deleteCount === 0 && toInsert.length === 0) {
return;
}
const latest = result[result.length - 1];
if (latest && latest.start + latest.deleteCount === start) {
latest.deleteCount += deleteCount;
latest.toInsert.push(...toInsert);
} else {
result.push({ start, deleteCount, toInsert });
}
}
let beforeIdx = 0;
let afterIdx = 0;
while (true) {
if (beforeIdx === before.length) {
pushSplice(beforeIdx, 0, after.slice(afterIdx));
break;
}
if (afterIdx === after.length) {
pushSplice(beforeIdx, before.length - beforeIdx, []);
break;
}
const beforeElement = before[beforeIdx];
const afterElement = after[afterIdx];
const n = compare(beforeElement, afterElement);
if (n === 0) {
// equal
beforeIdx += 1;
afterIdx += 1;
} else if (n < 0) {
// beforeElement is smaller -> before element removed
pushSplice(beforeIdx, 1, []);
beforeIdx += 1;
} else if (n > 0) {
// beforeElement is greater -> after element added
pushSplice(beforeIdx, 0, [afterElement]);
afterIdx += 1;
}
}
return result;
}
/**
* Takes two *sorted* arrays and computes their delta (removed, added elements).
* Finishes in `Math.min(before.length, after.length)` steps.
* @param before
* @param after
* @param compare
*/
export function delta<T>(before: T[], after: T[], compare: (a: T, b: T) => number): { removed: T[]; added: T[] } {
const splices = sortedDiff(before, after, compare);
const removed: T[] = [];
const added: T[] = [];
for (const splice of splices) {
removed.push(...before.slice(splice.start, splice.start + splice.deleteCount));
added.push(...splice.toInsert);
}
return { removed, added };
}
/**
* Returns the top N elements from the array.
*
* Faster than sorting the entire array when the array is a lot larger than N.
*
* @param array The unsorted array.
* @param compare A sort function for the elements.
* @param n The number of elements to return.
* @return The first n elements from array when sorted with compare.
*/
export function top<T>(array: T[], compare: (a: T, b: T) => number, n: number): T[] {
if (n === 0) {
return [];
}
const result = array.slice(0, n).sort(compare);
topStep(array, compare, result, n, array.length);
return result;
}
function topStep<T>(array: T[], compare: (a: T, b: T) => number, result: T[], i: number, m: number): void {
for (const n = result.length; i < m; i++) {
const element = array[i];
if (compare(element, result[n - 1]) < 0) {
result.pop();
const j = findFirst(result, e => compare(element, e) < 0);
result.splice(j, 0, element);
}
}
}
/**
* @returns a new array with all undefined or null values removed. The original array is not modified at all.
*/
export function coalesce<T>(array: T[]): T[]
|
/**
* Moves the element in the array for the provided positions.
*/
export function move(array: any[], from: number, to: number): void {
array.splice(to, 0, array.splice(from, 1)[0]);
}
/**
* @returns {{true}} if the provided object is not an array
* or is an empty array.
*/
export function isFalsyOrEmpty(obj: any): boolean {
return !Array.isArray(obj) || (<Array<any>>obj).length === 0;
}
/**
* Removes duplicates from the given array. The optional keyFn allows specifying
* how elements are checked for equality by returning a unique string for each.
*/
export function distinct<T>(array: T[], keyFn?: (t: T) => string): T[] {
if (!keyFn) {
return array.filter((element, position) => {
return array.indexOf(element) === position;
});
}
const seen: Record<string, boolean> = Object.create(null);
return array.filter(elem => {
const key = keyFn(elem);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
});
}
export function uniqueFilter<T>(keyFn: (t: T) => string): (t: T) => boolean {
const seen: Record<string, boolean> = Object.create(null);
return element => {
const key = keyFn(element);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
};
}
export function firstIndex<T>(array: T[], fn: (item: T) => boolean): number {
for (let i = 0; i < array.length; i++) {
const element = array[i];
if (fn(element)) {
return i;
}
}
return -1;
}
// @ts-ignore
export function first<T>(array: T[], fn: (item: T) => boolean, notFoundValue: T = null): T {
const index = firstIndex(array, fn);
return index < 0 ? notFoundValue : array[index];
}
export function commonPrefixLength<T>(
one: T[],
other: T[],
equals: (a: T, b: T) => boolean = (a, b) => a === b
): number {
let result = 0;
for (let i = 0, len = Math.min(one.length, other.length); i < len && equals(one[i], other[i]); i++) {
result++;
}
return result;
}
export function flatten<T>(arr: T[][]): T[] {
// @ts-ignore
return [].concat(...arr);
}
export function range(to: number): number[];
export function range(from: number, to: number): number[];
export function range(arg: number, to?: number): number[] {
let from = typeof to === 'number' ? arg : 0;
if (typeof to === 'number') {
from = arg;
} else {
from = 0;
to = arg;
}
const result: number[] = [];
if (from <= to) {
for (let i = from; i < to; i++) {
result.push(i);
}
} else {
for (let i = from; i > to; i--) {
result.push(i);
}
}
return result;
}
export function fill<T>(num: number, valueFn: () => T, arr: T[] = []): T[] {
for (let i = 0; i < num; i++) {
arr[i] = valueFn();
}
return arr;
}
export function index<T>(array: T[], indexer: (t: T) => string): Record<string, T>;
export function index<T, R>(array: T[], indexer: (t: T) => string, merger?: (t: T, r: R) => R): Record<string, R>;
export function index<T, R>(
array: T[],
indexer: (t: T) => string,
merger: (t: T, r: R) => R = t => t as any
): Record<string, R> {
return array.reduce((r, t) => {
const key = indexer(t);
r[key] = merger(t, r[key]);
return r;
}, Object.create(null));
}
/**
* Inserts an element into an array. Returns a function which, when
* called, will remove that element from the array.
*/
export function insert<T>(array: T[], element: T): () => void {
array.push(element);
return () => {
const index = array.indexOf(element);
if (index > -1) {
array.splice(index, 1);
}
};
}
/**
* Insert `insertArr` inside `target` at `insertIndex`.
* Please don't touch unless you understand https://jsperf.com/inserting-an-array-within-an-array
*/
export function arrayInsert<T>(target: T[], insertIndex: number, insertArr: T[]): T[] {
const before = target.slice(0, insertIndex);
const after = target.slice(insertIndex);
return before.concat(insertArr, after);
}
}
|
{
if (!array) {
return array;
}
return array.filter(e => !!e);
}
|
identifier_body
|
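top in the row above sorts only the first n elements and then streams the remainder through topStep, replacing the current worst entry whenever a smaller element turns up, which is why it beats a full sort when n is much smaller than the array. A short usage sketch, again assuming vscMockArrays is in scope and using invented sample data:

const byValue = (a: number, b: number) => a - b;
// The three smallest values, without sorting the whole array.
const smallest = vscMockArrays.top([9, 4, 7, 1, 8, 3, 6], byValue, 3);
// smallest === [1, 3, 4]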
arrays.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
/* eslint-disable */
export namespace vscMockArrays {
/**
* Returns the last element of an array.
* @param array The array.
* @param n Which element from the end (default is zero).
*/
export function tail<T>(array: T[], n: number = 0): T {
return array[array.length - (1 + n)];
}
export function equals<T>(one: T[], other: T[], itemEquals: (a: T, b: T) => boolean = (a, b) => a === b): boolean {
if (one.length !== other.length) {
return false;
}
for (let i = 0, len = one.length; i < len; i++) {
if (!itemEquals(one[i], other[i])) {
return false;
}
}
return true;
}
export function binarySearch<T>(array: T[], key: T, comparator: (op1: T, op2: T) => number): number {
let low = 0,
high = array.length - 1;
while (low <= high) {
let mid = ((low + high) / 2) | 0;
let comp = comparator(array[mid], key);
if (comp < 0) {
low = mid + 1;
} else if (comp > 0) {
high = mid - 1;
} else {
return mid;
}
}
return -(low + 1);
}
/**
* Takes a sorted array and a function p. The array is sorted in such a way that all elements where p(x) is false
* are located before all elements where p(x) is true.
* @returns the least x for which p(x) is true or array.length if no element fulfills the given function.
*/
export function findFirst<T>(array: T[], p: (x: T) => boolean): number {
let low = 0,
high = array.length;
if (high === 0) {
return 0; // no children
}
while (low < high) {
let mid = Math.floor((low + high) / 2);
if (p(array[mid])) {
high = mid;
} else {
low = mid + 1;
}
}
return low;
}
/**
* Like `Array#sort` but always stable. Usually runs a little slower than `Array#sort`
* so only use this when actually needing stable sort.
*/
export function mergeSort<T>(data: T[], compare: (a: T, b: T) => number): T[] {
_divideAndMerge(data, compare);
return data;
}
function _divideAndMerge<T>(data: T[], compare: (a: T, b: T) => number): void {
if (data.length <= 1) {
// sorted
return;
}
const p = (data.length / 2) | 0;
const left = data.slice(0, p);
const right = data.slice(p);
_divideAndMerge(left, compare);
_divideAndMerge(right, compare);
let leftIdx = 0;
let rightIdx = 0;
let i = 0;
while (leftIdx < left.length && rightIdx < right.length) {
let ret = compare(left[leftIdx], right[rightIdx]);
if (ret <= 0) {
// smaller_equal -> take left to preserve order
data[i++] = left[leftIdx++];
} else {
// greater -> take right
data[i++] = right[rightIdx++];
}
}
while (leftIdx < left.length) {
data[i++] = left[leftIdx++];
}
while (rightIdx < right.length) {
data[i++] = right[rightIdx++];
}
}
export function groupBy<T>(data: T[], compare: (a: T, b: T) => number): T[][] {
const result: T[][] = [];
let currentGroup: T[];
for (const element of mergeSort(data.slice(0), compare)) {
// @ts-ignore
if (!currentGroup || compare(currentGroup[0], element) !== 0) {
currentGroup = [element];
result.push(currentGroup);
} else {
currentGroup.push(element);
}
}
return result;
}
type IMutableSplice<T> = Array<T> &
any & {
deleteCount: number;
};
type ISplice<T> = Array<T> & any;
/**
* Diffs two *sorted* arrays and computes the splices which apply the diff.
*/
export function sortedDiff<T>(before: T[], after: T[], compare: (a: T, b: T) => number): ISplice<T>[] {
const result: IMutableSplice<T>[] = [];
function pushSplice(start: number, deleteCount: number, toInsert: T[]): void {
if (deleteCount === 0 && toInsert.length === 0) {
return;
}
const latest = result[result.length - 1];
if (latest && latest.start + latest.deleteCount === start) {
latest.deleteCount += deleteCount;
latest.toInsert.push(...toInsert);
} else {
result.push({ start, deleteCount, toInsert });
}
}
let beforeIdx = 0;
let afterIdx = 0;
while (true) {
if (beforeIdx === before.length) {
pushSplice(beforeIdx, 0, after.slice(afterIdx));
break;
}
if (afterIdx === after.length) {
pushSplice(beforeIdx, before.length - beforeIdx, []);
break;
}
const beforeElement = before[beforeIdx];
const afterElement = after[afterIdx];
const n = compare(beforeElement, afterElement);
if (n === 0) {
// equal
beforeIdx += 1;
afterIdx += 1;
} else if (n < 0) {
// beforeElement is smaller -> before element removed
pushSplice(beforeIdx, 1, []);
beforeIdx += 1;
} else if (n > 0) {
// beforeElement is greater -> after element added
pushSplice(beforeIdx, 0, [afterElement]);
afterIdx += 1;
}
}
return result;
}
/**
* Takes two *sorted* arrays and computes their delta (removed, added elements).
* Finishes in `Math.min(before.length, after.length)` steps.
* @param before
* @param after
* @param compare
*/
export function delta<T>(before: T[], after: T[], compare: (a: T, b: T) => number): { removed: T[]; added: T[] } {
const splices = sortedDiff(before, after, compare);
const removed: T[] = [];
const added: T[] = [];
for (const splice of splices) {
removed.push(...before.slice(splice.start, splice.start + splice.deleteCount));
added.push(...splice.toInsert);
}
return { removed, added };
}
/**
* Returns the top N elements from the array.
*
* Faster than sorting the entire array when the array is a lot larger than N.
*
* @param array The unsorted array.
* @param compare A sort function for the elements.
* @param n The number of elements to return.
 * @return The first n elements from the array when sorted with compare.
*/
export function top<T>(array: T[], compare: (a: T, b: T) => number, n: number): T[] {
if (n === 0) {
return [];
}
const result = array.slice(0, n).sort(compare);
topStep(array, compare, result, n, array.length);
return result;
}
function topStep<T>(array: T[], compare: (a: T, b: T) => number, result: T[], i: number, m: number): void {
for (const n = result.length; i < m; i++) {
const element = array[i];
if (compare(element, result[n - 1]) < 0) {
result.pop();
const j = findFirst(result, e => compare(element, e) < 0);
result.splice(j, 0, element);
}
}
}
/**
* @returns a new array with all undefined or null values removed. The original array is not modified at all.
*/
export function coalesce<T>(array: T[]): T[] {
if (!array) {
return array;
}
return array.filter(e => !!e);
}
/**
 * Moves the element at index `from` to index `to` within the array.
*/
export function move(array: any[], from: number, to: number): void {
array.splice(to, 0, array.splice(from, 1)[0]);
}
/**
 * @returns true if the provided object is not an array or is empty,
 * false if it is a non-empty array.
*/
export function isFalsyOrEmpty(obj: any): boolean {
return !Array.isArray(obj) || (<Array<any>>obj).length === 0;
}
/**
 * Removes duplicates from the given array. The optional keyFn allows you to specify
 * how elements are checked for equality by returning a unique string for each.
*/
export function distinct<T>(array: T[], keyFn?: (t: T) => string): T[] {
if (!keyFn) {
return array.filter((element, position) => {
return array.indexOf(element) === position;
});
}
const seen: Record<string, boolean> = Object.create(null);
return array.filter(elem => {
const key = keyFn(elem);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
});
}
export function uniqueFilter<T>(keyFn: (t: T) => string): (t: T) => boolean {
const seen: Record<string, boolean> = Object.create(null);
return element => {
const key = keyFn(element);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
};
}
export function firstIndex<T>(array: T[], fn: (item: T) => boolean): number {
for (let i = 0; i < array.length; i++) {
const element = array[i];
if (fn(element)) {
return i;
}
}
return -1;
}
// @ts-ignore
export function first<T>(array: T[], fn: (item: T) => boolean, notFoundValue: T = null): T {
const index = firstIndex(array, fn);
return index < 0 ? notFoundValue : array[index];
}
export function commonPrefixLength<T>(
one: T[],
other: T[],
equals: (a: T, b: T) => boolean = (a, b) => a === b
): number {
let result = 0;
for (let i = 0, len = Math.min(one.length, other.length); i < len && equals(one[i], other[i]); i++) {
result++;
}
return result;
}
export function flatten<T>(arr: T[][]): T[] {
// @ts-ignore
return [].concat(...arr);
}
export function range(to: number): number[];
export function range(from: number, to: number): number[];
export function range(arg: number, to?: number): number[] {
let from = typeof to === 'number' ? arg : 0;
if (typeof to === 'number') {
from = arg;
} else {
from = 0;
to = arg;
}
const result: number[] = [];
if (from <= to) {
for (let i = from; i < to; i++) {
result.push(i);
}
} else {
for (let i = from; i > to; i--) {
result.push(i);
}
}
return result;
}
export function fill<T>(num: number, valueFn: () => T, arr: T[] = []): T[] {
for (let i = 0; i < num; i++) {
arr[i] = valueFn();
}
return arr;
}
export function index<T>(array: T[], indexer: (t: T) => string): Record<string, T>;
export function index<T, R>(array: T[], indexer: (t: T) => string, merger?: (t: T, r: R) => R): Record<string, R>;
export function index<T, R>(
array: T[],
indexer: (t: T) => string,
merger: (t: T, r: R) => R = t => t as any
): Record<string, R> {
return array.reduce((r, t) => {
const key = indexer(t);
r[key] = merger(t, r[key]);
return r;
}, Object.create(null));
}
/**
* Inserts an element into an array. Returns a function which, when
* called, will remove that element from the array.
*/
export function insert<T>(array: T[], element: T): () => void {
array.push(element);
return () => {
const index = array.indexOf(element);
if (index > -1)
|
};
}
/**
* Insert `insertArr` inside `target` at `insertIndex`.
* Please don't touch unless you understand https://jsperf.com/inserting-an-array-within-an-array
*/
export function arrayInsert<T>(target: T[], insertIndex: number, insertArr: T[]): T[] {
const before = target.slice(0, insertIndex);
const after = target.slice(insertIndex);
return before.concat(insertArr, after);
}
}
|
{
array.splice(index, 1);
}
|
conditional_block
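The `sortedDiff` function in the arrays.ts record above walks both sorted arrays in lockstep and merges adjacent splices inside `pushSplice`. As a reading aid, here is a rough Python sketch of the same splice-merging idea; the dict shape (start/deleteCount/toInsert) mirrors what `pushSplice` builds, and the names and example data are illustrative, not part of the TypeScript source.

def sorted_diff(before, after, compare):
    """Diff two *sorted* lists into splices, merging adjacent ones."""
    result = []

    def push_splice(start, delete_count, to_insert):
        if delete_count == 0 and not to_insert:
            return
        latest = result[-1] if result else None
        # Merge with the previous splice when they touch, as pushSplice does.
        if latest and latest["start"] + latest["deleteCount"] == start:
            latest["deleteCount"] += delete_count
            latest["toInsert"].extend(to_insert)
        else:
            result.append({"start": start, "deleteCount": delete_count,
                           "toInsert": list(to_insert)})

    i = j = 0
    while True:
        if i == len(before):
            push_splice(i, 0, after[j:])
            break
        if j == len(after):
            push_splice(i, len(before) - i, [])
            break
        n = compare(before[i], after[j])
        if n == 0:        # equal: keep walking both sides
            i += 1
            j += 1
        elif n < 0:       # element only in `before`: removed
            push_splice(i, 1, [])
            i += 1
        else:             # element only in `after`: added
            push_splice(i, 0, [after[j]])
            j += 1
    return result

print(sorted_diff([1, 2, 4], [1, 3, 4], lambda a, b: a - b))
# [{'start': 1, 'deleteCount': 1, 'toInsert': [3]}]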
|
arrays.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
/* eslint-disable */
export namespace vscMockArrays {
/**
* Returns the last element of an array.
* @param array The array.
* @param n Which element from the end (default is zero).
*/
export function tail<T>(array: T[], n: number = 0): T {
return array[array.length - (1 + n)];
}
export function equals<T>(one: T[], other: T[], itemEquals: (a: T, b: T) => boolean = (a, b) => a === b): boolean {
if (one.length !== other.length) {
return false;
}
for (let i = 0, len = one.length; i < len; i++) {
if (!itemEquals(one[i], other[i])) {
return false;
}
}
return true;
}
export function binarySearch<T>(array: T[], key: T, comparator: (op1: T, op2: T) => number): number {
let low = 0,
high = array.length - 1;
while (low <= high) {
let mid = ((low + high) / 2) | 0;
let comp = comparator(array[mid], key);
if (comp < 0) {
low = mid + 1;
} else if (comp > 0) {
high = mid - 1;
} else {
return mid;
}
}
return -(low + 1);
}
/**
* Takes a sorted array and a function p. The array is sorted in such a way that all elements where p(x) is false
* are located before all elements where p(x) is true.
 * @returns the index of the first element for which p(x) is true, or array.length if no element fulfills the predicate.
*/
export function findFirst<T>(array: T[], p: (x: T) => boolean): number {
let low = 0,
high = array.length;
if (high === 0) {
return 0; // no children
}
while (low < high) {
let mid = Math.floor((low + high) / 2);
if (p(array[mid])) {
high = mid;
} else {
low = mid + 1;
}
}
return low;
}
/**
 * Like `Array#sort` but always stable. Usually runs a little slower than `Array#sort`
* so only use this when actually needing stable sort.
*/
export function mergeSort<T>(data: T[], compare: (a: T, b: T) => number): T[] {
_divideAndMerge(data, compare);
return data;
}
function _divideAndMerge<T>(data: T[], compare: (a: T, b: T) => number): void {
if (data.length <= 1) {
// sorted
return;
}
const p = (data.length / 2) | 0;
const left = data.slice(0, p);
const right = data.slice(p);
_divideAndMerge(left, compare);
_divideAndMerge(right, compare);
let leftIdx = 0;
let rightIdx = 0;
let i = 0;
while (leftIdx < left.length && rightIdx < right.length) {
let ret = compare(left[leftIdx], right[rightIdx]);
if (ret <= 0) {
// smaller_equal -> take left to preserve order
data[i++] = left[leftIdx++];
} else {
// greater -> take right
data[i++] = right[rightIdx++];
}
}
while (leftIdx < left.length) {
data[i++] = left[leftIdx++];
}
while (rightIdx < right.length) {
data[i++] = right[rightIdx++];
}
}
export function groupBy<T>(data: T[], compare: (a: T, b: T) => number): T[][] {
const result: T[][] = [];
let currentGroup: T[];
for (const element of mergeSort(data.slice(0), compare)) {
// @ts-ignore
if (!currentGroup || compare(currentGroup[0], element) !== 0) {
currentGroup = [element];
result.push(currentGroup);
} else {
currentGroup.push(element);
}
}
return result;
}
type IMutableSplice<T> = Array<T> &
any & {
deleteCount: number;
};
type ISplice<T> = Array<T> & any;
/**
* Diffs two *sorted* arrays and computes the splices which apply the diff.
*/
export function sortedDiff<T>(before: T[], after: T[], compare: (a: T, b: T) => number): ISplice<T>[] {
const result: IMutableSplice<T>[] = [];
function
|
(start: number, deleteCount: number, toInsert: T[]): void {
if (deleteCount === 0 && toInsert.length === 0) {
return;
}
const latest = result[result.length - 1];
if (latest && latest.start + latest.deleteCount === start) {
latest.deleteCount += deleteCount;
latest.toInsert.push(...toInsert);
} else {
result.push({ start, deleteCount, toInsert });
}
}
let beforeIdx = 0;
let afterIdx = 0;
while (true) {
if (beforeIdx === before.length) {
pushSplice(beforeIdx, 0, after.slice(afterIdx));
break;
}
if (afterIdx === after.length) {
pushSplice(beforeIdx, before.length - beforeIdx, []);
break;
}
const beforeElement = before[beforeIdx];
const afterElement = after[afterIdx];
const n = compare(beforeElement, afterElement);
if (n === 0) {
// equal
beforeIdx += 1;
afterIdx += 1;
} else if (n < 0) {
// beforeElement is smaller -> before element removed
pushSplice(beforeIdx, 1, []);
beforeIdx += 1;
} else if (n > 0) {
// beforeElement is greater -> after element added
pushSplice(beforeIdx, 0, [afterElement]);
afterIdx += 1;
}
}
return result;
}
/**
* Takes two *sorted* arrays and computes their delta (removed, added elements).
* Finishes in `Math.min(before.length, after.length)` steps.
* @param before
* @param after
* @param compare
*/
export function delta<T>(before: T[], after: T[], compare: (a: T, b: T) => number): { removed: T[]; added: T[] } {
const splices = sortedDiff(before, after, compare);
const removed: T[] = [];
const added: T[] = [];
for (const splice of splices) {
removed.push(...before.slice(splice.start, splice.start + splice.deleteCount));
added.push(...splice.toInsert);
}
return { removed, added };
}
/**
* Returns the top N elements from the array.
*
* Faster than sorting the entire array when the array is a lot larger than N.
*
* @param array The unsorted array.
* @param compare A sort function for the elements.
* @param n The number of elements to return.
 * @return The first n elements from the array when sorted with compare.
*/
export function top<T>(array: T[], compare: (a: T, b: T) => number, n: number): T[] {
if (n === 0) {
return [];
}
const result = array.slice(0, n).sort(compare);
topStep(array, compare, result, n, array.length);
return result;
}
function topStep<T>(array: T[], compare: (a: T, b: T) => number, result: T[], i: number, m: number): void {
for (const n = result.length; i < m; i++) {
const element = array[i];
if (compare(element, result[n - 1]) < 0) {
result.pop();
const j = findFirst(result, e => compare(element, e) < 0);
result.splice(j, 0, element);
}
}
}
/**
* @returns a new array with all undefined or null values removed. The original array is not modified at all.
*/
export function coalesce<T>(array: T[]): T[] {
if (!array) {
return array;
}
return array.filter(e => !!e);
}
/**
 * Moves the element at index `from` to index `to` within the array.
*/
export function move(array: any[], from: number, to: number): void {
array.splice(to, 0, array.splice(from, 1)[0]);
}
/**
 * @returns true if the provided object is not an array or is empty,
 * false if it is a non-empty array.
*/
export function isFalsyOrEmpty(obj: any): boolean {
return !Array.isArray(obj) || (<Array<any>>obj).length === 0;
}
/**
 * Removes duplicates from the given array. The optional keyFn allows you to specify
 * how elements are checked for equality by returning a unique string for each.
*/
export function distinct<T>(array: T[], keyFn?: (t: T) => string): T[] {
if (!keyFn) {
return array.filter((element, position) => {
return array.indexOf(element) === position;
});
}
const seen: Record<string, boolean> = Object.create(null);
return array.filter(elem => {
const key = keyFn(elem);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
});
}
export function uniqueFilter<T>(keyFn: (t: T) => string): (t: T) => boolean {
const seen: Record<string, boolean> = Object.create(null);
return element => {
const key = keyFn(element);
if (seen[key]) {
return false;
}
seen[key] = true;
return true;
};
}
export function firstIndex<T>(array: T[], fn: (item: T) => boolean): number {
for (let i = 0; i < array.length; i++) {
const element = array[i];
if (fn(element)) {
return i;
}
}
return -1;
}
// @ts-ignore
export function first<T>(array: T[], fn: (item: T) => boolean, notFoundValue: T = null): T {
const index = firstIndex(array, fn);
return index < 0 ? notFoundValue : array[index];
}
export function commonPrefixLength<T>(
one: T[],
other: T[],
equals: (a: T, b: T) => boolean = (a, b) => a === b
): number {
let result = 0;
for (let i = 0, len = Math.min(one.length, other.length); i < len && equals(one[i], other[i]); i++) {
result++;
}
return result;
}
export function flatten<T>(arr: T[][]): T[] {
// @ts-ignore
return [].concat(...arr);
}
export function range(to: number): number[];
export function range(from: number, to: number): number[];
export function range(arg: number, to?: number): number[] {
let from = typeof to === 'number' ? arg : 0;
if (typeof to === 'number') {
from = arg;
} else {
from = 0;
to = arg;
}
const result: number[] = [];
if (from <= to) {
for (let i = from; i < to; i++) {
result.push(i);
}
} else {
for (let i = from; i > to; i--) {
result.push(i);
}
}
return result;
}
export function fill<T>(num: number, valueFn: () => T, arr: T[] = []): T[] {
for (let i = 0; i < num; i++) {
arr[i] = valueFn();
}
return arr;
}
export function index<T>(array: T[], indexer: (t: T) => string): Record<string, T>;
export function index<T, R>(array: T[], indexer: (t: T) => string, merger?: (t: T, r: R) => R): Record<string, R>;
export function index<T, R>(
array: T[],
indexer: (t: T) => string,
merger: (t: T, r: R) => R = t => t as any
): Record<string, R> {
return array.reduce((r, t) => {
const key = indexer(t);
r[key] = merger(t, r[key]);
return r;
}, Object.create(null));
}
/**
* Inserts an element into an array. Returns a function which, when
* called, will remove that element from the array.
*/
export function insert<T>(array: T[], element: T): () => void {
array.push(element);
return () => {
const index = array.indexOf(element);
if (index > -1) {
array.splice(index, 1);
}
};
}
/**
* Insert `insertArr` inside `target` at `insertIndex`.
* Please don't touch unless you understand https://jsperf.com/inserting-an-array-within-an-array
*/
export function arrayInsert<T>(target: T[], insertIndex: number, insertArr: T[]): T[] {
const before = target.slice(0, insertIndex);
const after = target.slice(insertIndex);
return before.concat(insertArr, after);
}
}
|
pushSplice
|
identifier_name
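The `top`/`topStep` pair shown above avoids sorting the whole input: it keeps a sorted window of the first `n` elements and replaces the current worst while scanning the rest of the array once. A minimal Python sketch of that selection idea, assuming a plain `<` comparison instead of a comparator; names and example data are mine.

import bisect

def top_n(items, n):
    """Return the n smallest items, mirroring top()/topStep() above."""
    if n == 0:
        return []
    result = sorted(items[:n])              # sorted window of the first n items
    for element in items[n:]:
        if element < result[-1]:            # better than the current worst?
            result.pop()                    # drop the worst
            bisect.insort(result, element)  # keep the window sorted
    return result

print(top_n([9, 1, 8, 2, 7, 3], 3))  # [1, 2, 3]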
|
Table.d.ts
|
/// <reference types="react" />
import React from 'react';
import { PaginationProps } from '../pagination';
import { SpinProps } from '../spin';
import { Store } from './createStore';
import { SelectionDecorator } from './SelectionCheckboxAll';
import Column, { ColumnProps } from './Column';
import ColumnGroup from './ColumnGroup';
export declare type TableColumnConfig<T> = ColumnProps<T>;
export interface TableRowSelection<T> {
type?: 'checkbox' | 'radio';
selectedRowKeys?: string[];
onChange?: (selectedRowKeys: string[], selectedRows: Object[]) => any;
getCheckboxProps?: (record: T) => Object;
onSelect?: (record: T, selected: boolean, selectedRows: Object[]) => any;
onSelectAll?: (selected: boolean, selectedRows: Object[], changeRows: Object[]) => any;
onSelectInvert?: (selectedRows: Object[]) => any;
selections?: SelectionDecorator[];
}
export interface TableProps<T> {
prefixCls?: string;
dropdownPrefixCls?: string;
rowSelection?: TableRowSelection<T>;
pagination?: PaginationProps | boolean;
size?: 'default' | 'middle' | 'small';
dataSource?: T[];
columns?: ColumnProps<T>[];
rowKey?: string | ((record: T, index: number) => string);
rowClassName?: (record: T, index: number) => string;
expandedRowRender?: any;
defaultExpandedRowKeys?: string[];
expandedRowKeys?: string[];
expandIconAsCell?: boolean;
expandIconColumnIndex?: number;
onExpandedRowsChange?: (expandedRowKeys: string[]) => void;
onExpand?: (expanded: boolean, record: T) => void;
onChange?: (pagination: PaginationProps | boolean, filters: string[], sorter: Object) => any;
loading?: boolean | SpinProps;
locale?: Object;
indentSize?: number;
onRowClick?: (record: T, index: number) => any;
useFixedHeader?: boolean;
bordered?: boolean;
showHeader?: boolean;
footer?: (currentPageData: Object[]) => React.ReactNode;
title?: (currentPageData: Object[]) => React.ReactNode;
scroll?: {
x?: boolean | number;
y?: boolean | number;
};
childrenColumnName?: string;
bodyStyle?: React.CSSProperties;
className?: string;
style?: React.CSSProperties;
}
export interface TableContext {
antLocale?: {
Table?: any;
};
}
export default class
|
<T> extends React.Component<TableProps<T>, any> {
static Column: typeof Column;
static ColumnGroup: typeof ColumnGroup;
static propTypes: {
dataSource: React.Requireable<any>;
columns: React.Requireable<any>;
prefixCls: React.Requireable<any>;
useFixedHeader: React.Requireable<any>;
rowSelection: React.Requireable<any>;
className: React.Requireable<any>;
size: React.Requireable<any>;
loading: React.Requireable<any>;
bordered: React.Requireable<any>;
onChange: React.Requireable<any>;
locale: React.Requireable<any>;
dropdownPrefixCls: React.Requireable<any>;
};
static defaultProps: {
dataSource: never[];
prefixCls: string;
useFixedHeader: boolean;
rowSelection: null;
className: string;
size: string;
loading: boolean;
bordered: boolean;
indentSize: number;
locale: {};
rowKey: string;
showHeader: boolean;
};
static contextTypes: {
antLocale: React.Requireable<any>;
};
context: TableContext;
CheckboxPropsCache: Object;
store: Store;
columns: ColumnProps<T>[];
constructor(props: any);
getCheckboxPropsByItem: (item: any, index: any) => any;
getDefaultSelection(): string[];
getDefaultPagination(props: any): any;
getLocale(): any;
componentWillReceiveProps(nextProps: any): void;
setSelectedRowKeys(selectedRowKeys: any, {selectWay, record, checked, changeRowKeys}: any): void;
hasPagination(props?: any): boolean;
isFiltersChanged(filters: any): boolean;
getSortOrderColumns(columns?: any): any;
getFilteredValueColumns(columns?: any): any;
getFiltersFromColumns(columns?: any): {};
getSortStateFromColumns(columns?: any): {
sortColumn: any;
sortOrder: any;
};
getSorterFn(): ((a: any, b: any) => any) | undefined;
toggleSortOrder(order: any, column: any): void;
handleFilter: (column: any, nextFilters: any) => void;
handleSelect: (record: any, rowIndex: any, e: any) => void;
handleRadioSelect: (record: any, rowIndex: any, e: any) => void;
handleSelectRow: (selectionKey: any, index: any, onSelectFunc: any) => any;
handlePageChange: (current: any, ...otherArguments: any[]) => void;
renderSelectionBox: (type: any) => (_: any, record: any, index: any) => JSX.Element;
getRecordKey: (record: any, index: any) => string;
renderRowSelection(): ColumnProps<T>[];
getColumnKey(column: any, index?: any): any;
getMaxCurrent(total: any): any;
isSortColumn(column: any): boolean;
renderColumnsDropdown(columns: any): any[];
handleShowSizeChange: (current: any, pageSize: any) => void;
renderPagination(): JSX.Element | null;
prepareParamsArguments(state: any): [any, string[], Object];
findColumn(myKey: any): any;
getCurrentPageData(): T[];
getFlatData(): Object[];
getFlatCurrentPageData(): Object[];
recursiveSort(data: any, sorterFn: any): any;
getLocalData(): T[];
render(): JSX.Element;
}
|
Table
|
identifier_name
|
Table.d.ts
|
/// <reference types="react" />
import React from 'react';
import { PaginationProps } from '../pagination';
import { SpinProps } from '../spin';
import { Store } from './createStore';
import { SelectionDecorator } from './SelectionCheckboxAll';
import Column, { ColumnProps } from './Column';
import ColumnGroup from './ColumnGroup';
export declare type TableColumnConfig<T> = ColumnProps<T>;
export interface TableRowSelection<T> {
type?: 'checkbox' | 'radio';
selectedRowKeys?: string[];
onChange?: (selectedRowKeys: string[], selectedRows: Object[]) => any;
getCheckboxProps?: (record: T) => Object;
onSelect?: (record: T, selected: boolean, selectedRows: Object[]) => any;
onSelectAll?: (selected: boolean, selectedRows: Object[], changeRows: Object[]) => any;
onSelectInvert?: (selectedRows: Object[]) => any;
selections?: SelectionDecorator[];
}
export interface TableProps<T> {
prefixCls?: string;
dropdownPrefixCls?: string;
rowSelection?: TableRowSelection<T>;
pagination?: PaginationProps | boolean;
size?: 'default' | 'middle' | 'small';
dataSource?: T[];
columns?: ColumnProps<T>[];
rowKey?: string | ((record: T, index: number) => string);
rowClassName?: (record: T, index: number) => string;
expandedRowRender?: any;
defaultExpandedRowKeys?: string[];
expandedRowKeys?: string[];
expandIconAsCell?: boolean;
expandIconColumnIndex?: number;
onExpandedRowsChange?: (expandedRowKeys: string[]) => void;
onExpand?: (expanded: boolean, record: T) => void;
onChange?: (pagination: PaginationProps | boolean, filters: string[], sorter: Object) => any;
loading?: boolean | SpinProps;
locale?: Object;
indentSize?: number;
onRowClick?: (record: T, index: number) => any;
useFixedHeader?: boolean;
bordered?: boolean;
showHeader?: boolean;
footer?: (currentPageData: Object[]) => React.ReactNode;
title?: (currentPageData: Object[]) => React.ReactNode;
scroll?: {
x?: boolean | number;
y?: boolean | number;
};
childrenColumnName?: string;
|
className?: string;
style?: React.CSSProperties;
}
export interface TableContext {
antLocale?: {
Table?: any;
};
}
export default class Table<T> extends React.Component<TableProps<T>, any> {
static Column: typeof Column;
static ColumnGroup: typeof ColumnGroup;
static propTypes: {
dataSource: React.Requireable<any>;
columns: React.Requireable<any>;
prefixCls: React.Requireable<any>;
useFixedHeader: React.Requireable<any>;
rowSelection: React.Requireable<any>;
className: React.Requireable<any>;
size: React.Requireable<any>;
loading: React.Requireable<any>;
bordered: React.Requireable<any>;
onChange: React.Requireable<any>;
locale: React.Requireable<any>;
dropdownPrefixCls: React.Requireable<any>;
};
static defaultProps: {
dataSource: never[];
prefixCls: string;
useFixedHeader: boolean;
rowSelection: null;
className: string;
size: string;
loading: boolean;
bordered: boolean;
indentSize: number;
locale: {};
rowKey: string;
showHeader: boolean;
};
static contextTypes: {
antLocale: React.Requireable<any>;
};
context: TableContext;
CheckboxPropsCache: Object;
store: Store;
columns: ColumnProps<T>[];
constructor(props: any);
getCheckboxPropsByItem: (item: any, index: any) => any;
getDefaultSelection(): string[];
getDefaultPagination(props: any): any;
getLocale(): any;
componentWillReceiveProps(nextProps: any): void;
setSelectedRowKeys(selectedRowKeys: any, {selectWay, record, checked, changeRowKeys}: any): void;
hasPagination(props?: any): boolean;
isFiltersChanged(filters: any): boolean;
getSortOrderColumns(columns?: any): any;
getFilteredValueColumns(columns?: any): any;
getFiltersFromColumns(columns?: any): {};
getSortStateFromColumns(columns?: any): {
sortColumn: any;
sortOrder: any;
};
getSorterFn(): ((a: any, b: any) => any) | undefined;
toggleSortOrder(order: any, column: any): void;
handleFilter: (column: any, nextFilters: any) => void;
handleSelect: (record: any, rowIndex: any, e: any) => void;
handleRadioSelect: (record: any, rowIndex: any, e: any) => void;
handleSelectRow: (selectionKey: any, index: any, onSelectFunc: any) => any;
handlePageChange: (current: any, ...otherArguments: any[]) => void;
renderSelectionBox: (type: any) => (_: any, record: any, index: any) => JSX.Element;
getRecordKey: (record: any, index: any) => string;
renderRowSelection(): ColumnProps<T>[];
getColumnKey(column: any, index?: any): any;
getMaxCurrent(total: any): any;
isSortColumn(column: any): boolean;
renderColumnsDropdown(columns: any): any[];
handleShowSizeChange: (current: any, pageSize: any) => void;
renderPagination(): JSX.Element | null;
prepareParamsArguments(state: any): [any, string[], Object];
findColumn(myKey: any): any;
getCurrentPageData(): T[];
getFlatData(): Object[];
getFlatCurrentPageData(): Object[];
recursiveSort(data: any, sorterFn: any): any;
getLocalData(): T[];
render(): JSX.Element;
}
|
bodyStyle?: React.CSSProperties;
|
random_line_split
|
action.py
|
"""Provides a class for managing BIG-IP L7 Rule Action resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from f5_cccl.resource import Resource
LOGGER = logging.getLogger(__name__)
class Action(Resource):
"""L7 Rule Action class."""
# The property names class attribute defines the names of the
# properties that we wish to compare.
properties = dict(
expression=None,
forward=False,
location=None,
pool=None,
redirect=False,
request=True,
reset=False,
setVariable=False,
tcl=False,
tmName=None,
httpHost=False,
httpUri=False,
path=None,
replace=False,
value=None,
shutdown=True,
select=True,
)
def __init__(self, name, data):
"""Initialize the Action object.
Actions do not have explicit partition attributes; they are
implied by the partition of the rule to which they belong.
"""
super(Action, self).__init__(name, partition=None)
# Actions are only supported on requests.
self._data['request'] = True
# Is this a forwarding action?
if data.get('forward', False):
self._data['forward'] = True
# Yes, there are four supported forwarding actions:
# forward to pool, reset, select, and shutdown; these are
# mutually exclusive options.
pool = data.get('pool', None)
reset = data.get('reset', False)
# This allows you to specify an empty node. This is
# what Container Connector does.
select = data.get('select', False)
# This was added in 13.1.0
shutdown = data.get('shutdown', False)
if pool:
self._data['pool'] = pool
elif reset:
self._data['reset'] = reset
elif select:
|
elif shutdown:
self._data['shutdown'] = shutdown
else:
raise ValueError(
"Unsupported forward action, must be one of reset, "
"forward to pool, select, or shutdown.")
# Is this a redirect action?
elif data.get('redirect', False):
self._data['redirect'] = True
# Yes, set the location and httpReply attribute
self._data['location'] = data.get('location', None)
self._data['httpReply'] = data.get('httpReply', True)
# Is this a setVariable action?
elif data.get('setVariable', False):
self._data['setVariable'] = True
# Set the variable name and the value
self._data['tmName'] = data.get('tmName', None)
self._data['expression'] = data.get('expression', None)
self._data['tcl'] = True
# Is this a replace URI host action?
elif data.get('replace', False) and data.get('httpHost', False):
self._data['replace'] = True
self._data['httpHost'] = True
self._data['value'] = data.get('value', None)
# Is this a replace URI path action?
elif data.get('replace', False) and data.get('httpUri', False) and \
data.get('path', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['path'] = data.get('path', None)
self._data['value'] = data.get('value', None)
# Is this a replace URI action?
elif data.get('replace', False) and data.get('httpUri', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['value'] = data.get('value', None)
else:
# Only forward, redirect, setVariable, and replace actions are supported.
raise ValueError("Unsupported action, must be one of forward, "
"redirect, setVariable, replace, or reset.")
def __eq__(self, other):
"""Check the equality of the two objects.
Do a straight data to data comparison.
"""
if not isinstance(other, Action):
return False
return super(Action, self).__eq__(other)
def __str__(self):
return str(self._data)
def _uri_path(self, bigip):
"""Return the URI path of an action object.
Not implemented because the current implementation does
not manage Actions individually."""
raise NotImplementedError
|
self._data['select'] = select
|
conditional_block
|
action.py
|
"""Provides a class for managing BIG-IP L7 Rule Action resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from f5_cccl.resource import Resource
LOGGER = logging.getLogger(__name__)
class Action(Resource):
"""L7 Rule Action class."""
# The property names class attribute defines the names of the
# properties that we wish to compare.
properties = dict(
expression=None,
forward=False,
location=None,
pool=None,
redirect=False,
request=True,
reset=False,
setVariable=False,
tcl=False,
tmName=None,
httpHost=False,
httpUri=False,
path=None,
replace=False,
value=None,
shutdown=True,
select=True,
)
def __init__(self, name, data):
"""Initialize the Action object.
Actions do not have explicit partition attributes; they are
implied by the partition of the rule to which they belong.
"""
super(Action, self).__init__(name, partition=None)
# Actions are only supported on requests.
self._data['request'] = True
# Is this a forwarding action?
if data.get('forward', False):
self._data['forward'] = True
# Yes, there are four supported forwarding actions:
# forward to pool, reset, select, and shutdown; these are
# mutually exclusive options.
pool = data.get('pool', None)
reset = data.get('reset', False)
# This allows you to specify an empty node. This is
# what Container Connector does.
select = data.get('select', False)
# This was added in 13.1.0
shutdown = data.get('shutdown', False)
if pool:
self._data['pool'] = pool
elif reset:
self._data['reset'] = reset
elif select:
self._data['select'] = select
elif shutdown:
self._data['shutdown'] = shutdown
else:
raise ValueError(
"Unsupported forward action, must be one of reset, "
"forward to pool, select, or shutdown.")
# Is this a redirect action?
elif data.get('redirect', False):
self._data['redirect'] = True
# Yes, set the location and httpReply attribute
self._data['location'] = data.get('location', None)
self._data['httpReply'] = data.get('httpReply', True)
# Is this a setVariable action?
elif data.get('setVariable', False):
self._data['setVariable'] = True
# Set the variable name and the value
self._data['tmName'] = data.get('tmName', None)
self._data['expression'] = data.get('expression', None)
self._data['tcl'] = True
# Is this a replace URI host action?
elif data.get('replace', False) and data.get('httpHost', False):
self._data['replace'] = True
self._data['httpHost'] = True
self._data['value'] = data.get('value', None)
# Is this a replace URI path action?
elif data.get('replace', False) and data.get('httpUri', False) and \
data.get('path', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['path'] = data.get('path', None)
self._data['value'] = data.get('value', None)
# Is this a replace URI action?
elif data.get('replace', False) and data.get('httpUri', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['value'] = data.get('value', None)
else:
# Only forward, redirect, setVariable, and replace actions are supported.
raise ValueError("Unsupported action, must be one of forward, "
"redirect, setVariable, replace, or reset.")
def __eq__(self, other):
"""Check the equality of the two objects.
Do a straight data to data comparison.
"""
if not isinstance(other, Action):
return False
return super(Action, self).__eq__(other)
def __str__(self):
return str(self._data)
def
|
(self, bigip):
"""Return the URI path of an action object.
Not implemented because the current implementation does
not manage Actions individually."""
raise NotImplementedError
|
_uri_path
|
identifier_name
|
action.py
|
"""Provides a class for managing BIG-IP L7 Rule Action resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from f5_cccl.resource import Resource
LOGGER = logging.getLogger(__name__)
class Action(Resource):
"""L7 Rule Action class."""
# The property names class attribute defines the names of the
# properties that we wish to compare.
properties = dict(
expression=None,
forward=False,
location=None,
pool=None,
redirect=False,
request=True,
reset=False,
setVariable=False,
tcl=False,
tmName=None,
httpHost=False,
httpUri=False,
path=None,
replace=False,
value=None,
shutdown=True,
select=True,
)
def __init__(self, name, data):
"""Initialize the Action object.
Actions do not have explicit partition attributes; they are
implied by the partition of the rule to which they belong.
"""
super(Action, self).__init__(name, partition=None)
# Actions are only supported on requests.
self._data['request'] = True
# Is this a forwarding action?
if data.get('forward', False):
self._data['forward'] = True
# Yes, there are four supported forwarding actions:
# forward to pool, reset, select, and shutdown; these are
# mutually exclusive options.
pool = data.get('pool', None)
reset = data.get('reset', False)
# This allows you to specify an empty node. This is
# what Container Connector does.
select = data.get('select', False)
# This was added in 13.1.0
shutdown = data.get('shutdown', False)
if pool:
self._data['pool'] = pool
elif reset:
self._data['reset'] = reset
elif select:
self._data['select'] = select
elif shutdown:
self._data['shutdown'] = shutdown
else:
raise ValueError(
"Unsupported forward action, must be one of reset, "
"forward to pool, select, or shutdown.")
# Is this a redirect action?
elif data.get('redirect', False):
self._data['redirect'] = True
# Yes, set the location and httpReply attribute
self._data['location'] = data.get('location', None)
self._data['httpReply'] = data.get('httpReply', True)
# Is this a setVariable action?
elif data.get('setVariable', False):
self._data['setVariable'] = True
# Set the variable name and the value
self._data['tmName'] = data.get('tmName', None)
self._data['expression'] = data.get('expression', None)
self._data['tcl'] = True
# Is this a replace URI host action?
elif data.get('replace', False) and data.get('httpHost', False):
self._data['replace'] = True
self._data['httpHost'] = True
self._data['value'] = data.get('value', None)
# Is this a replace URI path action?
elif data.get('replace', False) and data.get('httpUri', False) and \
data.get('path', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['path'] = data.get('path', None)
self._data['value'] = data.get('value', None)
# Is this a replace URI action?
elif data.get('replace', False) and data.get('httpUri', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['value'] = data.get('value', None)
else:
# Only forward, redirect, setVariable, and replace actions are supported.
raise ValueError("Unsupported action, must be one of forward, "
"redirect, setVariable, replace, or reset.")
def __eq__(self, other):
"""Check the equality of the two objects.
Do a straight data to data comparison.
"""
if not isinstance(other, Action):
return False
return super(Action, self).__eq__(other)
def __str__(self):
|
def _uri_path(self, bigip):
"""Return the URI path of an action object.
Not implemented because the current implementation does
not manage Actions individually."""
raise NotImplementedError
|
return str(self._data)
|
identifier_body
|
action.py
|
"""Provides a class for managing BIG-IP L7 Rule Action resources."""
# coding=utf-8
#
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from f5_cccl.resource import Resource
LOGGER = logging.getLogger(__name__)
class Action(Resource):
"""L7 Rule Action class."""
# The property names class attribute defines the names of the
# properties that we wish to compare.
properties = dict(
expression=None,
forward=False,
location=None,
pool=None,
redirect=False,
request=True,
reset=False,
setVariable=False,
tcl=False,
tmName=None,
httpHost=False,
httpUri=False,
path=None,
replace=False,
value=None,
shutdown=True,
select=True,
)
def __init__(self, name, data):
"""Initialize the Action object.
Actions do not have explicit partition attributes; they are
implied by the partition of the rule to which they belong.
"""
super(Action, self).__init__(name, partition=None)
# Actions are only supported on requests.
self._data['request'] = True
# Is this a forwarding action?
if data.get('forward', False):
self._data['forward'] = True
# Yes, there are four supported forwarding actions:
# forward to pool, reset, select, and shutdown; these are
# mutually exclusive options.
pool = data.get('pool', None)
reset = data.get('reset', False)
# This allows you to specify an empty node. This is
# what Container Connector does.
select = data.get('select', False)
# This was added in 13.1.0
shutdown = data.get('shutdown', False)
if pool:
self._data['pool'] = pool
elif reset:
self._data['reset'] = reset
elif select:
self._data['select'] = select
elif shutdown:
self._data['shutdown'] = shutdown
else:
raise ValueError(
"Unsupported forward action, must be one of reset, "
"forward to pool, select, or shutdown.")
# Is this a redirect action?
elif data.get('redirect', False):
|
self._data['httpReply'] = data.get('httpReply', True)
# Is this a setVariable action?
elif data.get('setVariable', False):
self._data['setVariable'] = True
# Set the variable name and the value
self._data['tmName'] = data.get('tmName', None)
self._data['expression'] = data.get('expression', None)
self._data['tcl'] = True
# Is this a replace URI host action?
elif data.get('replace', False) and data.get('httpHost', False):
self._data['replace'] = True
self._data['httpHost'] = True
self._data['value'] = data.get('value', None)
# Is this a replace URI path action?
elif data.get('replace', False) and data.get('httpUri', False) and \
data.get('path', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['path'] = data.get('path', None)
self._data['value'] = data.get('value', None)
# Is this a replace URI action?
elif data.get('replace', False) and data.get('httpUri', False):
self._data['replace'] = True
self._data['httpUri'] = True
self._data['value'] = data.get('value', None)
else:
# Only forward, redirect, setVariable, and replace actions are supported.
raise ValueError("Unsupported action, must be one of forward, "
"redirect, setVariable, replace, or reset.")
def __eq__(self, other):
"""Check the equality of the two objects.
Do a straight data to data comparison.
"""
if not isinstance(other, Action):
return False
return super(Action, self).__eq__(other)
def __str__(self):
return str(self._data)
def _uri_path(self, bigip):
"""Return the URI path of an action object.
Not implemented because the current implementation does
not manage Actions individually."""
raise NotImplementedError
|
self._data['redirect'] = True
# Yes, set the location and httpReply attribute
self._data['location'] = data.get('location', None)
|
random_line_split
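For reference, `Action.__init__` above only inspects the `data` dict and keeps the branch that matches. A small hypothetical usage sketch follows; the import path, action names, and pool path are assumptions, and it relies only on the `Action(name, data)` constructor and the `__str__` shown above.

# Hypothetical usage of the Action class above. The module path below is
# an assumption, and the action/pool names are made up for illustration.
from f5_cccl.resource.ltm.policy.action import Action  # path is an assumption

# Forward-to-pool: 'forward' picks the branch, 'pool' the target.
forward = Action("forward_to_web", {"forward": True, "pool": "/Common/web_pool"})

# Redirect: 'redirect' plus a 'location'; httpReply defaults to True.
redirect = Action("send_to_https",
                  {"redirect": True, "location": "https://example.com/"})

print(forward)   # __str__ prints the underlying _data dict
print(redirect)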
|
test_Ipc.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
# Aim: Full coverage testing of the Ipc classes
#
# Test the module loads
import unittest
from Axon.Ipc import *
class ipc_Test(unittest.TestCase):
def test_SmokeTest(self):
"ipc - Should be derived from object."
self.failUnless(isinstance(ipc(),object), "IPC objects should also be instances of object.")
class newComponent_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"newComponent.__init__ - Should work without problems."
nc=newComponent()
self.failUnless(isinstance(nc, ipc), "newComponent should be derived from ipc class")
self.failUnless(len(nc.components())==0, "There should be no components in the message if the constructor was called with no arguments.")
def test___init__SmokeTest(self):
"newComponent.__init__ - Groups all the arguments as a tuple of components that need to be activated/added to the run queue. Order is unimportant, scheduler doesn't care."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "Component tuple is not as expected.")
def test_components(self):
"newComponent.components - Returns a tuple of components that need to be added to the run queue/activated. Same test as for __init__ as they are counterparts."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "components returned something other than expected.")
class shutdownMicroprocess_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"shutdownMicroprocess.__init__ - Should work without problems."
sm=shutdownMicroprocess()
self.failUnless(isinstance(sm,ipc), "shutdownMicroprocess should be derived from ipc")
self.failUnless(sm.microprocesses()==(), "Microprocess tuple not empty as expected.")
def test___init__SmokeTest(self):
"shutdownMicroprocess.__init__ - Treats all the arguments as a tuple of microprocesses that need to be shutdown."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Stored tuple not as expected.")
def test_microprocesses(self):
"shutdownMicroprocess.microprocesses- Returns the list of microprocesses that need to be shutdown. This is essentially the counterpart to the __init__ test."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Returned tuple not as expected.")
class notify_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"notify.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, notify)
def test_SmokeTest_MinArguments(self):
"notify.__init__ - Creates a message from a specific caller with some data payload to notify part of the system of an event."
n=notify("caller", "payload")
self.failUnless(isinstance(n, ipc), "Expected notify to be an instance of ipc.")
self.failUnless(n.object == "payload", "Payload argument not stored in object member.")
self.failUnless(n.caller == "caller", "Caller argument not stored in caller member.")
class status_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"status.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, status)
def test_SmokeTest_MinArguments(self):
"status.__init__ - Stores the status message - for extraction by the recipient of the message. Checks object is instance of ipc."
s=status("Status message.")
self.failUnless(isinstance(s, ipc), "status should be derived from ipc.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
def test_status(self):
"status.status - Returns the status message stored inside the status object. Counterpart to __init__ test."
s=status("Status message.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
class wouldblock_Test(unittest.TestCase):
|
class producerFinished_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"producerFinished.__init__ - Called without arguments defaults to a caller of None, message of None. Checks producerFinished is a subclass of ipc"
pf=producerFinished()
self.failUnless(isinstance(pf, ipc), "producerFinished should be derived from ipc.")
self.failUnless(pf.caller== None, "caller does not default to None")
self.failUnless(pf.message == None, "message does not default to None")
def test_SmokeTest_MinArguments(self):
"test_SmokeTest.__init__ - Creates a producerFinished message with specified caller & shutdown 'last' message."
pf = producerFinished("caller", "message")
self.failUnless(pf.caller == "caller", "caller not set correctly by position.")
self.failUnless(pf.message == "message", "message not set correctly by position.")
pf2 = producerFinished(message="message", caller="caller")
self.failUnless(pf2.caller == "caller", "caller not set correctly by name.")
self.failUnless(pf2.message == "message", "message not set correctly by name.")
class errorInformation_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"errorInformation.__init__ - Called without arguments fails - must include caller."
self.failUnlessRaises(TypeError, errorInformation)
def test_SmokeTest_MinArguments(self):
"errorInformation.__init__ - Takes the supplied caller, and creates an errorInformation object. Checks errorInformation object is an instance of ipc."
ei=errorInformation(self)
self.failUnless(isinstance(ei, ipc), "errorInformation should be derived from ipc.")
self.failUnless(ei.caller == self, "Caller is not set properly.")
def test_SmokeTest_MinSensibleArguments(self):
"errorInformation.__init__ - An exception & message (any object) in addition to the caller to provide a more meaningful errorInformation message where appropriate. ttbw "
ei=errorInformation("caller", "exception", "message")
self.failUnless(ei.caller == "caller", "Caller is not set properly by position.")
self.failUnless(ei.message == "message", "Caller is not set properly by position.")
self.failUnless(ei.exception == "exception", "Caller is not set properly by position.")
ei=errorInformation(exception="exception", message="message", caller = "caller")
self.failUnless(ei.caller == "caller", "Caller is not set properly by name.")
self.failUnless(ei.message == "message", "Caller is not set properly by name.")
self.failUnless(ei.exception == "exception", "Caller is not set properly by name.")
if __name__=='__main__':
unittest.main()
|
def test_SmokeTest_NoArguments(self):
"wouldblock.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, wouldblock)
def test_SmokeTest_MinArguments(self):
"wouldblock.__init__ - Stores the caller in the wouldblock message. Allows the scheduler to make a decision. Checks wouldblock is a subclass of ipc."
wb=wouldblock(self)
self.failUnless(isinstance(wb, ipc), "wouldblock should be derived from ipc")
self.failUnless(wb.caller == self, "caller not properly set by __init__.")
|
identifier_body
|
test_Ipc.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
# Aim: Full coverage testing of the Ipc classes
#
# Test the module loads
import unittest
from Axon.Ipc import *
class ipc_Test(unittest.TestCase):
def test_SmokeTest(self):
"ipc - Should be derived from object."
self.failUnless(isinstance(ipc(),object), "IPC objects should also be instances of object.")
class newComponent_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"newComponent.__init__ - Should work without problems."
nc=newComponent()
self.failUnless(isinstance(nc, ipc), "newComponent should be derived from ipc class")
self.failUnless(len(nc.components())==0, "There should be no components in the message if the constructor was called with no arguments.")
def test___init__SmokeTest(self):
"newComponent.__init__ - Groups all the arguments as a tuple of components that need to be activated/added to the run queue. Order is unimportant, scheduler doesn't care."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "Component tuple is not as expected.")
def test_components(self):
"newComponent.components - Returns a tuple of components that need to be added to the run queue/activated. Same test as for __init__ as they are counterparts."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "components returned something other than expected.")
class shutdownMicroprocess_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"shutdownMicroprocess.__init__ - Should work without problems."
sm=shutdownMicroprocess()
self.failUnless(isinstance(sm,ipc), "shutdownMicroprocess should be derived from ipc")
self.failUnless(sm.microprocesses()==(), "Microprocess tuple not empty as expected.")
def test___init__SmokeTest(self):
"shutdownMicroprocess.__init__ - Treats all the arguments as a tuple of microprocesses that need to be shutdown."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Stored tuple not as expected.")
def test_microprocesses(self):
"shutdownMicroprocess.microprocesses- Returns the list of microprocesses that need to be shutdown. This is essentially the counterpart to the __init__ test."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Returned tuple not as expected.")
class notify_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"notify.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, notify)
def test_SmokeTest_MinArguments(self):
"notify.__init__ - Creates a message from a specific caller with some data payload to notify part of the system of an event."
n=notify("caller", "payload")
self.failUnless(isinstance(n, ipc), "Expected notify to be an instance of ipc.")
self.failUnless(n.object == "payload", "Payload argument not stored in object member.")
self.failUnless(n.caller == "caller", "Caller argument not stored in caller member.")
class status_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"status.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, status)
def test_SmokeTest_MinArguments(self):
"status.__init__ - Stores the status message - for extraction by the recipient of the message. Checks object is instance of ipc."
s=status("Status message.")
self.failUnless(isinstance(s, ipc), "status should be derived from ipc.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
def test_status(self):
"status.status - Returns the status message stored inside the status object. Counterpart to __init__ test."
s=status("Status message.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
class wouldblock_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"wouldblock.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, wouldblock)
def test_SmokeTest_MinArguments(self):
"wouldblock.__init__ - Stores the caller in the wouldblock message. Allows the scheduler to make a decision. Checks wouldblock is a subclass of ipc."
wb=wouldblock(self)
self.failUnless(isinstance(wb, ipc), "wouldblock should be derived from ipc")
self.failUnless(wb.caller == self, "caller not properly set by __init__.")
class producerFinished_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"producerFinished.__init__ - Called without arguments defaults to a caller of None, message of None. Checks producerFinished is a subclass of ipc"
pf=producerFinished()
self.failUnless(isinstance(pf, ipc), "producerFinished should be derived from ipc.")
self.failUnless(pf.caller== None, "caller does not default to None")
self.failUnless(pf.message == None, "message does not default to None")
def test_SmokeTest_MinArguments(self):
"test_SmokeTest.__init__ - Creates a producerFinished message with specified caller & shutdown 'last' message."
pf = producerFinished("caller", "message")
self.failUnless(pf.caller == "caller", "caller not set correctly by position.")
self.failUnless(pf.message == "message", "message not set correctly by position.")
pf2 = producerFinished(message="message", caller="caller")
self.failUnless(pf2.caller == "caller", "caller not set correctly by name.")
self.failUnless(pf2.message == "message", "message not set correctly by name.")
class errorInformation_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"errorInformation.__init__ - Called without arguments fails - must include caller."
self.failUnlessRaises(TypeError, errorInformation)
def test_SmokeTest_MinArguments(self):
"errorInformation.__init__ - Takes the supplied caller, and creates an errorInformation object. Checks errorInformation object is an instance of ipc."
ei=errorInformation(self)
self.failUnless(isinstance(ei, ipc), "errorInformation should be derived from ipc.")
self.failUnless(ei.caller == self, "Caller is not set properly.")
def test_SmokeTest_MinSensibleArguments(self):
"errorInformation.__init__ - An exception & message (any object) in addition to the caller to provide a more meaningful errorInformation message where appropriate. ttbw "
ei=errorInformation("caller", "exception", "message")
self.failUnless(ei.caller == "caller", "Caller is not set properly by position.")
self.failUnless(ei.message == "message", "Caller is not set properly by position.")
|
self.failUnless(ei.exception == "exception", "Caller is not set properly by position.")
ei=errorInformation(exception="exception", message="message", caller = "caller")
self.failUnless(ei.caller == "caller", "Caller is not set properly by name.")
        self.failUnless(ei.message == "message", "Message is not set properly by name.")
        self.failUnless(ei.exception == "exception", "Exception is not set properly by name.")
if __name__=='__main__':
unittest.main()
|
random_line_split
|
|
test_Ipc.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
# Aim: Full coverage testing of the Ipc classes
#
# Test the module loads
import unittest
from Axon.Ipc import *
class
|
(unittest.TestCase):
def test_SmokeTest(self):
"ipc - Should be derived from object."
self.failUnless(isinstance(ipc(),object), "IPC objects should also be instances of object.")
class newComponent_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"newComponent.__init__ - Should work without problems."
nc=newComponent()
self.failUnless(isinstance(nc, ipc), "newComponent should be derived from ipc class")
self.failUnless(len(nc.components())==0, "There should be no components in the message if the constructor was called with no arguments.")
def test___init__SmokeTest(self):
"newComponent.__init__ - Groups all the arguments as a tuple of components that need to be activated/added to the run queue. Order is unimportant, scheduler doesn't care."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "Component tuple is not as expected.")
def test_components(self):
"newComponent.components - Returns a tuple of components that need to be added to the run queue/activated. Same test as for __init__ as they are counterparts."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "components returned something other than expected.")
class shutdownMicroprocess_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"shutdownMicroprocess.__init__ - Should work without problems."
sm=shutdownMicroprocess()
self.failUnless(isinstance(sm,ipc), "shutdownMicroprocess should be derived from ipc")
self.failUnless(sm.microprocesses()==(), "Microprocess tuple not empty as expected.")
def test___init__SmokeTest(self):
"shutdownMicroprocess.__init__ - Treats all the arguments as a tuple of microprocesses that need to be shutdown."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Stored tuple not as expected.")
def test_microprocesses(self):
"shutdownMicroprocess.microprocesses- Returns the list of microprocesses that need to be shutdown. This is essentially the counterpart to the __init__ test."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Returned tuple not as expected.")
class notify_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"notify.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, notify)
def test_SmokeTest_MinArguments(self):
"notify.__init__ - Creates a message from a specific caller with some data payload to notify part of the system of an event."
n=notify("caller", "payload")
self.failUnless(isinstance(n, ipc), "Expected notify to be an instance of ipc.")
self.failUnless(n.object == "payload", "Payload argument not stored in object member.")
self.failUnless(n.caller == "caller", "Caller argument not stored in caller member.")
class status_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"status.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, status)
def test_SmokeTest_MinArguments(self):
"status.__init__ - Stores the status message - for extraction by the recipient of the message. Checks object is instance of ipc."
s=status("Status message.")
self.failUnless(isinstance(s, ipc), "status should be derived from ipc.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
def test_status(self):
"status.status - Returns the status message stored inside the status object. Counterpart to __init__ test."
s=status("Status message.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
class wouldblock_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"wouldblock.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, wouldblock)
def test_SmokeTest_MinArguments(self):
"wouldblock.__init__ - Stores the caller in the wouldblock message. Allows the scheduler to make a decision. Checks wouldblock is a subclass of ipc."
wb=wouldblock(self)
self.failUnless(isinstance(wb, ipc), "wouldblock should be derived from ipc")
self.failUnless(wb.caller == self, "caller not properly set by __init__.")
class producerFinished_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"producerFinished.__init__ - Called without arguments defaults to a caller of None, message of None. Checks producerFinished is a subclass of ipc"
pf=producerFinished()
self.failUnless(isinstance(pf, ipc), "producerFinished should be an derived from ipc.")
self.failUnless(pf.caller== None, "caller does not default to None")
self.failUnless(pf.message == None, "message does not default to None")
def test_SmokeTest_MinArguments(self):
"test_SmokeTest.__init__ - Creates a producerFinished message with specified caller & shutdown 'last' message."
pf = producerFinished("caller", "message")
self.failUnless(pf.caller == "caller", "caller not set correctly by position.")
self.failUnless(pf.message == "message", "message not set correctly by position.")
pf2 = producerFinished(message="message", caller="caller")
self.failUnless(pf2.caller == "caller", "caller not set correctly by name.")
self.failUnless(pf2.message == "message", "message not set correctly by name.")
class errorInformation_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"errorInformation.__init__ - Called without arguments fails - must include caller."
self.failUnlessRaises(TypeError, errorInformation)
def test_SmokeTest_MinArguments(self):
"errorInformation.__init__ - Takes the supplied caller, and creates an errorInformation object. Checks errorInformation object is an instance of ipc."
ei=errorInformation(self)
self.failUnless(isinstance(ei, ipc), "errorInformation should be derived from ipc.")
self.failUnless(ei.caller == self, "Caller is not set properly.")
def test_SmokeTest_MinSensibleArguments(self):
"errorInformation.__init__ - An exception & message (any object) in addition to the caller to provide a more meaningful errorInformation message where appropriate. ttbw "
ei=errorInformation("caller", "exception", "message")
self.failUnless(ei.caller == "caller", "Caller is not set properly by position.")
        self.failUnless(ei.message == "message", "Message is not set properly by position.")
        self.failUnless(ei.exception == "exception", "Exception is not set properly by position.")
ei=errorInformation(exception="exception", message="message", caller = "caller")
self.failUnless(ei.caller == "caller", "Caller is not set properly by name.")
        self.failUnless(ei.message == "message", "Message is not set properly by name.")
        self.failUnless(ei.exception == "exception", "Exception is not set properly by name.")
if __name__=='__main__':
unittest.main()
|
ipc_Test
|
identifier_name
|
test_Ipc.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
# Aim: Full coverage testing of the Ipc classes
#
# Test the module loads
import unittest
from Axon.Ipc import *
class ipc_Test(unittest.TestCase):
def test_SmokeTest(self):
"ipc - Should be derived from object."
self.failUnless(isinstance(ipc(),object), "IPC objects should also be instances of object.")
class newComponent_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"newComponent.__init__ - Should work without problems."
nc=newComponent()
self.failUnless(isinstance(nc, ipc), "newComponent should be derived from ipc class")
self.failUnless(len(nc.components())==0, "There should be no components in the message if the constructor was called with no arguments.")
def test___init__SmokeTest(self):
"newComponent.__init__ - Groups all the arguments as a tuple of components that need to be activated/added to the run queue. Order is unimportant, scheduler doesn't care."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "Component tuple is not as expected.")
def test_components(self):
"newComponent.components - Returns a tuple of components that need to be added to the run queue/activated. Same test as for __init__ as they are counterparts."
nc=newComponent("ba","da","bing")
self.failUnless(nc.components()==("ba","da","bing"), "components returned something other than expected.")
class shutdownMicroprocess_Test(unittest.TestCase):
def test___init__SmokeTest_NoArguments(self):
"shutdownMicroprocess.__init__ - Should work without problems."
sm=shutdownMicroprocess()
self.failUnless(isinstance(sm,ipc), "shutdownMicroprocess should be derived from ipc")
self.failUnless(sm.microprocesses()==(), "Microprocess tuple not empty as expected.")
def test___init__SmokeTest(self):
"shutdownMicroprocess.__init__ - Treats all the arguments as a tuple of microprocesses that need to be shutdown."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Stored tuple not as expected.")
def test_microprocesses(self):
"shutdownMicroprocess.microprocesses- Returns the list of microprocesses that need to be shutdown. This is essentially the counterpart to the __init__ test."
sm=shutdownMicroprocess("ba","da","bing")
self.failUnless(sm.microprocesses()==("ba","da","bing"), "Returned tuple not as expected.")
class notify_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"notify.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, notify)
def test_SmokeTest_MinArguments(self):
"notify.__init__ - Creates a message from a specific caller with some data payload to notify part of the system of an event."
n=notify("caller", "payload")
self.failUnless(isinstance(n, ipc), "Expected notify to be an instance of ipc.")
self.failUnless(n.object == "payload", "Payload argument not stored in object member.")
self.failUnless(n.caller == "caller", "Caller argument not stored in caller member.")
class status_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"status.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, status)
def test_SmokeTest_MinArguments(self):
"status.__init__ - Stores the status message - for extraction by the recipient of the message. Checks object is instance of ipc."
s=status("Status message.")
self.failUnless(isinstance(s, ipc), "status should be derived from ipc.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
def test_status(self):
"status.status - Returns the status message stored inside the status object. Counterpart to __init__ test."
s=status("Status message.")
self.failUnless(s.status() == "Status message.", "Status message not stored properly.")
class wouldblock_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"wouldblock.__init__ - Called without arguments fails."
self.failUnlessRaises(TypeError, wouldblock)
def test_SmokeTest_MinArguments(self):
"wouldblock.__init__ - Stores the caller in the wouldblock message. Allows the scheduler to make a decision. Checks wouldblock is a subclass of ipc."
wb=wouldblock(self)
self.failUnless(isinstance(wb, ipc), "wouldblock should be derived from ipc")
self.failUnless(wb.caller == self, "caller not properly set by __init__.")
class producerFinished_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"producerFinished.__init__ - Called without arguments defaults to a caller of None, message of None. Checks producerFinished is a subclass of ipc"
pf=producerFinished()
self.failUnless(isinstance(pf, ipc), "producerFinished should be an derived from ipc.")
self.failUnless(pf.caller== None, "caller does not default to None")
self.failUnless(pf.message == None, "message does not default to None")
def test_SmokeTest_MinArguments(self):
"test_SmokeTest.__init__ - Creates a producerFinished message with specified caller & shutdown 'last' message."
pf = producerFinished("caller", "message")
self.failUnless(pf.caller == "caller", "caller not set correctly by position.")
self.failUnless(pf.message == "message", "message not set correctly by position.")
pf2 = producerFinished(message="message", caller="caller")
self.failUnless(pf2.caller == "caller", "caller not set correctly by name.")
self.failUnless(pf2.message == "message", "message not set correctly by name.")
class errorInformation_Test(unittest.TestCase):
def test_SmokeTest_NoArguments(self):
"errorInformation.__init__ - Called without arguments fails - must include caller."
self.failUnlessRaises(TypeError, errorInformation)
def test_SmokeTest_MinArguments(self):
"errorInformation.__init__ - Takes the supplied caller, and creates an errorInformation object. Checks errorInformation object is an instance of ipc."
ei=errorInformation(self)
self.failUnless(isinstance(ei, ipc), "errorInformation should be derived from ipc.")
self.failUnless(ei.caller == self, "Caller is not set properly.")
def test_SmokeTest_MinSensibleArguments(self):
"errorInformation.__init__ - An exception & message (any object) in addition to the caller to provide a more meaningful errorInformation message where appropriate. ttbw "
ei=errorInformation("caller", "exception", "message")
self.failUnless(ei.caller == "caller", "Caller is not set properly by position.")
        self.failUnless(ei.message == "message", "Message is not set properly by position.")
        self.failUnless(ei.exception == "exception", "Exception is not set properly by position.")
ei=errorInformation(exception="exception", message="message", caller = "caller")
self.failUnless(ei.caller == "caller", "Caller is not set properly by name.")
        self.failUnless(ei.message == "message", "Message is not set properly by name.")
        self.failUnless(ei.exception == "exception", "Exception is not set properly by name.")
if __name__=='__main__':
|
unittest.main()
|
conditional_block
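For readers skimming the copies of test_Ipc.py above, the assertions boil down to a handful of attribute checks. The snippet below is not part of the test file; it is a minimal, hedged sketch of constructing the same Axon.Ipc messages, assuming only that Axon is importable as in the tests, and it relies solely on the constructor signatures and attributes that the assertions themselves verify (caller, message, microprocesses(), exception).
from Axon.Ipc import producerFinished, shutdownMicroprocess, errorInformation

# producerFinished carries the caller and an optional final "last" message.
pf = producerFinished(caller="source", message="last item")
print(pf.caller, pf.message)                  # source last item

# shutdownMicroprocess stores its arguments as a tuple of microprocesses.
sm = shutdownMicroprocess("proc_a", "proc_b")
print(sm.microprocesses())                    # ('proc_a', 'proc_b')

# errorInformation takes caller, exception, message positionally.
ei = errorInformation("source", ValueError("boom"), "failed while reading")
print(ei.caller, ei.exception, ei.message)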
|
|
set.js
|
// -------------------------------------------------------------------
// markItUp!
// -------------------------------------------------------------------
// Copyright (C) 2008 Jay Salvat
// http://markitup.jaysalvat.com/
// -------------------------------------------------------------------
// MarkDown tags example
// http://en.wikipedia.org/wiki/Markdown
// http://daringfireball.net/projects/markdown/
// -------------------------------------------------------------------
// Feel free to add more tags
// -------------------------------------------------------------------
mySettings = {
previewParserPath: '',
onShiftEnter: {keepDefault:false, openWith:'\n\n'},
markupSet: [
{name:'First Level Heading', key:'1', placeHolder:'Your title here...', closeWith:function(markItUp) { return miu.markdownTitle(markItUp, '=') } },
{name:'Second Level Heading', key:'2', placeHolder:'Your title here...', closeWith:function(markItUp) { return miu.markdownTitle(markItUp, '-') } },
{name:'Heading 3', key:'3', openWith:'### ', placeHolder:'Your title here...' },
{name:'Heading 4', key:'4', openWith:'#### ', placeHolder:'Your title here...' },
{name:'Heading 5', key:'5', openWith:'##### ', placeHolder:'Your title here...' },
{name:'Heading 6', key:'6', openWith:'###### ', placeHolder:'Your title here...' },
{separator:'---------------' },
{name:'Bold', key:'B', openWith:'**', closeWith:'**'},
{name:'Italic', key:'I', openWith:'_', closeWith:'_'},
{separator:'---------------' },
{name:'Bulleted List', openWith:'- ' },
{name:'Numeric List', openWith:function(markItUp) {
return markItUp.line+'. ';
}},
{separator:'---------------' },
{name:'Picture', key:'P', replaceWith:'![[![Alternative text]!]]([![Url:!:http://]!] "[![Title]!]")'},
{name:'Upload Picture', key:'U', className: 'hmdImageUpload'},
{name:'Link', key:'L', openWith:'[', closeWith:']([![Url:!:http://]!] "[![Title]!]")', placeHolder:'Your text to link here...' },
{separator:'---------------'},
{name:'Quotes', openWith:'> '},
{name:'Code Block / Code', openWith:'(!(\t|!|`)!)', closeWith:'(!(`)!)'},
{separator:'---------------'},
{name:'Preview', call:'preview', className:"preview"}
]
}
// mIu nameSpace to avoid conflict.
miu = {
markdownTitle: function(markItUp, char) {
heading = '';
n = $.trim(markItUp.selection||markItUp.placeHolder).length;
for(i = 0; i < n; i++) {
heading += char;
}
return '\n'+heading;
},
imagesIndex: -1,
/**
     * Adds an image to the editor's uploaded-images area
     *
     * @var jQuery $miu the textarea object
     * @var object imgData object describing the image; possible fields: src, code
     * @var boolean insert if true, the image code is also inserted into the editor's text area
*/
pushImage: function($miu, imgData, insert)
{
        // helper that inserts the image code into the editor
var insertImage = function()
{
$.markItUp({target: $miu, openWith: '{{' + imgData.code + '}}'});
};
var $container = $miu.parent();
if(!$container.hasClass('markItUpWithImages'))
{
            // initialise the attachments mode
$container
.append('<div id="' + $miu[0].id + 'Images" class="markItUpImagesContainer">')
.addClass('markItUpWithImages');
}
|
$imagesContainer = $('#' + $miu[0].id + 'Images');
if('id' in imgData)
            this.imagesIndex = Math.max(this.imagesIndex, imgData.id); // keep the highest image id seen so far
else
++this.imagesIndex;
imgData.code = 'IMAGE_' + this.imagesIndex;
        imgData.name = imgData.src.slice(imgData.src.lastIndexOf('/') + 1); // image file name
$('<div>')
.html('<img src="' + imgData.src + '" /> <p>Код изображения: {{<b>' + imgData.code + '</b>}}</p><br><p style="float:right;"><a href="" class="icon_edit"></a><a href="" class="icon_delete"></a></p>')
.click(insertImage)
.appendTo($imagesContainer);
if (insert)
insertImage();
},
}
// TODO: allow specifying the title and alt attributes
// TODO: allow specifying centering
// TODO: create form fields carrying the image data
// TODO: "delete" and "edit" buttons
|
random_line_split
|
|
CloudQueueIcon.js
|
import React from 'react';
import Icon from '../Icon';
export default class CloudQueueIcon extends Icon {
|
(){return <svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" viewBox="0 0 48 48"><path d="M38.71 20.07C37.35 13.19 31.28 8 24 8c-5.78 0-10.79 3.28-13.3 8.07C4.69 16.72 0 21.81 0 28c0 6.63 5.37 12 12 12h26c5.52 0 10-4.48 10-10 0-5.28-4.11-9.56-9.29-9.93zM38 36H12c-4.42 0-8-3.58-8-8s3.58-8 8-8h1.42c1.31-4.61 5.54-8 10.58-8 6.08 0 11 4.92 11 11v1h3c3.31 0 6 2.69 6 6s-2.69 6-6 6z"/></svg>;}
};
|
getSVG
|
identifier_name
|
CloudQueueIcon.js
|
import React from 'react';
import Icon from '../Icon';
export default class CloudQueueIcon extends Icon {
getSVG()
|
};
|
{return <svg xmlns="http://www.w3.org/2000/svg" width="48" height="48" viewBox="0 0 48 48"><path d="M38.71 20.07C37.35 13.19 31.28 8 24 8c-5.78 0-10.79 3.28-13.3 8.07C4.69 16.72 0 21.81 0 28c0 6.63 5.37 12 12 12h26c5.52 0 10-4.48 10-10 0-5.28-4.11-9.56-9.29-9.93zM38 36H12c-4.42 0-8-3.58-8-8s3.58-8 8-8h1.42c1.31-4.61 5.54-8 10.58-8 6.08 0 11 4.92 11 11v1h3c3.31 0 6 2.69 6 6s-2.69 6-6 6z"/></svg>;}
|
identifier_body
|
node-card.ts
|
import { Component, Input } from '@angular/core';
import { NodeDef } from '../models/nodeDef'
@Component({
selector: 'chix-node-card',
template: `
<a [routerLink]="['/nodes', id]">
<mat-card>
<mat-card-title-group>
<img mat-card-sm-image *ngIf="thumbnail" [src]="thumbnail"/>
<mat-card-title>{{ title | bcEllipsis:35 }}</mat-card-title>
<mat-card-subtitle *ngIf="subtitle">{{ subtitle | bcEllipsis:40 }}</mat-card-subtitle>
</mat-card-title-group>
<mat-card-content>
<p *ngIf="description">{{ description | bcEllipsis }}</p>
</mat-card-content>
<mat-card-footer>
<bc-book-authors [book]="book"></bc-book-authors>
</mat-card-footer>
</mat-card>
</a>
`,
styles: [
`
:host {
display: flex;
}
:host a {
display: flex;
}
`,
],
})
export class NodeCardComponent {
@Input() node: NodeDef;
get
|
() {
return this.node._id;
}
get title() {
return this.node.title;
}
get description() {
return this.node.description;
}
}
|
id
|
identifier_name
|
node-card.ts
|
import { Component, Input } from '@angular/core';
import { NodeDef } from '../models/nodeDef'
@Component({
selector: 'chix-node-card',
template: `
<a [routerLink]="['/nodes', id]">
<mat-card>
<mat-card-title-group>
<img mat-card-sm-image *ngIf="thumbnail" [src]="thumbnail"/>
<mat-card-title>{{ title | bcEllipsis:35 }}</mat-card-title>
<mat-card-subtitle *ngIf="subtitle">{{ subtitle | bcEllipsis:40 }}</mat-card-subtitle>
</mat-card-title-group>
<mat-card-content>
<p *ngIf="description">{{ description | bcEllipsis }}</p>
</mat-card-content>
<mat-card-footer>
<bc-book-authors [book]="book"></bc-book-authors>
</mat-card-footer>
</mat-card>
</a>
`,
styles: [
`
:host {
display: flex;
}
:host a {
display: flex;
}
`,
],
})
|
export class NodeCardComponent {
@Input() node: NodeDef;
get id() {
return this.node._id;
}
get title() {
return this.node.title;
}
get description() {
return this.node.description;
}
}
|
random_line_split
|
|
metadata.ts
|
import {Type, DirectiveMetadata} from 'angular2/core';
import {DirectiveResolver} from 'angular2/compiler';
import {stringify} from './util';
var COMPONENT_SELECTOR = /^[\w|-]*$/;
var SKEWER_CASE = /-(\w)/g;
var directiveResolver = new DirectiveResolver();
export interface AttrProp {
prop: string;
attr: string;
bracketAttr: string;
bracketParenAttr: string;
parenAttr: string;
onAttr: string;
bindAttr: string;
bindonAttr: string;
}
export interface ComponentInfo {
type: Type;
selector: string;
inputs: AttrProp[];
outputs: AttrProp[];
}
export function getComponentInfo(type: Type): ComponentInfo
|
export function parseFields(names: string[]): AttrProp[] {
var attrProps: AttrProp[] = [];
if (names) {
for (var i = 0; i < names.length; i++) {
var parts = names[i].split(':');
var prop = parts[0].trim();
var attr = (parts[1] || parts[0]).trim();
var capitalAttr = attr.charAt(0).toUpperCase() + attr.substr(1);
attrProps.push(<AttrProp>{
prop: prop,
attr: attr,
bracketAttr: `[${attr}]`,
parenAttr: `(${attr})`,
bracketParenAttr: `[(${attr})]`,
onAttr: `on${capitalAttr}`,
bindAttr: `bind${capitalAttr}`,
bindonAttr: `bindon${capitalAttr}`
});
}
}
return attrProps;
}
|
{
var resolvedMetadata: DirectiveMetadata = directiveResolver.resolve(type);
var selector = resolvedMetadata.selector;
if (!selector.match(COMPONENT_SELECTOR)) {
throw new Error('Only selectors matching element names are supported, got: ' + selector);
}
var selector = selector.replace(SKEWER_CASE, (all, letter: string) => letter.toUpperCase());
return {
type: type,
selector: selector,
inputs: parseFields(resolvedMetadata.inputs),
outputs: parseFields(resolvedMetadata.outputs)
};
}
|
identifier_body
|
metadata.ts
|
import {Type, DirectiveMetadata} from 'angular2/core';
import {DirectiveResolver} from 'angular2/compiler';
import {stringify} from './util';
var COMPONENT_SELECTOR = /^[\w|-]*$/;
var SKEWER_CASE = /-(\w)/g;
var directiveResolver = new DirectiveResolver();
export interface AttrProp {
prop: string;
attr: string;
bracketAttr: string;
bracketParenAttr: string;
parenAttr: string;
onAttr: string;
bindAttr: string;
bindonAttr: string;
}
export interface ComponentInfo {
type: Type;
selector: string;
inputs: AttrProp[];
outputs: AttrProp[];
}
export function getComponentInfo(type: Type): ComponentInfo {
var resolvedMetadata: DirectiveMetadata = directiveResolver.resolve(type);
var selector = resolvedMetadata.selector;
if (!selector.match(COMPONENT_SELECTOR)) {
throw new Error('Only selectors matching element names are supported, got: ' + selector);
}
var selector = selector.replace(SKEWER_CASE, (all, letter: string) => letter.toUpperCase());
return {
type: type,
selector: selector,
inputs: parseFields(resolvedMetadata.inputs),
outputs: parseFields(resolvedMetadata.outputs)
};
}
export function parseFields(names: string[]): AttrProp[] {
var attrProps: AttrProp[] = [];
if (names)
|
return attrProps;
}
|
{
for (var i = 0; i < names.length; i++) {
var parts = names[i].split(':');
var prop = parts[0].trim();
var attr = (parts[1] || parts[0]).trim();
var capitalAttr = attr.charAt(0).toUpperCase() + attr.substr(1);
attrProps.push(<AttrProp>{
prop: prop,
attr: attr,
bracketAttr: `[${attr}]`,
parenAttr: `(${attr})`,
bracketParenAttr: `[(${attr})]`,
onAttr: `on${capitalAttr}`,
bindAttr: `bind${capitalAttr}`,
bindonAttr: `bindon${capitalAttr}`
});
}
}
|
conditional_block
|
metadata.ts
|
import {Type, DirectiveMetadata} from 'angular2/core';
import {DirectiveResolver} from 'angular2/compiler';
import {stringify} from './util';
var COMPONENT_SELECTOR = /^[\w|-]*$/;
var SKEWER_CASE = /-(\w)/g;
var directiveResolver = new DirectiveResolver();
export interface AttrProp {
prop: string;
attr: string;
bracketAttr: string;
bracketParenAttr: string;
parenAttr: string;
onAttr: string;
bindAttr: string;
bindonAttr: string;
}
export interface ComponentInfo {
type: Type;
selector: string;
inputs: AttrProp[];
outputs: AttrProp[];
}
export function getComponentInfo(type: Type): ComponentInfo {
var resolvedMetadata: DirectiveMetadata = directiveResolver.resolve(type);
var selector = resolvedMetadata.selector;
if (!selector.match(COMPONENT_SELECTOR)) {
throw new Error('Only selectors matching element names are supported, got: ' + selector);
}
var selector = selector.replace(SKEWER_CASE, (all, letter: string) => letter.toUpperCase());
return {
type: type,
selector: selector,
inputs: parseFields(resolvedMetadata.inputs),
outputs: parseFields(resolvedMetadata.outputs)
};
}
export function
|
(names: string[]): AttrProp[] {
var attrProps: AttrProp[] = [];
if (names) {
for (var i = 0; i < names.length; i++) {
var parts = names[i].split(':');
var prop = parts[0].trim();
var attr = (parts[1] || parts[0]).trim();
var capitalAttr = attr.charAt(0).toUpperCase() + attr.substr(1);
attrProps.push(<AttrProp>{
prop: prop,
attr: attr,
bracketAttr: `[${attr}]`,
parenAttr: `(${attr})`,
bracketParenAttr: `[(${attr})]`,
onAttr: `on${capitalAttr}`,
bindAttr: `bind${capitalAttr}`,
bindonAttr: `bindon${capitalAttr}`
});
}
}
return attrProps;
}
|
parseFields
|
identifier_name
|
metadata.ts
|
import {Type, DirectiveMetadata} from 'angular2/core';
import {DirectiveResolver} from 'angular2/compiler';
import {stringify} from './util';
var COMPONENT_SELECTOR = /^[\w|-]*$/;
var SKEWER_CASE = /-(\w)/g;
var directiveResolver = new DirectiveResolver();
export interface AttrProp {
prop: string;
attr: string;
bracketAttr: string;
|
bindonAttr: string;
}
export interface ComponentInfo {
type: Type;
selector: string;
inputs: AttrProp[];
outputs: AttrProp[];
}
export function getComponentInfo(type: Type): ComponentInfo {
var resolvedMetadata: DirectiveMetadata = directiveResolver.resolve(type);
var selector = resolvedMetadata.selector;
if (!selector.match(COMPONENT_SELECTOR)) {
throw new Error('Only selectors matching element names are supported, got: ' + selector);
}
var selector = selector.replace(SKEWER_CASE, (all, letter: string) => letter.toUpperCase());
return {
type: type,
selector: selector,
inputs: parseFields(resolvedMetadata.inputs),
outputs: parseFields(resolvedMetadata.outputs)
};
}
export function parseFields(names: string[]): AttrProp[] {
var attrProps: AttrProp[] = [];
if (names) {
for (var i = 0; i < names.length; i++) {
var parts = names[i].split(':');
var prop = parts[0].trim();
var attr = (parts[1] || parts[0]).trim();
var capitalAttr = attr.charAt(0).toUpperCase() + attr.substr(1);
attrProps.push(<AttrProp>{
prop: prop,
attr: attr,
bracketAttr: `[${attr}]`,
parenAttr: `(${attr})`,
bracketParenAttr: `[(${attr})]`,
onAttr: `on${capitalAttr}`,
bindAttr: `bind${capitalAttr}`,
bindonAttr: `bindon${capitalAttr}`
});
}
}
return attrProps;
}
|
bracketParenAttr: string;
parenAttr: string;
onAttr: string;
bindAttr: string;
|
random_line_split
|
package.py
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class OclIcd(AutotoolsPackage):
"""This package aims at creating an Open Source alternative to vendor specific
OpenCL ICD loaders."""
homepage = "https://github.com/OCL-dev/ocl-icd"
url = "https://github.com/OCL-dev/ocl-icd/archive/v2.2.12.tar.gz"
version('2.2.13', sha256='f85d59f3e8327f15637b91e4ae8df0829e94daeff68c647b2927b8376b1f8d92')
version('2.2.12', sha256='17500e5788304eef5b52dbe784cec197bdae64e05eecf38317840d2d05484272')
|
version('2.2.9', sha256='88da749bc2bd75149f0bb6e72eb4a9d74401a54f4508bc730f13cc03c57a17ed')
version('2.2.8', sha256='8a8a405c7d659b905757a358dc467f4aa3d7e4dff1d1624779065764d962a246')
version('2.2.7', sha256='b8e68435904e1a95661c385f24d6924ed28f416985c6db5a3c7448698ad5fea2')
version('2.2.6', sha256='4567cae92f58c1d6ecfc771c456fa95f206d8a5c7c5d6c9010ec688a9fd83750')
version('2.2.5', sha256='50bf51f4544f83e69a5a2f564732a2adca63fbe9511430aba12f8d6f3a53ae59')
version('2.2.4', sha256='92853137ffff393cc74f829357fdd80ac46a82b46c970e80195db86164cca316')
version('2.2.3', sha256='46b8355d90f8cc240555e4e077f223c47b950abeadf3e1af52d6e68d2efc2ff3')
variant("headers", default=False, description="Install also OpenCL headers to use this as OpenCL provider")
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('ruby', type='build')
depends_on('asciidoc-py3', type='build')
depends_on('xmlto', type='build')
depends_on('[email protected]:', when='+headers')
provides('opencl@:2.2', when='@2.2.12:+headers')
provides('opencl@:2.1', when='@2.2.8:2.2.11+headers')
provides('opencl@:2.0', when='@2.2.3:2.2.7+headers')
def flag_handler(self, name, flags):
if name == 'cflags' and self.spec.satisfies('@:2.2.12'):
# https://github.com/OCL-dev/ocl-icd/issues/8
            # this is fixed in versions greater than 2.2.12
flags.append('-O2')
            # gcc-10 changed the default from -fcommon to -fno-common
# This is fixed in versions greater than 2.2.12:
# https://github.com/OCL-dev/ocl-icd/commit/4667bddd365bcc1dc66c483835971f0083b44b1d
if self.spec.satisfies('%gcc@10:'):
flags.append('-fcommon')
return (flags, None, None)
|
version('2.2.11', sha256='c1865ef7701b8201ebc6930ed3ac757c7e5cb30f3aa4c1e742a6bc022f4f2292')
version('2.2.10', sha256='d0459fa1421e8d86aaf0a4df092185ea63bc4e1a7682d3af261ae5d3fae063c7')
|
random_line_split
|
package.py
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class OclIcd(AutotoolsPackage):
"""This package aims at creating an Open Source alternative to vendor specific
OpenCL ICD loaders."""
homepage = "https://github.com/OCL-dev/ocl-icd"
url = "https://github.com/OCL-dev/ocl-icd/archive/v2.2.12.tar.gz"
version('2.2.13', sha256='f85d59f3e8327f15637b91e4ae8df0829e94daeff68c647b2927b8376b1f8d92')
version('2.2.12', sha256='17500e5788304eef5b52dbe784cec197bdae64e05eecf38317840d2d05484272')
version('2.2.11', sha256='c1865ef7701b8201ebc6930ed3ac757c7e5cb30f3aa4c1e742a6bc022f4f2292')
version('2.2.10', sha256='d0459fa1421e8d86aaf0a4df092185ea63bc4e1a7682d3af261ae5d3fae063c7')
version('2.2.9', sha256='88da749bc2bd75149f0bb6e72eb4a9d74401a54f4508bc730f13cc03c57a17ed')
version('2.2.8', sha256='8a8a405c7d659b905757a358dc467f4aa3d7e4dff1d1624779065764d962a246')
version('2.2.7', sha256='b8e68435904e1a95661c385f24d6924ed28f416985c6db5a3c7448698ad5fea2')
version('2.2.6', sha256='4567cae92f58c1d6ecfc771c456fa95f206d8a5c7c5d6c9010ec688a9fd83750')
version('2.2.5', sha256='50bf51f4544f83e69a5a2f564732a2adca63fbe9511430aba12f8d6f3a53ae59')
version('2.2.4', sha256='92853137ffff393cc74f829357fdd80ac46a82b46c970e80195db86164cca316')
version('2.2.3', sha256='46b8355d90f8cc240555e4e077f223c47b950abeadf3e1af52d6e68d2efc2ff3')
variant("headers", default=False, description="Install also OpenCL headers to use this as OpenCL provider")
depends_on('autoconf', type='build')
depends_on('automake', type='build')
depends_on('libtool', type='build')
depends_on('m4', type='build')
depends_on('ruby', type='build')
depends_on('asciidoc-py3', type='build')
depends_on('xmlto', type='build')
depends_on('[email protected]:', when='+headers')
provides('opencl@:2.2', when='@2.2.12:+headers')
provides('opencl@:2.1', when='@2.2.8:2.2.11+headers')
provides('opencl@:2.0', when='@2.2.3:2.2.7+headers')
def
|
(self, name, flags):
if name == 'cflags' and self.spec.satisfies('@:2.2.12'):
# https://github.com/OCL-dev/ocl-icd/issues/8
            # this is fixed in versions greater than 2.2.12
flags.append('-O2')
            # gcc-10 changed the default from -fcommon to -fno-common
# This is fixed in versions greater than 2.2.12:
# https://github.com/OCL-dev/ocl-icd/commit/4667bddd365bcc1dc66c483835971f0083b44b1d
if self.spec.satisfies('%gcc@10:'):
flags.append('-fcommon')
return (flags, None, None)
|
flag_handler
|
identifier_name
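To make the effect of the flag_handler method above concrete, here is a hedged, self-contained sketch of the same decision logic. FakeSpec and cflags_for are hypothetical stand-ins introduced only for illustration; they are not Spack APIs, and the real method receives its flags list from Spack's build machinery.
class FakeSpec:
    """Hypothetical stand-in modelling only the two constraints flag_handler checks."""
    def __init__(self, old_release, gcc10):
        self.old_release = old_release   # True for ocl-icd @:2.2.12
        self.gcc10 = gcc10               # True when building with gcc 10 or newer

    def satisfies(self, constraint):
        if constraint == '@:2.2.12':
            return self.old_release
        if constraint == '%gcc@10:':
            return self.gcc10
        return False


def cflags_for(spec):
    flags = []
    if spec.satisfies('@:2.2.12'):
        flags.append('-O2')           # workaround for OCL-dev/ocl-icd issue #8
        if spec.satisfies('%gcc@10:'):
            flags.append('-fcommon')  # gcc 10 defaults to -fno-common
    return flags


print(cflags_for(FakeSpec(old_release=True, gcc10=True)))    # ['-O2', '-fcommon']
print(cflags_for(FakeSpec(old_release=False, gcc10=True)))   # []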
|