file_name
large_stringlengths 4
140
| prefix
large_stringlengths 0
39k
| suffix
large_stringlengths 0
36.1k
| middle
large_stringlengths 0
29.4k
| fim_type
large_stringclasses 4
values |
---|---|---|---|---|
test_registry.py
|
#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyhmsa_gui.util.testcase import TestCaseQApp, QTest
from pyhmsa_gui.util.registry import \
(iter_entry_points,
iter_condition_widget_classes, iter_condition_widgets,
iter_datum_widget_classes, iter_datum_widgets,
iter_importer_classes, iter_importers,
iter_exporter_classes, iter_exporters,
iter_preferences_widget_classes)
# Globals and constants variables.
class TestModule(TestCaseQApp):
def setUp(self):
TestCaseQApp.setUp(self)
def tearDown(self):
TestCaseQApp.tearDown(self)
def testiter_entry_points(self):
pass
def testiter_condition_widget_classes(self):
pass
def testiter_condition_widgets(self):
pass
def testiter_datum_widget_classes(self):
pass
def testiter_datum_widgets(self):
pass
def testiter_importer_classes(self):
pass
|
pass
def testiter_exporters(self):
pass
def testiter_preferences_widget_classes(self):
pass
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
|
def testiter_importers(self):
pass
def testiter_exporter_classes(self):
|
random_line_split
|
test_registry.py
|
#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyhmsa_gui.util.testcase import TestCaseQApp, QTest
from pyhmsa_gui.util.registry import \
(iter_entry_points,
iter_condition_widget_classes, iter_condition_widgets,
iter_datum_widget_classes, iter_datum_widgets,
iter_importer_classes, iter_importers,
iter_exporter_classes, iter_exporters,
iter_preferences_widget_classes)
# Globals and constants variables.
class TestModule(TestCaseQApp):
def setUp(self):
TestCaseQApp.setUp(self)
def
|
(self):
TestCaseQApp.tearDown(self)
def testiter_entry_points(self):
pass
def testiter_condition_widget_classes(self):
pass
def testiter_condition_widgets(self):
pass
def testiter_datum_widget_classes(self):
pass
def testiter_datum_widgets(self):
pass
def testiter_importer_classes(self):
pass
def testiter_importers(self):
pass
def testiter_exporter_classes(self):
pass
def testiter_exporters(self):
pass
def testiter_preferences_widget_classes(self):
pass
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
|
tearDown
|
identifier_name
|
test_registry.py
|
#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyhmsa_gui.util.testcase import TestCaseQApp, QTest
from pyhmsa_gui.util.registry import \
(iter_entry_points,
iter_condition_widget_classes, iter_condition_widgets,
iter_datum_widget_classes, iter_datum_widgets,
iter_importer_classes, iter_importers,
iter_exporter_classes, iter_exporters,
iter_preferences_widget_classes)
# Globals and constants variables.
class TestModule(TestCaseQApp):
def setUp(self):
TestCaseQApp.setUp(self)
def tearDown(self):
TestCaseQApp.tearDown(self)
def testiter_entry_points(self):
pass
def testiter_condition_widget_classes(self):
pass
def testiter_condition_widgets(self):
pass
def testiter_datum_widget_classes(self):
pass
def testiter_datum_widgets(self):
pass
def testiter_importer_classes(self):
pass
def testiter_importers(self):
pass
def testiter_exporter_classes(self):
pass
def testiter_exporters(self):
pass
def testiter_preferences_widget_classes(self):
pass
if __name__ == '__main__': #pragma: no cover
|
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
|
conditional_block
|
|
test_registry.py
|
#!/usr/bin/env python
""" """
# Standard library modules.
import unittest
import logging
# Third party modules.
# Local modules.
from pyhmsa_gui.util.testcase import TestCaseQApp, QTest
from pyhmsa_gui.util.registry import \
(iter_entry_points,
iter_condition_widget_classes, iter_condition_widgets,
iter_datum_widget_classes, iter_datum_widgets,
iter_importer_classes, iter_importers,
iter_exporter_classes, iter_exporters,
iter_preferences_widget_classes)
# Globals and constants variables.
class TestModule(TestCaseQApp):
def setUp(self):
TestCaseQApp.setUp(self)
def tearDown(self):
TestCaseQApp.tearDown(self)
def testiter_entry_points(self):
pass
def testiter_condition_widget_classes(self):
pass
def testiter_condition_widgets(self):
pass
def testiter_datum_widget_classes(self):
pass
def testiter_datum_widgets(self):
pass
def testiter_importer_classes(self):
pass
def testiter_importers(self):
pass
def testiter_exporter_classes(self):
pass
def testiter_exporters(self):
pass
def testiter_preferences_widget_classes(self):
|
if __name__ == '__main__': #pragma: no cover
logging.getLogger().setLevel(logging.DEBUG)
unittest.main()
|
pass
|
identifier_body
|
frame.py
|
# -----------------------------------------------------------------------------
#
# -*- coding: utf-8 -*-
#
# phlox-libdc1394/dc1394/frame.py
#
# Copyright (C) 2016, by Matthias Yang Chen <[email protected]>
# All rights reserved.
#
# phlox-libdc1394 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# phlox-libdc1394 is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with phlox-libdc1394. If not,
# see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals
from ctypes import ARRAY, c_byte
from numpy import ndarray
from .core import *
__all__ = ['Frame']
class Frame(ndarray):
"""
A frame returned by the camera.
All metadata are retained as attributes of the resulting image.
"""
_cam = None
_frame = None
def __new__(cls, camera, frame):
"""
Convert a dc1394 frame into a Frame instance.
:param camera:
:param frame:
:return:
"""
dtype = ARRAY(c_byte, frame.contents.image_bytes)
buf = dtype.from_address(frame.contents.image)
width, height = frame.contents.size
pixels = width * height
endian = frame.contents.little_endian and '<' or '>'
type_str = '%su%i' % (endian, frame.contents.image_bytes / pixels)
img = ndarray.__new__(cls, shape=(height, width), dtype=type_str, buffer=buf)
img.frame_id = frame.contents.id
img.frames_behind = frame.contents.frames_behind
img.position = frame.contents.position
img.packet_size = frame.contents.packet_size
img.packets_per_frame = frame.contents.packet_per_frame
img.timestamp = frame.contents.timestamp
img.video_mode = video_modes[frame.contents.video_mode]
img.data_depth = frame.contents.data_depth
img.color_coding = color_codings[frame.contents.color_coding]
img.color_filter = frame.contents.color_filter
img.yuv_byte_order = frame.contents.yuv_byte_order
img.stride = frame.contents.stride
# save camera and frame for enqueue()
img._frame = frame
img._cam = camera
return img
def __array_finalize__(self, img):
"""
Finalize the new Image class array.
If called with an image object, inherit the properties of that image.
"""
if img is None:
return
# do not inherit _frame and _cam since we also get called on copy()
# and should not hold references to the frame in this case
for key in ["position", "color_coding", "color_filter",
"yuv_byte_order", "stride", "packet_size",
"packets_per_frame", "timestamp", "frames_behind",
"frame_id", "data_depth", "video_mode"]:
setattr(self, key, getattr(img, key, None))
def enqueue(self):
"""
Returns a frame to the ring buffer once it has been used.
This method is also called implicitly on ``del``.
Only call this method on the original frame obtained from
Camera.dequeue` and not on its views, new-from-templates or
copies. Otherwise an AttributeError will be raised.
"""
if not hasattr(self, "_frame"): # or self.base is not None:
raise AttributeError("can only enqueue the original frame")
if self._frame is not None:
dll.dc1394_capture_enqueue(self._cam, self._frame)
self._frame = None
self._cam = None
# from contextlib iport closing
# with closing(camera.dequeue()) as im:
# do stuff with im
close = enqueue
def __del__(self):
try:
self.enqueue()
except AttributeError:
pass
@property
def corrupt(self):
"""
Whether this frame corrupt.
Returns ``True`` if the given frame has been detected to be
corrupt (missing data, corrupted data, overrun buffer, etc.) and
``False`` otherwise.
.. note::
Certain types of corruption may go undetected in which case
``False`` will be returned erroneously. The ability to
|
.. note::
Corrupt frames still need to be enqueued with `enqueue`
when no longer needed by the user.
"""
return bool(dll.dc1394_capture_is_frame_corrupt(self._cam, self._frame))
def to_rgb(self):
"""
Convert the image to an RGB image.
Array shape is: (image.shape[0], image.shape[1], 3)
Uses the dc1394_convert_to_RGB() function for the conversion.
"""
res = ndarray(3 * self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_RGB8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape[0], shape[1], 3
return res
def to_mono8(self):
"""
Convert he image to 8 bit gray scale.
Uses the dc1394_convert_to_MONO8() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_MONO8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape
return res
def to_yuv422(self):
"""
Convert he image to YUV422 color format.
Uses the dc1394_convert_to_YUV422() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_YUV422(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
return ndarray(shape=shape, buffer=res.data, dtype='u2')
|
detect corruption also varies between platforms.
|
random_line_split
|
frame.py
|
# -----------------------------------------------------------------------------
#
# -*- coding: utf-8 -*-
#
# phlox-libdc1394/dc1394/frame.py
#
# Copyright (C) 2016, by Matthias Yang Chen <[email protected]>
# All rights reserved.
#
# phlox-libdc1394 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# phlox-libdc1394 is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with phlox-libdc1394. If not,
# see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals
from ctypes import ARRAY, c_byte
from numpy import ndarray
from .core import *
__all__ = ['Frame']
class Frame(ndarray):
"""
A frame returned by the camera.
All metadata are retained as attributes of the resulting image.
"""
_cam = None
_frame = None
def __new__(cls, camera, frame):
"""
Convert a dc1394 frame into a Frame instance.
:param camera:
:param frame:
:return:
"""
dtype = ARRAY(c_byte, frame.contents.image_bytes)
buf = dtype.from_address(frame.contents.image)
width, height = frame.contents.size
pixels = width * height
endian = frame.contents.little_endian and '<' or '>'
type_str = '%su%i' % (endian, frame.contents.image_bytes / pixels)
img = ndarray.__new__(cls, shape=(height, width), dtype=type_str, buffer=buf)
img.frame_id = frame.contents.id
img.frames_behind = frame.contents.frames_behind
img.position = frame.contents.position
img.packet_size = frame.contents.packet_size
img.packets_per_frame = frame.contents.packet_per_frame
img.timestamp = frame.contents.timestamp
img.video_mode = video_modes[frame.contents.video_mode]
img.data_depth = frame.contents.data_depth
img.color_coding = color_codings[frame.contents.color_coding]
img.color_filter = frame.contents.color_filter
img.yuv_byte_order = frame.contents.yuv_byte_order
img.stride = frame.contents.stride
# save camera and frame for enqueue()
img._frame = frame
img._cam = camera
return img
def __array_finalize__(self, img):
|
def enqueue(self):
"""
Returns a frame to the ring buffer once it has been used.
This method is also called implicitly on ``del``.
Only call this method on the original frame obtained from
Camera.dequeue` and not on its views, new-from-templates or
copies. Otherwise an AttributeError will be raised.
"""
if not hasattr(self, "_frame"): # or self.base is not None:
raise AttributeError("can only enqueue the original frame")
if self._frame is not None:
dll.dc1394_capture_enqueue(self._cam, self._frame)
self._frame = None
self._cam = None
# from contextlib iport closing
# with closing(camera.dequeue()) as im:
# do stuff with im
close = enqueue
def __del__(self):
try:
self.enqueue()
except AttributeError:
pass
@property
def corrupt(self):
"""
Whether this frame corrupt.
Returns ``True`` if the given frame has been detected to be
corrupt (missing data, corrupted data, overrun buffer, etc.) and
``False`` otherwise.
.. note::
Certain types of corruption may go undetected in which case
``False`` will be returned erroneously. The ability to
detect corruption also varies between platforms.
.. note::
Corrupt frames still need to be enqueued with `enqueue`
when no longer needed by the user.
"""
return bool(dll.dc1394_capture_is_frame_corrupt(self._cam, self._frame))
def to_rgb(self):
"""
Convert the image to an RGB image.
Array shape is: (image.shape[0], image.shape[1], 3)
Uses the dc1394_convert_to_RGB() function for the conversion.
"""
res = ndarray(3 * self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_RGB8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape[0], shape[1], 3
return res
def to_mono8(self):
"""
Convert he image to 8 bit gray scale.
Uses the dc1394_convert_to_MONO8() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_MONO8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape
return res
def to_yuv422(self):
"""
Convert he image to YUV422 color format.
Uses the dc1394_convert_to_YUV422() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_YUV422(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
return ndarray(shape=shape, buffer=res.data, dtype='u2')
|
"""
Finalize the new Image class array.
If called with an image object, inherit the properties of that image.
"""
if img is None:
return
# do not inherit _frame and _cam since we also get called on copy()
# and should not hold references to the frame in this case
for key in ["position", "color_coding", "color_filter",
"yuv_byte_order", "stride", "packet_size",
"packets_per_frame", "timestamp", "frames_behind",
"frame_id", "data_depth", "video_mode"]:
setattr(self, key, getattr(img, key, None))
|
identifier_body
|
frame.py
|
# -----------------------------------------------------------------------------
#
# -*- coding: utf-8 -*-
#
# phlox-libdc1394/dc1394/frame.py
#
# Copyright (C) 2016, by Matthias Yang Chen <[email protected]>
# All rights reserved.
#
# phlox-libdc1394 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# phlox-libdc1394 is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with phlox-libdc1394. If not,
# see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals
from ctypes import ARRAY, c_byte
from numpy import ndarray
from .core import *
__all__ = ['Frame']
class Frame(ndarray):
"""
A frame returned by the camera.
All metadata are retained as attributes of the resulting image.
"""
_cam = None
_frame = None
def __new__(cls, camera, frame):
"""
Convert a dc1394 frame into a Frame instance.
:param camera:
:param frame:
:return:
"""
dtype = ARRAY(c_byte, frame.contents.image_bytes)
buf = dtype.from_address(frame.contents.image)
width, height = frame.contents.size
pixels = width * height
endian = frame.contents.little_endian and '<' or '>'
type_str = '%su%i' % (endian, frame.contents.image_bytes / pixels)
img = ndarray.__new__(cls, shape=(height, width), dtype=type_str, buffer=buf)
img.frame_id = frame.contents.id
img.frames_behind = frame.contents.frames_behind
img.position = frame.contents.position
img.packet_size = frame.contents.packet_size
img.packets_per_frame = frame.contents.packet_per_frame
img.timestamp = frame.contents.timestamp
img.video_mode = video_modes[frame.contents.video_mode]
img.data_depth = frame.contents.data_depth
img.color_coding = color_codings[frame.contents.color_coding]
img.color_filter = frame.contents.color_filter
img.yuv_byte_order = frame.contents.yuv_byte_order
img.stride = frame.contents.stride
# save camera and frame for enqueue()
img._frame = frame
img._cam = camera
return img
def __array_finalize__(self, img):
"""
Finalize the new Image class array.
If called with an image object, inherit the properties of that image.
"""
if img is None:
return
# do not inherit _frame and _cam since we also get called on copy()
# and should not hold references to the frame in this case
for key in ["position", "color_coding", "color_filter",
"yuv_byte_order", "stride", "packet_size",
"packets_per_frame", "timestamp", "frames_behind",
"frame_id", "data_depth", "video_mode"]:
setattr(self, key, getattr(img, key, None))
def enqueue(self):
"""
Returns a frame to the ring buffer once it has been used.
This method is also called implicitly on ``del``.
Only call this method on the original frame obtained from
Camera.dequeue` and not on its views, new-from-templates or
copies. Otherwise an AttributeError will be raised.
"""
if not hasattr(self, "_frame"): # or self.base is not None:
|
if self._frame is not None:
dll.dc1394_capture_enqueue(self._cam, self._frame)
self._frame = None
self._cam = None
# from contextlib iport closing
# with closing(camera.dequeue()) as im:
# do stuff with im
close = enqueue
def __del__(self):
try:
self.enqueue()
except AttributeError:
pass
@property
def corrupt(self):
"""
Whether this frame corrupt.
Returns ``True`` if the given frame has been detected to be
corrupt (missing data, corrupted data, overrun buffer, etc.) and
``False`` otherwise.
.. note::
Certain types of corruption may go undetected in which case
``False`` will be returned erroneously. The ability to
detect corruption also varies between platforms.
.. note::
Corrupt frames still need to be enqueued with `enqueue`
when no longer needed by the user.
"""
return bool(dll.dc1394_capture_is_frame_corrupt(self._cam, self._frame))
def to_rgb(self):
"""
Convert the image to an RGB image.
Array shape is: (image.shape[0], image.shape[1], 3)
Uses the dc1394_convert_to_RGB() function for the conversion.
"""
res = ndarray(3 * self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_RGB8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape[0], shape[1], 3
return res
def to_mono8(self):
"""
Convert he image to 8 bit gray scale.
Uses the dc1394_convert_to_MONO8() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_MONO8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape
return res
def to_yuv422(self):
"""
Convert he image to YUV422 color format.
Uses the dc1394_convert_to_YUV422() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_YUV422(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
return ndarray(shape=shape, buffer=res.data, dtype='u2')
|
raise AttributeError("can only enqueue the original frame")
|
conditional_block
|
frame.py
|
# -----------------------------------------------------------------------------
#
# -*- coding: utf-8 -*-
#
# phlox-libdc1394/dc1394/frame.py
#
# Copyright (C) 2016, by Matthias Yang Chen <[email protected]>
# All rights reserved.
#
# phlox-libdc1394 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# phlox-libdc1394 is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with phlox-libdc1394. If not,
# see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals
from ctypes import ARRAY, c_byte
from numpy import ndarray
from .core import *
__all__ = ['Frame']
class Frame(ndarray):
"""
A frame returned by the camera.
All metadata are retained as attributes of the resulting image.
"""
_cam = None
_frame = None
def __new__(cls, camera, frame):
"""
Convert a dc1394 frame into a Frame instance.
:param camera:
:param frame:
:return:
"""
dtype = ARRAY(c_byte, frame.contents.image_bytes)
buf = dtype.from_address(frame.contents.image)
width, height = frame.contents.size
pixels = width * height
endian = frame.contents.little_endian and '<' or '>'
type_str = '%su%i' % (endian, frame.contents.image_bytes / pixels)
img = ndarray.__new__(cls, shape=(height, width), dtype=type_str, buffer=buf)
img.frame_id = frame.contents.id
img.frames_behind = frame.contents.frames_behind
img.position = frame.contents.position
img.packet_size = frame.contents.packet_size
img.packets_per_frame = frame.contents.packet_per_frame
img.timestamp = frame.contents.timestamp
img.video_mode = video_modes[frame.contents.video_mode]
img.data_depth = frame.contents.data_depth
img.color_coding = color_codings[frame.contents.color_coding]
img.color_filter = frame.contents.color_filter
img.yuv_byte_order = frame.contents.yuv_byte_order
img.stride = frame.contents.stride
# save camera and frame for enqueue()
img._frame = frame
img._cam = camera
return img
def __array_finalize__(self, img):
"""
Finalize the new Image class array.
If called with an image object, inherit the properties of that image.
"""
if img is None:
return
# do not inherit _frame and _cam since we also get called on copy()
# and should not hold references to the frame in this case
for key in ["position", "color_coding", "color_filter",
"yuv_byte_order", "stride", "packet_size",
"packets_per_frame", "timestamp", "frames_behind",
"frame_id", "data_depth", "video_mode"]:
setattr(self, key, getattr(img, key, None))
def enqueue(self):
"""
Returns a frame to the ring buffer once it has been used.
This method is also called implicitly on ``del``.
Only call this method on the original frame obtained from
Camera.dequeue` and not on its views, new-from-templates or
copies. Otherwise an AttributeError will be raised.
"""
if not hasattr(self, "_frame"): # or self.base is not None:
raise AttributeError("can only enqueue the original frame")
if self._frame is not None:
dll.dc1394_capture_enqueue(self._cam, self._frame)
self._frame = None
self._cam = None
# from contextlib iport closing
# with closing(camera.dequeue()) as im:
# do stuff with im
close = enqueue
def
|
(self):
try:
self.enqueue()
except AttributeError:
pass
@property
def corrupt(self):
"""
Whether this frame corrupt.
Returns ``True`` if the given frame has been detected to be
corrupt (missing data, corrupted data, overrun buffer, etc.) and
``False`` otherwise.
.. note::
Certain types of corruption may go undetected in which case
``False`` will be returned erroneously. The ability to
detect corruption also varies between platforms.
.. note::
Corrupt frames still need to be enqueued with `enqueue`
when no longer needed by the user.
"""
return bool(dll.dc1394_capture_is_frame_corrupt(self._cam, self._frame))
def to_rgb(self):
"""
Convert the image to an RGB image.
Array shape is: (image.shape[0], image.shape[1], 3)
Uses the dc1394_convert_to_RGB() function for the conversion.
"""
res = ndarray(3 * self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_RGB8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape[0], shape[1], 3
return res
def to_mono8(self):
"""
Convert he image to 8 bit gray scale.
Uses the dc1394_convert_to_MONO8() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_MONO8(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
res.shape = shape
return res
def to_yuv422(self):
"""
Convert he image to YUV422 color format.
Uses the dc1394_convert_to_YUV422() function
"""
res = ndarray(self.size, dtype='u1')
shape = self.shape
inp = ndarray(shape=len(self.data), buffer=self.data, dtype='u1')
dll.dc1394_convert_to_YUV422(inp, res, shape[1], shape[0],
self.yuv_byte_order, self.color_coding,
self.data_depth)
return ndarray(shape=shape, buffer=res.data, dtype='u2')
|
__del__
|
identifier_name
|
browsercontext.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::trace::Traceable;
use dom::bindings::utils::Reflectable;
use dom::document::Document;
use dom::window::Window;
use js::jsapi::JSObject;
use js::glue::{WrapperNew, CreateWrapperProxyHandler, ProxyTraps};
use js::rust::with_compartment;
use libc::c_void;
use std::ptr;
#[allow(raw_pointer_deriving)]
#[deriving(Encodable)]
pub struct BrowserContext {
history: Vec<SessionHistoryEntry>,
active_index: uint,
window_proxy: Traceable<*mut JSObject>,
}
impl BrowserContext {
pub fn new(document: JSRef<Document>) -> BrowserContext {
let mut context = BrowserContext {
history: vec!(SessionHistoryEntry::new(document)),
active_index: 0,
window_proxy: Traceable::new(ptr::null_mut()),
};
context.create_window_proxy();
context
}
pub fn active_document(&self) -> Temporary<Document> {
Temporary::new(self.history[self.active_index].document.clone())
}
pub fn active_window(&self) -> Temporary<Window> {
let doc = self.active_document().root();
Temporary::new(doc.deref().window.clone())
}
pub fn window_proxy(&self) -> *mut JSObject {
assert!(self.window_proxy.deref().is_not_null());
*self.window_proxy
}
fn
|
(&mut self) {
let win = self.active_window().root();
let page = win.deref().page();
let js_info = page.js_info();
let handler = js_info.as_ref().unwrap().dom_static.windowproxy_handler;
assert!(handler.deref().is_not_null());
let parent = win.deref().reflector().get_jsobject();
let cx = js_info.as_ref().unwrap().js_context.deref().deref().ptr;
let wrapper = with_compartment(cx, parent, || unsafe {
WrapperNew(cx, parent, *handler.deref())
});
assert!(wrapper.is_not_null());
self.window_proxy = Traceable::new(wrapper);
}
}
#[deriving(Encodable)]
#[must_root]
pub struct SessionHistoryEntry {
document: JS<Document>,
children: Vec<BrowserContext>
}
impl SessionHistoryEntry {
fn new(document: JSRef<Document>) -> SessionHistoryEntry {
SessionHistoryEntry {
document: JS::from_rooted(document),
children: vec!()
}
}
}
static proxy_handler: ProxyTraps = ProxyTraps {
getPropertyDescriptor: None,
getOwnPropertyDescriptor: None,
defineProperty: None,
getOwnPropertyNames: 0 as *const u8,
delete_: None,
enumerate: 0 as *const u8,
has: None,
hasOwn: None,
get: None,
set: None,
keys: 0 as *const u8,
iterate: None,
call: None,
construct: None,
nativeCall: 0 as *const u8,
hasInstance: None,
typeOf: None,
objectClassIs: None,
obj_toString: None,
fun_toString: None,
//regexp_toShared: 0 as *u8,
defaultValue: None,
iteratorNext: None,
finalize: None,
getElementIfPresent: None,
getPrototypeOf: None,
trace: None
};
pub fn new_window_proxy_handler() -> *const c_void {
unsafe {
CreateWrapperProxyHandler(&proxy_handler)
}
}
|
create_window_proxy
|
identifier_name
|
browsercontext.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::trace::Traceable;
use dom::bindings::utils::Reflectable;
use dom::document::Document;
use dom::window::Window;
use js::jsapi::JSObject;
use js::glue::{WrapperNew, CreateWrapperProxyHandler, ProxyTraps};
use js::rust::with_compartment;
use libc::c_void;
use std::ptr;
#[allow(raw_pointer_deriving)]
#[deriving(Encodable)]
pub struct BrowserContext {
history: Vec<SessionHistoryEntry>,
active_index: uint,
window_proxy: Traceable<*mut JSObject>,
}
impl BrowserContext {
pub fn new(document: JSRef<Document>) -> BrowserContext {
let mut context = BrowserContext {
history: vec!(SessionHistoryEntry::new(document)),
active_index: 0,
window_proxy: Traceable::new(ptr::null_mut()),
};
context.create_window_proxy();
context
}
pub fn active_document(&self) -> Temporary<Document>
|
pub fn active_window(&self) -> Temporary<Window> {
let doc = self.active_document().root();
Temporary::new(doc.deref().window.clone())
}
pub fn window_proxy(&self) -> *mut JSObject {
assert!(self.window_proxy.deref().is_not_null());
*self.window_proxy
}
fn create_window_proxy(&mut self) {
let win = self.active_window().root();
let page = win.deref().page();
let js_info = page.js_info();
let handler = js_info.as_ref().unwrap().dom_static.windowproxy_handler;
assert!(handler.deref().is_not_null());
let parent = win.deref().reflector().get_jsobject();
let cx = js_info.as_ref().unwrap().js_context.deref().deref().ptr;
let wrapper = with_compartment(cx, parent, || unsafe {
WrapperNew(cx, parent, *handler.deref())
});
assert!(wrapper.is_not_null());
self.window_proxy = Traceable::new(wrapper);
}
}
#[deriving(Encodable)]
#[must_root]
pub struct SessionHistoryEntry {
document: JS<Document>,
children: Vec<BrowserContext>
}
impl SessionHistoryEntry {
fn new(document: JSRef<Document>) -> SessionHistoryEntry {
SessionHistoryEntry {
document: JS::from_rooted(document),
children: vec!()
}
}
}
static proxy_handler: ProxyTraps = ProxyTraps {
getPropertyDescriptor: None,
getOwnPropertyDescriptor: None,
defineProperty: None,
getOwnPropertyNames: 0 as *const u8,
delete_: None,
enumerate: 0 as *const u8,
has: None,
hasOwn: None,
get: None,
set: None,
keys: 0 as *const u8,
iterate: None,
call: None,
construct: None,
nativeCall: 0 as *const u8,
hasInstance: None,
typeOf: None,
objectClassIs: None,
obj_toString: None,
fun_toString: None,
//regexp_toShared: 0 as *u8,
defaultValue: None,
iteratorNext: None,
finalize: None,
getElementIfPresent: None,
getPrototypeOf: None,
trace: None
};
pub fn new_window_proxy_handler() -> *const c_void {
unsafe {
CreateWrapperProxyHandler(&proxy_handler)
}
}
|
{
Temporary::new(self.history[self.active_index].document.clone())
}
|
identifier_body
|
browsercontext.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::js::{JS, JSRef, Temporary};
use dom::bindings::trace::Traceable;
use dom::bindings::utils::Reflectable;
use dom::document::Document;
|
use dom::window::Window;
use js::jsapi::JSObject;
use js::glue::{WrapperNew, CreateWrapperProxyHandler, ProxyTraps};
use js::rust::with_compartment;
use libc::c_void;
use std::ptr;
#[allow(raw_pointer_deriving)]
#[deriving(Encodable)]
pub struct BrowserContext {
history: Vec<SessionHistoryEntry>,
active_index: uint,
window_proxy: Traceable<*mut JSObject>,
}
impl BrowserContext {
pub fn new(document: JSRef<Document>) -> BrowserContext {
let mut context = BrowserContext {
history: vec!(SessionHistoryEntry::new(document)),
active_index: 0,
window_proxy: Traceable::new(ptr::null_mut()),
};
context.create_window_proxy();
context
}
pub fn active_document(&self) -> Temporary<Document> {
Temporary::new(self.history[self.active_index].document.clone())
}
pub fn active_window(&self) -> Temporary<Window> {
let doc = self.active_document().root();
Temporary::new(doc.deref().window.clone())
}
pub fn window_proxy(&self) -> *mut JSObject {
assert!(self.window_proxy.deref().is_not_null());
*self.window_proxy
}
fn create_window_proxy(&mut self) {
let win = self.active_window().root();
let page = win.deref().page();
let js_info = page.js_info();
let handler = js_info.as_ref().unwrap().dom_static.windowproxy_handler;
assert!(handler.deref().is_not_null());
let parent = win.deref().reflector().get_jsobject();
let cx = js_info.as_ref().unwrap().js_context.deref().deref().ptr;
let wrapper = with_compartment(cx, parent, || unsafe {
WrapperNew(cx, parent, *handler.deref())
});
assert!(wrapper.is_not_null());
self.window_proxy = Traceable::new(wrapper);
}
}
#[deriving(Encodable)]
#[must_root]
pub struct SessionHistoryEntry {
document: JS<Document>,
children: Vec<BrowserContext>
}
impl SessionHistoryEntry {
fn new(document: JSRef<Document>) -> SessionHistoryEntry {
SessionHistoryEntry {
document: JS::from_rooted(document),
children: vec!()
}
}
}
static proxy_handler: ProxyTraps = ProxyTraps {
getPropertyDescriptor: None,
getOwnPropertyDescriptor: None,
defineProperty: None,
getOwnPropertyNames: 0 as *const u8,
delete_: None,
enumerate: 0 as *const u8,
has: None,
hasOwn: None,
get: None,
set: None,
keys: 0 as *const u8,
iterate: None,
call: None,
construct: None,
nativeCall: 0 as *const u8,
hasInstance: None,
typeOf: None,
objectClassIs: None,
obj_toString: None,
fun_toString: None,
//regexp_toShared: 0 as *u8,
defaultValue: None,
iteratorNext: None,
finalize: None,
getElementIfPresent: None,
getPrototypeOf: None,
trace: None
};
pub fn new_window_proxy_handler() -> *const c_void {
unsafe {
CreateWrapperProxyHandler(&proxy_handler)
}
}
|
random_line_split
|
|
cereconf_local.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2003-2018 University of Oslo, Norway
#
# This file is part of Cerebrum.
#
# Cerebrum is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Cerebrum is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Cerebrum; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
|
from Cerebrum.default_config import *
CEREBRUM_DATABASE_NAME = os.getenv('DB_NAME')
CEREBRUM_DATABASE_CONNECT_DATA['user'] = os.getenv('DB_USER')
CEREBRUM_DATABASE_CONNECT_DATA['table_owner'] = os.getenv('DB_USER')
CEREBRUM_DATABASE_CONNECT_DATA['host'] = os.getenv('DB_HOST')
CEREBRUM_DATABASE_CONNECT_DATA['table_owner'] = os.getenv('DB_USER')
CEREBRUM_DDL_DIR = '/src/design'
DB_AUTH_DIR = '/db-auth'
LOGGING_CONFIGFILE = os.path.join(os.getenv('TEST_CONFIG_DIR'),
'logging.ini')
|
import os
|
random_line_split
|
job.ts
|
import * as CronDate from 'cron-parser/lib/date';
import * as cronParser from "cron-parser";
import * as lt from "long-timeout";
import * as util from "./util";
import RecurrenceRule from "./recurrence-rule";
import { EventEmitter } from "events";
import SortedArray from "./sorted-array";
export type InvocationArray = SortedArray<Invocation>;
/**
* Sorter function to sort scheduled invocations by date/time
*
* @param {Invocation} a - invocation
* @param {Invocation} b - another invocation
* @returns {number} - difference between the fire date of 2 invocations
*/
function invocationSorter(a: Invocation, b: Invocation): number {
return a.fireDate.getTime() - b.fireDate.getTime();
}
/**
* Singleton Job Manager class that manage job invocations
*/
export class JobManager {
private static _instance: JobManager;
// anonymous job counter
anonJobCounter = 0;
// active invocations of all jobs
invocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
// current invocation
currentInvocation: Invocation = null;
// scheduled jobs
scheduledJobs: Object = {};
static get instance(): JobManager {
return this._instance || (this._instance = new this());
}
cancelInvocation(invocation: Invocation) {
if (this.invocations.remove(invocation)) {
if (invocation.timerID !== null) {
lt.clearTimeout(invocation.timerID);
}
if (this.currentInvocation === invocation) {
this.currentInvocation = null;
}
invocation.job.emit('canceled', invocation.fireDate);
this.prepareNextInvocation();
}
}
scheduleInvocation(invocation: Invocation): void {
this.invocations.insert(invocation);
this.prepareNextInvocation();
const date = invocation.fireDate instanceof CronDate ? invocation.fireDate.toDate() : invocation.fireDate;
invocation.job.emit('scheduled', date);
}
prepareNextInvocation(): void {
if (this.invocations.length > 0 && this.currentInvocation !== this.invocations.array[0]) {
if (this.currentInvocation !== null) {
lt.clearTimeout(this.currentInvocation.timerID);
this.currentInvocation.timerID = null;
this.currentInvocation = null;
}
this.currentInvocation = this.invocations.array[0];
const job = this.currentInvocation.job;
const cinv = this.currentInvocation;
this.currentInvocation.timerID = this.runOnDate(this.currentInvocation.fireDate, function () {
this.currentInvocationFinished();
const rule = cinv.recurrenceRule;
if (rule && (rule.recurs || rule._endDate === null)) {
// rule: RecurrenceRule, job: Job, fireDate: CronDate, endDate: CronDate
const inv = this.scheduleNextRecurrence(cinv.recurrenceRule, job, cinv.fireDate, cinv.endDate);
if (inv !== null) {
inv.job.trackInvocation(inv);
}
}
job.stopTrackingInvocation(cinv);
job.invoke();
}.bind(this));
}
}
currentInvocationFinished(): void {
this.invocations.array.shift();
this.currentInvocation = null;
this.prepareNextInvocation();
}
runOnDate(date: Date, job: Function): NodeJS.Timer {
const now = Date.now();
const then = date.getTime();
return lt.setTimeout(job, then < now ? 0 : then - now);
}
|
(rule: RecurrenceRule | any, job: Job, prevDate: Date, endDate: Date): Invocation {
prevDate = prevDate || new Date();
const date = rule instanceof RecurrenceRule ? rule.nextInvocationDate(prevDate) : rule.next();
if (date === null) {
return null;
}
if (endDate instanceof Date && date.getTime() > endDate.getTime()) {
return null;
}
const inv = new Invocation(job, date, rule, endDate);
this.scheduleInvocation(inv);
return inv;
}
}
export const jobMan = JobManager.instance;
export const scheduledJobs = jobMan.scheduledJobs;
export class Invocation {
job: Job;
fireDate: Date | CronDate;
endDate?: Date | CronDate;
recurrenceRule: RecurrenceRule | any;
timerID: NodeJS.Timer = null;
constructor(job: Job, fireDate?: Date, rule?: RecurrenceRule | any, endDate?: Date) {
this.job = job;
this.fireDate = fireDate;
this.endDate = endDate;
this.recurrenceRule = rule;
}
}
export interface IJobObject {
execute(): void;
}
export class ScheduleObject {
start?: Date;
end?: Date;
rule?: string | RecurrenceRule; // cron or rec rule
constructor(start?: Date, end?: Date, rule?: string | RecurrenceRule) {
this.start = start;
this.end = end;
this.rule = rule;
}
}
// Date - a specific date in Date object
// string - can be either a Date string, or a cron string
// number - the int value of a date, will be parsed into Date
// RecurrenceRule - recurrence rule
// ScheduleObject - an object with start, end and
// either the rule in RecurrenceRule, or
// a cron string
export type ScheduleSpec = Date | string | number | RecurrenceRule | ScheduleObject;
export class Job extends EventEmitter {
private _name: string;
get name() {
return this._name;
}
private _pendingInvocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
get pendingInvocations(): InvocationArray {
return this._pendingInvocations;
}
private _triggeredJobs: number = 0;
get triggeredJobs() {
return this._triggeredJobs;
}
resetTriggeredJobs(): void {
this._triggeredJobs = 0;
}
addTriggeredJobs(): number {
return ++this._triggeredJobs;
}
job: Function | IJobObject;
callback: Function;
constructor(name?: string, job?: Function | IJobObject, callback?: Function) {
super();
this._name = name || `<Anonymous Job ${++jobMan.anonJobCounter}>`;
this.job = job;
this.callback = callback;
}
trackInvocation(invocation: Invocation): boolean {
this.pendingInvocations.insert(invocation);
return true;
}
stopTrackingInvocation(invocation: Invocation): boolean {
return this.pendingInvocations.remove(invocation);
}
invoke() {
if (typeof this.job === 'function') {
this.addTriggeredJobs();
this.job();
} else {
this.job.execute();
}
if (this.callback) {
this.callback();
}
this.emit('run');
}
schedule(sched: ScheduleSpec): boolean {
if (jobMan.scheduledJobs[this.name]) {
throw new Error(`A job with the name ${this.name} is already scheduled, please either reschedule it, or cancel and then schedule it again.`);
}
let success = false;
let inv: Invocation;
let start: CronDate;
let end: CronDate;
let recRule: RecurrenceRule | any;
let fireDate: Date;
let type = typeof sched;
if (sched instanceof RecurrenceRule) {
recRule = sched;
} else if (sched instanceof ScheduleObject) {
start = sched.start || undefined; // CronDate need undefined instead of null
end = sched.end || undefined;
if (sched.rule instanceof RecurrenceRule) {
recRule = sched.rule;
} else {
recRule = util.parseCron(sched.rule, { currentDate: start });
}
} else if (type === 'string') {
recRule = util.parseCron(String(sched), {});
}
if (recRule) {
// cron or recurrence rule
inv = jobMan.scheduleNextRecurrence(recRule, this, start, end);
if (inv !== null) {
success = this.trackInvocation(inv);
}
} else if (type === 'string' || type === 'number') {
// 'Thu Sep 14 2017 11:50:27 GMT-0700 (PDT)' or 1505427200476
fireDate = new Date(sched);
} else if (sched instanceof Date) {
// Date object
fireDate = sched;
}
if (util.isValidDate(fireDate)) {
if (fireDate.getTime() >= Date.now()) {
inv = new Invocation(this, fireDate);
jobMan.scheduleInvocation(inv);
success = this.trackInvocation(inv);
}
}
if (success) {
jobMan.scheduledJobs[this.name] = this;
}
return success;
}
cancel(): boolean {
let inv: Invocation, newInv: Invocation;
for (let j = 0; j < this.pendingInvocations.length; j++) {
inv = this.pendingInvocations.array[j];
jobMan.cancelInvocation(inv);
}
this.pendingInvocations.clear();
delete jobMan.scheduledJobs[this.name];
return true;
}
reschedule(sched: ScheduleSpec): boolean {
const cInvs = this.pendingInvocations.array.slice();
this.cancel();
if (this.schedule(sched)) {
this.resetTriggeredJobs();
} else {
this.pendingInvocations.array = cInvs;
return false;
}
return true;
}
get nextInvocation(): Date {
return this.pendingInvocations.length > 0 ? this.pendingInvocations.array[0].fireDate : null;
}
}
|
scheduleNextRecurrence
|
identifier_name
|
job.ts
|
import * as CronDate from 'cron-parser/lib/date';
import * as cronParser from "cron-parser";
import * as lt from "long-timeout";
import * as util from "./util";
import RecurrenceRule from "./recurrence-rule";
import { EventEmitter } from "events";
import SortedArray from "./sorted-array";
export type InvocationArray = SortedArray<Invocation>;
/**
* Sorter function to sort scheduled invocations by date/time
*
* @param {Invocation} a - invocation
* @param {Invocation} b - another invocation
* @returns {number} - difference between the fire date of 2 invocations
*/
function invocationSorter(a: Invocation, b: Invocation): number {
return a.fireDate.getTime() - b.fireDate.getTime();
}
/**
* Singleton Job Manager class that manage job invocations
*/
export class JobManager {
private static _instance: JobManager;
// anonymous job counter
anonJobCounter = 0;
// active invocations of all jobs
invocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
// current invocation
currentInvocation: Invocation = null;
// scheduled jobs
scheduledJobs: Object = {};
static get instance(): JobManager {
return this._instance || (this._instance = new this());
}
cancelInvocation(invocation: Invocation) {
if (this.invocations.remove(invocation)) {
if (invocation.timerID !== null) {
lt.clearTimeout(invocation.timerID);
}
if (this.currentInvocation === invocation) {
this.currentInvocation = null;
}
invocation.job.emit('canceled', invocation.fireDate);
this.prepareNextInvocation();
}
}
scheduleInvocation(invocation: Invocation): void {
this.invocations.insert(invocation);
this.prepareNextInvocation();
const date = invocation.fireDate instanceof CronDate ? invocation.fireDate.toDate() : invocation.fireDate;
invocation.job.emit('scheduled', date);
}
prepareNextInvocation(): void {
if (this.invocations.length > 0 && this.currentInvocation !== this.invocations.array[0]) {
if (this.currentInvocation !== null) {
lt.clearTimeout(this.currentInvocation.timerID);
this.currentInvocation.timerID = null;
this.currentInvocation = null;
}
this.currentInvocation = this.invocations.array[0];
const job = this.currentInvocation.job;
const cinv = this.currentInvocation;
this.currentInvocation.timerID = this.runOnDate(this.currentInvocation.fireDate, function () {
this.currentInvocationFinished();
const rule = cinv.recurrenceRule;
if (rule && (rule.recurs || rule._endDate === null)) {
// rule: RecurrenceRule, job: Job, fireDate: CronDate, endDate: CronDate
const inv = this.scheduleNextRecurrence(cinv.recurrenceRule, job, cinv.fireDate, cinv.endDate);
if (inv !== null) {
inv.job.trackInvocation(inv);
}
}
job.stopTrackingInvocation(cinv);
job.invoke();
}.bind(this));
}
}
currentInvocationFinished(): void {
this.invocations.array.shift();
this.currentInvocation = null;
this.prepareNextInvocation();
}
runOnDate(date: Date, job: Function): NodeJS.Timer {
const now = Date.now();
const then = date.getTime();
return lt.setTimeout(job, then < now ? 0 : then - now);
}
scheduleNextRecurrence(rule: RecurrenceRule | any, job: Job, prevDate: Date, endDate: Date): Invocation {
prevDate = prevDate || new Date();
const date = rule instanceof RecurrenceRule ? rule.nextInvocationDate(prevDate) : rule.next();
if (date === null) {
return null;
}
if (endDate instanceof Date && date.getTime() > endDate.getTime()) {
return null;
}
const inv = new Invocation(job, date, rule, endDate);
this.scheduleInvocation(inv);
return inv;
}
}
export const jobMan = JobManager.instance;
export const scheduledJobs = jobMan.scheduledJobs;
export class Invocation {
job: Job;
fireDate: Date | CronDate;
endDate?: Date | CronDate;
recurrenceRule: RecurrenceRule | any;
timerID: NodeJS.Timer = null;
constructor(job: Job, fireDate?: Date, rule?: RecurrenceRule | any, endDate?: Date) {
this.job = job;
this.fireDate = fireDate;
this.endDate = endDate;
this.recurrenceRule = rule;
}
}
export interface IJobObject {
execute(): void;
}
export class ScheduleObject {
start?: Date;
end?: Date;
rule?: string | RecurrenceRule; // cron or rec rule
constructor(start?: Date, end?: Date, rule?: string | RecurrenceRule) {
this.start = start;
this.end = end;
this.rule = rule;
}
}
// Date - a specific date in Date object
// string - can be either a Date string, or a cron string
// number - the int value of a date, will be parsed into Date
// RecurrenceRule - recurrence rule
// ScheduleObject - an object with start, end and
// either the rule in RecurrenceRule, or
// a cron string
export type ScheduleSpec = Date | string | number | RecurrenceRule | ScheduleObject;
export class Job extends EventEmitter {
private _name: string;
get name() {
return this._name;
}
private _pendingInvocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
get pendingInvocations(): InvocationArray {
return this._pendingInvocations;
}
private _triggeredJobs: number = 0;
get triggeredJobs() {
return this._triggeredJobs;
}
resetTriggeredJobs(): void {
this._triggeredJobs = 0;
}
addTriggeredJobs(): number {
return ++this._triggeredJobs;
}
job: Function | IJobObject;
callback: Function;
constructor(name?: string, job?: Function | IJobObject, callback?: Function) {
super();
this._name = name || `<Anonymous Job ${++jobMan.anonJobCounter}>`;
this.job = job;
this.callback = callback;
}
trackInvocation(invocation: Invocation): boolean {
this.pendingInvocations.insert(invocation);
return true;
}
stopTrackingInvocation(invocation: Invocation): boolean {
return this.pendingInvocations.remove(invocation);
}
invoke() {
if (typeof this.job === 'function') {
this.addTriggeredJobs();
this.job();
} else {
this.job.execute();
}
if (this.callback) {
this.callback();
}
this.emit('run');
}
schedule(sched: ScheduleSpec): boolean {
if (jobMan.scheduledJobs[this.name]) {
throw new Error(`A job with the name ${this.name} is already scheduled, please either reschedule it, or cancel and then schedule it again.`);
}
let success = false;
let inv: Invocation;
let start: CronDate;
let end: CronDate;
let recRule: RecurrenceRule | any;
let fireDate: Date;
let type = typeof sched;
if (sched instanceof RecurrenceRule) {
recRule = sched;
} else if (sched instanceof ScheduleObject) {
start = sched.start || undefined; // CronDate need undefined instead of null
end = sched.end || undefined;
if (sched.rule instanceof RecurrenceRule) {
recRule = sched.rule;
} else {
recRule = util.parseCron(sched.rule, { currentDate: start });
}
} else if (type === 'string') {
recRule = util.parseCron(String(sched), {});
}
if (recRule) {
// cron or recurrence rule
inv = jobMan.scheduleNextRecurrence(recRule, this, start, end);
if (inv !== null) {
success = this.trackInvocation(inv);
}
} else if (type === 'string' || type === 'number') {
// 'Thu Sep 14 2017 11:50:27 GMT-0700 (PDT)' or 1505427200476
fireDate = new Date(sched);
} else if (sched instanceof Date) {
// Date object
fireDate = sched;
}
if (util.isValidDate(fireDate)) {
if (fireDate.getTime() >= Date.now()) {
inv = new Invocation(this, fireDate);
jobMan.scheduleInvocation(inv);
success = this.trackInvocation(inv);
}
}
if (success) {
jobMan.scheduledJobs[this.name] = this;
}
return success;
}
cancel(): boolean
|
reschedule(sched: ScheduleSpec): boolean {
const cInvs = this.pendingInvocations.array.slice();
this.cancel();
if (this.schedule(sched)) {
this.resetTriggeredJobs();
} else {
this.pendingInvocations.array = cInvs;
return false;
}
return true;
}
get nextInvocation(): Date {
return this.pendingInvocations.length > 0 ? this.pendingInvocations.array[0].fireDate : null;
}
}
|
{
let inv: Invocation, newInv: Invocation;
for (let j = 0; j < this.pendingInvocations.length; j++) {
inv = this.pendingInvocations.array[j];
jobMan.cancelInvocation(inv);
}
this.pendingInvocations.clear();
delete jobMan.scheduledJobs[this.name];
return true;
}
|
identifier_body
|
job.ts
|
import * as CronDate from 'cron-parser/lib/date';
import * as cronParser from "cron-parser";
import * as lt from "long-timeout";
import * as util from "./util";
import RecurrenceRule from "./recurrence-rule";
import { EventEmitter } from "events";
import SortedArray from "./sorted-array";
export type InvocationArray = SortedArray<Invocation>;
/**
* Sorter function to sort scheduled invocations by date/time
*
* @param {Invocation} a - invocation
* @param {Invocation} b - another invocation
* @returns {number} - difference between the fire date of 2 invocations
*/
function invocationSorter(a: Invocation, b: Invocation): number {
return a.fireDate.getTime() - b.fireDate.getTime();
}
/**
* Singleton Job Manager class that manage job invocations
*/
export class JobManager {
private static _instance: JobManager;
// anonymous job counter
anonJobCounter = 0;
// active invocations of all jobs
invocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
// current invocation
currentInvocation: Invocation = null;
// scheduled jobs
scheduledJobs: Object = {};
static get instance(): JobManager {
return this._instance || (this._instance = new this());
}
cancelInvocation(invocation: Invocation) {
if (this.invocations.remove(invocation)) {
if (invocation.timerID !== null) {
lt.clearTimeout(invocation.timerID);
}
if (this.currentInvocation === invocation) {
this.currentInvocation = null;
}
invocation.job.emit('canceled', invocation.fireDate);
this.prepareNextInvocation();
}
}
scheduleInvocation(invocation: Invocation): void {
this.invocations.insert(invocation);
this.prepareNextInvocation();
const date = invocation.fireDate instanceof CronDate ? invocation.fireDate.toDate() : invocation.fireDate;
invocation.job.emit('scheduled', date);
}
prepareNextInvocation(): void {
if (this.invocations.length > 0 && this.currentInvocation !== this.invocations.array[0]) {
if (this.currentInvocation !== null) {
lt.clearTimeout(this.currentInvocation.timerID);
this.currentInvocation.timerID = null;
this.currentInvocation = null;
}
this.currentInvocation = this.invocations.array[0];
const job = this.currentInvocation.job;
const cinv = this.currentInvocation;
this.currentInvocation.timerID = this.runOnDate(this.currentInvocation.fireDate, function () {
this.currentInvocationFinished();
const rule = cinv.recurrenceRule;
if (rule && (rule.recurs || rule._endDate === null)) {
// rule: RecurrenceRule, job: Job, fireDate: CronDate, endDate: CronDate
const inv = this.scheduleNextRecurrence(cinv.recurrenceRule, job, cinv.fireDate, cinv.endDate);
if (inv !== null) {
inv.job.trackInvocation(inv);
}
}
job.stopTrackingInvocation(cinv);
job.invoke();
}.bind(this));
}
}
currentInvocationFinished(): void {
this.invocations.array.shift();
this.currentInvocation = null;
this.prepareNextInvocation();
}
runOnDate(date: Date, job: Function): NodeJS.Timer {
const now = Date.now();
const then = date.getTime();
return lt.setTimeout(job, then < now ? 0 : then - now);
}
scheduleNextRecurrence(rule: RecurrenceRule | any, job: Job, prevDate: Date, endDate: Date): Invocation {
prevDate = prevDate || new Date();
const date = rule instanceof RecurrenceRule ? rule.nextInvocationDate(prevDate) : rule.next();
if (date === null) {
return null;
}
if (endDate instanceof Date && date.getTime() > endDate.getTime()) {
return null;
}
const inv = new Invocation(job, date, rule, endDate);
this.scheduleInvocation(inv);
return inv;
}
}
export const jobMan = JobManager.instance;
export const scheduledJobs = jobMan.scheduledJobs;
export class Invocation {
job: Job;
fireDate: Date | CronDate;
endDate?: Date | CronDate;
recurrenceRule: RecurrenceRule | any;
timerID: NodeJS.Timer = null;
constructor(job: Job, fireDate?: Date, rule?: RecurrenceRule | any, endDate?: Date) {
this.job = job;
this.fireDate = fireDate;
this.endDate = endDate;
this.recurrenceRule = rule;
}
}
export interface IJobObject {
execute(): void;
}
export class ScheduleObject {
start?: Date;
end?: Date;
rule?: string | RecurrenceRule; // cron or rec rule
constructor(start?: Date, end?: Date, rule?: string | RecurrenceRule) {
this.start = start;
this.end = end;
this.rule = rule;
}
}
// Date - a specific date in Date object
// string - can be either a Date string, or a cron string
// number - the int value of a date, will be parsed into Date
// RecurrenceRule - recurrence rule
// ScheduleObject - an object with start, end and
// either the rule in RecurrenceRule, or
// a cron string
export type ScheduleSpec = Date | string | number | RecurrenceRule | ScheduleObject;
export class Job extends EventEmitter {
private _name: string;
get name() {
return this._name;
}
private _pendingInvocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
get pendingInvocations(): InvocationArray {
return this._pendingInvocations;
}
private _triggeredJobs: number = 0;
get triggeredJobs() {
return this._triggeredJobs;
}
resetTriggeredJobs(): void {
this._triggeredJobs = 0;
}
addTriggeredJobs(): number {
return ++this._triggeredJobs;
}
job: Function | IJobObject;
callback: Function;
constructor(name?: string, job?: Function | IJobObject, callback?: Function) {
super();
this._name = name || `<Anonymous Job ${++jobMan.anonJobCounter}>`;
this.job = job;
this.callback = callback;
}
trackInvocation(invocation: Invocation): boolean {
this.pendingInvocations.insert(invocation);
return true;
}
stopTrackingInvocation(invocation: Invocation): boolean {
return this.pendingInvocations.remove(invocation);
}
invoke() {
if (typeof this.job === 'function') {
this.addTriggeredJobs();
this.job();
} else {
this.job.execute();
}
if (this.callback) {
this.callback();
}
this.emit('run');
}
schedule(sched: ScheduleSpec): boolean {
if (jobMan.scheduledJobs[this.name]) {
throw new Error(`A job with the name ${this.name} is already scheduled, please either reschedule it, or cancel and then schedule it again.`);
}
let success = false;
let inv: Invocation;
let start: CronDate;
let end: CronDate;
let recRule: RecurrenceRule | any;
let fireDate: Date;
let type = typeof sched;
if (sched instanceof RecurrenceRule) {
recRule = sched;
} else if (sched instanceof ScheduleObject) {
start = sched.start || undefined; // CronDate need undefined instead of null
end = sched.end || undefined;
if (sched.rule instanceof RecurrenceRule) {
recRule = sched.rule;
} else {
recRule = util.parseCron(sched.rule, { currentDate: start });
}
} else if (type === 'string') {
recRule = util.parseCron(String(sched), {});
}
if (recRule) {
// cron or recurrence rule
inv = jobMan.scheduleNextRecurrence(recRule, this, start, end);
if (inv !== null) {
success = this.trackInvocation(inv);
}
} else if (type === 'string' || type === 'number') {
// 'Thu Sep 14 2017 11:50:27 GMT-0700 (PDT)' or 1505427200476
fireDate = new Date(sched);
} else if (sched instanceof Date) {
// Date object
fireDate = sched;
}
if (util.isValidDate(fireDate)) {
if (fireDate.getTime() >= Date.now()) {
inv = new Invocation(this, fireDate);
jobMan.scheduleInvocation(inv);
success = this.trackInvocation(inv);
}
}
if (success) {
jobMan.scheduledJobs[this.name] = this;
}
return success;
}
cancel(): boolean {
let inv: Invocation, newInv: Invocation;
for (let j = 0; j < this.pendingInvocations.length; j++) {
|
jobMan.cancelInvocation(inv);
}
this.pendingInvocations.clear();
delete jobMan.scheduledJobs[this.name];
return true;
}
reschedule(sched: ScheduleSpec): boolean {
const cInvs = this.pendingInvocations.array.slice();
this.cancel();
if (this.schedule(sched)) {
this.resetTriggeredJobs();
} else {
this.pendingInvocations.array = cInvs;
return false;
}
return true;
}
get nextInvocation(): Date {
return this.pendingInvocations.length > 0 ? this.pendingInvocations.array[0].fireDate : null;
}
}
|
inv = this.pendingInvocations.array[j];
|
random_line_split
|
job.ts
|
import * as CronDate from 'cron-parser/lib/date';
import * as cronParser from "cron-parser";
import * as lt from "long-timeout";
import * as util from "./util";
import RecurrenceRule from "./recurrence-rule";
import { EventEmitter } from "events";
import SortedArray from "./sorted-array";
export type InvocationArray = SortedArray<Invocation>;
/**
* Sorter function to sort scheduled invocations by date/time
*
* @param {Invocation} a - invocation
* @param {Invocation} b - another invocation
* @returns {number} - difference between the fire date of 2 invocations
*/
function invocationSorter(a: Invocation, b: Invocation): number {
return a.fireDate.getTime() - b.fireDate.getTime();
}
/**
* Singleton Job Manager class that manage job invocations
*/
export class JobManager {
private static _instance: JobManager;
// anonymous job counter
anonJobCounter = 0;
// active invocations of all jobs
invocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
// current invocation
currentInvocation: Invocation = null;
// scheduled jobs
scheduledJobs: Object = {};
static get instance(): JobManager {
return this._instance || (this._instance = new this());
}
cancelInvocation(invocation: Invocation) {
if (this.invocations.remove(invocation)) {
if (invocation.timerID !== null) {
lt.clearTimeout(invocation.timerID);
}
if (this.currentInvocation === invocation) {
this.currentInvocation = null;
}
invocation.job.emit('canceled', invocation.fireDate);
this.prepareNextInvocation();
}
}
scheduleInvocation(invocation: Invocation): void {
this.invocations.insert(invocation);
this.prepareNextInvocation();
const date = invocation.fireDate instanceof CronDate ? invocation.fireDate.toDate() : invocation.fireDate;
invocation.job.emit('scheduled', date);
}
prepareNextInvocation(): void {
if (this.invocations.length > 0 && this.currentInvocation !== this.invocations.array[0]) {
if (this.currentInvocation !== null)
|
this.currentInvocation = this.invocations.array[0];
const job = this.currentInvocation.job;
const cinv = this.currentInvocation;
this.currentInvocation.timerID = this.runOnDate(this.currentInvocation.fireDate, function () {
this.currentInvocationFinished();
const rule = cinv.recurrenceRule;
if (rule && (rule.recurs || rule._endDate === null)) {
// rule: RecurrenceRule, job: Job, fireDate: CronDate, endDate: CronDate
const inv = this.scheduleNextRecurrence(cinv.recurrenceRule, job, cinv.fireDate, cinv.endDate);
if (inv !== null) {
inv.job.trackInvocation(inv);
}
}
job.stopTrackingInvocation(cinv);
job.invoke();
}.bind(this));
}
}
currentInvocationFinished(): void {
this.invocations.array.shift();
this.currentInvocation = null;
this.prepareNextInvocation();
}
runOnDate(date: Date, job: Function): NodeJS.Timer {
const now = Date.now();
const then = date.getTime();
return lt.setTimeout(job, then < now ? 0 : then - now);
}
scheduleNextRecurrence(rule: RecurrenceRule | any, job: Job, prevDate: Date, endDate: Date): Invocation {
prevDate = prevDate || new Date();
const date = rule instanceof RecurrenceRule ? rule.nextInvocationDate(prevDate) : rule.next();
if (date === null) {
return null;
}
if (endDate instanceof Date && date.getTime() > endDate.getTime()) {
return null;
}
const inv = new Invocation(job, date, rule, endDate);
this.scheduleInvocation(inv);
return inv;
}
}
export const jobMan = JobManager.instance;
export const scheduledJobs = jobMan.scheduledJobs;
export class Invocation {
job: Job;
fireDate: Date | CronDate;
endDate?: Date | CronDate;
recurrenceRule: RecurrenceRule | any;
timerID: NodeJS.Timer = null;
constructor(job: Job, fireDate?: Date, rule?: RecurrenceRule | any, endDate?: Date) {
this.job = job;
this.fireDate = fireDate;
this.endDate = endDate;
this.recurrenceRule = rule;
}
}
export interface IJobObject {
execute(): void;
}
export class ScheduleObject {
start?: Date;
end?: Date;
rule?: string | RecurrenceRule; // cron or rec rule
constructor(start?: Date, end?: Date, rule?: string | RecurrenceRule) {
this.start = start;
this.end = end;
this.rule = rule;
}
}
// Date - a specific date in Date object
// string - can be either a Date string, or a cron string
// number - the int value of a date, will be parsed into Date
// RecurrenceRule - recurrence rule
// ScheduleObject - an object with start, end and
// either the rule in RecurrenceRule, or
// a cron string
export type ScheduleSpec = Date | string | number | RecurrenceRule | ScheduleObject;
export class Job extends EventEmitter {
private _name: string;
get name() {
return this._name;
}
private _pendingInvocations: InvocationArray = new SortedArray<Invocation>([], invocationSorter);
get pendingInvocations(): InvocationArray {
return this._pendingInvocations;
}
private _triggeredJobs: number = 0;
get triggeredJobs() {
return this._triggeredJobs;
}
resetTriggeredJobs(): void {
this._triggeredJobs = 0;
}
addTriggeredJobs(): number {
return ++this._triggeredJobs;
}
job: Function | IJobObject;
callback: Function;
constructor(name?: string, job?: Function | IJobObject, callback?: Function) {
super();
this._name = name || `<Anonymous Job ${++jobMan.anonJobCounter}>`;
this.job = job;
this.callback = callback;
}
trackInvocation(invocation: Invocation): boolean {
this.pendingInvocations.insert(invocation);
return true;
}
stopTrackingInvocation(invocation: Invocation): boolean {
return this.pendingInvocations.remove(invocation);
}
invoke() {
if (typeof this.job === 'function') {
this.addTriggeredJobs();
this.job();
} else {
this.job.execute();
}
if (this.callback) {
this.callback();
}
this.emit('run');
}
schedule(sched: ScheduleSpec): boolean {
if (jobMan.scheduledJobs[this.name]) {
throw new Error(`A job with the name ${this.name} is already scheduled, please either reschedule it, or cancel and then schedule it again.`);
}
let success = false;
let inv: Invocation;
let start: CronDate;
let end: CronDate;
let recRule: RecurrenceRule | any;
let fireDate: Date;
let type = typeof sched;
if (sched instanceof RecurrenceRule) {
recRule = sched;
} else if (sched instanceof ScheduleObject) {
start = sched.start || undefined; // CronDate need undefined instead of null
end = sched.end || undefined;
if (sched.rule instanceof RecurrenceRule) {
recRule = sched.rule;
} else {
recRule = util.parseCron(sched.rule, { currentDate: start });
}
} else if (type === 'string') {
recRule = util.parseCron(String(sched), {});
}
if (recRule) {
// cron or recurrence rule
inv = jobMan.scheduleNextRecurrence(recRule, this, start, end);
if (inv !== null) {
success = this.trackInvocation(inv);
}
} else if (type === 'string' || type === 'number') {
// 'Thu Sep 14 2017 11:50:27 GMT-0700 (PDT)' or 1505427200476
fireDate = new Date(sched);
} else if (sched instanceof Date) {
// Date object
fireDate = sched;
}
if (util.isValidDate(fireDate)) {
if (fireDate.getTime() >= Date.now()) {
inv = new Invocation(this, fireDate);
jobMan.scheduleInvocation(inv);
success = this.trackInvocation(inv);
}
}
if (success) {
jobMan.scheduledJobs[this.name] = this;
}
return success;
}
cancel(): boolean {
let inv: Invocation, newInv: Invocation;
for (let j = 0; j < this.pendingInvocations.length; j++) {
inv = this.pendingInvocations.array[j];
jobMan.cancelInvocation(inv);
}
this.pendingInvocations.clear();
delete jobMan.scheduledJobs[this.name];
return true;
}
reschedule(sched: ScheduleSpec): boolean {
const cInvs = this.pendingInvocations.array.slice();
this.cancel();
if (this.schedule(sched)) {
this.resetTriggeredJobs();
} else {
this.pendingInvocations.array = cInvs;
return false;
}
return true;
}
get nextInvocation(): Date {
return this.pendingInvocations.length > 0 ? this.pendingInvocations.array[0].fireDate : null;
}
}
|
{
lt.clearTimeout(this.currentInvocation.timerID);
this.currentInvocation.timerID = null;
this.currentInvocation = null;
}
|
conditional_block
|
htmlmodelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLModElementBinding;
use crate::dom::bindings::root::DomRoot;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct
|
{
htmlelement: HTMLElement,
}
impl HTMLModElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLModElement {
HTMLModElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLModElement> {
Node::reflect_node(
Box::new(HTMLModElement::new_inherited(local_name, prefix, document)),
document,
HTMLModElementBinding::Wrap,
)
}
}
|
HTMLModElement
|
identifier_name
|
htmlmodelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLModElementBinding;
use crate::dom::bindings::root::DomRoot;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLModElement {
htmlelement: HTMLElement,
}
impl HTMLModElement {
fn new_inherited(
local_name: LocalName,
|
prefix: Option<Prefix>,
document: &Document,
) -> HTMLModElement {
HTMLModElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLModElement> {
Node::reflect_node(
Box::new(HTMLModElement::new_inherited(local_name, prefix, document)),
document,
HTMLModElementBinding::Wrap,
)
}
}
|
random_line_split
|
|
htmlmodelement.rs
|
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::codegen::Bindings::HTMLModElementBinding;
use crate::dom::bindings::root::DomRoot;
use crate::dom::document::Document;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::Node;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
#[dom_struct]
pub struct HTMLModElement {
htmlelement: HTMLElement,
}
impl HTMLModElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLModElement {
HTMLModElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLModElement>
|
}
|
{
Node::reflect_node(
Box::new(HTMLModElement::new_inherited(local_name, prefix, document)),
document,
HTMLModElementBinding::Wrap,
)
}
|
identifier_body
|
term.rs
|
use std::time::Duration;
use std::thread;
use std::sync::mpsc;
use std::io::{self, BufRead};
fn main()
|
{
println!("Press enter to wake up the child thread");
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
loop {
println!("Suspending...");
match rx.try_recv() {
Ok(_) => {
println!("Terminating.");
break;
}
Err(_) => {
println!("Working...");
thread::sleep(Duration::from_millis(500));
}
}
}
});
thread::sleep(Duration::from_millis(50000));
let _ = tx.send(());
// let mut line = String::new();
// let stdin = io::stdin();
// for _ in 0..4 {
// let _ = stdin.lock().read_line(&mut line);
// let _ = tx.send(());
// }
}
|
identifier_body
|
|
term.rs
|
use std::time::Duration;
use std::thread;
use std::sync::mpsc;
use std::io::{self, BufRead};
fn main() {
println!("Press enter to wake up the child thread");
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
loop {
println!("Suspending...");
|
Ok(_) => {
println!("Terminating.");
break;
}
Err(_) => {
println!("Working...");
thread::sleep(Duration::from_millis(500));
}
}
}
});
thread::sleep(Duration::from_millis(50000));
let _ = tx.send(());
// let mut line = String::new();
// let stdin = io::stdin();
// for _ in 0..4 {
// let _ = stdin.lock().read_line(&mut line);
// let _ = tx.send(());
// }
}
|
match rx.try_recv() {
|
random_line_split
|
term.rs
|
use std::time::Duration;
use std::thread;
use std::sync::mpsc;
use std::io::{self, BufRead};
fn
|
() {
println!("Press enter to wake up the child thread");
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
loop {
println!("Suspending...");
match rx.try_recv() {
Ok(_) => {
println!("Terminating.");
break;
}
Err(_) => {
println!("Working...");
thread::sleep(Duration::from_millis(500));
}
}
}
});
thread::sleep(Duration::from_millis(50000));
let _ = tx.send(());
// let mut line = String::new();
// let stdin = io::stdin();
// for _ in 0..4 {
// let _ = stdin.lock().read_line(&mut line);
// let _ = tx.send(());
// }
}
|
main
|
identifier_name
|
term.rs
|
use std::time::Duration;
use std::thread;
use std::sync::mpsc;
use std::io::{self, BufRead};
fn main() {
println!("Press enter to wake up the child thread");
let (tx, rx) = mpsc::channel();
thread::spawn(move || {
loop {
println!("Suspending...");
match rx.try_recv() {
Ok(_) =>
|
Err(_) => {
println!("Working...");
thread::sleep(Duration::from_millis(500));
}
}
}
});
thread::sleep(Duration::from_millis(50000));
let _ = tx.send(());
// let mut line = String::new();
// let stdin = io::stdin();
// for _ in 0..4 {
// let _ = stdin.lock().read_line(&mut line);
// let _ = tx.send(());
// }
}
|
{
println!("Terminating.");
break;
}
|
conditional_block
|
chunk.py
|
"""
Chunk (N number of bytes at M offset to a source's beginning) provider.
Primarily for file sources but usable by any iterator that has both
seek and read( N ).
"""
import os
import base64
import base
import exceptions
import logging
log = logging.getLogger( __name__ )
# -----------------------------------------------------------------------------
class ChunkDataProvider( base.DataProvider ):
"""
Data provider that yields chunks of data from its file.
Note: this version does not account for lines and works with Binary datatypes.
"""
MAX_CHUNK_SIZE = 2 ** 16
DEFAULT_CHUNK_SIZE = MAX_CHUNK_SIZE
settings = {
'chunk_index' : 'int',
'chunk_size' : 'int'
}
# TODO: subclass from LimitedOffsetDataProvider?
# see web/framework/base.iterate_file, util/__init__.file_reader, and datatypes.tabular
def __init__( self, source, chunk_index=0, chunk_size=DEFAULT_CHUNK_SIZE, **kwargs ):
"""
:param chunk_index: if a source can be divided into N number of
|
(gen. in bytes).
"""
super( ChunkDataProvider, self ).__init__( source, **kwargs )
self.chunk_size = int( chunk_size )
self.chunk_pos = int( chunk_index ) * self.chunk_size
def validate_source( self, source ):
"""
Does the given source have both the methods `seek` and `read`?
:raises InvalidDataProviderSource: if not.
"""
source = super( ChunkDataProvider, self ).validate_source( source )
if( ( not hasattr( source, 'seek' ) ) or ( not hasattr( source, 'read' ) ) ):
raise exceptions.InvalidDataProviderSource( source )
return source
def __iter__( self ):
# not reeeally an iterator per se
self.__enter__()
self.source.seek( self.chunk_pos, os.SEEK_SET )
chunk = self.encode( self.source.read( self.chunk_size ) )
yield chunk
self.__exit__()
def encode( self, chunk ):
"""
Called on the chunk before returning.
Overrride to modify, encode, or decode chunks.
"""
return chunk
class Base64ChunkDataProvider( ChunkDataProvider ):
"""
Data provider that yields chunks of base64 encoded data from its file.
"""
def encode( self, chunk ):
"""
Return chunks encoded in base 64.
"""
return base64.b64encode( chunk )
|
`chunk_size` sections, this is the index of which section to
return.
:param chunk_size: how large are the desired chunks to return
|
random_line_split
|
chunk.py
|
"""
Chunk (N number of bytes at M offset to a source's beginning) provider.
Primarily for file sources but usable by any iterator that has both
seek and read( N ).
"""
import os
import base64
import base
import exceptions
import logging
log = logging.getLogger( __name__ )
# -----------------------------------------------------------------------------
class ChunkDataProvider( base.DataProvider ):
|
class Base64ChunkDataProvider( ChunkDataProvider ):
"""
Data provider that yields chunks of base64 encoded data from its file.
"""
def encode( self, chunk ):
"""
Return chunks encoded in base 64.
"""
return base64.b64encode( chunk )
|
"""
Data provider that yields chunks of data from its file.
Note: this version does not account for lines and works with Binary datatypes.
"""
MAX_CHUNK_SIZE = 2 ** 16
DEFAULT_CHUNK_SIZE = MAX_CHUNK_SIZE
settings = {
'chunk_index' : 'int',
'chunk_size' : 'int'
}
# TODO: subclass from LimitedOffsetDataProvider?
# see web/framework/base.iterate_file, util/__init__.file_reader, and datatypes.tabular
def __init__( self, source, chunk_index=0, chunk_size=DEFAULT_CHUNK_SIZE, **kwargs ):
"""
:param chunk_index: if a source can be divided into N number of
`chunk_size` sections, this is the index of which section to
return.
:param chunk_size: how large are the desired chunks to return
(gen. in bytes).
"""
super( ChunkDataProvider, self ).__init__( source, **kwargs )
self.chunk_size = int( chunk_size )
self.chunk_pos = int( chunk_index ) * self.chunk_size
def validate_source( self, source ):
"""
Does the given source have both the methods `seek` and `read`?
:raises InvalidDataProviderSource: if not.
"""
source = super( ChunkDataProvider, self ).validate_source( source )
if( ( not hasattr( source, 'seek' ) ) or ( not hasattr( source, 'read' ) ) ):
raise exceptions.InvalidDataProviderSource( source )
return source
def __iter__( self ):
# not reeeally an iterator per se
self.__enter__()
self.source.seek( self.chunk_pos, os.SEEK_SET )
chunk = self.encode( self.source.read( self.chunk_size ) )
yield chunk
self.__exit__()
def encode( self, chunk ):
"""
Called on the chunk before returning.
Overrride to modify, encode, or decode chunks.
"""
return chunk
|
identifier_body
|
chunk.py
|
"""
Chunk (N number of bytes at M offset to a source's beginning) provider.
Primarily for file sources but usable by any iterator that has both
seek and read( N ).
"""
import os
import base64
import base
import exceptions
import logging
log = logging.getLogger( __name__ )
# -----------------------------------------------------------------------------
class ChunkDataProvider( base.DataProvider ):
"""
Data provider that yields chunks of data from its file.
Note: this version does not account for lines and works with Binary datatypes.
"""
MAX_CHUNK_SIZE = 2 ** 16
DEFAULT_CHUNK_SIZE = MAX_CHUNK_SIZE
settings = {
'chunk_index' : 'int',
'chunk_size' : 'int'
}
# TODO: subclass from LimitedOffsetDataProvider?
# see web/framework/base.iterate_file, util/__init__.file_reader, and datatypes.tabular
def __init__( self, source, chunk_index=0, chunk_size=DEFAULT_CHUNK_SIZE, **kwargs ):
"""
:param chunk_index: if a source can be divided into N number of
`chunk_size` sections, this is the index of which section to
return.
:param chunk_size: how large are the desired chunks to return
(gen. in bytes).
"""
super( ChunkDataProvider, self ).__init__( source, **kwargs )
self.chunk_size = int( chunk_size )
self.chunk_pos = int( chunk_index ) * self.chunk_size
def
|
( self, source ):
"""
Does the given source have both the methods `seek` and `read`?
:raises InvalidDataProviderSource: if not.
"""
source = super( ChunkDataProvider, self ).validate_source( source )
if( ( not hasattr( source, 'seek' ) ) or ( not hasattr( source, 'read' ) ) ):
raise exceptions.InvalidDataProviderSource( source )
return source
def __iter__( self ):
# not reeeally an iterator per se
self.__enter__()
self.source.seek( self.chunk_pos, os.SEEK_SET )
chunk = self.encode( self.source.read( self.chunk_size ) )
yield chunk
self.__exit__()
def encode( self, chunk ):
"""
Called on the chunk before returning.
Overrride to modify, encode, or decode chunks.
"""
return chunk
class Base64ChunkDataProvider( ChunkDataProvider ):
"""
Data provider that yields chunks of base64 encoded data from its file.
"""
def encode( self, chunk ):
"""
Return chunks encoded in base 64.
"""
return base64.b64encode( chunk )
|
validate_source
|
identifier_name
|
chunk.py
|
"""
Chunk (N number of bytes at M offset to a source's beginning) provider.
Primarily for file sources but usable by any iterator that has both
seek and read( N ).
"""
import os
import base64
import base
import exceptions
import logging
log = logging.getLogger( __name__ )
# -----------------------------------------------------------------------------
class ChunkDataProvider( base.DataProvider ):
"""
Data provider that yields chunks of data from its file.
Note: this version does not account for lines and works with Binary datatypes.
"""
MAX_CHUNK_SIZE = 2 ** 16
DEFAULT_CHUNK_SIZE = MAX_CHUNK_SIZE
settings = {
'chunk_index' : 'int',
'chunk_size' : 'int'
}
# TODO: subclass from LimitedOffsetDataProvider?
# see web/framework/base.iterate_file, util/__init__.file_reader, and datatypes.tabular
def __init__( self, source, chunk_index=0, chunk_size=DEFAULT_CHUNK_SIZE, **kwargs ):
"""
:param chunk_index: if a source can be divided into N number of
`chunk_size` sections, this is the index of which section to
return.
:param chunk_size: how large are the desired chunks to return
(gen. in bytes).
"""
super( ChunkDataProvider, self ).__init__( source, **kwargs )
self.chunk_size = int( chunk_size )
self.chunk_pos = int( chunk_index ) * self.chunk_size
def validate_source( self, source ):
"""
Does the given source have both the methods `seek` and `read`?
:raises InvalidDataProviderSource: if not.
"""
source = super( ChunkDataProvider, self ).validate_source( source )
if( ( not hasattr( source, 'seek' ) ) or ( not hasattr( source, 'read' ) ) ):
|
return source
def __iter__( self ):
# not reeeally an iterator per se
self.__enter__()
self.source.seek( self.chunk_pos, os.SEEK_SET )
chunk = self.encode( self.source.read( self.chunk_size ) )
yield chunk
self.__exit__()
def encode( self, chunk ):
"""
Called on the chunk before returning.
Overrride to modify, encode, or decode chunks.
"""
return chunk
class Base64ChunkDataProvider( ChunkDataProvider ):
"""
Data provider that yields chunks of base64 encoded data from its file.
"""
def encode( self, chunk ):
"""
Return chunks encoded in base 64.
"""
return base64.b64encode( chunk )
|
raise exceptions.InvalidDataProviderSource( source )
|
conditional_block
|
AppShellRoutes.tsx
|
import React from 'react';
import { Navigate, Route, Routes } from 'react-router-dom';
import { RouteEnum } from 'types';
import {
Account,
Decks,
Game,
Player,
Room,
Server,
Login,
Logs,
Initialize,
|
} from 'containers';
const AppShellRoutes = () => (
<div className="AppShell-routes overflow-scroll">
<Routes>
<Route path='*' element={<Initialize />} />
<Route path={RouteEnum.ACCOUNT} element={<Account />} />
<Route path={RouteEnum.DECKS} element={<Decks />} />
<Route path={RouteEnum.GAME} element={<Game />} />
<Route path={RouteEnum.LOGS} element={<Logs />} />
<Route path={RouteEnum.PLAYER} element={<Player />} />
{<Route path={RouteEnum.ROOM} element={<Room />} />}
<Route path={RouteEnum.SERVER} element={<Server />} />
<Route path={RouteEnum.LOGIN} element={<Login />} />
<Route path={RouteEnum.UNSUPPORTED} element={<Unsupported />} />
</Routes>
</div>
);
export default AppShellRoutes;
|
Unsupported
|
random_line_split
|
yanker.py
|
"""
Yanker
Usage:
yanker [--threads=<tnum>]
"""
__version__ = '1.0.1'
import Queue
import threading
import youtube_dl as ydl
import pyperclip as clip
import time
from docopt import docopt
class ErrLogger(object):
def debug(self, msg):
pass
def warning(self, msg):
pass
def error(self, msg):
print msg
class Worker(threading.Thread):
def __init__(self, tasks):
threading.Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
def run(self):
while True:
vid = self.tasks.get()
vid.download()
self.tasks.task_done()
class Video:
def progress(self, s):
if s['status'] == 'finished':
print 'Finished {}'.format(s['filename'])
def __init__(self, url, opts={}):
self.url = url
self.ydl_opts = {
'progress_hooks': [self.progress],
'logger': ErrLogger()
}
self.ydl_opts.update(opts)
def download(self):
print 'Downloading: {}'.format(self.url)
with ydl.YoutubeDL(self.ydl_opts) as y:
try:
y.download([self.url])
except ydl.DownloadError:
print 'Unsupported URL, skipping'
class Watcher:
def __init__(self, urls=[], threads=2):
self.queue = Queue.Queue(0)
self.threads = threads
self.stopped = False
self.grabbed_urls = set([])
for _ in range(threads): Worker(self.queue)
def
|
(self):
recent = ''
while not self.stopped:
current = clip.paste()
if recent != current:
recent = current
if current.startswith(('http://', 'https://',)) and current not in self.grabbed_urls:
print 'Added: {}'.format(current)
self.grabbed_urls.add(current)
self.queue.put(Video(current))
elif current in self.grabbed_urls:
print 'Already grabbed {}'.format(current)
time.sleep(0.25)
def run():
args = docopt(__doc__, version='Yanker {}'.format(__version__))
threads = args['--threads']
if not threads:
threads = 2
else:
threads = int(threads)
print 'Starting Yanker with {} threads...'.format(threads)
watch = Watcher(threads=threads)
try:
watch.run()
except KeyboardInterrupt:
print 'Stopping...'
watch.stopped = True
|
run
|
identifier_name
|
yanker.py
|
"""
Yanker
Usage:
yanker [--threads=<tnum>]
"""
|
import pyperclip as clip
import time
from docopt import docopt
class ErrLogger(object):
def debug(self, msg):
pass
def warning(self, msg):
pass
def error(self, msg):
print msg
class Worker(threading.Thread):
def __init__(self, tasks):
threading.Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
def run(self):
while True:
vid = self.tasks.get()
vid.download()
self.tasks.task_done()
class Video:
def progress(self, s):
if s['status'] == 'finished':
print 'Finished {}'.format(s['filename'])
def __init__(self, url, opts={}):
self.url = url
self.ydl_opts = {
'progress_hooks': [self.progress],
'logger': ErrLogger()
}
self.ydl_opts.update(opts)
def download(self):
print 'Downloading: {}'.format(self.url)
with ydl.YoutubeDL(self.ydl_opts) as y:
try:
y.download([self.url])
except ydl.DownloadError:
print 'Unsupported URL, skipping'
class Watcher:
def __init__(self, urls=[], threads=2):
self.queue = Queue.Queue(0)
self.threads = threads
self.stopped = False
self.grabbed_urls = set([])
for _ in range(threads): Worker(self.queue)
def run(self):
recent = ''
while not self.stopped:
current = clip.paste()
if recent != current:
recent = current
if current.startswith(('http://', 'https://',)) and current not in self.grabbed_urls:
print 'Added: {}'.format(current)
self.grabbed_urls.add(current)
self.queue.put(Video(current))
elif current in self.grabbed_urls:
print 'Already grabbed {}'.format(current)
time.sleep(0.25)
def run():
args = docopt(__doc__, version='Yanker {}'.format(__version__))
threads = args['--threads']
if not threads:
threads = 2
else:
threads = int(threads)
print 'Starting Yanker with {} threads...'.format(threads)
watch = Watcher(threads=threads)
try:
watch.run()
except KeyboardInterrupt:
print 'Stopping...'
watch.stopped = True
|
__version__ = '1.0.1'
import Queue
import threading
import youtube_dl as ydl
|
random_line_split
|
yanker.py
|
"""
Yanker
Usage:
yanker [--threads=<tnum>]
"""
__version__ = '1.0.1'
import Queue
import threading
import youtube_dl as ydl
import pyperclip as clip
import time
from docopt import docopt
class ErrLogger(object):
def debug(self, msg):
pass
def warning(self, msg):
pass
def error(self, msg):
print msg
class Worker(threading.Thread):
def __init__(self, tasks):
|
def run(self):
while True:
vid = self.tasks.get()
vid.download()
self.tasks.task_done()
class Video:
def progress(self, s):
if s['status'] == 'finished':
print 'Finished {}'.format(s['filename'])
def __init__(self, url, opts={}):
self.url = url
self.ydl_opts = {
'progress_hooks': [self.progress],
'logger': ErrLogger()
}
self.ydl_opts.update(opts)
def download(self):
print 'Downloading: {}'.format(self.url)
with ydl.YoutubeDL(self.ydl_opts) as y:
try:
y.download([self.url])
except ydl.DownloadError:
print 'Unsupported URL, skipping'
class Watcher:
def __init__(self, urls=[], threads=2):
self.queue = Queue.Queue(0)
self.threads = threads
self.stopped = False
self.grabbed_urls = set([])
for _ in range(threads): Worker(self.queue)
def run(self):
recent = ''
while not self.stopped:
current = clip.paste()
if recent != current:
recent = current
if current.startswith(('http://', 'https://',)) and current not in self.grabbed_urls:
print 'Added: {}'.format(current)
self.grabbed_urls.add(current)
self.queue.put(Video(current))
elif current in self.grabbed_urls:
print 'Already grabbed {}'.format(current)
time.sleep(0.25)
def run():
args = docopt(__doc__, version='Yanker {}'.format(__version__))
threads = args['--threads']
if not threads:
threads = 2
else:
threads = int(threads)
print 'Starting Yanker with {} threads...'.format(threads)
watch = Watcher(threads=threads)
try:
watch.run()
except KeyboardInterrupt:
print 'Stopping...'
watch.stopped = True
|
threading.Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
|
identifier_body
|
yanker.py
|
"""
Yanker
Usage:
yanker [--threads=<tnum>]
"""
__version__ = '1.0.1'
import Queue
import threading
import youtube_dl as ydl
import pyperclip as clip
import time
from docopt import docopt
class ErrLogger(object):
def debug(self, msg):
pass
def warning(self, msg):
pass
def error(self, msg):
print msg
class Worker(threading.Thread):
def __init__(self, tasks):
threading.Thread.__init__(self)
self.tasks = tasks
self.daemon = True
self.start()
def run(self):
while True:
vid = self.tasks.get()
vid.download()
self.tasks.task_done()
class Video:
def progress(self, s):
if s['status'] == 'finished':
print 'Finished {}'.format(s['filename'])
def __init__(self, url, opts={}):
self.url = url
self.ydl_opts = {
'progress_hooks': [self.progress],
'logger': ErrLogger()
}
self.ydl_opts.update(opts)
def download(self):
print 'Downloading: {}'.format(self.url)
with ydl.YoutubeDL(self.ydl_opts) as y:
try:
y.download([self.url])
except ydl.DownloadError:
print 'Unsupported URL, skipping'
class Watcher:
def __init__(self, urls=[], threads=2):
self.queue = Queue.Queue(0)
self.threads = threads
self.stopped = False
self.grabbed_urls = set([])
for _ in range(threads): Worker(self.queue)
def run(self):
recent = ''
while not self.stopped:
current = clip.paste()
if recent != current:
recent = current
if current.startswith(('http://', 'https://',)) and current not in self.grabbed_urls:
|
elif current in self.grabbed_urls:
print 'Already grabbed {}'.format(current)
time.sleep(0.25)
def run():
args = docopt(__doc__, version='Yanker {}'.format(__version__))
threads = args['--threads']
if not threads:
threads = 2
else:
threads = int(threads)
print 'Starting Yanker with {} threads...'.format(threads)
watch = Watcher(threads=threads)
try:
watch.run()
except KeyboardInterrupt:
print 'Stopping...'
watch.stopped = True
|
print 'Added: {}'.format(current)
self.grabbed_urls.add(current)
self.queue.put(Video(current))
|
conditional_block
|
logging.rs
|
//! Module implementing logging for the application.
//!
//! This includes setting up log filtering given a verbosity value,
//! as well as defining how the logs are being formatted to stderr.
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::io;
use ansi_term::{Colour, Style};
use isatty;
use log::SetLoggerError;
use slog::{self, DrainExt, FilterLevel, Level};
use slog_envlogger::LogBuilder;
use slog_stdlog;
use slog_stream;
use time;
// Default logging level defined using the two enums used by slog.
// Both values must correspond to the same level. (This is checked by a test).
const DEFAULT_LEVEL: Level = Level::Info;
const DEFAULT_FILTER_LEVEL: FilterLevel = FilterLevel::Info;
// Arrays of log levels, indexed by verbosity.
const POSITIVE_VERBOSITY_LEVELS: &'static [FilterLevel] = &[
DEFAULT_FILTER_LEVEL,
FilterLevel::Debug,
FilterLevel::Trace,
];
const NEGATIVE_VERBOSITY_LEVELS: &'static [FilterLevel] = &[
DEFAULT_FILTER_LEVEL,
FilterLevel::Warning,
FilterLevel::Error,
FilterLevel::Critical,
FilterLevel::Off,
];
/// Initialize logging with given verbosity.
/// The verbosity value has the same meaning as in args::Options::verbosity.
pub fn init(verbosity: isize) -> Result<(), SetLoggerError> {
let istty = cfg!(unix) && isatty::stderr_isatty();
let stderr = slog_stream::stream(io::stderr(), LogFormat{tty: istty});
// Determine the log filtering level based on verbosity.
// If the argument is excessive, log that but clamp to the highest/lowest log level.
let mut verbosity = verbosity;
let mut excessive = false;
let level = if verbosity >= 0 {
if verbosity >= POSITIVE_VERBOSITY_LEVELS.len() as isize {
excessive = true;
verbosity = POSITIVE_VERBOSITY_LEVELS.len() as isize - 1;
}
POSITIVE_VERBOSITY_LEVELS[verbosity as usize]
} else {
verbosity = -verbosity;
if verbosity >= NEGATIVE_VERBOSITY_LEVELS.len() as isize {
excessive = true;
verbosity = NEGATIVE_VERBOSITY_LEVELS.len() as isize - 1;
}
NEGATIVE_VERBOSITY_LEVELS[verbosity as usize]
};
// Include universal logger options, like the level.
let mut builder = LogBuilder::new(stderr);
builder = builder.filter(None, level);
// Make some of the libraries less chatty.
builder = builder
.filter(Some("html5ever"), FilterLevel::Info)
.filter(Some("hyper"), FilterLevel::Info);
// Include any additional config from environmental variables.
// This will override the options above if necessary,
// so e.g. it is still possible to get full debug output from hyper.
if let Ok(ref conf) = env::var("RUST_LOG") {
builder = builder.parse(conf);
}
// Initialize the logger, possibly logging the excessive verbosity option.
// TODO: migrate off of `log` macro to slog completely,
// so that slog_scope is used to set up the application's logger
// and slog_stdlog is only for the libraries like hyper that use `log` macros
let env_logger_drain = builder.build();
let logger = slog::Logger::root(env_logger_drain.fuse(), o!());
try!(slog_stdlog::set_logger(logger));
if excessive {
warn!("-v/-q flag passed too many times, logging level {:?} assumed", level);
}
Ok(())
}
// Log formatting
/// Token type that's only uses to tell slog-stream how to format our log entries.
struct LogFormat {
pub tty: bool,
}
impl slog_stream::Format for LogFormat {
/// Format a single log Record and write it to given output.
fn format(&self, output: &mut io::Write,
record: &slog::Record,
_logger_kvp: &slog::OwnedKeyValueList) -> io::Result<()> {
// Format the higher level (more fine-grained) messages with greater detail,
// as they are only visible when user explicitly enables verbose logging.
let msg = if record.level() > DEFAULT_LEVEL {
let logtime = format_log_time();
let level: String = {
let first_char = record.level().as_str().chars().next().unwrap();
first_char.to_uppercase().collect()
};
let module = {
let module = record.module();
match module.find("::") {
Some(idx) => Cow::Borrowed(&module[idx + 2..]),
None => "main".into(),
}
};
// Dim the prefix (everything that's not a message) if we're outputting to a TTY.
let prefix_style = if self.tty { *TTY_FINE_PREFIX_STYLE } else { Style::default() };
let prefix = format!("{}{} {}#{}]", level, logtime, module, record.line());
format!("{} {}\n", prefix_style.paint(prefix), record.msg())
} else {
// Colorize the level label if we're outputting to a TTY.
let level: Cow<str> = if self.tty {
let style = TTY_LEVEL_STYLES.get(&record.level().as_usize())
.cloned()
.unwrap_or_else(Style::default);
format!("{}", style.paint(record.level().as_str())).into()
} else {
record.level().as_str().into()
};
format!("{}: {}\n", level, record.msg())
};
try!(output.write_all(msg.as_bytes()));
Ok(())
}
}
/// Format the timestamp part of a detailed log entry.
fn
|
() -> String {
let utc_now = time::now().to_utc();
let mut logtime = format!("{}", utc_now.rfc3339()); // E.g.: 2012-02-22T14:53:18Z
// Insert millisecond count before the Z.
let millis = utc_now.tm_nsec / NANOS_IN_MILLISEC;
logtime.pop();
format!("{}.{:04}Z", logtime, millis)
}
const NANOS_IN_MILLISEC: i32 = 1000000;
lazy_static! {
/// Map of log levels to their ANSI terminal styles.
// (Level doesn't implement Hash so it has to be usize).
static ref TTY_LEVEL_STYLES: HashMap<usize, Style> = hashmap!{
Level::Info.as_usize() => Colour::Green.normal(),
Level::Warning.as_usize() => Colour::Yellow.normal(),
Level::Error.as_usize() => Colour::Red.normal(),
Level::Critical.as_usize() => Colour::Purple.normal(),
};
/// ANSI terminal style for the prefix (timestamp etc.) of a fine log message.
static ref TTY_FINE_PREFIX_STYLE: Style = Style::new().dimmed();
}
#[cfg(test)]
mod tests {
use slog::FilterLevel;
use super::{DEFAULT_LEVEL, DEFAULT_FILTER_LEVEL,
NEGATIVE_VERBOSITY_LEVELS, POSITIVE_VERBOSITY_LEVELS};
/// Check that default logging level is defined consistently.
#[test]
fn default_level() {
let level = DEFAULT_LEVEL.as_usize();
let filter_level = DEFAULT_FILTER_LEVEL.as_usize();
assert_eq!(level, filter_level,
"Default logging level is defined inconsistently: Level::{:?} vs. FilterLevel::{:?}",
DEFAULT_LEVEL, DEFAULT_FILTER_LEVEL);
}
#[test]
fn verbosity_levels() {
assert_eq!(NEGATIVE_VERBOSITY_LEVELS[0], POSITIVE_VERBOSITY_LEVELS[0]);
assert!(NEGATIVE_VERBOSITY_LEVELS.contains(&FilterLevel::Off),
"Verbosity levels don't allow to turn logging off completely");
}
}
|
format_log_time
|
identifier_name
|
logging.rs
|
//! Module implementing logging for the application.
//!
//! This includes setting up log filtering given a verbosity value,
//! as well as defining how the logs are being formatted to stderr.
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::io;
use ansi_term::{Colour, Style};
use isatty;
use log::SetLoggerError;
use slog::{self, DrainExt, FilterLevel, Level};
use slog_envlogger::LogBuilder;
use slog_stdlog;
use slog_stream;
use time;
// Default logging level defined using the two enums used by slog.
// Both values must correspond to the same level. (This is checked by a test).
const DEFAULT_LEVEL: Level = Level::Info;
const DEFAULT_FILTER_LEVEL: FilterLevel = FilterLevel::Info;
// Arrays of log levels, indexed by verbosity.
const POSITIVE_VERBOSITY_LEVELS: &'static [FilterLevel] = &[
DEFAULT_FILTER_LEVEL,
FilterLevel::Debug,
FilterLevel::Trace,
];
const NEGATIVE_VERBOSITY_LEVELS: &'static [FilterLevel] = &[
DEFAULT_FILTER_LEVEL,
FilterLevel::Warning,
FilterLevel::Error,
FilterLevel::Critical,
FilterLevel::Off,
];
/// Initialize logging with given verbosity.
/// The verbosity value has the same meaning as in args::Options::verbosity.
pub fn init(verbosity: isize) -> Result<(), SetLoggerError> {
let istty = cfg!(unix) && isatty::stderr_isatty();
let stderr = slog_stream::stream(io::stderr(), LogFormat{tty: istty});
// Determine the log filtering level based on verbosity.
// If the argument is excessive, log that but clamp to the highest/lowest log level.
let mut verbosity = verbosity;
let mut excessive = false;
let level = if verbosity >= 0 {
if verbosity >= POSITIVE_VERBOSITY_LEVELS.len() as isize {
excessive = true;
verbosity = POSITIVE_VERBOSITY_LEVELS.len() as isize - 1;
}
POSITIVE_VERBOSITY_LEVELS[verbosity as usize]
} else {
verbosity = -verbosity;
if verbosity >= NEGATIVE_VERBOSITY_LEVELS.len() as isize {
excessive = true;
verbosity = NEGATIVE_VERBOSITY_LEVELS.len() as isize - 1;
}
NEGATIVE_VERBOSITY_LEVELS[verbosity as usize]
};
// Include universal logger options, like the level.
let mut builder = LogBuilder::new(stderr);
builder = builder.filter(None, level);
// Make some of the libraries less chatty.
builder = builder
.filter(Some("html5ever"), FilterLevel::Info)
.filter(Some("hyper"), FilterLevel::Info);
// Include any additional config from environmental variables.
// This will override the options above if necessary,
// so e.g. it is still possible to get full debug output from hyper.
if let Ok(ref conf) = env::var("RUST_LOG") {
builder = builder.parse(conf);
}
// Initialize the logger, possibly logging the excessive verbosity option.
// TODO: migrate off of `log` macro to slog completely,
// so that slog_scope is used to set up the application's logger
// and slog_stdlog is only for the libraries like hyper that use `log` macros
let env_logger_drain = builder.build();
let logger = slog::Logger::root(env_logger_drain.fuse(), o!());
try!(slog_stdlog::set_logger(logger));
if excessive {
warn!("-v/-q flag passed too many times, logging level {:?} assumed", level);
}
Ok(())
}
// Log formatting
/// Token type that's only uses to tell slog-stream how to format our log entries.
struct LogFormat {
pub tty: bool,
}
impl slog_stream::Format for LogFormat {
/// Format a single log Record and write it to given output.
fn format(&self, output: &mut io::Write,
record: &slog::Record,
_logger_kvp: &slog::OwnedKeyValueList) -> io::Result<()> {
// Format the higher level (more fine-grained) messages with greater detail,
// as they are only visible when user explicitly enables verbose logging.
let msg = if record.level() > DEFAULT_LEVEL {
let logtime = format_log_time();
let level: String = {
let first_char = record.level().as_str().chars().next().unwrap();
first_char.to_uppercase().collect()
};
let module = {
let module = record.module();
match module.find("::") {
Some(idx) => Cow::Borrowed(&module[idx + 2..]),
None => "main".into(),
}
};
// Dim the prefix (everything that's not a message) if we're outputting to a TTY.
let prefix_style = if self.tty { *TTY_FINE_PREFIX_STYLE } else { Style::default() };
let prefix = format!("{}{} {}#{}]", level, logtime, module, record.line());
format!("{} {}\n", prefix_style.paint(prefix), record.msg())
} else {
// Colorize the level label if we're outputting to a TTY.
let level: Cow<str> = if self.tty {
let style = TTY_LEVEL_STYLES.get(&record.level().as_usize())
.cloned()
.unwrap_or_else(Style::default);
format!("{}", style.paint(record.level().as_str())).into()
} else {
record.level().as_str().into()
};
format!("{}: {}\n", level, record.msg())
};
try!(output.write_all(msg.as_bytes()));
Ok(())
}
}
/// Format the timestamp part of a detailed log entry.
fn format_log_time() -> String {
let utc_now = time::now().to_utc();
let mut logtime = format!("{}", utc_now.rfc3339()); // E.g.: 2012-02-22T14:53:18Z
// Insert millisecond count before the Z.
let millis = utc_now.tm_nsec / NANOS_IN_MILLISEC;
logtime.pop();
format!("{}.{:04}Z", logtime, millis)
}
const NANOS_IN_MILLISEC: i32 = 1000000;
lazy_static! {
/// Map of log levels to their ANSI terminal styles.
// (Level doesn't implement Hash so it has to be usize).
static ref TTY_LEVEL_STYLES: HashMap<usize, Style> = hashmap!{
Level::Info.as_usize() => Colour::Green.normal(),
Level::Warning.as_usize() => Colour::Yellow.normal(),
Level::Error.as_usize() => Colour::Red.normal(),
Level::Critical.as_usize() => Colour::Purple.normal(),
};
/// ANSI terminal style for the prefix (timestamp etc.) of a fine log message.
static ref TTY_FINE_PREFIX_STYLE: Style = Style::new().dimmed();
}
#[cfg(test)]
mod tests {
use slog::FilterLevel;
use super::{DEFAULT_LEVEL, DEFAULT_FILTER_LEVEL,
NEGATIVE_VERBOSITY_LEVELS, POSITIVE_VERBOSITY_LEVELS};
/// Check that default logging level is defined consistently.
#[test]
fn default_level() {
let level = DEFAULT_LEVEL.as_usize();
let filter_level = DEFAULT_FILTER_LEVEL.as_usize();
assert_eq!(level, filter_level,
"Default logging level is defined inconsistently: Level::{:?} vs. FilterLevel::{:?}",
DEFAULT_LEVEL, DEFAULT_FILTER_LEVEL);
}
#[test]
fn verbosity_levels()
|
}
|
{
assert_eq!(NEGATIVE_VERBOSITY_LEVELS[0], POSITIVE_VERBOSITY_LEVELS[0]);
assert!(NEGATIVE_VERBOSITY_LEVELS.contains(&FilterLevel::Off),
"Verbosity levels don't allow to turn logging off completely");
}
|
identifier_body
|
logging.rs
|
//! Module implementing logging for the application.
//!
//! This includes setting up log filtering given a verbosity value,
//! as well as defining how the logs are being formatted to stderr.
use std::borrow::Cow;
use std::collections::HashMap;
use std::env;
use std::io;
use ansi_term::{Colour, Style};
use isatty;
use log::SetLoggerError;
use slog::{self, DrainExt, FilterLevel, Level};
use slog_envlogger::LogBuilder;
use slog_stdlog;
use slog_stream;
use time;
// Default logging level defined using the two enums used by slog.
// Both values must correspond to the same level. (This is checked by a test).
const DEFAULT_LEVEL: Level = Level::Info;
const DEFAULT_FILTER_LEVEL: FilterLevel = FilterLevel::Info;
// Arrays of log levels, indexed by verbosity.
const POSITIVE_VERBOSITY_LEVELS: &'static [FilterLevel] = &[
DEFAULT_FILTER_LEVEL,
FilterLevel::Debug,
FilterLevel::Trace,
];
const NEGATIVE_VERBOSITY_LEVELS: &'static [FilterLevel] = &[
DEFAULT_FILTER_LEVEL,
FilterLevel::Warning,
FilterLevel::Error,
FilterLevel::Critical,
FilterLevel::Off,
];
/// Initialize logging with given verbosity.
/// The verbosity value has the same meaning as in args::Options::verbosity.
pub fn init(verbosity: isize) -> Result<(), SetLoggerError> {
let istty = cfg!(unix) && isatty::stderr_isatty();
let stderr = slog_stream::stream(io::stderr(), LogFormat{tty: istty});
// Determine the log filtering level based on verbosity.
// If the argument is excessive, log that but clamp to the highest/lowest log level.
let mut verbosity = verbosity;
let mut excessive = false;
let level = if verbosity >= 0 {
if verbosity >= POSITIVE_VERBOSITY_LEVELS.len() as isize {
excessive = true;
verbosity = POSITIVE_VERBOSITY_LEVELS.len() as isize - 1;
}
POSITIVE_VERBOSITY_LEVELS[verbosity as usize]
} else {
verbosity = -verbosity;
if verbosity >= NEGATIVE_VERBOSITY_LEVELS.len() as isize {
excessive = true;
verbosity = NEGATIVE_VERBOSITY_LEVELS.len() as isize - 1;
}
NEGATIVE_VERBOSITY_LEVELS[verbosity as usize]
};
// Include universal logger options, like the level.
let mut builder = LogBuilder::new(stderr);
builder = builder.filter(None, level);
// Make some of the libraries less chatty.
builder = builder
.filter(Some("html5ever"), FilterLevel::Info)
.filter(Some("hyper"), FilterLevel::Info);
// Include any additional config from environmental variables.
// This will override the options above if necessary,
// so e.g. it is still possible to get full debug output from hyper.
if let Ok(ref conf) = env::var("RUST_LOG") {
builder = builder.parse(conf);
}
// Initialize the logger, possibly logging the excessive verbosity option.
// TODO: migrate off of `log` macro to slog completely,
// so that slog_scope is used to set up the application's logger
// and slog_stdlog is only for the libraries like hyper that use `log` macros
let env_logger_drain = builder.build();
let logger = slog::Logger::root(env_logger_drain.fuse(), o!());
try!(slog_stdlog::set_logger(logger));
if excessive {
warn!("-v/-q flag passed too many times, logging level {:?} assumed", level);
}
Ok(())
}
// Log formatting
/// Token type that's only uses to tell slog-stream how to format our log entries.
struct LogFormat {
pub tty: bool,
}
impl slog_stream::Format for LogFormat {
/// Format a single log Record and write it to given output.
fn format(&self, output: &mut io::Write,
record: &slog::Record,
_logger_kvp: &slog::OwnedKeyValueList) -> io::Result<()> {
// Format the higher level (more fine-grained) messages with greater detail,
// as they are only visible when user explicitly enables verbose logging.
let msg = if record.level() > DEFAULT_LEVEL {
let logtime = format_log_time();
let level: String = {
let first_char = record.level().as_str().chars().next().unwrap();
first_char.to_uppercase().collect()
};
let module = {
let module = record.module();
match module.find("::") {
Some(idx) => Cow::Borrowed(&module[idx + 2..]),
None => "main".into(),
}
};
// Dim the prefix (everything that's not a message) if we're outputting to a TTY.
let prefix_style = if self.tty { *TTY_FINE_PREFIX_STYLE } else { Style::default() };
let prefix = format!("{}{} {}#{}]", level, logtime, module, record.line());
format!("{} {}\n", prefix_style.paint(prefix), record.msg())
} else {
// Colorize the level label if we're outputting to a TTY.
let level: Cow<str> = if self.tty {
let style = TTY_LEVEL_STYLES.get(&record.level().as_usize())
.cloned()
.unwrap_or_else(Style::default);
format!("{}", style.paint(record.level().as_str())).into()
} else {
record.level().as_str().into()
};
format!("{}: {}\n", level, record.msg())
};
try!(output.write_all(msg.as_bytes()));
Ok(())
}
}
/// Format the timestamp part of a detailed log entry.
fn format_log_time() -> String {
let utc_now = time::now().to_utc();
let mut logtime = format!("{}", utc_now.rfc3339()); // E.g.: 2012-02-22T14:53:18Z
// Insert millisecond count before the Z.
let millis = utc_now.tm_nsec / NANOS_IN_MILLISEC;
logtime.pop();
format!("{}.{:04}Z", logtime, millis)
}
const NANOS_IN_MILLISEC: i32 = 1000000;
|
Level::Info.as_usize() => Colour::Green.normal(),
Level::Warning.as_usize() => Colour::Yellow.normal(),
Level::Error.as_usize() => Colour::Red.normal(),
Level::Critical.as_usize() => Colour::Purple.normal(),
};
/// ANSI terminal style for the prefix (timestamp etc.) of a fine log message.
static ref TTY_FINE_PREFIX_STYLE: Style = Style::new().dimmed();
}
#[cfg(test)]
mod tests {
use slog::FilterLevel;
use super::{DEFAULT_LEVEL, DEFAULT_FILTER_LEVEL,
NEGATIVE_VERBOSITY_LEVELS, POSITIVE_VERBOSITY_LEVELS};
/// Check that default logging level is defined consistently.
#[test]
fn default_level() {
let level = DEFAULT_LEVEL.as_usize();
let filter_level = DEFAULT_FILTER_LEVEL.as_usize();
assert_eq!(level, filter_level,
"Default logging level is defined inconsistently: Level::{:?} vs. FilterLevel::{:?}",
DEFAULT_LEVEL, DEFAULT_FILTER_LEVEL);
}
#[test]
fn verbosity_levels() {
assert_eq!(NEGATIVE_VERBOSITY_LEVELS[0], POSITIVE_VERBOSITY_LEVELS[0]);
assert!(NEGATIVE_VERBOSITY_LEVELS.contains(&FilterLevel::Off),
"Verbosity levels don't allow to turn logging off completely");
}
}
|
lazy_static! {
/// Map of log levels to their ANSI terminal styles.
// (Level doesn't implement Hash so it has to be usize).
static ref TTY_LEVEL_STYLES: HashMap<usize, Style> = hashmap!{
|
random_line_split
|
spinner.js
|
/*!
* Ext JS Library 3.3.0
* Copyright(c) 2006-2010 Ext JS, Inc.
* [email protected]
* http://www.extjs.com/license
*/
Ext.onReady(function(){
var simple = new Ext.FormPanel({
labelWidth: 40, // label settings here cascade unless overridden
frame: true,
title: 'Simple Form',
bodyStyle: 'padding:5px 5px 0',
width: 210,
defaults: {width: 135},
defaultType: 'textfield',
items: [
new Ext.ux.form.SpinnerField({
fieldLabel: 'Age',
name: 'age'
}),
{
xtype: 'spinnerfield',
fieldLabel: 'Test',
name: 'test',
minValue: 0,
maxValue: 100,
allowDecimals: true,
decimalPrecision: 1,
incrementValue: 0.4,
alternateIncrementValue: 2.1,
accelerate: true
}
]
});
|
});
|
simple.render('form-ct');
|
random_line_split
|
up-to-date.py
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/QT/up-to-date.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Validate that a stripped-down real-world Qt configuation (thanks
to Leanid Nazdrynau) with a generated .h file is correctly
up-to-date after a build.
(This catches a bug that was introduced during a signature refactoring
ca. September 2005.)
"""
import os
import TestSCons
_obj = TestSCons._obj
test = TestSCons.TestSCons()
if not os.environ.get('QTDIR', None):
|
test.subdir('layer',
['layer', 'aclock'],
['layer', 'aclock', 'qt_bug'])
test.write('SConstruct', """\
import os
aa=os.getcwd()
env=Environment(tools=['default','expheaders','qt'],toolpath=[aa])
env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include')
if not os.access(env["EXP_HEADER_ABS"],os.F_OK):
os.mkdir (env["EXP_HEADER_ABS"])
Export('env')
env.SConscript('layer/aclock/qt_bug/SConscript')
""")
test.write('expheaders.py', """\
import SCons.Defaults
def ExpHeaderScanner(node, env, path):
return []
def generate(env):
HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0755)])
HeaderBuilder= SCons.Builder.Builder(action=HeaderAction)
env['BUILDERS']['ExportHeaders'] = HeaderBuilder
def exists(env):
return 0
""")
test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\
import os
Import ("env")
env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h')
env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h')
env.Append(CPPPATH=env["EXP_HEADER_ABS"])
env.StaticLibrary('all',['main.ui','migraform.ui','my.cc'])
""")
test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\
<!DOCTYPE UI><UI version="3.3" stdsetdef="1">
<class>Main</class>
<widget class="QWizard">
<property name="name">
<cstring>Main</cstring>
</property>
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>385</height>
</rect>
</property>
</widget>
<includes>
<include location="local" impldecl="in implementation">migraform.h</include>
</includes>
</UI>
""")
test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\
<!DOCTYPE UI><UI version="3.3" stdsetdef="1">
<class>MigrateForm</class>
<widget class="QWizard">
<property name="name">
<cstring>MigrateForm</cstring>
</property>
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>385</height>
</rect>
</property>
</widget>
</UI>
""")
test.write(['layer', 'aclock', 'qt_bug', 'my.cc'], """\
#include <main.h>
""")
my_obj = 'layer/aclock/qt_bug/my'+_obj
test.run(arguments = my_obj, stderr=None)
expect = my_obj.replace( '/', os.sep )
test.up_to_date(options = '--debug=explain',
arguments = (expect),
stderr=None)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
x ="External environment variable $QTDIR not set; skipping test(s).\n"
test.skip_test(x)
|
conditional_block
|
up-to-date.py
|
#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/QT/up-to-date.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Validate that a stripped-down real-world Qt configuation (thanks
to Leanid Nazdrynau) with a generated .h file is correctly
up-to-date after a build.
(This catches a bug that was introduced during a signature refactoring
ca. September 2005.)
"""
import os
import TestSCons
_obj = TestSCons._obj
test = TestSCons.TestSCons()
if not os.environ.get('QTDIR', None):
x ="External environment variable $QTDIR not set; skipping test(s).\n"
test.skip_test(x)
test.subdir('layer',
['layer', 'aclock'],
['layer', 'aclock', 'qt_bug'])
test.write('SConstruct', """\
import os
aa=os.getcwd()
env=Environment(tools=['default','expheaders','qt'],toolpath=[aa])
env["EXP_HEADER_ABS"]=os.path.join(os.getcwd(),'include')
if not os.access(env["EXP_HEADER_ABS"],os.F_OK):
os.mkdir (env["EXP_HEADER_ABS"])
Export('env')
env.SConscript('layer/aclock/qt_bug/SConscript')
""")
test.write('expheaders.py', """\
import SCons.Defaults
def ExpHeaderScanner(node, env, path):
return []
def generate(env):
HeaderAction=SCons.Action.Action([SCons.Defaults.Copy('$TARGET','$SOURCE'),SCons.Defaults.Chmod('$TARGET',0755)])
HeaderBuilder= SCons.Builder.Builder(action=HeaderAction)
env['BUILDERS']['ExportHeaders'] = HeaderBuilder
def exists(env):
return 0
""")
test.write(['layer', 'aclock', 'qt_bug', 'SConscript'], """\
import os
Import ("env")
env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'main.h'), 'main.h')
env.ExportHeaders(os.path.join(env["EXP_HEADER_ABS"],'migraform.h'), 'migraform.h')
env.Append(CPPPATH=env["EXP_HEADER_ABS"])
env.StaticLibrary('all',['main.ui','migraform.ui','my.cc'])
""")
test.write(['layer', 'aclock', 'qt_bug', 'main.ui'], """\
<!DOCTYPE UI><UI version="3.3" stdsetdef="1">
<class>Main</class>
<widget class="QWizard">
<property name="name">
<cstring>Main</cstring>
</property>
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>385</height>
</rect>
</property>
</widget>
<includes>
<include location="local" impldecl="in implementation">migraform.h</include>
</includes>
</UI>
""")
test.write(['layer', 'aclock', 'qt_bug', 'migraform.ui'], """\
<!DOCTYPE UI><UI version="3.3" stdsetdef="1">
<class>MigrateForm</class>
<widget class="QWizard">
<property name="name">
<cstring>MigrateForm</cstring>
</property>
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>600</width>
<height>385</height>
</rect>
</property>
</widget>
</UI>
""")
test.write(['layer', 'aclock', 'qt_bug', 'my.cc'], """\
#include <main.h>
""")
my_obj = 'layer/aclock/qt_bug/my'+_obj
test.run(arguments = my_obj, stderr=None)
expect = my_obj.replace( '/', os.sep )
test.up_to_date(options = '--debug=explain',
arguments = (expect),
stderr=None)
|
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
|
random_line_split
|
dimpleStepHor-cn.js
|
HTMLWidgets.widget({
name: 'dimpleStepHor-cn',
type: 'output',
initialize: function(el, width, height) {
d3.select(el).append("div")
|
.attr("id","chartContainer")
.attr("width", width)
.attr("height", height);
var svg = dimple.newSvg("#chartContainer", width, height);
var data = []
var myChart = new dimple.chart(svg, data);
return myChart
},
renderValue: function(el, x, instance) {
var myChart = instance;
myChart.data = x.data;
var colNames = x.colNames
//myChart.setBounds(60, 30, 510, 305)
myChart.setMargins("15%", "15%", "15%", "15%");
var y = myChart.addCategoryAxis("y",colNames[0]);
var x = myChart.addMeasureAxis("x", colNames[1]);
var s = myChart.addSeries(null, dimple.plot.line);
s.interpolation = 'step';
myChart.draw();
},
resize: function(el, width, height, instance) {
d3.select(el).select("svg")
.attr("width", width)
.attr("height", height);
instance.draw(0, true);
}
});
|
random_line_split
|
|
loading.component.ts
|
import { Component, ViewChild, ElementRef, AfterViewInit, Input, ChangeDetectionStrategy } from '@angular/core'
import { ColorsService } from '../../services/colors.service'
@Component({
selector: 'eqm-loading',
templateUrl: './loading.component.html',
styleUrls: [ './loading.component.scss' ],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class LoadingComponent implements AfterViewInit {
@ViewChild('wave', { static: true }) wave!: ElementRef
@Input() text?: string
@Input() showText = true
constructor (
public colors: ColorsService
) {}
ngAfterViewInit ()
|
}
|
{
const path = this.wave.nativeElement
// eslint-disable-next-line no-loss-of-precision
const m = 0.512286623256592433
const w = 90
const h = 60
const a = h / 4
const y = h / 2
const pathData = [
'M', w * 0, y + a / 2,
'c', a * m, 0, -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a
].join(' ')
path.setAttribute('d', pathData)
}
|
identifier_body
|
loading.component.ts
|
import { Component, ViewChild, ElementRef, AfterViewInit, Input, ChangeDetectionStrategy } from '@angular/core'
import { ColorsService } from '../../services/colors.service'
@Component({
selector: 'eqm-loading',
templateUrl: './loading.component.html',
styleUrls: [ './loading.component.scss' ],
changeDetection: ChangeDetectionStrategy.OnPush
})
export class
|
implements AfterViewInit {
@ViewChild('wave', { static: true }) wave!: ElementRef
@Input() text?: string
@Input() showText = true
constructor (
public colors: ColorsService
) {}
ngAfterViewInit () {
const path = this.wave.nativeElement
// eslint-disable-next-line no-loss-of-precision
const m = 0.512286623256592433
const w = 90
const h = 60
const a = h / 4
const y = h / 2
const pathData = [
'M', w * 0, y + a / 2,
'c', a * m, 0, -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a
].join(' ')
path.setAttribute('d', pathData)
}
}
|
LoadingComponent
|
identifier_name
|
loading.component.ts
|
import { Component, ViewChild, ElementRef, AfterViewInit, Input, ChangeDetectionStrategy } from '@angular/core'
import { ColorsService } from '../../services/colors.service'
@Component({
selector: 'eqm-loading',
templateUrl: './loading.component.html',
|
changeDetection: ChangeDetectionStrategy.OnPush
})
export class LoadingComponent implements AfterViewInit {
@ViewChild('wave', { static: true }) wave!: ElementRef
@Input() text?: string
@Input() showText = true
constructor (
public colors: ColorsService
) {}
ngAfterViewInit () {
const path = this.wave.nativeElement
// eslint-disable-next-line no-loss-of-precision
const m = 0.512286623256592433
const w = 90
const h = 60
const a = h / 4
const y = h / 2
const pathData = [
'M', w * 0, y + a / 2,
'c', a * m, 0, -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a,
's', -(1 - a) * m, a, a, a,
's', -(1 - a) * m, -a, a, -a
].join(' ')
path.setAttribute('d', pathData)
}
}
|
styleUrls: [ './loading.component.scss' ],
|
random_line_split
|
principals.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.core.db.sqlalchemy.principals import PrincipalPermissionsMixin
from indico.core.db.sqlalchemy.util.models import auto_table_args
from indico.util.string import format_repr
class CategoryPrincipal(PrincipalPermissionsMixin, db.Model):
__tablename__ = 'principals'
principal_backref_name = 'in_category_acls'
principal_for = 'Category'
unique_columns = ('category_id',)
allow_networks = True
allow_category_roles = True
@declared_attr
def
|
(cls):
return auto_table_args(cls, schema='categories')
#: The ID of the acl entry
id = db.Column(
db.Integer,
primary_key=True
)
#: The ID of the associated event
category_id = db.Column(
db.Integer,
db.ForeignKey('categories.categories.id'),
nullable=False,
index=True
)
# relationship backrefs:
# - category (Category.acl_entries)
def __repr__(self):
return format_repr(self, 'id', 'category_id', 'principal', read_access=False, full_access=False, permissions=[])
|
__table_args__
|
identifier_name
|
principals.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.core.db.sqlalchemy.principals import PrincipalPermissionsMixin
from indico.core.db.sqlalchemy.util.models import auto_table_args
from indico.util.string import format_repr
|
class CategoryPrincipal(PrincipalPermissionsMixin, db.Model):
__tablename__ = 'principals'
principal_backref_name = 'in_category_acls'
principal_for = 'Category'
unique_columns = ('category_id',)
allow_networks = True
allow_category_roles = True
@declared_attr
def __table_args__(cls):
return auto_table_args(cls, schema='categories')
#: The ID of the acl entry
id = db.Column(
db.Integer,
primary_key=True
)
#: The ID of the associated event
category_id = db.Column(
db.Integer,
db.ForeignKey('categories.categories.id'),
nullable=False,
index=True
)
# relationship backrefs:
# - category (Category.acl_entries)
def __repr__(self):
return format_repr(self, 'id', 'category_id', 'principal', read_access=False, full_access=False, permissions=[])
|
random_line_split
|
|
principals.py
|
# This file is part of Indico.
# Copyright (C) 2002 - 2021 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from sqlalchemy.ext.declarative import declared_attr
from indico.core.db import db
from indico.core.db.sqlalchemy.principals import PrincipalPermissionsMixin
from indico.core.db.sqlalchemy.util.models import auto_table_args
from indico.util.string import format_repr
class CategoryPrincipal(PrincipalPermissionsMixin, db.Model):
__tablename__ = 'principals'
principal_backref_name = 'in_category_acls'
principal_for = 'Category'
unique_columns = ('category_id',)
allow_networks = True
allow_category_roles = True
@declared_attr
def __table_args__(cls):
return auto_table_args(cls, schema='categories')
#: The ID of the acl entry
id = db.Column(
db.Integer,
primary_key=True
)
#: The ID of the associated event
category_id = db.Column(
db.Integer,
db.ForeignKey('categories.categories.id'),
nullable=False,
index=True
)
# relationship backrefs:
# - category (Category.acl_entries)
def __repr__(self):
|
return format_repr(self, 'id', 'category_id', 'principal', read_access=False, full_access=False, permissions=[])
|
identifier_body
|
|
nedb.spec.js
|
'use strict';
const fs = require('fs')
, path = require('path')
, expect = require('chai').expect
, co = require('co')
, nedb = require('nedb')
, wrapper = require('co-nedb');
const sdeFixture = require('./fixtures/sde.json');
const sdeDbFile = path.join(__dirname, '../../lib/db/staticData.db');
const sdeMapFixture = require('./fixtures/sde_map.json');
const sdeMapDbFile = path.join(__dirname, '../../lib/db/universeData.db');
describe('CCP Item SDE', () => {
var db;
before(() => {
db = wrapper(new nedb({
filename: sdeDbFile,
autoload: true
}));
});
after(() => {
db = undefined;
});
it('should load', () => {
expect(db).to.be.an('object');
expect(db.filename).to.equal(sdeDbFile);
|
});
it('should let us query the collection', () => {
expect(db).to.respondTo('findOne');
});
it('should correctly find a thing', () => {
co(function * () {
let thing = yield db.findOne({ id: 583 });
return expect(thing).to.equal(sdeFixture);
});
});
});
describe('CCP Map SDE', () => {
var db;
before(() => {
db = wrapper(new nedb({
filename: sdeMapDbFile,
autoload: true
}));
});
after(() => {
db = undefined;
});
it('should load', () => {
expect(db).to.be.an('object');
expect(db.filename).to.equal(sdeMapDbFile);
});
it('should let us query the collection', () => {
expect(db).to.respondTo('findOne');
});
it('should correctly find a celestial thing', () => {
co(function * () {
let mapData = yield db.findOne({ itemID: 40151384 });
return expect(mapData).to.equal(sdeMapFixture);
});
});
});
|
random_line_split
|
|
easy-249.py
|
#! /usr/bin/env python3
'''
given a list of stock price ticks for the day, can you tell me what
trades I should make to maximize my gain within the constraints of the
market? Remember - buy low, sell high, and you can't sell before you
buy.
Sample Input
19.35 19.30 18.88 18.93 18.95 19.03 19.00 18.97 18.97 18.98
'''
import argparse
def parse_args():
parser = argparse.ArgumentParser(description='easy 249')
parser.add_argument('stock_prices', action='store', nargs='+',
help='prices of a given stock')
return parser.parse_args()
def stock(stock_prices):
buy_day = 0
max_profit = 0
max_buy = 0
max_sell = 0
for buy_day in range(len(stock_prices) - 2):
# maybe do a max(here)
|
print("max profit: %.2f from buy on day %d at %.2f sell on day %d at %.2f" %
(max_profit, max_buy, stock_prices[max_buy], max_sell, stock_prices[max_sell]))
if __name__ == '__main__':
args = parse_args()
stock([float(price) for price in args.stock_prices])
|
for sell_day in range(buy_day + 2, len(stock_prices)):
profit = stock_prices[sell_day] - stock_prices[buy_day]
if profit > max_profit:
max_profit = profit
max_buy = buy_day
max_sell = sell_day
|
conditional_block
|
easy-249.py
|
#! /usr/bin/env python3
'''
given a list of stock price ticks for the day, can you tell me what
trades I should make to maximize my gain within the constraints of the
market? Remember - buy low, sell high, and you can't sell before you
buy.
Sample Input
19.35 19.30 18.88 18.93 18.95 19.03 19.00 18.97 18.97 18.98
'''
import argparse
def parse_args():
parser = argparse.ArgumentParser(description='easy 249')
parser.add_argument('stock_prices', action='store', nargs='+',
help='prices of a given stock')
return parser.parse_args()
def stock(stock_prices):
|
if __name__ == '__main__':
args = parse_args()
stock([float(price) for price in args.stock_prices])
|
buy_day = 0
max_profit = 0
max_buy = 0
max_sell = 0
for buy_day in range(len(stock_prices) - 2):
# maybe do a max(here)
for sell_day in range(buy_day + 2, len(stock_prices)):
profit = stock_prices[sell_day] - stock_prices[buy_day]
if profit > max_profit:
max_profit = profit
max_buy = buy_day
max_sell = sell_day
print("max profit: %.2f from buy on day %d at %.2f sell on day %d at %.2f" %
(max_profit, max_buy, stock_prices[max_buy], max_sell, stock_prices[max_sell]))
|
identifier_body
|
easy-249.py
|
#! /usr/bin/env python3
'''
given a list of stock price ticks for the day, can you tell me what
trades I should make to maximize my gain within the constraints of the
market? Remember - buy low, sell high, and you can't sell before you
buy.
Sample Input
19.35 19.30 18.88 18.93 18.95 19.03 19.00 18.97 18.97 18.98
'''
import argparse
def parse_args():
parser = argparse.ArgumentParser(description='easy 249')
parser.add_argument('stock_prices', action='store', nargs='+',
help='prices of a given stock')
return parser.parse_args()
def stock(stock_prices):
buy_day = 0
max_profit = 0
max_buy = 0
max_sell = 0
for buy_day in range(len(stock_prices) - 2):
# maybe do a max(here)
for sell_day in range(buy_day + 2, len(stock_prices)):
profit = stock_prices[sell_day] - stock_prices[buy_day]
if profit > max_profit:
max_profit = profit
max_buy = buy_day
|
max_sell = sell_day
print("max profit: %.2f from buy on day %d at %.2f sell on day %d at %.2f" %
(max_profit, max_buy, stock_prices[max_buy], max_sell, stock_prices[max_sell]))
if __name__ == '__main__':
args = parse_args()
stock([float(price) for price in args.stock_prices])
|
random_line_split
|
|
easy-249.py
|
#! /usr/bin/env python3
'''
given a list of stock price ticks for the day, can you tell me what
trades I should make to maximize my gain within the constraints of the
market? Remember - buy low, sell high, and you can't sell before you
buy.
Sample Input
19.35 19.30 18.88 18.93 18.95 19.03 19.00 18.97 18.97 18.98
'''
import argparse
def
|
():
parser = argparse.ArgumentParser(description='easy 249')
parser.add_argument('stock_prices', action='store', nargs='+',
help='prices of a given stock')
return parser.parse_args()
def stock(stock_prices):
buy_day = 0
max_profit = 0
max_buy = 0
max_sell = 0
for buy_day in range(len(stock_prices) - 2):
# maybe do a max(here)
for sell_day in range(buy_day + 2, len(stock_prices)):
profit = stock_prices[sell_day] - stock_prices[buy_day]
if profit > max_profit:
max_profit = profit
max_buy = buy_day
max_sell = sell_day
print("max profit: %.2f from buy on day %d at %.2f sell on day %d at %.2f" %
(max_profit, max_buy, stock_prices[max_buy], max_sell, stock_prices[max_sell]))
if __name__ == '__main__':
args = parse_args()
stock([float(price) for price in args.stock_prices])
|
parse_args
|
identifier_name
|
kvstore.rs
|
//! test was move from base (it could not compile in base since its trait just change
//! (bidirectional dependency))
//! TODO seems pretty useless : remove??
use keyval::KeyVal;
use node::{Node,NodeID};
use peer::{Peer,Shadow};
use std::cmp::Eq;
use std::cmp::PartialEq;
use keyval::{Attachment,SettableAttachment};
use rustc_serialize::{Encodable, Encoder, Decoder};
// Testing only nodeK, with key different from id
#[derive(RustcDecodable,RustcEncodable,Debug,Clone)]
struct NodeK2(Node,String);
impl Eq for NodeK2 {}
impl PartialEq<NodeK2> for NodeK2 {
fn eq(&self, other: &NodeK2) -> bool {
other.0 == self.0 && other.1 == self.1
}
}
impl KeyVal for NodeK2 {
type Key = String;
fn get_key(&self) -> NodeID {
self.1.clone()
}
/*
fn get_key_ref<'a>(&'a self) -> &'a NodeID {
&self.1
}*/
noattachment!();
}
impl SettableAttachment for NodeK2 { }
impl Peer for NodeK2 {
type Address = <Node as Peer>::Address;
type Shadow = <Node as Peer>::Shadow;
#[inline]
fn get_address(&self) -> &<Node as Peer>::Address {
self.0.get_address()
}
#[inline]
fn get_shadower (&self, write : bool) -> Self::Shadow {
self.0.get_shadower(write)
}
fn
|
(&self) -> <Self::Shadow as Shadow>::ShadowMode {
self.0.default_auth_mode()
}
fn default_message_mode(&self) -> <Self::Shadow as Shadow>::ShadowMode {
self.0.default_message_mode()
}
fn default_header_mode(&self) -> <Self::Shadow as Shadow>::ShadowMode {
self.0.default_header_mode()
}
}
|
default_auth_mode
|
identifier_name
|
kvstore.rs
|
//! test was move from base (it could not compile in base since its trait just change
//! (bidirectional dependency))
//! TODO seems pretty useless : remove??
use keyval::KeyVal;
use node::{Node,NodeID};
use peer::{Peer,Shadow};
use std::cmp::Eq;
use std::cmp::PartialEq;
|
use rustc_serialize::{Encodable, Encoder, Decoder};
// Testing only nodeK, with key different from id
#[derive(RustcDecodable,RustcEncodable,Debug,Clone)]
struct NodeK2(Node,String);
impl Eq for NodeK2 {}
impl PartialEq<NodeK2> for NodeK2 {
fn eq(&self, other: &NodeK2) -> bool {
other.0 == self.0 && other.1 == self.1
}
}
impl KeyVal for NodeK2 {
type Key = String;
fn get_key(&self) -> NodeID {
self.1.clone()
}
/*
fn get_key_ref<'a>(&'a self) -> &'a NodeID {
&self.1
}*/
noattachment!();
}
impl SettableAttachment for NodeK2 { }
impl Peer for NodeK2 {
type Address = <Node as Peer>::Address;
type Shadow = <Node as Peer>::Shadow;
#[inline]
fn get_address(&self) -> &<Node as Peer>::Address {
self.0.get_address()
}
#[inline]
fn get_shadower (&self, write : bool) -> Self::Shadow {
self.0.get_shadower(write)
}
fn default_auth_mode(&self) -> <Self::Shadow as Shadow>::ShadowMode {
self.0.default_auth_mode()
}
fn default_message_mode(&self) -> <Self::Shadow as Shadow>::ShadowMode {
self.0.default_message_mode()
}
fn default_header_mode(&self) -> <Self::Shadow as Shadow>::ShadowMode {
self.0.default_header_mode()
}
}
|
use keyval::{Attachment,SettableAttachment};
|
random_line_split
|
shared.module.ts
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ReactiveFormsModule } from '@angular/forms';
import { RouterModule } from '@angular/router';
import { MaterialModule } from '@app/material';
import { AboutDialogComponent } from './components/footer/about-dialog/about-dialog.component'; // remove
import { FooterComponent } from './components/footer/footer.component'; // remove
import { HeaderComponent } from './components/header/header.component'; // remove
import { PageComponent } from './components/page/page.component';
import { SidenavComponent } from './components/sidenav/sidenav.component';
import { CollapseDirective } from './directives/collapse/collapse.directive';
|
imports: [
CommonModule,
ReactiveFormsModule,
RouterModule,
MaterialModule
],
declarations: [
AboutDialogComponent,
FooterComponent,
HeaderComponent,
PageComponent,
SidenavComponent,
CollapseDirective,
CircularJsonPipe
],
entryComponents: [
AboutDialogComponent
],
exports: [
ReactiveFormsModule,
RouterModule,
MaterialModule,
FooterComponent,
HeaderComponent,
PageComponent,
SidenavComponent,
CollapseDirective,
CircularJsonPipe
]
})
export class SharedModule { }
|
import { CircularJsonPipe } from './pipes/circular-json/circular-json.pipe';
@NgModule({
|
random_line_split
|
shared.module.ts
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { ReactiveFormsModule } from '@angular/forms';
import { RouterModule } from '@angular/router';
import { MaterialModule } from '@app/material';
import { AboutDialogComponent } from './components/footer/about-dialog/about-dialog.component'; // remove
import { FooterComponent } from './components/footer/footer.component'; // remove
import { HeaderComponent } from './components/header/header.component'; // remove
import { PageComponent } from './components/page/page.component';
import { SidenavComponent } from './components/sidenav/sidenav.component';
import { CollapseDirective } from './directives/collapse/collapse.directive';
import { CircularJsonPipe } from './pipes/circular-json/circular-json.pipe';
@NgModule({
imports: [
CommonModule,
ReactiveFormsModule,
RouterModule,
MaterialModule
],
declarations: [
AboutDialogComponent,
FooterComponent,
HeaderComponent,
PageComponent,
SidenavComponent,
CollapseDirective,
CircularJsonPipe
],
entryComponents: [
AboutDialogComponent
],
exports: [
ReactiveFormsModule,
RouterModule,
MaterialModule,
FooterComponent,
HeaderComponent,
PageComponent,
SidenavComponent,
CollapseDirective,
CircularJsonPipe
]
})
export class
|
{ }
|
SharedModule
|
identifier_name
|
fms.py
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class
|
(AWSProperty):
props = {
'ACCOUNT': ([basestring], False),
}
class Policy(AWSObject):
resource_type = "AWS::FMS::Policy"
props = {
'DeleteAllPolicyResources': (boolean, False),
'ExcludeMap': (IEMap, False),
'ExcludeResourceTags': (boolean, True),
'IncludeMap': (IEMap, False),
'PolicyName': (basestring, True),
'RemediationEnabled': (boolean, True),
'ResourceTags': (Tags, False),
'ResourceType': (basestring, True),
'ResourceTypeList': ([basestring], True),
'SecurityServicePolicyData': (json_checker, True),
'Tags': (Tags, False),
}
class NotificationChannel(AWSObject):
resource_type = "AWS::FMS::NotificationChannel"
props = {
'SnsRoleName': (basestring, True),
'SnsTopicArn': (basestring, True),
}
|
IEMap
|
identifier_name
|
fms.py
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
|
class IEMap(AWSProperty):
props = {
'ACCOUNT': ([basestring], False),
}
class Policy(AWSObject):
resource_type = "AWS::FMS::Policy"
props = {
'DeleteAllPolicyResources': (boolean, False),
'ExcludeMap': (IEMap, False),
'ExcludeResourceTags': (boolean, True),
'IncludeMap': (IEMap, False),
'PolicyName': (basestring, True),
'RemediationEnabled': (boolean, True),
'ResourceTags': (Tags, False),
'ResourceType': (basestring, True),
'ResourceTypeList': ([basestring], True),
'SecurityServicePolicyData': (json_checker, True),
'Tags': (Tags, False),
}
class NotificationChannel(AWSObject):
resource_type = "AWS::FMS::NotificationChannel"
props = {
'SnsRoleName': (basestring, True),
'SnsTopicArn': (basestring, True),
}
|
random_line_split
|
|
fms.py
|
# Copyright (c) 2012-2020, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSProperty, AWSObject, Tags
from .validators import json_checker, boolean
class IEMap(AWSProperty):
|
class Policy(AWSObject):
resource_type = "AWS::FMS::Policy"
props = {
'DeleteAllPolicyResources': (boolean, False),
'ExcludeMap': (IEMap, False),
'ExcludeResourceTags': (boolean, True),
'IncludeMap': (IEMap, False),
'PolicyName': (basestring, True),
'RemediationEnabled': (boolean, True),
'ResourceTags': (Tags, False),
'ResourceType': (basestring, True),
'ResourceTypeList': ([basestring], True),
'SecurityServicePolicyData': (json_checker, True),
'Tags': (Tags, False),
}
class NotificationChannel(AWSObject):
resource_type = "AWS::FMS::NotificationChannel"
props = {
'SnsRoleName': (basestring, True),
'SnsTopicArn': (basestring, True),
}
|
props = {
'ACCOUNT': ([basestring], False),
}
|
identifier_body
|
_1_advanced_lighting.rs
|
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
extern crate glfw;
use self::glfw::{Context, Key, Action};
extern crate gl;
use self::gl::types::*;
use std::ptr;
use std::mem;
use std::os::raw::c_void;
use std::ffi::CStr;
use common::{process_events, loadTexture};
use shader::Shader;
use camera::Camera;
use camera::Camera_Movement::*;
use cgmath::{Matrix4, vec3, Deg, perspective, Point3};
use cgmath::prelude::*;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn main_5_1() {
let mut blinn = false;
let mut blinnKeyPressed = false;
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, planeVBO, planeVAO, floorTexture) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
gl::Enable(gl::BLEND);
gl::BlendFunc(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
// build and compile shaders
// ------------------------------------
let shader = Shader::new(
"src/_5_advanced_lighting/shaders/1.advanced_lighting.vs",
"src/_5_advanced_lighting/shaders/1.advanced_lighting.fs");
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
let planeVertices: [f32; 48] = [
// positions // normals // texcoords
10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
-10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 0.0, 0.0,
-10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
-10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 10.0, 10.0
];
// plane VAO
let (mut planeVAO, mut planeVBO) = (0, 0);
gl::GenVertexArrays(1, &mut planeVAO);
gl::GenBuffers(1, &mut planeVBO);
gl::BindVertexArray(planeVAO);
gl::BindBuffer(gl::ARRAY_BUFFER, planeVBO);
gl::BufferData(gl::ARRAY_BUFFER,
(planeVertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
&planeVertices[0] as *const f32 as *const c_void,
gl::STATIC_DRAW);
gl::EnableVertexAttribArray(0);
let stride = 8 * mem::size_of::<GLfloat>() as GLsizei;
gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, stride, ptr::null());
gl::EnableVertexAttribArray(1);
gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (3 * mem::size_of::<GLfloat>()) as *const c_void);
gl::EnableVertexAttribArray(2);
gl::VertexAttribPointer(2, 2, gl::FLOAT, gl::FALSE, stride, (6 * mem::size_of::<GLfloat>()) as *const c_void);
gl::BindVertexArray(0);
// load textures
// -------------
let floorTexture = loadTexture("resources/textures/wood.png");
// shader configuration
// --------------------
shader.useProgram();
shader.setInt(c_str!("texture1"), 0);
(shader, planeVBO, planeVAO, floorTexture)
};
// lighting info
// -------------
let lightPos = vec3(0.0, 0.0, 0.0);
// render loop
// -----------
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
// input
// -----
processInput(&mut window, deltaTime, &mut camera, &mut blinn, &mut blinnKeyPressed);
// render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
// draw objects
shader.useProgram();
let projection: Matrix4<f32> = perspective(Deg(camera.Zoom), SCR_WIDTH as f32 / SCR_HEIGHT as f32 , 0.1, 100.0);
let view = camera.GetViewMatrix();
shader.setMat4(c_str!("projection"), &projection);
shader.setMat4(c_str!("view"), &view);
// set light uniforms
shader.setVector3(c_str!("viewPos"), &camera.Position.to_vec());
shader.setVector3(c_str!("lightPos"), &lightPos);
shader.setInt(c_str!("blinn"), blinn as i32);
// floor
gl::BindVertexArray(planeVAO);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, floorTexture);
gl::DrawArrays(gl::TRIANGLES, 0, 6);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
// optional: de-allocate all resources once they've outlived their purpose:
// ------------------------------------------------------------------------
unsafe {
gl::DeleteVertexArrays(1, &planeVAO);
gl::DeleteBuffers(1, &planeVBO);
}
}
// NOTE: not the same version as in common.rs
pub fn processInput(window: &mut glfw::Window, deltaTime: f32, camera: &mut Camera, blinn: &mut bool, blinnKeyPressed: &mut bool) {
if window.get_key(Key::Escape) == Action::Press {
window.set_should_close(true)
}
if window.get_key(Key::W) == Action::Press {
camera.ProcessKeyboard(FORWARD, deltaTime);
}
if window.get_key(Key::S) == Action::Press {
camera.ProcessKeyboard(BACKWARD, deltaTime);
}
if window.get_key(Key::A) == Action::Press {
camera.ProcessKeyboard(LEFT, deltaTime);
}
if window.get_key(Key::D) == Action::Press {
camera.ProcessKeyboard(RIGHT, deltaTime);
}
if window.get_key(Key::B) == Action::Press && !(*blinnKeyPressed) {
*blinn = !(*blinn);
*blinnKeyPressed = true;
println!("{}", if *blinn { "Blinn-Phong" } else { "Phong" })
}
if window.get_key(Key::B) == Action::Release
|
}
|
{
*blinnKeyPressed = false;
}
|
conditional_block
|
_1_advanced_lighting.rs
|
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
extern crate glfw;
use self::glfw::{Context, Key, Action};
extern crate gl;
use self::gl::types::*;
use std::ptr;
use std::mem;
use std::os::raw::c_void;
use std::ffi::CStr;
use common::{process_events, loadTexture};
use shader::Shader;
use camera::Camera;
use camera::Camera_Movement::*;
use cgmath::{Matrix4, vec3, Deg, perspective, Point3};
use cgmath::prelude::*;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn main_5_1()
|
// NOTE: not the same version as in common.rs
pub fn processInput(window: &mut glfw::Window, deltaTime: f32, camera: &mut Camera, blinn: &mut bool, blinnKeyPressed: &mut bool) {
if window.get_key(Key::Escape) == Action::Press {
window.set_should_close(true)
}
if window.get_key(Key::W) == Action::Press {
camera.ProcessKeyboard(FORWARD, deltaTime);
}
if window.get_key(Key::S) == Action::Press {
camera.ProcessKeyboard(BACKWARD, deltaTime);
}
if window.get_key(Key::A) == Action::Press {
camera.ProcessKeyboard(LEFT, deltaTime);
}
if window.get_key(Key::D) == Action::Press {
camera.ProcessKeyboard(RIGHT, deltaTime);
}
if window.get_key(Key::B) == Action::Press && !(*blinnKeyPressed) {
*blinn = !(*blinn);
*blinnKeyPressed = true;
println!("{}", if *blinn { "Blinn-Phong" } else { "Phong" })
}
if window.get_key(Key::B) == Action::Release {
*blinnKeyPressed = false;
}
}
|
{
let mut blinn = false;
let mut blinnKeyPressed = false;
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, planeVBO, planeVAO, floorTexture) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
gl::Enable(gl::BLEND);
gl::BlendFunc(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
// build and compile shaders
// ------------------------------------
let shader = Shader::new(
"src/_5_advanced_lighting/shaders/1.advanced_lighting.vs",
"src/_5_advanced_lighting/shaders/1.advanced_lighting.fs");
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
let planeVertices: [f32; 48] = [
// positions // normals // texcoords
10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
-10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 0.0, 0.0,
-10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
-10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 10.0, 10.0
];
// plane VAO
let (mut planeVAO, mut planeVBO) = (0, 0);
gl::GenVertexArrays(1, &mut planeVAO);
gl::GenBuffers(1, &mut planeVBO);
gl::BindVertexArray(planeVAO);
gl::BindBuffer(gl::ARRAY_BUFFER, planeVBO);
gl::BufferData(gl::ARRAY_BUFFER,
(planeVertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
&planeVertices[0] as *const f32 as *const c_void,
gl::STATIC_DRAW);
gl::EnableVertexAttribArray(0);
let stride = 8 * mem::size_of::<GLfloat>() as GLsizei;
gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, stride, ptr::null());
gl::EnableVertexAttribArray(1);
gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (3 * mem::size_of::<GLfloat>()) as *const c_void);
gl::EnableVertexAttribArray(2);
gl::VertexAttribPointer(2, 2, gl::FLOAT, gl::FALSE, stride, (6 * mem::size_of::<GLfloat>()) as *const c_void);
gl::BindVertexArray(0);
// load textures
// -------------
let floorTexture = loadTexture("resources/textures/wood.png");
// shader configuration
// --------------------
shader.useProgram();
shader.setInt(c_str!("texture1"), 0);
(shader, planeVBO, planeVAO, floorTexture)
};
// lighting info
// -------------
let lightPos = vec3(0.0, 0.0, 0.0);
// render loop
// -----------
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
// input
// -----
processInput(&mut window, deltaTime, &mut camera, &mut blinn, &mut blinnKeyPressed);
// render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
// draw objects
shader.useProgram();
let projection: Matrix4<f32> = perspective(Deg(camera.Zoom), SCR_WIDTH as f32 / SCR_HEIGHT as f32 , 0.1, 100.0);
let view = camera.GetViewMatrix();
shader.setMat4(c_str!("projection"), &projection);
shader.setMat4(c_str!("view"), &view);
// set light uniforms
shader.setVector3(c_str!("viewPos"), &camera.Position.to_vec());
shader.setVector3(c_str!("lightPos"), &lightPos);
shader.setInt(c_str!("blinn"), blinn as i32);
// floor
gl::BindVertexArray(planeVAO);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, floorTexture);
gl::DrawArrays(gl::TRIANGLES, 0, 6);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
// optional: de-allocate all resources once they've outlived their purpose:
// ------------------------------------------------------------------------
unsafe {
gl::DeleteVertexArrays(1, &planeVAO);
gl::DeleteBuffers(1, &planeVBO);
}
}
|
identifier_body
|
_1_advanced_lighting.rs
|
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
extern crate glfw;
use self::glfw::{Context, Key, Action};
extern crate gl;
use self::gl::types::*;
use std::ptr;
use std::mem;
use std::os::raw::c_void;
use std::ffi::CStr;
use common::{process_events, loadTexture};
use shader::Shader;
use camera::Camera;
use camera::Camera_Movement::*;
use cgmath::{Matrix4, vec3, Deg, perspective, Point3};
use cgmath::prelude::*;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
pub fn main_5_1() {
let mut blinn = false;
let mut blinnKeyPressed = false;
let mut camera = Camera {
Position: Point3::new(0.0, 0.0, 3.0),
..Camera::default()
};
let mut firstMouse = true;
let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
// timing
let mut deltaTime: f32; // time between current frame and last frame
let mut lastFrame: f32 = 0.0;
// glfw: initialize and configure
// ------------------------------
let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
#[cfg(target_os = "macos")]
glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
// glfw window creation
// --------------------
let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
.expect("Failed to create GLFW window");
window.make_current();
window.set_framebuffer_size_polling(true);
window.set_cursor_pos_polling(true);
window.set_scroll_polling(true);
// tell GLFW to capture our mouse
window.set_cursor_mode(glfw::CursorMode::Disabled);
// gl: load all OpenGL function pointers
// ---------------------------------------
gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
let (shader, planeVBO, planeVAO, floorTexture) = unsafe {
// configure global opengl state
// -----------------------------
gl::Enable(gl::DEPTH_TEST);
gl::Enable(gl::BLEND);
gl::BlendFunc(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
// build and compile shaders
// ------------------------------------
let shader = Shader::new(
"src/_5_advanced_lighting/shaders/1.advanced_lighting.vs",
"src/_5_advanced_lighting/shaders/1.advanced_lighting.fs");
// set up vertex data (and buffer(s)) and configure vertex attributes
// ------------------------------------------------------------------
let planeVertices: [f32; 48] = [
// positions // normals // texcoords
10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
-10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 0.0, 0.0,
-10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
-10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 10.0, 10.0
];
// plane VAO
let (mut planeVAO, mut planeVBO) = (0, 0);
gl::GenVertexArrays(1, &mut planeVAO);
gl::GenBuffers(1, &mut planeVBO);
gl::BindVertexArray(planeVAO);
gl::BindBuffer(gl::ARRAY_BUFFER, planeVBO);
gl::BufferData(gl::ARRAY_BUFFER,
(planeVertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
&planeVertices[0] as *const f32 as *const c_void,
gl::STATIC_DRAW);
gl::EnableVertexAttribArray(0);
let stride = 8 * mem::size_of::<GLfloat>() as GLsizei;
gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, stride, ptr::null());
gl::EnableVertexAttribArray(1);
gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (3 * mem::size_of::<GLfloat>()) as *const c_void);
gl::EnableVertexAttribArray(2);
gl::VertexAttribPointer(2, 2, gl::FLOAT, gl::FALSE, stride, (6 * mem::size_of::<GLfloat>()) as *const c_void);
gl::BindVertexArray(0);
// load textures
// -------------
let floorTexture = loadTexture("resources/textures/wood.png");
// shader configuration
// --------------------
shader.useProgram();
shader.setInt(c_str!("texture1"), 0);
(shader, planeVBO, planeVAO, floorTexture)
};
// lighting info
// -------------
|
while !window.should_close() {
// per-frame time logic
// --------------------
let currentFrame = glfw.get_time() as f32;
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// events
// -----
process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
// input
// -----
processInput(&mut window, deltaTime, &mut camera, &mut blinn, &mut blinnKeyPressed);
// render
// ------
unsafe {
gl::ClearColor(0.1, 0.1, 0.1, 1.0);
gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
// draw objects
shader.useProgram();
let projection: Matrix4<f32> = perspective(Deg(camera.Zoom), SCR_WIDTH as f32 / SCR_HEIGHT as f32 , 0.1, 100.0);
let view = camera.GetViewMatrix();
shader.setMat4(c_str!("projection"), &projection);
shader.setMat4(c_str!("view"), &view);
// set light uniforms
shader.setVector3(c_str!("viewPos"), &camera.Position.to_vec());
shader.setVector3(c_str!("lightPos"), &lightPos);
shader.setInt(c_str!("blinn"), blinn as i32);
// floor
gl::BindVertexArray(planeVAO);
gl::ActiveTexture(gl::TEXTURE0);
gl::BindTexture(gl::TEXTURE_2D, floorTexture);
gl::DrawArrays(gl::TRIANGLES, 0, 6);
}
// glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
// -------------------------------------------------------------------------------
window.swap_buffers();
glfw.poll_events();
}
// optional: de-allocate all resources once they've outlived their purpose:
// ------------------------------------------------------------------------
unsafe {
gl::DeleteVertexArrays(1, &planeVAO);
gl::DeleteBuffers(1, &planeVBO);
}
}
// NOTE: not the same version as in common.rs
pub fn processInput(window: &mut glfw::Window, deltaTime: f32, camera: &mut Camera, blinn: &mut bool, blinnKeyPressed: &mut bool) {
    // Quit on Escape.
    if window.get_key(Key::Escape) == Action::Press {
        window.set_should_close(true)
    }
    // Camera movement: each direction is checked independently so that
    // combinations (e.g. W+A held together) move diagonally.
    if window.get_key(Key::W) == Action::Press {
        camera.ProcessKeyboard(FORWARD, deltaTime);
    }
    if window.get_key(Key::S) == Action::Press {
        camera.ProcessKeyboard(BACKWARD, deltaTime);
    }
    if window.get_key(Key::A) == Action::Press {
        camera.ProcessKeyboard(LEFT, deltaTime);
    }
    if window.get_key(Key::D) == Action::Press {
        camera.ProcessKeyboard(RIGHT, deltaTime);
    }
    // Edge-triggered toggle between Phong and Blinn-Phong shading on B:
    // `blinnKeyPressed` latches the key so holding B only flips the mode once.
    match window.get_key(Key::B) {
        Action::Press if !*blinnKeyPressed => {
            *blinn = !*blinn;
            *blinnKeyPressed = true;
            println!("{}", if *blinn { "Blinn-Phong" } else { "Phong" })
        }
        Action::Release => *blinnKeyPressed = false,
        _ => {}
    }
}
|
let lightPos = vec3(0.0, 0.0, 0.0);
// render loop
// -----------
|
random_line_split
|
_1_advanced_lighting.rs
|
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
extern crate glfw;
use self::glfw::{Context, Key, Action};
extern crate gl;
use self::gl::types::*;
use std::ptr;
use std::mem;
use std::os::raw::c_void;
use std::ffi::CStr;
use common::{process_events, loadTexture};
use shader::Shader;
use camera::Camera;
use camera::Camera_Movement::*;
use cgmath::{Matrix4, vec3, Deg, perspective, Point3};
use cgmath::prelude::*;
// settings
const SCR_WIDTH: u32 = 1280;
const SCR_HEIGHT: u32 = 720;
/// Advanced-lighting demo 5.1: renders a textured wood floor and lets the
/// user toggle between Phong and Blinn-Phong specular shading with the B key
/// (see `processInput`). Blocks in a render loop until the window is closed.
pub fn main_5_1() {
    // shading-mode toggle state shared with processInput()
    let mut blinn = false;
    let mut blinnKeyPressed = false;
    let mut camera = Camera {
        Position: Point3::new(0.0, 0.0, 3.0),
        ..Camera::default()
    };
    // mouse-look state: start the cursor at the window centre
    let mut firstMouse = true;
    let mut lastX: f32 = SCR_WIDTH as f32 / 2.0;
    let mut lastY: f32 = SCR_HEIGHT as f32 / 2.0;
    // timing
    let mut deltaTime: f32; // time between current frame and last frame
    let mut lastFrame: f32 = 0.0;
    // glfw: initialize and configure
    // ------------------------------
    let mut glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();
    glfw.window_hint(glfw::WindowHint::ContextVersion(3, 3));
    glfw.window_hint(glfw::WindowHint::OpenGlProfile(glfw::OpenGlProfileHint::Core));
    #[cfg(target_os = "macos")]
    glfw.window_hint(glfw::WindowHint::OpenGlForwardCompat(true));
    // glfw window creation
    // --------------------
    let (mut window, events) = glfw.create_window(SCR_WIDTH, SCR_HEIGHT, "LearnOpenGL", glfw::WindowMode::Windowed)
        .expect("Failed to create GLFW window");
    window.make_current();
    window.set_framebuffer_size_polling(true);
    window.set_cursor_pos_polling(true);
    window.set_scroll_polling(true);
    // tell GLFW to capture our mouse
    window.set_cursor_mode(glfw::CursorMode::Disabled);
    // gl: load all OpenGL function pointers
    // ---------------------------------------
    gl::load_with(|symbol| window.get_proc_address(symbol) as *const _);
    // One-time GL setup; unsafe because raw GL calls require a current context.
    let (shader, planeVBO, planeVAO, floorTexture) = unsafe {
        // configure global opengl state
        // -----------------------------
        gl::Enable(gl::DEPTH_TEST);
        gl::Enable(gl::BLEND);
        gl::BlendFunc(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);
        // build and compile shaders
        // ------------------------------------
        let shader = Shader::new(
            "src/_5_advanced_lighting/shaders/1.advanced_lighting.vs",
            "src/_5_advanced_lighting/shaders/1.advanced_lighting.fs");
        // set up vertex data (and buffer(s)) and configure vertex attributes
        // ------------------------------------------------------------------
        // 6 vertices x 8 floats each (2 triangles forming a 20x20 ground quad)
        let planeVertices: [f32; 48] = [
            // positions // normals // texcoords
            10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
            -10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 0.0, 0.0,
            -10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
            10.0, -0.5, 10.0, 0.0, 1.0, 0.0, 10.0, 0.0,
            -10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 0.0, 10.0,
            10.0, -0.5, -10.0, 0.0, 1.0, 0.0, 10.0, 10.0
        ];
        // plane VAO
        let (mut planeVAO, mut planeVBO) = (0, 0);
        gl::GenVertexArrays(1, &mut planeVAO);
        gl::GenBuffers(1, &mut planeVBO);
        gl::BindVertexArray(planeVAO);
        gl::BindBuffer(gl::ARRAY_BUFFER, planeVBO);
        gl::BufferData(gl::ARRAY_BUFFER,
                       (planeVertices.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
                       &planeVertices[0] as *const f32 as *const c_void,
                       gl::STATIC_DRAW);
        // attribute layout: 0 = position (3f), 1 = normal (3f), 2 = texcoord (2f)
        gl::EnableVertexAttribArray(0);
        let stride = 8 * mem::size_of::<GLfloat>() as GLsizei;
        gl::VertexAttribPointer(0, 3, gl::FLOAT, gl::FALSE, stride, ptr::null());
        gl::EnableVertexAttribArray(1);
        gl::VertexAttribPointer(1, 3, gl::FLOAT, gl::FALSE, stride, (3 * mem::size_of::<GLfloat>()) as *const c_void);
        gl::EnableVertexAttribArray(2);
        gl::VertexAttribPointer(2, 2, gl::FLOAT, gl::FALSE, stride, (6 * mem::size_of::<GLfloat>()) as *const c_void);
        gl::BindVertexArray(0);
        // load textures
        // -------------
        let floorTexture = loadTexture("resources/textures/wood.png");
        // shader configuration
        // --------------------
        shader.useProgram();
        shader.setInt(c_str!("texture1"), 0);
        (shader, planeVBO, planeVAO, floorTexture)
    };
    // lighting info
    // -------------
    // single point light at the world origin
    let lightPos = vec3(0.0, 0.0, 0.0);
    // render loop
    // -----------
    while !window.should_close() {
        // per-frame time logic
        // --------------------
        let currentFrame = glfw.get_time() as f32;
        deltaTime = currentFrame - lastFrame;
        lastFrame = currentFrame;
        // events
        // -----
        process_events(&events, &mut firstMouse, &mut lastX, &mut lastY, &mut camera);
        // input
        // -----
        processInput(&mut window, deltaTime, &mut camera, &mut blinn, &mut blinnKeyPressed);
        // render
        // ------
        unsafe {
            gl::ClearColor(0.1, 0.1, 0.1, 1.0);
            gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);
            // draw objects
            shader.useProgram();
            let projection: Matrix4<f32> = perspective(Deg(camera.Zoom), SCR_WIDTH as f32 / SCR_HEIGHT as f32 , 0.1, 100.0);
            let view = camera.GetViewMatrix();
            shader.setMat4(c_str!("projection"), &projection);
            shader.setMat4(c_str!("view"), &view);
            // set light uniforms
            shader.setVector3(c_str!("viewPos"), &camera.Position.to_vec());
            shader.setVector3(c_str!("lightPos"), &lightPos);
            // the fragment shader branches on this flag to pick the specular model
            shader.setInt(c_str!("blinn"), blinn as i32);
            // floor
            gl::BindVertexArray(planeVAO);
            gl::ActiveTexture(gl::TEXTURE0);
            gl::BindTexture(gl::TEXTURE_2D, floorTexture);
            gl::DrawArrays(gl::TRIANGLES, 0, 6);
        }
        // glfw: swap buffers and poll IO events (keys pressed/released, mouse moved etc.)
        // -------------------------------------------------------------------------------
        window.swap_buffers();
        glfw.poll_events();
    }
    // optional: de-allocate all resources once they've outlived their purpose:
    // ------------------------------------------------------------------------
    unsafe {
        gl::DeleteVertexArrays(1, &planeVAO);
        gl::DeleteBuffers(1, &planeVBO);
    }
}
// NOTE: not the same version as in common.rs
pub fn
|
(window: &mut glfw::Window, deltaTime: f32, camera: &mut Camera, blinn: &mut bool, blinnKeyPressed: &mut bool) {
if window.get_key(Key::Escape) == Action::Press {
window.set_should_close(true)
}
if window.get_key(Key::W) == Action::Press {
camera.ProcessKeyboard(FORWARD, deltaTime);
}
if window.get_key(Key::S) == Action::Press {
camera.ProcessKeyboard(BACKWARD, deltaTime);
}
if window.get_key(Key::A) == Action::Press {
camera.ProcessKeyboard(LEFT, deltaTime);
}
if window.get_key(Key::D) == Action::Press {
camera.ProcessKeyboard(RIGHT, deltaTime);
}
if window.get_key(Key::B) == Action::Press && !(*blinnKeyPressed) {
*blinn = !(*blinn);
*blinnKeyPressed = true;
println!("{}", if *blinn { "Blinn-Phong" } else { "Phong" })
}
if window.get_key(Key::B) == Action::Release {
*blinnKeyPressed = false;
}
}
|
processInput
|
identifier_name
|
rename.py
|
import os.path
import os
import random
def rename(src, dst):
"Atomic rename on windows."
# This is taken from mercurial
try:
os.rename(src, dst)
except OSError, err:
# If dst exists, rename will fail on windows, and we cannot
# unlink an opened file. Instead, the destination is moved to
# a temporary location if it already exists.
def
|
(prefix):
for i in range(5):
fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
if not os.path.exists(fn):
return fn
raise IOError, (errno.EEXIST, "No usable temporary filename found")
temp = tempname(dst)
os.rename(dst, temp)
try:
os.unlink(temp)
except:
# Some rude AV-scanners on Windows may cause the unlink to
# fail. Not aborting here just leaks the temp file, whereas
# aborting at this point may leave serious inconsistencies.
# Ideally, we would notify the user here.
pass
os.rename(src, dst)
|
tempname
|
identifier_name
|
rename.py
|
import os.path
import os
import random
def rename(src, dst):
"Atomic rename on windows."
# This is taken from mercurial
try:
os.rename(src, dst)
except OSError, err:
|
def tempname(prefix):
for i in range(5):
fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
if not os.path.exists(fn):
return fn
raise IOError, (errno.EEXIST, "No usable temporary filename found")
temp = tempname(dst)
os.rename(dst, temp)
try:
os.unlink(temp)
except:
# Some rude AV-scanners on Windows may cause the unlink to
# fail. Not aborting here just leaks the temp file, whereas
# aborting at this point may leave serious inconsistencies.
# Ideally, we would notify the user here.
pass
os.rename(src, dst)
|
# If dst exists, rename will fail on windows, and we cannot
# unlink an opened file. Instead, the destination is moved to
# a temporary location if it already exists.
|
random_line_split
|
rename.py
|
import os.path
import os
import random
def rename(src, dst):
"Atomic rename on windows."
# This is taken from mercurial
try:
os.rename(src, dst)
except OSError, err:
# If dst exists, rename will fail on windows, and we cannot
# unlink an opened file. Instead, the destination is moved to
# a temporary location if it already exists.
def tempname(prefix):
for i in range(5):
fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
if not os.path.exists(fn):
|
raise IOError, (errno.EEXIST, "No usable temporary filename found")
temp = tempname(dst)
os.rename(dst, temp)
try:
os.unlink(temp)
except:
# Some rude AV-scanners on Windows may cause the unlink to
# fail. Not aborting here just leaks the temp file, whereas
# aborting at this point may leave serious inconsistencies.
# Ideally, we would notify the user here.
pass
os.rename(src, dst)
|
return fn
|
conditional_block
|
rename.py
|
import os.path
import os
import random
def rename(src, dst):
|
"Atomic rename on windows."
# This is taken from mercurial
try:
os.rename(src, dst)
except OSError, err:
# If dst exists, rename will fail on windows, and we cannot
# unlink an opened file. Instead, the destination is moved to
# a temporary location if it already exists.
def tempname(prefix):
for i in range(5):
fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
if not os.path.exists(fn):
return fn
raise IOError, (errno.EEXIST, "No usable temporary filename found")
temp = tempname(dst)
os.rename(dst, temp)
try:
os.unlink(temp)
except:
# Some rude AV-scanners on Windows may cause the unlink to
# fail. Not aborting here just leaks the temp file, whereas
# aborting at this point may leave serious inconsistencies.
# Ideally, we would notify the user here.
pass
os.rename(src, dst)
|
identifier_body
|
|
directive.js
|
import 'velocity-animate';
import 'velocity-animate/velocity.ui';
import templateUrl from './template.html';
import ngModule from '../../module';
class AvLoaderController {
constructor($element) {
this.av = { $element };
this.active = false;
}
start()
|
animate() {
const self = this;
this.av.$element
.find('.loading-bullet')
.velocity('transition.slideRightIn', { stagger: 250 })
.velocity({ opacity: 0 }, {
delay: 750,
duration: 500,
complete() {
if (self.active) {
setTimeout( () => { self.animate() }, 500);
} else {
self.endAnimation();
}
}
});
}
endAnimation = function() {
this.av.$element.find('.loading-bullet').velocity('stop', true);
this.av.$element.removeData();
}
$destroy() {
this.active = false;
}
$postLink() {
this.start();
}
}
ngModule.directive('avLoader', () => {
return {
restrict: 'AE',
replace: true,
controller: AvLoaderController,
templateUrl
};
});
export default ngModule;
|
{
this.active = true;
this.animate();
}
|
identifier_body
|
directive.js
|
import 'velocity-animate';
import 'velocity-animate/velocity.ui';
import templateUrl from './template.html';
import ngModule from '../../module';
// Controller for the `avLoader` directive: loops a bullet-loading animation
// (Velocity.js) for as long as `active` is true.
class AvLoaderController {
    constructor($element) {
        // Keep the jqLite element so animate()/endAnimation() can reach the DOM.
        this.av = { $element };
        this.active = false;
    }
    // Begin the animation loop.
    start() {
        this.active = true;
        this.animate();
    }
    // One animation cycle: slide the bullets in, fade them out, then either
    // re-schedule itself (still active) or tear the animation down.
    animate() {
        const self = this;
        this.av.$element
            .find('.loading-bullet')
            .velocity('transition.slideRightIn', { stagger: 250 })
            .velocity({ opacity: 0 }, {
                delay: 750,
                duration: 500,
                complete() {
                    if (self.active) {
                        // 500ms pause between cycles before looping again.
                        setTimeout( () => { self.animate() }, 500);
                    } else {
                        self.endAnimation();
                    }
                }
            });
    }
    // Stop any queued Velocity animations and drop cached element data.
    endAnimation = function() {
        this.av.$element.find('.loading-bullet').velocity('stop', true);
        this.av.$element.removeData();
    }
    // Lifecycle hook: flag the loop to stop; the running cycle finishes first.
    $destroy() {
        this.active = false;
    }
    // Lifecycle hook: kick off the animation once the element is linked.
    $postLink() {
        this.start();
    }
}
|
controller: AvLoaderController,
templateUrl
};
});
export default ngModule;
|
ngModule.directive('avLoader', () => {
return {
restrict: 'AE',
replace: true,
|
random_line_split
|
directive.js
|
import 'velocity-animate';
import 'velocity-animate/velocity.ui';
import templateUrl from './template.html';
import ngModule from '../../module';
class AvLoaderController {
constructor($element) {
this.av = { $element };
this.active = false;
}
start() {
this.active = true;
this.animate();
}
animate() {
const self = this;
this.av.$element
.find('.loading-bullet')
.velocity('transition.slideRightIn', { stagger: 250 })
.velocity({ opacity: 0 }, {
delay: 750,
duration: 500,
|
() {
if (self.active) {
setTimeout( () => { self.animate() }, 500);
} else {
self.endAnimation();
}
}
});
}
endAnimation = function() {
this.av.$element.find('.loading-bullet').velocity('stop', true);
this.av.$element.removeData();
}
$destroy() {
this.active = false;
}
$postLink() {
this.start();
}
}
ngModule.directive('avLoader', () => {
return {
restrict: 'AE',
replace: true,
controller: AvLoaderController,
templateUrl
};
});
export default ngModule;
|
complete
|
identifier_name
|
directive.js
|
import 'velocity-animate';
import 'velocity-animate/velocity.ui';
import templateUrl from './template.html';
import ngModule from '../../module';
class AvLoaderController {
constructor($element) {
this.av = { $element };
this.active = false;
}
start() {
this.active = true;
this.animate();
}
animate() {
const self = this;
this.av.$element
.find('.loading-bullet')
.velocity('transition.slideRightIn', { stagger: 250 })
.velocity({ opacity: 0 }, {
delay: 750,
duration: 500,
complete() {
if (self.active) {
setTimeout( () => { self.animate() }, 500);
} else
|
}
});
}
endAnimation = function() {
this.av.$element.find('.loading-bullet').velocity('stop', true);
this.av.$element.removeData();
}
$destroy() {
this.active = false;
}
$postLink() {
this.start();
}
}
ngModule.directive('avLoader', () => {
return {
restrict: 'AE',
replace: true,
controller: AvLoaderController,
templateUrl
};
});
export default ngModule;
|
{
self.endAnimation();
}
|
conditional_block
|
conftest.py
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from daklib.database.all import Base
Session = sessionmaker()
@pytest.fixture(scope='session')
def engine():
    """Session-scoped in-memory SQLite engine with the full schema created.

    Creating the schema once per session keeps individual tests fast; the
    ``session`` fixture wraps each test in a rolled-back transaction.
    """
    db_engine = create_engine('sqlite://', echo=True)
    Base.metadata.create_all(db_engine)
    return db_engine
@pytest.yield_fixture
def session(engine):
connection = engine.connect()
trans = connection.begin()
session = Session(bind=connection)
|
trans.rollback()
connection.close()
|
yield session
session.close()
|
random_line_split
|
conftest.py
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from daklib.database.all import Base
Session = sessionmaker()
@pytest.fixture(scope='session')
def
|
():
engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
return engine
@pytest.yield_fixture
def session(engine):
connection = engine.connect()
trans = connection.begin()
session = Session(bind=connection)
yield session
session.close()
trans.rollback()
connection.close()
|
engine
|
identifier_name
|
conftest.py
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from daklib.database.all import Base
Session = sessionmaker()
@pytest.fixture(scope='session')
def engine():
engine = create_engine('sqlite://', echo=True)
Base.metadata.create_all(engine)
return engine
@pytest.yield_fixture
def session(engine):
|
connection = engine.connect()
trans = connection.begin()
session = Session(bind=connection)
yield session
session.close()
trans.rollback()
connection.close()
|
identifier_body
|
|
pbxareavoip.py
|
# -*- coding: utf-8 -*-
import requests
import json
from yamlns import namespace as ns
from .. import persons
class AreaVoip(object):
@staticmethod
def defaultQueue():
import dbconfig
return dbconfig.tomatic.get('areavoip',{}).get('queue', None)
def __init__(self):
import dbconfig
self.config = dbconfig.tomatic.areavoip
def _api(self, request, **kwds):
print(request,kwds)
result = requests.get(self.config.baseurl,
params=dict(
reqtype = request,
tenant = self.config.tenant,
key = self.config.apikey,
**kwds),
timeout=2, # seconds
)
print((result.request.url))
print(result.text)
if 'action' in kwds and kwds.get('format') != 'json':
if result.text.strip() != 'OK':
raise Exception(result.text.strip())
return True
if kwds.get('format') == 'json':
return result.json()
return result.text.strip()
def setQueue(self, queue, names):
self.clear(queue)
for name in names:
self.add(queue, name)
def
|
(self, queue):
response = self._api('INFO', info='agentsconnected',
queue = queue,
format='json',
)
if not response: return []
return [
ns(
key = persons.byExtension(extension),
extension = extension,
name = persons.name(persons.byExtension(extension)),
paused = status.get('1') == 'paused',
disconnected = status['2'] is None or status['2'] == 'UNAVAILABLE',
available = status['2'] == 'NOT_INUSE',
ringing = status['2'] == 'RINGING',
incall = status['2'] == 'INUSE',
ncalls = int(status['0']),
secondsInCalls = int(status.get('3','0')),
secondsSinceLastCall = 0, # TODO
flags = [status['2']] if status['2'] and status['2'] not in (
'UNAVAILABLE', 'NOT_INUSE', 'RINGING', 'INUSE',
) else [],
)
for extension, status in response.items()
]
def pause(self, queue, name, paused=True):
extension = persons.extension(name)
if not extension: return
response = self._api('AGENT',
action='pause' if paused else 'unpause',
queue = queue,
extension = extension,
reason = 'notimplemented',
)
def resume(self, queue, name):
self.pause(queue, name, False)
def add(self, queue, name):
extension = persons.extension(name)
if not extension: return
response = self._api('QUEUE', action='add',
id = queue,
extension = extension,
type='NF', # agent type: non-follow
)
def clear(self, queue):
response = self._api('QUEUE', action='clean',
id = queue,
)
def stats(self, queue, date=None):
date = date or '{:%Y-%m-%d}'.format(datetime.date.today())
stats = ns(
self._api('INFO',
info='queue',
id=queue,
format='json',
),
DATE=date,
)
fields = [
'date',
'callsreceived',
'answeredcalls',
'abandonedcalls',
'timedoutcalls',
'talktime',
'averagetalktime',
'holdtime',
'averageholdtime',
'maxholdtime',
]
return ns([
(attr, stats[attr.upper()])
for attr in fields
])
def _allExtensions(self):
return self._api('MANAGEDB',
object='extension',
action='list',
format='json',
).items()
def addExtension(self, extension, fullname, email=''):
for id, extensionInfo in self._allExtensions():
if extensionInfo['ex_number'] == extension:
break
else:
return
jsondata = json.dumps(dict(
ex_name = fullname,
))
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata=jsondata,
format='json',
)
def removeExtension(self, extension):
self.addExtension(extension,'')
def clearExtensions(self):
for id, extensionInfo in self._allExtensions():
if not extensionInfo.get('ex_name'):
continue
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata='{"ex_name": ""}',
format='json',
)
def extensions(self):
return [(
extensionInfo['ex_number'],
extensionInfo['ex_name'],
)
for id, extensionInfo in self._allExtensions()
if extensionInfo['ex_name']
]
# vim: ts=4 sw=4 et
|
queue
|
identifier_name
|
pbxareavoip.py
|
# -*- coding: utf-8 -*-
import requests
import json
from yamlns import namespace as ns
from .. import persons
class AreaVoip(object):
@staticmethod
def defaultQueue():
import dbconfig
return dbconfig.tomatic.get('areavoip',{}).get('queue', None)
def __init__(self):
import dbconfig
self.config = dbconfig.tomatic.areavoip
def _api(self, request, **kwds):
print(request,kwds)
result = requests.get(self.config.baseurl,
params=dict(
reqtype = request,
tenant = self.config.tenant,
key = self.config.apikey,
**kwds),
timeout=2, # seconds
)
print((result.request.url))
print(result.text)
if 'action' in kwds and kwds.get('format') != 'json':
if result.text.strip() != 'OK':
raise Exception(result.text.strip())
return True
if kwds.get('format') == 'json':
return result.json()
return result.text.strip()
def setQueue(self, queue, names):
|
def queue(self, queue):
response = self._api('INFO', info='agentsconnected',
queue = queue,
format='json',
)
if not response: return []
return [
ns(
key = persons.byExtension(extension),
extension = extension,
name = persons.name(persons.byExtension(extension)),
paused = status.get('1') == 'paused',
disconnected = status['2'] is None or status['2'] == 'UNAVAILABLE',
available = status['2'] == 'NOT_INUSE',
ringing = status['2'] == 'RINGING',
incall = status['2'] == 'INUSE',
ncalls = int(status['0']),
secondsInCalls = int(status.get('3','0')),
secondsSinceLastCall = 0, # TODO
flags = [status['2']] if status['2'] and status['2'] not in (
'UNAVAILABLE', 'NOT_INUSE', 'RINGING', 'INUSE',
) else [],
)
for extension, status in response.items()
]
def pause(self, queue, name, paused=True):
extension = persons.extension(name)
if not extension: return
response = self._api('AGENT',
action='pause' if paused else 'unpause',
queue = queue,
extension = extension,
reason = 'notimplemented',
)
def resume(self, queue, name):
self.pause(queue, name, False)
def add(self, queue, name):
extension = persons.extension(name)
if not extension: return
response = self._api('QUEUE', action='add',
id = queue,
extension = extension,
type='NF', # agent type: non-follow
)
def clear(self, queue):
response = self._api('QUEUE', action='clean',
id = queue,
)
def stats(self, queue, date=None):
date = date or '{:%Y-%m-%d}'.format(datetime.date.today())
stats = ns(
self._api('INFO',
info='queue',
id=queue,
format='json',
),
DATE=date,
)
fields = [
'date',
'callsreceived',
'answeredcalls',
'abandonedcalls',
'timedoutcalls',
'talktime',
'averagetalktime',
'holdtime',
'averageholdtime',
'maxholdtime',
]
return ns([
(attr, stats[attr.upper()])
for attr in fields
])
def _allExtensions(self):
return self._api('MANAGEDB',
object='extension',
action='list',
format='json',
).items()
def addExtension(self, extension, fullname, email=''):
for id, extensionInfo in self._allExtensions():
if extensionInfo['ex_number'] == extension:
break
else:
return
jsondata = json.dumps(dict(
ex_name = fullname,
))
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata=jsondata,
format='json',
)
def removeExtension(self, extension):
self.addExtension(extension,'')
def clearExtensions(self):
for id, extensionInfo in self._allExtensions():
if not extensionInfo.get('ex_name'):
continue
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata='{"ex_name": ""}',
format='json',
)
def extensions(self):
return [(
extensionInfo['ex_number'],
extensionInfo['ex_name'],
)
for id, extensionInfo in self._allExtensions()
if extensionInfo['ex_name']
]
# vim: ts=4 sw=4 et
|
self.clear(queue)
for name in names:
self.add(queue, name)
|
identifier_body
|
pbxareavoip.py
|
# -*- coding: utf-8 -*-
import requests
import json
from yamlns import namespace as ns
from .. import persons
class AreaVoip(object):
@staticmethod
def defaultQueue():
import dbconfig
return dbconfig.tomatic.get('areavoip',{}).get('queue', None)
    def __init__(self):
        """Load the AreaVoip connection settings from ``dbconfig``."""
        # Imported lazily so merely importing this module does not require
        # a configured dbconfig.
        import dbconfig
        # Namespace providing baseurl / tenant / apikey, used by _api().
        self.config = dbconfig.tomatic.areavoip
    def _api(self, request, **kwds):
        """Perform one GET request against the AreaVoip HTTP API.

        ``request`` is the API ``reqtype``; extra keyword arguments are sent
        as query parameters. Returns, depending on ``kwds``:
        - ``True`` for actions not requesting json (after checking the body
          is exactly ``OK``; anything else raises ``Exception``),
        - the decoded JSON payload when ``format='json'`` was requested,
        - the stripped response text otherwise.
        Raises ``requests`` exceptions on network errors/timeout (2s).
        """
        # NOTE(review): these prints look like debug leftovers — consider
        # switching to logging.
        print(request,kwds)
        result = requests.get(self.config.baseurl,
            params=dict(
                reqtype = request,
                tenant = self.config.tenant,
                key = self.config.apikey,
                **kwds),
            timeout=2, # seconds
            )
        print((result.request.url))
        print(result.text)
        # Mutating actions respond with a plain-text 'OK' on success,
        # unless json output was explicitly requested.
        if 'action' in kwds and kwds.get('format') != 'json':
            if result.text.strip() != 'OK':
                raise Exception(result.text.strip())
            return True
        if kwds.get('format') == 'json':
            return result.json()
        return result.text.strip()
def setQueue(self, queue, names):
self.clear(queue)
for name in names:
self.add(queue, name)
    def queue(self, queue):
        """Return the live status of every agent connected to *queue*.

        Each entry is a yamlns namespace keyed by the person looked up from
        the agent's extension. Returns ``[]`` when the API reports no agents.
        """
        response = self._api('INFO', info='agentsconnected',
            queue = queue,
            format='json',
            )
        if not response: return []
        # The API returns a mapping extension -> status dict with numeric
        # string keys; from the comparisons below: '0' is a call counter,
        # '1' the pause flag, '2' the device-state string, '3' seconds in
        # calls. Assumed from usage — TODO confirm against the AreaVoip docs.
        return [
            ns(
                key = persons.byExtension(extension),
                extension = extension,
                name = persons.name(persons.byExtension(extension)),
                paused = status.get('1') == 'paused',
                disconnected = status['2'] is None or status['2'] == 'UNAVAILABLE',
                available = status['2'] == 'NOT_INUSE',
                ringing = status['2'] == 'RINGING',
                incall = status['2'] == 'INUSE',
                ncalls = int(status['0']),
                secondsInCalls = int(status.get('3','0')),
                secondsSinceLastCall = 0, # TODO
                # Any device state other than the four known ones is surfaced
                # verbatim as a flag.
                flags = [status['2']] if status['2'] and status['2'] not in (
                    'UNAVAILABLE', 'NOT_INUSE', 'RINGING', 'INUSE',
                ) else [],
            )
            for extension, status in response.items()
        ]
def pause(self, queue, name, paused=True):
extension = persons.extension(name)
if not extension: return
response = self._api('AGENT',
action='pause' if paused else 'unpause',
queue = queue,
extension = extension,
reason = 'notimplemented',
)
def resume(self, queue, name):
self.pause(queue, name, False)
def add(self, queue, name):
extension = persons.extension(name)
if not extension: return
response = self._api('QUEUE', action='add',
id = queue,
extension = extension,
type='NF', # agent type: non-follow
)
def clear(self, queue):
response = self._api('QUEUE', action='clean',
id = queue,
)
def stats(self, queue, date=None):
date = date or '{:%Y-%m-%d}'.format(datetime.date.today())
stats = ns(
self._api('INFO',
info='queue',
id=queue,
format='json',
),
DATE=date,
)
fields = [
'date',
'callsreceived',
'answeredcalls',
'abandonedcalls',
'timedoutcalls',
'talktime',
'averagetalktime',
'holdtime',
'averageholdtime',
'maxholdtime',
]
return ns([
(attr, stats[attr.upper()])
for attr in fields
])
def _allExtensions(self):
return self._api('MANAGEDB',
object='extension',
action='list',
format='json',
).items()
def addExtension(self, extension, fullname, email=''):
for id, extensionInfo in self._allExtensions():
if extensionInfo['ex_number'] == extension:
break
else:
return
jsondata = json.dumps(dict(
ex_name = fullname,
))
self._api('MANAGEDB',
|
jsondata=jsondata,
format='json',
)
def removeExtension(self, extension):
self.addExtension(extension,'')
def clearExtensions(self):
for id, extensionInfo in self._allExtensions():
if not extensionInfo.get('ex_name'):
continue
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata='{"ex_name": ""}',
format='json',
)
def extensions(self):
return [(
extensionInfo['ex_number'],
extensionInfo['ex_name'],
)
for id, extensionInfo in self._allExtensions()
if extensionInfo['ex_name']
]
# vim: ts=4 sw=4 et
|
object='extension',
action='update',
objectid=id,
|
random_line_split
|
pbxareavoip.py
|
# -*- coding: utf-8 -*-
import requests
import json
from yamlns import namespace as ns
from .. import persons
class AreaVoip(object):
@staticmethod
def defaultQueue():
import dbconfig
return dbconfig.tomatic.get('areavoip',{}).get('queue', None)
def __init__(self):
import dbconfig
self.config = dbconfig.tomatic.areavoip
def _api(self, request, **kwds):
print(request,kwds)
result = requests.get(self.config.baseurl,
params=dict(
reqtype = request,
tenant = self.config.tenant,
key = self.config.apikey,
**kwds),
timeout=2, # seconds
)
print((result.request.url))
print(result.text)
if 'action' in kwds and kwds.get('format') != 'json':
if result.text.strip() != 'OK':
raise Exception(result.text.strip())
return True
if kwds.get('format') == 'json':
return result.json()
return result.text.strip()
def setQueue(self, queue, names):
self.clear(queue)
for name in names:
self.add(queue, name)
def queue(self, queue):
response = self._api('INFO', info='agentsconnected',
queue = queue,
format='json',
)
if not response: return []
return [
ns(
key = persons.byExtension(extension),
extension = extension,
name = persons.name(persons.byExtension(extension)),
paused = status.get('1') == 'paused',
disconnected = status['2'] is None or status['2'] == 'UNAVAILABLE',
available = status['2'] == 'NOT_INUSE',
ringing = status['2'] == 'RINGING',
incall = status['2'] == 'INUSE',
ncalls = int(status['0']),
secondsInCalls = int(status.get('3','0')),
secondsSinceLastCall = 0, # TODO
flags = [status['2']] if status['2'] and status['2'] not in (
'UNAVAILABLE', 'NOT_INUSE', 'RINGING', 'INUSE',
) else [],
)
for extension, status in response.items()
]
def pause(self, queue, name, paused=True):
extension = persons.extension(name)
if not extension: return
response = self._api('AGENT',
action='pause' if paused else 'unpause',
queue = queue,
extension = extension,
reason = 'notimplemented',
)
def resume(self, queue, name):
self.pause(queue, name, False)
def add(self, queue, name):
extension = persons.extension(name)
if not extension:
|
response = self._api('QUEUE', action='add',
id = queue,
extension = extension,
type='NF', # agent type: non-follow
)
def clear(self, queue):
response = self._api('QUEUE', action='clean',
id = queue,
)
def stats(self, queue, date=None):
date = date or '{:%Y-%m-%d}'.format(datetime.date.today())
stats = ns(
self._api('INFO',
info='queue',
id=queue,
format='json',
),
DATE=date,
)
fields = [
'date',
'callsreceived',
'answeredcalls',
'abandonedcalls',
'timedoutcalls',
'talktime',
'averagetalktime',
'holdtime',
'averageholdtime',
'maxholdtime',
]
return ns([
(attr, stats[attr.upper()])
for attr in fields
])
def _allExtensions(self):
return self._api('MANAGEDB',
object='extension',
action='list',
format='json',
).items()
def addExtension(self, extension, fullname, email=''):
for id, extensionInfo in self._allExtensions():
if extensionInfo['ex_number'] == extension:
break
else:
return
jsondata = json.dumps(dict(
ex_name = fullname,
))
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata=jsondata,
format='json',
)
def removeExtension(self, extension):
self.addExtension(extension,'')
def clearExtensions(self):
for id, extensionInfo in self._allExtensions():
if not extensionInfo.get('ex_name'):
continue
self._api('MANAGEDB',
object='extension',
action='update',
objectid=id,
jsondata='{"ex_name": ""}',
format='json',
)
def extensions(self):
return [(
extensionInfo['ex_number'],
extensionInfo['ex_name'],
)
for id, extensionInfo in self._allExtensions()
if extensionInfo['ex_name']
]
# vim: ts=4 sw=4 et
|
return
|
conditional_block
|
attention_test.py
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for attention functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import tensorflow as tf
import numpy as np
from seq2seq.decoders.attention import AttentionLayerDot
from seq2seq.decoders.attention import AttentionLayerBahdanau
class AttentionLayerTest(tf.test.TestCase):
"""
Tests the AttentionLayer module.
"""
def setUp(self):
super(AttentionLayerTest, self).setUp()
tf.logging.set_verbosity(tf.logging.INFO)
self.batch_size = 8
self.attention_dim = 128
self.input_dim = 16
self.seq_len = 10
self.state_dim = 32
def _create_layer(self):
"""Creates the attention layer. Should be implemented by child classes"""
raise NotImplementedError
def _test_layer(self):
"""Tests Attention layer with a given score type"""
inputs_pl = tf.placeholder(tf.float32, (None, None, self.input_dim))
inputs_length_pl = tf.placeholder(tf.int32, [None])
state_pl = tf.placeholder(tf.float32, (None, self.state_dim))
attention_fn = self._create_layer()
scores, context = attention_fn(
query=state_pl,
keys=inputs_pl,
values=inputs_pl,
values_length=inputs_length_pl)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
feed_dict = {}
feed_dict[inputs_pl] = np.random.randn(self.batch_size, self.seq_len,
self.input_dim)
feed_dict[state_pl] = np.random.randn(self.batch_size, self.state_dim)
feed_dict[inputs_length_pl] = np.arange(self.batch_size) + 1
scores_, context_ = sess.run([scores, context], feed_dict)
np.testing.assert_array_equal(scores_.shape,
[self.batch_size, self.seq_len])
np.testing.assert_array_equal(context_.shape,
[self.batch_size, self.input_dim])
for idx, batch in enumerate(scores_, 1):
# All scores that are padded should be zero
np.testing.assert_array_equal(batch[idx:], np.zeros_like(batch[idx:]))
# Scores should sum to 1
scores_sum = np.sum(scores_, axis=1)
np.testing.assert_array_almost_equal(scores_sum, np.ones([self.batch_size]))
class AttentionLayerDotTest(AttentionLayerTest):
"""Tests the AttentionLayerDot class"""
def _create_layer(self):
|
def test_layer(self):
self._test_layer()
class AttentionLayerBahdanauTest(AttentionLayerTest):
"""Tests the AttentionLayerBahdanau class"""
def _create_layer(self):
return AttentionLayerBahdanau(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def test_layer(self):
self._test_layer()
if __name__ == "__main__":
tf.test.main()
|
return AttentionLayerDot(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
|
identifier_body
|
attention_test.py
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for attention functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import tensorflow as tf
import numpy as np
from seq2seq.decoders.attention import AttentionLayerDot
from seq2seq.decoders.attention import AttentionLayerBahdanau
class AttentionLayerTest(tf.test.TestCase):
"""
Tests the AttentionLayer module.
"""
def setUp(self):
super(AttentionLayerTest, self).setUp()
tf.logging.set_verbosity(tf.logging.INFO)
self.batch_size = 8
self.attention_dim = 128
self.input_dim = 16
self.seq_len = 10
self.state_dim = 32
def _create_layer(self):
"""Creates the attention layer. Should be implemented by child classes"""
raise NotImplementedError
def _test_layer(self):
"""Tests Attention layer with a given score type"""
inputs_pl = tf.placeholder(tf.float32, (None, None, self.input_dim))
inputs_length_pl = tf.placeholder(tf.int32, [None])
state_pl = tf.placeholder(tf.float32, (None, self.state_dim))
attention_fn = self._create_layer()
scores, context = attention_fn(
query=state_pl,
keys=inputs_pl,
values=inputs_pl,
values_length=inputs_length_pl)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
feed_dict = {}
feed_dict[inputs_pl] = np.random.randn(self.batch_size, self.seq_len,
self.input_dim)
feed_dict[state_pl] = np.random.randn(self.batch_size, self.state_dim)
feed_dict[inputs_length_pl] = np.arange(self.batch_size) + 1
scores_, context_ = sess.run([scores, context], feed_dict)
np.testing.assert_array_equal(scores_.shape,
[self.batch_size, self.seq_len])
np.testing.assert_array_equal(context_.shape,
[self.batch_size, self.input_dim])
for idx, batch in enumerate(scores_, 1):
# All scores that are padded should be zero
np.testing.assert_array_equal(batch[idx:], np.zeros_like(batch[idx:]))
# Scores should sum to 1
scores_sum = np.sum(scores_, axis=1)
np.testing.assert_array_almost_equal(scores_sum, np.ones([self.batch_size]))
class AttentionLayerDotTest(AttentionLayerTest):
"""Tests the AttentionLayerDot class"""
def _create_layer(self):
return AttentionLayerDot(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def test_layer(self):
self._test_layer()
class AttentionLayerBahdanauTest(AttentionLayerTest):
"""Tests the AttentionLayerBahdanau class"""
def _create_layer(self):
return AttentionLayerBahdanau(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def
|
(self):
self._test_layer()
if __name__ == "__main__":
tf.test.main()
|
test_layer
|
identifier_name
|
attention_test.py
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for attention functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import tensorflow as tf
import numpy as np
from seq2seq.decoders.attention import AttentionLayerDot
from seq2seq.decoders.attention import AttentionLayerBahdanau
class AttentionLayerTest(tf.test.TestCase):
"""
Tests the AttentionLayer module.
"""
def setUp(self):
super(AttentionLayerTest, self).setUp()
tf.logging.set_verbosity(tf.logging.INFO)
self.batch_size = 8
self.attention_dim = 128
self.input_dim = 16
|
self.seq_len = 10
self.state_dim = 32
def _create_layer(self):
"""Creates the attention layer. Should be implemented by child classes"""
raise NotImplementedError
def _test_layer(self):
"""Tests Attention layer with a given score type"""
inputs_pl = tf.placeholder(tf.float32, (None, None, self.input_dim))
inputs_length_pl = tf.placeholder(tf.int32, [None])
state_pl = tf.placeholder(tf.float32, (None, self.state_dim))
attention_fn = self._create_layer()
scores, context = attention_fn(
query=state_pl,
keys=inputs_pl,
values=inputs_pl,
values_length=inputs_length_pl)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
feed_dict = {}
feed_dict[inputs_pl] = np.random.randn(self.batch_size, self.seq_len,
self.input_dim)
feed_dict[state_pl] = np.random.randn(self.batch_size, self.state_dim)
feed_dict[inputs_length_pl] = np.arange(self.batch_size) + 1
scores_, context_ = sess.run([scores, context], feed_dict)
np.testing.assert_array_equal(scores_.shape,
[self.batch_size, self.seq_len])
np.testing.assert_array_equal(context_.shape,
[self.batch_size, self.input_dim])
for idx, batch in enumerate(scores_, 1):
# All scores that are padded should be zero
np.testing.assert_array_equal(batch[idx:], np.zeros_like(batch[idx:]))
# Scores should sum to 1
scores_sum = np.sum(scores_, axis=1)
np.testing.assert_array_almost_equal(scores_sum, np.ones([self.batch_size]))
class AttentionLayerDotTest(AttentionLayerTest):
"""Tests the AttentionLayerDot class"""
def _create_layer(self):
return AttentionLayerDot(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def test_layer(self):
self._test_layer()
class AttentionLayerBahdanauTest(AttentionLayerTest):
"""Tests the AttentionLayerBahdanau class"""
def _create_layer(self):
return AttentionLayerBahdanau(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def test_layer(self):
self._test_layer()
if __name__ == "__main__":
tf.test.main()
|
random_line_split
|
|
attention_test.py
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for attention functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import tensorflow as tf
import numpy as np
from seq2seq.decoders.attention import AttentionLayerDot
from seq2seq.decoders.attention import AttentionLayerBahdanau
class AttentionLayerTest(tf.test.TestCase):
"""
Tests the AttentionLayer module.
"""
def setUp(self):
super(AttentionLayerTest, self).setUp()
tf.logging.set_verbosity(tf.logging.INFO)
self.batch_size = 8
self.attention_dim = 128
self.input_dim = 16
self.seq_len = 10
self.state_dim = 32
def _create_layer(self):
"""Creates the attention layer. Should be implemented by child classes"""
raise NotImplementedError
def _test_layer(self):
"""Tests Attention layer with a given score type"""
inputs_pl = tf.placeholder(tf.float32, (None, None, self.input_dim))
inputs_length_pl = tf.placeholder(tf.int32, [None])
state_pl = tf.placeholder(tf.float32, (None, self.state_dim))
attention_fn = self._create_layer()
scores, context = attention_fn(
query=state_pl,
keys=inputs_pl,
values=inputs_pl,
values_length=inputs_length_pl)
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
feed_dict = {}
feed_dict[inputs_pl] = np.random.randn(self.batch_size, self.seq_len,
self.input_dim)
feed_dict[state_pl] = np.random.randn(self.batch_size, self.state_dim)
feed_dict[inputs_length_pl] = np.arange(self.batch_size) + 1
scores_, context_ = sess.run([scores, context], feed_dict)
np.testing.assert_array_equal(scores_.shape,
[self.batch_size, self.seq_len])
np.testing.assert_array_equal(context_.shape,
[self.batch_size, self.input_dim])
for idx, batch in enumerate(scores_, 1):
# All scores that are padded should be zero
|
# Scores should sum to 1
scores_sum = np.sum(scores_, axis=1)
np.testing.assert_array_almost_equal(scores_sum, np.ones([self.batch_size]))
class AttentionLayerDotTest(AttentionLayerTest):
"""Tests the AttentionLayerDot class"""
def _create_layer(self):
return AttentionLayerDot(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def test_layer(self):
self._test_layer()
class AttentionLayerBahdanauTest(AttentionLayerTest):
"""Tests the AttentionLayerBahdanau class"""
def _create_layer(self):
return AttentionLayerBahdanau(
params={"num_units": self.attention_dim},
mode=tf.contrib.learn.ModeKeys.TRAIN)
def test_layer(self):
self._test_layer()
if __name__ == "__main__":
tf.test.main()
|
np.testing.assert_array_equal(batch[idx:], np.zeros_like(batch[idx:]))
|
conditional_block
|
gulp-filter.d.ts
|
// Compiled using [email protected]
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/1735153b55c4616192219e7edaecdef3971bd5b3/gulp-filter/gulp-filter.d.ts
// Type definitions for gulp-filter v3.0.1
// Project: https://github.com/sindresorhus/gulp-filter
// Definitions by: Tanguy Krotoff <https://github.com/tkrotoff>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module 'gulp-filter' {
import File = require('vinyl');
import * as Minimatch from 'minimatch';
namespace filter {
interface FileFunction {
(file: File): boolean;
}
interface Options extends Minimatch.IOptions {
|
restore?: boolean;
passthrough?: boolean;
}
// A transform stream with a .restore object
interface Filter extends NodeJS.ReadWriteStream {
restore: NodeJS.ReadWriteStream
}
}
function filter(pattern: string | string[] | filter.FileFunction, options?: filter.Options): filter.Filter;
export = filter;
}
|
random_line_split
|
|
lightbox.js
|
// Lightbox
;(function() {
|
$('.lightboxed').click(function (e) {
e.preventDefault();
var image_href = $(this).attr("src");
if ($('#lightbox').length > 0) {
$('#content').html('<img src="' + image_href + '" />');
//show lightbox window - you could use .show('fast') for a transition
$('#lightbox').show();
}
else {
var lightbox =
'<div id="lightbox">' +
'<p></p>' +
'<div id="content">' + //insert clicked link's href into img src
'<img src="' + image_href +'" />' +
'</div>' +
'</div>';
$('body').append(lightbox);
}
});
$('#lightbox').live('click', function() {
$('#lightbox').hide();
});
$(document).keyup(function(e) {
$('#lightbox').hide();
});
});
}());
|
$(document).ready(function () {
$('.lightboxed').css('cursor', 'pointer');
|
random_line_split
|
lightbox.js
|
// Lightbox
;(function() {
$(document).ready(function () {
$('.lightboxed').css('cursor', 'pointer');
$('.lightboxed').click(function (e) {
e.preventDefault();
var image_href = $(this).attr("src");
if ($('#lightbox').length > 0)
|
else {
var lightbox =
'<div id="lightbox">' +
'<p></p>' +
'<div id="content">' + //insert clicked link's href into img src
'<img src="' + image_href +'" />' +
'</div>' +
'</div>';
$('body').append(lightbox);
}
});
$('#lightbox').live('click', function() {
$('#lightbox').hide();
});
$(document).keyup(function(e) {
$('#lightbox').hide();
});
});
}());
|
{
$('#content').html('<img src="' + image_href + '" />');
//show lightbox window - you could use .show('fast') for a transition
$('#lightbox').show();
}
|
conditional_block
|
test_core.py
|
import os
import sys
import unittest
from cStringIO import StringIO
from optparse import OptionParser
import nose.core
from nose.config import Config
from nose.tools import set_trace
from mock import Bucket, MockOptParser
class NullLoader:
def loadTestsFromNames(self, names):
return unittest.TestSuite()
class TestAPI_run(unittest.TestCase):
def test_restore_stdout(self):
print "AHOY"
s = StringIO()
print s
stdout = sys.stdout
conf = Config(stream=s)
# set_trace()
print "About to run"
res = nose.core.run(
testLoader=NullLoader(), argv=['test_run'], env={}, config=conf)
print "Done running"
stdout_after = sys.stdout
self.assertEqual(stdout, stdout_after)
class Undefined(object):
pass
class TestUsage(unittest.TestCase):
def test_from_directory(self):
|
def test_from_zip(self):
requested_data = []
# simulates importing nose from a zip archive
# with a zipimport.zipimporter instance
class fake_zipimporter(object):
def get_data(self, path):
requested_data.append(path)
# Return as str in Python 2, bytes in Python 3.
return '<usage>'.encode('utf-8')
existing_loader = getattr(nose, '__loader__', Undefined)
try:
nose.__loader__ = fake_zipimporter()
usage_txt = nose.core.TestProgram.usage()
self.assertEqual(usage_txt, '<usage>')
self.assertEqual(requested_data, [os.path.join(
os.path.dirname(nose.__file__), 'usage.txt')])
finally:
if existing_loader is not Undefined:
nose.__loader__ = existing_loader
else:
del nose.__loader__
if __name__ == '__main__':
unittest.main()
|
usage_txt = nose.core.TestProgram.usage()
assert usage_txt.startswith('nose collects tests automatically'), (
"Unexpected usage: '%s...'" % usage_txt[0:50].replace("\n", '\n'))
|
random_line_split
|
test_core.py
|
import os
import sys
import unittest
from cStringIO import StringIO
from optparse import OptionParser
import nose.core
from nose.config import Config
from nose.tools import set_trace
from mock import Bucket, MockOptParser
class NullLoader:
def loadTestsFromNames(self, names):
return unittest.TestSuite()
class TestAPI_run(unittest.TestCase):
def test_restore_stdout(self):
print "AHOY"
s = StringIO()
print s
stdout = sys.stdout
conf = Config(stream=s)
# set_trace()
print "About to run"
res = nose.core.run(
testLoader=NullLoader(), argv=['test_run'], env={}, config=conf)
print "Done running"
stdout_after = sys.stdout
self.assertEqual(stdout, stdout_after)
class Undefined(object):
pass
class TestUsage(unittest.TestCase):
def test_from_directory(self):
usage_txt = nose.core.TestProgram.usage()
assert usage_txt.startswith('nose collects tests automatically'), (
"Unexpected usage: '%s...'" % usage_txt[0:50].replace("\n", '\n'))
def test_from_zip(self):
requested_data = []
# simulates importing nose from a zip archive
# with a zipimport.zipimporter instance
class fake_zipimporter(object):
def get_data(self, path):
|
existing_loader = getattr(nose, '__loader__', Undefined)
try:
nose.__loader__ = fake_zipimporter()
usage_txt = nose.core.TestProgram.usage()
self.assertEqual(usage_txt, '<usage>')
self.assertEqual(requested_data, [os.path.join(
os.path.dirname(nose.__file__), 'usage.txt')])
finally:
if existing_loader is not Undefined:
nose.__loader__ = existing_loader
else:
del nose.__loader__
if __name__ == '__main__':
unittest.main()
|
requested_data.append(path)
# Return as str in Python 2, bytes in Python 3.
return '<usage>'.encode('utf-8')
|
identifier_body
|
test_core.py
|
import os
import sys
import unittest
from cStringIO import StringIO
from optparse import OptionParser
import nose.core
from nose.config import Config
from nose.tools import set_trace
from mock import Bucket, MockOptParser
class NullLoader:
def loadTestsFromNames(self, names):
return unittest.TestSuite()
class TestAPI_run(unittest.TestCase):
def test_restore_stdout(self):
print "AHOY"
s = StringIO()
print s
stdout = sys.stdout
conf = Config(stream=s)
# set_trace()
print "About to run"
res = nose.core.run(
testLoader=NullLoader(), argv=['test_run'], env={}, config=conf)
print "Done running"
stdout_after = sys.stdout
self.assertEqual(stdout, stdout_after)
class Undefined(object):
pass
class TestUsage(unittest.TestCase):
def test_from_directory(self):
usage_txt = nose.core.TestProgram.usage()
assert usage_txt.startswith('nose collects tests automatically'), (
"Unexpected usage: '%s...'" % usage_txt[0:50].replace("\n", '\n'))
def test_from_zip(self):
requested_data = []
# simulates importing nose from a zip archive
# with a zipimport.zipimporter instance
class
|
(object):
def get_data(self, path):
requested_data.append(path)
# Return as str in Python 2, bytes in Python 3.
return '<usage>'.encode('utf-8')
existing_loader = getattr(nose, '__loader__', Undefined)
try:
nose.__loader__ = fake_zipimporter()
usage_txt = nose.core.TestProgram.usage()
self.assertEqual(usage_txt, '<usage>')
self.assertEqual(requested_data, [os.path.join(
os.path.dirname(nose.__file__), 'usage.txt')])
finally:
if existing_loader is not Undefined:
nose.__loader__ = existing_loader
else:
del nose.__loader__
if __name__ == '__main__':
unittest.main()
|
fake_zipimporter
|
identifier_name
|
test_core.py
|
import os
import sys
import unittest
from cStringIO import StringIO
from optparse import OptionParser
import nose.core
from nose.config import Config
from nose.tools import set_trace
from mock import Bucket, MockOptParser
class NullLoader:
def loadTestsFromNames(self, names):
return unittest.TestSuite()
class TestAPI_run(unittest.TestCase):
def test_restore_stdout(self):
print "AHOY"
s = StringIO()
print s
stdout = sys.stdout
conf = Config(stream=s)
# set_trace()
print "About to run"
res = nose.core.run(
testLoader=NullLoader(), argv=['test_run'], env={}, config=conf)
print "Done running"
stdout_after = sys.stdout
self.assertEqual(stdout, stdout_after)
class Undefined(object):
pass
class TestUsage(unittest.TestCase):
def test_from_directory(self):
usage_txt = nose.core.TestProgram.usage()
assert usage_txt.startswith('nose collects tests automatically'), (
"Unexpected usage: '%s...'" % usage_txt[0:50].replace("\n", '\n'))
def test_from_zip(self):
requested_data = []
# simulates importing nose from a zip archive
# with a zipimport.zipimporter instance
class fake_zipimporter(object):
def get_data(self, path):
requested_data.append(path)
# Return as str in Python 2, bytes in Python 3.
return '<usage>'.encode('utf-8')
existing_loader = getattr(nose, '__loader__', Undefined)
try:
nose.__loader__ = fake_zipimporter()
usage_txt = nose.core.TestProgram.usage()
self.assertEqual(usage_txt, '<usage>')
self.assertEqual(requested_data, [os.path.join(
os.path.dirname(nose.__file__), 'usage.txt')])
finally:
if existing_loader is not Undefined:
nose.__loader__ = existing_loader
else:
|
if __name__ == '__main__':
unittest.main()
|
del nose.__loader__
|
conditional_block
|
unpin_chat_message.rs
|
use crate::requests::*;
use crate::types::*;
///Use this method to unpin a message in a supergroup or a channel.
/// The bot must be an administrator in the chat for this to work
/// and must have the ‘can_pin_messages’ admin right in the
/// supergroup or ‘can_edit_messages’ admin right in the channel.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]
#[must_use = "requests do nothing unless sent"]
pub struct UnpinCha
|
hat_id: ChatRef,
}
impl Request for UnpinChatMessage {
type Type = JsonRequestType<Self>;
type Response = JsonTrueToUnitResponse;
fn serialize(&self) -> Result<HttpRequest, Error> {
Self::Type::serialize(RequestUrl::method("unpinChatMessage"), self)
}
}
impl UnpinChatMessage {
fn new<C>(chat: C) -> Self
where
C: ToChatRef,
{
Self {
chat_id: chat.to_chat_ref(),
}
}
}
pub trait CanUnpinMessage {
fn unpin_message(&self) -> UnpinChatMessage;
}
impl<C> CanUnpinMessage for C
where
C: ToChatRef,
{
fn unpin_message(&self) -> UnpinChatMessage {
UnpinChatMessage::new(self)
}
}
|
tMessage {
c
|
identifier_name
|
unpin_chat_message.rs
|
use crate::requests::*;
use crate::types::*;
///Use this method to unpin a message in a supergroup or a channel.
/// The bot must be an administrator in the chat for this to work
/// and must have the ‘can_pin_messages’ admin right in the
/// supergroup or ‘can_edit_messages’ admin right in the channel.
#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize)]
#[must_use = "requests do nothing unless sent"]
pub struct UnpinChatMessage {
chat_id: ChatRef,
}
impl Request for UnpinChatMessage {
type Type = JsonRequestType<Self>;
type Response = JsonTrueToUnitResponse;
fn serialize(&self) -> Result<HttpRequest, Error> {
Self::Type::serialize(RequestUrl::method("unpinChatMessage"), self)
}
}
impl UnpinChatMessage {
fn new<C>(chat: C) -> Self
where
C: ToChatRef,
{
Self {
chat_id: chat.to_chat_ref(),
}
}
}
pub trait CanUnpinMessage {
fn unpin_message(&self) -> UnpinChatMessage;
}
impl<C> CanUnpinMessage for C
where
C: ToChatRef,
{
fn unpin_message(&self) -> UnpinChatMessage {
UnpinChatMessage::new(self)
|
}
|
}
|
random_line_split
|
utils.py
|
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Utility methods, for compatibility between Python version
:author: Thomas Calmant
:copyright: Copyright 2017, Thomas Calmant
:license: Apache License 2.0
:version: 0.3.1
..
Copyright 2017 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 3, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
if sys.version_info[0] < 3:
# Python 2
# pylint: disable=E1101
import types
try:
STRING_TYPES = (
types.StringType,
types.UnicodeType
)
except NameError:
# Python built without unicode support
STRING_TYPES = (types.StringType,)
NUMERIC_TYPES = (
types.IntType,
types.LongType,
types.FloatType
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
# pylint: disable=E0602
if type(string) is unicode:
return str(string)
return string
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data)
else:
# Python 3
# pylint: disable=E1101
STRING_TYPES = (
bytes,
str
)
NUMERIC_TYPES = (
int,
|
def to_bytes(string):
"""
Converts the given string into bytes
"""
if type(string) is bytes:
return string
return bytes(string, "UTF-8")
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data, "UTF-8")
# ------------------------------------------------------------------------------
# Enumerations
try:
import enum
def is_enum(obj):
"""
Checks if an object is from an enumeration class
:param obj: Object to test
:return: True if the object is an enumeration item
"""
return isinstance(obj, enum.Enum)
except ImportError:
# Pre-Python 3.4
def is_enum(_):
"""
Before Python 3.4, enumerations didn't exist.
:param _: Object to test
:return: Always False
"""
return False
# ------------------------------------------------------------------------------
# Common
DictType = dict
ListType = list
TupleType = tuple
ITERABLE_TYPES = (
list,
set, frozenset,
tuple
)
VALUE_TYPES = (
bool,
type(None)
)
PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES
|
float
)
|
random_line_split
|
utils.py
|
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Utility methods, for compatibility between Python version
:author: Thomas Calmant
:copyright: Copyright 2017, Thomas Calmant
:license: Apache License 2.0
:version: 0.3.1
..
Copyright 2017 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 3, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
if sys.version_info[0] < 3:
# Python 2
# pylint: disable=E1101
import types
try:
STRING_TYPES = (
types.StringType,
types.UnicodeType
)
except NameError:
# Python built without unicode support
STRING_TYPES = (types.StringType,)
NUMERIC_TYPES = (
types.IntType,
types.LongType,
types.FloatType
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
# pylint: disable=E0602
if type(string) is unicode:
return str(string)
return string
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data)
else:
# Python 3
# pylint: disable=E1101
STRING_TYPES = (
bytes,
str
)
NUMERIC_TYPES = (
int,
float
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
if type(string) is bytes:
return string
return bytes(string, "UTF-8")
def from_bytes(data):
|
# ------------------------------------------------------------------------------
# Enumerations
try:
import enum
def is_enum(obj):
"""
Checks if an object is from an enumeration class
:param obj: Object to test
:return: True if the object is an enumeration item
"""
return isinstance(obj, enum.Enum)
except ImportError:
# Pre-Python 3.4
def is_enum(_):
"""
Before Python 3.4, enumerations didn't exist.
:param _: Object to test
:return: Always False
"""
return False
# ------------------------------------------------------------------------------
# Common
DictType = dict
ListType = list
TupleType = tuple
ITERABLE_TYPES = (
list,
set, frozenset,
tuple
)
VALUE_TYPES = (
bool,
type(None)
)
PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES
|
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data, "UTF-8")
|
identifier_body
|
utils.py
|
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Utility methods, for compatibility between Python version
:author: Thomas Calmant
:copyright: Copyright 2017, Thomas Calmant
:license: Apache License 2.0
:version: 0.3.1
..
Copyright 2017 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 3, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
if sys.version_info[0] < 3:
# Python 2
# pylint: disable=E1101
import types
try:
STRING_TYPES = (
types.StringType,
types.UnicodeType
)
except NameError:
# Python built without unicode support
STRING_TYPES = (types.StringType,)
NUMERIC_TYPES = (
types.IntType,
types.LongType,
types.FloatType
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
# pylint: disable=E0602
if type(string) is unicode:
return str(string)
return string
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data)
else:
# Python 3
# pylint: disable=E1101
STRING_TYPES = (
bytes,
str
)
NUMERIC_TYPES = (
int,
float
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
if type(string) is bytes:
return string
return bytes(string, "UTF-8")
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
|
return str(data, "UTF-8")
# ------------------------------------------------------------------------------
# Enumerations
try:
import enum
def is_enum(obj):
"""
Checks if an object is from an enumeration class
:param obj: Object to test
:return: True if the object is an enumeration item
"""
return isinstance(obj, enum.Enum)
except ImportError:
# Pre-Python 3.4
def is_enum(_):
"""
Before Python 3.4, enumerations didn't exist.
:param _: Object to test
:return: Always False
"""
return False
# ------------------------------------------------------------------------------
# Common
DictType = dict
ListType = list
TupleType = tuple
ITERABLE_TYPES = (
list,
set, frozenset,
tuple
)
VALUE_TYPES = (
bool,
type(None)
)
PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES
|
return data
|
conditional_block
|
utils.py
|
#!/usr/bin/python
# -- Content-Encoding: UTF-8 --
"""
Utility methods, for compatibility between Python version
:author: Thomas Calmant
:copyright: Copyright 2017, Thomas Calmant
:license: Apache License 2.0
:version: 0.3.1
..
Copyright 2017 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 3, 1)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
if sys.version_info[0] < 3:
# Python 2
# pylint: disable=E1101
import types
try:
STRING_TYPES = (
types.StringType,
types.UnicodeType
)
except NameError:
# Python built without unicode support
STRING_TYPES = (types.StringType,)
NUMERIC_TYPES = (
types.IntType,
types.LongType,
types.FloatType
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
# pylint: disable=E0602
if type(string) is unicode:
return str(string)
return string
def from_bytes(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data)
else:
# Python 3
# pylint: disable=E1101
STRING_TYPES = (
bytes,
str
)
NUMERIC_TYPES = (
int,
float
)
def to_bytes(string):
"""
Converts the given string into bytes
"""
if type(string) is bytes:
return string
return bytes(string, "UTF-8")
def
|
(data):
"""
Converts the given bytes into a string
"""
if type(data) is str:
return data
return str(data, "UTF-8")
# ------------------------------------------------------------------------------
# Enumerations
try:
import enum
def is_enum(obj):
"""
Checks if an object is from an enumeration class
:param obj: Object to test
:return: True if the object is an enumeration item
"""
return isinstance(obj, enum.Enum)
except ImportError:
# Pre-Python 3.4
def is_enum(_):
"""
Before Python 3.4, enumerations didn't exist.
:param _: Object to test
:return: Always False
"""
return False
# ------------------------------------------------------------------------------
# Common
DictType = dict
ListType = list
TupleType = tuple
ITERABLE_TYPES = (
list,
set, frozenset,
tuple
)
VALUE_TYPES = (
bool,
type(None)
)
PRIMITIVE_TYPES = STRING_TYPES + NUMERIC_TYPES + VALUE_TYPES
|
from_bytes
|
identifier_name
|
setup.py
|
from setuptools import setup, find_packages
import imp
version = imp.load_source('crema.version', 'crema/version.py')
setup(
name='crema',
version=version.version,
description="Convolutional-recurrent estimators for music analysis",
author='Brian McFee',
url='http://github.com/bmcfee/crema',
download_url='http://github.com/bmcfee/crema/releases',
packages=find_packages(),
package_data={'': ['models/*/*.pkl',
'models/*/*.h5',
'models/*/*.json',
'models/*/*.txt']},
long_description="Convolutional-recurrent estimators for music analysis",
classifiers=[
"License :: OSI Approved :: ISC License (ISCL)",
"Programming Language :: Python",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
|
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='audio music learning',
license='ISC',
install_requires=['six',
'librosa>=0.6',
'jams>=0.3',
'scikit-learn>=0.18',
'keras>=2.0',
'tensorflow>=1.0',
'mir_eval>=0.5',
'pumpp>=0.4',
'h5py>=2.7'],
extras_require={
'docs': ['numpydoc', 'sphinx'],
'tests': ['pytest', 'pytest-cov'],
'training': ['pescador>=2.0.1', 'muda']
}
)
|
"Topic :: Software Development",
"Programming Language :: Python :: 3",
|
random_line_split
|
confirmation.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013-2014 Didotech SRL (info at didotech.com)
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.osv import orm, fields
import decimal_precision as dp
import netsvc
from tools import ustr
class sale_order_confirm(orm.TransientModel):
_inherit = "sale.order.confirm"
_columns = {
'cig': fields.char('CIG', size=64, help="Codice identificativo di gara"),
'cup': fields.char('CUP', size=64, help="Codice unico di Progetto")
}
# def default_get(self, cr, uid, fields, context=None):
# sale_order_obj = self.pool['sale.order']
# if context is None:
# context = {}
#
# res = super(sale_order_confirm, self).default_get(cr, uid, fields, context=context)
# sale_order_data = sale_order_obj.browse(cr, uid, context['active_ids'][0], context)
#
# res['cup'] = sale_order_data.cig
# res['cig'] = sale_order_data.cup
#
# return res
def sale_order_confirmated(self, cr, uid, ids, context=None):
|
sale_order_obj = self.pool['sale.order']
result = super(sale_order_confirm, self).sale_order_confirmated(cr, uid, ids, context=context)
sale_order_confirm_data = self.browse(cr, uid, ids[0], context=context)
if result.get('res_id'):
sale_order_obj.write(cr, uid, result['res_id'], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
else:
sale_order_obj.write(cr, uid, context['active_ids'][0], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
for order in sale_order_obj.browse(cr, uid, [result.get('res_id') or context['active_ids'][0]], context=context):
# partner = self.pool['res.partner'].browse(cr, uid, order.partner_id.id)
picking_obj = self.pool['stock.picking']
picking_ids = picking_obj.search(cr, uid, [('sale_id', '=', order.id)], context=context)
for picking_id in picking_ids:
picking_obj.write(cr, uid, picking_id, {
'cig': sale_order_confirm_data.cig or '',
'cup': sale_order_confirm_data.cup or ''
}, context=context)
return result
|
identifier_body
|
|
confirmation.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013-2014 Didotech SRL (info at didotech.com)
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.osv import orm, fields
import decimal_precision as dp
import netsvc
from tools import ustr
class sale_order_confirm(orm.TransientModel):
_inherit = "sale.order.confirm"
_columns = {
'cig': fields.char('CIG', size=64, help="Codice identificativo di gara"),
'cup': fields.char('CUP', size=64, help="Codice unico di Progetto")
}
# def default_get(self, cr, uid, fields, context=None):
# sale_order_obj = self.pool['sale.order']
# if context is None:
# context = {}
#
# res = super(sale_order_confirm, self).default_get(cr, uid, fields, context=context)
# sale_order_data = sale_order_obj.browse(cr, uid, context['active_ids'][0], context)
#
# res['cup'] = sale_order_data.cig
# res['cig'] = sale_order_data.cup
#
# return res
def sale_order_confirmated(self, cr, uid, ids, context=None):
sale_order_obj = self.pool['sale.order']
result = super(sale_order_confirm, self).sale_order_confirmated(cr, uid, ids, context=context)
sale_order_confirm_data = self.browse(cr, uid, ids[0], context=context)
if result.get('res_id'):
sale_order_obj.write(cr, uid, result['res_id'], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
else:
sale_order_obj.write(cr, uid, context['active_ids'][0], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
for order in sale_order_obj.browse(cr, uid, [result.get('res_id') or context['active_ids'][0]], context=context):
# partner = self.pool['res.partner'].browse(cr, uid, order.partner_id.id)
|
return result
|
picking_obj = self.pool['stock.picking']
picking_ids = picking_obj.search(cr, uid, [('sale_id', '=', order.id)], context=context)
for picking_id in picking_ids:
picking_obj.write(cr, uid, picking_id, {
'cig': sale_order_confirm_data.cig or '',
'cup': sale_order_confirm_data.cup or ''
}, context=context)
|
conditional_block
|
confirmation.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013-2014 Didotech SRL (info at didotech.com)
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.osv import orm, fields
import decimal_precision as dp
import netsvc
from tools import ustr
class
|
(orm.TransientModel):
_inherit = "sale.order.confirm"
_columns = {
'cig': fields.char('CIG', size=64, help="Codice identificativo di gara"),
'cup': fields.char('CUP', size=64, help="Codice unico di Progetto")
}
# def default_get(self, cr, uid, fields, context=None):
# sale_order_obj = self.pool['sale.order']
# if context is None:
# context = {}
#
# res = super(sale_order_confirm, self).default_get(cr, uid, fields, context=context)
# sale_order_data = sale_order_obj.browse(cr, uid, context['active_ids'][0], context)
#
# res['cup'] = sale_order_data.cig
# res['cig'] = sale_order_data.cup
#
# return res
def sale_order_confirmated(self, cr, uid, ids, context=None):
sale_order_obj = self.pool['sale.order']
result = super(sale_order_confirm, self).sale_order_confirmated(cr, uid, ids, context=context)
sale_order_confirm_data = self.browse(cr, uid, ids[0], context=context)
if result.get('res_id'):
sale_order_obj.write(cr, uid, result['res_id'], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
else:
sale_order_obj.write(cr, uid, context['active_ids'][0], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
for order in sale_order_obj.browse(cr, uid, [result.get('res_id') or context['active_ids'][0]], context=context):
# partner = self.pool['res.partner'].browse(cr, uid, order.partner_id.id)
picking_obj = self.pool['stock.picking']
picking_ids = picking_obj.search(cr, uid, [('sale_id', '=', order.id)], context=context)
for picking_id in picking_ids:
picking_obj.write(cr, uid, picking_id, {
'cig': sale_order_confirm_data.cig or '',
'cup': sale_order_confirm_data.cup or ''
}, context=context)
return result
|
sale_order_confirm
|
identifier_name
|
confirmation.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013-2014 Didotech SRL (info at didotech.com)
# All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
from openerp.osv import orm, fields
|
class sale_order_confirm(orm.TransientModel):
_inherit = "sale.order.confirm"
_columns = {
'cig': fields.char('CIG', size=64, help="Codice identificativo di gara"),
'cup': fields.char('CUP', size=64, help="Codice unico di Progetto")
}
# def default_get(self, cr, uid, fields, context=None):
# sale_order_obj = self.pool['sale.order']
# if context is None:
# context = {}
#
# res = super(sale_order_confirm, self).default_get(cr, uid, fields, context=context)
# sale_order_data = sale_order_obj.browse(cr, uid, context['active_ids'][0], context)
#
# res['cup'] = sale_order_data.cig
# res['cig'] = sale_order_data.cup
#
# return res
def sale_order_confirmated(self, cr, uid, ids, context=None):
sale_order_obj = self.pool['sale.order']
result = super(sale_order_confirm, self).sale_order_confirmated(cr, uid, ids, context=context)
sale_order_confirm_data = self.browse(cr, uid, ids[0], context=context)
if result.get('res_id'):
sale_order_obj.write(cr, uid, result['res_id'], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
else:
sale_order_obj.write(cr, uid, context['active_ids'][0], {
'cig': sale_order_confirm_data.cig,
'cup': sale_order_confirm_data.cup,
}, context=context)
for order in sale_order_obj.browse(cr, uid, [result.get('res_id') or context['active_ids'][0]], context=context):
# partner = self.pool['res.partner'].browse(cr, uid, order.partner_id.id)
picking_obj = self.pool['stock.picking']
picking_ids = picking_obj.search(cr, uid, [('sale_id', '=', order.id)], context=context)
for picking_id in picking_ids:
picking_obj.write(cr, uid, picking_id, {
'cig': sale_order_confirm_data.cig or '',
'cup': sale_order_confirm_data.cup or ''
}, context=context)
return result
|
import decimal_precision as dp
import netsvc
from tools import ustr
|
random_line_split
|
15.2.3.6-3-3.js
|
/// * Redistributions of source code must retain the above copyright notice, this list of conditions and
/// the following disclaimer.
/// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
/// the following disclaimer in the documentation and/or other materials provided with the distribution.
/// * Neither the name of Microsoft nor the names of its contributors may be used to
/// endorse or promote products derived from this software without specific prior written permission.
///
/// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
/// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
/// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
/// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
/// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
/// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
/// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
/// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
The abtract operation ToPropertyDescriptor is used to package the
into a property desc. Step 10 of ToPropertyDescriptor throws a TypeError
if the property desc ends up having a mix of accessor and data property elements.
*/
ES5Harness.registerTest( {
id: "15.2.3.6-3-3",
path: "TestCases/chapter15/15.2/15.2.3/15.2.3.6/15.2.3.6-3-3.js",
description: "Object.defineProperty throws TypeError if desc has 'set' and 'value' present",
test: function testcase() {
var o = {};
// dummy setter
var setter = function () { }
var desc = { set: setter, value: 101};
try {
Object.defineProperty(o, "foo", desc);
}
catch (e) {
if (e instanceof TypeError &&
(o.hasOwnProperty("foo") === false)) {
return true;
}
}
},
precondition: function prereq() {
return fnExists(Object.defineProperty);
}
});
|
/// Copyright (c) 2009 Microsoft Corporation
///
/// Redistribution and use in source and binary forms, with or without modification, are permitted provided
/// that the following conditions are met:
|
random_line_split
|
|
15.2.3.6-3-3.js
|
/// Copyright (c) 2009 Microsoft Corporation
///
/// Redistribution and use in source and binary forms, with or without modification, are permitted provided
/// that the following conditions are met:
/// * Redistributions of source code must retain the above copyright notice, this list of conditions and
/// the following disclaimer.
/// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
/// the following disclaimer in the documentation and/or other materials provided with the distribution.
/// * Neither the name of Microsoft nor the names of its contributors may be used to
/// endorse or promote products derived from this software without specific prior written permission.
///
/// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
/// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
/// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
/// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
/// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
/// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
/// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
/// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/*
The abtract operation ToPropertyDescriptor is used to package the
into a property desc. Step 10 of ToPropertyDescriptor throws a TypeError
if the property desc ends up having a mix of accessor and data property elements.
*/
ES5Harness.registerTest( {
id: "15.2.3.6-3-3",
path: "TestCases/chapter15/15.2/15.2.3/15.2.3.6/15.2.3.6-3-3.js",
description: "Object.defineProperty throws TypeError if desc has 'set' and 'value' present",
test: function testcase() {
var o = {};
// dummy setter
var setter = function () { }
var desc = { set: setter, value: 101};
try {
Object.defineProperty(o, "foo", desc);
}
catch (e) {
if (e instanceof TypeError &&
(o.hasOwnProperty("foo") === false))
|
}
},
precondition: function prereq() {
return fnExists(Object.defineProperty);
}
});
|
{
return true;
}
|
conditional_block
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.