applied-ai-018 committed on
Commit 618c5a9 · verified · 1 Parent(s): 6a25a2b

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. llmeval-env/lib/python3.10/site-packages/dataproperty/__init__.py +45 -0
  2. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/__init__.cpython-310.pyc +0 -0
  3. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/__version__.cpython-310.pyc +0 -0
  4. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_align.cpython-310.pyc +0 -0
  5. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_align_getter.cpython-310.pyc +0 -0
  6. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_base.cpython-310.pyc +0 -0
  7. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_column.cpython-310.pyc +0 -0
  8. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_common.cpython-310.pyc +0 -0
  9. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_container.cpython-310.pyc +0 -0
  10. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_converter.cpython-310.pyc +0 -0
  11. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_dataproperty.cpython-310.pyc +0 -0
  12. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_extractor.cpython-310.pyc +0 -0
  13. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_formatter.cpython-310.pyc +0 -0
  14. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_function.cpython-310.pyc +0 -0
  15. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_interface.cpython-310.pyc +0 -0
  16. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_line_break.cpython-310.pyc +0 -0
  17. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_preprocessor.cpython-310.pyc +0 -0
  18. llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/typing.cpython-310.pyc +0 -0
  19. llmeval-env/lib/python3.10/site-packages/dataproperty/__version__.py +6 -0
  20. llmeval-env/lib/python3.10/site-packages/dataproperty/_align.py +25 -0
  21. llmeval-env/lib/python3.10/site-packages/dataproperty/_align_getter.py +33 -0
  22. llmeval-env/lib/python3.10/site-packages/dataproperty/_base.py +98 -0
  23. llmeval-env/lib/python3.10/site-packages/dataproperty/_column.py +352 -0
  24. llmeval-env/lib/python3.10/site-packages/dataproperty/_common.py +69 -0
  25. llmeval-env/lib/python3.10/site-packages/dataproperty/_container.py +193 -0
  26. llmeval-env/lib/python3.10/site-packages/dataproperty/_converter.py +90 -0
  27. llmeval-env/lib/python3.10/site-packages/dataproperty/_dataproperty.py +382 -0
  28. llmeval-env/lib/python3.10/site-packages/dataproperty/_extractor.py +814 -0
  29. llmeval-env/lib/python3.10/site-packages/dataproperty/_formatter.py +98 -0
  30. llmeval-env/lib/python3.10/site-packages/dataproperty/_function.py +112 -0
  31. llmeval-env/lib/python3.10/site-packages/dataproperty/_interface.py +30 -0
  32. llmeval-env/lib/python3.10/site-packages/dataproperty/_line_break.py +8 -0
  33. llmeval-env/lib/python3.10/site-packages/dataproperty/_preprocessor.py +173 -0
  34. llmeval-env/lib/python3.10/site-packages/dataproperty/py.typed +0 -0
  35. llmeval-env/lib/python3.10/site-packages/dataproperty/typing.py +62 -0
  36. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/AUTHORS +8 -0
  37. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/INSTALLER +1 -0
  38. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/LICENSE +202 -0
  39. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/METADATA +370 -0
  40. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/RECORD +274 -0
  41. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/WHEEL +5 -0
  42. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/entry_points.txt +3 -0
  43. llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/top_level.txt +1 -0
  44. llmeval-env/lib/python3.10/site-packages/numexpr/__init__.py +68 -0
  45. llmeval-env/lib/python3.10/site-packages/numexpr/__pycache__/__init__.cpython-310.pyc +0 -0
  46. llmeval-env/lib/python3.10/site-packages/numexpr/__pycache__/necompiler.cpython-310.pyc +0 -0
  47. llmeval-env/lib/python3.10/site-packages/numexpr/cpuinfo.py +859 -0
  48. llmeval-env/lib/python3.10/site-packages/nvidia_cuda_runtime_cu12-12.1.105.dist-info/INSTALLER +1 -0
  49. llmeval-env/lib/python3.10/site-packages/nvidia_cuda_runtime_cu12-12.1.105.dist-info/License.txt +1568 -0
  50. llmeval-env/lib/python3.10/site-packages/nvidia_cuda_runtime_cu12-12.1.105.dist-info/METADATA +35 -0
llmeval-env/lib/python3.10/site-packages/dataproperty/__init__.py ADDED
@@ -0,0 +1,45 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ from .__version__ import __author__, __copyright__, __email__, __license__, __version__
+ from ._align import Align
+ from ._align_getter import align_getter
+ from ._column import ColumnDataProperty
+ from ._common import MAX_STRICT_LEVEL_MAP, MIN_STRICT_LEVEL_MAP, NOT_QUOTING_FLAGS, DefaultValue
+ from ._container import MinMaxContainer
+ from ._dataproperty import DataProperty
+ from ._extractor import DataPropertyExtractor, DataPropertyMatrix, MatrixFormatting
+ from ._formatter import Format
+ from ._function import calc_ascii_char_width, get_integer_digit, get_number_of_digit
+ from ._line_break import LineBreakHandling
+ from ._preprocessor import Preprocessor
+ from .logger import set_logger
+
+
+ __all__ = (
+     "Align",
+     "align_getter",
+     "ColumnDataProperty",
+     "DataProperty",
+     "DataPropertyExtractor",
+     "DataPropertyMatrix",
+     "Format",
+     "LineBreakHandling",
+     "MatrixFormatting",
+     "MinMaxContainer",
+     "Preprocessor",
+     "calc_ascii_char_width",
+     "get_integer_digit",
+     "get_number_of_digit",
+     "MAX_STRICT_LEVEL_MAP",
+     "MIN_STRICT_LEVEL_MAP",
+     "NOT_QUOTING_FLAGS",
+     "DefaultValue",
+     "set_logger",
+     "__author__",
+     "__copyright__",
+     "__email__",
+     "__license__",
+     "__version__",
+ )
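
A minimal usage sketch of the public API re-exported above (assumes the dataproperty package and its typepy dependency are installed; expected values follow from the modules added in this commit):

from dataproperty import Align, DataProperty

dp = DataProperty(-1.25)
print(dp.typename)              # "REAL_NUMBER"
print(dp.align is Align.RIGHT)  # True: numeric values are right-aligned by default
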
llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.31 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/__version__.cpython-310.pyc ADDED
Binary file (391 Bytes).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_align.cpython-310.pyc ADDED
Binary file (1.07 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_align_getter.cpython-310.pyc ADDED
Binary file (1.4 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_base.cpython-310.pyc ADDED
Binary file (2.68 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_column.cpython-310.pyc ADDED
Binary file (10.5 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_common.cpython-310.pyc ADDED
Binary file (1.79 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_container.cpython-310.pyc ADDED
Binary file (6.13 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_converter.cpython-310.pyc ADDED
Binary file (3.29 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_dataproperty.cpython-310.pyc ADDED
Binary file (9.38 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_extractor.cpython-310.pyc ADDED
Binary file (20.6 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_formatter.cpython-310.pyc ADDED
Binary file (3.01 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_function.cpython-310.pyc ADDED
Binary file (3.19 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_interface.cpython-310.pyc ADDED
Binary file (1.26 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_line_break.cpython-310.pyc ADDED
Binary file (465 Bytes).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/_preprocessor.cpython-310.pyc ADDED
Binary file (4.93 kB).

llmeval-env/lib/python3.10/site-packages/dataproperty/__pycache__/typing.cpython-310.pyc ADDED
Binary file (1.4 kB).
 
llmeval-env/lib/python3.10/site-packages/dataproperty/__version__.py ADDED
@@ -0,0 +1,6 @@
+ __author__ = "Tsuyoshi Hombashi"
+ __copyright__ = f"Copyright 2016, {__author__}"
+ __license__ = "MIT License"
+ __version__ = "1.0.1"
+ __maintainer__ = __author__
+ __email__ = "[email protected]"
llmeval-env/lib/python3.10/site-packages/dataproperty/_align.py ADDED
@@ -0,0 +1,25 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ import enum
+
+
+ @enum.unique
+ class Align(enum.Enum):
+     AUTO = (1 << 0, "auto")
+     LEFT = (1 << 1, "left")
+     RIGHT = (1 << 2, "right")
+     CENTER = (1 << 3, "center")
+
+     @property
+     def align_code(self) -> int:
+         return self.__align_code
+
+     @property
+     def align_string(self) -> str:
+         return self.__align_string
+
+     def __init__(self, code: int, string: str) -> None:
+         self.__align_code = code
+         self.__align_string = string
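
A short sketch of the Align enum added above; each member pairs a bit-flag code with a display string:

from dataproperty import Align

print(Align.RIGHT.align_code)    # 4 (1 << 2)
print(Align.RIGHT.align_string)  # "right"
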
llmeval-env/lib/python3.10/site-packages/dataproperty/_align_getter.py ADDED
@@ -0,0 +1,33 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ from typing import Dict
+
+ from typepy import Typecode
+
+ from ._align import Align
+
+
+ class AlignGetter:
+     @property
+     def typecode_align_table(self):
+         raise NotImplementedError()
+
+     @typecode_align_table.setter
+     def typecode_align_table(self, x: Dict[Typecode, Align]) -> None:
+         self.__typecode_align_table = x
+
+     def get_align_from_typecode(self, typecode: Typecode) -> Align:
+         return self.__typecode_align_table.get(typecode, self.default_align)
+
+     def __init__(self) -> None:
+         self.typecode_align_table = {
+             Typecode.STRING: Align.LEFT,
+             Typecode.INTEGER: Align.RIGHT,
+             Typecode.REAL_NUMBER: Align.RIGHT,
+         }
+         self.default_align = Align.LEFT
+
+
+ align_getter = AlignGetter()
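
A sketch of the module-level align_getter singleton defined above; typecodes missing from the table fall back to default_align (LEFT):

from typepy import Typecode
from dataproperty import Align, align_getter

print(align_getter.get_align_from_typecode(Typecode.INTEGER) is Align.RIGHT)  # True
print(align_getter.get_align_from_typecode(Typecode.BOOL) is Align.LEFT)      # True (fallback)
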
llmeval-env/lib/python3.10/site-packages/dataproperty/_base.py ADDED
@@ -0,0 +1,98 @@
+ from typing import Dict, Optional, Type
+
+ from typepy import (
+     Bool,
+     DateTime,
+     Dictionary,
+     Infinity,
+     Integer,
+     IpAddress,
+     List,
+     Nan,
+     NoneType,
+     NullString,
+     RealNumber,
+     String,
+     Typecode,
+ )
+ from typepy.type import AbstractType
+
+ from ._formatter import Formatter
+ from ._interface import DataPeropertyInterface
+
+
+ class DataPeropertyBase(DataPeropertyInterface):
+     __slots__ = (
+         "_datetime_format_str",
+         "_decimal_places",
+         "_east_asian_ambiguous_width",
+         "_formatter",
+         "_typecode",
+         "__format_str",
+     )
+
+     __TYPE_CLASS_TABLE: Dict[Typecode, AbstractType] = {
+         Typecode.BOOL: Bool,
+         Typecode.DATETIME: DateTime,
+         Typecode.DICTIONARY: Dictionary,
+         Typecode.INTEGER: Integer,
+         Typecode.INFINITY: Infinity,
+         Typecode.IP_ADDRESS: IpAddress,
+         Typecode.LIST: List,
+         Typecode.NAN: Nan,
+         Typecode.NONE: NoneType,
+         Typecode.NULL_STRING: NullString,
+         Typecode.REAL_NUMBER: RealNumber,
+         Typecode.STRING: String,
+     }
+
+     @property
+     def type_class(self) -> Type[AbstractType]:
+         return self.__TYPE_CLASS_TABLE[self.typecode]
+
+     @property
+     def typecode(self) -> Typecode:
+         """
+         ``typepy.Typecode`` that corresponds to the type of the ``data``.
+
+         :return:
+             One of the Enum value that are defined ``typepy.Typecode``.
+         :rtype: typepy.Typecode
+         """
+
+         assert self._typecode
+
+         return self._typecode
+
+     @property
+     def typename(self) -> str:
+         return self.typecode.name
+
+     def __init__(
+         self,
+         format_flags: Optional[int],
+         is_formatting_float: bool,
+         datetime_format_str: str,
+         east_asian_ambiguous_width: int,
+     ) -> None:
+         self._decimal_places: Optional[int] = None
+         self._east_asian_ambiguous_width = east_asian_ambiguous_width
+         self._typecode: Optional[Typecode] = None
+
+         self._datetime_format_str = datetime_format_str
+         self.__format_str = ""
+
+         self._formatter = Formatter(
+             format_flags=format_flags,
+             datetime_format_str=self._datetime_format_str,
+             is_formatting_float=is_formatting_float,
+         )
+
+     @property
+     def format_str(self) -> str:
+         if self.__format_str:
+             return self.__format_str
+
+         self.__format_str = self._formatter.make_format_str(self.typecode, self.decimal_places)
+
+         return self.__format_str
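
The base class above maps a typepy Typecode to its typepy type class and lazily builds a format string. A sketch through the DataProperty subclass (added later in this commit):

from typepy import Integer, Typecode
from dataproperty import DataProperty

dp = DataProperty(42)
print(dp.typecode is Typecode.INTEGER)  # True
print(dp.typename)                      # "INTEGER"
print(dp.type_class is Integer)         # True: looked up in __TYPE_CLASS_TABLE
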
llmeval-env/lib/python3.10/site-packages/dataproperty/_column.py ADDED
@@ -0,0 +1,352 @@
+ from typing import Any, Dict, List, Optional
+
+ from mbstrdecoder import MultiByteStrDecoder
+ from typepy import Integer, StrictLevel, Typecode, TypeConversionError
+
+ from ._align import Align
+ from ._align_getter import align_getter
+ from ._base import DataPeropertyBase
+ from ._common import DefaultValue
+ from ._container import ListContainer, MinMaxContainer
+ from ._dataproperty import DataProperty
+ from ._function import calc_ascii_char_width
+ from .typing import FloatType
+
+
+ class ColumnDataProperty(DataPeropertyBase):
+     __slots__ = (
+         "__header_ascii_char_width",
+         "__body_ascii_char_width",
+         "__column_index",
+         "__dp_list",
+         "__float_type",
+         "__format_map",
+         "__is_calculate",
+         "__max_precision",
+         "__minmax_integer_digits",
+         "__minmax_decimal_places",
+         "__minmax_additional_format_len",
+         "__typecode_bitmap",
+     )
+
+     @property
+     def align(self) -> Align:
+         return align_getter.get_align_from_typecode(self.typecode)
+
+     @property
+     def bit_length(self) -> Optional[int]:
+         if self.typecode != Typecode.INTEGER:
+             return None
+
+         bit_length = 0
+         for value_dp in self.__dp_list:
+             try:
+                 bit_length = max(bit_length, int.bit_length(value_dp.data))
+             except TypeError:
+                 pass
+
+         return bit_length
+
+     @property
+     def column_index(self) -> int:
+         return self.__column_index
+
+     @property
+     def decimal_places(self) -> Optional[int]:
+         return self._decimal_places
+
+     @property
+     def ascii_char_width(self) -> int:
+         return max(self.__header_ascii_char_width, self.__body_ascii_char_width)
+
+     @property
+     def minmax_integer_digits(self) -> MinMaxContainer:
+         return self.__minmax_integer_digits
+
+     @property
+     def minmax_decimal_places(self) -> ListContainer:
+         return self.__minmax_decimal_places
+
+     @property
+     def minmax_additional_format_len(self) -> MinMaxContainer:
+         return self.__minmax_additional_format_len
+
+     def __init__(
+         self,
+         column_index: int,
+         float_type: Optional[FloatType],
+         min_width: int = 0,
+         format_flags: Optional[int] = None,
+         is_formatting_float: bool = True,
+         datetime_format_str: str = DefaultValue.DATETIME_FORMAT,
+         east_asian_ambiguous_width: int = 1,
+         max_precision: int = DefaultValue.MAX_PRECISION,
+     ) -> None:
+         super().__init__(
+             format_flags=format_flags,
+             is_formatting_float=is_formatting_float,
+             datetime_format_str=datetime_format_str,
+             east_asian_ambiguous_width=east_asian_ambiguous_width,
+         )
+
+         self.__header_ascii_char_width = 0
+         self.__body_ascii_char_width = min_width
+         self.__column_index = column_index
+
+         self.__float_type = float_type
+
+         self.__is_calculate = True
+         self.__dp_list: List[DataProperty] = []
+         self.__minmax_integer_digits = MinMaxContainer()
+         self.__minmax_decimal_places = ListContainer()
+         self.__minmax_additional_format_len = MinMaxContainer()
+         self.__max_precision = max_precision
+
+         self.__typecode_bitmap = Typecode.NONE.value
+         self.__calc_typecode_from_bitmap()
+
+         self.__format_map: Dict[Typecode, str] = self._formatter.make_format_map(
+             decimal_places=self._decimal_places
+         )
+
+     def __repr__(self) -> str:
+         element_list = []
+
+         if self.column_index is not None:
+             element_list.append(f"column={self.column_index}")
+
+         element_list.extend(
+             [
+                 f"type={self.typename}",
+                 f"align={self.align.align_string}",
+                 f"ascii_width={self.ascii_char_width}",
+             ]
+         )
+
+         if Integer(self.bit_length).is_type():
+             element_list.append(f"bit_len={self.bit_length}")
+
+         if self.minmax_integer_digits.has_value():
+             if self.minmax_integer_digits.is_same_value():
+                 value = f"int_digits={self.minmax_integer_digits.min_value}"
+             else:
+                 value = f"int_digits=({self.minmax_integer_digits})"
+
+             element_list.append(value)
+
+         if self.minmax_decimal_places.has_value():
+             if self.minmax_decimal_places.is_same_value():
+                 value = f"decimal_places={self.minmax_decimal_places.min_value}"
+             else:
+                 value = f"decimal_places=({self.minmax_decimal_places})"
+
+             element_list.append(value)
+
+         if not self.minmax_additional_format_len.is_zero():
+             if self.minmax_additional_format_len.is_same_value():
+                 value = f"extra_len={self.minmax_additional_format_len.min_value}"
+             else:
+                 value = f"extra_len=({self.minmax_additional_format_len})"
+
+             element_list.append(value)
+
+         return ", ".join(element_list)
+
+     def dp_to_str(self, value_dp: DataProperty) -> str:
+         if value_dp.typecode == Typecode.STRING:
+             return str(value_dp.data)
+
+         try:
+             value = self.__preprocess_value_before_tostring(value_dp)
+         except TypeConversionError:
+             return self.__format_map.get(value_dp.typecode, "{:s}").format(value_dp.data)
+
+         to_string_format_str = self.__get_tostring_format(value_dp)
+
+         try:
+             return to_string_format_str.format(value)
+         except (ValueError, TypeError):
+             pass
+
+         try:
+             return MultiByteStrDecoder(value).unicode_str
+         except ValueError:
+             pass
+
+         return str(value)
+
+     def extend_width(self, ascii_char_width: int) -> None:
+         self.extend_header_width(ascii_char_width)
+         self.extend_body_width(ascii_char_width)
+
+     def extend_header_width(self, ascii_char_width: int) -> None:
+         self.__header_ascii_char_width += ascii_char_width
+
+     def extend_body_width(self, ascii_char_width: int) -> None:
+         self.__body_ascii_char_width += ascii_char_width
+
+     def update_header(self, header_db: DataProperty) -> None:
+         self.__header_ascii_char_width = header_db.ascii_char_width
+
+     def update_body(self, value_dp: DataProperty) -> None:
+         if value_dp.is_include_ansi_escape:
+             assert value_dp.no_ansi_escape_dp
+             value_dp = value_dp.no_ansi_escape_dp
+
+         self.__typecode_bitmap |= value_dp.typecode.value
+         self.__calc_typecode_from_bitmap()
+
+         if value_dp.typecode in (Typecode.REAL_NUMBER, Typecode.INTEGER):
+             self.__minmax_integer_digits.update(value_dp.integer_digits)
+             self.__minmax_decimal_places.update(value_dp.decimal_places)
+             self.__update_decimal_places()
+
+         self.__minmax_additional_format_len.update(value_dp.additional_format_len)
+
+         self.__dp_list.append(value_dp)
+         self.__update_ascii_char_width()
+
+     def merge(self, column_dp: "ColumnDataProperty") -> None:
+         self.__typecode_bitmap |= column_dp.typecode.value
+         self.__calc_typecode_from_bitmap()
+
+         self.__minmax_integer_digits.merge(column_dp.minmax_integer_digits)
+         self.__minmax_decimal_places.merge(column_dp.minmax_decimal_places)
+         self.__update_decimal_places()
+
+         self.__minmax_additional_format_len.merge(column_dp.minmax_additional_format_len)
+
+         self.__body_ascii_char_width = max(self.__body_ascii_char_width, column_dp.ascii_char_width)
+         self.__update_ascii_char_width()
+
+     def begin_update(self) -> None:
+         self.__is_calculate = False
+
+     def end_update(self) -> None:
+         self.__is_calculate = True
+
+         self.__calc_typecode_from_bitmap()
+         self.__update_decimal_places()
+         self.__update_ascii_char_width()
+
+     def __is_not_single_typecode(self, typecode_bitmap: int) -> bool:
+         return bool(
+             self.__typecode_bitmap & typecode_bitmap and self.__typecode_bitmap & ~typecode_bitmap
+         )
+
+     def __is_float_typecode(self) -> bool:
+         FLOAT_TYPECODE_BMP = (
+             Typecode.REAL_NUMBER.value | Typecode.INFINITY.value | Typecode.NAN.value
+         )
+         NUMBER_TYPECODE_BMP = FLOAT_TYPECODE_BMP | Typecode.INTEGER.value
+
+         if self.__is_not_single_typecode(NUMBER_TYPECODE_BMP | Typecode.NULL_STRING.value):
+             return False
+
+         if (
+             bin(self.__typecode_bitmap & (FLOAT_TYPECODE_BMP | Typecode.NULL_STRING.value)).count(
+                 "1"
+             )
+             >= 2
+         ):
+             return True
+
+         if bin(self.__typecode_bitmap & NUMBER_TYPECODE_BMP).count("1") >= 2:
+             return True
+
+         return False
+
+     def __calc_body_ascii_char_width(self) -> int:
+         width_list = [self.__body_ascii_char_width]
+
+         for value_dp in self.__dp_list:
+             if value_dp.is_include_ansi_escape:
+                 assert value_dp.no_ansi_escape_dp
+                 value_dp = value_dp.no_ansi_escape_dp
+
+             width_list.append(
+                 calc_ascii_char_width(self.dp_to_str(value_dp), self._east_asian_ambiguous_width)
+             )
+
+         return max(width_list)
+
+     def __calc_decimal_places(self) -> Optional[int]:
+         if self.minmax_decimal_places.max_value is None:
+             return None
+
+         return min(self.__max_precision, int(self.minmax_decimal_places.max_value))
+
+     def __get_tostring_format(self, value_dp: DataProperty) -> str:
+         if self.typecode == Typecode.STRING:
+             return self.__format_map.get(value_dp.typecode, "{:s}")
+
+         return self.__format_map.get(self.typecode, "{:s}")
+
+     def __get_typecode_from_bitmap(self) -> Typecode:
+         if self.__is_float_typecode():
+             return Typecode.REAL_NUMBER
+
+         if any(
+             [
+                 self.__is_not_single_typecode(Typecode.BOOL.value),
+                 self.__is_not_single_typecode(Typecode.DATETIME.value),
+             ]
+         ):
+             return Typecode.STRING
+
+         typecode_list = [
+             Typecode.STRING,
+             Typecode.REAL_NUMBER,
+             Typecode.INTEGER,
+             Typecode.DATETIME,
+             Typecode.DICTIONARY,
+             Typecode.IP_ADDRESS,
+             Typecode.LIST,
+             Typecode.BOOL,
+             Typecode.INFINITY,
+             Typecode.NAN,
+             Typecode.NULL_STRING,
+         ]
+
+         for typecode in typecode_list:
+             if self.__typecode_bitmap & typecode.value:
+                 return typecode
+
+         if self.__typecode_bitmap == Typecode.NONE.value:
+             return Typecode.NONE
+
+         return Typecode.STRING
+
+     def __update_ascii_char_width(self) -> None:
+         if not self.__is_calculate:
+             return
+
+         self.__body_ascii_char_width = self.__calc_body_ascii_char_width()
+
+     def __update_decimal_places(self) -> None:
+         if not self.__is_calculate:
+             return
+
+         self._decimal_places = self.__calc_decimal_places()
+         self.__format_map = self._formatter.make_format_map(decimal_places=self._decimal_places)
+
+     def __calc_typecode_from_bitmap(self) -> None:
+         if not self.__is_calculate:
+             return
+
+         self._typecode = self.__get_typecode_from_bitmap()
+
+     def __preprocess_value_before_tostring(self, value_dp: DataProperty) -> Any:
+         if self.typecode == value_dp.typecode or self.typecode in [
+             Typecode.STRING,
+             Typecode.BOOL,
+             Typecode.DATETIME,
+         ]:
+             return value_dp.data
+
+         return self.type_class(
+             value_dp.data,
+             strict_level=StrictLevel.MIN,
+             float_type=self.__float_type,
+             strip_ansi_escape=False,
+         ).convert()
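
A sketch of how ColumnDataProperty aggregates per-cell DataProperty values into a column-wide type and width (passing float_type=None simply leaves float conversion to typepy's defaults):

from dataproperty import ColumnDataProperty, DataProperty

col_dp = ColumnDataProperty(column_index=0, float_type=None)
for value in (1, 250, -3):
    col_dp.update_body(DataProperty(value))

print(col_dp.typename)          # "INTEGER"
print(col_dp.ascii_char_width)  # 3: width of the widest rendered cell ("250")
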
llmeval-env/lib/python3.10/site-packages/dataproperty/_common.py ADDED
@@ -0,0 +1,69 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ import copy
+ import itertools
+ from datetime import datetime
+ from decimal import Decimal
+
+ from typepy import StrictLevel, Typecode
+
+ from .typing import StrictLevelMap, TypeValueMap
+
+
+ NOT_QUOTING_FLAGS = {
+     Typecode.BOOL: False,
+     Typecode.DATETIME: False,
+     Typecode.DICTIONARY: False,
+     Typecode.INFINITY: False,
+     Typecode.INTEGER: False,
+     Typecode.IP_ADDRESS: False,
+     Typecode.LIST: False,
+     Typecode.NAN: False,
+     Typecode.NULL_STRING: False,
+     Typecode.NONE: False,
+     Typecode.REAL_NUMBER: False,
+     Typecode.STRING: False,
+ }
+
+ MAX_STRICT_LEVEL_MAP: StrictLevelMap = dict(itertools.product(list(Typecode), [StrictLevel.MAX]))
+ MIN_STRICT_LEVEL_MAP: StrictLevelMap = dict(itertools.product(list(Typecode), [StrictLevel.MIN]))
+
+
+ class DefaultValue:
+     DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z"
+     FLOAT_TYPE = Decimal
+     INF_VALUE = FLOAT_TYPE("inf")
+     NAN_VALUE = FLOAT_TYPE("nan")
+
+     QUOTING_FLAGS = copy.deepcopy(NOT_QUOTING_FLAGS)
+
+     STRICT_LEVEL_MAP: StrictLevelMap = {
+         "default": StrictLevel.MAX,
+         Typecode.BOOL: StrictLevel.MAX,
+         Typecode.DATETIME: StrictLevel.MAX,
+         Typecode.DICTIONARY: StrictLevel.MAX,
+         Typecode.REAL_NUMBER: 1,
+         Typecode.INFINITY: StrictLevel.MIN,
+         Typecode.INTEGER: 1,
+         Typecode.IP_ADDRESS: StrictLevel.MAX,
+         Typecode.LIST: StrictLevel.MAX,
+         Typecode.NAN: StrictLevel.MIN,
+         Typecode.NONE: StrictLevel.MAX,
+         Typecode.NULL_STRING: StrictLevel.MIN,
+         Typecode.STRING: StrictLevel.MIN,
+     }
+
+     TYPE_VALUE_MAP: TypeValueMap = {
+         Typecode.NONE: None,
+         Typecode.INFINITY: INF_VALUE,
+         Typecode.NAN: NAN_VALUE,
+     }
+
+     MAX_WORKERS = 1
+     MAX_PRECISION = 100
+
+
+ def default_datetime_formatter(value: datetime) -> str:
+     return value.strftime(DefaultValue.DATETIME_FORMAT)
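
The constants above are the package-wide defaults; a quick sketch of reading them:

from typepy import Typecode
from dataproperty import MAX_STRICT_LEVEL_MAP, DefaultValue

print(DefaultValue.DATETIME_FORMAT)                      # "%Y-%m-%dT%H:%M:%S%z"
print(DefaultValue.MAX_PRECISION)                        # 100
print(len(MAX_STRICT_LEVEL_MAP) == len(list(Typecode)))  # True: every Typecode mapped to StrictLevel.MAX
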
llmeval-env/lib/python3.10/site-packages/dataproperty/_container.py ADDED
@@ -0,0 +1,193 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ import abc
+ from decimal import Decimal
+ from typing import Any, List, Optional, Sequence, Union
+
+ from typepy import RealNumber
+
+
+ T = Union[int, float, Decimal]
+ NAN = Decimal("nan")
+
+
+ class AbstractContainer(metaclass=abc.ABCMeta):
+     @abc.abstractproperty
+     def min_value(self) -> Optional[Decimal]:  # pragma: no cover
+         pass
+
+     @abc.abstractproperty
+     def max_value(self) -> Optional[Decimal]:  # pragma: no cover
+         pass
+
+     @abc.abstractmethod
+     def mean(self) -> Decimal:  # pragma: no cover
+         pass
+
+     @abc.abstractmethod
+     def update(self, value: Optional[T]) -> None:  # pragma: no cover
+         pass
+
+     @abc.abstractmethod
+     def merge(self, value: "AbstractContainer") -> None:  # pragma: no cover
+         pass
+
+     def __repr__(self) -> str:
+         if not self.has_value():
+             return "None"
+
+         return ", ".join([f"min={self.min_value}", f"max={self.max_value}"])
+
+     def has_value(self) -> bool:
+         return self.min_value is not None and self.max_value is not None
+
+     def is_same_value(self) -> bool:
+         return self.has_value() and self.min_value == self.max_value
+
+     def is_zero(self) -> bool:
+         return self.has_value() and self.min_value == 0 and self.max_value == 0
+
+
+ class ListContainer(AbstractContainer):
+     __slots__ = ("__value_list",)
+
+     @property
+     def min_value(self) -> Optional[Decimal]:
+         try:
+             return min(self.__value_list)
+         except ValueError:
+             return None
+
+     @property
+     def max_value(self) -> Optional[Decimal]:
+         try:
+             return max(self.__value_list)
+         except ValueError:
+             return None
+
+     @property
+     def value_list(self) -> List[Decimal]:
+         return self.__value_list
+
+     def __init__(self, value_list: Optional[List[Decimal]] = None) -> None:
+         if value_list is None:
+             self.__value_list: List[Decimal] = []
+             return
+
+         for value in value_list:
+             self.update(value)
+
+     def mean(self) -> Decimal:
+         try:
+             return Decimal(sum(self.__value_list) / len(self.__value_list))
+         except ZeroDivisionError:
+             return NAN
+
+     def update(self, value: Union[int, float, Decimal, None]) -> None:
+         if value is None:
+             return
+
+         store_value = RealNumber(value).try_convert()
+         if store_value is None:
+             return
+
+         self.__value_list.append(store_value)
+
+     def merge(self, value: "AbstractContainer") -> None:
+         if not isinstance(value, ListContainer):
+             return
+
+         for v in value.value_list:
+             self.update(v)
+
+
+ class MinMaxContainer(AbstractContainer):
+     __slots__ = ("__min_value", "__max_value")
+
+     def __init__(self, value_list: Optional[Sequence[Decimal]] = None) -> None:
+         self.__min_value: Optional[Decimal] = None
+         self.__max_value: Optional[Decimal] = None
+
+         if value_list is None:
+             return
+
+         for value in value_list:
+             self.update(value)
+
+     @property
+     def min_value(self) -> Optional[Decimal]:
+         return self.__min_value
+
+     @property
+     def max_value(self) -> Optional[Decimal]:
+         return self.__max_value
+
+     def __eq__(self, other: Any) -> bool:
+         if not isinstance(other, MinMaxContainer):
+             return False
+
+         return all([self.min_value == other.min_value, self.max_value == other.max_value])
+
+     def __ne__(self, other: Any) -> bool:
+         if not isinstance(other, MinMaxContainer):
+             return True
+
+         return any([self.min_value != other.min_value, self.max_value != other.max_value])
+
+     def __contains__(self, x: T) -> bool:
+         if self.min_value is None:
+             return False
+
+         if self.max_value is None:
+             return False
+
+         return self.min_value <= x <= self.max_value
+
+     def diff(self) -> Decimal:
+         if self.min_value is None:
+             return NAN
+
+         if self.max_value is None:
+             return NAN
+
+         try:
+             return self.max_value - self.min_value
+         except TypeError:
+             return NAN
+
+     def mean(self) -> Decimal:
+         if self.min_value is None:
+             return NAN
+
+         if self.max_value is None:
+             return NAN
+
+         try:
+             return (self.max_value + self.min_value) * Decimal("0.5")
+         except TypeError:
+             return NAN
+
+     def update(self, value: Optional[T]) -> None:
+         if value is None:
+             return
+
+         decimal_value = Decimal(value)
+
+         if self.__min_value is None:
+             self.__min_value = decimal_value
+         else:
+             self.__min_value = min(self.__min_value, decimal_value)
+
+         if self.__max_value is None:
+             self.__max_value = decimal_value
+         else:
+             self.__max_value = max(self.__max_value, decimal_value)
+
+     def merge(self, value: "AbstractContainer") -> None:
+         if not isinstance(value, MinMaxContainer):
+             return
+
+         self.update(value.min_value)
+         self.update(value.max_value)
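
A sketch of MinMaxContainer, which tracks only the extremes of the values it has seen (ListContainer keeps every value and derives min/max/mean from the list):

from dataproperty import MinMaxContainer

container = MinMaxContainer()
for value in (2, 5, -1):
    container.update(value)

print(container.min_value, container.max_value)  # -1 5
print(container.mean())                          # 2.0  ((min + max) * 0.5)
print(container.diff())                          # 6    (max - min)
print(3 in container)                            # True
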
llmeval-env/lib/python3.10/site-packages/dataproperty/_converter.py ADDED
@@ -0,0 +1,90 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ import re
+ from typing import Any, Dict, Optional
+
+ from typepy import Typecode, TypeConversionError
+
+ from ._common import MAX_STRICT_LEVEL_MAP, DefaultValue
+ from ._dataproperty import DataProperty
+ from ._preprocessor import Preprocessor
+ from .typing import DateTimeFormatter, FloatType, StrictLevelMap, TypeValueMap
+
+
+ class DataPropertyConverter:
+     __RE_QUOTE_LINE = re.compile(r"^\s*[\"'].*[\"']\s*$")  # noqa: w605
+     __RE_QUOTE_CHAR = re.compile("[\"']")
+
+     def __init__(
+         self,
+         preprocessor: Preprocessor,
+         datetime_format_str: str,
+         datetime_formatter: Optional[DateTimeFormatter] = None,
+         type_value_map: Optional[TypeValueMap] = None,
+         quoting_flags: Optional[Dict[Typecode, bool]] = None,
+         float_type: Optional[FloatType] = None,
+         strict_level_map: Optional[StrictLevelMap] = None,
+     ) -> None:
+         self.__preprocessor = preprocessor
+         self.__type_value_map: TypeValueMap = (
+             type_value_map if type_value_map else DefaultValue.TYPE_VALUE_MAP
+         )
+         self.__quoting_flags: Dict[Typecode, bool] = (
+             quoting_flags if quoting_flags else DefaultValue.QUOTING_FLAGS
+         )
+
+         self.__datetime_formatter = datetime_formatter
+         self.__datetime_format_str = datetime_format_str
+         self.__float_type = float_type
+         self.__strict_level_map = strict_level_map
+
+     def convert(self, dp_value: DataProperty) -> DataProperty:
+         try:
+             return self.__create_dataproperty(self.__convert_value(dp_value))
+         except TypeConversionError:
+             pass
+
+         if not self.__quoting_flags.get(dp_value.typecode):
+             if self.__preprocessor.is_escape_html_tag:
+                 return self.__create_dataproperty(dp_value.to_str())
+
+             return dp_value
+
+         return self.__create_dataproperty(self.__apply_quote(dp_value.typecode, dp_value.to_str()))
+
+     def __create_dataproperty(self, value: Any) -> DataProperty:
+         return DataProperty(
+             value,
+             preprocessor=self.__preprocessor,
+             float_type=self.__float_type,
+             datetime_format_str=self.__datetime_format_str,
+             strict_level_map=MAX_STRICT_LEVEL_MAP,
+         )
+
+     def __apply_quote(self, typecode: Typecode, data: Any) -> Any:
+         if not self.__quoting_flags.get(typecode):
+             return data
+
+         try:
+             if self.__RE_QUOTE_LINE.search(data):
+                 return data
+         except TypeError:
+             return data
+
+         return '"{}"'.format(self.__RE_QUOTE_CHAR.sub('\\"', data.replace("\\", "\\\\")))
+
+     def __convert_value(self, dp_value: DataProperty) -> Any:
+         if dp_value.typecode in self.__type_value_map:
+             return self.__apply_quote(dp_value.typecode, self.__type_value_map[dp_value.typecode])
+
+         if dp_value.typecode == Typecode.DATETIME and self.__datetime_formatter:
+             try:
+                 return self.__apply_quote(
+                     dp_value.typecode, self.__datetime_formatter(dp_value.data)
+                 )
+             except TypeError:
+                 raise TypeConversionError
+
+         raise TypeConversionError("no need to convert")
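
DataPropertyConverter is an internal helper (it is not re-exported from __init__.py); a sketch of its quoting path, with assumed values for the arguments not shown here:

from typepy import Typecode
from dataproperty import DataProperty, DefaultValue, Preprocessor
from dataproperty._converter import DataPropertyConverter  # internal module

converter = DataPropertyConverter(
    preprocessor=Preprocessor(),
    datetime_format_str=DefaultValue.DATETIME_FORMAT,
    quoting_flags={Typecode.STRING: True},  # quote only string cells
)
quoted = converter.convert(DataProperty("abc"))
print(quoted.data)  # prints "abc" with the surrounding double quotes included
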
llmeval-env/lib/python3.10/site-packages/dataproperty/_dataproperty.py ADDED
@@ -0,0 +1,382 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ import typing
+ from decimal import Decimal
+ from typing import Any, Optional, cast
+
+ import typepy
+ from mbstrdecoder import MultiByteStrDecoder
+ from typepy import (
+     Bool,
+     DateTime,
+     Dictionary,
+     Infinity,
+     Integer,
+     IpAddress,
+     Nan,
+     NoneType,
+     NullString,
+     RealNumber,
+     StrictLevel,
+     String,
+     Typecode,
+     TypeConversionError,
+ )
+ from typepy.type import AbstractType
+
+ from ._align import Align
+ from ._align_getter import align_getter
+ from ._base import DataPeropertyBase
+ from ._common import DefaultValue
+ from ._function import calc_ascii_char_width, get_number_of_digit
+ from ._preprocessor import Preprocessor
+ from .typing import FloatType, StrictLevelMap, TypeHint
+
+
+ class DataProperty(DataPeropertyBase):
+     __slots__ = (
+         "__data",
+         "__no_ansi_escape_data",
+         "__align",
+         "__integer_digits",
+         "__additional_format_len",
+         "__length",
+         "__ascii_char_width",
+     )
+
+     __type_class_list: typing.List[AbstractType] = [
+         NoneType,
+         Integer,
+         Infinity,
+         Nan,
+         IpAddress,
+         RealNumber,
+         Bool,
+         typepy.List,
+         Dictionary,
+         DateTime,
+         NullString,
+         String,
+     ]
+
+     def __init__(
+         self,
+         data: Any,
+         preprocessor: Optional[Preprocessor] = None,
+         type_hint: TypeHint = None,
+         float_type: Optional[FloatType] = None,
+         format_flags: Optional[int] = None,
+         datetime_format_str: str = DefaultValue.DATETIME_FORMAT,
+         strict_level_map: Optional[StrictLevelMap] = None,
+         east_asian_ambiguous_width: int = 1,
+     ) -> None:
+         super().__init__(
+             format_flags=format_flags,
+             is_formatting_float=True,
+             datetime_format_str=datetime_format_str,
+             east_asian_ambiguous_width=east_asian_ambiguous_width,
+         )
+
+         self.__additional_format_len: Optional[int] = None
+         self.__align: Optional[Align] = None
+         self.__ascii_char_width: Optional[int] = None
+         self.__integer_digits: Optional[int] = None
+         self.__length: Optional[int] = None
+
+         if preprocessor is None:
+             preprocessor = Preprocessor()
+
+         data, no_ansi_escape_data = preprocessor.preprocess(data)
+
+         self.__set_data(data, type_hint, float_type, strict_level_map)
+
+         if no_ansi_escape_data is None or len(data) == len(no_ansi_escape_data):
+             self.__no_ansi_escape_data: Optional[DataProperty] = None
+         else:
+             self.__no_ansi_escape_data = DataProperty(no_ansi_escape_data, float_type=float_type)
+
+     def __eq__(self, other: Any) -> bool:
+         if not isinstance(other, DataProperty):
+             return False
+
+         if self.typecode != other.typecode:
+             return False
+
+         if self.typecode == Typecode.NAN:
+             return True
+
+         return self.data == other.data
+
+     def __ne__(self, other: Any) -> bool:
+         if not isinstance(other, DataProperty):
+             return True
+
+         if self.typecode != other.typecode:
+             return True
+
+         if self.typecode == Typecode.NAN:
+             return False
+
+         return self.data != other.data
+
+     def __repr__(self) -> str:
+         element_list = []
+
+         if self.typecode == Typecode.DATETIME:
+             element_list.append(f"data={str(self.data):s}")
+         else:
+             try:
+                 element_list.append("data=" + self.to_str())
+             except UnicodeEncodeError:
+                 element_list.append(f"data={MultiByteStrDecoder(self.data).unicode_str}")
+
+         element_list.extend(
+             [
+                 f"type={self.typename:s}",
+                 f"align={self.align.align_string}",
+                 f"ascii_width={self.ascii_char_width:d}",
+             ]
+         )
+
+         if Integer(self.length).is_type():
+             element_list.append(f"length={self.length}")
+
+         if Integer(self.integer_digits).is_type():
+             element_list.append(f"int_digits={self.integer_digits}")
+
+         if Integer(self.decimal_places).is_type():
+             element_list.append(f"decimal_places={self.decimal_places}")
+
+         if Integer(self.additional_format_len).is_type():
+             element_list.append(f"extra_len={self.additional_format_len}")
+
+         return ", ".join(element_list)
+
+     @property
+     def align(self) -> Align:
+         if not self.__align:
+             if self.is_include_ansi_escape:
+                 assert self.no_ansi_escape_dp
+                 self.__align = self.no_ansi_escape_dp.align
+             else:
+                 self.__align = align_getter.get_align_from_typecode(self.typecode)
+
+             assert self.__align
+
+         return self.__align
+
+     @property
+     def decimal_places(self) -> Optional[int]:
+         """
+         :return:
+             Decimal places if the ``data`` type either ``float`` or
+             ``decimal.Decimal``. Returns ``0`` if the ``data`` type is ``int``.
+             Otherwise, returns ``float("nan")``.
+         :rtype: int
+         """
+
+         if self._decimal_places is None:
+             self.__set_digit()
+
+         return self._decimal_places
+
+     @property
+     def data(self) -> Any:
+         """
+         :return: Original data value.
+         :rtype: Original data type.
+         """
+
+         return self.__data
+
+     @property
+     def is_include_ansi_escape(self) -> bool:
+         if self.no_ansi_escape_dp is None:
+             return False
+
+         return self.length != self.no_ansi_escape_dp.length
+
+     @property
+     def no_ansi_escape_dp(self) -> Optional["DataProperty"]:
+         return self.__no_ansi_escape_data
+
+     @property
+     def length(self) -> Optional[int]:
+         """
+         :return: Length of the ``data``.
+         :rtype: int
+         """
+
+         if self.__length is None:
+             self.__length = self.__get_length()
+
+         return self.__length
+
+     @property
+     def ascii_char_width(self) -> int:
+         if self.__ascii_char_width is None:
+             self.__ascii_char_width = self.__calc_ascii_char_width()
+
+         return self.__ascii_char_width
+
+     @property
+     def integer_digits(self) -> Optional[int]:
+         """
+         :return:
+             Integer digits if the ``data`` type either
+             ``int``/``float``/``decimal.Decimal``.
+             Otherwise, returns ``None``.
+         :rtype: int
+         """
+
+         if self.__integer_digits is None:
+             self.__set_digit()
+
+         return self.__integer_digits
+
+     @property
+     def additional_format_len(self) -> int:
+         if self.__additional_format_len is None:
+             self.__additional_format_len = self.__get_additional_format_len()
+
+         return self.__additional_format_len
+
+     def get_padding_len(self, ascii_char_width: int) -> int:
+         if self.typecode in (Typecode.LIST, Typecode.DICTIONARY):
+             unicode_str_len = DataProperty(MultiByteStrDecoder(str(self.data)).unicode_str).length
+             assert unicode_str_len
+             return max(
+                 ascii_char_width - (self.ascii_char_width - unicode_str_len),
+                 0,
+             )
+
+         try:
+             return max(ascii_char_width - (self.ascii_char_width - cast(int, self.length)), 0)
+         except TypeError:
+             return ascii_char_width
+
+     def to_str(self) -> str:
+         return self.format_str.format(self.data)
+
+     def __get_additional_format_len(self) -> int:
+         if not RealNumber(self.data, strip_ansi_escape=False).is_type():
+             return 0
+
+         format_len = 0
+
+         if Decimal(self.data) < 0:
+             # for minus character
+             format_len += 1
+
+         return format_len
+
+     def __get_base_float_len(self) -> int:
+         assert self.integer_digits is not None
+         assert self.decimal_places is not None
+
+         if any([self.integer_digits < 0, self.decimal_places < 0]):
+             raise ValueError("integer digits and decimal places must be greater or equals to zero")
+
+         float_len = self.integer_digits + self.decimal_places
+         if self.decimal_places > 0:
+             # for dot
+             float_len += 1
+
+         return float_len
+
+     def __get_length(self) -> Optional[int]:
+         if self.typecode in (Typecode.DICTIONARY, Typecode.LIST, Typecode.STRING):
+             return len(self.data)
+
+         return None
+
+     def __calc_ascii_char_width(self) -> int:
+         if self.typecode == Typecode.INTEGER:
+             return cast(int, self.integer_digits) + self.additional_format_len
+
+         if self.typecode == Typecode.REAL_NUMBER:
+             return self.__get_base_float_len() + self.additional_format_len
+
+         if self.typecode == Typecode.DATETIME:
+             try:
+                 return len(self.to_str())
+             except ValueError:
+                 # reach to this line if the year <1900.
+                 # the datetime strftime() methods require year >= 1900.
+                 return len(str(self.data))
+
+         if self.is_include_ansi_escape:
+             assert self.no_ansi_escape_dp
+             return self.no_ansi_escape_dp.ascii_char_width
+
+         try:
+             unicode_str = MultiByteStrDecoder(self.data).unicode_str
+         except ValueError:
+             unicode_str = self.to_str()
+
+         return calc_ascii_char_width(unicode_str, self._east_asian_ambiguous_width)
+
+     def __set_data(
+         self,
+         data: Any,
+         type_hint: TypeHint,
+         float_type: Optional[FloatType],
+         strict_level_map: Optional[StrictLevelMap],
+     ) -> None:
+         if float_type is None:
+             float_type = DefaultValue.FLOAT_TYPE
+
+         if strict_level_map is None:
+             strict_level_map = DefaultValue.STRICT_LEVEL_MAP
+
+         if type_hint:
+             type_obj = type_hint(
+                 data, strict_level=StrictLevel.MIN, float_type=float_type, strip_ansi_escape=False
+             )
+             self._typecode = type_obj.typecode
+             self.__data = type_obj.try_convert()
+
+             if type_hint(
+                 self.__data,
+                 strict_level=StrictLevel.MAX,
+                 float_type=float_type,
+                 strip_ansi_escape=False,
+             ).is_type():
+                 return
+
+         for type_class in self.__type_class_list:
+             strict_level = strict_level_map.get(
+                 type_class(None).typecode, strict_level_map.get("default", StrictLevel.MAX)
+             )
+
+             if self.__try_convert_type(data, type_class, strict_level, float_type):
+                 return
+
+         raise TypeConversionError(
+             f"failed to convert: data={data}, strict_level={strict_level_map}"
+         )
+
+     def __set_digit(self) -> None:
+         integer_digits, decimal_places = get_number_of_digit(self.__data)
+         self.__integer_digits = integer_digits
+         self._decimal_places = decimal_places
+
+     def __try_convert_type(
+         self,
+         data: Any,
+         type_class: AbstractType,
+         strict_level: int,
+         float_type: Optional[FloatType],
+     ) -> bool:
+         type_obj = type_class(data, strict_level, float_type=float_type, strip_ansi_escape=False)
+
+         try:
+             self.__data = type_obj.convert()
+         except TypeConversionError:
+             return False
+
+         self._typecode = type_obj.typecode
+
+         return True
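
A sketch of DataProperty on a single value; it converts the raw value via typepy and derives the display-related attributes lazily:

from typepy import Typecode
from dataproperty import Align, DataProperty

dp = DataProperty("-1.25")
print(dp.typecode is Typecode.REAL_NUMBER)   # True: the string is converted to a number
print(dp.data)                               # -1.25 (a Decimal under the default float_type)
print(dp.align is Align.RIGHT)               # True
print(dp.integer_digits, dp.decimal_places)  # 1 2
print(dp.ascii_char_width)                   # 5 (len("-1.25"))
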
llmeval-env/lib/python3.10/site-packages/dataproperty/_extractor.py ADDED
@@ -0,0 +1,814 @@
+ """
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
+ """
+
+ import copy
+ import enum
+ import sys
+ import typing
+ from collections import Counter
+ from decimal import Decimal
+ from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union, cast
+
+ import typepy
+ from typepy import (
+     Bool,
+     DateTime,
+     Dictionary,
+     Infinity,
+     Integer,
+     IpAddress,
+     Nan,
+     NoneType,
+     NullString,
+     RealNumber,
+     StrictLevel,
+     String,
+     Typecode,
+     is_empty_sequence,
+ )
+ from typepy.type import AbstractType
+
+ from ._column import ColumnDataProperty
+ from ._common import MIN_STRICT_LEVEL_MAP, DefaultValue
+ from ._converter import DataPropertyConverter
+ from ._dataproperty import DataProperty
+ from ._formatter import Format
+ from ._preprocessor import Preprocessor
+ from .logger import logger
+ from .typing import (
+     DateTimeFormatter,
+     StrictLevelMap,
+     TransFunc,
+     TypeHint,
+     TypeValueMap,
+     normalize_type_hint,
+ )
+
+
+ DataPropertyMatrix = List[List[DataProperty]]
+
+
+ @enum.unique
+ class MatrixFormatting(enum.Enum):
+     # raise exception if the matrix is not properly formatted
+     EXCEPTION = 1 << 1
+
+     # trim to the minimum size column
+     TRIM = 1 << 2
+
+     # Append None values to columns so that it is the same as the maximum
+     # column size.
+     FILL_NONE = 1 << 3
+
+     HEADER_ALIGNED = 1 << 4
+
+
+ class DataPropertyExtractor:
+     """
+     .. py:attribute:: quoting_flags
+
+         Configurations to add double quote to for each items in a matrix,
+         where |Typecode| of table-value is |True| in the ``quote_flag_table``
+         mapping table. ``quote_flag_table`` should be a dictionary.
+         And is ``{ Typecode : bool }``. Defaults to:
+
+         .. code-block:: json
+             :caption: The default values
+
+             {
+                 Typecode.BOOL: False,
+                 Typecode.DATETIME: False,
+                 Typecode.DICTIONARY: False,
+                 Typecode.INFINITY: False,
+                 Typecode.INTEGER: False,
+                 Typecode.IP_ADDRESS: False,
+                 Typecode.LIST: False,
+                 Typecode.NAN: False,
+                 Typecode.NULL_STRING: False,
+                 Typecode.NONE: False,
+                 Typecode.REAL_NUMBER: False,
+                 Typecode.STRING: False,
+             }
+     """
+
+     def __init__(self, max_precision: Optional[int] = None) -> None:
+         self.max_workers = DefaultValue.MAX_WORKERS
+
+         if max_precision is None:
+             self.__max_precision = DefaultValue.MAX_PRECISION
+         else:
+             self.__max_precision = max_precision
+
+         self.__headers: Sequence[str] = []
+         self.__default_type_hint: TypeHint = None
+         self.__col_type_hints: List[TypeHint] = []
+
+         self.__strip_str_header: Optional[str] = None
+         self.__is_formatting_float = True
+         self.__min_col_ascii_char_width = 0
+         self.__default_format_flags = Format.NONE
+         self.__format_flags_list: Sequence[int] = []
+         self.__float_type: Union[Type[float], Type[Decimal], None] = None
+         self.__datetime_format_str = DefaultValue.DATETIME_FORMAT
+         self.__strict_level_map = copy.deepcopy(
+             cast(Dict[Union[Typecode, str], int], DefaultValue.STRICT_LEVEL_MAP)
+         )
+         self.__east_asian_ambiguous_width = 1
+
+         self.__preprocessor = Preprocessor()
+
+         self.__type_value_map: Mapping[Typecode, Union[float, Decimal, None]] = copy.deepcopy(
+             DefaultValue.TYPE_VALUE_MAP
+         )
+
+         self.__trans_func_list: List[TransFunc] = []
+         self.__quoting_flags = copy.deepcopy(DefaultValue.QUOTING_FLAGS)
+         self.__datetime_formatter: Optional[DateTimeFormatter] = None
+         self.__matrix_formatting = MatrixFormatting.TRIM
+         self.__dp_converter: DataPropertyConverter
+
+         self.__clear_cache()
+
+     def __clear_cache(self) -> None:
+         self.__update_dp_converter()
+         self.__dp_cache_zero = self.__to_dp_raw(0)
+         self.__dp_cache_one = self.__to_dp_raw(1)
+         self.__dp_cache_true = self.__to_dp_raw(True)
+         self.__dp_cache_false = self.__to_dp_raw(False)
+         self.__dp_cache_map = {None: self.__to_dp_raw(None), "": self.__to_dp_raw("")}
+
+     @property
+     def headers(self) -> Sequence[str]:
+         return self.__headers
+
+     @headers.setter
+     def headers(self, value: Sequence[str]) -> None:
+         if self.__headers == value:
+             return
+
+         self.__headers = value
+         self.__clear_cache()
+
+     @property
+     def default_type_hint(self) -> TypeHint:
+         return self.__default_type_hint
+
+     @default_type_hint.setter
+     def default_type_hint(self, value: TypeHint) -> None:
+         if self.__default_type_hint == value:
+             return
+
+         self.__default_type_hint = value
+         self.__clear_cache()
+
+     @property
+     def column_type_hints(self) -> List[TypeHint]:
+         return self.__col_type_hints
+
+     @column_type_hints.setter
+     def column_type_hints(self, value: Sequence[Union[str, TypeHint]]) -> None:
+         normalized_type_hints: List[TypeHint] = []
+
+         for type_hint in value:
+             type_hint = normalize_type_hint(type_hint)
+             if type_hint not in (
+                 Bool,
+                 DateTime,
+                 Dictionary,
+                 Infinity,
+                 Integer,
+                 IpAddress,
+                 typepy.List,
+                 Nan,
+                 NoneType,
+                 RealNumber,
+                 String,
+                 NullString,
+                 None,
+             ):
+                 raise ValueError(f"invalid type hint: {type(type_hint)}")
+
+             normalized_type_hints.append(type_hint)
+
+         if self.__col_type_hints == normalized_type_hints:
+             return
+
+         self.__col_type_hints = normalized_type_hints
+         self.__clear_cache()
+
+     @property
+     def is_formatting_float(self) -> bool:
+         return self.__is_formatting_float
+
+     @is_formatting_float.setter
+     def is_formatting_float(self, value: bool) -> None:
+         self.__is_formatting_float = value
+
+     @property
+     def max_precision(self) -> int:
+         return self.__max_precision
+
+     @max_precision.setter
+     def max_precision(self, value: int) -> None:
+         if self.__max_precision == value:
+             return
+
+         self.__max_precision = value
+         self.__clear_cache()
+
+     @property
+     def preprocessor(self) -> Preprocessor:
+         return self.__preprocessor
+
+     @preprocessor.setter
+     def preprocessor(self, value: Preprocessor) -> None:
+         if self.preprocessor == value:
+             return
+
+         self.__preprocessor = value
+         self.__update_dp_converter()
+
+     @property
+     def strip_str_header(self) -> Optional[str]:
+         return self.__strip_str_header
+
+     @strip_str_header.setter
+     def strip_str_header(self, value: str) -> None:
+         if self.__strip_str_header == value:
+             return
+
+         self.__strip_str_header = value
+         self.__clear_cache()
+
+     @property
+     def min_column_width(self) -> int:
+         return self.__min_col_ascii_char_width
+
+     @min_column_width.setter
+     def min_column_width(self, value: int) -> None:
+         if self.__min_col_ascii_char_width == value:
+             return
+
+         self.__min_col_ascii_char_width = value
+         self.__clear_cache()
+
+     @property
+     def default_format_flags(self) -> int:
+         return self.__default_format_flags
+
+     @default_format_flags.setter
+     def default_format_flags(self, value: int) -> None:
+         if self.__default_format_flags == value:
+             return
+
+         self.__default_format_flags = value
+         self.__clear_cache()
+
+     @property
+     def format_flags_list(self) -> Sequence[int]:
+         return self.__format_flags_list
+
+     @format_flags_list.setter
+     def format_flags_list(self, value: Sequence[int]) -> None:
+         if self.__format_flags_list == value:
+             return
+
+         self.__format_flags_list = value
+         self.__clear_cache()
+
+     @property
+     def float_type(self) -> Union[Type[float], Type[Decimal], None]:
+         return self.__float_type
+
+     @float_type.setter
+     def float_type(self, value: Union[Type[float], Type[Decimal]]) -> None:
+         if self.__float_type == value:
+             return
+
+         self.__float_type = value
+         self.__clear_cache()
+
+     @property
+     def datetime_format_str(self) -> str:
+         return self.__datetime_format_str
+
+     @datetime_format_str.setter
+     def datetime_format_str(self, value: str) -> None:
+         if self.__datetime_format_str == value:
+             return
+
+         self.__datetime_format_str = value
+         self.__clear_cache()
+
+     @property
+     def strict_level_map(self) -> StrictLevelMap:
+         return self.__strict_level_map
+
+     @strict_level_map.setter
+     def strict_level_map(self, value: StrictLevelMap) -> None:
+         if self.__strict_level_map == value:
+             return
+
+         self.__strict_level_map = cast(Dict[Union[Typecode, str], int], value)
+         self.__clear_cache()
+
+     @property
+     def east_asian_ambiguous_width(self) -> int:
+         return self.__east_asian_ambiguous_width
+
+     @east_asian_ambiguous_width.setter
+     def east_asian_ambiguous_width(self, value: int) -> None:
+         if self.__east_asian_ambiguous_width == value:
+             return
+
+         self.__east_asian_ambiguous_width = value
+         self.__clear_cache()
+
+     @property
+     def type_value_map(self) -> TypeValueMap:
+         return self.__type_value_map
+
+     @type_value_map.setter
+     def type_value_map(self, value: TypeValueMap) -> None:
+         if self.__type_value_map == value:
+             return
+
+         self.__type_value_map = value
+         self.__clear_cache()
+
+     def register_trans_func(self, trans_func: TransFunc) -> None:
+         self.__trans_func_list.insert(0, trans_func)
+         self.__clear_cache()
+
+     @property
+     def quoting_flags(self) -> Dict[Typecode, bool]:
+         return self.__quoting_flags
+
+     @quoting_flags.setter
+     def quoting_flags(self, value: Dict[Typecode, bool]) -> None:
+         if self.__quoting_flags == value:
+             return
+
+         self.__quoting_flags = value
+         self.__clear_cache()
+
+     @property
+     def datetime_formatter(self) -> Optional[DateTimeFormatter]:
358
+ return self.__datetime_formatter
359
+
360
+ @datetime_formatter.setter
361
+ def datetime_formatter(self, value: Optional[DateTimeFormatter]) -> None:
362
+ if self.__datetime_formatter == value:
363
+ return
364
+
365
+ self.__datetime_formatter = value
366
+ self.__clear_cache()
367
+
368
+ @property
369
+ def matrix_formatting(self) -> MatrixFormatting:
370
+ return self.__matrix_formatting
371
+
372
+ @matrix_formatting.setter
373
+ def matrix_formatting(self, value: MatrixFormatting) -> None:
374
+ if self.__matrix_formatting == value:
375
+ return
376
+
377
+ self.__matrix_formatting = value
378
+ self.__clear_cache()
379
+
380
+ @property
381
+ def max_workers(self) -> int:
382
+ assert self.__max_workers
383
+
384
+ return self.__max_workers
385
+
386
+ @max_workers.setter
387
+ def max_workers(self, value: Optional[int]) -> None:
388
+ try:
389
+ from _multiprocessing import SemLock, sem_unlink # noqa
390
+ except ImportError:
391
+ logger.debug("This platform lacks a functioning sem_open implementation")
392
+ value = 1
393
+
394
+ if "pytest" in sys.modules and value != 1:
395
+ logger.debug("set max_workers to 1 to avoid deadlock when executed from pytest")
396
+ value = 1
397
+
398
+ self.__max_workers = value
399
+ if not self.__max_workers:
400
+ self.__max_workers = DefaultValue.MAX_WORKERS
401
+
402
+ def to_dp(self, value: Any) -> DataProperty:
403
+ self.__update_dp_converter()
404
+
405
+ return self.__to_dp(value)
406
+
407
+ def to_dp_list(self, values: Sequence[Any]) -> List[DataProperty]:
408
+ if is_empty_sequence(values):
409
+ return []
410
+
411
+ self.__update_dp_converter()
412
+
413
+ return self._to_dp_list(values)
414
+
415
+ def to_column_dp_list(
416
+ self,
417
+ value_dp_matrix: Any,
418
+ previous_column_dp_list: Optional[Sequence[ColumnDataProperty]] = None,
419
+ ) -> List[ColumnDataProperty]:
420
+ col_dp_list = self.__get_col_dp_list_base()
421
+
422
+ logger.debug("converting to column dataproperty:")
423
+
424
+ logs = [" params:"]
425
+ if self.headers:
426
+ logs.append(f" headers={len(self.headers)}")
427
+ logs.extend(
428
+ [
429
+ " prev_col_count={}".format(
430
+ len(previous_column_dp_list) if previous_column_dp_list else None
431
+ ),
432
+ f" matrix_formatting={self.matrix_formatting}",
433
+ ]
434
+ )
435
+ if self.column_type_hints:
436
+ logs.append(
437
+ " column_type_hints=({})".format(
438
+ ", ".join(
439
+ [
440
+ type_hint.__name__ if type_hint else "none"
441
+ for type_hint in self.column_type_hints
442
+ ]
443
+ )
444
+ )
445
+ )
446
+ else:
447
+ logs.append(" column_type_hints=()")
448
+
449
+ for log in logs:
450
+ logger.debug(log)
451
+
452
+ logger.debug(" results:")
453
+ for col_idx, value_dp_list in enumerate(zip(*value_dp_matrix)):
454
+ try:
455
+ col_dp_list[col_idx]
456
+ except IndexError:
457
+ col_dp_list.append(
458
+ ColumnDataProperty(
459
+ column_index=col_idx,
460
+ float_type=self.float_type,
461
+ min_width=self.min_column_width,
462
+ format_flags=self.__get_format_flags(col_idx),
463
+ is_formatting_float=self.is_formatting_float,
464
+ datetime_format_str=self.datetime_format_str,
465
+ east_asian_ambiguous_width=self.east_asian_ambiguous_width,
466
+ max_precision=self.__max_precision,
467
+ )
468
+ )
469
+
470
+ col_dp = col_dp_list[col_idx]
471
+ col_dp.begin_update()
472
+
473
+ try:
474
+ col_dp.merge(previous_column_dp_list[col_idx]) # type: ignore
475
+ except (TypeError, IndexError):
476
+ pass
477
+
478
+ for value_dp in value_dp_list:
479
+ col_dp.update_body(value_dp)
480
+
481
+ col_dp.end_update()
482
+
483
+ logger.debug(f" {str(col_dp):s}")
484
+
485
+ return col_dp_list
486
+
487
+ def to_dp_matrix(self, value_matrix: Sequence[Sequence[Any]]) -> DataPropertyMatrix:
488
+ self.__update_dp_converter()
489
+ logger.debug(f"max_workers={self.max_workers}, preprocessor={self.__preprocessor}")
490
+
491
+ value_matrix = self.__strip_data_matrix(value_matrix)
492
+
493
+ if self.__is_dp_matrix(value_matrix):
494
+ logger.debug("already a dataproperty matrix")
495
+ return value_matrix # type: ignore
496
+
497
+ if self.max_workers <= 1:
498
+ return self.__to_dp_matrix_st(value_matrix)
499
+
500
+ return self.__to_dp_matrix_mt(value_matrix)
501
+
502
+ def to_header_dp_list(self) -> List[DataProperty]:
503
+ self.__update_dp_converter()
504
+
505
+ preprocessor = copy.deepcopy(self.__preprocessor)
506
+ preprocessor.strip_str = self.strip_str_header
507
+
508
+ return self._to_dp_list(
509
+ self.headers,
510
+ type_hint=String,
511
+ preprocessor=preprocessor,
512
+ strict_level_map=MIN_STRICT_LEVEL_MAP,
513
+ )
514
+
515
+ def update_preprocessor(self, **kwargs: Any) -> bool:
516
+ is_updated = self.__preprocessor.update(**kwargs)
517
+ self.__update_dp_converter()
518
+
519
+ return is_updated
520
+
521
+ def update_strict_level_map(self, value: StrictLevelMap) -> bool:
522
+ org = copy.deepcopy(self.__strict_level_map)
523
+ self.__strict_level_map.update(value)
524
+
525
+ if org == self.__strict_level_map:
526
+ return False
527
+
528
+ self.__clear_cache()
529
+
530
+ return True
531
+
532
+ """
533
+ def update_dict(self, lhs: Mapping, rhs: Mapping) -> bool:
534
+ is_updated = False
535
+
536
+ for key, value in rhs.items():
537
+ if key not in lhs:
538
+ lhs[key] = value
539
+ continue
540
+
541
+ if getattr(lhs, key) == value:
542
+ continue
543
+
544
+ setattr(lhs, key, value)
545
+ is_updated = True
546
+
547
+ return is_updated
548
+ """
549
+
550
+ @staticmethod
551
+ def __is_dp_matrix(value: Any) -> bool:
552
+ try:
553
+ return isinstance(value[0][0], DataProperty)
554
+ except (TypeError, IndexError):
555
+ return False
556
+
557
+ def __get_col_type_hint(self, col_idx: int) -> TypeHint:
558
+ try:
559
+ return self.column_type_hints[col_idx]
560
+ except (TypeError, IndexError):
561
+ return self.default_type_hint
562
+
563
+ def __get_format_flags(self, col_idx: int) -> int:
564
+ try:
565
+ return self.format_flags_list[col_idx]
566
+ except (TypeError, IndexError):
567
+ return self.__default_format_flags
568
+
569
+ def __to_dp(
570
+ self,
571
+ data: Any,
572
+ type_hint: TypeHint = None,
573
+ preprocessor: Optional[Preprocessor] = None,
574
+ strict_level_map: Optional[StrictLevelMap] = None,
575
+ ) -> DataProperty:
576
+ for trans_func in self.__trans_func_list:
577
+ data = trans_func(data)
578
+
579
+ if type_hint:
580
+ return self.__to_dp_raw(
581
+ data,
582
+ type_hint=type_hint,
583
+ preprocessor=preprocessor,
584
+ strict_level_map=strict_level_map,
585
+ )
586
+
587
+ try:
588
+ if data in self.__dp_cache_map:
589
+ return self.__dp_cache_map[data]
590
+ except TypeError:
591
+ # unhashable type
592
+ pass
593
+
594
+ if data == 0:
595
+ if data is False:
596
+ return self.__dp_cache_false
597
+ return self.__dp_cache_zero
598
+ if data == 1:
599
+ if data is True:
600
+ return self.__dp_cache_true
601
+ return self.__dp_cache_one
602
+
603
+ return self.__to_dp_raw(
604
+ data, type_hint=type_hint, preprocessor=preprocessor, strict_level_map=strict_level_map
605
+ )
606
+
607
+ def __to_dp_raw(
608
+ self,
609
+ data: Any,
610
+ type_hint: TypeHint = None,
611
+ preprocessor: Optional[Preprocessor] = None,
612
+ strict_level_map: Optional[StrictLevelMap] = None,
613
+ ) -> DataProperty:
614
+ if preprocessor:
615
+ preprocessor = Preprocessor(
616
+ dequote=preprocessor.dequote,
617
+ line_break_handling=preprocessor.line_break_handling,
618
+ line_break_repl=preprocessor.line_break_repl,
619
+ strip_str=preprocessor.strip_str,
620
+ is_escape_formula_injection=preprocessor.is_escape_formula_injection,
621
+ )
622
+ else:
623
+ preprocessor = Preprocessor(
624
+ dequote=self.preprocessor.dequote,
625
+ line_break_handling=self.preprocessor.line_break_handling,
626
+ line_break_repl=self.preprocessor.line_break_repl,
627
+ strip_str=self.preprocessor.strip_str,
628
+ is_escape_formula_injection=self.__preprocessor.is_escape_formula_injection,
629
+ )
630
+
631
+ value_dp = DataProperty(
632
+ data,
633
+ preprocessor=preprocessor,
634
+ type_hint=(type_hint if type_hint is not None else self.default_type_hint),
635
+ float_type=self.float_type,
636
+ datetime_format_str=self.datetime_format_str,
637
+ strict_level_map=(strict_level_map if type_hint is not None else self.strict_level_map),
638
+ east_asian_ambiguous_width=self.east_asian_ambiguous_width,
639
+ )
640
+
641
+ return self.__dp_converter.convert(value_dp)
642
+
643
+ def __to_dp_matrix_st(self, value_matrix: Sequence[Sequence[Any]]) -> DataPropertyMatrix:
644
+ return list(
645
+ zip( # type: ignore
646
+ *(
647
+ _to_dp_list_helper(
648
+ self,
649
+ col_idx,
650
+ values,
651
+ self.__get_col_type_hint(col_idx),
652
+ self.__preprocessor,
653
+ )[1]
654
+ for col_idx, values in enumerate(zip(*value_matrix))
655
+ )
656
+ )
657
+ )
658
+
659
+ def __to_dp_matrix_mt(self, value_matrix: Sequence[Sequence[Any]]) -> DataPropertyMatrix:
660
+ from concurrent import futures
661
+
662
+ col_data_map = {}
663
+
664
+ with futures.ProcessPoolExecutor(self.max_workers) as executor:
665
+ future_list = [
666
+ executor.submit(
667
+ _to_dp_list_helper,
668
+ self,
669
+ col_idx,
670
+ values,
671
+ self.__get_col_type_hint(col_idx),
672
+ self.__preprocessor,
673
+ )
674
+ for col_idx, values in enumerate(zip(*value_matrix))
675
+ ]
676
+
677
+ for future in futures.as_completed(future_list):
678
+ col_idx, value_dp_list = future.result()
679
+ col_data_map[col_idx] = value_dp_list
680
+
681
+ return list(
682
+ zip(*(col_data_map[col_idx] for col_idx in sorted(col_data_map))) # type: ignore
683
+ )
684
+
685
+ def _to_dp_list(
686
+ self,
687
+ data_list: Sequence[Any],
688
+ type_hint: TypeHint = None,
689
+ preprocessor: Optional[Preprocessor] = None,
690
+ strict_level_map: Optional[StrictLevelMap] = None,
691
+ ) -> List[DataProperty]:
692
+ if is_empty_sequence(data_list):
693
+ return []
694
+
695
+ type_counter: typing.Counter[Type[AbstractType]] = Counter()
696
+
697
+ dp_list = []
698
+ for data in data_list:
699
+ expect_type_hint: TypeHint = type_hint
700
+ if type_hint is None:
701
+ try:
702
+ expect_type_hint, _count = type_counter.most_common(1)[0]
703
+ if not expect_type_hint(
704
+ data, float_type=self.float_type, strict_level=StrictLevel.MAX
705
+ ).is_type():
706
+ expect_type_hint = None
707
+ except IndexError:
708
+ pass
709
+
710
+ dataprop = self.__to_dp(
711
+ data=data,
712
+ type_hint=expect_type_hint,
713
+ preprocessor=preprocessor if preprocessor else self.__preprocessor,
714
+ strict_level_map=strict_level_map,
715
+ )
716
+ type_counter[dataprop.type_class] += 1
717
+
718
+ dp_list.append(dataprop)
719
+
720
+ return dp_list
721
+
722
+ def __strip_data_matrix(self, data_matrix: Sequence[Sequence[Any]]) -> Sequence[Sequence[Any]]:
723
+ header_col_size = len(self.headers) if self.headers else 0
724
+ try:
725
+ col_size_list = [len(data_list) for data_list in data_matrix]
726
+ except TypeError:
727
+ return []
728
+
729
+ if self.headers:
730
+ min_col_size = min([header_col_size] + col_size_list)
731
+ max_col_size = max([header_col_size] + col_size_list)
732
+ elif col_size_list:
733
+ min_col_size = min(col_size_list)
734
+ max_col_size = max(col_size_list)
735
+ else:
736
+ min_col_size = 0
737
+ max_col_size = 0
738
+
739
+ if self.matrix_formatting == MatrixFormatting.EXCEPTION:
740
+ if min_col_size != max_col_size:
741
+ raise ValueError(
742
+ "nonuniform column size found: min={}, max={}".format(
743
+ min_col_size, max_col_size
744
+ )
745
+ )
746
+
747
+ return data_matrix
748
+
749
+ if self.matrix_formatting == MatrixFormatting.HEADER_ALIGNED:
750
+ if header_col_size > 0:
751
+ format_col_size = header_col_size
752
+ else:
753
+ format_col_size = max_col_size
754
+ elif self.matrix_formatting == MatrixFormatting.TRIM:
755
+ format_col_size = min_col_size
756
+ elif self.matrix_formatting == MatrixFormatting.FILL_NONE:
757
+ format_col_size = max_col_size
758
+ else:
759
+ raise ValueError(f"unknown matrix formatting: {self.matrix_formatting}")
760
+
761
+ return [
762
+ list(data_matrix[row_idx][:format_col_size]) + [None] * (format_col_size - col_size)
763
+ for row_idx, col_size in enumerate(col_size_list)
764
+ ]
765
+
766
+ def __get_col_dp_list_base(self) -> List[ColumnDataProperty]:
767
+ header_dp_list = self.to_header_dp_list()
768
+ col_dp_list = []
769
+
770
+ for col_idx, header_dp in enumerate(header_dp_list):
771
+ col_dp = ColumnDataProperty(
772
+ column_index=col_idx,
773
+ float_type=self.float_type,
774
+ min_width=self.min_column_width,
775
+ format_flags=self.__get_format_flags(col_idx),
776
+ is_formatting_float=self.is_formatting_float,
777
+ datetime_format_str=self.datetime_format_str,
778
+ east_asian_ambiguous_width=self.east_asian_ambiguous_width,
779
+ max_precision=self.__max_precision,
780
+ )
781
+ col_dp.update_header(header_dp)
782
+ col_dp_list.append(col_dp)
783
+
784
+ return col_dp_list
785
+
786
+ def __update_dp_converter(self) -> None:
787
+ preprocessor = Preprocessor(
788
+ line_break_handling=self.__preprocessor.line_break_handling,
789
+ line_break_repl=self.preprocessor.line_break_repl,
790
+ is_escape_html_tag=self.__preprocessor.is_escape_html_tag,
791
+ is_escape_formula_injection=self.__preprocessor.is_escape_formula_injection,
792
+ )
793
+ self.__dp_converter = DataPropertyConverter(
794
+ preprocessor=preprocessor,
795
+ type_value_map=self.type_value_map,
796
+ quoting_flags=self.quoting_flags,
797
+ datetime_formatter=self.datetime_formatter,
798
+ datetime_format_str=self.datetime_format_str,
799
+ float_type=self.float_type,
800
+ strict_level_map=self.strict_level_map,
801
+ )
802
+
803
+
804
+ def _to_dp_list_helper(
805
+ extractor: DataPropertyExtractor,
806
+ col_idx: int,
807
+ data_list: Sequence[Any],
808
+ type_hint: TypeHint,
809
+ preprocessor: Preprocessor,
810
+ ) -> Tuple[int, List[DataProperty]]:
811
+ return (
812
+ col_idx,
813
+ extractor._to_dp_list(data_list, type_hint=type_hint, preprocessor=preprocessor),
814
+ )
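For reviewers skimming this commit, here is a minimal usage sketch of the `DataPropertyExtractor` class whose remaining methods appear above. It is not one of the added files: the import path simply mirrors the module location in this commit, the input rows and header names are made-up assumptions, and the printed attributes (`typename`, `decimal_places`) are taken from the `DataPeropertyInterface` added later in this diff.

```python
from dataproperty._extractor import DataPropertyExtractor

extractor = DataPropertyExtractor()

# Convert a single (hypothetical) value into a DataProperty object
dp = extractor.to_dp("2.5")
print(dp.typename, dp.decimal_places)  # detected type name and decimal places

# Build a matrix of DataProperty objects, then aggregate per-column properties
extractor.headers = ["id", "value"]                       # headers setter shown above
dp_matrix = extractor.to_dp_matrix([[1, "2.5"], [2, None]])
for col_dp in extractor.to_column_dp_list(dp_matrix):
    print(col_dp)  # ColumnDataProperty has a readable str() form (used in the debug log above)
```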
llmeval-env/lib/python3.10/site-packages/dataproperty/_formatter.py ADDED
@@ -0,0 +1,98 @@
1
+ import copy
2
+ from decimal import Decimal
3
+ from typing import Dict, Optional, Union
4
+
5
+ from typepy import Nan, Typecode
6
+
7
+
8
+ DecimalPlaces = Union[float, Decimal]
9
+
10
+
11
+ class Format:
12
+ NONE = 0
13
+ THOUSAND_SEPARATOR = 1
14
+
15
+
16
+ class Formatter:
17
+ __slots__ = ("__is_formatting_float", "__format_flags", "__datetime_format_str")
18
+
19
+ _BLANK_CURLY_BRACES_FORMAT_MAP: Dict[Typecode, str] = {
20
+ Typecode.NONE: "{}",
21
+ Typecode.IP_ADDRESS: "{}",
22
+ Typecode.BOOL: "{}",
23
+ Typecode.DICTIONARY: "{}",
24
+ Typecode.LIST: "{}",
25
+ }
26
+
27
+ def __init__(
28
+ self,
29
+ datetime_format_str: str,
30
+ is_formatting_float: Optional[bool] = True,
31
+ format_flags: Optional[int] = None,
32
+ ) -> None:
33
+ if format_flags is not None:
34
+ self.__format_flags = format_flags
35
+ else:
36
+ self.__format_flags = Format.NONE
37
+
38
+ self.__datetime_format_str = datetime_format_str
39
+ self.__is_formatting_float = is_formatting_float
40
+
41
+ def make_format_map(
42
+ self, decimal_places: Optional[DecimalPlaces] = None
43
+ ) -> Dict[Typecode, str]:
44
+ format_map = copy.copy(self._BLANK_CURLY_BRACES_FORMAT_MAP)
45
+ format_map.update(
46
+ {
47
+ Typecode.INTEGER: self.make_format_str(Typecode.INTEGER),
48
+ Typecode.REAL_NUMBER: self.make_format_str(Typecode.REAL_NUMBER, decimal_places),
49
+ Typecode.INFINITY: self.make_format_str(Typecode.INFINITY),
50
+ Typecode.NAN: self.make_format_str(Typecode.NAN),
51
+ Typecode.DATETIME: self.make_format_str(Typecode.DATETIME),
52
+ }
53
+ )
54
+
55
+ return format_map
56
+
57
+ def make_format_str(
58
+ self, typecode: Typecode, decimal_places: Optional[DecimalPlaces] = None
59
+ ) -> str:
60
+ format_str = self._BLANK_CURLY_BRACES_FORMAT_MAP.get(typecode)
61
+ if format_str is not None:
62
+ return format_str
63
+
64
+ if typecode == Typecode.INTEGER:
65
+ return self.__get_integer_format()
66
+
67
+ if typecode in (Typecode.REAL_NUMBER, Typecode.INFINITY, Typecode.NAN):
68
+ return self.__get_realnumber_format(decimal_places)
69
+
70
+ if typecode == Typecode.DATETIME:
71
+ return "{:" + self.__datetime_format_str + "}"
72
+
73
+ return "{:s}"
74
+
75
+ def __get_base_format_str(self) -> str:
76
+ if self.__format_flags & Format.THOUSAND_SEPARATOR:
77
+ return ","
78
+
79
+ return ""
80
+
81
+ def __get_integer_format(self) -> str:
82
+ return "{:" + self.__get_base_format_str() + "d}"
83
+
84
+ def __get_realnumber_format(self, decimal_places: Optional[DecimalPlaces]) -> str:
85
+ if not self.__is_formatting_float:
86
+ return "{}"
87
+
88
+ base_format = self.__get_base_format_str()
89
+
90
+ if decimal_places is None or Nan(decimal_places).is_type():
91
+ return "{:" + base_format + "f}"
92
+
93
+ try:
94
+ return "{:" + f"{base_format:s}.{decimal_places:d}f" + "}"
95
+ except ValueError:
96
+ pass
97
+
98
+ return "{:" + base_format + "f}"
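A small, self-contained sketch of how the `Formatter` class above builds format strings. It is not part of the added files; the concrete values (the date format, the sample numbers) are illustrative assumptions, and `Typecode` comes from `typepy`, which the module already imports.

```python
from typepy import Typecode

from dataproperty._formatter import Format, Formatter

formatter = Formatter(
    datetime_format_str="%Y-%m-%d",
    format_flags=Format.THOUSAND_SEPARATOR,  # adds the "," option to numeric formats
)
print(formatter.make_format_str(Typecode.INTEGER).format(1234567))         # 1,234,567
print(formatter.make_format_str(Typecode.REAL_NUMBER, 2).format(3.14159))  # 3.14
```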
llmeval-env/lib/python3.10/site-packages/dataproperty/_function.py ADDED
@@ -0,0 +1,112 @@
1
+ """
2
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
3
+ """
4
+
5
+ import decimal
6
+ import re
7
+ from decimal import Decimal
8
+ from typing import Any, Optional, Tuple, Union
9
+
10
+ from typepy import Integer, RealNumber, TypeConversionError
11
+
12
+
13
+ decimal.setcontext(decimal.Context(prec=60, rounding=decimal.ROUND_HALF_DOWN))
14
+
15
+ _ansi_escape = re.compile(r"(\x9b|\x1b\[)[0-?]*[ -\/]*[@-~]", re.IGNORECASE)
16
+
17
+
18
+ def get_integer_digit(value: Any) -> int:
19
+ float_type = RealNumber(value)
20
+
21
+ try:
22
+ abs_value = abs(float_type.convert())
23
+ except TypeConversionError:
24
+ try:
25
+ abs_value = abs(Integer(value).convert())
26
+ except TypeConversionError:
27
+ raise ValueError(f"the value must be a number: value='{value}' type='{type(value)}'")
28
+
29
+ return len(str(abs_value))
30
+
31
+ if abs_value.is_zero():
32
+ return 1
33
+
34
+ try:
35
+ return len(str(abs_value.quantize(Decimal("1."), rounding=decimal.ROUND_DOWN)))
36
+ except decimal.InvalidOperation:
37
+ return len(str(abs_value))
38
+
39
+
40
+ class DigitCalculator:
41
+ REGEXP_COMMON_LOG = re.compile(r"[\d\.]+[eE]\-\d+")
42
+ REGEXP_SPLIT = re.compile(r"[eE]\-")
43
+
44
+ def get_decimal_places(self, value: Union[str, float, int, Decimal]) -> int:
45
+ if Integer(value).is_type():
46
+ return 0
47
+
48
+ float_digit_len = 0
49
+ abs_value = abs(float(value))
50
+ text_value = str(abs_value)
51
+ float_text = "0"
52
+ if text_value.find(".") != -1:
53
+ float_text = text_value.split(".")[1]
54
+ float_digit_len = len(float_text)
55
+ elif self.REGEXP_COMMON_LOG.search(text_value):
56
+ float_text = self.REGEXP_SPLIT.split(text_value)[1]
57
+ float_digit_len = int(float_text)
58
+
59
+ return float_digit_len
60
+
61
+
62
+ _digit_calculator = DigitCalculator()
63
+
64
+
65
+ def get_number_of_digit(
66
+ value: Any, max_decimal_places: int = 99
67
+ ) -> Tuple[Optional[int], Optional[int]]:
68
+ try:
69
+ integer_digits = get_integer_digit(value)
70
+ except (ValueError, TypeError, OverflowError):
71
+ return (None, None)
72
+
73
+ try:
74
+ decimal_places: Optional[int] = min(
75
+ _digit_calculator.get_decimal_places(value), max_decimal_places
76
+ )
77
+ except (ValueError, TypeError):
78
+ decimal_places = None
79
+
80
+ return (integer_digits, decimal_places)
81
+
82
+
83
+ def _validate_eaaw(east_asian_ambiguous_width: int) -> None:
84
+ if east_asian_ambiguous_width in (1, 2):
85
+ return
86
+
87
+ raise ValueError(
88
+ "invalid east_asian_ambiguous_width: expected=1 or 2, actual={}".format(
89
+ east_asian_ambiguous_width
90
+ )
91
+ )
92
+
93
+
94
+ def strip_ansi_escape(unicode_str: str) -> str:
95
+ return _ansi_escape.sub("", unicode_str)
96
+
97
+
98
+ def calc_ascii_char_width(unicode_str: str, east_asian_ambiguous_width: int = 1) -> int:
99
+ import unicodedata
100
+
101
+ width = 0
102
+ for char in unicode_str:
103
+ char_width = unicodedata.east_asian_width(char)
104
+ if char_width in "WF":
105
+ width += 2
106
+ elif char_width == "A":
107
+ _validate_eaaw(east_asian_ambiguous_width)
108
+ width += east_asian_ambiguous_width
109
+ else:
110
+ width += 1
111
+
112
+ return width
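The helper functions above are easiest to follow from a couple of concrete calls; the following sketch uses made-up inputs and the module path from this commit, and the expected outputs follow directly from the code shown.

```python
from dataproperty._function import calc_ascii_char_width, get_number_of_digit

print(calc_ascii_char_width("abc"))    # 3: narrow characters count as width 1
print(calc_ascii_char_width("日本語"))  # 6: wide (East Asian "W") characters count as width 2
print(get_number_of_digit("123.45"))   # (3, 2): integer digits and decimal places
```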
llmeval-env/lib/python3.10/site-packages/dataproperty/_interface.py ADDED
@@ -0,0 +1,30 @@
1
+ """
2
+ .. codeauthor:: Tsuyoshi Hombashi <[email protected]>
3
+ """
4
+
5
+ import abc
6
+ from typing import Optional
7
+
8
+ from typepy import Typecode
9
+
10
+ from ._align import Align
11
+
12
+
13
+ class DataPeropertyInterface(metaclass=abc.ABCMeta):
14
+ __slots__ = ()
15
+
16
+ @abc.abstractproperty
17
+ def align(self) -> Align: # pragma: no cover
18
+ pass
19
+
20
+ @abc.abstractproperty
21
+ def decimal_places(self) -> Optional[int]: # pragma: no cover
22
+ pass
23
+
24
+ @abc.abstractproperty
25
+ def typecode(self) -> Typecode: # pragma: no cover
26
+ pass
27
+
28
+ @abc.abstractproperty
29
+ def typename(self) -> str: # pragma: no cover
30
+ pass
llmeval-env/lib/python3.10/site-packages/dataproperty/_line_break.py ADDED
@@ -0,0 +1,8 @@
1
+ from enum import Enum, unique
2
+
3
+
4
+ @unique
5
+ class LineBreakHandling(Enum):
6
+ NOP = 0
7
+ REPLACE = 1
8
+ ESCAPE = 2
llmeval-env/lib/python3.10/site-packages/dataproperty/_preprocessor.py ADDED
@@ -0,0 +1,173 @@
1
+ import html
2
+ import re
3
+ from typing import Any, Optional, Tuple, Union
4
+
5
+ from mbstrdecoder import MultiByteStrDecoder
6
+
7
+ from ._function import strip_ansi_escape
8
+ from ._line_break import LineBreakHandling
9
+
10
+
11
+ _RE_LINE_BREAK = re.compile(r"\r\n|\n")
12
+ _RE_FORMULA_PREFIX = re.compile(r"^[-\+=@]")
13
+
14
+
15
+ def normalize_lbh(value: Optional[LineBreakHandling]) -> LineBreakHandling:
16
+ if isinstance(value, LineBreakHandling):
17
+ return value
18
+
19
+ if value is None:
20
+ return LineBreakHandling.NOP
21
+
22
+ return LineBreakHandling[value.upper()] # type: ignore
23
+
24
+
25
+ class Preprocessor:
26
+ @property
27
+ def line_break_handling(self) -> Optional[LineBreakHandling]:
28
+ return self.__line_break_handling
29
+
30
+ @line_break_handling.setter
31
+ def line_break_handling(self, value: Optional[LineBreakHandling]) -> None:
32
+ self.__line_break_handling = normalize_lbh(value)
33
+
34
+ def __init__(
35
+ self,
36
+ strip_str: Optional[Union[str, bytes]] = None,
37
+ replace_tabs_with_spaces: bool = True,
38
+ tab_length: int = 2,
39
+ line_break_handling: Optional[LineBreakHandling] = None,
40
+ line_break_repl: str = " ",
41
+ dequote: bool = False,
42
+ is_escape_html_tag: bool = False,
43
+ is_escape_formula_injection: bool = False,
44
+ ) -> None:
45
+ self.strip_str = strip_str
46
+ self.replace_tabs_with_spaces = replace_tabs_with_spaces
47
+ self.tab_length = tab_length
48
+ self.line_break_handling = line_break_handling
49
+ self.line_break_repl = line_break_repl
50
+ self.dequote = dequote
51
+ self.is_escape_html_tag = is_escape_html_tag
52
+ self.is_escape_formula_injection = is_escape_formula_injection
53
+
54
+ def __repr__(self) -> str:
55
+ return ", ".join(
56
+ [
57
+ f"strip_str={self.strip_str!r}",
58
+ f"replace_tabs_with_spaces={self.replace_tabs_with_spaces}",
59
+ f"tab_length={self.tab_length}",
60
+ f"line_break_handling={self.line_break_handling}",
61
+ f"line_break_repl={self.line_break_repl}",
62
+ f"escape_html_tag={self.is_escape_html_tag}",
63
+ f"escape_formula_injection={self.is_escape_formula_injection}",
64
+ ]
65
+ )
66
+
67
+ def preprocess(self, data: Any) -> Tuple:
68
+ data, no_ansi_escape_data = self.__preprocess_string(
69
+ self.__preprocess_data(data, self.strip_str),
70
+ )
71
+ return (data, no_ansi_escape_data)
72
+
73
+ def update(self, **kwargs: Any) -> bool:
74
+ is_updated = False
75
+
76
+ for key, value in kwargs.items():
77
+ if not hasattr(self, key):
78
+ continue
79
+
80
+ if getattr(self, key) == value:
81
+ continue
82
+
83
+ setattr(self, key, value)
84
+ is_updated = True
85
+
86
+ return is_updated
87
+
88
+ def __preprocess_string(self, raw_data: Any) -> Tuple[Any, Optional[str]]:
89
+ data = raw_data
90
+
91
+ if not isinstance(data, str):
92
+ return (data, None)
93
+
94
+ if self.replace_tabs_with_spaces:
95
+ try:
96
+ data = data.replace("\t", " " * self.tab_length)
97
+ except (TypeError, AttributeError, ValueError):
98
+ pass
99
+
100
+ if self.is_escape_html_tag:
101
+ try:
102
+ data = html.escape(data)
103
+ except AttributeError:
104
+ return (data, None)
105
+
106
+ data = self.__process_line_break(data)
107
+ data = self.__escape_formula_injection(data)
108
+ data = self.__dequote(data)
109
+
110
+ try:
111
+ return (data, strip_ansi_escape(data))
112
+ except TypeError:
113
+ return (data, None)
114
+
115
+ @staticmethod
116
+ def __preprocess_data(data: Any, strip_str: Optional[Union[str, bytes]]) -> Any:
117
+ if strip_str is None:
118
+ return data
119
+
120
+ try:
121
+ return data.strip(strip_str)
122
+ except AttributeError:
123
+ return data
124
+ except UnicodeDecodeError:
125
+ return MultiByteStrDecoder(data).unicode_str.strip(strip_str)
126
+ except TypeError:
127
+ # reach here when data and strip_str type are different
128
+ if isinstance(data, bytes):
129
+ return MultiByteStrDecoder(data).unicode_str.strip(strip_str)
130
+ elif isinstance(strip_str, bytes):
131
+ return data.strip(MultiByteStrDecoder(strip_str).unicode_str)
132
+
133
+ def __dequote(self, s: str) -> str:
134
+ if not self.dequote or not s:
135
+ return s
136
+
137
+ try:
138
+ if (s[0] == s[-1]) and s.startswith(("'", '"')):
139
+ if s.count(s[0]) == 2:
140
+ return s[1:-1]
141
+ except TypeError:
142
+ pass
143
+
144
+ return s
145
+
146
+ def __process_line_break(self, data: str) -> str:
147
+ lbh = self.line_break_handling
148
+
149
+ if lbh == LineBreakHandling.NOP:
150
+ return data
151
+
152
+ try:
153
+ if lbh == LineBreakHandling.REPLACE:
154
+ return _RE_LINE_BREAK.sub(self.line_break_repl, data)
155
+
156
+ if lbh == LineBreakHandling.ESCAPE:
157
+ return data.replace("\n", "\\n").replace("\r", "\\r")
158
+ except (TypeError, AttributeError):
159
+ return data
160
+
161
+ raise ValueError(f"unexpected line_break_handling: {lbh}")
162
+
163
+ def __escape_formula_injection(self, data: str) -> str:
164
+ if not self.is_escape_formula_injection:
165
+ return data
166
+
167
+ try:
168
+ if _RE_FORMULA_PREFIX.search(data):
169
+ return "'" + data
170
+ except (TypeError, AttributeError):
171
+ return data
172
+
173
+ return data
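A minimal sketch of the `Preprocessor` pipeline above (dequoting plus line-break replacement). The sample cell value is an illustrative assumption; the import paths mirror the module locations in this commit.

```python
from dataproperty._line_break import LineBreakHandling
from dataproperty._preprocessor import Preprocessor

preprocessor = Preprocessor(
    dequote=True,
    line_break_handling=LineBreakHandling.REPLACE,
    line_break_repl=" ",
)
data, no_ansi = preprocessor.preprocess('"first\nsecond"')
print(data)  # first second  (surrounding quotes removed, line break replaced)
```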
llmeval-env/lib/python3.10/site-packages/dataproperty/py.typed ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/dataproperty/typing.py ADDED
@@ -0,0 +1,62 @@
1
+ from datetime import datetime
2
+ from decimal import Decimal
3
+ from typing import Any, Callable, Mapping, Optional, Type, Union
4
+
5
+ from typepy import (
6
+ Bool,
7
+ DateTime,
8
+ Dictionary,
9
+ Infinity,
10
+ Integer,
11
+ IpAddress,
12
+ List,
13
+ Nan,
14
+ NoneType,
15
+ NullString,
16
+ RealNumber,
17
+ String,
18
+ Typecode,
19
+ )
20
+ from typepy.type import AbstractType
21
+
22
+
23
+ TypeHint = Optional[Type[AbstractType]]
24
+ TransFunc = Callable[[Any], Any]
25
+ DateTimeFormatter = Callable[[datetime], str]
26
+
27
+ FloatType = Union[Type[Decimal], Type[float]]
28
+ StrictLevelMap = Mapping[Union[str, Typecode], int]
29
+ TypeValueMap = Mapping[Typecode, Union[float, Decimal, None]]
30
+
31
+ _type_hint_map = {
32
+ # frequently used types
33
+ "int": Integer,
34
+ "float": RealNumber,
35
+ "realnumber": RealNumber,
36
+ "str": String,
37
+ # less frequently used types
38
+ "bool": Bool,
39
+ "datetime": DateTime,
40
+ "dict": Dictionary,
41
+ "inf": Infinity,
42
+ "ip": IpAddress,
43
+ "list": List,
44
+ "nan": Nan,
45
+ "none": NoneType,
46
+ "nullstr": NullString,
47
+ }
48
+
49
+
50
+ def normalize_type_hint(type_hint: Union[str, TypeHint]) -> TypeHint:
51
+ if not type_hint:
52
+ return None
53
+
54
+ if not isinstance(type_hint, str):
55
+ return type_hint
56
+
57
+ type_hint = type_hint.strip().casefold()
58
+ for key, value in _type_hint_map.items():
59
+ if type_hint.startswith(key):
60
+ return value
61
+
62
+ raise ValueError(f"unknown typehint: {type_hint}")
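`normalize_type_hint` maps loose string names onto `typepy` type classes; a short sketch with illustrative inputs (not part of the added files):

```python
from typepy import Integer, RealNumber

from dataproperty.typing import normalize_type_hint

assert normalize_type_hint("int") is Integer
assert normalize_type_hint("float") is RealNumber
assert normalize_type_hint(None) is None  # falsy hints normalize to None
```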
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/AUTHORS ADDED
@@ -0,0 +1,8 @@
1
+ # This is the list of HuggingFace Datasets authors for copyright purposes.
2
+ #
3
+ # This does not necessarily list everyone who has contributed code, since in
4
+ # some cases, their employer may be the copyright holder. To see the full list
5
+ # of contributors, see the revision history in source control.
6
+
7
+ Google Inc.
8
+ HuggingFace Inc.
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
1
+ pip
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/LICENSE ADDED
@@ -0,0 +1,202 @@
1
+
2
+ Apache License
3
+ Version 2.0, January 2004
4
+ http://www.apache.org/licenses/
5
+
6
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7
+
8
+ 1. Definitions.
9
+
10
+ "License" shall mean the terms and conditions for use, reproduction,
11
+ and distribution as defined by Sections 1 through 9 of this document.
12
+
13
+ "Licensor" shall mean the copyright owner or entity authorized by
14
+ the copyright owner that is granting the License.
15
+
16
+ "Legal Entity" shall mean the union of the acting entity and all
17
+ other entities that control, are controlled by, or are under common
18
+ control with that entity. For the purposes of this definition,
19
+ "control" means (i) the power, direct or indirect, to cause the
20
+ direction or management of such entity, whether by contract or
21
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or (iii) beneficial ownership of such entity.
23
+
24
+ "You" (or "Your") shall mean an individual or Legal Entity
25
+ exercising permissions granted by this License.
26
+
27
+ "Source" form shall mean the preferred form for making modifications,
28
+ including but not limited to software source code, documentation
29
+ source, and configuration files.
30
+
31
+ "Object" form shall mean any form resulting from mechanical
32
+ transformation or translation of a Source form, including but
33
+ not limited to compiled object code, generated documentation,
34
+ and conversions to other media types.
35
+
36
+ "Work" shall mean the work of authorship, whether in Source or
37
+ Object form, made available under the License, as indicated by a
38
+ copyright notice that is included in or attached to the work
39
+ (an example is provided in the Appendix below).
40
+
41
+ "Derivative Works" shall mean any work, whether in Source or Object
42
+ form, that is based on (or derived from) the Work and for which the
43
+ editorial revisions, annotations, elaborations, or other modifications
44
+ represent, as a whole, an original work of authorship. For the purposes
45
+ of this License, Derivative Works shall not include works that remain
46
+ separable from, or merely link (or bind by name) to the interfaces of,
47
+ the Work and Derivative Works thereof.
48
+
49
+ "Contribution" shall mean any work of authorship, including
50
+ the original version of the Work and any modifications or additions
51
+ to that Work or Derivative Works thereof, that is intentionally
52
+ submitted to Licensor for inclusion in the Work by the copyright owner
53
+ or by an individual or Legal Entity authorized to submit on behalf of
54
+ the copyright owner. For the purposes of this definition, "submitted"
55
+ means any form of electronic, verbal, or written communication sent
56
+ to the Licensor or its representatives, including but not limited to
57
+ communication on electronic mailing lists, source code control systems,
58
+ and issue tracking systems that are managed by, or on behalf of, the
59
+ Licensor for the purpose of discussing and improving the Work, but
60
+ excluding communication that is conspicuously marked or otherwise
61
+ designated in writing by the copyright owner as "Not a Contribution."
62
+
63
+ "Contributor" shall mean Licensor and any individual or Legal Entity
64
+ on behalf of whom a Contribution has been received by Licensor and
65
+ subsequently incorporated within the Work.
66
+
67
+ 2. Grant of Copyright License. Subject to the terms and conditions of
68
+ this License, each Contributor hereby grants to You a perpetual,
69
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70
+ copyright license to reproduce, prepare Derivative Works of,
71
+ publicly display, publicly perform, sublicense, and distribute the
72
+ Work and such Derivative Works in Source or Object form.
73
+
74
+ 3. Grant of Patent License. Subject to the terms and conditions of
75
+ this License, each Contributor hereby grants to You a perpetual,
76
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77
+ (except as stated in this section) patent license to make, have made,
78
+ use, offer to sell, sell, import, and otherwise transfer the Work,
79
+ where such license applies only to those patent claims licensable
80
+ by such Contributor that are necessarily infringed by their
81
+ Contribution(s) alone or by combination of their Contribution(s)
82
+ with the Work to which such Contribution(s) was submitted. If You
83
+ institute patent litigation against any entity (including a
84
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
85
+ or a Contribution incorporated within the Work constitutes direct
86
+ or contributory patent infringement, then any patent licenses
87
+ granted to You under this License for that Work shall terminate
88
+ as of the date such litigation is filed.
89
+
90
+ 4. Redistribution. You may reproduce and distribute copies of the
91
+ Work or Derivative Works thereof in any medium, with or without
92
+ modifications, and in Source or Object form, provided that You
93
+ meet the following conditions:
94
+
95
+ (a) You must give any other recipients of the Work or
96
+ Derivative Works a copy of this License; and
97
+
98
+ (b) You must cause any modified files to carry prominent notices
99
+ stating that You changed the files; and
100
+
101
+ (c) You must retain, in the Source form of any Derivative Works
102
+ that You distribute, all copyright, patent, trademark, and
103
+ attribution notices from the Source form of the Work,
104
+ excluding those notices that do not pertain to any part of
105
+ the Derivative Works; and
106
+
107
+ (d) If the Work includes a "NOTICE" text file as part of its
108
+ distribution, then any Derivative Works that You distribute must
109
+ include a readable copy of the attribution notices contained
110
+ within such NOTICE file, excluding those notices that do not
111
+ pertain to any part of the Derivative Works, in at least one
112
+ of the following places: within a NOTICE text file distributed
113
+ as part of the Derivative Works; within the Source form or
114
+ documentation, if provided along with the Derivative Works; or,
115
+ within a display generated by the Derivative Works, if and
116
+ wherever such third-party notices normally appear. The contents
117
+ of the NOTICE file are for informational purposes only and
118
+ do not modify the License. You may add Your own attribution
119
+ notices within Derivative Works that You distribute, alongside
120
+ or as an addendum to the NOTICE text from the Work, provided
121
+ that such additional attribution notices cannot be construed
122
+ as modifying the License.
123
+
124
+ You may add Your own copyright statement to Your modifications and
125
+ may provide additional or different license terms and conditions
126
+ for use, reproduction, or distribution of Your modifications, or
127
+ for any such Derivative Works as a whole, provided Your use,
128
+ reproduction, and distribution of the Work otherwise complies with
129
+ the conditions stated in this License.
130
+
131
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
132
+ any Contribution intentionally submitted for inclusion in the Work
133
+ by You to the Licensor shall be under the terms and conditions of
134
+ this License, without any additional terms or conditions.
135
+ Notwithstanding the above, nothing herein shall supersede or modify
136
+ the terms of any separate license agreement you may have executed
137
+ with Licensor regarding such Contributions.
138
+
139
+ 6. Trademarks. This License does not grant permission to use the trade
140
+ names, trademarks, service marks, or product names of the Licensor,
141
+ except as required for reasonable and customary use in describing the
142
+ origin of the Work and reproducing the content of the NOTICE file.
143
+
144
+ 7. Disclaimer of Warranty. Unless required by applicable law or
145
+ agreed to in writing, Licensor provides the Work (and each
146
+ Contributor provides its Contributions) on an "AS IS" BASIS,
147
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148
+ implied, including, without limitation, any warranties or conditions
149
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150
+ PARTICULAR PURPOSE. You are solely responsible for determining the
151
+ appropriateness of using or redistributing the Work and assume any
152
+ risks associated with Your exercise of permissions under this License.
153
+
154
+ 8. Limitation of Liability. In no event and under no legal theory,
155
+ whether in tort (including negligence), contract, or otherwise,
156
+ unless required by applicable law (such as deliberate and grossly
157
+ negligent acts) or agreed to in writing, shall any Contributor be
158
+ liable to You for damages, including any direct, indirect, special,
159
+ incidental, or consequential damages of any character arising as a
160
+ result of this License or out of the use or inability to use the
161
+ Work (including but not limited to damages for loss of goodwill,
162
+ work stoppage, computer failure or malfunction, or any and all
163
+ other commercial damages or losses), even if such Contributor
164
+ has been advised of the possibility of such damages.
165
+
166
+ 9. Accepting Warranty or Additional Liability. While redistributing
167
+ the Work or Derivative Works thereof, You may choose to offer,
168
+ and charge a fee for, acceptance of support, warranty, indemnity,
169
+ or other liability obligations and/or rights consistent with this
170
+ License. However, in accepting such obligations, You may act only
171
+ on Your own behalf and on Your sole responsibility, not on behalf
172
+ of any other Contributor, and only if You agree to indemnify,
173
+ defend, and hold each Contributor harmless for any liability
174
+ incurred by, or claims asserted against, such Contributor by reason
175
+ of your accepting any such warranty or additional liability.
176
+
177
+ END OF TERMS AND CONDITIONS
178
+
179
+ APPENDIX: How to apply the Apache License to your work.
180
+
181
+ To apply the Apache License to your work, attach the following
182
+ boilerplate notice, with the fields enclosed by brackets "[]"
183
+ replaced with your own identifying information. (Don't include
184
+ the brackets!) The text should be enclosed in the appropriate
185
+ comment syntax for the file format. We also recommend that a
186
+ file or class name and description of purpose be included on the
187
+ same "printed page" as the copyright notice for easier
188
+ identification within third-party archives.
189
+
190
+ Copyright [yyyy] [name of copyright owner]
191
+
192
+ Licensed under the Apache License, Version 2.0 (the "License");
193
+ you may not use this file except in compliance with the License.
194
+ You may obtain a copy of the License at
195
+
196
+ http://www.apache.org/licenses/LICENSE-2.0
197
+
198
+ Unless required by applicable law or agreed to in writing, software
199
+ distributed under the License is distributed on an "AS IS" BASIS,
200
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201
+ See the License for the specific language governing permissions and
202
+ limitations under the License.
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/METADATA ADDED
@@ -0,0 +1,370 @@
1
+ Metadata-Version: 2.1
2
+ Name: datasets
3
+ Version: 2.19.1
4
+ Summary: HuggingFace community-driven open-source library of datasets
5
+ Home-page: https://github.com/huggingface/datasets
6
+ Author: HuggingFace Inc.
7
+ Author-email: [email protected]
8
+ License: Apache 2.0
9
+ Download-URL: https://github.com/huggingface/datasets/tags
10
+ Keywords: datasets machine learning datasets metrics
11
+ Platform: UNKNOWN
12
+ Classifier: Development Status :: 5 - Production/Stable
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Intended Audience :: Education
15
+ Classifier: Intended Audience :: Science/Research
16
+ Classifier: License :: OSI Approved :: Apache Software License
17
+ Classifier: Operating System :: OS Independent
18
+ Classifier: Programming Language :: Python :: 3
19
+ Classifier: Programming Language :: Python :: 3.8
20
+ Classifier: Programming Language :: Python :: 3.9
21
+ Classifier: Programming Language :: Python :: 3.10
22
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
23
+ Requires-Python: >=3.8.0
24
+ Description-Content-Type: text/markdown
25
+ License-File: LICENSE
26
+ License-File: AUTHORS
27
+ Requires-Dist: filelock
28
+ Requires-Dist: numpy (>=1.17)
29
+ Requires-Dist: pyarrow (>=12.0.0)
30
+ Requires-Dist: pyarrow-hotfix
31
+ Requires-Dist: dill (<0.3.9,>=0.3.0)
32
+ Requires-Dist: pandas
33
+ Requires-Dist: requests (>=2.19.0)
34
+ Requires-Dist: tqdm (>=4.62.1)
35
+ Requires-Dist: xxhash
36
+ Requires-Dist: multiprocess
37
+ Requires-Dist: fsspec[http] (<=2024.3.1,>=2023.1.0)
38
+ Requires-Dist: aiohttp
39
+ Requires-Dist: huggingface-hub (>=0.21.2)
40
+ Requires-Dist: packaging
41
+ Requires-Dist: pyyaml (>=5.1)
42
+ Provides-Extra: apache-beam
43
+ Requires-Dist: apache-beam (>=2.26.0) ; extra == 'apache-beam'
44
+ Provides-Extra: audio
45
+ Requires-Dist: soundfile (>=0.12.1) ; extra == 'audio'
46
+ Requires-Dist: librosa ; extra == 'audio'
47
+ Provides-Extra: benchmarks
48
+ Requires-Dist: tensorflow (==2.12.0) ; extra == 'benchmarks'
49
+ Requires-Dist: torch (==2.0.1) ; extra == 'benchmarks'
50
+ Requires-Dist: transformers (==4.30.1) ; extra == 'benchmarks'
51
+ Provides-Extra: dev
52
+ Requires-Dist: absl-py ; extra == 'dev'
53
+ Requires-Dist: joblib (<1.3.0) ; extra == 'dev'
54
+ Requires-Dist: joblibspark ; extra == 'dev'
55
+ Requires-Dist: pytest ; extra == 'dev'
56
+ Requires-Dist: pytest-datadir ; extra == 'dev'
57
+ Requires-Dist: pytest-xdist ; extra == 'dev'
58
+ Requires-Dist: elasticsearch (<8.0.0) ; extra == 'dev'
59
+ Requires-Dist: faiss-cpu (>=1.6.4) ; extra == 'dev'
60
+ Requires-Dist: lz4 ; extra == 'dev'
61
+ Requires-Dist: pyspark (>=3.4) ; extra == 'dev'
62
+ Requires-Dist: py7zr ; extra == 'dev'
63
+ Requires-Dist: rarfile (>=4.0) ; extra == 'dev'
64
+ Requires-Dist: sqlalchemy ; extra == 'dev'
65
+ Requires-Dist: s3fs (>=2021.11.1) ; extra == 'dev'
66
+ Requires-Dist: protobuf (<4.0.0) ; extra == 'dev'
67
+ Requires-Dist: tensorflow (>=2.6.0) ; extra == 'dev'
68
+ Requires-Dist: tiktoken ; extra == 'dev'
69
+ Requires-Dist: torch (>=2.0.0) ; extra == 'dev'
70
+ Requires-Dist: soundfile (>=0.12.1) ; extra == 'dev'
71
+ Requires-Dist: transformers ; extra == 'dev'
72
+ Requires-Dist: typing-extensions (>=4.6.1) ; extra == 'dev'
73
+ Requires-Dist: zstandard ; extra == 'dev'
74
+ Requires-Dist: polars[timezone] (>=0.20.0) ; extra == 'dev'
75
+ Requires-Dist: Pillow (>=6.2.1) ; extra == 'dev'
76
+ Requires-Dist: librosa ; extra == 'dev'
77
+ Requires-Dist: ruff (>=0.3.0) ; extra == 'dev'
78
+ Requires-Dist: s3fs ; extra == 'dev'
79
+ Requires-Dist: torch ; extra == 'dev'
80
+ Requires-Dist: jax (>=0.3.14) ; (sys_platform != "win32") and extra == 'dev'
81
+ Requires-Dist: jaxlib (>=0.3.14) ; (sys_platform != "win32") and extra == 'dev'
82
+ Requires-Dist: apache-beam (>=2.26.0) ; (sys_platform != "win32" and python_version < "3.10") and extra == 'dev'
83
+ Provides-Extra: docs
84
+ Requires-Dist: s3fs ; extra == 'docs'
85
+ Requires-Dist: transformers ; extra == 'docs'
86
+ Requires-Dist: torch ; extra == 'docs'
87
+ Requires-Dist: tensorflow (>=2.6.0) ; extra == 'docs'
88
+ Provides-Extra: jax
89
+ Requires-Dist: jax (>=0.3.14) ; extra == 'jax'
90
+ Requires-Dist: jaxlib (>=0.3.14) ; extra == 'jax'
91
+ Provides-Extra: metrics-tests
92
+ Requires-Dist: accelerate ; extra == 'metrics-tests'
93
+ Requires-Dist: bert-score (>=0.3.6) ; extra == 'metrics-tests'
94
+ Requires-Dist: jiwer ; extra == 'metrics-tests'
95
+ Requires-Dist: langdetect ; extra == 'metrics-tests'
96
+ Requires-Dist: mauve-text ; extra == 'metrics-tests'
97
+ Requires-Dist: nltk ; extra == 'metrics-tests'
98
+ Requires-Dist: rouge-score ; extra == 'metrics-tests'
99
+ Requires-Dist: sacrebleu ; extra == 'metrics-tests'
100
+ Requires-Dist: sacremoses ; extra == 'metrics-tests'
101
+ Requires-Dist: scikit-learn ; extra == 'metrics-tests'
102
+ Requires-Dist: scipy ; extra == 'metrics-tests'
103
+ Requires-Dist: sentencepiece ; extra == 'metrics-tests'
104
+ Requires-Dist: seqeval ; extra == 'metrics-tests'
105
+ Requires-Dist: spacy (>=3.0.0) ; extra == 'metrics-tests'
106
+ Requires-Dist: tldextract ; extra == 'metrics-tests'
107
+ Requires-Dist: toml (>=0.10.1) ; extra == 'metrics-tests'
108
+ Requires-Dist: typer (<0.5.0) ; extra == 'metrics-tests'
109
+ Requires-Dist: requests-file (>=1.5.1) ; extra == 'metrics-tests'
110
+ Requires-Dist: tldextract (>=3.1.0) ; extra == 'metrics-tests'
111
+ Requires-Dist: texttable (>=1.6.3) ; extra == 'metrics-tests'
112
+ Requires-Dist: Werkzeug (>=1.0.1) ; extra == 'metrics-tests'
113
+ Requires-Dist: six (~=1.15.0) ; extra == 'metrics-tests'
114
+ Provides-Extra: quality
115
+ Requires-Dist: ruff (>=0.3.0) ; extra == 'quality'
116
+ Provides-Extra: s3
117
+ Requires-Dist: s3fs ; extra == 's3'
118
+ Provides-Extra: streaming
119
+ Provides-Extra: tensorflow
120
+ Requires-Dist: tensorflow (>=2.6.0) ; extra == 'tensorflow'
121
+ Provides-Extra: tensorflow_gpu
122
+ Requires-Dist: tensorflow (>=2.6.0) ; extra == 'tensorflow_gpu'
123
+ Provides-Extra: tests
124
+ Requires-Dist: absl-py ; extra == 'tests'
125
+ Requires-Dist: joblib (<1.3.0) ; extra == 'tests'
126
+ Requires-Dist: joblibspark ; extra == 'tests'
127
+ Requires-Dist: pytest ; extra == 'tests'
128
+ Requires-Dist: pytest-datadir ; extra == 'tests'
129
+ Requires-Dist: pytest-xdist ; extra == 'tests'
130
+ Requires-Dist: elasticsearch (<8.0.0) ; extra == 'tests'
131
+ Requires-Dist: faiss-cpu (>=1.6.4) ; extra == 'tests'
132
+ Requires-Dist: lz4 ; extra == 'tests'
133
+ Requires-Dist: pyspark (>=3.4) ; extra == 'tests'
134
+ Requires-Dist: py7zr ; extra == 'tests'
135
+ Requires-Dist: rarfile (>=4.0) ; extra == 'tests'
136
+ Requires-Dist: sqlalchemy ; extra == 'tests'
137
+ Requires-Dist: s3fs (>=2021.11.1) ; extra == 'tests'
138
+ Requires-Dist: protobuf (<4.0.0) ; extra == 'tests'
139
+ Requires-Dist: tensorflow (>=2.6.0) ; extra == 'tests'
140
+ Requires-Dist: tiktoken ; extra == 'tests'
141
+ Requires-Dist: torch (>=2.0.0) ; extra == 'tests'
142
+ Requires-Dist: soundfile (>=0.12.1) ; extra == 'tests'
143
+ Requires-Dist: transformers ; extra == 'tests'
144
+ Requires-Dist: typing-extensions (>=4.6.1) ; extra == 'tests'
145
+ Requires-Dist: zstandard ; extra == 'tests'
146
+ Requires-Dist: polars[timezone] (>=0.20.0) ; extra == 'tests'
147
+ Requires-Dist: Pillow (>=6.2.1) ; extra == 'tests'
148
+ Requires-Dist: librosa ; extra == 'tests'
149
+ Requires-Dist: jax (>=0.3.14) ; (sys_platform != "win32") and extra == 'tests'
150
+ Requires-Dist: jaxlib (>=0.3.14) ; (sys_platform != "win32") and extra == 'tests'
151
+ Requires-Dist: apache-beam (>=2.26.0) ; (sys_platform != "win32" and python_version < "3.10") and extra == 'tests'
152
+ Provides-Extra: torch
153
+ Requires-Dist: torch ; extra == 'torch'
154
+ Provides-Extra: vision
155
+ Requires-Dist: Pillow (>=6.2.1) ; extra == 'vision'
156
+
157
+ <p align="center">
158
+ <picture>
159
+ <source media="(prefers-color-scheme: dark)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-dark.svg">
160
+ <source media="(prefers-color-scheme: light)" srcset="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-light.svg">
161
+ <img alt="Hugging Face Datasets Library" src="https://huggingface.co/datasets/huggingface/documentation-images/raw/main/datasets-logo-light.svg" width="352" height="59" style="max-width: 100%;">
162
+ </picture>
163
+ <br/>
164
+ <br/>
165
+ </p>
166
+
167
+ <p align="center">
168
+ <a href="https://github.com/huggingface/datasets/actions/workflows/ci.yml?query=branch%3Amain">
169
+ <img alt="Build" src="https://github.com/huggingface/datasets/actions/workflows/ci.yml/badge.svg?branch=main">
170
+ </a>
171
+ <a href="https://github.com/huggingface/datasets/blob/main/LICENSE">
172
+ <img alt="GitHub" src="https://img.shields.io/github/license/huggingface/datasets.svg?color=blue">
173
+ </a>
174
+ <a href="https://huggingface.co/docs/datasets/index.html">
175
+ <img alt="Documentation" src="https://img.shields.io/website/http/huggingface.co/docs/datasets/index.html.svg?down_color=red&down_message=offline&up_message=online">
176
+ </a>
177
+ <a href="https://github.com/huggingface/datasets/releases">
178
+ <img alt="GitHub release" src="https://img.shields.io/github/release/huggingface/datasets.svg">
179
+ </a>
180
+ <a href="https://huggingface.co/datasets/">
181
+ <img alt="Number of datasets" src="https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/datasets&color=brightgreen">
182
+ </a>
183
+ <a href="CODE_OF_CONDUCT.md">
184
+ <img alt="Contributor Covenant" src="https://img.shields.io/badge/Contributor%20Covenant-2.0-4baaaa.svg">
185
+ </a>
186
+ <a href="https://zenodo.org/badge/latestdoi/250213286"><img src="https://zenodo.org/badge/250213286.svg" alt="DOI"></a>
187
+ </p>
188
+
189
+ 🤗 Datasets is a lightweight library providing **two** main features:
190
+
191
+ - **one-line dataloaders for many public datasets**: one-liners to download and pre-process any of the ![number of datasets](https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/datasets&color=brightgreen) major public datasets (image datasets, audio datasets, text datasets in 467 languages and dialects, etc.) provided on the [HuggingFace Datasets Hub](https://huggingface.co/datasets). With a simple command like `squad_dataset = load_dataset("squad")`, get any of these datasets ready to use in a dataloader for training/evaluating a ML model (Numpy/Pandas/PyTorch/TensorFlow/JAX),
192
+ - **efficient data pre-processing**: simple, fast and reproducible data pre-processing for the public datasets as well as your own local datasets in CSV, JSON, text, PNG, JPEG, WAV, MP3, Parquet, etc. With simple commands like `processed_dataset = dataset.map(process_example)`, efficiently prepare the dataset for inspection and ML model evaluation and training.
193
+
194
+ [🎓 **Documentation**](https://huggingface.co/docs/datasets/) [🔎 **Find a dataset in the Hub**](https://huggingface.co/datasets) [🌟 **Share a dataset on the Hub**](https://huggingface.co/docs/datasets/share)
195
+
196
+ <h3 align="center">
197
+ <a href="https://hf.co/course"><img src="https://raw.githubusercontent.com/huggingface/datasets/main/docs/source/imgs/course_banner.png"></a>
198
+ </h3>
199
+
200
+ 🤗 Datasets is designed to let the community easily add and share new datasets.
201
+
202
+ 🤗 Datasets has many additional interesting features:
203
+
204
+ - Thrive on large datasets: 🤗 Datasets naturally frees the user from RAM limitations; all datasets are memory-mapped using an efficient zero-serialization-cost backend (Apache Arrow).
205
+ - Smart caching: never wait for your data to be processed several times.
206
+ - Lightweight and fast with a transparent and pythonic API (multi-processing/caching/memory-mapping).
207
+ - Built-in interoperability with NumPy, pandas, PyTorch, TensorFlow 2 and JAX.
208
+ - Native support for audio and image data.
209
+ - Enable streaming mode to save disk space and start iterating over the dataset immediately.
210
+
211
+ 🤗 Datasets originated from a fork of the awesome [TensorFlow Datasets](https://github.com/tensorflow/datasets), and the HuggingFace team wants to deeply thank the TensorFlow Datasets team for building this amazing library. More details on the differences between 🤗 Datasets and `tfds` can be found in the section [Main differences between 🤗 Datasets and `tfds`](#main-differences-between--datasets-and-tfds).
212
+
213
+ # Installation
214
+
215
+ ## With pip
216
+
217
+ 🤗 Datasets can be installed from PyPI and should be installed in a virtual environment (venv or conda, for instance):
218
+
219
+ ```bash
220
+ pip install datasets
221
+ ```
222
+
223
+ ## With conda
224
+
225
+ 🤗 Datasets can be installed using conda as follows:
226
+
227
+ ```bash
228
+ conda install -c huggingface -c conda-forge datasets
229
+ ```
230
+
231
+ Follow the installation pages of TensorFlow and PyTorch to see how to install them with conda.
232
+
233
+ For more details on installation, check the installation page in the documentation: https://huggingface.co/docs/datasets/installation
234
+
235
+ ## Installation to use with PyTorch/TensorFlow/pandas
236
+
237
+ If you plan to use 🤗 Datasets with PyTorch (1.0+), TensorFlow (2.2+) or pandas, you should also install PyTorch, TensorFlow or pandas.
238
+
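+ Once one of these frameworks is installed, the same dataset object can be handed to it directly. A minimal sketch (assuming PyTorch and pandas are installed, and reusing the `squad` dataset from the usage examples below):
+
+ ```python
+ from datasets import load_dataset
+
+ # Reusing the `squad` dataset from the usage examples below
+ squad_dataset = load_dataset("squad", split="train")
+
+ # Return PyTorch tensors when indexing the dataset (requires torch)
+ torch_dataset = squad_dataset.with_format("torch")
+ print(torch_dataset[0]["answers"]["answer_start"])  # a torch.Tensor
+
+ # Or materialize the split as a pandas DataFrame (requires pandas)
+ print(squad_dataset.to_pandas().head())
+ ```
+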
239
+ For more details on using the library with NumPy, pandas, PyTorch or TensorFlow, check the quick start page in the documentation: https://huggingface.co/docs/datasets/quickstart
240
+
241
+ # Usage
242
+
243
+ 🤗 Datasets is made to be very simple to use - the API is centered around a single function, `datasets.load_dataset(dataset_name, **kwargs)`, that instantiates a dataset.
244
+
245
+ This library can be used for text/image/audio/etc. datasets. Here is a quick example with a text dataset:
248
+
249
+ ```python
250
+ from datasets import load_dataset
251
+
252
+ # Print all the available datasets
253
+ from huggingface_hub import list_datasets
254
+ print([dataset.id for dataset in list_datasets()])
255
+
256
+ # Load a dataset and print the first example in the training set
257
+ squad_dataset = load_dataset('squad')
258
+ print(squad_dataset['train'][0])
259
+
260
+ # Process the dataset - add a column with the length of the context texts
261
+ dataset_with_length = squad_dataset.map(lambda x: {"length": len(x["context"])})
262
+
263
+ # Process the dataset - tokenize the context texts (using a tokenizer from the 🤗 Transformers library)
264
+ from transformers import AutoTokenizer
265
+ tokenizer = AutoTokenizer.from_pretrained('bert-base-cased')
266
+
267
+ tokenized_dataset = squad_dataset.map(lambda x: tokenizer(x['context']), batched=True)
268
+ ```
269
+
270
+ If your dataset is bigger than your disk or if you don't want to wait to download the data, you can use streaming:
271
+
272
+ ```python
273
+ # If you want to use the dataset immediately and efficiently stream the data as you iterate over the dataset
274
+ image_dataset = load_dataset('cifar100', streaming=True)
275
+ for example in image_dataset["train"]:
276
+ break
277
+ ```
278
+
279
+ For more details on using the library, check the quick start page in the documentation: https://huggingface.co/docs/datasets/quickstart and the specific pages on:
280
+
281
+ - Loading a dataset: https://huggingface.co/docs/datasets/loading
282
+ - What's in a Dataset: https://huggingface.co/docs/datasets/access
283
+ - Processing data with 🤗 Datasets: https://huggingface.co/docs/datasets/process
284
+ - Processing audio data: https://huggingface.co/docs/datasets/audio_process
285
+ - Processing image data: https://huggingface.co/docs/datasets/image_process
286
+ - Processing text data: https://huggingface.co/docs/datasets/nlp_process
287
+ - Streaming a dataset: https://huggingface.co/docs/datasets/stream
288
+ - Writing your own dataset loading script: https://huggingface.co/docs/datasets/dataset_script
289
+ - etc.
290
+
291
+ # Add a new dataset to the Hub
292
+
293
+ We have a very detailed step-by-step guide to add a new dataset to the ![number of datasets](https://img.shields.io/endpoint?url=https://huggingface.co/api/shields/datasets&color=brightgreen) datasets already provided on the [HuggingFace Datasets Hub](https://huggingface.co/datasets).
294
+
295
+ You can find:
296
+ - [how to upload a dataset to the Hub using your web browser or Python](https://huggingface.co/docs/datasets/upload_dataset) (a minimal Python sketch follows this list) and also
297
+ - [how to upload it using Git](https://huggingface.co/docs/datasets/share).
298
+
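+ As a rough sketch of the Python route (assuming you are already authenticated, e.g. via `huggingface-cli login`, and using `my-username/my-dataset` as a placeholder repository name):
+
+ ```python
+ from datasets import Dataset
+
+ # Build a tiny dataset from in-memory data (placeholder content)
+ ds = Dataset.from_dict({"text": ["hello", "world"], "label": [0, 1]})
+
+ # Push it to the Hugging Face Hub under your namespace
+ ds.push_to_hub("my-username/my-dataset")
+ ```
+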
299
+ # Main differences between 🤗 Datasets and `tfds`
300
+
301
+ If you are familiar with the great TensorFlow Datasets, here are the main differences between 🤗 Datasets and `tfds`:
302
+
303
+ - the scripts in 🤗 Datasets are not provided within the library but are queried, downloaded/cached and dynamically loaded upon request
304
+ - the backend serialization of 🤗 Datasets is based on [Apache Arrow](https://arrow.apache.org/) instead of TFRecords, and leverages Python dataclasses for info and features, with some diverging features (we mostly don't do encoding and store the raw data as much as possible in the backend serialization cache).
305
+ - the user-facing dataset object of 🤗 Datasets is not a `tf.data.Dataset` but a built-in, framework-agnostic dataset class with methods inspired by what we like in `tf.data` (such as a `map()` method). It basically wraps a memory-mapped Arrow table cache (see the sketch after this list).
306
+
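+ As a rough illustration of that last point, here is a minimal sketch (printed values are indicative only):
+
+ ```python
+ from datasets import load_dataset
+
+ ds = load_dataset("squad", split="train")
+
+ print(type(ds))        # datasets.arrow_dataset.Dataset -- not a tf.data.Dataset
+ print(ds.data)         # the underlying (memory-mapped) Arrow table
+ print(ds.cache_files)  # the Arrow cache file(s) backing the dataset on disk
+
+ # The same object can then be formatted lazily for whichever framework you need,
+ # e.g. ds.with_format("numpy") or ds.with_format("torch").
+ ```
+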
307
+ # Disclaimers
308
+
309
+ 🤗 Datasets may run Python code defined by the dataset authors to parse certain data formats or structures. For security reasons, we ask users to:
310
+ - check the dataset scripts they're going to run beforehand and
311
+ - pin the `revision` of the repositories they use (see the example below).
312
+
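+ For example, pinning a revision looks roughly like this (the revision value is a placeholder; use a tag or commit sha from the dataset repository):
+
+ ```python
+ from datasets import load_dataset
+
+ # Pin the dataset repository to a specific git revision (tag, branch name or commit sha)
+ squad_dataset = load_dataset("squad", revision="main")  # replace "main" with a pinned tag/sha
+ ```
+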
313
+ If you're a dataset owner and wish to update any part of it (description, citation, license, etc.), or do not want your dataset to be included in the Hugging Face Hub, please get in touch by opening a discussion or a pull request in the Community tab of the dataset page. Thanks for your contribution to the ML community!
314
+
315
+ ## BibTeX
316
+
317
+ If you want to cite our 🤗 Datasets library, you can use our [paper](https://arxiv.org/abs/2109.02846):
318
+
319
+ ```bibtex
320
+ @inproceedings{lhoest-etal-2021-datasets,
321
+ title = "Datasets: A Community Library for Natural Language Processing",
322
+ author = "Lhoest, Quentin and
323
+ Villanova del Moral, Albert and
324
+ Jernite, Yacine and
325
+ Thakur, Abhishek and
326
+ von Platen, Patrick and
327
+ Patil, Suraj and
328
+ Chaumond, Julien and
329
+ Drame, Mariama and
330
+ Plu, Julien and
331
+ Tunstall, Lewis and
332
+ Davison, Joe and
333
+ {\v{S}}a{\v{s}}ko, Mario and
334
+ Chhablani, Gunjan and
335
+ Malik, Bhavitvya and
336
+ Brandeis, Simon and
337
+ Le Scao, Teven and
338
+ Sanh, Victor and
339
+ Xu, Canwen and
340
+ Patry, Nicolas and
341
+ McMillan-Major, Angelina and
342
+ Schmid, Philipp and
343
+ Gugger, Sylvain and
344
+ Delangue, Cl{\'e}ment and
345
+ Matussi{\`e}re, Th{\'e}o and
346
+ Debut, Lysandre and
347
+ Bekman, Stas and
348
+ Cistac, Pierric and
349
+ Goehringer, Thibault and
350
+ Mustar, Victor and
351
+ Lagunas, Fran{\c{c}}ois and
352
+ Rush, Alexander and
353
+ Wolf, Thomas",
354
+ booktitle = "Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing: System Demonstrations",
355
+ month = nov,
356
+ year = "2021",
357
+ address = "Online and Punta Cana, Dominican Republic",
358
+ publisher = "Association for Computational Linguistics",
359
+ url = "https://aclanthology.org/2021.emnlp-demo.21",
360
+ pages = "175--184",
361
+ abstract = "The scale, variety, and quantity of publicly-available NLP datasets has grown rapidly as researchers propose new tasks, larger models, and novel benchmarks. Datasets is a community library for contemporary NLP designed to support this ecosystem. Datasets aims to standardize end-user interfaces, versioning, and documentation, while providing a lightweight front-end that behaves similarly for small datasets as for internet-scale corpora. The design of the library incorporates a distributed, community-driven approach to adding datasets and documenting usage. After a year of development, the library now includes more than 650 unique datasets, has more than 250 contributors, and has helped support a variety of novel cross-dataset research projects and shared tasks. The library is available at https://github.com/huggingface/datasets.",
362
+ eprint={2109.02846},
363
+ archivePrefix={arXiv},
364
+ primaryClass={cs.CL},
365
+ }
366
+ ```
367
+
368
+ If you need to cite a specific version of our 🤗 Datasets library for reproducibility, you can use the corresponding version Zenodo DOI from this [list](https://zenodo.org/search?q=conceptrecid:%224817768%22&sort=-version&all_versions=True).
369
+
370
+
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/RECORD ADDED
@@ -0,0 +1,274 @@
1
+ ../../../bin/datasets-cli,sha256=K-tpqks_eIrx2Yerd2SnheJGagjMuE6pK4dM_BD-VEw,263
2
+ datasets-2.19.1.dist-info/AUTHORS,sha256=L0FBY23tCNHLmvsOKAbumHn8WZZIK98sH53JYxhAchU,327
3
+ datasets-2.19.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
4
+ datasets-2.19.1.dist-info/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
5
+ datasets-2.19.1.dist-info/METADATA,sha256=1bepJrLNSvCU9SILMtmGo4wSoaniJLCETh3VrZLzEjQ,19467
6
+ datasets-2.19.1.dist-info/RECORD,,
7
+ datasets-2.19.1.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
8
+ datasets-2.19.1.dist-info/entry_points.txt,sha256=vhdg1JXUleCZtwvozP5q5iHqRpSETfyhSDJ39zW3KUA,70
9
+ datasets-2.19.1.dist-info/top_level.txt,sha256=9A857YvCQm_Dg3UjeKkWPz9sDBos0t3zN2pf5krTemQ,9
10
+ datasets/__init__.py,sha256=RbmsP1OwzxnzLbg9_Z69yD5LCCNzXpV5wwHc-XWVees,2455
11
+ datasets/__pycache__/__init__.cpython-310.pyc,,
12
+ datasets/__pycache__/arrow_dataset.cpython-310.pyc,,
13
+ datasets/__pycache__/arrow_reader.cpython-310.pyc,,
14
+ datasets/__pycache__/arrow_writer.cpython-310.pyc,,
15
+ datasets/__pycache__/builder.bak.cpython-310.pyc,,
16
+ datasets/__pycache__/builder.cpython-310.pyc,,
17
+ datasets/__pycache__/combine.cpython-310.pyc,,
18
+ datasets/__pycache__/config.cpython-310.pyc,,
19
+ datasets/__pycache__/data_files.cpython-310.pyc,,
20
+ datasets/__pycache__/dataset_dict.cpython-310.pyc,,
21
+ datasets/__pycache__/distributed.cpython-310.pyc,,
22
+ datasets/__pycache__/exceptions.cpython-310.pyc,,
23
+ datasets/__pycache__/fingerprint.cpython-310.pyc,,
24
+ datasets/__pycache__/info.cpython-310.pyc,,
25
+ datasets/__pycache__/inspect.cpython-310.pyc,,
26
+ datasets/__pycache__/iterable_dataset.cpython-310.pyc,,
27
+ datasets/__pycache__/keyhash.cpython-310.pyc,,
28
+ datasets/__pycache__/load.cpython-310.pyc,,
29
+ datasets/__pycache__/metric.cpython-310.pyc,,
30
+ datasets/__pycache__/naming.cpython-310.pyc,,
31
+ datasets/__pycache__/search.cpython-310.pyc,,
32
+ datasets/__pycache__/splits.cpython-310.pyc,,
33
+ datasets/__pycache__/streaming.cpython-310.pyc,,
34
+ datasets/__pycache__/table.cpython-310.pyc,,
35
+ datasets/arrow_dataset.py,sha256=tQDkdMyyiQSNYZLat4Fe4qsIKEi-oSyhhhhjUhlmr3o,298474
36
+ datasets/arrow_reader.py,sha256=xbU1VF-BBnX1qY8CaS9nlXILCExdPNtOuSI3TjoHM30,27236
37
+ datasets/arrow_writer.py,sha256=0rtyqfWyAHFijbqv17kdRqOkwE1yxP6-JD3vpYpolEU,33637
38
+ datasets/builder.bak.py,sha256=YZYHkGfXIFeM878CLLM0YvyrY6gLw3_z_MEh-QnCybE,111808
39
+ datasets/builder.py,sha256=xvSKSAT05S-w0tSA3_UN65IDVoC_66_XzxmUPtVGI0E,111377
40
+ datasets/combine.py,sha256=OvMg-5A_cBraHyEXbNTTrWjd9sbUiyA7PG6aBJpbg5Q,10924
41
+ datasets/commands/__init__.py,sha256=rujbQtxJbwHhF9WQqp2DD9tfVTghDMJdl0v6H551Pcs,312
42
+ datasets/commands/__pycache__/__init__.cpython-310.pyc,,
43
+ datasets/commands/__pycache__/convert.cpython-310.pyc,,
44
+ datasets/commands/__pycache__/convert_to_parquet.cpython-310.pyc,,
45
+ datasets/commands/__pycache__/datasets_cli.cpython-310.pyc,,
46
+ datasets/commands/__pycache__/dummy_data.cpython-310.pyc,,
47
+ datasets/commands/__pycache__/env.cpython-310.pyc,,
48
+ datasets/commands/__pycache__/run_beam.cpython-310.pyc,,
49
+ datasets/commands/__pycache__/test.cpython-310.pyc,,
50
+ datasets/commands/convert.py,sha256=-VOqHh0ySkIOfEYmR7HVs7PzouVrkVShqyUtNGcNCYU,7914
51
+ datasets/commands/convert_to_parquet.py,sha256=-_a683Kl8pHDAlO1izUfn_ToTo7vbnTGkTnazK0Xrok,5833
52
+ datasets/commands/datasets_cli.py,sha256=2X_zweHbPFV9KYfcdXiJCS4QXzUGfIyM7ZbOKiz88JU,1519
53
+ datasets/commands/dummy_data.py,sha256=rBVQAN1wd9fvldw79PVoL3vNZdqosjO_PPO_SFEYUqw,23106
54
+ datasets/commands/env.py,sha256=8qg-hpXSXXsHvtYFvJkn5rn9IncqPsjjx3nR8no4a2I,1239
55
+ datasets/commands/run_beam.py,sha256=Dg8migMADmQvUg0koc2MN-yOQts8olBw548gCejNuwM,7010
56
+ datasets/commands/test.py,sha256=zpkyGlt0fOkAEq1RxDbZjrzpGMjM6Lt8PWC81ZZnEKA,8785
57
+ datasets/config.py,sha256=bDKBe082TfxQHyH8CKIdBPEMQ4cNIimM_EkJMtOnMYU,10505
58
+ datasets/data_files.py,sha256=ismMV0qNJDy3uKyp7n2jUacZOx6TFS02N6EzJITsPw8,32222
59
+ datasets/dataset_dict.py,sha256=6jFex2mNY0obSame0DWffVDbzTw5vKeHP25AWPCbLQQ,105682
60
+ datasets/distributed.py,sha256=jZ31II0mmlPMhZbEtbAsX6jlK0U69qdpV3uS5U5JFYw,1560
61
+ datasets/download/__init__.py,sha256=lbFOtITDaR7PHrhzJ8VfRnpaOT6NYozSxUcLv_GVfTg,281
62
+ datasets/download/__pycache__/__init__.cpython-310.pyc,,
63
+ datasets/download/__pycache__/download_config.cpython-310.pyc,,
64
+ datasets/download/__pycache__/download_manager.cpython-310.pyc,,
65
+ datasets/download/__pycache__/mock_download_manager.cpython-310.pyc,,
66
+ datasets/download/__pycache__/streaming_download_manager.cpython-310.pyc,,
67
+ datasets/download/download_config.py,sha256=I2OBuePpZsZoktzBarjq4SJjJHM-1biTiletBPad8go,5097
68
+ datasets/download/download_manager.py,sha256=Fvz0FDBrmaH8gYfobcKeaT6p4gaCos9m0sjY3F8vKmg,17213
69
+ datasets/download/mock_download_manager.py,sha256=jpMYk8SFjqnoR9J-8qqldQyKCtzjCnUXKPkSp3og7DY,10351
70
+ datasets/download/streaming_download_manager.py,sha256=eqFKHDWSaP2bZpaDQIJbUIvZOSb6r6P1Kj4Ko7qGTVI,7339
71
+ datasets/exceptions.py,sha256=vUgW0Ow6qTG1p1XhhUcoVHz71L7upBsNU6L_teiW5XU,3163
72
+ datasets/features/__init__.py,sha256=D-O0b8-LWEcTHxADhKkQriOSlNBIUsWlfOU5WfD9AD0,445
73
+ datasets/features/__pycache__/__init__.cpython-310.pyc,,
74
+ datasets/features/__pycache__/audio.cpython-310.pyc,,
75
+ datasets/features/__pycache__/features.cpython-310.pyc,,
76
+ datasets/features/__pycache__/image.cpython-310.pyc,,
77
+ datasets/features/__pycache__/translation.cpython-310.pyc,,
78
+ datasets/features/audio.py,sha256=8_xpCxr5jyCM9zemFWTZK6mNfXv6VeF_3stNdQx0JFA,12225
79
+ datasets/features/features.py,sha256=ORF9sJjRZqa_6Njz0dLmIdNip45uZpam0BlM_hfQK6Q,89689
80
+ datasets/features/image.py,sha256=JoBseOcKuoa4d04xu-sQylvGWVURhZfJPml4pSTHDnQ,15526
81
+ datasets/features/translation.py,sha256=J6jxAcAPakmMwtaHhHAhDENi1AgIGmeNn4neuEeFWYg,4476
82
+ datasets/filesystems/__init__.py,sha256=lJ2GgJp4goEsGb2UQuILf12BwSyT3WbO1EN55tRX5Os,2282
83
+ datasets/filesystems/__pycache__/__init__.cpython-310.pyc,,
84
+ datasets/filesystems/__pycache__/compression.cpython-310.pyc,,
85
+ datasets/filesystems/__pycache__/s3filesystem.cpython-310.pyc,,
86
+ datasets/filesystems/compression.py,sha256=UkqXFsMgiDtqXbwZNZ-PUtg26bOXyEv-avnN85OFLwI,4299
87
+ datasets/filesystems/s3filesystem.py,sha256=KowTCvTSsrdAU4syiaRffNw4g25-DTbjsoXBIMWz2tk,5725
88
+ datasets/fingerprint.py,sha256=pDq49L1aSrD9WXyfEVsR0rt28jDdW7rj7CiTykbIMRo,22040
89
+ datasets/formatting/__init__.py,sha256=7WIy1k5cCRDk9-HyMA2EdzeZdxaB50hie_wOAEAZ0TE,5473
90
+ datasets/formatting/__pycache__/__init__.cpython-310.pyc,,
91
+ datasets/formatting/__pycache__/formatting.cpython-310.pyc,,
92
+ datasets/formatting/__pycache__/jax_formatter.cpython-310.pyc,,
93
+ datasets/formatting/__pycache__/np_formatter.cpython-310.pyc,,
94
+ datasets/formatting/__pycache__/polars_formatter.cpython-310.pyc,,
95
+ datasets/formatting/__pycache__/tf_formatter.cpython-310.pyc,,
96
+ datasets/formatting/__pycache__/torch_formatter.cpython-310.pyc,,
97
+ datasets/formatting/formatting.py,sha256=uq2z8ifT_q09Fp3mTa563LUiuVHsFI0SZLTNfVzXRrw,25853
98
+ datasets/formatting/jax_formatter.py,sha256=KoTbq0XSUQ1Rp3G5IzN3cU192JZ9t5HAZtHiVpHPbB4,6839
99
+ datasets/formatting/np_formatter.py,sha256=DJBnt3oF0fHWJCqe4j6o9BOupZ0uGrw_xxFfsGBVoyk,4525
100
+ datasets/formatting/polars_formatter.py,sha256=PoOZM4RLFvAJdRZyNG5w3aOps3W3Saq1F8Mfyapgv8I,4700
101
+ datasets/formatting/tf_formatter.py,sha256=QRzeq8f1ALa6961PBNFRTH3RT4S-_8soqfUl9a7F89I,4657
102
+ datasets/formatting/torch_formatter.py,sha256=s6bP2ktOa8GXkjXq46odn1VpnzZhHN4Wrh7JUP9_3Y0,4728
103
+ datasets/info.py,sha256=R-o9Uv97SUoSI_SV4_HQQX7rJx6RHHWFfFmGIrnmeWg,26789
104
+ datasets/inspect.py,sha256=CFZ-Z1l9umEZRftcBNXecEq9I47gMAC-0JOCg3QCRqA,26400
105
+ datasets/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
106
+ datasets/io/__pycache__/__init__.cpython-310.pyc,,
107
+ datasets/io/__pycache__/abc.cpython-310.pyc,,
108
+ datasets/io/__pycache__/csv.cpython-310.pyc,,
109
+ datasets/io/__pycache__/generator.cpython-310.pyc,,
110
+ datasets/io/__pycache__/json.cpython-310.pyc,,
111
+ datasets/io/__pycache__/parquet.cpython-310.pyc,,
112
+ datasets/io/__pycache__/spark.cpython-310.pyc,,
113
+ datasets/io/__pycache__/sql.cpython-310.pyc,,
114
+ datasets/io/__pycache__/text.cpython-310.pyc,,
115
+ datasets/io/abc.py,sha256=LwDMXYs6YkhZuz1JiMK4PDIqgNjv7I8xH3UMUELW2ys,1672
116
+ datasets/io/csv.py,sha256=v4zaWehHb9U3njbdhy7wQnb8qO_c_58XOUC9JgBBVwI,5265
117
+ datasets/io/generator.py,sha256=mHPZLq376-RQfq9bwxDp7EHjyCJKG242gaPWUmTNA5E,1788
118
+ datasets/io/json.py,sha256=_0PwV7ps8In3HQpNNJpYtdmafMEmQsF1lr74YuCRLlg,6459
119
+ datasets/io/parquet.py,sha256=qnPUUITsm-shWK2_6FcJE6rlRwivr97d7ghP0IT5QZA,5832
120
+ datasets/io/spark.py,sha256=VUIODLHgIbiK0CI0UvthQ_gUO0MQDtHUozvw7Dfs8FI,1797
121
+ datasets/io/sql.py,sha256=4Zjw7peVEhhzoDtz2VTCFPqt2Tpy4zMB7T7ajb2GVTY,4234
122
+ datasets/io/text.py,sha256=bebEzXBSGC40_Gy94j9ZTJ7Hg0IfrV_4pnIUEhQZVig,1975
123
+ datasets/iterable_dataset.py,sha256=IVlqE9PBtXT0jd9WXrCZ7cAcYiwCS8LCOiuIw70wAZ8,108229
124
+ datasets/keyhash.py,sha256=gZLJ-0lIaj5mXP3fm0zFz8oY9L3Qu_OMkgil06oq0eg,3872
125
+ datasets/load.py,sha256=q28Y1Sn9EXQO9iJNiQwT8WWCNGsU-oVzy_FiG63eyS0,126783
126
+ datasets/metric.py,sha256=BDyIxMAC7i9lGDrRcJjeMN1sxEcfXHGX29_SfTVfC3c,28065
127
+ datasets/naming.py,sha256=aqQqYG4QR8YoxJJMAUyVv_oQyudm4WAApsEHvcozpNg,3001
128
+ datasets/packaged_modules/__init__.py,sha256=-lAu9yy5otjt3GM5q_MeL4f628PMazSXpFcpYY5saXE,3018
129
+ datasets/packaged_modules/__pycache__/__init__.cpython-310.pyc,,
130
+ datasets/packaged_modules/arrow/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
131
+ datasets/packaged_modules/arrow/__pycache__/__init__.cpython-310.pyc,,
132
+ datasets/packaged_modules/arrow/__pycache__/arrow.cpython-310.pyc,,
133
+ datasets/packaged_modules/arrow/arrow.py,sha256=xIAV0IJoFiyjgRckRYrz-rt1JI081VMvliAQHYW4h8E,3378
134
+ datasets/packaged_modules/audiofolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
135
+ datasets/packaged_modules/audiofolder/__pycache__/__init__.cpython-310.pyc,,
136
+ datasets/packaged_modules/audiofolder/__pycache__/audiofolder.cpython-310.pyc,,
137
+ datasets/packaged_modules/audiofolder/audiofolder.py,sha256=BXRlK57KvYdyEo-L-Qs6qtrG2tL0QUF0cmJvl6L1N-w,1633
138
+ datasets/packaged_modules/cache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
139
+ datasets/packaged_modules/cache/__pycache__/__init__.cpython-310.pyc,,
140
+ datasets/packaged_modules/cache/__pycache__/cache.cpython-310.pyc,,
141
+ datasets/packaged_modules/cache/cache.py,sha256=XYXcLgZQRh8O85W-omwsnAJ9ZN3F1xz462PvU1n485o,8909
142
+ datasets/packaged_modules/csv/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
143
+ datasets/packaged_modules/csv/__pycache__/__init__.cpython-310.pyc,,
144
+ datasets/packaged_modules/csv/__pycache__/csv.cpython-310.pyc,,
145
+ datasets/packaged_modules/csv/csv.py,sha256=WCcsVrf85iua__7F_Kaq2a5DmiNIHEqRcx2zv8G-5Dw,8874
146
+ datasets/packaged_modules/folder_based_builder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
147
+ datasets/packaged_modules/folder_based_builder/__pycache__/__init__.cpython-310.pyc,,
148
+ datasets/packaged_modules/folder_based_builder/__pycache__/folder_based_builder.cpython-310.pyc,,
149
+ datasets/packaged_modules/folder_based_builder/folder_based_builder.py,sha256=cpTdV7db71rYaB0eiRF0wyVaXQp2LArfm06Xc7hxap0,22509
150
+ datasets/packaged_modules/generator/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
151
+ datasets/packaged_modules/generator/__pycache__/__init__.cpython-310.pyc,,
152
+ datasets/packaged_modules/generator/__pycache__/generator.cpython-310.pyc,,
153
+ datasets/packaged_modules/generator/generator.py,sha256=QZKrNB3ztWPXT_H5OFOl1CBlAlAeckW48kdyySyVVKw,928
154
+ datasets/packaged_modules/imagefolder/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
155
+ datasets/packaged_modules/imagefolder/__pycache__/__init__.cpython-310.pyc,,
156
+ datasets/packaged_modules/imagefolder/__pycache__/imagefolder.cpython-310.pyc,,
157
+ datasets/packaged_modules/imagefolder/imagefolder.py,sha256=SYu6yxe4iBZzclT7u3m0gaACa6udSi1YOfFSy7dzdwk,1975
158
+ datasets/packaged_modules/json/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
159
+ datasets/packaged_modules/json/__pycache__/__init__.cpython-310.pyc,,
160
+ datasets/packaged_modules/json/__pycache__/json.cpython-310.pyc,,
161
+ datasets/packaged_modules/json/json.py,sha256=xHsMHKRbPMjh8TZHr87-IVCl2fIhiQ1Pw7jwcga8a4k,9959
162
+ datasets/packaged_modules/pandas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
163
+ datasets/packaged_modules/pandas/__pycache__/__init__.cpython-310.pyc,,
164
+ datasets/packaged_modules/pandas/__pycache__/pandas.cpython-310.pyc,,
165
+ datasets/packaged_modules/pandas/pandas.py,sha256=SyVcaveIKo5X_fMXLArzZcKcQQxomtvUhulAS9Sd9wE,2485
166
+ datasets/packaged_modules/parquet/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
167
+ datasets/packaged_modules/parquet/__pycache__/__init__.cpython-310.pyc,,
168
+ datasets/packaged_modules/parquet/__pycache__/parquet.cpython-310.pyc,,
169
+ datasets/packaged_modules/parquet/parquet.py,sha256=dhJcVASXIYCoify8pCUms9_Ofvjb20W9ouvQGkVStm4,4892
170
+ datasets/packaged_modules/spark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
171
+ datasets/packaged_modules/spark/__pycache__/__init__.cpython-310.pyc,,
172
+ datasets/packaged_modules/spark/__pycache__/spark.cpython-310.pyc,,
173
+ datasets/packaged_modules/spark/spark.py,sha256=7z8KuKSRVxvmdNekgAVWC5ULP3OFR-iUdXhhkLOF-kU,13916
174
+ datasets/packaged_modules/sql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
175
+ datasets/packaged_modules/sql/__pycache__/__init__.cpython-310.pyc,,
176
+ datasets/packaged_modules/sql/__pycache__/sql.cpython-310.pyc,,
177
+ datasets/packaged_modules/sql/sql.py,sha256=Fcnok2-1uX2XnQah4BrtE5SPli6O3JKb9tzMy1lachk,4482
178
+ datasets/packaged_modules/text/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
179
+ datasets/packaged_modules/text/__pycache__/__init__.cpython-310.pyc,,
180
+ datasets/packaged_modules/text/__pycache__/text.cpython-310.pyc,,
181
+ datasets/packaged_modules/text/text.py,sha256=B9TOS7SfIVTXWJtW_0rW6EgDGzsQ-kPZadxE39eqK4g,6321
182
+ datasets/packaged_modules/webdataset/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
183
+ datasets/packaged_modules/webdataset/__pycache__/__init__.cpython-310.pyc,,
184
+ datasets/packaged_modules/webdataset/__pycache__/_tenbin.cpython-310.pyc,,
185
+ datasets/packaged_modules/webdataset/__pycache__/webdataset.cpython-310.pyc,,
186
+ datasets/packaged_modules/webdataset/_tenbin.py,sha256=oovYsgR2R3eXSn1xSCLG3oTly1szKDP4UOiRp4ORdIk,8533
187
+ datasets/packaged_modules/webdataset/webdataset.py,sha256=CvN6UGdQtx__JT3Pxc0nlUPYFAbCjuSclYsCJ48hY8o,8768
188
+ datasets/parallel/__init__.py,sha256=dEhpBOLbCcsKClTXYJnJRp-ZtrfUV6jsH-CYqviXl-E,89
189
+ datasets/parallel/__pycache__/__init__.cpython-310.pyc,,
190
+ datasets/parallel/__pycache__/parallel.cpython-310.pyc,,
191
+ datasets/parallel/parallel.py,sha256=E-oOQ6zwKrkLFPwZ-3EOcr_aANJDhE-d6QTq7Mp7WvA,4738
192
+ datasets/search.py,sha256=oUh55M77KOxHU-V4ZFVosHCj3IaAOWJJ8bmkGD1aXxw,35606
193
+ datasets/splits.py,sha256=2FY6TPAvkBDhU4szbgHcOZTtKgngtNKE6noCmfhLP6U,23509
194
+ datasets/streaming.py,sha256=A04UAT5VeFrAn7t6gODgnxb7DgJvONLW-22uESg6-Fo,6540
195
+ datasets/table.py,sha256=EnBYIQQUpA_74DtAJwjADCS_udSNdemoFg61XouTY-8,95048
196
+ datasets/tasks/__init__.py,sha256=ExUcieN-G7_ybwfGqi3-Kt3akv3hrnlCq_RwEosFhiY,1614
197
+ datasets/tasks/__pycache__/__init__.cpython-310.pyc,,
198
+ datasets/tasks/__pycache__/audio_classification.cpython-310.pyc,,
199
+ datasets/tasks/__pycache__/automatic_speech_recognition.cpython-310.pyc,,
200
+ datasets/tasks/__pycache__/base.cpython-310.pyc,,
201
+ datasets/tasks/__pycache__/image_classification.cpython-310.pyc,,
202
+ datasets/tasks/__pycache__/language_modeling.cpython-310.pyc,,
203
+ datasets/tasks/__pycache__/question_answering.cpython-310.pyc,,
204
+ datasets/tasks/__pycache__/summarization.cpython-310.pyc,,
205
+ datasets/tasks/__pycache__/text_classification.cpython-310.pyc,,
206
+ datasets/tasks/audio_classification.py,sha256=fkR37qfJfJRPgCizf9iDV-dBnsGmLo2V0w8JpMwyX0M,1297
207
+ datasets/tasks/automatic_speech_recognition.py,sha256=zbTTsLX5N-_Da5oucuk6zBZhDdhD4N5_rzsni9lT_vo,1309
208
+ datasets/tasks/base.py,sha256=SlYEeDS87jruZNNkDRgz-U4q7EUijePL-RTN14ngwsk,1095
209
+ datasets/tasks/image_classification.py,sha256=llF5_koN5APq7cF_WlGy5c9hAVspRlYCprXgwAa7kCc,1297
210
+ datasets/tasks/language_modeling.py,sha256=Vdor-TdCGdiMpaIPZr0fRvgNrt5_D-1JElXKGbfQhvI,581
211
+ datasets/tasks/question_answering.py,sha256=z8a80QRTsouUuIYVKQRDMTxOGeSK1QMycyDHxUW42zg,1105
212
+ datasets/tasks/summarization.py,sha256=adrpmvgfAjXCyDRdZnZ52h0FKql5-EWU61Z2-v6rN-w,772
213
+ datasets/tasks/text_classification.py,sha256=KvlddXxnnzzjCjJmyY3Z-e1G4dpTN0UXqlmZ1L0LrjU,1403
214
+ datasets/utils/__init__.py,sha256=Y7X1Xnrg4L-YdtclluausV1giXB0_JbR9SEw9UfVw1Q,1013
215
+ datasets/utils/__pycache__/__init__.cpython-310.pyc,,
216
+ datasets/utils/__pycache__/_dataset_viewer.cpython-310.pyc,,
217
+ datasets/utils/__pycache__/_dill.cpython-310.pyc,,
218
+ datasets/utils/__pycache__/_filelock.cpython-310.pyc,,
219
+ datasets/utils/__pycache__/beam_utils.cpython-310.pyc,,
220
+ datasets/utils/__pycache__/cache.cpython-310.pyc,,
221
+ datasets/utils/__pycache__/deprecation_utils.cpython-310.pyc,,
222
+ datasets/utils/__pycache__/doc_utils.cpython-310.pyc,,
223
+ datasets/utils/__pycache__/download_manager.cpython-310.pyc,,
224
+ datasets/utils/__pycache__/experimental.cpython-310.pyc,,
225
+ datasets/utils/__pycache__/extract.cpython-310.pyc,,
226
+ datasets/utils/__pycache__/file_utils.cpython-310.pyc,,
227
+ datasets/utils/__pycache__/filelock.cpython-310.pyc,,
228
+ datasets/utils/__pycache__/hub.cpython-310.pyc,,
229
+ datasets/utils/__pycache__/info_utils.cpython-310.pyc,,
230
+ datasets/utils/__pycache__/logging.cpython-310.pyc,,
231
+ datasets/utils/__pycache__/metadata.cpython-310.pyc,,
232
+ datasets/utils/__pycache__/patching.cpython-310.pyc,,
233
+ datasets/utils/__pycache__/py_utils.cpython-310.pyc,,
234
+ datasets/utils/__pycache__/readme.cpython-310.pyc,,
235
+ datasets/utils/__pycache__/sharding.cpython-310.pyc,,
236
+ datasets/utils/__pycache__/stratify.cpython-310.pyc,,
237
+ datasets/utils/__pycache__/tf_utils.cpython-310.pyc,,
238
+ datasets/utils/__pycache__/tqdm.cpython-310.pyc,,
239
+ datasets/utils/__pycache__/track.cpython-310.pyc,,
240
+ datasets/utils/__pycache__/typing.cpython-310.pyc,,
241
+ datasets/utils/__pycache__/version.cpython-310.pyc,,
242
+ datasets/utils/_dataset_viewer.py,sha256=L9gqrMGS6FgmEJps2uBK7HFFPFENrL881ZKIVAiaF-E,4438
243
+ datasets/utils/_dill.py,sha256=_qM3dQ_9sYQg5PrQdGuzpNvdRhf1nLgYQNF4MZAaZ-8,16916
244
+ datasets/utils/_filelock.py,sha256=yl4ZQupEUyPu7f8D2ZCXitIMlajDu322QcO7Fio8eQI,2370
245
+ datasets/utils/beam_utils.py,sha256=DvA0ZVrx4-T9iHpB9VpduKn435p4rFaJw0Ua5cKmpeI,2029
246
+ datasets/utils/cache.py,sha256=ouFjySURlby2H9KqJLfpRBM8H1Fwiuo3LBlfZAB-OPo,10557
247
+ datasets/utils/deprecation_utils.py,sha256=hTHwlzRs92NfNVudH71LMpW70sjbsP5amebrIgi3A-U,3452
248
+ datasets/utils/doc_utils.py,sha256=HoSm0TFaQaCYGfDgNhpBJ4Xc2WQZuOD6dTxLd9D87fs,407
249
+ datasets/utils/download_manager.py,sha256=AXDA-dUNUOmmy4Z7e8A34BJtQPcbJhWSQuO4p5wnDWY,60
250
+ datasets/utils/experimental.py,sha256=JgOjaEY3RWZ--3u0-ry82gLCDUpudfBfl-hWZ46SyS4,1097
251
+ datasets/utils/extract.py,sha256=Pw00NNW-vbmTfHduB-YCBEaw8qEmR4z_Ira7ZMiSlXs,14189
252
+ datasets/utils/file_utils.py,sha256=C4zLt-y4qJfVFIFa9T6qUd1SaWRNdTfsiqn1vYeBU7I,65451
253
+ datasets/utils/filelock.py,sha256=H6C5dQGFCzVKyeDRRY8fZ4YGTEvvNd-MTjpL_sWYb5k,352
254
+ datasets/utils/hub.py,sha256=V2JGolL5VjFT0YiEhI0sxJED_9tGdvma7lH22d64S9I,130
255
+ datasets/utils/info_utils.py,sha256=uadj74BSn08F75wNanZkf-7z3Yo6aVoFNO8zMkJfmkk,5050
256
+ datasets/utils/logging.py,sha256=a9kgqN1Xo6HvsIPbrHY08n7cUukxQqd3vpwTubisL3E,5404
257
+ datasets/utils/metadata.py,sha256=EXuwMc0s3jgksgglAFYERpKUd5deEsjQZq5wlIImjUM,12440
258
+ datasets/utils/patching.py,sha256=iTeb7XG4faLJKNylq55EcZyCndUXU_XBDvOOkuDz_sc,4955
259
+ datasets/utils/py_utils.py,sha256=YsBGk9CGEEH3LOsHeQ2_xNKCLABeMl1YgKTjCI24Awg,27624
260
+ datasets/utils/readme.py,sha256=JFlaLMCGrIz0nQCdnYKUZk5d9D9DErEYfjtRrX9VzIw,12627
261
+ datasets/utils/resources/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
262
+ datasets/utils/resources/__pycache__/__init__.cpython-310.pyc,,
263
+ datasets/utils/resources/creators.json,sha256=XtIpMZefgBOdTevRrQTkFiufbgCbp_iyxseyphYQkn0,257
264
+ datasets/utils/resources/languages.json,sha256=Z0rQNPsfje8zMi8KdvvwxF4APwwqcskJFUvhNiLAgPM,199138
265
+ datasets/utils/resources/multilingualities.json,sha256=02Uc8RtRzfl13l98Y_alZm5HuMYwPzL78B0S5a1X-8c,205
266
+ datasets/utils/resources/readme_structure.yaml,sha256=hNf9msoBZw5jfakQrDb0Af8T325TXdcaHsAO2MUcZvY,3877
267
+ datasets/utils/resources/size_categories.json,sha256=_5nAP7z8R6t7_GfER81QudFO6Y1tqYu4AWrr4Aot8S8,171
268
+ datasets/utils/sharding.py,sha256=FDi895opKH7XkpfIu-ag9PqBQo2PGx0tSO3Dg-gDAAs,4288
269
+ datasets/utils/stratify.py,sha256=uMwuCDRbW342vy-lXDHs6IQusOr7c9nOG3PpnWyzJO4,4091
270
+ datasets/utils/tf_utils.py,sha256=YWmXP525b-kp1A-pnF-rGEOKqmg0Dv5j8RXJieSbkZc,25044
271
+ datasets/utils/tqdm.py,sha256=44F0g2fBpJwShh1l88PP7Z8kBihFWA_Yee4sjiQSxes,4303
272
+ datasets/utils/track.py,sha256=k0HYom1uxmEvYdIOMei6ie39mrrOBmf-Qd0HAYVnfag,1407
273
+ datasets/utils/typing.py,sha256=LznosIqUzjXgwbRLAGCv4_7-yZo7muYY42Y3495oz5I,224
274
+ datasets/utils/version.py,sha256=Z82cHpjTbQVJyWgnwSU8DsW2G0y-sSbSoOVeQrAds9k,3281
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.36.2)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/entry_points.txt ADDED
@@ -0,0 +1,3 @@
1
+ [console_scripts]
2
+ datasets-cli = datasets.commands.datasets_cli:main
3
+
llmeval-env/lib/python3.10/site-packages/datasets-2.19.1.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
1
+ datasets
llmeval-env/lib/python3.10/site-packages/numexpr/__init__.py ADDED
@@ -0,0 +1,68 @@
1
+ ###################################################################
2
+ # Numexpr - Fast numerical array expression evaluator for NumPy.
3
+ #
4
+ # License: MIT
5
+ # Author: See AUTHORS.txt
6
+ #
7
+ # See LICENSE.txt and LICENSES/*.txt for details about copyright and
8
+ # rights to use.
9
+ ####################################################################
10
+
11
+ """
12
+ Numexpr is a fast numerical expression evaluator for NumPy. With it,
13
+ expressions that operate on arrays (like "3*a+4*b") are accelerated
14
+ and use less memory than doing the same calculation in Python.
15
+
16
+ See:
17
+
18
+ https://github.com/pydata/numexpr
19
+
20
+ for more info about it.
21
+
22
+ """
23
+
24
+ from numexpr.interpreter import MAX_THREADS, use_vml, __BLOCK_SIZE1__
25
+
26
+ is_cpu_amd_intel = False # DEPRECATION WARNING: WILL BE REMOVED IN FUTURE RELEASE
27
+
28
+ # cpuinfo imports were moved into the test submodule function that calls them
29
+ # to improve import times.
30
+
31
+ import os, os.path
32
+ import platform
33
+ from numexpr.expressions import E
34
+ from numexpr.necompiler import (NumExpr, disassemble, evaluate, re_evaluate,
35
+ validate)
36
+
37
+ from numexpr.utils import (_init_num_threads,
38
+ get_vml_version, set_vml_accuracy_mode, set_vml_num_threads,
39
+ set_num_threads, get_num_threads,
40
+ detect_number_of_cores, detect_number_of_threads)
41
+
42
+ # Detect the number of cores
43
+ ncores = detect_number_of_cores()
44
+ # Initialize the number of threads to be used
45
+ nthreads = _init_num_threads()
46
+ # The default for VML is 1 thread (see #39)
47
+ # set_vml_num_threads(1)
48
+
49
+ from . import version
50
+ __version__ = version.version
51
+
52
+ def print_versions():
53
+ """Print the versions of software that numexpr relies on."""
54
+ try:
55
+ import numexpr.tests
56
+ return numexpr.tests.print_versions()
57
+ except ImportError:
58
+ # To maintain Python 2.6 compatibility we have simple error handling
59
+ raise ImportError('`numexpr.tests` could not be imported, likely it was excluded from the distribution.')
60
+
61
+ def test(verbosity=1):
62
+ """Run all the tests in the test suite."""
63
+ try:
64
+ import numexpr.tests
65
+ return numexpr.tests.test(verbosity=verbosity)
66
+ except ImportError:
67
+ # To maintain Python 2.6 compatibility we have simple error handling
68
+ raise ImportError('`numexpr.tests` could not be imported, likely it was excluded from the distribution.')
llmeval-env/lib/python3.10/site-packages/numexpr/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.77 kB).
 
llmeval-env/lib/python3.10/site-packages/numexpr/__pycache__/necompiler.cpython-310.pyc ADDED
Binary file (30.6 kB).
 
llmeval-env/lib/python3.10/site-packages/numexpr/cpuinfo.py ADDED
@@ -0,0 +1,859 @@
1
+ ###################################################################
2
+ # cpuinfo - Get information about CPU
3
+ #
4
+ # License: BSD
5
+ # Author: Pearu Peterson <[email protected]>
6
+ #
7
+ # See LICENSES/cpuinfo.txt for details about copyright and
8
+ # rights to use.
9
+ ####################################################################
10
+
11
+ """
12
+ cpuinfo
13
+
14
+ Copyright 2002 Pearu Peterson all rights reserved,
15
+ Pearu Peterson <[email protected]>
16
+ Permission to use, modify, and distribute this software is given under the
17
+ terms of the NumPy (BSD style) license. See LICENSE.txt that came with
18
+ this distribution for specifics.
19
+
20
+ NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
21
+ Pearu Peterson
22
+ """
23
+
24
+ __all__ = ['cpu']
25
+
26
+ import sys, re, types
27
+ import os
28
+ import subprocess
29
+ import warnings
30
+ import platform
31
+ import inspect
32
+
33
+ is_cpu_amd_intel = False # DEPRECATION WARNING: WILL BE REMOVED IN FUTURE RELEASE
34
+
35
+ def getoutput(cmd, successful_status=(0,), stacklevel=1):
36
+ try:
37
+ p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
38
+ output, _ = p.communicate()
39
+ status = p.returncode
40
+ except EnvironmentError as e:
41
+ warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
42
+ return False, ''
43
+ if os.WIFEXITED(status) and os.WEXITSTATUS(status) in successful_status:
44
+ return True, output
45
+ return False, output
46
+
47
+
48
+ def command_info(successful_status=(0,), stacklevel=1, **kw):
49
+ info = {}
50
+ for key in kw:
51
+ ok, output = getoutput(kw[key], successful_status=successful_status,
52
+ stacklevel=stacklevel + 1)
53
+ if ok:
54
+ info[key] = output.strip()
55
+ return info
56
+
57
+
58
+ def command_by_line(cmd, successful_status=(0,), stacklevel=1):
59
+ ok, output = getoutput(cmd, successful_status=successful_status,
60
+ stacklevel=stacklevel + 1)
61
+ if not ok:
62
+ return
63
+
64
+ # XXX: check
65
+ output = output.decode('ascii')
66
+
67
+ for line in output.splitlines():
68
+ yield line.strip()
69
+
70
+
71
+ def key_value_from_command(cmd, sep, successful_status=(0,),
72
+ stacklevel=1):
73
+ d = {}
74
+ for line in command_by_line(cmd, successful_status=successful_status,
75
+ stacklevel=stacklevel + 1):
76
+ l = [s.strip() for s in line.split(sep, 1)]
77
+ if len(l) == 2:
78
+ d[l[0]] = l[1]
79
+ return d
80
+
81
+
82
+ class CPUInfoBase(object):
83
+ """Holds CPU information and provides methods for requiring
84
+ the availability of various CPU features.
85
+ """
86
+
87
+ def _try_call(self, func):
88
+ try:
89
+ return func()
90
+ except:
91
+ pass
92
+
93
+ def __getattr__(self, name):
94
+ if not name.startswith('_'):
95
+ if hasattr(self, '_' + name):
96
+ attr = getattr(self, '_' + name)
97
+ if inspect.ismethod(attr):
98
+ return lambda func=self._try_call, attr=attr: func(attr)
99
+ else:
100
+ return lambda: None
101
+ raise AttributeError(name)
102
+
103
+ def _getNCPUs(self):
104
+ return 1
105
+
106
+ def __get_nbits(self):
107
+ abits = platform.architecture()[0]
108
+ nbits = re.compile(r'(\d+)bit').search(abits).group(1)
109
+ return nbits
110
+
111
+ def _is_32bit(self):
112
+ return self.__get_nbits() == '32'
113
+
114
+ def _is_64bit(self):
115
+ return self.__get_nbits() == '64'
116
+
117
+
118
+ class LinuxCPUInfo(CPUInfoBase):
119
+ info = None
120
+
121
+ def __init__(self):
122
+ if self.info is not None:
123
+ return
124
+ info = [{}]
125
+ ok, output = getoutput(['uname', '-m'])
126
+ if ok:
127
+ info[0]['uname_m'] = output.strip()
128
+ try:
129
+ fo = open('/proc/cpuinfo')
130
+ except EnvironmentError as e:
131
+ warnings.warn(str(e), UserWarning)
132
+ else:
133
+ for line in fo:
134
+ name_value = [s.strip() for s in line.split(':', 1)]
135
+ if len(name_value) != 2:
136
+ continue
137
+ name, value = name_value
138
+ if not info or name in info[-1]: # next processor
139
+ info.append({})
140
+ info[-1][name] = value
141
+ fo.close()
142
+ self.__class__.info = info
143
+
144
+ def _not_impl(self):
145
+ pass
146
+
147
+ # Athlon
148
+
149
+ def _is_AMD(self):
150
+ return self.info[0]['vendor_id'] == 'AuthenticAMD'
151
+
152
+ def _is_AthlonK6_2(self):
153
+ return self._is_AMD() and self.info[0]['model'] == '2'
154
+
155
+ def _is_AthlonK6_3(self):
156
+ return self._is_AMD() and self.info[0]['model'] == '3'
157
+
158
+ def _is_AthlonK6(self):
159
+ return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None
160
+
161
+ def _is_AthlonK7(self):
162
+ return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None
163
+
164
+ def _is_AthlonMP(self):
165
+ return re.match(r'.*?Athlon\(tm\) MP\b',
166
+ self.info[0]['model name']) is not None
167
+
168
+ def _is_AMD64(self):
169
+ return self.is_AMD() and self.info[0]['family'] == '15'
170
+
171
+ def _is_Athlon64(self):
172
+ return re.match(r'.*?Athlon\(tm\) 64\b',
173
+ self.info[0]['model name']) is not None
174
+
175
+ def _is_AthlonHX(self):
176
+ return re.match(r'.*?Athlon HX\b',
177
+ self.info[0]['model name']) is not None
178
+
179
+ def _is_Opteron(self):
180
+ return re.match(r'.*?Opteron\b',
181
+ self.info[0]['model name']) is not None
182
+
183
+ def _is_Hammer(self):
184
+ return re.match(r'.*?Hammer\b',
185
+ self.info[0]['model name']) is not None
186
+
187
+ # Alpha
188
+
189
+ def _is_Alpha(self):
190
+ return self.info[0]['cpu'] == 'Alpha'
191
+
192
+ def _is_EV4(self):
193
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
194
+
195
+ def _is_EV5(self):
196
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
197
+
198
+ def _is_EV56(self):
199
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
200
+
201
+ def _is_PCA56(self):
202
+ return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
203
+
204
+ # Intel
205
+
206
+ #XXX
207
+ _is_i386 = _not_impl
208
+
209
+ def _is_Intel(self):
210
+ return self.info[0]['vendor_id'] == 'GenuineIntel'
211
+
212
+ def _is_i486(self):
213
+ return self.info[0]['cpu'] == 'i486'
214
+
215
+ def _is_i586(self):
216
+ return self.is_Intel() and self.info[0]['cpu family'] == '5'
217
+
218
+ def _is_i686(self):
219
+ return self.is_Intel() and self.info[0]['cpu family'] == '6'
220
+
221
+ def _is_Celeron(self):
222
+ return re.match(r'.*?Celeron',
223
+ self.info[0]['model name']) is not None
224
+
225
+ def _is_Pentium(self):
226
+ return re.match(r'.*?Pentium',
227
+ self.info[0]['model name']) is not None
228
+
229
+ def _is_PentiumII(self):
230
+ return re.match(r'.*?Pentium.*?II\b',
231
+ self.info[0]['model name']) is not None
232
+
233
+ def _is_PentiumPro(self):
234
+ return re.match(r'.*?PentiumPro\b',
235
+ self.info[0]['model name']) is not None
236
+
237
+ def _is_PentiumMMX(self):
238
+ return re.match(r'.*?Pentium.*?MMX\b',
239
+ self.info[0]['model name']) is not None
240
+
241
+ def _is_PentiumIII(self):
242
+ return re.match(r'.*?Pentium.*?III\b',
243
+ self.info[0]['model name']) is not None
244
+
245
+ def _is_PentiumIV(self):
246
+ return re.match(r'.*?Pentium.*?(IV|4)\b',
247
+ self.info[0]['model name']) is not None
248
+
249
+ def _is_PentiumM(self):
250
+ return re.match(r'.*?Pentium.*?M\b',
251
+ self.info[0]['model name']) is not None
252
+
253
+ def _is_Prescott(self):
254
+ return self.is_PentiumIV() and self.has_sse3()
255
+
256
+ def _is_Nocona(self):
257
+ return (self.is_Intel() and
258
+ self.info[0]['cpu family'] in ('6', '15') and
259
+ # sse3 (two s's) and ssse3 (three s's) are different flags; this is intentional
260
+ (self.has_sse3() and not self.has_ssse3()) and
261
+ re.match(r'.*?\blm\b', self.info[0]['flags']) is not None)
262
+
263
+ def _is_Core2(self):
264
+ return (self.is_64bit() and self.is_Intel() and
265
+ re.match(r'.*?Core\(TM\)2\b',
266
+ self.info[0]['model name']) is not None)
267
+
268
+ def _is_Itanium(self):
269
+ return re.match(r'.*?Itanium\b',
270
+ self.info[0]['family']) is not None
271
+
272
+ def _is_XEON(self):
273
+ return re.match(r'.*?XEON\b',
274
+ self.info[0]['model name'], re.IGNORECASE) is not None
275
+
276
+ _is_Xeon = _is_XEON
277
+
278
+ # Power
279
+ def _is_Power(self):
280
+ return re.match(r'.*POWER.*',
281
+ self.info[0]['cpu']) is not None
282
+
283
+ def _is_Power7(self):
284
+ return re.match(r'.*POWER7.*',
285
+ self.info[0]['cpu']) is not None
286
+
287
+ def _is_Power8(self):
288
+ return re.match(r'.*POWER8.*',
289
+ self.info[0]['cpu']) is not None
290
+
291
+ def _is_Power9(self):
292
+ return re.match(r'.*POWER9.*',
293
+ self.info[0]['cpu']) is not None
294
+
295
+ def _has_Altivec(self):
296
+ return re.match(r'.*altivec\ supported.*',
297
+ self.info[0]['cpu']) is not None
298
+
299
+ # Varia
300
+
301
+ def _is_singleCPU(self):
302
+ return len(self.info) == 1
303
+
304
+ def _getNCPUs(self):
305
+ return len(self.info)
306
+
307
+ def _has_fdiv_bug(self):
308
+ return self.info[0]['fdiv_bug'] == 'yes'
309
+
310
+ def _has_f00f_bug(self):
311
+ return self.info[0]['f00f_bug'] == 'yes'
312
+
313
+ def _has_mmx(self):
314
+ return re.match(r'.*?\bmmx\b', self.info[0]['flags']) is not None
315
+
316
+ def _has_sse(self):
317
+ return re.match(r'.*?\bsse\b', self.info[0]['flags']) is not None
318
+
319
+ def _has_sse2(self):
320
+ return re.match(r'.*?\bsse2\b', self.info[0]['flags']) is not None
321
+
322
+ def _has_sse3(self):
323
+ return re.match(r'.*?\bpni\b', self.info[0]['flags']) is not None
324
+
325
+ def _has_ssse3(self):
326
+ return re.match(r'.*?\bssse3\b', self.info[0]['flags']) is not None
327
+
328
+ def _has_3dnow(self):
329
+ return re.match(r'.*?\b3dnow\b', self.info[0]['flags']) is not None
330
+
331
+ def _has_3dnowext(self):
332
+ return re.match(r'.*?\b3dnowext\b', self.info[0]['flags']) is not None
333
+
334
+
335
+ class IRIXCPUInfo(CPUInfoBase):
336
+ info = None
337
+
338
+ def __init__(self):
339
+ if self.info is not None:
340
+ return
341
+ info = key_value_from_command('sysconf', sep=' ',
342
+ successful_status=(0, 1))
343
+ self.__class__.info = info
344
+
345
+ def _not_impl(self):
346
+ pass
347
+
348
+ def _is_singleCPU(self):
349
+ return self.info.get('NUM_PROCESSORS') == '1'
350
+
351
+ def _getNCPUs(self):
352
+ return int(self.info.get('NUM_PROCESSORS', 1))
353
+
354
+ def __cputype(self, n):
355
+ return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
356
+
357
+ def _is_r2000(self):
358
+ return self.__cputype(2000)
359
+
360
+ def _is_r3000(self):
361
+ return self.__cputype(3000)
362
+
363
+ def _is_r3900(self):
364
+ return self.__cputype(3900)
365
+
366
+ def _is_r4000(self):
367
+ return self.__cputype(4000)
368
+
369
+ def _is_r4100(self):
370
+ return self.__cputype(4100)
371
+
372
+ def _is_r4300(self):
373
+ return self.__cputype(4300)
374
+
375
+ def _is_r4400(self):
376
+ return self.__cputype(4400)
377
+
378
+ def _is_r4600(self):
379
+ return self.__cputype(4600)
380
+
381
+ def _is_r4650(self):
382
+ return self.__cputype(4650)
383
+
384
+ def _is_r5000(self):
385
+ return self.__cputype(5000)
386
+
387
+ def _is_r6000(self):
388
+ return self.__cputype(6000)
389
+
390
+ def _is_r8000(self):
391
+ return self.__cputype(8000)
392
+
393
+ def _is_r10000(self):
394
+ return self.__cputype(10000)
395
+
396
+ def _is_r12000(self):
397
+ return self.__cputype(12000)
398
+
399
+ def _is_rorion(self):
400
+ return self.__cputype('orion')
401
+
402
+ def get_ip(self):
403
+ try:
404
+ return self.info.get('MACHINE')
405
+ except:
406
+ pass
407
+
408
+ def __machine(self, n):
409
+ return self.info.get('MACHINE').lower() == 'ip%s' % (n)
410
+
411
+ def _is_IP19(self):
412
+ return self.__machine(19)
413
+
414
+ def _is_IP20(self):
415
+ return self.__machine(20)
416
+
417
+ def _is_IP21(self):
418
+ return self.__machine(21)
419
+
420
+ def _is_IP22(self):
421
+ return self.__machine(22)
422
+
423
+ def _is_IP22_4k(self):
424
+ return self.__machine(22) and self._is_r4000()
425
+
426
+ def _is_IP22_5k(self):
427
+ return self.__machine(22) and self._is_r5000()
428
+
429
+ def _is_IP24(self):
430
+ return self.__machine(24)
431
+
432
+ def _is_IP25(self):
433
+ return self.__machine(25)
434
+
435
+ def _is_IP26(self):
436
+ return self.__machine(26)
437
+
438
+ def _is_IP27(self):
439
+ return self.__machine(27)
440
+
441
+ def _is_IP28(self):
442
+ return self.__machine(28)
443
+
444
+ def _is_IP30(self):
445
+ return self.__machine(30)
446
+
447
+ def _is_IP32(self):
448
+ return self.__machine(32)
449
+
450
+ def _is_IP32_5k(self):
451
+ return self.__machine(32) and self._is_r5000()
452
+
453
+ def _is_IP32_10k(self):
454
+ return self.__machine(32) and self._is_r10000()
455
+
456
+
457
+ class DarwinCPUInfo(CPUInfoBase):
458
+ info = None
459
+
460
+ def __init__(self):
461
+ if self.info is not None:
462
+ return
463
+ info = command_info(arch='arch',
464
+ machine='machine')
465
+ info['sysctl_hw'] = key_value_from_command(['sysctl', 'hw'], sep='=')
466
+ self.__class__.info = info
467
+
468
+ def _not_impl(self): pass
469
+
470
+ def _getNCPUs(self):
471
+ return int(self.info['sysctl_hw'].get('hw.ncpu', 1))
472
+
473
+ def _is_Power_Macintosh(self):
474
+ return self.info['sysctl_hw']['hw.machine'] == 'Power Macintosh'
475
+
476
+ def _is_i386(self):
477
+ return self.info['arch'] == 'i386'
478
+
479
+ def _is_ppc(self):
480
+ return self.info['arch'] == 'ppc'
481
+
482
+ def __machine(self, n):
483
+ return self.info['machine'] == 'ppc%s' % n
484
+
485
+ def _is_ppc601(self): return self.__machine(601)
486
+
487
+ def _is_ppc602(self): return self.__machine(602)
488
+
489
+ def _is_ppc603(self): return self.__machine(603)
490
+
491
+ def _is_ppc603e(self): return self.__machine('603e')
492
+
493
+ def _is_ppc604(self): return self.__machine(604)
494
+
495
+ def _is_ppc604e(self): return self.__machine('604e')
496
+
497
+ def _is_ppc620(self): return self.__machine(620)
498
+
499
+ def _is_ppc630(self): return self.__machine(630)
500
+
501
+ def _is_ppc740(self): return self.__machine(740)
502
+
503
+ def _is_ppc7400(self): return self.__machine(7400)
504
+
505
+ def _is_ppc7450(self): return self.__machine(7450)
506
+
507
+ def _is_ppc750(self): return self.__machine(750)
508
+
509
+ def _is_ppc403(self): return self.__machine(403)
510
+
511
+ def _is_ppc505(self): return self.__machine(505)
512
+
513
+ def _is_ppc801(self): return self.__machine(801)
514
+
515
+ def _is_ppc821(self): return self.__machine(821)
516
+
517
+ def _is_ppc823(self): return self.__machine(823)
518
+
519
+ def _is_ppc860(self): return self.__machine(860)
520
+
521
+ class NetBSDCPUInfo(CPUInfoBase):
522
+ info = None
523
+
524
+ def __init__(self):
525
+ if self.info is not None:
526
+ return
527
+ info = {}
528
+ info['sysctl_hw'] = key_value_from_command(['sysctl', 'hw'], sep='=')
529
+ info['arch'] = info['sysctl_hw'].get('hw.machine_arch', 1)
530
+ info['machine'] = info['sysctl_hw'].get('hw.machine', 1)
531
+ self.__class__.info = info
532
+
533
+ def _not_impl(self): pass
534
+
535
+ def _getNCPUs(self):
536
+ return int(self.info['sysctl_hw'].get('hw.ncpu', 1))
537
+
538
+ def _is_Intel(self):
539
+ if self.info['sysctl_hw'].get('hw.model', "")[0:5] == 'Intel':
540
+ return True
541
+ return False
542
+
543
+ def _is_AMD(self):
544
+ if self.info['sysctl_hw'].get('hw.model', "")[0:3] == 'AMD':
545
+ return True
546
+ return False
547
+
548
+ class SunOSCPUInfo(CPUInfoBase):
549
+ info = None
550
+
551
+ def __init__(self):
552
+ if self.info is not None:
553
+ return
554
+ info = command_info(arch='arch',
555
+ mach='mach',
556
+ uname_i=['uname', '-i'],
557
+ isainfo_b=['isainfo', '-b'],
558
+ isainfo_n=['isainfo', '-n'],
559
+ )
560
+ info['uname_X'] = key_value_from_command(['uname', '-X'], sep='=')
561
+ for line in command_by_line(['psrinfo', '-v', '0']):
562
+ m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
563
+ if m:
564
+ info['processor'] = m.group('p')
565
+ break
566
+ self.__class__.info = info
567
+
568
+ def _not_impl(self):
569
+ pass
570
+
571
+ def _is_i386(self):
572
+ return self.info['isainfo_n'] == 'i386'
573
+
574
+ def _is_sparc(self):
575
+ return self.info['isainfo_n'] == 'sparc'
576
+
577
+ def _is_sparcv9(self):
578
+ return self.info['isainfo_n'] == 'sparcv9'
579
+
580
+ def _getNCPUs(self):
581
+ return int(self.info['uname_X'].get('NumCPU', 1))
582
+
583
+ def _is_sun4(self):
584
+ return self.info['arch'] == 'sun4'
585
+
586
+ def _is_SUNW(self):
587
+ return re.match(r'SUNW', self.info['uname_i']) is not None
588
+
589
+ def _is_sparcstation5(self):
590
+ return re.match(r'.*SPARCstation-5', self.info['uname_i']) is not None
591
+
592
+ def _is_ultra1(self):
593
+ return re.match(r'.*Ultra-1', self.info['uname_i']) is not None
594
+
595
+ def _is_ultra250(self):
596
+ return re.match(r'.*Ultra-250', self.info['uname_i']) is not None
597
+
598
+ def _is_ultra2(self):
599
+ return re.match(r'.*Ultra-2', self.info['uname_i']) is not None
600
+
601
+ def _is_ultra30(self):
602
+ return re.match(r'.*Ultra-30', self.info['uname_i']) is not None
603
+
604
+ def _is_ultra4(self):
605
+ return re.match(r'.*Ultra-4', self.info['uname_i']) is not None
606
+
607
+ def _is_ultra5_10(self):
608
+ return re.match(r'.*Ultra-5_10', self.info['uname_i']) is not None
609
+
610
+ def _is_ultra5(self):
611
+ return re.match(r'.*Ultra-5', self.info['uname_i']) is not None
612
+
613
+ def _is_ultra60(self):
614
+ return re.match(r'.*Ultra-60', self.info['uname_i']) is not None
615
+
616
+ def _is_ultra80(self):
617
+ return re.match(r'.*Ultra-80', self.info['uname_i']) is not None
618
+
619
+ def _is_ultraenterprice(self):
620
+ return re.match(r'.*Ultra-Enterprise', self.info['uname_i']) is not None
621
+
622
+ def _is_ultraenterprice10k(self):
623
+ return re.match(r'.*Ultra-Enterprise-10000', self.info['uname_i']) is not None
624
+
625
+ def _is_sunfire(self):
626
+ return re.match(r'.*Sun-Fire', self.info['uname_i']) is not None
627
+
628
+ def _is_ultra(self):
629
+ return re.match(r'.*Ultra', self.info['uname_i']) is not None
630
+
631
+ def _is_cpusparcv7(self):
632
+ return self.info['processor'] == 'sparcv7'
633
+
634
+ def _is_cpusparcv8(self):
635
+ return self.info['processor'] == 'sparcv8'
636
+
637
+ def _is_cpusparcv9(self):
638
+ return self.info['processor'] == 'sparcv9'
639
+
640
+
641
+ class Win32CPUInfo(CPUInfoBase):
642
+ info = None
643
+ pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
644
+ # XXX: what does the value of
645
+ # HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
646
+ # mean?
647
+
648
+ def __init__(self):
649
+ try:
650
+ import _winreg
651
+ except ImportError: # Python 3
652
+ import winreg as _winreg
653
+
654
+ if self.info is not None:
655
+ return
656
+ info = []
657
+ try:
658
+ #XXX: Bad style to use so long `try:...except:...`. Fix it!
659
+
660
+ prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"
661
+ r"\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE)
662
+ chnd = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, self.pkey)
663
+ pnum = 0
664
+ while True:
665
+ try:
666
+ proc = _winreg.EnumKey(chnd, pnum)
667
+ except _winreg.error:
668
+ break
669
+ else:
670
+ pnum += 1
671
+ info.append({"Processor": proc})
672
+ phnd = _winreg.OpenKey(chnd, proc)
673
+ pidx = 0
674
+ while True:
675
+ try:
676
+ name, value, vtpe = _winreg.EnumValue(phnd, pidx)
677
+ except _winreg.error:
678
+ break
679
+ else:
680
+ pidx = pidx + 1
681
+ info[-1][name] = value
682
+ if name == "Identifier":
683
+ srch = prgx.search(value)
684
+ if srch:
685
+ info[-1]["Family"] = int(srch.group("FML"))
686
+ info[-1]["Model"] = int(srch.group("MDL"))
687
+ info[-1]["Stepping"] = int(srch.group("STP"))
688
+ except:
689
+ print(sys.exc_info()[1], '(ignoring)')
690
+ self.__class__.info = info
691
+
692
+ def _not_impl(self):
693
+ pass
694
+
695
+ # Athlon
696
+
697
+ def _is_AMD(self):
698
+ return self.info[0]['VendorIdentifier'] == 'AuthenticAMD'
699
+
700
+ def _is_Am486(self):
701
+ return self.is_AMD() and self.info[0]['Family'] == 4
702
+
703
+ def _is_Am5x86(self):
704
+ return self.is_AMD() and self.info[0]['Family'] == 4
705
+
706
+ def _is_AMDK5(self):
707
+ return (self.is_AMD() and self.info[0]['Family'] == 5 and
708
+ self.info[0]['Model'] in [0, 1, 2, 3])
709
+
710
+ def _is_AMDK6(self):
711
+ return (self.is_AMD() and self.info[0]['Family'] == 5 and
712
+ self.info[0]['Model'] in [6, 7])
713
+
714
+ def _is_AMDK6_2(self):
715
+ return (self.is_AMD() and self.info[0]['Family'] == 5 and
716
+ self.info[0]['Model'] == 8)
717
+
718
+ def _is_AMDK6_3(self):
719
+ return (self.is_AMD() and self.info[0]['Family'] == 5 and
720
+ self.info[0]['Model'] == 9)
721
+
722
+ def _is_AMDK7(self):
723
+ return self.is_AMD() and self.info[0]['Family'] == 6
724
+
725
+ # To reliably distinguish between the different types of AMD64 chips
726
+ # (Athlon64, Opteron, Athlon64 X2, Sempron, Turion 64, etc.) would
727
+ # require looking at the 'brand' from cpuid
728
+
729
+ def _is_AMD64(self):
730
+ return self.is_AMD() and self.info[0]['Family'] == 15
731
+
732
+ # Intel
733
+
734
+ def _is_Intel(self):
735
+ return self.info[0]['VendorIdentifier'] == 'GenuineIntel'
736
+
737
+ def _is_i386(self):
738
+ return self.info[0]['Family'] == 3
739
+
740
+ def _is_i486(self):
741
+ return self.info[0]['Family'] == 4
742
+
743
+ def _is_i586(self):
744
+ return self.is_Intel() and self.info[0]['Family'] == 5
745
+
746
+ def _is_i686(self):
747
+ return self.is_Intel() and self.info[0]['Family'] == 6
748
+
749
+ def _is_Pentium(self):
750
+ return self.is_Intel() and self.info[0]['Family'] == 5
751
+
752
+ def _is_PentiumMMX(self):
753
+ return (self.is_Intel() and self.info[0]['Family'] == 5 and
754
+ self.info[0]['Model'] == 4)
755
+
756
+ def _is_PentiumPro(self):
757
+ return (self.is_Intel() and self.info[0]['Family'] == 6 and
758
+ self.info[0]['Model'] == 1)
759
+
760
+ def _is_PentiumII(self):
761
+ return (self.is_Intel() and self.info[0]['Family'] == 6 and
762
+ self.info[0]['Model'] in [3, 5, 6])
763
+
764
+ def _is_PentiumIII(self):
765
+ return (self.is_Intel() and self.info[0]['Family'] == 6 and
766
+ self.info[0]['Model'] in [7, 8, 9, 10, 11])
767
+
768
+ def _is_PentiumIV(self):
769
+ return self.is_Intel() and self.info[0]['Family'] == 15
770
+
771
+ def _is_PentiumM(self):
772
+ return (self.is_Intel() and self.info[0]['Family'] == 6 and
773
+ self.info[0]['Model'] in [9, 13, 14])
774
+
775
+ def _is_Core2(self):
776
+ return (self.is_Intel() and self.info[0]['Family'] == 6 and
777
+ self.info[0]['Model'] in [15, 16, 17])
778
+
779
+ # Varia
780
+
781
+ def _is_singleCPU(self):
782
+ return len(self.info) == 1
783
+
784
+ def _getNCPUs(self):
785
+ return len(self.info)
786
+
787
+ def _has_mmx(self):
788
+ if self.is_Intel():
789
+ return ((self.info[0]['Family'] == 5 and
790
+ self.info[0]['Model'] == 4) or
791
+ (self.info[0]['Family'] in [6, 15]))
792
+ elif self.is_AMD():
793
+ return self.info[0]['Family'] in [5, 6, 15]
794
+ else:
795
+ return False
796
+
797
+ def _has_sse(self):
798
+ if self.is_Intel():
799
+ return ((self.info[0]['Family'] == 6 and
800
+ self.info[0]['Model'] in [7, 8, 9, 10, 11]) or
801
+ self.info[0]['Family'] == 15)
802
+ elif self.is_AMD():
803
+ return ((self.info[0]['Family'] == 6 and
804
+ self.info[0]['Model'] in [6, 7, 8, 10]) or
805
+ self.info[0]['Family'] == 15)
806
+ else:
807
+ return False
808
+
809
+ def _has_sse2(self):
810
+ if self.is_Intel():
811
+ return self.is_PentiumIV() or self.is_PentiumM() or self.is_Core2()
812
+ elif self.is_AMD():
813
+ return self.is_AMD64()
814
+ else:
815
+ return False
816
+
817
+ def _has_3dnow(self):
818
+ return self.is_AMD() and self.info[0]['Family'] in [5, 6, 15]
819
+
820
+ def _has_3dnowext(self):
821
+ return self.is_AMD() and self.info[0]['Family'] in [6, 15]
822
+
823
+
824
+ if sys.platform.startswith('linux'): # variations: linux2,linux-i386 (any others?)
825
+ cpuinfo = LinuxCPUInfo
826
+ elif sys.platform.startswith('irix'):
827
+ cpuinfo = IRIXCPUInfo
828
+ elif sys.platform == 'darwin':
829
+ cpuinfo = DarwinCPUInfo
830
+ elif sys.platform[0:6] == 'netbsd':
831
+ cpuinfo = NetBSDCPUInfo
832
+ elif sys.platform.startswith('sunos'):
833
+ cpuinfo = SunOSCPUInfo
834
+ elif sys.platform.startswith('win32'):
835
+ cpuinfo = Win32CPUInfo
836
+ elif sys.platform.startswith('cygwin'):
837
+ cpuinfo = LinuxCPUInfo
838
+ #XXX: other OS's. Eg. use _winreg on Win32. Or os.uname on unices.
839
+ else:
840
+ cpuinfo = CPUInfoBase
841
+
842
+ cpu = cpuinfo()
843
+
844
+ if __name__ == "__main__":
845
+
846
+ cpu.is_blaa()
847
+ cpu.is_Intel()
848
+ cpu.is_Alpha()
849
+
850
+ info = []
851
+ for name in dir(cpuinfo):
852
+ if name[0] == '_' and name[1] != '_':
853
+ r = getattr(cpu, name[1:])()
854
+ if r:
855
+ if r != 1:
856
+ info.append('%s=%s' % (name[1:], r))
857
+ else:
858
+ info.append(name[1:])
859
+ print('CPU information: ' + ' '.join(info))
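
For reference, a minimal usage sketch of the cpuinfo module added above (an illustration, not part of the diff). It assumes the file lands at numpy/distutils/cpuinfo.py in this site-packages tree, and it relies only on behaviour the file itself shows: the module-level `cpu = cpuinfo()` singleton and the public-name dispatch exercised by the `__main__` block (e.g. `cpu.is_Intel()` resolving to `_is_Intel()`, and `getattr(cpu, name[1:])()` calling the underscore-prefixed predicates).

    # Hypothetical usage; the import path is an assumption based on where
    # numpy normally ships this module.
    from numpy.distutils import cpuinfo

    cpu = cpuinfo.cpu        # module-level singleton created by `cpu = cpuinfo()` above
    print(cpu.is_Intel())    # True on GenuineIntel parts, False otherwise
    print(cpu.is_AMD())      # vendor check reused by the SSE/3DNow! predicates
    print(cpu.getNCPUs())    # logical CPU count from the detected backend
    print(cpu.is_blaa())     # unknown queries are expected to resolve harmlessly to
                             # False (the unguarded is_blaa() call in __main__ suggests so)
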
llmeval-env/lib/python3.10/site-packages/nvidia_cuda_runtime_cu12-12.1.105.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
1
+ pip
llmeval-env/lib/python3.10/site-packages/nvidia_cuda_runtime_cu12-12.1.105.dist-info/License.txt ADDED
@@ -0,0 +1,1568 @@
1
+ End User License Agreement
2
+ --------------------------
3
+
4
+
5
+ Preface
6
+ -------
7
+
8
+ The Software License Agreement in Chapter 1 and the Supplement
9
+ in Chapter 2 contain license terms and conditions that govern
10
+ the use of NVIDIA software. By accepting this agreement, you
11
+ agree to comply with all the terms and conditions applicable
12
+ to the product(s) included herein.
13
+
14
+
15
+ NVIDIA Driver
16
+
17
+
18
+ Description
19
+
20
+ This package contains the operating system driver and
21
+ fundamental system software components for NVIDIA GPUs.
22
+
23
+
24
+ NVIDIA CUDA Toolkit
25
+
26
+
27
+ Description
28
+
29
+ The NVIDIA CUDA Toolkit provides command-line and graphical
30
+ tools for building, debugging and optimizing the performance
31
+ of applications accelerated by NVIDIA GPUs, runtime and math
32
+ libraries, and documentation including programming guides,
33
+ user manuals, and API references.
34
+
35
+
36
+ Default Install Location of CUDA Toolkit
37
+
38
+ Windows platform:
39
+
40
+ %ProgramFiles%\NVIDIA GPU Computing Toolkit\CUDA\v#.#
41
+
42
+ Linux platform:
43
+
44
+ /usr/local/cuda-#.#
45
+
46
+ Mac platform:
47
+
48
+ /Developer/NVIDIA/CUDA-#.#
49
+
50
+
51
+ NVIDIA CUDA Samples
52
+
53
+
54
+ Description
55
+
56
+ This package includes over 100+ CUDA examples that demonstrate
57
+ various CUDA programming principles, and efficient CUDA
58
+ implementation of algorithms in specific application domains.
59
+
60
+
61
+ Default Install Location of CUDA Samples
62
+
63
+ Windows platform:
64
+
65
+ %ProgramData%\NVIDIA Corporation\CUDA Samples\v#.#
66
+
67
+ Linux platform:
68
+
69
+ /usr/local/cuda-#.#/samples
70
+
71
+ and
72
+
73
+ $HOME/NVIDIA_CUDA-#.#_Samples
74
+
75
+ Mac platform:
76
+
77
+ /Developer/NVIDIA/CUDA-#.#/samples
78
+
79
+
80
+ NVIDIA Nsight Visual Studio Edition (Windows only)
81
+
82
+
83
+ Description
84
+
85
+ NVIDIA Nsight Development Platform, Visual Studio Edition is a
86
+ development environment integrated into Microsoft Visual
87
+ Studio that provides tools for debugging, profiling, analyzing
88
+ and optimizing your GPU computing and graphics applications.
89
+
90
+
91
+ Default Install Location of Nsight Visual Studio Edition
92
+
93
+ Windows platform:
94
+
95
+ %ProgramFiles(x86)%\NVIDIA Corporation\Nsight Visual Studio Edition #.#
96
+
97
+
98
+ 1. License Agreement for NVIDIA Software Development Kits
99
+ ---------------------------------------------------------
100
+
101
+
102
+ Release Date: July 26, 2018
103
+ ---------------------------
104
+
105
+
106
+ Important Notice - Read before downloading,
107
+ copying or using the licensed software:
108
+ -------------------------------------------------------
109
+
110
+ This license agreement, including exhibits attached
111
+ ("Agreement”) is a legal agreement between you and NVIDIA
112
+ Corporation ("NVIDIA") and governs your use of a NVIDIA
113
+ software development kit (“SDK”).
114
+
115
+ Each SDK has its own set of software and materials, but here
116
+ is a description of the types of items that may be included in
117
+ a SDK: source code, header files, APIs, data sets and assets
118
+ (examples include images, textures, models, scenes, videos,
119
+ native API input/output files), binary software, sample code,
120
+ libraries, utility programs, programming code and
121
+ documentation.
122
+
123
+ This Agreement can be accepted only by an adult of legal age
124
+ of majority in the country in which the SDK is used.
125
+
126
+ If you are entering into this Agreement on behalf of a company
127
+ or other legal entity, you represent that you have the legal
128
+ authority to bind the entity to this Agreement, in which case
129
+ “you” will mean the entity you represent.
130
+
131
+ If you don’t have the required age or authority to accept
132
+ this Agreement, or if you don’t accept all the terms and
133
+ conditions of this Agreement, do not download, install or use
134
+ the SDK.
135
+
136
+ You agree to use the SDK only for purposes that are permitted
137
+ by (a) this Agreement, and (b) any applicable law, regulation
138
+ or generally accepted practices or guidelines in the relevant
139
+ jurisdictions.
140
+
141
+
142
+ 1.1. License
143
+
144
+
145
+ 1.1.1. License Grant
146
+
147
+ Subject to the terms of this Agreement, NVIDIA hereby grants
148
+ you a non-exclusive, non-transferable license, without the
149
+ right to sublicense (except as expressly provided in this
150
+ Agreement) to:
151
+
152
+ 1. Install and use the SDK,
153
+
154
+ 2. Modify and create derivative works of sample source code
155
+ delivered in the SDK, and
156
+
157
+ 3. Distribute those portions of the SDK that are identified
158
+ in this Agreement as distributable, as incorporated in
159
+ object code format into a software application that meets
160
+ the distribution requirements indicated in this Agreement.
161
+
162
+
163
+ 1.1.2. Distribution Requirements
164
+
165
+ These are the distribution requirements for you to exercise
166
+ the distribution grant:
167
+
168
+ 1. Your application must have material additional
169
+ functionality, beyond the included portions of the SDK.
170
+
171
+ 2. The distributable portions of the SDK shall only be
172
+ accessed by your application.
173
+
174
+ 3. The following notice shall be included in modifications
175
+ and derivative works of sample source code distributed:
176
+ “This software contains source code provided by NVIDIA
177
+ Corporation.”
178
+
179
+ 4. Unless a developer tool is identified in this Agreement
180
+ as distributable, it is delivered for your internal use
181
+ only.
182
+
183
+ 5. The terms under which you distribute your application
184
+ must be consistent with the terms of this Agreement,
185
+ including (without limitation) terms relating to the
186
+ license grant and license restrictions and protection of
187
+ NVIDIA’s intellectual property rights. Additionally, you
188
+ agree that you will protect the privacy, security and
189
+ legal rights of your application users.
190
+
191
+ 6. You agree to notify NVIDIA in writing of any known or
192
+ suspected distribution or use of the SDK not in compliance
193
+ with the requirements of this Agreement, and to enforce
194
+ the terms of your agreements with respect to distributed
195
+ SDK.
196
+
197
+
198
+ 1.1.3. Authorized Users
199
+
200
+ You may allow employees and contractors of your entity or of
201
+ your subsidiary(ies) to access and use the SDK from your
202
+ secure network to perform work on your behalf.
203
+
204
+ If you are an academic institution you may allow users
205
+ enrolled or employed by the academic institution to access and
206
+ use the SDK from your secure network.
207
+
208
+ You are responsible for the compliance with the terms of this
209
+ Agreement by your authorized users. If you become aware that
210
+ your authorized users didn’t follow the terms of this
211
+ Agreement, you agree to take reasonable steps to resolve the
212
+ non-compliance and prevent new occurrences.
213
+
214
+
215
+ 1.1.4. Pre-Release SDK
216
+
217
+ The SDK versions identified as alpha, beta, preview or
218
+ otherwise as pre-release, may not be fully functional, may
219
+ contain errors or design flaws, and may have reduced or
220
+ different security, privacy, accessibility, availability, and
221
+ reliability standards relative to commercial versions of
222
+ NVIDIA software and materials. Use of a pre-release SDK may
223
+ result in unexpected results, loss of data, project delays or
224
+ other unpredictable damage or loss.
225
+
226
+ You may use a pre-release SDK at your own risk, understanding
227
+ that pre-release SDKs are not intended for use in production
228
+ or business-critical systems.
229
+
230
+ NVIDIA may choose not to make available a commercial version
231
+ of any pre-release SDK. NVIDIA may also choose to abandon
232
+ development and terminate the availability of a pre-release
233
+ SDK at any time without liability.
234
+
235
+
236
+ 1.1.5. Updates
237
+
238
+ NVIDIA may, at its option, make available patches, workarounds
239
+ or other updates to this SDK. Unless the updates are provided
240
+ with their separate governing terms, they are deemed part of
241
+ the SDK licensed to you as provided in this Agreement. You
242
+ agree that the form and content of the SDK that NVIDIA
243
+ provides may change without prior notice to you. While NVIDIA
244
+ generally maintains compatibility between versions, NVIDIA may
245
+ in some cases make changes that introduce incompatibilities in
246
+ future versions of the SDK.
247
+
248
+
249
+ 1.1.6. Third Party Licenses
250
+
251
+ The SDK may come bundled with, or otherwise include or be
252
+ distributed with, third party software licensed by a NVIDIA
253
+ supplier and/or open source software provided under an open
254
+ source license. Use of third party software is subject to the
255
+ third-party license terms, or in the absence of third party
256
+ terms, the terms of this Agreement. Copyright to third party
257
+ software is held by the copyright holders indicated in the
258
+ third-party software or license.
259
+
260
+
261
+ 1.1.7. Reservation of Rights
262
+
263
+ NVIDIA reserves all rights, title, and interest in and to the
264
+ SDK, not expressly granted to you under this Agreement.
265
+
266
+
267
+ 1.2. Limitations
268
+
269
+ The following license limitations apply to your use of the
270
+ SDK:
271
+
272
+ 1. You may not reverse engineer, decompile or disassemble,
273
+ or remove copyright or other proprietary notices from any
274
+ portion of the SDK or copies of the SDK.
275
+
276
+ 2. Except as expressly provided in this Agreement, you may
277
+ not copy, sell, rent, sublicense, transfer, distribute,
278
+ modify, or create derivative works of any portion of the
279
+ SDK. For clarity, you may not distribute or sublicense the
280
+ SDK as a stand-alone product.
281
+
282
+ 3. Unless you have an agreement with NVIDIA for this
283
+ purpose, you may not indicate that an application created
284
+ with the SDK is sponsored or endorsed by NVIDIA.
285
+
286
+ 4. You may not bypass, disable, or circumvent any
287
+ encryption, security, digital rights management or
288
+ authentication mechanism in the SDK.
289
+
290
+ 5. You may not use the SDK in any manner that would cause it
291
+ to become subject to an open source software license. As
292
+ examples, licenses that require as a condition of use,
293
+ modification, and/or distribution that the SDK be:
294
+
295
+ a. Disclosed or distributed in source code form;
296
+
297
+ b. Licensed for the purpose of making derivative works;
298
+ or
299
+
300
+ c. Redistributable at no charge.
301
+
302
+ 6. Unless you have an agreement with NVIDIA for this
303
+ purpose, you may not use the SDK with any system or
304
+ application where the use or failure of the system or
305
+ application can reasonably be expected to threaten or
306
+ result in personal injury, death, or catastrophic loss.
307
+ Examples include use in avionics, navigation, military,
308
+ medical, life support or other life critical applications.
309
+ NVIDIA does not design, test or manufacture the SDK for
310
+ these critical uses and NVIDIA shall not be liable to you
311
+ or any third party, in whole or in part, for any claims or
312
+ damages arising from such uses.
313
+
314
+ 7. You agree to defend, indemnify and hold harmless NVIDIA
315
+ and its affiliates, and their respective employees,
316
+ contractors, agents, officers and directors, from and
317
+ against any and all claims, damages, obligations, losses,
318
+ liabilities, costs or debt, fines, restitutions and
319
+ expenses (including but not limited to attorney’s fees
320
+ and costs incident to establishing the right of
321
+ indemnification) arising out of or related to your use of
322
+ the SDK outside of the scope of this Agreement, or not in
323
+ compliance with its terms.
324
+
325
+
326
+ 1.3. Ownership
327
+
328
+ 1. NVIDIA or its licensors hold all rights, title and
329
+ interest in and to the SDK and its modifications and
330
+ derivative works, including their respective intellectual
331
+ property rights, subject to your rights described in this
332
+ section. This SDK may include software and materials from
333
+ NVIDIA’s licensors, and these licensors are intended
334
+ third party beneficiaries that may enforce this Agreement
335
+ with respect to their intellectual property rights.
336
+
337
+ 2. You hold all rights, title and interest in and to your
338
+ applications and your derivative works of the sample
339
+ source code delivered in the SDK, including their
340
+ respective intellectual property rights, subject to
341
+ NVIDIA’s rights described in this section.
342
+
343
+ 3. You may, but don’t have to, provide to NVIDIA
344
+ suggestions, feature requests or other feedback regarding
345
+ the SDK, including possible enhancements or modifications
346
+ to the SDK. For any feedback that you voluntarily provide,
347
+ you hereby grant NVIDIA and its affiliates a perpetual,
348
+ non-exclusive, worldwide, irrevocable license to use,
349
+ reproduce, modify, license, sublicense (through multiple
350
+ tiers of sublicensees), and distribute (through multiple
351
+ tiers of distributors) it without the payment of any
352
+ royalties or fees to you. NVIDIA will use feedback at its
353
+ choice. NVIDIA is constantly looking for ways to improve
354
+ its products, so you may send feedback to NVIDIA through
355
+ the developer portal at https://developer.nvidia.com.
356
+
357
+
358
+ 1.4. No Warranties
359
+
360
+ THE SDK IS PROVIDED BY NVIDIA “AS IS” AND “WITH ALL
361
+ FAULTS.” TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND
362
+ ITS AFFILIATES EXPRESSLY DISCLAIM ALL WARRANTIES OF ANY KIND
363
+ OR NATURE, WHETHER EXPRESS, IMPLIED OR STATUTORY, INCLUDING,
364
+ BUT NOT LIMITED TO, ANY WARRANTIES OF MERCHANTABILITY, FITNESS
365
+ FOR A PARTICULAR PURPOSE, TITLE, NON-INFRINGEMENT, OR THE
366
+ ABSENCE OF ANY DEFECTS THEREIN, WHETHER LATENT OR PATENT. NO
367
+ WARRANTY IS MADE ON THE BASIS OF TRADE USAGE, COURSE OF
368
+ DEALING OR COURSE OF TRADE.
369
+
370
+
371
+ 1.5. Limitation of Liability
372
+
373
+ TO THE MAXIMUM EXTENT PERMITTED BY LAW, NVIDIA AND ITS
374
+ AFFILIATES SHALL NOT BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
375
+ PUNITIVE OR CONSEQUENTIAL DAMAGES, OR ANY LOST PROFITS, LOSS
376
+ OF USE, LOSS OF DATA OR LOSS OF GOODWILL, OR THE COSTS OF
377
+ PROCURING SUBSTITUTE PRODUCTS, ARISING OUT OF OR IN CONNECTION
378
+ WITH THIS AGREEMENT OR THE USE OR PERFORMANCE OF THE SDK,
379
+ WHETHER SUCH LIABILITY ARISES FROM ANY CLAIM BASED UPON BREACH
380
+ OF CONTRACT, BREACH OF WARRANTY, TORT (INCLUDING NEGLIGENCE),
381
+ PRODUCT LIABILITY OR ANY OTHER CAUSE OF ACTION OR THEORY OF
382
+ LIABILITY. IN NO EVENT WILL NVIDIA’S AND ITS AFFILIATES
383
+ TOTAL CUMULATIVE LIABILITY UNDER OR ARISING OUT OF THIS
384
+ AGREEMENT EXCEED US$10.00. THE NATURE OF THE LIABILITY OR THE
385
+ NUMBER OF CLAIMS OR SUITS SHALL NOT ENLARGE OR EXTEND THIS
386
+ LIMIT.
387
+
388
+ These exclusions and limitations of liability shall apply
389
+ regardless if NVIDIA or its affiliates have been advised of
390
+ the possibility of such damages, and regardless of whether a
391
+ remedy fails its essential purpose. These exclusions and
392
+ limitations of liability form an essential basis of the
393
+ bargain between the parties, and, absent any of these
394
+ exclusions or limitations of liability, the provisions of this
395
+ Agreement, including, without limitation, the economic terms,
396
+ would be substantially different.
397
+
398
+
399
+ 1.6. Termination
400
+
401
+ 1. This Agreement will continue to apply until terminated by
402
+ either you or NVIDIA as described below.
403
+
404
+ 2. If you want to terminate this Agreement, you may do so by
405
+ stopping to use the SDK.
406
+
407
+ 3. NVIDIA may, at any time, terminate this Agreement if:
408
+
409
+ a. (i) you fail to comply with any term of this
410
+ Agreement and the non-compliance is not fixed within
411
+ thirty (30) days following notice from NVIDIA (or
412
+ immediately if you violate NVIDIA’s intellectual
413
+ property rights);
414
+
415
+ b. (ii) you commence or participate in any legal
416
+ proceeding against NVIDIA with respect to the SDK; or
417
+
418
+ c. (iii) NVIDIA decides to no longer provide the SDK in
419
+ a country or, in NVIDIA’s sole discretion, the
420
+ continued use of it is no longer commercially viable.
421
+
422
+ 4. Upon any termination of this Agreement, you agree to
423
+ promptly discontinue use of the SDK and destroy all copies
424
+ in your possession or control. Your prior distributions in
425
+ accordance with this Agreement are not affected by the
426
+ termination of this Agreement. Upon written request, you
427
+ will certify in writing that you have complied with your
428
+ commitments under this section. Upon any termination of
429
+ this Agreement all provisions survive except for the
430
+ license grant provisions.
431
+
432
+
433
+ 1.7. General
434
+
435
+ If you wish to assign this Agreement or your rights and
436
+ obligations, including by merger, consolidation, dissolution
437
+ or operation of law, contact NVIDIA to ask for permission. Any
438
+ attempted assignment not approved by NVIDIA in writing shall
439
+ be void and of no effect. NVIDIA may assign, delegate or
440
+ transfer this Agreement and its rights and obligations, and if
441
+ to a non-affiliate you will be notified.
442
+
443
+ You agree to cooperate with NVIDIA and provide reasonably
444
+ requested information to verify your compliance with this
445
+ Agreement.
446
+
447
+ This Agreement will be governed in all respects by the laws of
448
+ the United States and of the State of Delaware as those laws
449
+ are applied to contracts entered into and performed entirely
450
+ within Delaware by Delaware residents, without regard to the
451
+ conflicts of laws principles. The United Nations Convention on
452
+ Contracts for the International Sale of Goods is specifically
453
+ disclaimed. You agree to all terms of this Agreement in the
454
+ English language.
455
+
456
+ The state or federal courts residing in Santa Clara County,
457
+ California shall have exclusive jurisdiction over any dispute
458
+ or claim arising out of this Agreement. Notwithstanding this,
459
+ you agree that NVIDIA shall still be allowed to apply for
460
+ injunctive remedies or an equivalent type of urgent legal
461
+ relief in any jurisdiction.
462
+
463
+ If any court of competent jurisdiction determines that any
464
+ provision of this Agreement is illegal, invalid or
465
+ unenforceable, such provision will be construed as limited to
466
+ the extent necessary to be consistent with and fully
467
+ enforceable under the law and the remaining provisions will
468
+ remain in full force and effect. Unless otherwise specified,
469
+ remedies are cumulative.
470
+
471
+ Each party acknowledges and agrees that the other is an
472
+ independent contractor in the performance of this Agreement.
473
+
474
+ The SDK has been developed entirely at private expense and is
475
+ “commercial items” consisting of “commercial computer
476
+ software” and “commercial computer software
477
+ documentation” provided with RESTRICTED RIGHTS. Use,
478
+ duplication or disclosure by the U.S. Government or a U.S.
479
+ Government subcontractor is subject to the restrictions in
480
+ this Agreement pursuant to DFARS 227.7202-3(a) or as set forth
481
+ in subparagraphs (c)(1) and (2) of the Commercial Computer
482
+ Software - Restricted Rights clause at FAR 52.227-19, as
483
+ applicable. Contractor/manufacturer is NVIDIA, 2788 San Tomas
484
+ Expressway, Santa Clara, CA 95051.
485
+
486
+ The SDK is subject to United States export laws and
487
+ regulations. You agree that you will not ship, transfer or
488
+ export the SDK into any country, or use the SDK in any manner,
489
+ prohibited by the United States Bureau of Industry and
490
+ Security or economic sanctions regulations administered by the
491
+ U.S. Department of Treasury’s Office of Foreign Assets
492
+ Control (OFAC), or any applicable export laws, restrictions or
493
+ regulations. These laws include restrictions on destinations,
494
+ end users and end use. By accepting this Agreement, you
495
+ confirm that you are not a resident or citizen of any country
496
+ currently embargoed by the U.S. and that you are not otherwise
497
+ prohibited from receiving the SDK.
498
+
499
+ Any notice delivered by NVIDIA to you under this Agreement
500
+ will be delivered via mail, email or fax. You agree that any
501
+ notices that NVIDIA sends you electronically will satisfy any
502
+ legal communication requirements. Please direct your legal
503
+ notices or other correspondence to NVIDIA Corporation, 2788
504
+ San Tomas Expressway, Santa Clara, California 95051, United
505
+ States of America, Attention: Legal Department.
506
+
507
+ This Agreement and any exhibits incorporated into this
508
+ Agreement constitute the entire agreement of the parties with
509
+ respect to the subject matter of this Agreement and supersede
510
+ all prior negotiations or documentation exchanged between the
511
+ parties relating to this SDK license. Any additional and/or
512
+ conflicting terms on documents issued by you are null, void,
513
+ and invalid. Any amendment or waiver under this Agreement
514
+ shall be in writing and signed by representatives of both
515
+ parties.
516
+
517
+
518
+ 2. CUDA Toolkit Supplement to Software License Agreement for
519
+ NVIDIA Software Development Kits
520
+ ------------------------------------------------------------
521
+
522
+
523
+ Release date: August 16, 2018
524
+ -----------------------------
525
+
526
+ The terms in this supplement govern your use of the NVIDIA
527
+ CUDA Toolkit SDK under the terms of your license agreement
528
+ (“Agreement”) as modified by this supplement. Capitalized
529
+ terms used but not defined below have the meaning assigned to
530
+ them in the Agreement.
531
+
532
+ This supplement is an exhibit to the Agreement and is
533
+ incorporated as an integral part of the Agreement. In the
534
+ event of conflict between the terms in this supplement and the
535
+ terms in the Agreement, the terms in this supplement govern.
536
+
537
+
538
+ 2.1. License Scope
539
+
540
+ The SDK is licensed for you to develop applications only for
541
+ use in systems with NVIDIA GPUs.
542
+
543
+
544
+ 2.2. Distribution
545
+
546
+ The portions of the SDK that are distributable under the
547
+ Agreement are listed in Attachment A.
548
+
549
+
550
+ 2.3. Operating Systems
551
+
552
+ Those portions of the SDK designed exclusively for use on the
553
+ Linux or FreeBSD operating systems, or other operating systems
554
+ derived from the source code to these operating systems, may
555
+ be copied and redistributed for use in accordance with this
556
+ Agreement, provided that the object code files are not
557
+ modified in any way (except for unzipping of compressed
558
+ files).
559
+
560
+
561
+ 2.4. Audio and Video Encoders and Decoders
562
+
563
+ You acknowledge and agree that it is your sole responsibility
564
+ to obtain any additional third-party licenses required to
565
+ make, have made, use, have used, sell, import, and offer for
566
+ sale your products or services that include or incorporate any
567
+ third-party software and content relating to audio and/or
568
+ video encoders and decoders from, including but not limited
569
+ to, Microsoft, Thomson, Fraunhofer IIS, Sisvel S.p.A.,
570
+ MPEG-LA, and Coding Technologies. NVIDIA does not grant to you
571
+ under this Agreement any necessary patent or other rights with
572
+ respect to any audio and/or video encoders and decoders.
573
+
574
+
575
+ 2.5. Licensing
576
+
577
+ If the distribution terms in this Agreement are not suitable
578
+ for your organization, or for any questions regarding this
579
+ Agreement, please contact NVIDIA at
580
581
+
582
+
583
+ 2.6. Attachment A
584
+
585
+ The following portions of the SDK are distributable under the
586
+ Agreement:
587
+
588
+ Component
589
+
590
+ CUDA Runtime
591
+
592
+ Windows
593
+
594
+ cudart.dll, cudart_static.lib, cudadevrt.lib
595
+
596
+ Mac OSX
597
+
598
+ libcudart.dylib, libcudart_static.a, libcudadevrt.a
599
+
600
+ Linux
601
+
602
+ libcudart.so, libcudart_static.a, libcudadevrt.a
603
+
604
+ Android
605
+
606
+ libcudart.so, libcudart_static.a, libcudadevrt.a
607
+
608
+ Component
609
+
610
+ CUDA FFT Library
611
+
612
+ Windows
613
+
614
+ cufft.dll, cufftw.dll, cufft.lib, cufftw.lib
615
+
616
+ Mac OSX
617
+
618
+ libcufft.dylib, libcufft_static.a, libcufftw.dylib,
619
+ libcufftw_static.a
620
+
621
+ Linux
622
+
623
+ libcufft.so, libcufft_static.a, libcufftw.so,
624
+ libcufftw_static.a
625
+
626
+ Android
627
+
628
+ libcufft.so, libcufft_static.a, libcufftw.so,
629
+ libcufftw_static.a
630
+
631
+ Component
632
+
633
+ CUDA BLAS Library
634
+
635
+ Windows
636
+
637
+ cublas.dll, cublasLt.dll
638
+
639
+ Mac OSX
640
+
641
+ libcublas.dylib, libcublasLt.dylib, libcublas_static.a,
642
+ libcublasLt_static.a
643
+
644
+ Linux
645
+
646
+ libcublas.so, libcublasLt.so, libcublas_static.a,
647
+ libcublasLt_static.a
648
+
649
+ Android
650
+
651
+ libcublas.so, libcublasLt.so, libcublas_static.a,
652
+ libcublasLt_static.a
653
+
654
+ Component
655
+
656
+ NVIDIA "Drop-in" BLAS Library
657
+
658
+ Windows
659
+
660
+ nvblas.dll
661
+
662
+ Mac OSX
663
+
664
+ libnvblas.dylib
665
+
666
+ Linux
667
+
668
+ libnvblas.so
669
+
670
+ Component
671
+
672
+ CUDA Sparse Matrix Library
673
+
674
+ Windows
675
+
676
+ cusparse.dll, cusparse.lib
677
+
678
+ Mac OSX
679
+
680
+ libcusparse.dylib, libcusparse_static.a
681
+
682
+ Linux
683
+
684
+ libcusparse.so, libcusparse_static.a
685
+
686
+ Android
687
+
688
+ libcusparse.so, libcusparse_static.a
689
+
690
+ Component
691
+
692
+ CUDA Linear Solver Library
693
+
694
+ Windows
695
+
696
+ cusolver.dll, cusolver.lib
697
+
698
+ Mac OSX
699
+
700
+ libcusolver.dylib, libcusolver_static.a
701
+
702
+ Linux
703
+
704
+ libcusolver.so, libcusolver_static.a
705
+
706
+ Android
707
+
708
+ libcusolver.so, libcusolver_static.a
709
+
710
+ Component
711
+
712
+ CUDA Random Number Generation Library
713
+
714
+ Windows
715
+
716
+ curand.dll, curand.lib
717
+
718
+ Mac OSX
719
+
720
+ libcurand.dylib, libcurand_static.a
721
+
722
+ Linux
723
+
724
+ libcurand.so, libcurand_static.a
725
+
726
+ Android
727
+
728
+ libcurand.so, libcurand_static.a
729
+
730
+ Component
731
+
732
+ CUDA Accelerated Graph Library
733
+
734
+ Component
735
+
736
+ NVIDIA Performance Primitives Library
737
+
738
+ Windows
739
+
740
+ nppc.dll, nppc.lib, nppial.dll, nppial.lib, nppicc.dll,
741
+ nppicc.lib, nppicom.dll, nppicom.lib, nppidei.dll,
742
+ nppidei.lib, nppif.dll, nppif.lib, nppig.dll, nppig.lib,
743
+ nppim.dll, nppim.lib, nppist.dll, nppist.lib, nppisu.dll,
744
+ nppisu.lib, nppitc.dll, nppitc.lib, npps.dll, npps.lib
745
+
746
+ Mac OSX
747
+
748
+ libnppc.dylib, libnppc_static.a, libnppial.dylib,
749
+ libnppial_static.a, libnppicc.dylib, libnppicc_static.a,
750
+ libnppicom.dylib, libnppicom_static.a, libnppidei.dylib,
751
+ libnppidei_static.a, libnppif.dylib, libnppif_static.a,
752
+ libnppig.dylib, libnppig_static.a, libnppim.dylib,
753
+ libnppisu_static.a, libnppitc.dylib, libnppitc_static.a,
754
+ libnpps.dylib, libnpps_static.a
755
+
756
+ Linux
757
+
758
+ libnppc.so, libnppc_static.a, libnppial.so,
759
+ libnppial_static.a, libnppicc.so, libnppicc_static.a,
760
+ libnppicom.so, libnppicom_static.a, libnppidei.so,
761
+ libnppidei_static.a, libnppif.so, libnppif_static.a
762
+ libnppig.so, libnppig_static.a, libnppim.so,
763
+ libnppim_static.a, libnppist.so, libnppist_static.a,
764
+ libnppisu.so, libnppisu_static.a, libnppitc.so
765
+ libnppitc_static.a, libnpps.so, libnpps_static.a
766
+
767
+ Android
768
+
769
+ libnppc.so, libnppc_static.a, libnppial.so,
770
+ libnppial_static.a, libnppicc.so, libnppicc_static.a,
771
+ libnppicom.so, libnppicom_static.a, libnppidei.so,
772
+ libnppidei_static.a, libnppif.so, libnppif_static.a
773
+ libnppig.so, libnppig_static.a, libnppim.so,
774
+ libnppim_static.a, libnppist.so, libnppist_static.a,
775
+ libnppisu.so, libnppisu_static.a, libnppitc.so
776
+ libnppitc_static.a, libnpps.so, libnpps_static.a
777
+
778
+ Component
779
+
780
+ NVIDIA JPEG Library
781
+
782
+ Linux
783
+
784
+ libnvjpeg.so, libnvjpeg_static.a
785
+
786
+ Component
787
+
788
+ Internal common library required for statically linking to
789
+ cuBLAS, cuSPARSE, cuFFT, cuRAND, nvJPEG and NPP
790
+
791
+ Mac OSX
792
+
793
+ libculibos.a
794
+
795
+ Linux
796
+
797
+ libculibos.a
798
+
799
+ Component
800
+
801
+ NVIDIA Runtime Compilation Library and Header
802
+
803
+ All
804
+
805
+ nvrtc.h
806
+
807
+ Windows
808
+
809
+ nvrtc.dll, nvrtc-builtins.dll
810
+
811
+ Mac OSX
812
+
813
+ libnvrtc.dylib, libnvrtc-builtins.dylib
814
+
815
+ Linux
816
+
817
+ libnvrtc.so, libnvrtc-builtins.so
818
+
819
+ Component
820
+
821
+ NVIDIA Optimizing Compiler Library
822
+
823
+ Windows
824
+
825
+ nvvm.dll
826
+
827
+ Mac OSX
828
+
829
+ libnvvm.dylib
830
+
831
+ Linux
832
+
833
+ libnvvm.so
834
+
835
+ Component
836
+
837
+ NVIDIA Common Device Math Functions Library
838
+
839
+ Windows
840
+
841
+ libdevice.10.bc
842
+
843
+ Mac OSX
844
+
845
+ libdevice.10.bc
846
+
847
+ Linux
848
+
849
+ libdevice.10.bc
850
+
851
+ Component
852
+
853
+ CUDA Occupancy Calculation Header Library
854
+
855
+ All
856
+
857
+ cuda_occupancy.h
858
+
859
+ Component
860
+
861
+ CUDA Half Precision Headers
862
+
863
+ All
864
+
865
+ cuda_fp16.h, cuda_fp16.hpp
866
+
867
+ Component
868
+
869
+ CUDA Profiling Tools Interface (CUPTI) Library
870
+
871
+ Windows
872
+
873
+ cupti.dll
874
+
875
+ Mac OSX
876
+
877
+ libcupti.dylib
878
+
879
+ Linux
880
+
881
+ libcupti.so
882
+
883
+ Component
884
+
885
+ NVIDIA Tools Extension Library
886
+
887
+ Windows
888
+
889
+ nvToolsExt.dll, nvToolsExt.lib
890
+
891
+ Mac OSX
892
+
893
+ libnvToolsExt.dylib
894
+
895
+ Linux
896
+
897
+ libnvToolsExt.so
898
+
899
+ Component
900
+
901
+ NVIDIA CUDA Driver Libraries
902
+
903
+ Linux
904
+
905
+ libcuda.so, libnvidia-fatbinaryloader.so,
906
+ libnvidia-ptxjitcompiler.so
907
+
908
+ The NVIDIA CUDA Driver Libraries are only distributable in
909
+ applications that meet this criteria:
910
+
911
+ 1. The application was developed starting from a NVIDIA CUDA
912
+ container obtained from Docker Hub or the NVIDIA GPU
913
+ Cloud, and
914
+
915
+ 2. The resulting application is packaged as a Docker
916
+ container and distributed to users on Docker Hub or the
917
+ NVIDIA GPU Cloud only.
918
+
919
+
920
+ 2.7. Attachment B
921
+
922
+
923
+ Additional Licensing Obligations
924
+
925
+ The following third party components included in the SOFTWARE
926
+ are licensed to Licensee pursuant to the following terms and
927
+ conditions:
928
+
929
+ 1. Licensee's use of the GDB third party component is
930
+ subject to the terms and conditions of GNU GPL v3:
931
+
932
+ This product includes copyrighted third-party software licensed
933
+ under the terms of the GNU General Public License v3 ("GPL v3").
934
+ All third-party software packages are copyright by their respective
935
+ authors. GPL v3 terms and conditions are hereby incorporated into
936
+ the Agreement by this reference: http://www.gnu.org/licenses/gpl.txt
937
+
938
+ Consistent with these licensing requirements, the software
939
+ listed below is provided under the terms of the specified
940
+ open source software licenses. To obtain source code for
941
+ software provided under licenses that require
942
+ redistribution of source code, including the GNU General
943
+ Public License (GPL) and GNU Lesser General Public License
944
+ (LGPL), contact [email protected]. This offer is
945
+ valid for a period of three (3) years from the date of the
946
+ distribution of this product by NVIDIA CORPORATION.
947
+
948
+ Component License
949
+ CUDA-GDB GPL v3
950
+
951
+ 2. Licensee represents and warrants that any and all third
952
+ party licensing and/or royalty payment obligations in
953
+ connection with Licensee's use of the H.264 video codecs
954
+ are solely the responsibility of Licensee.
955
+
956
+ 3. Licensee's use of the Thrust library is subject to the
957
+ terms and conditions of the Apache License Version 2.0.
958
+ All third-party software packages are copyright by their
959
+ respective authors. Apache License Version 2.0 terms and
960
+ conditions are hereby incorporated into the Agreement by
961
+ this reference.
962
+ http://www.apache.org/licenses/LICENSE-2.0.html
963
+
964
+ In addition, Licensee acknowledges the following notice:
965
+ Thrust includes source code from the Boost Iterator,
966
+ Tuple, System, and Random Number libraries.
967
+
968
+ Boost Software License - Version 1.0 - August 17th, 2003
969
+ . . . .
970
+
971
+ Permission is hereby granted, free of charge, to any person or
972
+ organization obtaining a copy of the software and accompanying
973
+ documentation covered by this license (the "Software") to use,
974
+ reproduce, display, distribute, execute, and transmit the Software,
975
+ and to prepare derivative works of the Software, and to permit
976
+ third-parties to whom the Software is furnished to do so, all
977
+ subject to the following:
978
+
979
+ The copyright notices in the Software and this entire statement,
980
+ including the above license grant, this restriction and the following
981
+ disclaimer, must be included in all copies of the Software, in whole
982
+ or in part, and all derivative works of the Software, unless such
983
+ copies or derivative works are solely in the form of machine-executable
984
+ object code generated by a source language processor.
985
+
986
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
987
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
988
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
989
+ NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
990
+ ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
991
+ OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
992
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
993
+ OTHER DEALINGS IN THE SOFTWARE.
994
+
995
+ 4. Licensee's use of the LLVM third party component is
996
+ subject to the following terms and conditions:
997
+
998
+ ======================================================
999
+ LLVM Release License
1000
+ ======================================================
1001
+ University of Illinois/NCSA
1002
+ Open Source License
1003
+
1004
+ Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
1005
+ All rights reserved.
1006
+
1007
+ Developed by:
1008
+
1009
+ LLVM Team
1010
+
1011
+ University of Illinois at Urbana-Champaign
1012
+
1013
+ http://llvm.org
1014
+
1015
+ Permission is hereby granted, free of charge, to any person obtaining a copy
1016
+ of this software and associated documentation files (the "Software"), to
1017
+ deal with the Software without restriction, including without limitation the
1018
+ rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
1019
+ sell copies of the Software, and to permit persons to whom the Software is
1020
+ furnished to do so, subject to the following conditions:
1021
+
1022
+ * Redistributions of source code must retain the above copyright notice,
1023
+ this list of conditions and the following disclaimers.
1024
+
1025
+ * Redistributions in binary form must reproduce the above copyright
1026
+ notice, this list of conditions and the following disclaimers in the
1027
+ documentation and/or other materials provided with the distribution.
1028
+
1029
+ * Neither the names of the LLVM Team, University of Illinois at Urbana-
1030
+ Champaign, nor the names of its contributors may be used to endorse or
1031
+ promote products derived from this Software without specific prior
1032
+ written permission.
1033
+
1034
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
1035
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
1036
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
1037
+ THE CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
1038
+ OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
1039
+ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
1040
+ DEALINGS WITH THE SOFTWARE.
1041
+
1042
+ 5. Licensee's use (e.g. nvprof) of the PCRE third party
1043
+ component is subject to the following terms and
1044
+ conditions:
1045
+
1046
+ ------------
1047
+ PCRE LICENCE
1048
+ ------------
1049
+ PCRE is a library of functions to support regular expressions whose syntax
1050
+ and semantics are as close as possible to those of the Perl 5 language.
1051
+ Release 8 of PCRE is distributed under the terms of the "BSD" licence, as
1052
+ specified below. The documentation for PCRE, supplied in the "doc"
1053
+ directory, is distributed under the same terms as the software itself. The
1054
+ basic library functions are written in C and are freestanding. Also
1055
+ included in the distribution is a set of C++ wrapper functions, and a just-
1056
+ in-time compiler that can be used to optimize pattern matching. These are
1057
+ both optional features that can be omitted when the library is built.
1058
+
1059
+ THE BASIC LIBRARY FUNCTIONS
1060
+ ---------------------------
1061
+ Written by: Philip Hazel
1062
+ Email local part: ph10
1063
+ Email domain: cam.ac.uk
1064
+ University of Cambridge Computing Service,
1065
+ Cambridge, England.
1066
+ Copyright (c) 1997-2012 University of Cambridge
1067
+ All rights reserved.
1068
+
1069
+ PCRE JUST-IN-TIME COMPILATION SUPPORT
1070
+ -------------------------------------
1071
+ Written by: Zoltan Herczeg
1072
+ Email local part: hzmester
1073
+ Emain domain: freemail.hu
1074
+ Copyright(c) 2010-2012 Zoltan Herczeg
1075
+ All rights reserved.
1076
+
1077
+ STACK-LESS JUST-IN-TIME COMPILER
1078
+ --------------------------------
1079
+ Written by: Zoltan Herczeg
1080
+ Email local part: hzmester
1081
+ Emain domain: freemail.hu
1082
+ Copyright(c) 2009-2012 Zoltan Herczeg
1083
+ All rights reserved.
1084
+
1085
+ THE C++ WRAPPER FUNCTIONS
1086
+ -------------------------
1087
+ Contributed by: Google Inc.
1088
+ Copyright (c) 2007-2012, Google Inc.
1089
+ All rights reserved.
1090
+
1091
+ THE "BSD" LICENCE
1092
+ -----------------
1093
+ Redistribution and use in source and binary forms, with or without
1094
+ modification, are permitted provided that the following conditions are met:
1095
+
1096
+ * Redistributions of source code must retain the above copyright notice,
1097
+ this list of conditions and the following disclaimer.
1098
+
1099
+ * Redistributions in binary form must reproduce the above copyright
1100
+ notice, this list of conditions and the following disclaimer in the
1101
+ documentation and/or other materials provided with the distribution.
1102
+
1103
+ * Neither the name of the University of Cambridge nor the name of Google
1104
+ Inc. nor the names of their contributors may be used to endorse or
1105
+ promote products derived from this software without specific prior
1106
+ written permission.
1107
+
1108
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
1109
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
1110
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
1111
+ ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
1112
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
1113
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
1114
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
1115
+ INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
1116
+ CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
1117
+ ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
1118
+ POSSIBILITY OF SUCH DAMAGE.
1119
+
1120
+ 6. Some of the cuBLAS library routines were written by or
1121
+ derived from code written by Vasily Volkov and are subject
1122
+ to the Modified Berkeley Software Distribution License as
1123
+ follows:
1124
+
1125
+ Copyright (c) 2007-2009, Regents of the University of California
1126
+
1127
+ All rights reserved.
1128
+
1129
+ Redistribution and use in source and binary forms, with or without
1130
+ modification, are permitted provided that the following conditions are
1131
+ met:
1132
+ * Redistributions of source code must retain the above copyright
1133
+ notice, this list of conditions and the following disclaimer.
1134
+ * Redistributions in binary form must reproduce the above
1135
+ copyright notice, this list of conditions and the following
1136
+ disclaimer in the documentation and/or other materials provided
1137
+ with the distribution.
1138
+ * Neither the name of the University of California, Berkeley nor
1139
+ the names of its contributors may be used to endorse or promote
1140
+ products derived from this software without specific prior
1141
+ written permission.
1142
+
1143
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
1144
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
1145
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
1146
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
1147
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
1148
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
1149
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
1150
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
1151
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
1152
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
1153
+ POSSIBILITY OF SUCH DAMAGE.
1154
+
1155
+ 7. Some of the cuBLAS library routines were written by or
1156
+ derived from code written by Davide Barbieri and are
1157
+ subject to the Modified Berkeley Software Distribution
1158
+ License as follows:
1159
+
1160
+ Copyright (c) 2008-2009 Davide Barbieri @ University of Rome Tor Vergata.
1161
+
1162
+ All rights reserved.
1163
+
1164
+ Redistribution and use in source and binary forms, with or without
1165
+ modification, are permitted provided that the following conditions are
1166
+ met:
1167
+ * Redistributions of source code must retain the above copyright
1168
+ notice, this list of conditions and the following disclaimer.
1169
+ * Redistributions in binary form must reproduce the above
1170
+ copyright notice, this list of conditions and the following
1171
+ disclaimer in the documentation and/or other materials provided
1172
+ with the distribution.
1173
+ * The name of the author may not be used to endorse or promote
1174
+ products derived from this software without specific prior
1175
+ written permission.
1176
+
1177
+ THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
1178
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
1179
+ WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
1180
+ DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
1181
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
1182
+ (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
1183
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
1184
+ HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
1185
+ STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ POSSIBILITY OF SUCH DAMAGE.
+
+ 8. Some of the cuBLAS library routines were derived from
+ code developed by the University of Tennessee and are
+ subject to the Modified Berkeley Software Distribution
+ License as follows:
+
+ Copyright (c) 2010 The University of Tennessee.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer listed in this license in the documentation and/or
+ other materials provided with the distribution.
+ * Neither the name of the copyright holders nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 9. Some of the cuBLAS library routines were written by or
+ derived from code written by Jonathan Hogg and are subject
+ to the Modified Berkeley Software Distribution License as
+ follows:
+
+ Copyright (c) 2012, The Science and Technology Facilities Council (STFC).
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the STFC nor the names of its contributors
+ may be used to endorse or promote products derived from this
+ software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE STFC BE
+ LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 10. Some of the cuBLAS library routines were written by or
+ derived from code written by Ahmad M. Abdelfattah, David
+ Keyes, and Hatem Ltaief, and are subject to the Apache
+ License, Version 2.0, as follows:
+
+ -- (C) Copyright 2013 King Abdullah University of Science and Technology
+ Authors:
+ Ahmad Abdelfattah ([email protected])
+ David Keyes ([email protected])
+ Hatem Ltaief ([email protected])
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of the King Abdullah University of Science and
+ Technology nor the names of its contributors may be used to endorse
+ or promote products derived from this software without specific prior
+ written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE
+
+ 11. Some of the cuSPARSE library routines were written by or
+ derived from code written by Li-Wen Chang and are subject
+ to the NCSA Open Source License as follows:
+
+ Copyright (c) 2012, University of Illinois.
+
+ All rights reserved.
+
+ Developed by: IMPACT Group, University of Illinois, http://impact.crhc.illinois.edu
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal with the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimers in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the names of IMPACT Group, University of Illinois, nor
+ the names of its contributors may be used to endorse or promote
+ products derived from this Software without specific prior
+ written permission.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE CONTRIBUTORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+ IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
+ SOFTWARE.
+
+ 12. Some of the cuRAND library routines were written by or
+ derived from code written by Mutsuo Saito and Makoto
+ Matsumoto and are subject to the following license:
+
+ Copyright (c) 2009, 2010 Mutsuo Saito, Makoto Matsumoto and Hiroshima
+ University. All rights reserved.
+
+ Copyright (c) 2011 Mutsuo Saito, Makoto Matsumoto, Hiroshima
+ University and University of Tokyo. All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of the Hiroshima University nor the names of
+ its contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 13. Some of the cuRAND library routines were derived from
+ code developed by D. E. Shaw Research and are subject to
+ the following license:
+
+ Copyright 2010-2011, D. E. Shaw Research.
+
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions, and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions, and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of D. E. Shaw Research nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 14. Some of the Math library routines were written by or
+ derived from code developed by Norbert Juffa and are
+ subject to the following license:
+
+ Copyright (c) 2015-2017, Norbert Juffa
+ All rights reserved.
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions
+ are met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 15. Licensee's use of the lz4 third party component is
+ subject to the following terms and conditions:
+
+ Copyright (C) 2011-2013, Yann Collet.
+ BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+ 16. The NPP library uses code from the Boost Math Toolkit,
+ and is subject to the following license:
+
+ Boost Software License - Version 1.0 - August 17th, 2003
+ . . . .
+
+ Permission is hereby granted, free of charge, to any person or
+ organization obtaining a copy of the software and accompanying
+ documentation covered by this license (the "Software") to use,
+ reproduce, display, distribute, execute, and transmit the Software,
+ and to prepare derivative works of the Software, and to permit
+ third-parties to whom the Software is furnished to do so, all
+ subject to the following:
+
+ The copyright notices in the Software and this entire statement,
+ including the above license grant, this restriction and the following
+ disclaimer, must be included in all copies of the Software, in whole
+ or in part, and all derivative works of the Software, unless such
+ copies or derivative works are solely in the form of machine-executable
+ object code generated by a source language processor.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE AND
+ NON-INFRINGEMENT. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR
+ ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE FOR ANY DAMAGES OR
+ OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+
+ 17. Portions of the Nsight Eclipse Edition is subject to the
+ following license:
+
+ The Eclipse Foundation makes available all content in this plug-in
+ ("Content"). Unless otherwise indicated below, the Content is provided
+ to you under the terms and conditions of the Eclipse Public License
+ Version 1.0 ("EPL"). A copy of the EPL is available at http://
+ www.eclipse.org/legal/epl-v10.html. For purposes of the EPL, "Program"
+ will mean the Content.
+
+ If you did not receive this Content directly from the Eclipse
+ Foundation, the Content is being redistributed by another party
+ ("Redistributor") and different terms and conditions may apply to your
+ use of any object code in the Content. Check the Redistributor's
+ license that was provided with the Content. If no such license exists,
+ contact the Redistributor. Unless otherwise indicated below, the terms
+ and conditions of the EPL still apply to any source code in the
+ Content and such source code may be obtained at http://www.eclipse.org.
+
+ 18. Some of the cuBLAS library routines uses code from
+ OpenAI, which is subject to the following license:
+
+ License URL
+ https://github.com/openai/openai-gemm/blob/master/LICENSE
+
+ License Text
+ The MIT License
+
+ Copyright (c) 2016 OpenAI (http://openai.com), 2016 Google Inc.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+ THE SOFTWARE.
+
+ 19. Licensee's use of the Visual Studio Setup Configuration
+ Samples is subject to the following license:
+
+ The MIT License (MIT)
+ Copyright (C) Microsoft Corporation. All rights reserved.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without restriction,
+ including without limitation the rights to use, copy, modify, merge,
+ publish, distribute, sublicense, and/or sell copies of the Software,
+ and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ 20. Licensee's use of linmath.h header for CPU functions for
+ GL vector/matrix operations from lunarG is subject to the
+ Apache License Version 2.0.
+
+ 21. The DX12-CUDA sample uses the d3dx12.h header, which is
+ subject to the MIT license .
+
+ -----------------
llmeval-env/lib/python3.10/site-packages/nvidia_cuda_runtime_cu12-12.1.105.dist-info/METADATA ADDED
@@ -0,0 +1,35 @@
+ Metadata-Version: 2.1
+ Name: nvidia-cuda-runtime-cu12
+ Version: 12.1.105
+ Summary: CUDA Runtime native Libraries
+ Home-page: https://developer.nvidia.com/cuda-zone
+ Author: Nvidia CUDA Installer Team
+ Author-email: [email protected]
+ License: NVIDIA Proprietary Software
+ Keywords: cuda,nvidia,runtime,machine learning,deep learning
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: Education
+ Classifier: Intended Audience :: Science/Research
+ Classifier: License :: Other/Proprietary License
+ Classifier: Natural Language :: English
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.5
+ Classifier: Programming Language :: Python :: 3.6
+ Classifier: Programming Language :: Python :: 3.7
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3 :: Only
+ Classifier: Topic :: Scientific/Engineering
+ Classifier: Topic :: Scientific/Engineering :: Mathematics
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Classifier: Topic :: Software Development
+ Classifier: Topic :: Software Development :: Libraries
+ Classifier: Operating System :: Microsoft :: Windows
+ Classifier: Operating System :: POSIX :: Linux
+ Requires-Python: >=3
+ License-File: License.txt
+
+ CUDA Runtime native Libraries