Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See raw diff.
- .gitattributes +2 -0
- env-llmeval/lib/python3.10/site-packages/torch/bin/protoc +3 -0
- env-llmeval/lib/python3.10/site-packages/torch/bin/protoc-3.13.0.0 +3 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__init__.py +33 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/decorator.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/_infra.py +284 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/context.py +415 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/decorator.py +158 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/formatter.py +114 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__init__.py +100 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_address.py +48 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_artifact.py +88 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_artifact_change.py +31 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_artifact_content.py +33 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_code_flow.py +31 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_configuration_override.py +31 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_conversion.py +35 -0
- env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_edge_traversal.py +31 -0
.gitattributes
CHANGED
@@ -200,3 +200,5 @@ llmeval-env/lib/python3.10/site-packages/pandas/_libs/hashtable.cpython-310-x86_
 llmeval-env/lib/python3.10/site-packages/nvidia/cublas/lib/libcublasLt.so.12 filter=lfs diff=lfs merge=lfs -text
 llmeval-env/lib/python3.10/site-packages/safetensors/_safetensors_rust.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
 llmeval-env/lib/python3.10/site-packages/nvidia/cudnn/lib/libcudnn_cnn_infer.so.8 filter=lfs diff=lfs merge=lfs -text
+env-llmeval/lib/python3.10/site-packages/torch/bin/protoc filter=lfs diff=lfs merge=lfs -text
+env-llmeval/lib/python3.10/site-packages/torch/bin/protoc-3.13.0.0 filter=lfs diff=lfs merge=lfs -text
env-llmeval/lib/python3.10/site-packages/torch/bin/protoc
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3390873b2da56c1397adec3728f1588c51e182f15b123d3b4d4f248d31c1f4da
+size 5330888
env-llmeval/lib/python3.10/site-packages/torch/bin/protoc-3.13.0.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3390873b2da56c1397adec3728f1588c51e182f15b123d3b4d4f248d31c1f4da
+size 5330888
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__init__.py
ADDED
@@ -0,0 +1,33 @@
+from ._infra import (
+    DiagnosticOptions,
+    Graph,
+    Invocation,
+    Level,
+    levels,
+    Location,
+    Rule,
+    RuleCollection,
+    Stack,
+    StackFrame,
+    Tag,
+    ThreadFlowLocation,
+)
+from .context import Diagnostic, DiagnosticContext, RuntimeErrorWithDiagnostic
+
+__all__ = [
+    "Diagnostic",
+    "DiagnosticContext",
+    "DiagnosticOptions",
+    "Graph",
+    "Invocation",
+    "Level",
+    "levels",
+    "Location",
+    "Rule",
+    "RuleCollection",
+    "RuntimeErrorWithDiagnostic",
+    "Stack",
+    "StackFrame",
+    "Tag",
+    "ThreadFlowLocation",
+]
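
Not part of the commit: a brief sketch of how downstream code is expected to consume the package surface re-exported above; the names are imported from the package rather than from the private `_infra`/`context` submodules.

from torch.onnx._internal.diagnostics.infra import (  # illustrative import only
    Diagnostic,
    DiagnosticContext,
    Level,
    Rule,
)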
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (663 Bytes).
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/_infra.cpython-310.pyc
ADDED
Binary file (11.2 kB).
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/context.cpython-310.pyc
ADDED
Binary file (16.5 kB).
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/decorator.cpython-310.pyc
ADDED
Binary file (4.29 kB).
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/formatter.cpython-310.pyc
ADDED
Binary file (3.42 kB).
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/__pycache__/utils.cpython-310.pyc
ADDED
Binary file (2.97 kB).
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/_infra.py
ADDED
@@ -0,0 +1,284 @@
+"""This file defines an additional layer of abstraction on top of the SARIF OM."""
+
+from __future__ import annotations
+
+import dataclasses
+import enum
+import logging
+from typing import FrozenSet, List, Mapping, Optional, Sequence, Tuple
+
+from torch.onnx._internal.diagnostics.infra import formatter, sarif
+
+
+class Level(enum.IntEnum):
+    """The level of a diagnostic.
+
+    This class is used to represent the level of a diagnostic. The levels are defined
+    by the SARIF specification, and are not modifiable. For alternative categories,
+    please use infra.Tag instead. When selecting a level, please consider the following
+    guidelines:
+
+    - NONE: Informational result that does not indicate the presence of a problem.
+    - NOTE: An opportunity for improvement was found.
+    - WARNING: A potential problem was found.
+    - ERROR: A serious problem was found.
+
+    This level is a subclass of enum.IntEnum, and can be used as an integer. Its integer
+    value maps to the logging levels in Python's logging module. The mapping is as
+    follows:
+
+        Level.NONE = logging.DEBUG = 10
+        Level.NOTE = logging.INFO = 20
+        Level.WARNING = logging.WARNING = 30
+        Level.ERROR = logging.ERROR = 40
+    """
+
+    NONE = 10
+    NOTE = 20
+    WARNING = 30
+    ERROR = 40
+
+
+levels = Level
+
+
+class Tag(enum.Enum):
+    """The tag of a diagnostic. This class can be inherited to define custom tags."""
+
+
+class PatchedPropertyBag(sarif.PropertyBag):
+    """Key/value pairs that provide additional information about the object.
+
+    The definition of PropertyBag via SARIF spec is "A property bag is an object (§3.6)
+    containing an unordered set of properties with arbitrary names." However it is not
+    reflected in the json file, and therefore not captured by the python representation.
+    This patch adds additional **kwargs to the `__init__` method to allow recording
+    arbitrary key/value pairs.
+    """
+
+    def __init__(self, tags: Optional[List[str]] = None, **kwargs):
+        super().__init__(tags=tags)
+        self.__dict__.update(kwargs)
+
+
+@dataclasses.dataclass(frozen=True)
+class Rule:
+    id: str
+    name: str
+    message_default_template: str
+    short_description: Optional[str] = None
+    full_description: Optional[str] = None
+    full_description_markdown: Optional[str] = None
+    help_uri: Optional[str] = None
+
+    @classmethod
+    def from_sarif(cls, **kwargs):
+        """Returns a rule from the SARIF reporting descriptor."""
+        short_description = kwargs.get("short_description", {}).get("text")
+        full_description = kwargs.get("full_description", {}).get("text")
+        full_description_markdown = kwargs.get("full_description", {}).get("markdown")
+        help_uri = kwargs.get("help_uri")
+
+        rule = cls(
+            id=kwargs["id"],
+            name=kwargs["name"],
+            message_default_template=kwargs["message_strings"]["default"]["text"],
+            short_description=short_description,
+            full_description=full_description,
+            full_description_markdown=full_description_markdown,
+            help_uri=help_uri,
+        )
+        return rule
+
+    def sarif(self) -> sarif.ReportingDescriptor:
+        """Returns a SARIF reporting descriptor of this Rule."""
+        short_description = (
+            sarif.MultiformatMessageString(text=self.short_description)
+            if self.short_description is not None
+            else None
+        )
+        full_description = (
+            sarif.MultiformatMessageString(
+                text=self.full_description, markdown=self.full_description_markdown
+            )
+            if self.full_description is not None
+            else None
+        )
+        return sarif.ReportingDescriptor(
+            id=self.id,
+            name=self.name,
+            short_description=short_description,
+            full_description=full_description,
+            help_uri=self.help_uri,
+        )
+
+    def format(self, level: Level, *args, **kwargs) -> Tuple[Rule, Level, str]:
+        """Returns a tuple of (rule, level, message) for a diagnostic.
+
+        This method is used to format the message of a diagnostic. The message is
+        formatted using the default template of this rule, and the arguments passed in
+        as `*args` and `**kwargs`. The level is used to override the default level of
+        this rule.
+        """
+        return (self, level, self.format_message(*args, **kwargs))
+
+    def format_message(self, *args, **kwargs) -> str:
+        """Returns the formatted default message of this Rule.
+
+        This method should be overridden (with code generation) by subclasses to reflect
+        the exact arguments needed by the message template. This is a helper method to
+        create the default message for a diagnostic.
+        """
+        return self.message_default_template.format(*args, **kwargs)
+
+
+@dataclasses.dataclass
+class Location:
+    uri: Optional[str] = None
+    line: Optional[int] = None
+    message: Optional[str] = None
+    start_column: Optional[int] = None
+    end_column: Optional[int] = None
+    snippet: Optional[str] = None
+    function: Optional[str] = None
+
+    def sarif(self) -> sarif.Location:
+        """Returns the SARIF representation of this location."""
+        return sarif.Location(
+            physical_location=sarif.PhysicalLocation(
+                artifact_location=sarif.ArtifactLocation(uri=self.uri),
+                region=sarif.Region(
+                    start_line=self.line,
+                    start_column=self.start_column,
+                    end_column=self.end_column,
+                    snippet=sarif.ArtifactContent(text=self.snippet),
+                ),
+            ),
+            message=sarif.Message(text=self.message)
+            if self.message is not None
+            else None,
+        )
+
+
+@dataclasses.dataclass
+class StackFrame:
+    location: Location
+
+    def sarif(self) -> sarif.StackFrame:
+        """Returns the SARIF representation of this stack frame."""
+        return sarif.StackFrame(location=self.location.sarif())
+
+
+@dataclasses.dataclass
+class Stack:
+    """Records a stack trace. The frames are in order from newest to oldest stack frame."""
+
+    frames: List[StackFrame] = dataclasses.field(default_factory=list)
+    message: Optional[str] = None
+
+    def sarif(self) -> sarif.Stack:
+        """Returns the SARIF representation of this stack."""
+        return sarif.Stack(
+            frames=[frame.sarif() for frame in self.frames],
+            message=sarif.Message(text=self.message)
+            if self.message is not None
+            else None,
+        )
+
+
+@dataclasses.dataclass
+class ThreadFlowLocation:
+    """Records code location and the initial state."""
+
+    location: Location
+    state: Mapping[str, str]
+    index: int
+    stack: Optional[Stack] = None
+
+    def sarif(self) -> sarif.ThreadFlowLocation:
+        """Returns the SARIF representation of this thread flow location."""
+        return sarif.ThreadFlowLocation(
+            location=self.location.sarif(),
+            state=self.state,
+            stack=self.stack.sarif() if self.stack is not None else None,
+        )
+
+
+@dataclasses.dataclass
+class Graph:
+    """A graph of diagnostics.
+
+    This class stores the string representation of a model graph.
+    The `nodes` and `edges` fields are unused in the current implementation.
+    """
+
+    graph: str
+    name: str
+    description: Optional[str] = None
+
+    def sarif(self) -> sarif.Graph:
+        """Returns the SARIF representation of this graph."""
+        return sarif.Graph(
+            description=sarif.Message(text=self.graph),
+            properties=PatchedPropertyBag(name=self.name, description=self.description),
+        )
+
+
+@dataclasses.dataclass
+class RuleCollection:
+    _rule_id_name_set: FrozenSet[Tuple[str, str]] = dataclasses.field(init=False)
+
+    def __post_init__(self) -> None:
+        self._rule_id_name_set = frozenset(
+            {
+                (field.default.id, field.default.name)
+                for field in dataclasses.fields(self)
+                if isinstance(field.default, Rule)
+            }
+        )
+
+    def __contains__(self, rule: Rule) -> bool:
+        """Checks if the rule is in the collection."""
+        return (rule.id, rule.name) in self._rule_id_name_set
+
+    @classmethod
+    def custom_collection_from_list(
+        cls, new_collection_class_name: str, rules: Sequence[Rule]
+    ) -> RuleCollection:
+        """Creates a custom class inherited from RuleCollection with the list of rules."""
+        return dataclasses.make_dataclass(
+            new_collection_class_name,
+            [
+                (
+                    formatter.kebab_case_to_snake_case(rule.name),
+                    type(rule),
+                    dataclasses.field(default=rule),
+                )
+                for rule in rules
+            ],
+            bases=(cls,),
+        )()
+
+
+class Invocation:
+    # TODO: Implement this.
+    # Tracks top level call arguments and diagnostic options.
+    def __init__(self) -> None:
+        raise NotImplementedError()
+
+
+@dataclasses.dataclass
+class DiagnosticOptions:
+    """Options for diagnostic context.
+
+    Attributes:
+        verbosity_level: Set the amount of information logged for each diagnostics,
+            equivalent to the 'level' in Python logging module.
+        warnings_as_errors: When True, warning diagnostics are treated as error diagnostics.
+    """
+
+    verbosity_level: int = dataclasses.field(default=logging.INFO)
+    """Set the amount of information logged for each diagnostics, equivalent to the 'level' in Python logging module."""
+
+    warnings_as_errors: bool = dataclasses.field(default=False)
+    """If True, warning diagnostics are treated as error diagnostics."""
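
Not part of the commit: a minimal usage sketch of the `_infra` primitives added above, using a hypothetical rule id, name, and message template. It shows how a `Rule` formats a diagnostic message and how the SARIF representations are produced.

from torch.onnx._internal.diagnostics import infra

# Hypothetical rule; id, name, and template are illustrative values.
rule = infra.Rule(
    id="EX0001",
    name="example-rule",
    message_default_template="Operator {} is not supported.",
)
rule_tuple = rule.format(infra.Level.WARNING, "aten::foo")
# -> (rule, Level.WARNING, "Operator aten::foo is not supported.")
descriptor = rule.sarif()  # sarif.ReportingDescriptor for this rule

loc = infra.Location(uri="model.py", line=10, message="call site")  # illustrative
sarif_location = loc.sarif()  # sarif.Location with physical location and region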
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/context.py
ADDED
@@ -0,0 +1,415 @@
+"""A diagnostic context based on SARIF."""
+
+from __future__ import annotations
+
+import contextlib
+
+import dataclasses
+import gzip
+
+import logging
+
+from typing import (
+    Callable,
+    Generator,
+    Generic,
+    List,
+    Literal,
+    Mapping,
+    Optional,
+    Type,
+    TypeVar,
+)
+
+from torch.onnx._internal.diagnostics import infra
+from torch.onnx._internal.diagnostics.infra import formatter, sarif, utils
+from torch.onnx._internal.diagnostics.infra.sarif import version as sarif_version
+
+
+# This is a workaround for mypy not supporting Self from typing_extensions.
+_Diagnostic = TypeVar("_Diagnostic", bound="Diagnostic")
+diagnostic_logger: logging.Logger = logging.getLogger(__name__)
+
+
+@dataclasses.dataclass
+class Diagnostic:
+    rule: infra.Rule
+    level: infra.Level
+    message: Optional[str] = None
+    locations: List[infra.Location] = dataclasses.field(default_factory=list)
+    stacks: List[infra.Stack] = dataclasses.field(default_factory=list)
+    graphs: List[infra.Graph] = dataclasses.field(default_factory=list)
+    thread_flow_locations: List[infra.ThreadFlowLocation] = dataclasses.field(
+        default_factory=list
+    )
+    additional_messages: List[str] = dataclasses.field(default_factory=list)
+    tags: List[infra.Tag] = dataclasses.field(default_factory=list)
+    source_exception: Optional[Exception] = None
+    """The exception that caused this diagnostic to be created."""
+    logger: logging.Logger = dataclasses.field(init=False, default=diagnostic_logger)
+    """The logger for this diagnostic. Defaults to 'diagnostic_logger' which has the same
+    log level setting with `DiagnosticOptions.verbosity_level`."""
+    _current_log_section_depth: int = 0
+
+    def __post_init__(self) -> None:
+        pass
+
+    def sarif(self) -> sarif.Result:
+        """Returns the SARIF Result representation of this diagnostic."""
+        message = self.message or self.rule.message_default_template
+        if self.additional_messages:
+            additional_message = "\n".join(self.additional_messages)
+            message_markdown = (
+                f"{message}\n\n## Additional Message:\n\n{additional_message}"
+            )
+        else:
+            message_markdown = message
+
+        kind: Literal["informational", "fail"] = (
+            "informational" if self.level == infra.Level.NONE else "fail"
+        )
+
+        sarif_result = sarif.Result(
+            message=sarif.Message(text=message, markdown=message_markdown),
+            level=self.level.name.lower(),  # type: ignore[arg-type]
+            rule_id=self.rule.id,
+            kind=kind,
+        )
+        sarif_result.locations = [location.sarif() for location in self.locations]
+        sarif_result.stacks = [stack.sarif() for stack in self.stacks]
+        sarif_result.graphs = [graph.sarif() for graph in self.graphs]
+        sarif_result.code_flows = [
+            sarif.CodeFlow(
+                thread_flows=[
+                    sarif.ThreadFlow(
+                        locations=[loc.sarif() for loc in self.thread_flow_locations]
+                    )
+                ]
+            )
+        ]
+        sarif_result.properties = sarif.PropertyBag(
+            tags=[tag.value for tag in self.tags]
+        )
+        return sarif_result
+
+    def with_location(self: _Diagnostic, location: infra.Location) -> _Diagnostic:
+        """Adds a location to the diagnostic."""
+        self.locations.append(location)
+        return self
+
+    def with_thread_flow_location(
+        self: _Diagnostic, location: infra.ThreadFlowLocation
+    ) -> _Diagnostic:
+        """Adds a thread flow location to the diagnostic."""
+        self.thread_flow_locations.append(location)
+        return self
+
+    def with_stack(self: _Diagnostic, stack: infra.Stack) -> _Diagnostic:
+        """Adds a stack to the diagnostic."""
+        self.stacks.append(stack)
+        return self
+
+    def with_graph(self: _Diagnostic, graph: infra.Graph) -> _Diagnostic:
+        """Adds a graph to the diagnostic."""
+        self.graphs.append(graph)
+        return self
+
+    @contextlib.contextmanager
+    def log_section(
+        self, level: int, message: str, *args, **kwargs
+    ) -> Generator[None, None, None]:
+        """
+        Context manager for a section of log messages, denoted by a title message and increased indentation.
+
+        Same api as `logging.Logger.log`.
+
+        This context manager logs the given title at the specified log level, increases the current
+        section depth for subsequent log messages, and ensures that the section depth is decreased
+        again when exiting the context.
+
+        Args:
+            level: The log level.
+            message: The title message to log.
+            *args: The arguments to the message. Use `LazyString` to defer the
+                expensive evaluation of the arguments until the message is actually logged.
+            **kwargs: The keyword arguments for `logging.Logger.log`.
+
+        Yields:
+            None: This context manager does not yield any value.
+
+        Example:
+            >>> with DiagnosticContext("DummyContext", "1.0"):
+            ...     rule = infra.Rule("RuleID", "DummyRule", "Rule message")
+            ...     diagnostic = Diagnostic(rule, infra.Level.WARNING)
+            ...     with diagnostic.log_section(logging.INFO, "My Section"):
+            ...         diagnostic.log(logging.INFO, "My Message")
+            ...         with diagnostic.log_section(logging.INFO, "My Subsection"):
+            ...             diagnostic.log(logging.INFO, "My Submessage")
+            ...     diagnostic.additional_messages
+            ['## My Section', 'My Message', '### My Subsection', 'My Submessage']
+        """
+        if self.logger.isEnabledFor(level):
+            indented_format_message = (
+                f"##{'#' * self._current_log_section_depth } {message}"
+            )
+            self.log(
+                level,
+                indented_format_message,
+                *args,
+                **kwargs,
+            )
+        self._current_log_section_depth += 1
+        try:
+            yield
+        finally:
+            self._current_log_section_depth -= 1
+
+    def log(self, level: int, message: str, *args, **kwargs) -> None:
+        """Logs a message within the diagnostic. Same api as `logging.Logger.log`.
+
+        If logger is not enabled for the given level, the message will not be logged.
+        Otherwise, the message will be logged and also added to the diagnostic's additional_messages.
+
+        The default setting for `DiagnosticOptions.verbosity_level` is `logging.INFO`. Based on this default,
+        the log level recommendations are as follows. If you've set a different default verbosity level in your
+        application, please adjust accordingly:
+
+        - logging.ERROR: Log any events leading to application failure.
+        - logging.WARNING: Log events that might result in application issues or failures, although not guaranteed.
+        - logging.INFO: Log general useful information, ensuring minimal performance overhead.
+        - logging.DEBUG: Log detailed debug information, which might affect performance when logged.
+
+        Args:
+            level: The log level.
+            message: The message to log.
+            *args: The arguments to the message. Use `LazyString` to defer the
+                expensive evaluation of the arguments until the message is actually logged.
+            **kwargs: The keyword arguments for `logging.Logger.log`.
+        """
+        if self.logger.isEnabledFor(level):
+            formatted_message = message % args
+            self.logger.log(level, formatted_message, **kwargs)
+            self.additional_messages.append(formatted_message)
+
+    def debug(self, message: str, *args, **kwargs) -> None:
+        """Logs a debug message within the diagnostic. Same api as logging.Logger.debug.
+
+        Checkout `log` for more details.
+        """
+        self.log(logging.DEBUG, message, *args, **kwargs)
+
+    def info(self, message: str, *args, **kwargs) -> None:
+        """Logs an info message within the diagnostic. Same api as logging.Logger.info.
+
+        Checkout `log` for more details.
+        """
+        self.log(logging.INFO, message, *args, **kwargs)
+
+    def warning(self, message: str, *args, **kwargs) -> None:
+        """Logs a warning message within the diagnostic. Same api as logging.Logger.warning.
+
+        Checkout `log` for more details.
+        """
+        self.log(logging.WARNING, message, *args, **kwargs)
+
+    def error(self, message: str, *args, **kwargs) -> None:
+        """Logs an error message within the diagnostic. Same api as logging.Logger.error.
+
+        Checkout `log` for more details.
+        """
+        self.log(logging.ERROR, message, *args, **kwargs)
+
+    def log_source_exception(self, level: int, exception: Exception) -> None:
+        """Logs a source exception within the diagnostic.
+
+        Invokes `log_section` and `log` to log the exception in markdown section format.
+        """
+        self.source_exception = exception
+        with self.log_section(level, "Exception log"):
+            self.log(level, "%s", formatter.lazy_format_exception(exception))
+
+    def record_python_call_stack(self, frames_to_skip: int) -> infra.Stack:
+        """Records the current Python call stack."""
+        frames_to_skip += 1  # Skip this function.
+        stack = utils.python_call_stack(frames_to_skip=frames_to_skip)
+        self.with_stack(stack)
+        if len(stack.frames) > 0:
+            self.with_location(stack.frames[0].location)
+        return stack
+
+    def record_python_call(
+        self,
+        fn: Callable,
+        state: Mapping[str, str],
+        message: Optional[str] = None,
+        frames_to_skip: int = 0,
+    ) -> infra.ThreadFlowLocation:
+        """Records a python call as one thread flow step."""
+        frames_to_skip += 1  # Skip this function.
+        stack = utils.python_call_stack(frames_to_skip=frames_to_skip, frames_to_log=5)
+        location = utils.function_location(fn)
+        location.message = message
+        # Add function location to the top of the stack.
+        stack.frames.insert(0, infra.StackFrame(location=location))
+        thread_flow_location = infra.ThreadFlowLocation(
+            location=location,
+            state=state,
+            index=len(self.thread_flow_locations),
+            stack=stack,
+        )
+        self.with_thread_flow_location(thread_flow_location)
+        return thread_flow_location
+
+
+class RuntimeErrorWithDiagnostic(RuntimeError):
+    """Runtime error with enclosed diagnostic information."""
+
+    def __init__(self, diagnostic: Diagnostic):
+        super().__init__(diagnostic.message)
+        self.diagnostic = diagnostic
+
+
+@dataclasses.dataclass
+class DiagnosticContext(Generic[_Diagnostic]):
+    name: str
+    version: str
+    options: infra.DiagnosticOptions = dataclasses.field(
+        default_factory=infra.DiagnosticOptions
+    )
+    diagnostics: List[_Diagnostic] = dataclasses.field(init=False, default_factory=list)
+    # TODO(bowbao): Implement this.
+    # _invocation: infra.Invocation = dataclasses.field(init=False)
+    _inflight_diagnostics: List[_Diagnostic] = dataclasses.field(
+        init=False, default_factory=list
+    )
+    _previous_log_level: int = dataclasses.field(init=False, default=logging.WARNING)
+    logger: logging.Logger = dataclasses.field(init=False, default=diagnostic_logger)
+    _bound_diagnostic_type: Type = dataclasses.field(init=False, default=Diagnostic)
+
+    def __enter__(self):
+        self._previous_log_level = self.logger.level
+        self.logger.setLevel(self.options.verbosity_level)
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.logger.setLevel(self._previous_log_level)
+        return None
+
+    def sarif(self) -> sarif.Run:
+        """Returns the SARIF Run object."""
+        unique_rules = {diagnostic.rule for diagnostic in self.diagnostics}
+        return sarif.Run(
+            sarif.Tool(
+                driver=sarif.ToolComponent(
+                    name=self.name,
+                    version=self.version,
+                    rules=[rule.sarif() for rule in unique_rules],
+                )
+            ),
+            results=[diagnostic.sarif() for diagnostic in self.diagnostics],
+        )
+
+    def sarif_log(self) -> sarif.SarifLog:  # type: ignore[name-defined]
+        """Returns the SARIF Log object."""
+        return sarif.SarifLog(
+            version=sarif_version.SARIF_VERSION,
+            schema_uri=sarif_version.SARIF_SCHEMA_LINK,
+            runs=[self.sarif()],
+        )
+
+    def to_json(self) -> str:
+        return formatter.sarif_to_json(self.sarif_log())
+
+    def dump(self, file_path: str, compress: bool = False) -> None:
+        """Dumps the SARIF log to a file."""
+        if compress:
+            with gzip.open(file_path, "wt") as f:
+                f.write(self.to_json())
+        else:
+            with open(file_path, "w") as f:
+                f.write(self.to_json())
+
+    def log(self, diagnostic: _Diagnostic) -> None:
+        """Logs a diagnostic.
+
+        This method should be used only after all the necessary information for the diagnostic
+        has been collected.
+
+        Args:
+            diagnostic: The diagnostic to add.
+        """
+        if not isinstance(diagnostic, self._bound_diagnostic_type):
+            raise TypeError(
+                f"Expected diagnostic of type {self._bound_diagnostic_type}, got {type(diagnostic)}"
+            )
+        if self.options.warnings_as_errors and diagnostic.level == infra.Level.WARNING:
+            diagnostic.level = infra.Level.ERROR
+        self.diagnostics.append(diagnostic)
+
+    def log_and_raise_if_error(self, diagnostic: _Diagnostic) -> None:
+        """Logs a diagnostic and raises an exception if it is an error.
+
+        Use this method for logging non inflight diagnostics where diagnostic level is not known or
+        lower than ERROR. If it is always expected raise, use `log` and explicit
+        `raise` instead. Otherwise there is no way to convey the message that it always
+        raises to Python intellisense and type checking tools.
+
+        This method should be used only after all the necessary information for the diagnostic
+        has been collected.
+
+        Args:
+            diagnostic: The diagnostic to add.
+        """
+        self.log(diagnostic)
+        if diagnostic.level == infra.Level.ERROR:
+            if diagnostic.source_exception is not None:
+                raise diagnostic.source_exception
+            raise RuntimeErrorWithDiagnostic(diagnostic)
+
+    @contextlib.contextmanager
+    def add_inflight_diagnostic(
+        self, diagnostic: _Diagnostic
+    ) -> Generator[_Diagnostic, None, None]:
+        """Adds a diagnostic to the context.
+
+        Use this method to add diagnostics that are not created by the context.
+        Args:
+            diagnostic: The diagnostic to add.
+        """
+        self._inflight_diagnostics.append(diagnostic)
+        try:
+            yield diagnostic
+        finally:
+            self._inflight_diagnostics.pop()
+
+    def push_inflight_diagnostic(self, diagnostic: _Diagnostic) -> None:
+        """Pushes a diagnostic to the inflight diagnostics stack.
+
+        Args:
+            diagnostic: The diagnostic to push.
+
+        Raises:
+            ValueError: If the rule is not supported by the tool.
+        """
+        self._inflight_diagnostics.append(diagnostic)
+
+    def pop_inflight_diagnostic(self) -> _Diagnostic:
+        """Pops the last diagnostic from the inflight diagnostics stack.
+
+        Returns:
+            The popped diagnostic.
+        """
+        return self._inflight_diagnostics.pop()
+
+    def inflight_diagnostic(self, rule: Optional[infra.Rule] = None) -> _Diagnostic:
+        if rule is None:
+            # TODO(bowbao): Create builtin-rules and create diagnostic using that.
+            if len(self._inflight_diagnostics) <= 0:
+                raise AssertionError("No inflight diagnostics")
+
+            return self._inflight_diagnostics[-1]
+        else:
+            for diagnostic in reversed(self._inflight_diagnostics):
+                if diagnostic.rule == rule:
+                    return diagnostic
+            raise AssertionError(f"No inflight diagnostic for rule {rule.name}")
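
Not part of the commit: a minimal sketch of the `DiagnosticContext` workflow added above, following the doctest in `Diagnostic.log_section`. The context name, version, and output path are illustrative.

import logging
from torch.onnx._internal.diagnostics import infra

rule = infra.Rule("RuleID", "DummyRule", "Rule message")
with infra.DiagnosticContext("DummyContext", "1.0") as context:
    diagnostic = infra.Diagnostic(rule, infra.Level.WARNING)
    with diagnostic.log_section(logging.INFO, "My Section"):
        diagnostic.log(logging.INFO, "My Message")
    context.log(diagnostic)          # collected into context.diagnostics
    sarif_json = context.to_json()   # SARIF log serialized as JSON
    # context.dump("report.sarif")   # illustrative path; pass compress=True to gzip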
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/decorator.py
ADDED
@@ -0,0 +1,158 @@
+from __future__ import annotations
+
+import functools
+import logging
+import traceback
+from typing import Any, Callable, Dict, Optional, Tuple, Type
+
+from torch.onnx._internal import _beartype
+from torch.onnx._internal.diagnostics import infra
+from torch.onnx._internal.diagnostics.infra import formatter, utils
+
+
+MessageFormatterType = Callable[..., str]
+
+
+@_beartype.beartype
+def format_message_in_text(fn: Callable, *args: Any, **kwargs: Any) -> str:
+    return f"{formatter.display_name(fn)}. "
+
+
+@_beartype.beartype
+def format_exception_in_markdown(exception: Exception) -> str:
+    msg_list = ["### Exception log", "```"]
+    msg_list.extend(
+        traceback.format_exception(type(exception), exception, exception.__traceback__)
+    )
+    msg_list.append("```")
+    return "\n".join(msg_list)
+
+
+@_beartype.beartype
+def format_function_signature_in_markdown(
+    fn: Callable,
+    args: Tuple[Any, ...],
+    kwargs: Dict[str, Any],
+    format_argument: Callable[[Any], str] = formatter.format_argument,
+) -> str:
+    msg_list = [f"### Function Signature {formatter.display_name(fn)}"]
+
+    state = utils.function_state(fn, args, kwargs)
+
+    for k, v in state.items():
+        msg_list.append(f"- {k}: {format_argument(v)}")
+
+    return "\n".join(msg_list)
+
+
+@_beartype.beartype
+def format_return_values_in_markdown(
+    return_values: Any,
+    format_argument: Callable[[Any], str] = formatter.format_argument,
+) -> str:
+    return f"{format_argument(return_values)}"
+
+
+ModifierCallableType = Callable[
+    [infra.Diagnostic, Callable, Tuple[Any, ...], Dict[str, Any], Any], None
+]
+
+
+@_beartype.beartype
+def diagnose_call(
+    rule: infra.Rule,
+    *,
+    level: infra.Level = infra.Level.NONE,
+    diagnostic_type: Type[infra.Diagnostic] = infra.Diagnostic,
+    format_argument: Callable[[Any], str] = formatter.format_argument,
+    diagnostic_message_formatter: MessageFormatterType = format_message_in_text,
+) -> Callable:
+    def decorator(fn):
+        @functools.wraps(fn)
+        def wrapper(*args, **kwargs):
+            common_error_message = "diagnose_call can only be applied to callables"
+            if not callable(fn):
+                raise AssertionError(
+                    f"{common_error_message}. Got {type(fn)} instead of callable."
+                )
+            arg0 = args[0] if len(args) > 0 else None
+            if isinstance(ctx := arg0, infra.DiagnosticContext):
+                pass
+            elif isinstance(
+                ctx := getattr(arg0, "diagnostic_context", None),
+                infra.DiagnosticContext,
+            ):
+                pass
+            else:
+                # NOTE: At decorate time, it can't tell if a callable is function or method.
+                # Technically both are regarded as function at that time.
+                raise AssertionError(
+                    f"{common_error_message}. For {fn}, "
+                    f"If it is a function, a DiagnosticContext instance must be present as "
+                    f"the first argument. "
+                    f"If it is a method, a DiagnosticContext instance must be present as "
+                    f"the attribute 'diagnostic_context' of the 'self' argument."
+                )
+
+            diag = diagnostic_type(
+                rule,
+                level,
+                diagnostic_message_formatter(fn, *args, **kwargs),
+            )
+
+            # pop the decorator frame
+            # TODO(bowbao): by default diagnostic doesn't have stack.
+            # So need to check before doing this. Make the code cleaner.
+            # Option: do not capture stack by default in diagnostic initialization.
+            stack: Optional[infra.Stack] = None
+            if len(diag.stacks) > 0:
+                stack = diag.stacks[0]
+                stack.frames.pop(0)
+
+            # set function location
+            fn_location = utils.function_location(fn)
+            diag.locations.insert(0, fn_location)
+            # Add function location to the top of the stack.
+            if stack is not None:
+                stack.frames.insert(0, infra.StackFrame(location=fn_location))
+
+            with diag.log_section(logging.INFO, "Function Signature"):
+                diag.log(
+                    logging.INFO,
+                    "%s",
+                    formatter.LazyString(
+                        format_function_signature_in_markdown,
+                        fn,
+                        args,
+                        kwargs,
+                        format_argument,
+                    ),
+                )
+
+            return_values: Any = None
+            with ctx.add_inflight_diagnostic(diag) as diag:
+                try:
+                    return_values = fn(*args, **kwargs)
+                    with diag.log_section(logging.INFO, "Return values"):
+                        diag.log(
+                            logging.INFO,
+                            "%s",
+                            formatter.LazyString(
+                                format_return_values_in_markdown,
+                                return_values,
+                                format_argument,
+                            ),
+                        )
+                    return return_values
+                except Exception as e:
+                    diag.log_source_exception(logging.ERROR, e)
+                    diag.level = infra.Level.ERROR
+                finally:
+                    ctx.log_and_raise_if_error(diag)
+
+        return wrapper
+
+    return decorator
+
+
+# TODO(bowbao): decorator to report only when failed.
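
Not part of the commit: a minimal sketch of `diagnose_call` applied to a free function, assuming the sibling `utils` module (only its compiled .pyc appears in this truncated 50-file view) provides the `function_state`/`function_location` helpers referenced above. The rule, function, and context names are illustrative.

from torch.onnx._internal.diagnostics import infra
from torch.onnx._internal.diagnostics.infra import decorator

_TRACE_RULE = infra.Rule("EX0002", "trace-call", "Traced call.")  # hypothetical rule

@decorator.diagnose_call(_TRACE_RULE, level=infra.Level.NONE)
def add(ctx: infra.DiagnosticContext, a: int, b: int) -> int:
    # The wrapper locates the DiagnosticContext in the first positional argument.
    return a + b

with infra.DiagnosticContext("example-tool", "0.0") as ctx:
    result = add(ctx, 1, 2)  # records a Diagnostic (signature + return value) into ctx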
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/formatter.py
ADDED
@@ -0,0 +1,114 @@
+from __future__ import annotations
+
+import dataclasses
+import json
+import re
+import traceback
+from typing import Any, Callable, Dict, List, Optional, Union
+
+from torch._logging import LazyString
+from torch.onnx._internal import _beartype
+from torch.onnx._internal.diagnostics.infra import sarif
+
+
+# A list of types in the SARIF module to support pretty printing.
+# This is solely for type annotation for the functions below.
+_SarifClass = Union[
+    sarif.SarifLog,
+    sarif.Run,
+    sarif.ReportingDescriptor,
+    sarif.Result,
+]
+
+
+def lazy_format_exception(exception: Exception) -> LazyString:
+    return LazyString(
+        lambda: "\n".join(
+            (
+                "```",
+                *traceback.format_exception(
+                    type(exception), exception, exception.__traceback__
+                ),
+                "```",
+            )
+        ),
+    )
+
+
+@_beartype.beartype
+def snake_case_to_camel_case(s: str) -> str:
+    splits = s.split("_")
+    if len(splits) <= 1:
+        return s
+    return "".join([splits[0], *map(str.capitalize, splits[1:])])
+
+
+@_beartype.beartype
+def camel_case_to_snake_case(s: str) -> str:
+    return re.sub(r"([A-Z])", r"_\1", s).lower()
+
+
+@_beartype.beartype
+def kebab_case_to_snake_case(s: str) -> str:
+    return s.replace("-", "_")
+
+
+@_beartype.beartype
+def _convert_key(
+    object: Union[Dict[str, Any], Any], convert: Callable[[str], str]
+) -> Union[Dict[str, Any], Any]:
+    """Convert and update keys in a dictionary with "convert".
+
+    Any value that is a dictionary will be recursively updated.
+    Any value that is a list will be recursively searched.
+
+    Args:
+        object: The object to update.
+        convert: The function to convert the keys, e.g. `kebab_case_to_snake_case`.
+
+    Returns:
+        The updated object.
+    """
+    if not isinstance(object, Dict):
+        return object
+    new_dict = {}
+    for k, v in object.items():
+        new_k = convert(k)
+        if isinstance(v, Dict):
+            new_v = _convert_key(v, convert)
+        elif isinstance(v, List):
+            new_v = [_convert_key(elem, convert) for elem in v]
+        else:
+            new_v = v
+        if new_v is None:
+            # Otherwise unnecessarily bloated sarif log with "null"s.
+            continue
+        if new_v == -1:
+            # WAR: -1 as default value shouldn't be logged into sarif.
+            continue
+
+        new_dict[new_k] = new_v
+
+    return new_dict
+
+
+@_beartype.beartype
+def sarif_to_json(attr_cls_obj: _SarifClass, indent: Optional[str] = " ") -> str:
+    dict = dataclasses.asdict(attr_cls_obj)
+    dict = _convert_key(dict, snake_case_to_camel_case)
+    return json.dumps(dict, indent=indent, separators=(",", ":"))
+
+
+@_beartype.beartype
+def format_argument(obj: Any) -> str:
+    return f"{type(obj)}"
+
+
+@_beartype.beartype
+def display_name(fn: Callable) -> str:
+    if hasattr(fn, "__qualname__"):
+        return fn.__qualname__
+    elif hasattr(fn, "__name__"):
+        return fn.__name__
+    else:
+        return str(fn)
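
Not part of the commit: the case-conversion helpers above keep Python's snake_case field names aligned with the SARIF schema's camelCase property names; a short illustration of their behavior.

from torch.onnx._internal.diagnostics.infra import formatter

formatter.snake_case_to_camel_case("rule_id")     # -> "ruleId"
formatter.camel_case_to_snake_case("ruleId")      # -> "rule_id"
formatter.kebab_case_to_snake_case("dummy-rule")  # -> "dummy_rule"
# sarif_to_json() applies snake_case_to_camel_case to every dict key before
# json.dumps, so the emitted log matches the SARIF property names.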
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__init__.py
ADDED
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
|
2 |
+
# with extension for dataclasses and type annotation.
|
3 |
+
|
4 |
+
from torch.onnx._internal.diagnostics.infra.sarif._address import Address
|
5 |
+
from torch.onnx._internal.diagnostics.infra.sarif._artifact import Artifact
|
6 |
+
from torch.onnx._internal.diagnostics.infra.sarif._artifact_change import ArtifactChange
|
7 |
+
from torch.onnx._internal.diagnostics.infra.sarif._artifact_content import (
|
8 |
+
ArtifactContent,
|
9 |
+
)
|
10 |
+
from torch.onnx._internal.diagnostics.infra.sarif._artifact_location import (
|
11 |
+
ArtifactLocation,
|
12 |
+
)
|
13 |
+
from torch.onnx._internal.diagnostics.infra.sarif._attachment import Attachment
|
14 |
+
from torch.onnx._internal.diagnostics.infra.sarif._code_flow import CodeFlow
|
15 |
+
from torch.onnx._internal.diagnostics.infra.sarif._configuration_override import (
|
16 |
+
ConfigurationOverride,
|
17 |
+
)
|
18 |
+
from torch.onnx._internal.diagnostics.infra.sarif._conversion import Conversion
|
19 |
+
from torch.onnx._internal.diagnostics.infra.sarif._edge import Edge
|
20 |
+
+from torch.onnx._internal.diagnostics.infra.sarif._edge_traversal import EdgeTraversal
+from torch.onnx._internal.diagnostics.infra.sarif._exception import Exception
+from torch.onnx._internal.diagnostics.infra.sarif._external_properties import (
+    ExternalProperties,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._external_property_file_reference import (
+    ExternalPropertyFileReference,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._external_property_file_references import (
+    ExternalPropertyFileReferences,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._fix import Fix
+from torch.onnx._internal.diagnostics.infra.sarif._graph import Graph
+from torch.onnx._internal.diagnostics.infra.sarif._graph_traversal import GraphTraversal
+from torch.onnx._internal.diagnostics.infra.sarif._invocation import Invocation
+from torch.onnx._internal.diagnostics.infra.sarif._location import Location
+from torch.onnx._internal.diagnostics.infra.sarif._location_relationship import (
+    LocationRelationship,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._logical_location import (
+    LogicalLocation,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._message import Message
+from torch.onnx._internal.diagnostics.infra.sarif._multiformat_message_string import (
+    MultiformatMessageString,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._node import Node
+from torch.onnx._internal.diagnostics.infra.sarif._notification import Notification
+from torch.onnx._internal.diagnostics.infra.sarif._physical_location import (
+    PhysicalLocation,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._property_bag import PropertyBag
+from torch.onnx._internal.diagnostics.infra.sarif._rectangle import Rectangle
+from torch.onnx._internal.diagnostics.infra.sarif._region import Region
+from torch.onnx._internal.diagnostics.infra.sarif._replacement import Replacement
+from torch.onnx._internal.diagnostics.infra.sarif._reporting_configuration import (
+    ReportingConfiguration,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._reporting_descriptor import (
+    ReportingDescriptor,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._reporting_descriptor_reference import (
+    ReportingDescriptorReference,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._reporting_descriptor_relationship import (
+    ReportingDescriptorRelationship,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._result import Result
+from torch.onnx._internal.diagnostics.infra.sarif._result_provenance import (
+    ResultProvenance,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._run import Run
+from torch.onnx._internal.diagnostics.infra.sarif._run_automation_details import (
+    RunAutomationDetails,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._sarif_log import SarifLog
+from torch.onnx._internal.diagnostics.infra.sarif._special_locations import (
+    SpecialLocations,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._stack import Stack
+from torch.onnx._internal.diagnostics.infra.sarif._stack_frame import StackFrame
+from torch.onnx._internal.diagnostics.infra.sarif._suppression import Suppression
+from torch.onnx._internal.diagnostics.infra.sarif._thread_flow import ThreadFlow
+from torch.onnx._internal.diagnostics.infra.sarif._thread_flow_location import (
+    ThreadFlowLocation,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._tool import Tool
+from torch.onnx._internal.diagnostics.infra.sarif._tool_component import ToolComponent
+from torch.onnx._internal.diagnostics.infra.sarif._tool_component_reference import (
+    ToolComponentReference,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._translation_metadata import (
+    TranslationMetadata,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._version_control_details import (
+    VersionControlDetails,
+)
+from torch.onnx._internal.diagnostics.infra.sarif._web_request import WebRequest
+from torch.onnx._internal.diagnostics.infra.sarif._web_response import WebResponse
+
+# flake8: noqa

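With the re-exports above, the SARIF object model can be imported from the package root instead of the underscore-prefixed submodules. A minimal, illustrative sketch (not part of the uploaded files):

    # Names are re-exported by the __init__.py shown above.
    from torch.onnx._internal.diagnostics.infra.sarif import Result, Run, SarifLog

    print(SarifLog.__module__)  # torch.onnx._internal.diagnostics.infra.sarif._sarif_log
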
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (5.44 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_code_flow.cpython-310.pyc
ADDED
Binary file (1.1 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_conversion.cpython-310.pyc
ADDED
Binary file (1.27 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge.cpython-310.pyc
ADDED
Binary file (1.06 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_edge_traversal.cpython-310.pyc
ADDED
Binary file (1.17 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_exception.cpython-310.pyc
ADDED
Binary file (1.18 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_external_property_file_reference.cpython-310.pyc
ADDED
Binary file (1.22 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_graph_traversal.cpython-310.pyc
ADDED
Binary file (1.37 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_invocation.cpython-310.pyc
ADDED
Binary file (2.98 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_location_relationship.cpython-310.pyc
ADDED
Binary file (1.22 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_message.cpython-310.pyc
ADDED
Binary file (1.05 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_node.cpython-310.pyc
ADDED
Binary file (1.12 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_property_bag.cpython-310.pyc
ADDED
Binary file (748 Bytes)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_region.cpython-310.pyc
ADDED
Binary file (1.62 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_configuration.cpython-310.pyc
ADDED
Binary file (1.14 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor.cpython-310.pyc
ADDED
Binary file (2.1 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_reporting_descriptor_reference.cpython-310.pyc
ADDED
Binary file (1.19 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result.cpython-310.pyc
ADDED
Binary file (3.71 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_result_provenance.cpython-310.pyc
ADDED
Binary file (1.45 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_run.cpython-310.pyc
ADDED
Binary file (4.07 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_sarif_log.cpython-310.pyc
ADDED
Binary file (1.31 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_stack_frame.cpython-310.pyc
ADDED
Binary file (1.12 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_thread_flow.cpython-310.pyc
ADDED
Binary file (1.35 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_version_control_details.cpython-310.pyc
ADDED
Binary file (1.35 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_request.cpython-310.pyc
ADDED
Binary file (1.27 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/_web_response.cpython-310.pyc
ADDED
Binary file (1.39 kB)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/__pycache__/version.cpython-310.pyc
ADDED
Binary file (427 Bytes)

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_address.py
ADDED
@@ -0,0 +1,48 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import _property_bag
+
+
+@dataclasses.dataclass
+class Address(object):
+    """A physical or virtual address, or a range of addresses, in an 'addressable region' (memory or a binary file)."""
+
+    absolute_address: int = dataclasses.field(
+        default=-1, metadata={"schema_property_name": "absoluteAddress"}
+    )
+    fully_qualified_name: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "fullyQualifiedName"}
+    )
+    index: int = dataclasses.field(
+        default=-1, metadata={"schema_property_name": "index"}
+    )
+    kind: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "kind"}
+    )
+    length: Optional[int] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "length"}
+    )
+    name: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "name"}
+    )
+    offset_from_parent: Optional[int] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "offsetFromParent"}
+    )
+    parent_index: int = dataclasses.field(
+        default=-1, metadata={"schema_property_name": "parentIndex"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+    relative_address: Optional[int] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "relativeAddress"}
+    )
+
+
+# flake8: noqa

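Every field of Address carries a default, so instances can be built with keyword arguments only. A minimal construction sketch with illustrative values (the address and name below are made up):

    from torch.onnx._internal.diagnostics.infra.sarif._address import Address

    addr = Address(absolute_address=0x400000, kind="module", name="example.bin")
    print(addr.index, addr.parent_index)  # -1 -1, the schema's "not set" sentinel
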
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_artifact.py
ADDED
@@ -0,0 +1,88 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import Any, List, Literal, Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import (
+    _artifact_content,
+    _artifact_location,
+    _message,
+    _property_bag,
+)
+
+
+@dataclasses.dataclass
+class Artifact(object):
+    """A single artifact. In some cases, this artifact might be nested within another artifact."""
+
+    contents: Optional[_artifact_content.ArtifactContent] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "contents"}
+    )
+    description: Optional[_message.Message] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "description"}
+    )
+    encoding: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "encoding"}
+    )
+    hashes: Any = dataclasses.field(
+        default=None, metadata={"schema_property_name": "hashes"}
+    )
+    last_modified_time_utc: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "lastModifiedTimeUtc"}
+    )
+    length: int = dataclasses.field(
+        default=-1, metadata={"schema_property_name": "length"}
+    )
+    location: Optional[_artifact_location.ArtifactLocation] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "location"}
+    )
+    mime_type: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "mimeType"}
+    )
+    offset: Optional[int] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "offset"}
+    )
+    parent_index: int = dataclasses.field(
+        default=-1, metadata={"schema_property_name": "parentIndex"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+    roles: Optional[
+        List[
+            Literal[
+                "analysisTarget",
+                "attachment",
+                "responseFile",
+                "resultFile",
+                "standardStream",
+                "tracedFile",
+                "unmodified",
+                "modified",
+                "added",
+                "deleted",
+                "renamed",
+                "uncontrolled",
+                "driver",
+                "extension",
+                "translation",
+                "taxonomy",
+                "policy",
+                "referencedOnCommandLine",
+                "memoryContents",
+                "directory",
+                "userSpecifiedConfiguration",
+                "toolSpecifiedConfiguration",
+                "debugOutputFile",
+            ]
+        ]
+    ] = dataclasses.field(default=None, metadata={"schema_property_name": "roles"})
+    source_language: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "sourceLanguage"}
+    )
+
+
+# flake8: noqa

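All Artifact fields are likewise defaulted, and roles is typed as a list of the literal strings enumerated above (the Literal annotation is not enforced at runtime). A short sketch with illustrative values:

    from torch.onnx._internal.diagnostics.infra.sarif._artifact import Artifact

    art = Artifact(mime_type="text/plain", roles=["analysisTarget"], length=1024)
    print(art.parent_index)  # -1, i.e. not nested inside another artifact
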
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_artifact_change.py
ADDED
@@ -0,0 +1,31 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import List, Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import (
+    _artifact_location,
+    _property_bag,
+    _replacement,
+)
+
+
+@dataclasses.dataclass
+class ArtifactChange(object):
+    """A change to a single artifact."""
+
+    artifact_location: _artifact_location.ArtifactLocation = dataclasses.field(
+        metadata={"schema_property_name": "artifactLocation"}
+    )
+    replacements: List[_replacement.Replacement] = dataclasses.field(
+        metadata={"schema_property_name": "replacements"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+
+
+# flake8: noqa

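The schema_property_name metadata on each field ties the snake_case Python name back to its camelCase SARIF property. The mapping can be read generically with dataclasses.fields; a sketch for illustration (not the serializer the diagnostics code itself uses):

    import dataclasses

    from torch.onnx._internal.diagnostics.infra.sarif._artifact_change import ArtifactChange

    # Map Python field names to SARIF JSON property names.
    to_schema = {
        f.name: f.metadata["schema_property_name"]
        for f in dataclasses.fields(ArtifactChange)
    }
    print(to_schema)
    # {'artifact_location': 'artifactLocation', 'replacements': 'replacements', 'properties': 'properties'}
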
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_artifact_content.py
ADDED
@@ -0,0 +1,33 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import (
+    _multiformat_message_string,
+    _property_bag,
+)
+
+
+@dataclasses.dataclass
+class ArtifactContent(object):
+    """Represents the contents of an artifact."""
+
+    binary: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "binary"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+    rendered: Optional[
+        _multiformat_message_string.MultiformatMessageString
+    ] = dataclasses.field(default=None, metadata={"schema_property_name": "rendered"})
+    text: Optional[str] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "text"}
+    )
+
+
+# flake8: noqa

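A one-line construction sketch with an illustrative text snippet (all other fields keep their None defaults):

    from torch.onnx._internal.diagnostics.infra.sarif._artifact_content import ArtifactContent

    snippet = ArtifactContent(text="x = 1")  # binary, rendered, and properties stay None
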
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_code_flow.py
ADDED
@@ -0,0 +1,31 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import List, Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import (
+    _message,
+    _property_bag,
+    _thread_flow,
+)
+
+
+@dataclasses.dataclass
+class CodeFlow(object):
+    """A set of threadFlows which together describe a pattern of code execution relevant to detecting a result."""
+
+    thread_flows: List[_thread_flow.ThreadFlow] = dataclasses.field(
+        metadata={"schema_property_name": "threadFlows"}
+    )
+    message: Optional[_message.Message] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "message"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+
+
+# flake8: noqa

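thread_flows is the only required field. The sketch below passes an empty list purely to show the constructor shape; a real code flow would carry ThreadFlow entries from _thread_flow.py, which is not part of this excerpt:

    from torch.onnx._internal.diagnostics.infra.sarif._code_flow import CodeFlow

    flow = CodeFlow(thread_flows=[])  # message and properties default to None
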
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_configuration_override.py
ADDED
@@ -0,0 +1,31 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import (
+    _property_bag,
+    _reporting_configuration,
+    _reporting_descriptor_reference,
+)
+
+
+@dataclasses.dataclass
+class ConfigurationOverride(object):
+    """Information about how a specific rule or notification was reconfigured at runtime."""
+
+    configuration: _reporting_configuration.ReportingConfiguration = dataclasses.field(
+        metadata={"schema_property_name": "configuration"}
+    )
+    descriptor: _reporting_descriptor_reference.ReportingDescriptorReference = (
+        dataclasses.field(metadata={"schema_property_name": "descriptor"})
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+
+
+# flake8: noqa

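Because configuration and descriptor are declared without defaults, they must be supplied at construction time. A sketch that discovers the required fields generically rather than constructing an instance (the referenced types live in modules outside this excerpt):

    import dataclasses

    from torch.onnx._internal.diagnostics.infra.sarif._configuration_override import (
        ConfigurationOverride,
    )

    # Fields with neither a default nor a default_factory are required.
    required = [
        f.name
        for f in dataclasses.fields(ConfigurationOverride)
        if f.default is dataclasses.MISSING and f.default_factory is dataclasses.MISSING
    ]
    print(required)  # ['configuration', 'descriptor']
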
env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_conversion.py
ADDED
@@ -0,0 +1,35 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import List, Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import (
+    _artifact_location,
+    _invocation,
+    _property_bag,
+    _tool,
+)
+
+
+@dataclasses.dataclass
+class Conversion(object):
+    """Describes how a converter transformed the output of a static analysis tool from the analysis tool's native output format into the SARIF format."""
+
+    tool: _tool.Tool = dataclasses.field(metadata={"schema_property_name": "tool"})
+    analysis_tool_log_files: Optional[
+        List[_artifact_location.ArtifactLocation]
+    ] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "analysisToolLogFiles"}
+    )
+    invocation: Optional[_invocation.Invocation] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "invocation"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+
+
+# flake8: noqa

env-llmeval/lib/python3.10/site-packages/torch/onnx/_internal/diagnostics/infra/sarif/_edge_traversal.py
ADDED
@@ -0,0 +1,31 @@
+# DO NOT EDIT! This file was generated by jschema_to_python version 0.0.1.dev29,
+# with extension for dataclasses and type annotation.
+
+from __future__ import annotations
+
+import dataclasses
+from typing import Any, Optional
+
+from torch.onnx._internal.diagnostics.infra.sarif import _message, _property_bag
+
+
+@dataclasses.dataclass
+class EdgeTraversal(object):
+    """Represents the traversal of a single edge during a graph traversal."""
+
+    edge_id: str = dataclasses.field(metadata={"schema_property_name": "edgeId"})
+    final_state: Any = dataclasses.field(
+        default=None, metadata={"schema_property_name": "finalState"}
+    )
+    message: Optional[_message.Message] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "message"}
+    )
+    properties: Optional[_property_bag.PropertyBag] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "properties"}
+    )
+    step_over_edge_count: Optional[int] = dataclasses.field(
+        default=None, metadata={"schema_property_name": "stepOverEdgeCount"}
+    )
+
+
+# flake8: noqa

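edge_id is the only required field of EdgeTraversal. A construction sketch with illustrative values, plus a dict round-trip via the standard dataclasses helper:

    import dataclasses

    from torch.onnx._internal.diagnostics.infra.sarif._edge_traversal import EdgeTraversal

    step = EdgeTraversal(edge_id="n1->n2", step_over_edge_count=3)
    print(dataclasses.asdict(step)["edge_id"])  # 'n1->n2'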