optimization.py CHANGED (+4 -4)
@@ -21,12 +21,12 @@ P = ParamSpec("P")
 
 # Sequence packing in LTX is a bit of a pain.
 # See: https://github.com/huggingface/diffusers/blob/c052791b5fe29ce8a308bf63dda97aa205b729be/src/diffusers/pipelines/ltx/pipeline_ltx.py#L420
-TRANSFORMER_NUM_FRAMES_DIM = torch.export.Dim("seq_len", min=4680, max=4680)
+# TRANSFORMER_NUM_FRAMES_DIM = torch.export.Dim("seq_len", min=4680, max=4680)
 
 # Unused currently as I don't know how to make the best use of it for LTX.
-TRANSFORMER_DYNAMIC_SHAPES = {
-    "hidden_states": {1: TRANSFORMER_NUM_FRAMES_DIM},
-}
+# TRANSFORMER_DYNAMIC_SHAPES = {
+#     "hidden_states": {1: TRANSFORMER_NUM_FRAMES_DIM},
+# }
 
 INDUCTOR_CONFIGS = {
     "conv_1x1_as_mm": True,
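For context, here is a minimal sketch of how the now-commented spec would plug into torch.export if it were re-enabled. The `transformer` module and `example_hidden_states` tensor are hypothetical stand-ins for illustration, not part of this Space:

# Hypothetical sketch, not part of this commit: feeding the dynamic-shape
# spec to torch.export.export. `transformer` and `example_hidden_states`
# are placeholder names, not identifiers from this repo.
import torch

# A Dim with min == max pins the packed sequence axis to 4680 tokens,
# matching the fixed sequence length LTX's packing produces at the
# default resolution.
TRANSFORMER_NUM_FRAMES_DIM = torch.export.Dim("seq_len", min=4680, max=4680)

TRANSFORMER_DYNAMIC_SHAPES = {
    # Dim 1 of `hidden_states` is the packed sequence axis.
    "hidden_states": {1: TRANSFORMER_NUM_FRAMES_DIM},
}

def export_transformer(transformer, example_hidden_states):
    # torch.export accepts dynamic_shapes as a dict keyed by argument name.
    return torch.export.export(
        transformer,
        args=(),
        kwargs={"hidden_states": example_hidden_states},
        dynamic_shapes=TRANSFORMER_DYNAMIC_SHAPES,
    )

Since min == max, the Dim is effectively static here; the spec mostly documents which axis would become dynamic once there is a good way to exploit variable sequence lengths for LTX, which is presumably why the commit comments it out rather than deleting it.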