danieldk committed 16f313a · 1 Parent(s): a20b2e3

Add Torch 2.4 build only

build/torch24-cxx11-cu118-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,47 @@
+ import torch
+
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+
+
+ def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.silu_and_mul(out, x)
+     return out
+
+
+ def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_and_mul(out, x)
+     return out
+
+
+ def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_tanh_and_mul(out, x)
+     return out
+
+
+ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+     ops.fatrelu_and_mul(out, x, threshold)
+     return out
+
+
+ def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_fast(out, x)
+     return out
+
+
+ def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_new(out, x)
+     return out
+
+
+ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_quick(out, x)
+     return out
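
Note: a minimal usage sketch of the module above, not part of this commit. It assumes the built wheel is importable as `activation` and that `silu_and_mul` follows the usual gated convention (`x` of shape `[..., 2 * d]` written into `out` of shape `[..., d]`); neither point is stated in this diff.

```python
# Hypothetical usage sketch -- assumptions: the wheel built here is importable
# as `activation`, and silu_and_mul maps x: [..., 2 * d] -> out: [..., d].
import torch

import activation

x = torch.randn(8, 2 * 128, device="cuda", dtype=torch.float16)
out = torch.empty(8, 128, device="cuda", dtype=torch.float16)

# The kernel writes into `out` in place and also returns it for convenience.
result = activation.silu_and_mul(out, x)
assert result is out
```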
build/torch24-cxx11-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd148c4b4904486fe5bcf6ae7d1f414ca09d5eae355b8a8937a0a52d9a165ef3
+ size 2370120
build/torch24-cxx11-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
+
+ def add_op_namespace_prefix(op_name: str):
+     """
+     Prefix op by namespace.
+     """
+     return f"_activation_0_0_1::{op_name}"
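
For reference, a brief sketch of what `add_op_namespace_prefix` produces; the commit shows no callers, so the suggested use with name-based `torch.library`-style APIs is an assumption about intent.

```python
# Hypothetical sketch -- no callers of add_op_namespace_prefix appear in this
# commit. The helper only builds the fully qualified "namespace::op" string
# that name-based op-registration APIs expect.
from activation._ops import add_op_namespace_prefix, ops

qualified = add_op_namespace_prefix("silu_and_mul")
print(qualified)  # _activation_0_0_1::silu_and_mul

# The same op is also reachable as an attribute of the bound namespace:
silu_and_mul = getattr(ops, "silu_and_mul")
```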
build/torch24-cxx11-cu121-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,47 @@
+ import torch
+
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+
+
+ def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.silu_and_mul(out, x)
+     return out
+
+
+ def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_and_mul(out, x)
+     return out
+
+
+ def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_tanh_and_mul(out, x)
+     return out
+
+
+ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+     ops.fatrelu_and_mul(out, x, threshold)
+     return out
+
+
+ def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_fast(out, x)
+     return out
+
+
+ def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_new(out, x)
+     return out
+
+
+ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_quick(out, x)
+     return out
build/torch24-cxx11-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:335e141bdd6e9f8ef2b2323808d9a351a409a6abf3aa031d86aa6cb52d5736b6
+ size 2393224
build/torch24-cxx11-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
+
+ def add_op_namespace_prefix(op_name: str):
+     """
+     Prefix op by namespace.
+     """
+     return f"_activation_0_0_1::{op_name}"
build/torch24-cxx11-cu124-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,47 @@
+ import torch
+
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+
+
+ def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.silu_and_mul(out, x)
+     return out
+
+
+ def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_and_mul(out, x)
+     return out
+
+
+ def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_tanh_and_mul(out, x)
+     return out
+
+
+ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+     ops.fatrelu_and_mul(out, x, threshold)
+     return out
+
+
+ def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_fast(out, x)
+     return out
+
+
+ def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_new(out, x)
+     return out
+
+
+ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_quick(out, x)
+     return out
build/torch24-cxx11-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bff7c511ab7af2c6897c14f1eaabaf36e68c850291133d87ae5f3db315781c36
+ size 2427896
build/torch24-cxx11-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
+
+ def add_op_namespace_prefix(op_name: str):
+     """
+     Prefix op by namespace.
+     """
+     return f"_activation_0_0_1::{op_name}"
build/torch24-cxx98-cu118-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,47 @@
+ import torch
+
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+
+
+ def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.silu_and_mul(out, x)
+     return out
+
+
+ def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_and_mul(out, x)
+     return out
+
+
+ def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_tanh_and_mul(out, x)
+     return out
+
+
+ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+     ops.fatrelu_and_mul(out, x, threshold)
+     return out
+
+
+ def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_fast(out, x)
+     return out
+
+
+ def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_new(out, x)
+     return out
+
+
+ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_quick(out, x)
+     return out
build/torch24-cxx98-cu118-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:799fb5d554d86f26c2bf6466a7a9fdd4a211614c85091b57a09cf2727ae594d5
+ size 2362560
build/torch24-cxx98-cu118-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
+
+ def add_op_namespace_prefix(op_name: str):
+     """
+     Prefix op by namespace.
+     """
+     return f"_activation_0_0_1::{op_name}"
build/torch24-cxx98-cu121-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,47 @@
+ import torch
+
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+
+
+ def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.silu_and_mul(out, x)
+     return out
+
+
+ def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_and_mul(out, x)
+     return out
+
+
+ def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_tanh_and_mul(out, x)
+     return out
+
+
+ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+     ops.fatrelu_and_mul(out, x, threshold)
+     return out
+
+
+ def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_fast(out, x)
+     return out
+
+
+ def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_new(out, x)
+     return out
+
+
+ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_quick(out, x)
+     return out
build/torch24-cxx98-cu121-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f26b5cba546f0f8dcd45712b3371d4f1c1c07e0071b7aa9b7f785c76b3f1d34
+ size 2385400
build/torch24-cxx98-cu121-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
+
+ def add_op_namespace_prefix(op_name: str):
+     """
+     Prefix op by namespace.
+     """
+     return f"_activation_0_0_1::{op_name}"
build/torch24-cxx98-cu124-x86_64-linux/activation/__init__.py ADDED
@@ -0,0 +1,47 @@
+ import torch
+
+ try:
+     from ._ops import ops
+ except ImportError as e:
+     # Fallback for local development.
+     try:
+         import _activation
+
+         ops = torch.ops._activation
+     except ImportError:
+         raise e
+
+
+ def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.silu_and_mul(out, x)
+     return out
+
+
+ def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_and_mul(out, x)
+     return out
+
+
+ def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_tanh_and_mul(out, x)
+     return out
+
+
+ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> torch.Tensor:
+     ops.fatrelu_and_mul(out, x, threshold)
+     return out
+
+
+ def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_fast(out, x)
+     return out
+
+
+ def gelu_new(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_new(out, x)
+     return out
+
+
+ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> torch.Tensor:
+     ops.gelu_quick(out, x)
+     return out
build/torch24-cxx98-cu124-x86_64-linux/activation/_activation_0_0_1.abi3.so ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b0cf579f43032de08184ec94303298241bbb49c6f634017d1b8bb831728aca3
+ size 2420152
build/torch24-cxx98-cu124-x86_64-linux/activation/_ops.py ADDED
@@ -0,0 +1,9 @@
+ import torch
+ from . import _activation_0_0_1
+ ops = torch.ops._activation_0_0_1
+
+ def add_op_namespace_prefix(op_name: str):
+     """
+     Prefix op by namespace.
+     """
+     return f"_activation_0_0_1::{op_name}"