danieldk HF Staff committed
Commit 1fc12c5 · Parent(s): beeaae6

Build (x86_64-linux)

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. build/torch26-cxx11-cu118-x86_64-linux/activation/layers.py +0 -128
  2. build/torch26-cxx98-cu118-x86_64-linux/activation/__init__.py +0 -57
  3. build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py +0 -9
  4. build/torch26-cxx98-cu124-x86_64-linux/activation/__init__.py +0 -57
  5. build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_be5bedb.abi3.so +0 -3
  6. build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py +0 -9
  7. build/torch26-cxx98-cu124-x86_64-linux/activation/layers.py +0 -128
  8. build/torch26-cxx98-cu126-x86_64-linux/activation/__init__.py +0 -57
  9. build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_be5bedb.abi3.so +0 -3
  10. build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py +0 -9
  11. build/torch26-cxx98-cu126-x86_64-linux/activation/layers.py +0 -128
  12. build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  13. build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  14. build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  15. build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_20250917153858.abi3.so +0 -3
  16. build/{torch26-cxx11-cu124-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch27-cxx11-cu118-x86_64-linux/activation/_activation_beeaae6.abi3.so} +2 -2
  17. build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  18. build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  19. build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  20. build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  21. build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_20250917153858.abi3.so +0 -3
  22. build/{torch26-cxx11-cu118-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch27-cxx11-cu126-x86_64-linux/activation/_activation_beeaae6.abi3.so} +2 -2
  23. build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  24. build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  25. build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  26. build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  27. build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_20250917153858.abi3.so +0 -3
  28. build/{torch26-cxx11-cu126-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch27-cxx11-cu128-x86_64-linux/activation/_activation_beeaae6.abi3.so} +2 -2
  29. build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py +3 -3
  30. build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  31. build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  32. build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  33. build/torch28-cxx11-cu126-x86_64-linux/activation/_activation_20250917153858.abi3.so +0 -3
  34. build/{torch26-cxx98-cu118-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch28-cxx11-cu126-x86_64-linux/activation/_activation_beeaae6.abi3.so} +2 -2
  35. build/torch28-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  36. build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  37. build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  38. build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  39. build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_20250917153858.abi3.so +0 -3
  40. build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_beeaae6.abi3.so +3 -0
  41. build/torch28-cxx11-cu128-x86_64-linux/activation/_ops.py +3 -3
  42. build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  43. build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  44. build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  45. build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_20250917153858.abi3.so +0 -3
  46. build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_beeaae6.abi3.so +3 -0
  47. build/torch28-cxx11-cu129-x86_64-linux/activation/_ops.py +3 -3
  48. build/{torch26-cxx11-cu118-x86_64-linux → torch29-cxx11-cu126-x86_64-linux}/activation/__init__.py +18 -0
  49. build/torch29-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  50. build/torch29-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
build/torch26-cxx11-cu118-x86_64-linux/activation/layers.py DELETED
@@ -1,128 +0,0 @@
-import torch
-import torch.nn as nn
-
-from ._ops import ops
-
-
-class SiluAndMul(nn.Module):
-    """An activation function for SwiGLU.
-
-    The function computes x -> silu(x[:d]) * x[d:] where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.silu_and_mul(out, x)
-        return out
-
-
-class MulAndSilu(nn.Module):
-    """An activation function for SwiGLU.
-
-    The function computes x -> x[:d] * silu(x[d:]) where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.mul_and_silu(out, x)
-        return out
-
-
-class GeluAndMul(nn.Module):
-    """An activation function for GeGLU.
-
-    The function computes x -> GELU(x[:d]) * x[d:] where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (batch_size, seq_len, 2 * d) or (num_tokens, 2 * d)
-        return: (batch_size, seq_len, d) or (num_tokens, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_and_mul(out, x)
-        return out
-
-
-class GeluTanhAndMul(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_tanh_and_mul(out, x)
-        return out
-
-
-class FatreluAndMul(nn.Module):
-    """An activation function for FATReLU.
-
-    The function computes x -> FATReLU(x[:d]) * x[d:] where
-    d = x.shape[-1] // 2.
-    This is used in openbmb/MiniCPM-S-1B-sft.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def __init__(self, threshold: float = 0.0):
-        super().__init__()
-        self.threshold = threshold
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.fatrelu_and_mul(out, x, self.threshold)
-        return out
-
-
-class FastGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_fast(out, x)
-        return out
-
-
-class NewGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_new(out, x)
-        return out
-
-
-class QuickGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_quick(out, x)
-        return out
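
Note: the `layers.py` files deleted in this commit (this one and the identical copies for the other torch26 variants below) expose the compiled kernels as `torch.nn.Module`s that allocate their own output. A minimal usage sketch, assuming one of the surviving build variants of the `activation` package is on the import path and a CUDA device is available (shapes are illustrative):

    import torch
    from activation.layers import SiluAndMul

    # The input packs gate and up projections along the last dimension (2 * d).
    x = torch.randn(8, 2 * 512, device="cuda", dtype=torch.float16)
    act = SiluAndMul()
    out = act(x)  # computes silu(x[..., :512]) * x[..., 512:]
    assert out.shape == (8, 512)
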
build/torch26-cxx98-cu118-x86_64-linux/activation/__init__.py DELETED
@@ -1,57 +0,0 @@
-import torch
-
-from ._ops import ops
-
-from . import layers
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
-    return out
-
-
-def mul_and_silu(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.mul_and_silu(out, x)
-    return out
-
-
-def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_and_mul(out, x)
-    return out
-
-
-def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_tanh_and_mul(out, x)
-    return out
-
-
-def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
-    ops.fatrelu_and_mul(out, x, threshold)
-    return out
-
-
-def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_fast(out, x)
-    return out
-
-
-def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_new(out, x)
-    return out
-
-
-def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_quick(out, x)
-    return out
-
-
-__all__ = [
-    "silu_and_mul",
-    "gelu_and_mul",
-    "gelu_tanh_and_mul",
-    "fatrelu_and_mul",
-    "gelu_fast",
-    "gelu_new",
-    "gelu_quick",
-    "layers",
-]
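
Note: unlike the `nn.Module` wrappers, the functional API in the `__init__.py` above writes into a caller-provided `out` tensor (and also returns it; the `-> None` annotations in the deleted code are stale in that respect). A sketch under the same importability assumption as the earlier example:

    import torch
    from activation import silu_and_mul

    x = torch.randn(4, 1024, device="cuda")
    # The caller allocates the output: the fused op halves the last dimension.
    out = torch.empty(4, 512, device="cuda", dtype=x.dtype)
    silu_and_mul(out, x)  # fills `out` in place and returns it
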
build/torch26-cxx98-cu118-x86_64-linux/activation/_ops.py DELETED
@@ -1,9 +0,0 @@
-import torch
-from . import _activation_be5bedb
-ops = torch.ops._activation_be5bedb
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_activation_be5bedb::{op_name}"
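
Note: each `_ops.py` shim binds the build-specific shared object to a matching `torch.ops` namespace; the suffix is the build identifier (the commit hash `be5bedb` here, the timestamp `20250917153858` in the builds replaced below, and `beeaae6` after this commit). `add_op_namespace_prefix` yields the fully qualified `namespace::op` form that `torch.library`-style registration expects. A sketch of how the shim is consumed, with the registry lookup shown only for illustration:

    import torch
    from activation._ops import ops, add_op_namespace_prefix

    qualified = add_op_namespace_prefix("silu_and_mul")
    # -> "_activation_be5bedb::silu_and_mul" for this build

    # The same op is reachable through the shim...
    op_via_shim = ops.silu_and_mul
    # ...or by its qualified name in the global torch op registry.
    op_via_registry = torch.ops._activation_be5bedb.silu_and_mul
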
build/torch26-cxx98-cu124-x86_64-linux/activation/__init__.py DELETED
@@ -1,57 +0,0 @@
-import torch
-
-from ._ops import ops
-
-from . import layers
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
-    return out
-
-
-def mul_and_silu(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.mul_and_silu(out, x)
-    return out
-
-
-def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_and_mul(out, x)
-    return out
-
-
-def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_tanh_and_mul(out, x)
-    return out
-
-
-def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
-    ops.fatrelu_and_mul(out, x, threshold)
-    return out
-
-
-def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_fast(out, x)
-    return out
-
-
-def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_new(out, x)
-    return out
-
-
-def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_quick(out, x)
-    return out
-
-
-__all__ = [
-    "silu_and_mul",
-    "gelu_and_mul",
-    "gelu_tanh_and_mul",
-    "fatrelu_and_mul",
-    "gelu_fast",
-    "gelu_new",
-    "gelu_quick",
-    "layers",
-]
build/torch26-cxx98-cu124-x86_64-linux/activation/_activation_be5bedb.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:53ddfb42466bfe01feb98348f5c2d6beefd589aeb3dec4c5c36609e11a6bde4c
-size 2605136
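
Note: the `.abi3.so` entries in this diff are Git LFS pointer files, not the binaries themselves: three lines giving the spec version, the SHA-256 of the stored content, and its size in bytes. A minimal, hypothetical parser for such a pointer:

    def parse_lfs_pointer(text: str) -> dict:
        # Each pointer line is "key value"; keys here are version, oid, size.
        fields = {}
        for line in text.strip().splitlines():
            key, _, value = line.partition(" ")
            fields[key] = value
        return fields

    pointer = (
        "version https://git-lfs.github.com/spec/v1\n"
        "oid sha256:53ddfb42466bfe01feb98348f5c2d6beefd589aeb3dec4c5c36609e11a6bde4c\n"
        "size 2605136\n"
    )
    info = parse_lfs_pointer(pointer)
    assert info["size"] == "2605136"
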
build/torch26-cxx98-cu124-x86_64-linux/activation/_ops.py DELETED
@@ -1,9 +0,0 @@
-import torch
-from . import _activation_be5bedb
-ops = torch.ops._activation_be5bedb
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_activation_be5bedb::{op_name}"
build/torch26-cxx98-cu124-x86_64-linux/activation/layers.py DELETED
@@ -1,128 +0,0 @@
-import torch
-import torch.nn as nn
-
-from ._ops import ops
-
-
-class SiluAndMul(nn.Module):
-    """An activation function for SwiGLU.
-
-    The function computes x -> silu(x[:d]) * x[d:] where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.silu_and_mul(out, x)
-        return out
-
-
-class MulAndSilu(nn.Module):
-    """An activation function for SwiGLU.
-
-    The function computes x -> x[:d] * silu(x[d:]) where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.mul_and_silu(out, x)
-        return out
-
-
-class GeluAndMul(nn.Module):
-    """An activation function for GeGLU.
-
-    The function computes x -> GELU(x[:d]) * x[d:] where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (batch_size, seq_len, 2 * d) or (num_tokens, 2 * d)
-        return: (batch_size, seq_len, d) or (num_tokens, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_and_mul(out, x)
-        return out
-
-
-class GeluTanhAndMul(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_tanh_and_mul(out, x)
-        return out
-
-
-class FatreluAndMul(nn.Module):
-    """An activation function for FATReLU.
-
-    The function computes x -> FATReLU(x[:d]) * x[d:] where
-    d = x.shape[-1] // 2.
-    This is used in openbmb/MiniCPM-S-1B-sft.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def __init__(self, threshold: float = 0.0):
-        super().__init__()
-        self.threshold = threshold
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.fatrelu_and_mul(out, x, self.threshold)
-        return out
-
-
-class FastGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_fast(out, x)
-        return out
-
-
-class NewGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_new(out, x)
-        return out
-
-
-class QuickGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_quick(out, x)
-        return out
build/torch26-cxx98-cu126-x86_64-linux/activation/__init__.py DELETED
@@ -1,57 +0,0 @@
-import torch
-
-from ._ops import ops
-
-from . import layers
-
-
-def silu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.silu_and_mul(out, x)
-    return out
-
-
-def mul_and_silu(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.mul_and_silu(out, x)
-    return out
-
-
-def gelu_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_and_mul(out, x)
-    return out
-
-
-def gelu_tanh_and_mul(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_tanh_and_mul(out, x)
-    return out
-
-
-def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0) -> None:
-    ops.fatrelu_and_mul(out, x, threshold)
-    return out
-
-
-def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_fast(out, x)
-    return out
-
-
-def gelu_new(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_new(out, x)
-    return out
-
-
-def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
-    ops.gelu_quick(out, x)
-    return out
-
-
-__all__ = [
-    "silu_and_mul",
-    "gelu_and_mul",
-    "gelu_tanh_and_mul",
-    "fatrelu_and_mul",
-    "gelu_fast",
-    "gelu_new",
-    "gelu_quick",
-    "layers",
-]
build/torch26-cxx98-cu126-x86_64-linux/activation/_activation_be5bedb.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:ac7174352dea307231f308c84ca32ee001cdbcefd976de860e76501c52aae591
-size 2613776
build/torch26-cxx98-cu126-x86_64-linux/activation/_ops.py DELETED
@@ -1,9 +0,0 @@
-import torch
-from . import _activation_be5bedb
-ops = torch.ops._activation_be5bedb
-
-def add_op_namespace_prefix(op_name: str):
-    """
-    Prefix op by namespace.
-    """
-    return f"_activation_be5bedb::{op_name}"
build/torch26-cxx98-cu126-x86_64-linux/activation/layers.py DELETED
@@ -1,128 +0,0 @@
-import torch
-import torch.nn as nn
-
-from ._ops import ops
-
-
-class SiluAndMul(nn.Module):
-    """An activation function for SwiGLU.
-
-    The function computes x -> silu(x[:d]) * x[d:] where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.silu_and_mul(out, x)
-        return out
-
-
-class MulAndSilu(nn.Module):
-    """An activation function for SwiGLU.
-
-    The function computes x -> x[:d] * silu(x[d:]) where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.mul_and_silu(out, x)
-        return out
-
-
-class GeluAndMul(nn.Module):
-    """An activation function for GeGLU.
-
-    The function computes x -> GELU(x[:d]) * x[d:] where d = x.shape[-1] // 2.
-
-    Shapes:
-        x: (batch_size, seq_len, 2 * d) or (num_tokens, 2 * d)
-        return: (batch_size, seq_len, d) or (num_tokens, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_and_mul(out, x)
-        return out
-
-
-class GeluTanhAndMul(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.gelu_tanh_and_mul(out, x)
-        return out
-
-
-class FatreluAndMul(nn.Module):
-    """An activation function for FATReLU.
-
-    The function computes x -> FATReLU(x[:d]) * x[d:] where
-    d = x.shape[-1] // 2.
-    This is used in openbmb/MiniCPM-S-1B-sft.
-
-    Shapes:
-        x: (num_tokens, 2 * d) or (batch_size, seq_len, 2 * d)
-        return: (num_tokens, d) or (batch_size, seq_len, d)
-    """
-
-    can_torch_compile: bool = True
-
-    def __init__(self, threshold: float = 0.0):
-        super().__init__()
-        self.threshold = threshold
-
-    def forward(self, x: torch.Tensor):
-        d = x.shape[-1] // 2
-        output_shape = x.shape[:-1] + (d,)
-        out = torch.empty(output_shape, dtype=x.dtype, device=x.device)
-        ops.fatrelu_and_mul(out, x, self.threshold)
-        return out
-
-
-class FastGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_fast(out, x)
-        return out
-
-
-class NewGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_new(out, x)
-        return out
-
-
-class QuickGELU(nn.Module):
-    can_torch_compile: bool = True
-
-    def forward(self, x: torch.Tensor) -> torch.Tensor:
-        out = torch.empty_like(x)
-        ops.gelu_quick(out, x)
-        return out
build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch27-cxx11-cu118-x86_64-linux/activation/_activation_20250917153858.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:618cdba5f19eabc1f9c1d33e130ef03ab1b11b52f1e7b00b73f2a10d5cf1e62f
-size 2773664
build/{torch26-cxx11-cu124-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch27-cxx11-cu118-x86_64-linux/activation/_activation_beeaae6.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:331dcb3900d5e47a11d3577cdbac54f15a0b6e14910239293323c1d9e4eb9f49
-size 2616928
+oid sha256:ce06ec284ecd4ac5423d3822a60cd9eeb686d0054b38d66567de73e1137b0567
+size 2773632
build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_20250917153858
-ops = torch.ops._activation_20250917153858
+from . import _activation_beeaae6
+ops = torch.ops._activation_beeaae6
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_20250917153858::{op_name}"
+    return f"_activation_beeaae6::{op_name}"
build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch27-cxx11-cu126-x86_64-linux/activation/_activation_20250917153858.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:87ee9280b670b3323378c17d75ee7082f419987a568769fe8479bf509ee6c245
-size 2852232
build/{torch26-cxx11-cu118-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch27-cxx11-cu126-x86_64-linux/activation/_activation_beeaae6.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9b6ba32ecc6fc898df3b0cebee85e9afc6881749fe58142280f051ca3332d913
-size 2546864
+oid sha256:a529bd105aca5081398d63329e829b6b159570424cd654d3a9f275ca9a720e82
+size 2852200
build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_20250917153858
-ops = torch.ops._activation_20250917153858
+from . import _activation_beeaae6
+ops = torch.ops._activation_beeaae6
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_20250917153858::{op_name}"
+    return f"_activation_beeaae6::{op_name}"
build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch27-cxx11-cu128-x86_64-linux/activation/_activation_20250917153858.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:28ca9a3e35c49ae49694d7c6c77f85f3664622cad9c857bf13dfbf3bc144ae1b
-size 4127912
build/{torch26-cxx11-cu126-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch27-cxx11-cu128-x86_64-linux/activation/_activation_beeaae6.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:1ce11492b9675a44afb3b896ed80e425f2a47e29481c4aad9c4a6ac59520f011
-size 2621472
+oid sha256:0f2cffcb6b5b9a49f03a2df46fc2ad36765676edecb468c233e78e1f5e21e206
+size 4127872
build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_20250917153858
-ops = torch.ops._activation_20250917153858
+from . import _activation_beeaae6
+ops = torch.ops._activation_beeaae6
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_20250917153858::{op_name}"
+    return f"_activation_beeaae6::{op_name}"
build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-cxx11-cu126-x86_64-linux/activation/_activation_20250917153858.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0fcd47dd765bba10bb09f65388f6c1b357b117b2611c17bae5bf8214499a9e39
-size 2837224
build/{torch26-cxx98-cu118-x86_64-linux/activation/_activation_be5bedb.abi3.so → torch28-cxx11-cu126-x86_64-linux/activation/_activation_beeaae6.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:434bd1ae43b7cbdb10d86b82da9a237ec05ef9d9fb4fc15cdc9096d3d5ed3fa7
-size 2539352
+oid sha256:972be0b2b7ce4f771028406367437488743dc81d70e6316e7a2694df1422b23d
+size 2837192
build/torch28-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_20250917153858
-ops = torch.ops._activation_20250917153858
+from . import _activation_beeaae6
+ops = torch.ops._activation_beeaae6
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_20250917153858::{op_name}"
+    return f"_activation_beeaae6::{op_name}"
build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_20250917153858.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0e6d88c71eebabc842f6a566de7cfaf24d3d90a30572eae584a3b51dcb7e838e
-size 4117000
build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_beeaae6.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ec0756eb56dab9c57cc1aa01cfc2301d508fdf11ac4d02d015f7c16dd2246f2f
+size 4116960
build/torch28-cxx11-cu128-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_20250917153858
-ops = torch.ops._activation_20250917153858
+from . import _activation_beeaae6
+ops = torch.ops._activation_beeaae6
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_20250917153858::{op_name}"
+    return f"_activation_beeaae6::{op_name}"
build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_20250917153858.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:f3c1b86db31b04bd5fe75b0c9d6915ba2766a2456ea9bd1a20f2d75c4b1acf35
-size 4154880
build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_beeaae6.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:de346c02f046cbb177556580efc9994632adad1439bb90f451f2f690e326c39c
+size 4154840
build/torch28-cxx11-cu129-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_20250917153858
-ops = torch.ops._activation_20250917153858
+from . import _activation_beeaae6
+ops = torch.ops._activation_beeaae6
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_20250917153858::{op_name}"
+    return f"_activation_beeaae6::{op_name}"
build/{torch26-cxx11-cu118-x86_64-linux → torch29-cxx11-cu126-x86_64-linux}/activation/__init__.py RENAMED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
    "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
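
Note: besides pointing `_ops.py` at the new `beeaae6` build, this commit widens the package's public surface: plain elementwise `gelu`, `silu`, and `gelu_tanh` wrappers are added, and `mul_and_silu` (already defined earlier in the file) is now exported via `__all__`. Unlike the fused `*_and_mul` variants, the elementwise ops preserve the input shape, so the output is allocated with `empty_like`. A sketch under the same importability assumptions as the earlier examples:

    import torch
    from activation import silu

    x = torch.randn(16, 1024, device="cuda")
    out = torch.empty_like(x)  # elementwise: same shape as the input
    silu(out, x)
    # Should agree with the eager reference implementation.
    torch.testing.assert_close(out, torch.nn.functional.silu(x))
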
build/torch29-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (3.25 kB).
 
build/torch29-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc ADDED
Binary file (526 Bytes).