Upload custom kernels
build/torch-universal/liger_kernels/_ops.py
CHANGED
@@ -1,8 +1,8 @@
 import torch
-ops = torch.ops.
+ops = torch.ops._liger_kernels_20250507091026
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"
+    return f"_liger_kernels_20250507091026::{op_name}"
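The timestamped suffix is a build-unique namespace: each build of the kernel library registers its custom ops under its own `torch.ops` namespace, so two builds can be loaded into one process without their op registrations colliding, and `add_op_namespace_prefix` yields the qualified name that `torch.library` expects. A minimal sketch of the pattern, using a made-up `scale` op in place of the real Liger ops:

```python
import torch

# Build-specific namespace; the suffix matches this commit's build timestamp.
NAMESPACE = "_liger_kernels_20250507091026"

def add_op_namespace_prefix(op_name: str) -> str:
    # "scale" -> "_liger_kernels_20250507091026::scale"
    return f"{NAMESPACE}::{op_name}"

# Register a toy op under the namespace. The real ops are registered by the
# compiled extension; "scale" is purely illustrative.
torch.library.define(add_op_namespace_prefix("scale"), "(Tensor x, float s) -> Tensor")

@torch.library.impl(add_op_namespace_prefix("scale"), "cpu")
def _scale(x: torch.Tensor, s: float) -> torch.Tensor:
    return x * s

ops = getattr(torch.ops, NAMESPACE)   # the same object _ops.py binds to `ops`
print(ops.scale(torch.ones(2), 3.0))  # tensor([3., 3.])
```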
build/torch-universal/liger_kernels/rms_norm.py
CHANGED
@@ -377,13 +377,12 @@ class LigerRMSNorm(torch.nn.Module):
         in_place (bool, optional): Whether to modify dY in-place to store dX during backward. Defaults to True.
     """
 
-
-
-
-
-
-
-        self.in_place = in_place
+
+    weight: torch.Tensor
+    variance_epsilon: float
+    offset: float = 0
+    casting_mode: str = "llama"
+    in_place: bool = True
 
     def forward(self, hidden_states):
         """
torch-ext/liger_kernels/rms_norm.py
CHANGED
@@ -377,13 +377,12 @@ class LigerRMSNorm(torch.nn.Module):
         in_place (bool, optional): Whether to modify dY in-place to store dX during backward. Defaults to True.
     """
 
-
-
-
-
-
-
-        self.in_place = in_place
+
+    weight: torch.Tensor
+    variance_epsilon: float
+    offset: float = 0
+    casting_mode: str = "llama"
+    in_place: bool = True
 
     def forward(self, hidden_states):
         """