4 changes: 2 additions & 2 deletions src/ntops/__init__.py
@@ -1,3 +1,3 @@
-from ntops import torch
+from ntops import kernels, torch

-__all__ = ["torch"]
+__all__ = ["kernels", "torch"]
79 changes: 79 additions & 0 deletions src/ntops/kernels/__init__.py
@@ -0,0 +1,79 @@
+from ntops.kernels import (
+    abs,
+    add,
+    addmm,
+    bitwise_and,
+    bitwise_not,
+    bitwise_or,
+    bmm,
+    clamp,
+    cos,
+    div,
+    dropout,
+    eq,
+    exp,
+    ge,
+    gelu,
+    gt,
+    isinf,
+    isnan,
+    layer_norm,
+    le,
+    lt,
+    mm,
+    mul,
+    ne,
+    neg,
+    pow,
+    relu,
+    rms_norm,
+    rotary_position_embedding,
+    rsqrt,
+    scaled_dot_product_attention,
+    sigmoid,
+    silu,
+    sin,
+    softmax,
+    sub,
+    tanh,
+)
+
+__all__ = [
+    "abs",
+    "add",
+    "addmm",
+    "bitwise_and",
+    "bitwise_not",
+    "bitwise_or",
+    "bmm",
+    "clamp",
+    "cos",
+    "div",
+    "dropout",
+    "eq",
+    "exp",
+    "ge",
+    "gelu",
+    "gt",
+    "isinf",
+    "isnan",
+    "layer_norm",
+    "le",
+    "lt",
+    "mm",
+    "mul",
+    "ne",
+    "neg",
+    "pow",
+    "relu",
+    "rms_norm",
+    "rotary_position_embedding",
+    "rsqrt",
+    "scaled_dot_product_attention",
+    "sigmoid",
+    "silu",
+    "sin",
+    "softmax",
+    "sub",
+    "tanh",
+]
38 changes: 1 addition & 37 deletions src/ntops/torch.py
@@ -5,43 +5,7 @@
 import ninetoothed
 import torch

-import ntops.kernels.abs
-import ntops.kernels.add
-import ntops.kernels.addmm
-import ntops.kernels.bitwise_and
-import ntops.kernels.bitwise_not
-import ntops.kernels.bitwise_or
-import ntops.kernels.bmm
-import ntops.kernels.clamp
-import ntops.kernels.cos
-import ntops.kernels.div
-import ntops.kernels.dropout
-import ntops.kernels.eq
-import ntops.kernels.exp
-import ntops.kernels.ge
-import ntops.kernels.gelu
-import ntops.kernels.gt
-import ntops.kernels.isinf
-import ntops.kernels.isnan
-import ntops.kernels.layer_norm
-import ntops.kernels.le
-import ntops.kernels.lt
-import ntops.kernels.mm
-import ntops.kernels.mul
-import ntops.kernels.ne
-import ntops.kernels.neg
-import ntops.kernels.pow
-import ntops.kernels.relu
-import ntops.kernels.rms_norm
-import ntops.kernels.rotary_position_embedding
-import ntops.kernels.rsqrt
-import ntops.kernels.scaled_dot_product_attention
-import ntops.kernels.sigmoid
-import ntops.kernels.silu
-import ntops.kernels.sin
-import ntops.kernels.softmax
-import ntops.kernels.sub
-import ntops.kernels.tanh
+import ntops
 from ntops.kernels.scaled_dot_product_attention import CausalVariant


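
A hedged sanity check of the simplified import surface in torch.py: because ntops/__init__.py now imports kernels, and ntops/kernels/__init__.py imports every kernel module, the single `import ntops` shown above is enough to reach each submodule. The snippet below relies only on what the diff shows and assumes no kernel-level API.

# Sketch of an external check (assumption: ntops installed with this PR applied).
import ntops

assert hasattr(ntops.kernels, "scaled_dot_product_attention")
assert hasattr(ntops.kernels, "softmax")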