Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions .github/workflows/ruff.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# CI: lint and format-check the Python sources with Ruff on every push and PR.
name: Ruff
on: [push, pull_request]
jobs:
  ruff:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      # Lint pass (default `ruff check`) over the Python package only.
      # NOTE(review): chartboost/ruff-action has been archived upstream;
      # consider migrating to astral-sh/ruff-action — confirm before switching.
      - uses: chartboost/ruff-action@v1
        with:
          src: './python/'
      # Formatting pass: `ruff format --check` fails on unformatted files
      # without rewriting them.
      - uses: chartboost/ruff-action@v1
        with:
          src: './python/'
          args: format --check
8 changes: 6 additions & 2 deletions python/infinicore/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import contextlib

import infinicore.nn as nn
from infinicore.device import device
from infinicore.dtype import (
bfloat16,
Expand Down Expand Up @@ -33,19 +34,21 @@
from infinicore.tensor import (
Tensor,
empty,
empty_like,
from_blob,
ones,
strided_empty,
strided_from_blob,
zeros,
)

from infinicore import nn as nn

__all__ = [
# Modules.
"nn",
# Classes.
"device",
"dtype",
"Tensor",
# Data Types.
"bfloat16",
"bool",
Expand Down Expand Up @@ -75,6 +78,7 @@
"matmul",
"rearrange",
"empty",
"empty_like",
"from_blob",
"ones",
"strided_empty",
Expand Down
54 changes: 26 additions & 28 deletions python/infinicore/nn/functional.py
Original file line number Diff line number Diff line change
@@ -1,68 +1,66 @@
import infinicore
from infinicore.lib import _infinicore
from infinicore.tensor import Tensor

__all__ = ["causal_softmax", "rms_norm", "silu", "swiglu"]


def causal_softmax(input: Tensor, out=None) -> Tensor:
    r"""Apply a causal softmax function.

    Args:
        input: Input tensor.
        out: Optional pre-allocated output tensor. When given, the result is
            written into it in place and ``out`` is returned.

    Returns:
        A new ``Tensor`` holding the result, or ``out`` when provided.
    """
    if out is None:
        # Out-of-place: the backend allocates and returns a new underlying tensor.
        return Tensor(_infinicore.causal_softmax(input._underlying))

    # In-place variant writes directly into the caller-supplied buffer.
    _infinicore.causal_softmax_(out._underlying, input._underlying)

    return out


def rms_norm(
    input: Tensor,
    normalized_shape: list[int],
    weight: Tensor,
    eps: float = 1e-5,
    *,
    out=None,
) -> Tensor:
    r"""Apply Root Mean Square Layer Normalization.

    Args:
        input: Input tensor.
        normalized_shape: Expected shape of ``weight``; must match it exactly.
        weight: Gain tensor applied after normalization.
        eps: Small constant for numerical stability.
        out: Optional pre-allocated output tensor (keyword-only). When given,
            the result is written into it in place and ``out`` is returned.

    Returns:
        A new ``Tensor`` holding the result, or ``out`` when provided.
    """
    # NOTE(review): `assert` is stripped under `python -O`; raising ValueError
    # would validate unconditionally but changes the exception type callers
    # may already catch, so the assert is kept as-is.
    assert normalized_shape == weight.shape, (
        "normalized_shape does not match weight.shape."
    )

    if out is None:
        # Out-of-place: the backend allocates and returns a new underlying tensor.
        return Tensor(_infinicore.rms_norm(input._underlying, weight._underlying, eps))

    # In-place variant writes directly into the caller-supplied buffer.
    _infinicore.rms_norm_(out._underlying, input._underlying, weight._underlying, eps)

    return out


def silu(input: Tensor, inplace: bool = False, *, out=None) -> Tensor:
    r"""Apply the Sigmoid Linear Unit (SiLU) function, element-wise.

    Args:
        input: Input tensor.
        inplace: When ``True``, overwrite ``input`` with the result and
            return it.
        out: Optional pre-allocated output tensor (keyword-only). When given,
            the result is written into it in place and ``out`` is returned.

    Returns:
        ``input`` (when ``inplace``), ``out`` (when provided), or a new tensor.
    """
    # Prefer the ntops kernel on cuda/musa devices when no explicit output
    # buffer was requested; it handles the inplace flag itself.
    if infinicore.use_ntops and input.device.type in ("cuda", "musa") and out is None:
        return infinicore.ntops.torch.silu(input, inplace=inplace)

    if inplace:
        # In-place on the input buffer itself; `out` is ignored on this path.
        _infinicore.silu_(input._underlying, input._underlying)
        return input

    if out is None:
        # Out-of-place: the backend allocates and returns a new underlying tensor.
        return Tensor(_infinicore.silu(input._underlying))

    # In-place variant writes directly into the caller-supplied buffer.
    _infinicore.silu_(out._underlying, input._underlying)

    return out


def swiglu(input: infinicore.Tensor, other: infinicore.Tensor, out=None):
r"""Apply the Swish-Gated Linear Unit (SwiGLU) function, element-wise.
"""
def swiglu(input: Tensor, other: Tensor, *, out=None):
r"""Apply the Swish-Gated Linear Unit (SwiGLU) function, element-wise."""

if out is None:
return infinicore.Tensor(_infinicore.swiglu(input._underlying, other._underlying))
return Tensor(_infinicore.swiglu(input._underlying, other._underlying))

_infinicore.swiglu_(out._underlying, input._underlying, other._underlying)

Expand Down
12 changes: 11 additions & 1 deletion python/infinicore/tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def view(self, shape):

def debug(self, filename=None):
"""Print tensor data or save to file for debugging

Args:
filename: Optional filename to save raw binary data. If None, prints to stdout.
"""
Expand All @@ -93,6 +93,16 @@ def empty(size, *, dtype=None, device=None, pin_memory=False):
)


def empty_like(input, *, dtype=None, device=None):
    """Return an uninitialized tensor with the same size as ``input``.

    Args:
        input: Tensor whose size — and, by default, dtype and device — is
            copied.
        dtype: Optional dtype override; defaults to ``input.dtype``.
        device: Optional device override; defaults to ``input.device``.

    Returns:
        An uninitialized ``Tensor`` created via :func:`empty`.
    """
    # Fall back to the source tensor's own dtype/device unless overridden.
    if dtype is None:
        dtype = input.dtype

    if device is None:
        device = input.device

    return empty(input.size(), dtype=dtype, device=device)


def strided_empty(size, strides, *, dtype=None, device=None, pin_memory=False):
return Tensor(
_infinicore.strided_empty(
Expand Down