Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
File renamed without changes.
File renamed without changes.
8 changes: 7 additions & 1 deletion mindnlp/core/_dtype.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@

from .configs import ON_A1

bool_alias = bool

if ON_A1:
warnings.warn('MindSpore on GPU/910A do not support bfloat16, use float16 instead.')
bfloat16 = float16
Expand Down Expand Up @@ -98,4 +100,8 @@ def __gt__(self, other):
float16 : np.float16,
float32 : np.float32,
float64 : np.float64,
}
}

# Mapping from Python builtin types to this module's dtype objects, used by
# ops (e.g. `ones` in ops/creation.py) when a caller passes a plain Python
# type such as `dtype=bool`.  `bool_alias` is the Python builtin captured
# near the top of the module; the value `bool` presumably resolves to the
# mindspore `bool` dtype that shadows the builtin at this point in the
# module — TODO(review): confirm the dtype name `bool` is defined above.
py2dtype = {
    bool_alias: bool
}
6 changes: 6 additions & 0 deletions mindnlp/core/_prims/ascend.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,12 @@ def argmax_with_value(*args):

__all__.append('argmax_with_value')

def argmin_with_value(*args):
    """Thin dispatcher to the pyboost ``argmin_with_value`` kernel.

    All positional arguments are forwarded unchanged to the underlying
    implementation; whatever it returns is returned as-is.
    """
    impl = pyboost_inner_prim.argmin_with_value_impl
    return impl(*args)

__all__.append('argmin_with_value')


right_shift_op = ops.RightShift().set_device('Ascend')
def right_shift(input, other):
if isinstance(other, numbers.Number):
Expand Down
Empty file added mindnlp/core/_prims/gpu.py
Empty file.
29 changes: 29 additions & 0 deletions mindnlp/core/_prims/meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -262,3 +262,32 @@ def concat(tensors, dim):
return core.Tensor(out)

__all__.append('concat')

def tril_ext(input, k):
    """Meta kernel for ``tril``: the lower-triangular mask never changes
    shape or dtype, so the meta input itself stands in for the output.
    ``k`` (the diagonal offset) is irrelevant for shape inference and is
    deliberately ignored.
    """
    result = input
    return result

__all__.append('tril_ext')  # export the meta kernel like the other prims in this module

def reshape(input, shape):
    """Meta kernel for ``reshape``: allocate a placeholder tensor carrying
    only the requested shape and the input's dtype — no data is produced.
    """
    placeholder = Tensor_(shape=tuple(shape), dtype=input.dtype)
    return core.Tensor(placeholder)

__all__.append('reshape')

def linalg_vector_norm(input, p, dim, keepdim, dtype):
    """Meta kernel for ``linalg.vector_norm``: infer only the output
    shape/dtype, no numerics.

    Args:
        input: meta tensor whose ``shape``/``dtype`` drive the result.
        p: norm order — ignored here, it never affects the output shape.
        dim: int or iterable of ints naming the reduced axes.
        keepdim: if True, reduced axes are kept with size 1; otherwise
            they are dropped from the output shape.
        dtype: result dtype; falls back to ``input.dtype`` when None.

    Note: the previous implementation marked reduced axes with 0 and then
    filtered out every 0-sized entry, which also (incorrectly) removed
    genuine size-0 axes of the input.  Tracking the reduced axes by index
    avoids that.
    """
    if isinstance(dim, int):
        dim = (dim,)
    ndim = len(input.shape)
    # Normalize negative axes (matches the original's list-index behavior)
    # so membership testing below is reliable.
    reduced = {d % ndim for d in dim}
    new_shape = []
    for axis, size in enumerate(input.shape):
        if axis in reduced:
            if keepdim:
                new_shape.append(1)
            # else: the reduced axis is dropped entirely
        else:
            new_shape.append(size)
    if dtype is None:
        dtype = input.dtype
    out = Tensor_(shape=tuple(new_shape), dtype=dtype)
    return core.Tensor(out)

__all__.append('linalg_vector_norm')
8 changes: 8 additions & 0 deletions mindnlp/core/_prims/numpy.py
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,8 @@ def sub_ext(input, other, alpha):
elif not isinstance(other, numbers.Number):
other = other.numpy()
out = np.subtract(input, other * alpha)
if not isinstance(out, np.ndarray):
out = np.array(out)
return core.Tensor.from_numpy(out)

__all__.append('sub_ext')
Expand Down Expand Up @@ -503,3 +505,9 @@ def less_equal(input, other):
return core.Tensor.from_numpy(out)

__all__.append('less_equal')

def tril_ext(input, diagonal):
    """Numpy-backed kernel for ``tril``: zero out every element above the
    ``diagonal`` offset and wrap the result back into a framework tensor.
    """
    lower = np.tril(input.numpy(), diagonal)
    return core.Tensor.from_numpy(lower)

__all__.append('tril_ext')
4 changes: 3 additions & 1 deletion mindnlp/core/ops/creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def from_numpy(ndarray):

# frombuffer
def frombuffer(buffer, *, dtype, count=-1, offset=0, requires_grad=False):
    """Create a 1-D tensor viewing ``buffer`` (torch.frombuffer analogue).

    Args:
        buffer: any object exposing the Python buffer protocol.
        dtype: framework dtype of the result; translated to a numpy dtype
            via the ``core.dtype2np`` mapping.
        count: number of elements to read (-1 = all remaining).
        offset: byte offset into the buffer to start reading at.
        requires_grad: forwarded to ``Tensor.requires_grad_``.
    """
    # NOTE(review): the diff left both the old `core.dtype_to_nptype(...)`
    # call and its `core.dtype2np[...]` replacement in place; only the new
    # mapping-based lookup is kept here.
    arr = np.frombuffer(buffer=buffer, dtype=core.dtype2np[dtype], count=count, offset=offset)
    tensor = core.Tensor(arr)
    tensor.requires_grad_(requires_grad)
    return tensor
Expand Down Expand Up @@ -62,6 +62,8 @@ def zeros_like(input, *, dtype=None, layout=None, device=None, requires_grad=Fal
def ones(*size, out=None, dtype=None, layout=None, device=None, requires_grad=False):
if dtype is None:
dtype = get_default_dtype()
if isinstance(dtype, type):
dtype = core.py2dtype[dtype]
if device is None:
device = get_device_in_context()
if isinstance(size[0], (tuple, list)):
Expand Down
2 changes: 1 addition & 1 deletion mindnlp/core/ops/other.py
Original file line number Diff line number Diff line change
Expand Up @@ -866,7 +866,7 @@ def iinfo(dtype):
return iinfo_dtype[dtype]

def iinfo(dtype):
    """Return ``numpy.iinfo`` limits for the given framework integer dtype.

    The dtype is translated to its numpy counterpart through the
    ``core.dtype2np`` mapping.  (The superseded diff line that still used
    ``mindspore.dtype_to_nptype`` is dropped.)
    """
    return np.iinfo(core.dtype2np[dtype])


def contains(self, key):
Expand Down
2 changes: 1 addition & 1 deletion mindnlp/core/ops/reduction.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def min(input, dim=None, keepdim=False, *, out=None):
return execute('min', input)
if core.is_tensor(dim):
return core.minimum(input, dim)
output = execute('argmin_ext', input, dim, keepdim)
output = execute('argmin_with_value', input, dim, keepdim)
if out is None:
return min_out(values=output[1], indices=output[0])

Expand Down
Empty file added mindnlp/factory/__init__.py
Empty file.
Empty file added mindnlp/factory/cli.py
Empty file.
Loading