
Commit 408ce34

fix m,n class on OrangePi (#2223)

1 parent: ae24d47

4 files changed (+14 −5 lines)

mindtorch/_apis/npu.py (8 additions, 2 deletions)

@@ -1887,7 +1887,7 @@ def relu6(input):
 def col2im(input, output_size, kernel_size, dilation=1, padding=0, stride=1):
     if use_pyboost():
         return pyboost.col2im_ext_op(input, output_size, kernel_size, dilation, padding, stride)
-    return legacy.col2im(input, output_size, kernel_size, dilation, padding, stride)
+    return legacy.col2_im(input, mindspore.Tensor(output_size), kernel_size, dilation, padding, stride)
 
 def flash_attention_score(query, key, value, real_shift, drop_mask, padding_mask, attn_mask, prefix, actual_seq_qlen, actual_seq_kvlen, head_num, keep_prob, scale_value, pre_tokens, next_tokens, inner_precise, input_layout, sparse_mode):
     if use_pyboost():
@@ -1992,4 +1992,10 @@ def logaddexp(input, other):
     return y
 
 def reflection_pad_1d(input, padding):
-    return pyboost.reflection_pad_1d_op(input, padding)
+    return pyboost.reflection_pad_1d_op(input, padding)
+
+def replication_pad_1d(input, padding):
+    return pyboost.reflection_pad_1d_op(input, padding)
+
+def hardtanh(input, min_val, max_val):
+    return pyboost.hardtanh_op(input, min_val, max_val)
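
For reference, the semantics the newly wrapped kernels are expected to expose match their PyTorch namesakes: hardtanh clamps elementwise to [min_val, max_val], and 1-D replication padding repeats edge samples (reflection padding mirrors interior samples instead). Note that, as committed, replication_pad_1d still delegates to pyboost.reflection_pad_1d_op. The following is a minimal NumPy sketch of those reference semantics only, not of the NPU kernels:

import numpy as np

def hardtanh_ref(x, min_val=-1.0, max_val=1.0):
    # Elementwise clamp to [min_val, max_val].
    return np.clip(x, min_val, max_val)

def replication_pad_1d_ref(x, padding):
    # padding = (left, right): repeat the edge samples along the last axis.
    left, right = padding
    return np.concatenate(
        [np.repeat(x[..., :1], left, axis=-1), x, np.repeat(x[..., -1:], right, axis=-1)],
        axis=-1,
    )

hardtanh_ref(np.array([-2.0, -0.5, 0.5, 2.0]))             # [-1.0, -0.5, 0.5, 1.0]
replication_pad_1d_ref(np.array([1.0, 2.0, 3.0]), (2, 1))  # [1. 1. 1. 2. 3. 3.]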

mindtorch/nn/functional.py (4 additions, 1 deletion)

@@ -1897,4 +1897,7 @@ def make_causal_mask(
     )
 
 def rotary_position_embedding(x, cos, sin, mode=0):
-    return ops.rotary_position_embedding(x, cos, sin, mode)
+    return ops.rotary_position_embedding(x, cos, sin, mode)
+
+def hardtanh(input, min_val=-1.0, max_val=1.0):
+    return execute('hardtanh', input, min_val, max_val)
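
A short usage sketch for the new functional entry point, assuming mindtorch mirrors torch's top-level tensor constructor; the defaults are the ones added in this diff, and the call is routed through the execute('hardtanh', ...) dispatch shown above:

from mindtorch import tensor
from mindtorch.nn import functional as F

x = tensor([-2.0, -0.5, 0.5, 2.0])
F.hardtanh(x)              # default range [-1.0, 1.0]  -> [-1.0, -0.5, 0.5, 1.0]
F.hardtanh(x, -0.5, 0.5)   # explicit min_val / max_val -> [-0.5, -0.5, 0.5, 0.5]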

mindtorch/nn/modules/activation.py (1 addition, 1 deletion)

@@ -289,7 +289,7 @@ def forward(self, input: Tensor) -> Tensor:
         """
         Runs the forward pass.
         """
-        return F.hardtanh(input, self.min_val, self.max_val, self.inplace)
+        return F.hardtanh(input, self.min_val, self.max_val)
 
     def extra_repr(self) -> str:
         """

mindtorch/nn/modules/dropout.py (1 addition, 1 deletion)

@@ -173,7 +173,7 @@ def forward(self, input: Tensor) -> Tensor:
         """
         Runs the forward pass.
         """
-        return F.dropout2d(input, self.p, self.training, self.inplace)
+        return F.dropout2d(input, self.p, self.training)
 
 
 class Dropout3d(_DropoutNd):
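
Dropout2d.forward likewise stops forwarding self.inplace. A usage sketch, assuming mindtorch mirrors torch's ones constructor and the usual train/eval switches:

from mindtorch import ones, nn

drop = nn.Dropout2d(p=0.5)
x = ones(2, 3, 4, 4)

drop.train()
y = drop(x)   # in training, whole channels are zeroed with probability p and survivors scaled by 1/(1-p)

drop.eval()
z = drop(x)   # in eval mode the layer is the identity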
