Commit 3d09ac6

Modularize remaining unary ops (#3605)
modularize round, rsqrt, sigmoid, isNan, isInf, isFinite, softplus, sqrt, step DEV
Parent commit: 3c31971

45 files changed: +1845 / -1130 lines
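
Every modified file below makes the same mechanical change: a named import moves from the monolithic '../ops/unary_ops' module to a dedicated per-op module. A representative before/after sketch for two of the ops touched here (sqrt and step), using the paths that appear in the hunks below:

// Old import style, removed by this commit:
//   import {sqrt, step} from '../ops/unary_ops';

// New per-op modules:
import {sqrt} from '../ops/sqrt';
import {step} from '../ops/step';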

tfjs-core/src/gradients/Abs_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -19,7 +19,7 @@ import {Abs} from '../kernel_names';
 import {GradConfig} from '../kernel_registry';
 import {cast} from '../ops/cast';
 import {mul} from '../ops/mul';
-import {step} from '../ops/unary_ops';
+import {step} from '../ops/step';
 import {Tensor} from '../tensor';

 export const absGradConfig: GradConfig = {
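
For context on why Abs_grad needs step at all: d|x|/dx is the sign of x, and step(x, -1) yields -1 for x <= 0 and +1 otherwise. A minimal sketch of that idea using the public tf API (the gradFunc body itself is outside this hunk, so the exact code is assumed):

import * as tf from '@tensorflow/tfjs-core';

// Scale the upstream gradient dy by a sign-like factor of x:
// step(x, -1) is -1 where x <= 0 and 1 where x > 0.
const absGradSketch = (dy: tf.Tensor, x: tf.Tensor): tf.Tensor =>
    tf.mul(dy, tf.step(tf.cast(x, 'float32'), -1));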

tfjs-core/src/gradients/Acos_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -21,9 +21,9 @@ import {cast} from '../ops/cast';
 import {div} from '../ops/div';
 import {neg} from '../ops/neg';
 import {scalar} from '../ops/scalar';
+import {sqrt} from '../ops/sqrt';
 import {square} from '../ops/square';
 import {sub} from '../ops/sub';
-import {sqrt} from '../ops/unary_ops';
 import {Tensor} from '../tensor';

 export const acosGradConfig: GradConfig = {

tfjs-core/src/gradients/Acosh_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -19,9 +19,9 @@ import {Acosh} from '../kernel_names';
 import {GradConfig} from '../kernel_registry';
 import {cast} from '../ops/cast';
 import {div} from '../ops/div';
+import {sqrt} from '../ops/sqrt';
 import {square} from '../ops/square';
 import {sub} from '../ops/sub';
-import {sqrt} from '../ops/unary_ops';
 import {Tensor} from '../tensor';

 export const acoshGradConfig: GradConfig = {

tfjs-core/src/gradients/Asin_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -20,9 +20,9 @@ import {GradConfig} from '../kernel_registry';
 import {cast} from '../ops/cast';
 import {div} from '../ops/div';
 import {scalar} from '../ops/scalar';
+import {sqrt} from '../ops/sqrt';
 import {square} from '../ops/square';
 import {sub} from '../ops/sub';
-import {sqrt} from '../ops/unary_ops';
 import {Tensor} from '../tensor';

 export const asinGradConfig: GradConfig = {

tfjs-core/src/gradients/Asinh_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -21,8 +21,8 @@ import {add} from '../ops/add';
 import {cast} from '../ops/cast';
 import {div} from '../ops/div';
 import {scalar} from '../ops/scalar';
+import {sqrt} from '../ops/sqrt';
 import {square} from '../ops/square';
-import {sqrt} from '../ops/unary_ops';
 import {Tensor} from '../tensor';

 export const asinhGradConfig: GradConfig = {
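
All four inverse-trig gradients above (acos, acosh, asin, asinh) pull in sqrt for the same reason: their derivatives are reciprocals of square roots (d/dx asin x = 1/sqrt(1 - x^2), acos: -1/sqrt(1 - x^2), acosh: 1/sqrt(x^2 - 1), asinh: 1/sqrt(x^2 + 1)). A sketch of the asin case with the public API; the actual gradFunc bodies are not part of these hunks:

import * as tf from '@tensorflow/tfjs-core';

// d/dx asin(x) = 1 / sqrt(1 - x^2), so the upstream gradient is divided
// by that square-root term.
const asinGradSketch = (dy: tf.Tensor, x: tf.Tensor): tf.Tensor =>
    tf.div(dy, tf.sqrt(tf.sub(tf.scalar(1), tf.square(tf.cast(x, 'float32')))));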

tfjs-core/src/gradients/FusedBatchNorm_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -20,11 +20,11 @@ import {add} from '../ops/add';
 import {getReductionAxes} from '../ops/broadcast_util';
 import {mul} from '../ops/mul';
 import {reshape} from '../ops/reshape';
+import {rsqrt} from '../ops/rsqrt';
 import {scalar} from '../ops/scalar';
 import {sub} from '../ops/sub';
 import {sum} from '../ops/sum';
 import {tile} from '../ops/tile';
-import {rsqrt} from '../ops/unary_ops';
 import {Tensor} from '../tensor';
 import {Rank, ShapeMap} from '../types';

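
rsqrt appears here because batch normalization scales the centered input by 1/sqrt(variance + epsilon), and the gradient reuses that factor. A hedged sketch of the normalization step (the epsilon value is illustrative, not taken from this file):

import * as tf from '@tensorflow/tfjs-core';

// Batch norm's scale factor is 1 / sqrt(variance + epsilon), which rsqrt
// computes in a single kernel.
const normalizeSketch =
    (x: tf.Tensor, mean: tf.Tensor, variance: tf.Tensor, epsilon = 0.001) =>
        tf.mul(tf.sub(x, mean), tf.rsqrt(tf.add(variance, tf.scalar(epsilon))));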

tfjs-core/src/gradients/IsFinite_grad.ts

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+/**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+
+import {IsFinite} from '../kernel_names';
+import {GradConfig} from '../kernel_registry';
+import {zerosLike} from '../ops/zeros_like';
+import {Tensor} from '../tensor';
+
+export const isFiniteGradConfig: GradConfig = {
+  kernelName: IsFinite,
+  gradFunc: (dy: Tensor) => {
+    // TODO(nsthorat): Let gradients be null for cases where we want to stop
+    // backpropgation.
+    return {x: () => zerosLike(dy)};
+  }
+};

tfjs-core/src/gradients/IsInf_grad.ts

Lines changed: 31 additions & 0 deletions

@@ -0,0 +1,31 @@
+
+/**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+
+import {IsInf} from '../kernel_names';
+import {GradConfig} from '../kernel_registry';
+import {zerosLike} from '../ops/zeros_like';
+import {Tensor} from '../tensor';
+
+export const isInfGradConfig: GradConfig = {
+  kernelName: IsInf,
+  gradFunc: (dy: Tensor) => {
+    // TODO(nsthorat): Let gradients be null for cases where we want to stop
+    // backpropgation.
+    return {x: () => zerosLike(dy)};
+  }
+};

tfjs-core/src/gradients/IsNan_grad.ts

Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+/**
+ * @license
+ * Copyright 2020 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+
+import {IsNan} from '../kernel_names';
+import {GradConfig} from '../kernel_registry';
+import {zerosLike} from '../ops/zeros_like';
+import {Tensor} from '../tensor';
+
+export const isNanGradConfig: GradConfig = {
+  kernelName: IsNan,
+  gradFunc: (dy: Tensor) => {
+    // TODO(nsthorat): Let gradients be null for cases where we want to stop
+    // backpropgation.
+    return {x: () => zerosLike(dy)};
+  }
+};
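
The three new configs (isFinite, isInf, isNan) all return a zero gradient, since these ops produce boolean masks. To take effect, a GradConfig has to be registered with the kernel registry; a minimal sketch of that wiring, assuming the usual registerGradient entry point and paths as seen from a central registration file such as register_all_gradients.ts under src/:

import {registerGradient} from './kernel_registry';
import {isFiniteGradConfig} from './gradients/IsFinite_grad';
import {isInfGradConfig} from './gradients/IsInf_grad';
import {isNanGradConfig} from './gradients/IsNan_grad';

// Each GradConfig ties a kernel name to its gradient function; once
// registered, tf.grad()/tf.grads() can backprop through that kernel.
[isFiniteGradConfig, isInfGradConfig, isNanGradConfig]
    .forEach(config => registerGradient(config));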

tfjs-core/src/gradients/Relu6_grad.ts

Lines changed: 1 addition & 1 deletion

@@ -19,7 +19,7 @@ import {GradConfig} from '../kernel_registry';
 import {cast} from '../ops/cast';
 import {lessEqual} from '../ops/less_equal';
 import {mul} from '../ops/mul';
-import {step} from '../ops/unary_ops';
+import {step} from '../ops/step';
 import {Tensor} from '../tensor';

 export const relu6GradConfig: GradConfig = {
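
The relu6 change is the same one-line import swap, but the surrounding imports hint at how the gradient works: relu6(x) = min(max(x, 0), 6) has slope 1 only where 0 < x <= 6, so dy is masked using lessEqual and step. A sketch of that masking with the public API (the gradFunc body is outside this hunk):

import * as tf from '@tensorflow/tfjs-core';

// Pass the upstream gradient through only where 0 < x <= 6.
const relu6GradSketch = (dy: tf.Tensor, x: tf.Tensor): tf.Tensor => {
  const inLinearRange =
      tf.mul(tf.cast(tf.lessEqual(x, 6), 'float32'), tf.step(x));
  return tf.mul(dy, inLinearRange);
};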
