22 changes: 6 additions & 16 deletions tfjs-backend-wasm/src/kernels/ClipByValue.ts
@@ -15,24 +15,14 @@
* =============================================================================
*/

-import {KernelConfig, NamedAttrMap, NamedTensorInfoMap} from '@tensorflow/tfjs-core';
-import {TensorInfo} from '@tensorflow/tfjs-core';
+import {ClipByValue, ClipByValueAttrs, ClipByValueInputs, KernelConfig, KernelFunc} from '@tensorflow/tfjs-core';

import {BackendWasm} from '../backend_wasm';

-interface ClipByValueInputs extends NamedTensorInfoMap {
-x: TensorInfo;
-}
-
-interface ClipByValueAttrs extends NamedAttrMap {
-min: number;
-max: number;
-}

let wasmClip: (xId: number, min: number, max: number, outId: number) => void;

function setup(backend: BackendWasm) {
-wasmClip = backend.wasm.cwrap('ClipByValue', null /* void */, [
+wasmClip = backend.wasm.cwrap(ClipByValue, null /* void */, [
'number', // x_id
'number', // min
'number', // max
@@ -47,17 +37,17 @@ function clip(args: {
}) {
const {inputs, backend, attrs} = args;
const {x} = inputs;
-const {min, max} = attrs;
+const {clipValueMin, clipValueMax} = attrs;
const xId = backend.dataIdMap.get(x.dataId).id;
const out = backend.makeOutput(x.shape, 'float32');
const outId = backend.dataIdMap.get(out.dataId).id;
-wasmClip(xId, min, max, outId);
+wasmClip(xId, clipValueMin, clipValueMax, outId);
return out;
}

export const clipByValueConfig: KernelConfig = {
-kernelName: 'ClipByValue',
+kernelName: ClipByValue,
backendName: 'wasm',
setupFunc: setup,
-kernelFunc: clip
+kernelFunc: clip as {} as KernelFunc
};
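For context, a minimal sketch of how a config like this is typically wired up, assuming the WASM backend funnels its kernel configs through tfjs-core's `registerKernel` (the registration module itself is not part of this diff, so the import path below is illustrative):

```ts
import {registerKernel} from '@tensorflow/tfjs-core';

import {clipByValueConfig} from './kernels/ClipByValue';

// setupFunc runs once when the 'wasm' backend is initialized (cwrapping the C symbol);
// kernelFunc then handles every ClipByValue call routed to that backend.
registerKernel(clipByValueConfig);
```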
2 changes: 1 addition & 1 deletion tfjs-core/scripts/touch_modular_op_files.ts
@@ -115,7 +115,7 @@ export const ${downcaseFirstChar(kernelName)}GradConfig: GradConfig = {
outputsToSave: [], // UPDATE ME
gradFunc: (dy: Tensor, saved: Tensor[], attrs: NamedAttrMap) => {
const [] = saved;
-const {} = attrs as {} as KernelNameAttrs;
+const {} = attrs as {} as ${kernelName}Attrs;
return {
};
}
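With the template fix above, the scaffold script now interpolates the actual kernel name into the attrs cast. Roughly what it would emit for a kernel named ClipByValue (illustrative output only, not a file in this PR; the previous template produced a reference to a nonexistent `KernelNameAttrs` type):

```ts
import {ClipByValue, ClipByValueAttrs} from '../kernel_names';
import {GradConfig, NamedAttrMap} from '../kernel_registry';
import {Tensor} from '../tensor';

export const clipByValueGradConfig: GradConfig = {
  kernelName: ClipByValue,
  outputsToSave: [],  // UPDATE ME
  gradFunc: (dy: Tensor, saved: Tensor[], attrs: NamedAttrMap) => {
    const [] = saved;
    const {} = attrs as {} as ClipByValueAttrs;
    return {};
  }
};
```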
39 changes: 39 additions & 0 deletions tfjs-core/src/gradients/ClipByValue_grad.ts
@@ -0,0 +1,39 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {ClipByValue, ClipByValueAttrs} from '../kernel_names';
import {GradConfig, NamedAttrMap} from '../kernel_registry';
import {greaterEqual} from '../ops/greater_equal';
import {lessEqual} from '../ops/less_equal';
import {logicalAnd} from '../ops/logical_and';
import {zerosLike} from '../ops/tensor_ops';
import {where} from '../ops/where';
import {Tensor} from '../tensor';

export const clipByValueGradConfig: GradConfig = {
kernelName: ClipByValue,
inputsToSave: ['x'],
gradFunc: (dy: Tensor, saved: Tensor[], attrs: NamedAttrMap) => {
const [x] = saved;
const {clipValueMin, clipValueMax} = attrs as {} as ClipByValueAttrs;
return {
x: () => where(
logicalAnd(greaterEqual(x, clipValueMin), lessEqual(x, clipValueMax)),
dy, zerosLike(dy)),
};
}
};
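The where/logicalAnd combination above encodes the standard clip gradient: upstream gradients pass through wherever x lies inside [clipValueMin, clipValueMax] and are zeroed elsewhere. With a = clipValueMin and b = clipValueMax:

```latex
\frac{\partial}{\partial x}\,\mathrm{clip}(x, a, b) =
  \begin{cases} 1, & a \le x \le b \\ 0, & \text{otherwise,} \end{cases}
\qquad\text{so}\qquad
dx = \begin{cases} dy, & a \le x \le b \\ 0, & \text{otherwise.} \end{cases}
```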
34 changes: 34 additions & 0 deletions tfjs-core/src/gradients/Erf_grad.ts
@@ -0,0 +1,34 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {Erf} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {exp} from '../ops/exp';
import {mul} from '../ops/mul';
import {neg} from '../ops/neg';
import {square} from '../ops/square';
import {Tensor} from '../tensor';

export const erfGradConfig: GradConfig = {
kernelName: Erf,
inputsToSave: ['x'],
gradFunc: (dy: Tensor, saved: Tensor[]) => {
const [x] = saved;
const a = mul(exp(neg(square(x))), 2 / Math.sqrt(Math.PI));
return {x: () => mul(dy, a)};
}
};
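For reference, the factor computed in `a` is the textbook derivative of the error function:

```latex
\frac{d}{dx}\,\operatorname{erf}(x) = \frac{2}{\sqrt{\pi}}\,e^{-x^{2}},
\qquad
dx = dy \cdot \frac{2}{\sqrt{\pi}}\,e^{-x^{2}}.
```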
30 changes: 30 additions & 0 deletions tfjs-core/src/gradients/Exp_grad.ts
@@ -0,0 +1,30 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {Exp} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {mul} from '../ops/mul';
import {Tensor} from '../tensor';

export const expGradConfig: GradConfig = {
kernelName: Exp,
outputsToSave: [true],
gradFunc: (dy: Tensor, saved: Tensor[]) => {
const [y] = saved;
return {x: () => mul(dy, y)};
}
};
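This config saves the forward output rather than the input (`outputsToSave: [true]`), which works because the exponential is its own derivative, so the saved `y` can be reused directly:

```latex
y = e^{x}, \qquad \frac{\partial y}{\partial x} = e^{x} = y, \qquad dx = dy \cdot y.
```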
31 changes: 31 additions & 0 deletions tfjs-core/src/gradients/Expm1_grad.ts
@@ -0,0 +1,31 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {Expm1} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {exp} from '../ops/exp';
import {mul} from '../ops/mul';
import {Tensor} from '../tensor';

export const expm1GradConfig: GradConfig = {
kernelName: Expm1,
inputsToSave: ['x'],
gradFunc: (dy: Tensor, saved: Tensor[]) => {
const [x] = saved;
return {x: () => mul(dy, exp(x))};
}
};
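Unlike Exp, this gradient saves the input: the forward output of expm1 is e^x - 1, which is off by one from the derivative, so `exp(x)` is recomputed from the saved `x`:

```latex
\frac{\partial}{\partial x}\,(e^{x} - 1) = e^{x}, \qquad dx = dy \cdot e^{x}.
```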
31 changes: 31 additions & 0 deletions tfjs-core/src/gradients/Log1p_grad.ts
@@ -0,0 +1,31 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {Log1p} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {add} from '../ops/add';
import {div} from '../ops/div';
import {Tensor} from '../tensor';

export const log1pGradConfig: GradConfig = {
kernelName: Log1p,
inputsToSave: ['x'],
gradFunc: (dy: Tensor, saved: Tensor[]) => {
const [x] = saved;
return {x: () => div(dy, add(x, 1))};
}
};
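The rule implemented above is the usual derivative of log1p:

```latex
\frac{\partial}{\partial x}\,\ln(1 + x) = \frac{1}{1 + x}, \qquad dx = \frac{dy}{1 + x}.
```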
31 changes: 31 additions & 0 deletions tfjs-core/src/gradients/Log_grad.ts
@@ -0,0 +1,31 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {Log} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {cast} from '../ops/array_ops';
import {div} from '../ops/div';
import {Tensor} from '../tensor';

export const logGradConfig: GradConfig = {
kernelName: Log,
inputsToSave: ['x'],
gradFunc: (dy: Tensor, saved: Tensor[]) => {
const [x] = saved;
return {x: () => div(dy, cast(x, 'float32'))};
}
};
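The cast to float32 guards against non-float inputs; the rule itself is the ordinary logarithm derivative:

```latex
\frac{\partial}{\partial x}\,\ln x = \frac{1}{x}, \qquad dx = \frac{dy}{x}.
```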
2 changes: 1 addition & 1 deletion tfjs-core/src/gradients/Pow_grad.ts
@@ -19,13 +19,13 @@ import {GradConfig} from '../kernel_registry';
import {cast} from '../ops/array_ops';
import * as broadcast_util from '../ops/broadcast_util';
import {greater} from '../ops/greater';
+import {log} from '../ops/log';
import {mul} from '../ops/mul';
import {pow} from '../ops/pow';
import {reshape} from '../ops/reshape';
import {sub} from '../ops/sub';
import {sum} from '../ops/sum';
import {scalar, zerosLike} from '../ops/tensor_ops';
-import {log} from '../ops/unary_ops';
import {where} from '../ops/where';
import {Tensor} from '../tensor';

32 changes: 32 additions & 0 deletions tfjs-core/src/gradients/Reciprocal_grad.ts
@@ -0,0 +1,32 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {Reciprocal} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {div} from '../ops/div';
import {neg} from '../ops/neg';
import {square} from '../ops/square';
import {Tensor} from '../tensor';

export const reciprocalGradConfig: GradConfig = {
kernelName: Reciprocal,
inputsToSave: ['x'],
gradFunc: (dy: Tensor, saved: Tensor[]) => {
const [x] = saved;
return {x: () => div(dy, neg(square(x)))};
}
};
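The gradFunc above implements the reciprocal rule:

```latex
\frac{\partial}{\partial x}\,\frac{1}{x} = -\frac{1}{x^{2}}, \qquad dx = -\frac{dy}{x^{2}}.
```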
2 changes: 1 addition & 1 deletion tfjs-core/src/gradients/Selu_grad.ts
@@ -17,11 +17,11 @@
import {Selu} from '../kernel_names';
import {GradConfig} from '../kernel_registry';
import {cast} from '../ops/array_ops';
+import {exp} from '../ops/exp';
import {greater} from '../ops/greater';
import {mul} from '../ops/mul';
import {SELU_SCALE, SELU_SCALEALPHA} from '../ops/selu_util';
import {scalar} from '../ops/tensor_ops';
-import {exp} from '../ops/unary_ops';
import {where} from '../ops/where';
import {Tensor} from '../tensor';

25 changes: 25 additions & 0 deletions tfjs-core/src/kernel_names.ts
@@ -143,6 +143,13 @@ export interface BroadCastToAttrs
export const Ceil = 'Ceil';
export type CeilInputs = UnaryInputs;

export const ClipByValue = 'ClipByValue';
export type ClipByValueInputs = UnaryInputs;
export interface ClipByValueAttrs {
clipValueMin: number;
clipValueMax: number;
}

export const Complex = 'Complex';
export type ComplexInputs = Pick<NamedTensorInfoMap, 'real'|'imag'>;

@@ -283,9 +290,18 @@ export type EluInputs = Pick<NamedTensorInfoMap, 'x'>;
export const EluGrad = 'EluGrad';
export type EluGradInputs = Pick<NamedTensorInfoMap, 'dy'|'y'>;

export const Erf = 'Erf';
export type ErfInputs = UnaryInputs;

export const Equal = 'Equal';
export type EqualInputs = BinaryInputs;

export const Exp = 'Exp';
export type ExpInputs = UnaryInputs;

export const Expm1 = 'Expm1';
export type Expm1Inputs = UnaryInputs;

export const Floor = 'Floor';
export type FloorInputs = UnaryInputs;

@@ -333,6 +349,12 @@ export type LessInputs = BinaryInputs;
export const LessEqual = 'LessEqual';
export type LessEqualInputs = BinaryInputs;

export const Log = 'Log';
export type LogInputs = UnaryInputs;

export const Log1p = 'Log1p';
export type Log1pInputs = UnaryInputs;

export const LogicalAnd = 'LogicalAnd';
export type LogicalAndInputs = BinaryInputs;

@@ -502,6 +524,9 @@ export interface ProdAttrs {
export const Real = 'Real';
export type RealInputs = Pick<NamedTensorInfoMap, 'input'>;

export const Reciprocal = 'Reciprocal';
export type ReciprocalInputs = UnaryInputs;

export const Relu = 'Relu';
export type ReluInputs = Pick<NamedTensorInfoMap, 'x'>;
