From d359e654abd6641f061a0aa05407d4dc8d7e4bcb Mon Sep 17 00:00:00 2001
From: Ann Yuan
Date: Thu, 21 Nov 2019 10:08:01 -0500
Subject: [PATCH 1/3] fused logic

---
 tfjs-core/src/ops/fused_util.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tfjs-core/src/ops/fused_util.ts b/tfjs-core/src/ops/fused_util.ts
index 73f1fc6d9dc..2a31c9d2984 100644
--- a/tfjs-core/src/ops/fused_util.ts
+++ b/tfjs-core/src/ops/fused_util.ts
@@ -43,5 +43,5 @@ export type FusedConv2DConfig = {
 // Whether we should call fused ops.
 export const shouldFuse = (gradientDepth: number, activation: Activation) => {
   const gradientMode = gradientDepth > 0;
-  return !gradientMode && (activation === 'linear' || activation === 'relu');
+  return !gradientMode || (activation === 'linear' || activation === 'relu');
 };

From eada4b11d3a4373ff19342fb94bf335de39f9c58 Mon Sep 17 00:00:00 2001
From: Ann Yuan
Date: Thu, 21 Nov 2019 11:12:04 -0500
Subject: [PATCH 2/3] update

---
 tfjs-core/src/ops/fused_ops.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tfjs-core/src/ops/fused_ops.ts b/tfjs-core/src/ops/fused_ops.ts
index 04645d191fe..9657d04c575 100644
--- a/tfjs-core/src/ops/fused_ops.ts
+++ b/tfjs-core/src/ops/fused_ops.ts
@@ -407,7 +407,7 @@ function fusedConv2d_({
 
   let biasGradient = {};
   if (bias != null) {
-    biasGradient = {$bias: () => getFusedBiasGradient($bias, dyActivation)};
+    biasGradient = {bias: () => getFusedBiasGradient($bias, dyActivation)};
   }
 
   return Object.assign(

From 80de333c562858a4d13f4494ec1a61e69a8c3f17 Mon Sep 17 00:00:00 2001
From: Ann Yuan
Date: Thu, 21 Nov 2019 17:52:06 -0500
Subject: [PATCH 3/3] fix

---
 tfjs-core/src/ops/fused_util.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tfjs-core/src/ops/fused_util.ts b/tfjs-core/src/ops/fused_util.ts
index 2a31c9d2984..d9f16133966 100644
--- a/tfjs-core/src/ops/fused_util.ts
+++ b/tfjs-core/src/ops/fused_util.ts
@@ -43,5 +43,5 @@ export type FusedConv2DConfig = {
 // Whether we should call fused ops.
 export const shouldFuse = (gradientDepth: number, activation: Activation) => {
   const gradientMode = gradientDepth > 0;
-  return !gradientMode || (activation === 'linear' || activation === 'relu');
+  return !gradientMode || activation === 'linear';
 };
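
Reviewer note on the net effect of the series: after PATCH 3/3, shouldFuse
returns true whenever no gradient is being computed, and in gradient mode
only for the 'linear' activation. Below is a minimal standalone TypeScript
sketch of the final predicate. The Activation union shown here is a
hypothetical stand-in for the real type defined elsewhere in tfjs-core, and
the behavior comments are illustrative, not taken from the patches:

    // Hypothetical stand-in for tfjs-core's Activation union type.
    type Activation = 'linear' | 'relu' | 'elu' | 'relu6' | 'prelu';

    // Final form after PATCH 3/3: outside gradient mode, always fuse;
    // inside gradient mode, fuse only for 'linear' (presumably the one
    // activation whose gradient the fused path currently supports).
    const shouldFuse = (gradientDepth: number, activation: Activation) => {
      const gradientMode = gradientDepth > 0;
      return !gradientMode || activation === 'linear';
    };

    // Expected behavior:
    // shouldFuse(0, 'relu')   === true   // no gradient: fuse freely
    // shouldFuse(1, 'linear') === true   // gradient mode: linear still fuses
    // shouldFuse(1, 'relu')   === false  // gradient mode: relu falls back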