diff --git a/scripts/release-util.ts b/scripts/release-util.ts index 893fffff5fd..db10b6b6d32 100755 --- a/scripts/release-util.ts +++ b/scripts/release-util.ts @@ -51,7 +51,7 @@ export const UNION_PHASE: Phase = { // the test to tf.layers. export const NODE_PHASE: Phase = { packages: ['tfjs-node', 'tfjs-node-gpu'], - deps: ['tfjs', 'tfjs-core', 'tfjs-layers'], + deps: ['tfjs', 'tfjs-core'], scripts: {'tfjs-node-gpu': {'before-yarn': ['yarn prep-gpu']}} }; diff --git a/tfjs-core/yarn.lock b/tfjs-core/yarn.lock index e708e93c2b3..78de9633ccd 100644 --- a/tfjs-core/yarn.lock +++ b/tfjs-core/yarn.lock @@ -18,29 +18,10 @@ esutils "^2.0.2" js-tokens "^4.0.0" -"@bazel/bazel-darwin_x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@bazel/bazel-darwin_x64/-/bazel-darwin_x64-0.24.0.tgz#828ef298d8d542961df388f17b0244f4f4302a74" - integrity sha512-xly44vkcD/fauUb7Lm5Lme4qhEZdkuuyBKSVQUHPbYAGDdbj/W8dupI3bZREkJAgG/WrRU+WXUemMj4U8ZcLcw== - -"@bazel/bazel-linux_x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@bazel/bazel-linux_x64/-/bazel-linux_x64-0.24.0.tgz#9ef2e7266833ad2221fe4af4ceb6763d2897e3ff" - integrity sha512-p5ylPLWnJZDGbaIFBrtD/tp3Su5rMdzeeNJKU24XyiWQTHVZ3OD3I2Fb0ILCgfBjY8AlA7EtCtOI4hYnAuIOtg== - -"@bazel/bazel-win32_x64@0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@bazel/bazel-win32_x64/-/bazel-win32_x64-0.24.0.tgz#02d83113a6c6ed99795a3e41bff5631aa141638d" - integrity sha512-/bcSEx+GoV/q7H4WM0jazfxTcurSiIIePhRv+d05mxRDcaWwhCO8KzmmZRWH1abW6npvq5tLkbSQi7G7nUBhgg== - -"@bazel/bazel@^0.24.0": - version "0.24.0" - resolved "https://registry.yarnpkg.com/@bazel/bazel/-/bazel-0.24.0.tgz#f4e68e3680ac299858c24c26be3d08d1151e78fc" - integrity sha512-/5E55tqH9ogAGF9Dd7RSCJmk7/xdlsPTAhsX3yEsEMs7GLdHlgD3jbeePsKUiHKKr8LXAufjTs2pXQfjrkZRMg== - optionalDependencies: - "@bazel/bazel-darwin_x64" "0.24.0" - "@bazel/bazel-linux_x64" "0.24.0" - "@bazel/bazel-win32_x64" "0.24.0" +"@bazel/bazelisk@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@bazel/bazelisk/-/bazelisk-1.3.0.tgz#dc312dd30ad01e9af86e53b40795ab6e545fa55b" + integrity sha512-73H1nq3572tTf+dhDT86aWQN+LCyfxrh05jabqPXp6cpR8soxte3gS5oUqkN36fUe+J2HzNiV4CXZTz4Xytd3Q== "@bazel/typescript@^0.27.8": version "0.27.10" diff --git a/tfjs-node/package.json b/tfjs-node/package.json index 555241ed621..16dd6f64de8 100644 --- a/tfjs-node/package.json +++ b/tfjs-node/package.json @@ -47,7 +47,6 @@ "upload-windows-addon": "./scripts/build-and-upload-windows-addon.bat" }, "devDependencies": { - "@tensorflow/tfjs-layers": "link:../tfjs-layers", "@types/jasmine": "~2.8.6", "@types/node": "^10.5.1", "@types/progress": "^2.0.1", diff --git a/tfjs-node/src/canvas_test.ts b/tfjs-node/src/canvas_test.ts index 051464c0ea4..2c8089dac7a 100644 --- a/tfjs-node/src/canvas_test.ts +++ b/tfjs-node/src/canvas_test.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import * as tf from '@tensorflow/tfjs-core'; +import * as tf from '@tensorflow/tfjs'; class MockContext { getImageData(x: number, y: number, width: number, height: number) { diff --git a/tfjs-node/src/image.ts b/tfjs-node/src/image.ts index e408f346392..d3b0101ffc6 100644 --- a/tfjs-node/src/image.ts +++ b/tfjs-node/src/image.ts @@ -15,8 +15,7 @@ * ============================================================================= */ -import {Tensor} from '@tensorflow/tfjs'; -import {Tensor3D, Tensor4D, tidy, util} from '@tensorflow/tfjs-core'; +import {Tensor, Tensor3D, Tensor4D, tidy, 
util} from '@tensorflow/tfjs'; import {ensureTensorflowBackend, nodeBackend} from './nodejs_kernel_backend'; export enum ImageType { diff --git a/tfjs-node/src/image_test.ts b/tfjs-node/src/image_test.ts index 665e220e65a..f0131dfb0a7 100644 --- a/tfjs-node/src/image_test.ts +++ b/tfjs-node/src/image_test.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {memory, setBackend, test_util} from '@tensorflow/tfjs-core'; +import {memory, setBackend, test_util} from '@tensorflow/tfjs'; import * as fs from 'fs'; import {promisify} from 'util'; import {getImageType, ImageType} from './image'; diff --git a/tfjs-node/src/io/file_system.ts b/tfjs-node/src/io/file_system.ts index 3fc021f4ce0..f6413cc5520 100644 --- a/tfjs-node/src/io/file_system.ts +++ b/tfjs-node/src/io/file_system.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import * as tfc from '@tensorflow/tfjs-core'; +import * as tf from '@tensorflow/tfjs'; import * as fs from 'fs'; import {dirname, join, resolve} from 'path'; import {promisify} from 'util'; @@ -39,7 +39,7 @@ function doesNotExistHandler(name: string): (e: NodeJS.ErrnoException) => }; } -export class NodeFileSystem implements tfc.io.IOHandler { +export class NodeFileSystem implements tf.io.IOHandler { static readonly URL_SCHEME = 'file://'; protected readonly path: string|string[]; @@ -68,7 +68,7 @@ export class NodeFileSystem implements tfc.io.IOHandler { */ constructor(path: string|string[]) { if (Array.isArray(path)) { - tfc.util.assert( + tf.util.assert( path.length === 2, () => 'file paths must have a length of 2, ' + `(actual length is ${path.length}).`); @@ -78,8 +78,8 @@ export class NodeFileSystem implements tfc.io.IOHandler { } } - async save(modelArtifacts: tfc.io.ModelArtifacts): - Promise { + async save(modelArtifacts: tf.io.ModelArtifacts): + Promise { if (Array.isArray(this.path)) { throw new Error('Cannot perform saving to multiple paths.'); } @@ -98,7 +98,7 @@ export class NodeFileSystem implements tfc.io.IOHandler { paths: [this.WEIGHTS_BINARY_FILENAME], weights: modelArtifacts.weightSpecs }]; - const modelJSON: tfc.io.ModelJSON = { + const modelJSON: tf.io.ModelJSON = { modelTopology: modelArtifacts.modelTopology, weightsManifest, format: modelArtifacts.format, @@ -117,19 +117,19 @@ export class NodeFileSystem implements tfc.io.IOHandler { weightsBinPath, Buffer.from(modelArtifacts.weightData), 'binary'); return { - // TODO(cais): Use explicit tfc.io.ModelArtifactsInfo type below once it + // TODO(cais): Use explicit tf.io.ModelArtifactsInfo type below once it // is available. // tslint:disable-next-line:no-any modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts) as any }; } } - async load(): Promise { + async load(): Promise { return Array.isArray(this.path) ? 
this.loadBinaryModel() : this.loadJSONModel(); } - protected async loadBinaryModel(): Promise { + protected async loadBinaryModel(): Promise { const topologyPath = this.path[0]; const weightManifestPath = this.path[1]; const topology = @@ -150,7 +150,7 @@ export class NodeFileSystem implements tfc.io.IOHandler { const modelTopology = await readFile(this.path[0]); const weightsManifest = JSON.parse(await readFile(this.path[1], 'utf8')); - const modelArtifacts: tfc.io.ModelArtifacts = { + const modelArtifacts: tf.io.ModelArtifacts = { modelTopology, }; const [weightSpecs, weightData] = @@ -162,7 +162,7 @@ export class NodeFileSystem implements tfc.io.IOHandler { return modelArtifacts; } - protected async loadJSONModel(): Promise { + protected async loadJSONModel(): Promise { const path = this.path as string; const info = await stat(path).catch(doesNotExistHandler('Path')); @@ -171,7 +171,7 @@ export class NodeFileSystem implements tfc.io.IOHandler { if (info.isFile()) { const modelJSON = JSON.parse(await readFile(path, 'utf8')); - const modelArtifacts: tfc.io.ModelArtifacts = { + const modelArtifacts: tf.io.ModelArtifacts = { modelTopology: modelJSON.modelTopology, format: modelJSON.format, generatedBy: modelJSON.generatedBy, @@ -198,11 +198,11 @@ export class NodeFileSystem implements tfc.io.IOHandler { } private async loadWeights( - weightsManifest: tfc.io.WeightsManifestConfig, - path: string): Promise<[tfc.io.WeightsManifestEntry[], ArrayBuffer]> { + weightsManifest: tf.io.WeightsManifestConfig, + path: string): Promise<[tf.io.WeightsManifestEntry[], ArrayBuffer]> { const dirName = dirname(path); const buffers: Buffer[] = []; - const weightSpecs: tfc.io.WeightsManifestEntry[] = []; + const weightSpecs: tf.io.WeightsManifestEntry[] = []; for (const group of weightsManifest) { for (const path of group.paths) { const weightFilePath = join(dirName, path); diff --git a/tfjs-node/src/io/file_system_test.ts b/tfjs-node/src/io/file_system_test.ts index 6ca0ee8f1ce..5e3049d9d0d 100644 --- a/tfjs-node/src/io/file_system_test.ts +++ b/tfjs-node/src/io/file_system_test.ts @@ -15,9 +15,7 @@ * ============================================================================= */ -import * as tfc from '@tensorflow/tfjs-core'; -import {test_util} from '@tensorflow/tfjs-core'; -import * as tfl from '@tensorflow/tfjs-layers'; +import * as tf from '@tensorflow/tfjs'; import * as fs from 'fs'; import * as path from 'path'; import * as rimraf from 'rimraf'; @@ -65,7 +63,7 @@ describe('File system IOHandler', () => { }], 'backend': 'tensorflow' }; - const weightSpecs1: tfc.io.WeightsManifestEntry[] = [ + const weightSpecs1: tf.io.WeightsManifestEntry[] = [ { name: 'dense/kernel', shape: [3, 1], @@ -93,7 +91,7 @@ describe('File system IOHandler', () => { it('save succeeds with newly created directory', async done => { const t0 = new Date(); const dir = path.join(testDir, 'save-destination'); - const handler = tfc.io.getSaveHandlers(`file://${dir}`)[0]; + const handler = tf.io.getSaveHandlers(`file://${dir}`)[0]; handler .save({ modelTopology: modelTopology1, @@ -128,7 +126,7 @@ describe('File system IOHandler', () => { const dir = path.join(testDir, 'save-destination'); // Create a file at the locatin. 
await writeFile(dir, 'foo'); - const handler = tfc.io.getSaveHandlers(`file://${dir}`)[0]; + const handler = tf.io.getSaveHandlers(`file://${dir}`)[0]; handler .save({ modelTopology: modelTopology1, @@ -145,7 +143,7 @@ describe('File system IOHandler', () => { }); it('save-load round trip: one weight file', done => { - const handler1 = tfc.io.getSaveHandlers(`file://${testDir}`)[0]; + const handler1 = tf.io.getSaveHandlers(`file://${testDir}`)[0]; handler1 .save({ modelTopology: modelTopology1, @@ -154,7 +152,7 @@ describe('File system IOHandler', () => { }) .then(saveResult => { const modelJSONPath = path.join(testDir, 'model.json'); - const handler2 = tfc.io.getLoadHandlers(`file://${modelJSONPath}`)[0]; + const handler2 = tf.io.getLoadHandlers(`file://${modelJSONPath}`)[0]; handler2.load() .then(modelArtifacts => { expect(modelArtifacts.modelTopology).toEqual(modelTopology1); @@ -170,7 +168,7 @@ describe('File system IOHandler', () => { describe('load json model', () => { it('load: two weight files', async done => { - const weightsManifest: tfc.io.WeightsManifestConfig = [ + const weightsManifest: tf.io.WeightsManifestConfig = [ { paths: ['weights.1.bin'], weights: [{ @@ -209,7 +207,7 @@ describe('File system IOHandler', () => { // Load the artifacts consisting of a model.json and two binary weight // files. - const handler = tfc.io.getLoadHandlers(`file://${modelJSONPath}`)[0]; + const handler = tf.io.getLoadHandlers(`file://${modelJSONPath}`)[0]; handler.load() .then(modelArtifacts => { expect(modelArtifacts.modelTopology).toEqual(modelTopology1); @@ -225,7 +223,7 @@ describe('File system IOHandler', () => { dtype: 'float32', } ]); - test_util.expectArraysClose( + tf.test_util.expectArraysClose( new Float32Array(modelArtifacts.weightData), new Float32Array([-1.1, -3.3, -3.3, -7.7])); done(); @@ -235,7 +233,7 @@ describe('File system IOHandler', () => { it('loading from nonexistent model.json path fails', done => { const handler = - tfc.io.getLoadHandlers(`file://${testDir}/foo/model.json`)[0]; + tf.io.getLoadHandlers(`file://${testDir}/foo/model.json`)[0]; handler.load() .then(getModelArtifactsInfoForJSON => { done.fail( @@ -250,7 +248,7 @@ describe('File system IOHandler', () => { }); it('loading from missing weights path fails', async done => { - const weightsManifest: tfc.io.WeightsManifestConfig = [ + const weightsManifest: tf.io.WeightsManifestConfig = [ { paths: ['weights.1.bin'], weights: [{ @@ -286,7 +284,7 @@ describe('File system IOHandler', () => { // Load the artifacts consisting of a model.json and two binary weight // files. 
- const handler = tfc.io.getLoadHandlers(`file://${modelJSONPath}`)[0]; + const handler = tf.io.getLoadHandlers(`file://${modelJSONPath}`)[0]; handler.load() .then(modelArtifacts => { done.fail( @@ -303,7 +301,7 @@ describe('File system IOHandler', () => { describe('load binary model', () => { it('load: two weight files', async done => { - const weightsManifest: tfc.io.WeightsManifestConfig = [ + const weightsManifest: tf.io.WeightsManifestConfig = [ { paths: ['weights.1.bin'], weights: [{ @@ -348,7 +346,7 @@ describe('File system IOHandler', () => { new NodeFileSystem([`${modelPath}`, `${modelManifestJSONPath}`]); handler.load() .then(modelArtifacts => { - test_util.expectArraysClose( + tf.test_util.expectArraysClose( new Uint8Array(modelArtifacts.modelTopology as ArrayBuffer), new Uint8Array(modelData)); expect(modelArtifacts.weightSpecs).toEqual([ @@ -363,7 +361,7 @@ describe('File system IOHandler', () => { dtype: 'float32', } ]); - test_util.expectArraysClose( + tf.test_util.expectArraysClose( new Float32Array(modelArtifacts.weightData), new Float32Array([-1.1, -3.3, -3.3, -7.7])); done(); @@ -394,7 +392,7 @@ describe('File system IOHandler', () => { }); it('loading from missing weights path fails', async done => { - const weightsManifest: tfc.io.WeightsManifestConfig = [ + const weightsManifest: tf.io.WeightsManifestConfig = [ { paths: ['weights.1.bin'], weights: [{ @@ -454,21 +452,21 @@ describe('File system IOHandler', () => { }); it('Save and load model with loss and optimizer', async () => { - const model = tfl.sequential(); - model.add(tfl.layers.dense( + const model = tf.sequential(); + model.add(tf.layers.dense( {units: 1, kernelInitializer: 'zeros', inputShape: [1]})); model.compile( - {loss: 'meanSquaredError', optimizer: tfc.train.adam(2.5e-2)}); + {loss: 'meanSquaredError', optimizer: tf.train.adam(2.5e-2)}); - const xs = tfc.tensor2d([1, 2, 3, 4], [4, 1]); - const ys = tfc.tensor2d([-1, -3, -5, -7], [4, 1]); + const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]); + const ys = tf.tensor2d([-1, -3, -5, -7], [4, 1]); await model.fit(xs, ys, {epochs: 2, shuffle: false, verbose: 0}); const saveURL = `file://${testDir}`; const loadURL = `file://${testDir}/model.json`; await model.save(saveURL, {includeOptimizer: true}); - const model2 = await tfl.loadLayersModel(loadURL); + const model2 = await tf.loadLayersModel(loadURL); const optimizerConfig = model2.optimizer.getConfig(); expect(model2.optimizer.getClassName()).toEqual('Adam'); expect(optimizerConfig['learningRate']).toEqual(2.5e-2); @@ -484,8 +482,8 @@ describe('File system IOHandler', () => { }); it('Save and load model with user-defined metadata', async () => { - const model = tfl.sequential(); - model.add(tfl.layers.dense({units: 3, inputShape: [4]})); + const model = tf.sequential(); + model.add(tf.layers.dense({units: 3, inputShape: [4]})); model.setUserDefinedMetadata( {'outputLabels': ['Label1', 'Label2', 'Label3']}); @@ -493,7 +491,7 @@ describe('File system IOHandler', () => { const loadURL = `file://${testDir}/model.json`; await model.save(saveURL); - const model2 = await tfl.loadLayersModel(loadURL); + const model2 = await tf.loadLayersModel(loadURL); expect(model2.getUserDefinedMetadata()).toEqual({ 'outputLabels': ['Label1', 'Label2', 'Label3'] }); diff --git a/tfjs-node/src/io/io_utils.ts b/tfjs-node/src/io/io_utils.ts index 98c1c81e5e9..185494df3d6 100644 --- a/tfjs-node/src/io/io_utils.ts +++ b/tfjs-node/src/io/io_utils.ts @@ -15,7 +15,7 @@ * 
============================================================================= */ -import * as tfc from '@tensorflow/tfjs-core'; +import * as tf from '@tensorflow/tfjs'; /** * Convert an ArrayBuffer to a Buffer. @@ -52,7 +52,7 @@ export function toArrayBuffer(buf: Buffer|Buffer[]): ArrayBuffer { } } -// TODO(cais): Use explicit tfc.io.ModelArtifactsInfo return type below once it +// TODO(cais): Use explicit tf.io.ModelArtifactsInfo return type below once it // is available. /** * Populate ModelArtifactsInfo fields for a model with JSON topology. @@ -60,7 +60,7 @@ export function toArrayBuffer(buf: Buffer|Buffer[]): ArrayBuffer { * @returns A ModelArtifactsInfo object. */ export function getModelArtifactsInfoForJSON( - modelArtifacts: tfc.io.ModelArtifacts) { + modelArtifacts: tf.io.ModelArtifacts) { if (modelArtifacts.modelTopology instanceof ArrayBuffer) { throw new Error('Expected JSON model topology, received ArrayBuffer.'); } diff --git a/tfjs-node/src/io/node_http.ts b/tfjs-node/src/io/node_http.ts index 6583b0410dc..ab644474ca0 100644 --- a/tfjs-node/src/io/node_http.ts +++ b/tfjs-node/src/io/node_http.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {io} from '@tensorflow/tfjs-core'; +import {io} from '@tensorflow/tfjs'; /** * Factory function for HTTP IO Handler in Node.js. diff --git a/tfjs-node/src/io/node_http_test.ts b/tfjs-node/src/io/node_http_test.ts index dafd4aa2476..75b33ec0ba8 100644 --- a/tfjs-node/src/io/node_http_test.ts +++ b/tfjs-node/src/io/node_http_test.ts @@ -15,13 +15,12 @@ * ============================================================================= */ -import * as tfc from '@tensorflow/tfjs-core'; -import * as tfl from '@tensorflow/tfjs-layers'; +import * as tf from '@tensorflow/tfjs'; import * as tfn from '../index'; // We still need node-fetch so that we can mock the core -// tfc.env().platform.fetch call and return a valid response. +// tf.env().platform.fetch call and return a valid response. 
// tslint:disable-next-line:no-require-imports const fetch = require('node-fetch'); @@ -68,7 +67,7 @@ describe('nodeHTTPRequest-load', () => { [filename: string]: string|Float32Array|Int32Array|ArrayBuffer|Uint8Array| Uint16Array }) => { - spyOn(tfc.env().platform, 'fetch') + spyOn(tf.env().platform, 'fetch') .and.callFake((path: string, init: RequestInit) => { return new Promise((resolve, reject) => { let contentType = ''; @@ -100,7 +99,7 @@ describe('nodeHTTPRequest-load', () => { }); it('Load through NodeHTTPRequest object', async () => { - const weightManifest1: tfc.io.WeightsManifestConfig = [{ + const weightManifest1: tf.io.WeightsManifestConfig = [{ paths: ['weightfile0'], weights: [ { @@ -137,7 +136,7 @@ describe('nodeHTTPRequest-load', () => { }); it('Load through registered handler', async () => { - const weightManifest1: tfc.io.WeightsManifestConfig = [{ + const weightManifest1: tf.io.WeightsManifestConfig = [{ paths: ['weightfile0'], weights: [ { @@ -159,7 +158,7 @@ describe('nodeHTTPRequest-load', () => { 'https://localhost/weightfile0': floatData, }); - const model = await tfl.loadLayersModel('https://localhost/model.json'); + const model = await tf.loadLayersModel('https://localhost/model.json'); expect(model.inputs.length).toEqual(1); expect(model.inputs[0].shape).toEqual([null, 3]); expect(model.outputs.length).toEqual(1); diff --git a/tfjs-node/src/kernels/Softmax.ts b/tfjs-node/src/kernels/Softmax.ts index af08a9f15d0..698d72b68c1 100644 --- a/tfjs-node/src/kernels/Softmax.ts +++ b/tfjs-node/src/kernels/Softmax.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {NamedTensorInfoMap, registerKernel, TensorInfo} from '@tensorflow/tfjs-core'; +import {NamedTensorInfoMap, registerKernel, TensorInfo} from '@tensorflow/tfjs'; import {createTypeOpAttr, NodeJSKernelBackend} from '../nodejs_kernel_backend'; diff --git a/tfjs-node/src/kernels/SquaredDifference.ts b/tfjs-node/src/kernels/SquaredDifference.ts index 1ff84ee41fe..aae976d44fc 100644 --- a/tfjs-node/src/kernels/SquaredDifference.ts +++ b/tfjs-node/src/kernels/SquaredDifference.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {KernelConfig, registerKernel} from '@tensorflow/tfjs-core'; +import {KernelConfig, registerKernel} from '@tensorflow/tfjs'; import {createTypeOpAttr, NodeJSKernelBackend} from '../nodejs_kernel_backend'; export const squaredDifferenceConfig: KernelConfig = { diff --git a/tfjs-node/src/kernels/non_max_suppression_v5.ts b/tfjs-node/src/kernels/non_max_suppression_v5.ts index 6f6dbb46b89..83b9e7ada05 100644 --- a/tfjs-node/src/kernels/non_max_suppression_v5.ts +++ b/tfjs-node/src/kernels/non_max_suppression_v5.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {NamedAttrMap, NamedTensorInfoMap, registerKernel, scalar, Tensor1D, Tensor2D, TensorInfo} from '@tensorflow/tfjs-core'; +import {NamedAttrMap, NamedTensorInfoMap, registerKernel, scalar, Tensor1D, Tensor2D, TensorInfo} from '@tensorflow/tfjs'; import {createTypeOpAttr, NodeJSKernelBackend} from '../nodejs_kernel_backend'; diff --git a/tfjs-node/src/nodejs_kernel_backend.ts b/tfjs-node/src/nodejs_kernel_backend.ts index 0545aee0fdc..ab4df67d161 100644 --- a/tfjs-node/src/nodejs_kernel_backend.ts +++ b/tfjs-node/src/nodejs_kernel_backend.ts @@ -15,8 +15,8 @@ * ============================================================================= */ -import * as tfc from 
'@tensorflow/tfjs-core'; -import {backend_util, BackendTimingInfo, DataId, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, Tensor4D, Tensor5D, TensorInfo, tidy, util} from '@tensorflow/tfjs-core'; +import * as tf from '@tensorflow/tfjs'; +import {backend_util, BackendTimingInfo, DataId, DataType, fill, KernelBackend, ones, Rank, rsqrt, Scalar, scalar, ShapeMap, Tensor, Tensor1D, tensor1d, Tensor2D, tensor2d, Tensor3D, Tensor4D, Tensor5D, TensorInfo, tidy, util} from '@tensorflow/tfjs'; // tslint:disable-next-line: no-imports-from-dist import {EPSILON_FLOAT32} from '@tensorflow/tfjs-core/dist/backends/backend'; // tslint:disable-next-line: no-imports-from-dist @@ -37,14 +37,14 @@ export class NodeJSKernelBackend extends KernelBackend { binding: TFJSBinding; isGPUPackage: boolean; isUsingGpuDevice: boolean; - private tensorMap: tfc.DataStorage; + private tensorMap: tf.DataStorage; constructor(binding: TFJSBinding, packageName: string) { super(); this.binding = binding; this.isGPUPackage = packageName === '@tensorflow/tfjs-node-gpu'; this.isUsingGpuDevice = this.binding.isUsingGpuDevice(); - this.tensorMap = new tfc.DataStorage(this, tfc.engine()); + this.tensorMap = new tf.DataStorage(this, tf.engine()); } private getDTypeInteger(dtype: DataType): number { @@ -109,7 +109,7 @@ export class NodeJSKernelBackend extends KernelBackend { default: throw new Error(`Unknown dtype enum ${metadata.dtype}`); } - return tfc.engine().makeTensorFromDataId(newId, metadata.shape, dtype); + return tf.engine().makeTensorFromDataId(newId, metadata.shape, dtype); } // Prepares Tensor instances for Op execution. @@ -1971,11 +1971,11 @@ export class NodeJSKernelBackend extends KernelBackend { /** Returns an instance of the Node.js backend. */ export function nodeBackend(): NodeJSKernelBackend { - return tfc.findBackend('tensorflow') as NodeJSKernelBackend; + return tf.findBackend('tensorflow') as NodeJSKernelBackend; } /** Returns the TF dtype for a given DataType. */ -export function getTFDType(dataType: tfc.DataType): number { +export function getTFDType(dataType: tf.DataType): number { const binding = nodeBackend().binding; switch (dataType) { case 'float32': @@ -2006,7 +2006,7 @@ export function getTFDType(dataType: tfc.DataType): number { * @deprecated Please use createTensorsTypeOpAttr() going forward. */ export function createTypeOpAttr( - attrName: string, dtype: tfc.DataType): TFEOpAttr { + attrName: string, dtype: tf.DataType): TFEOpAttr { return { name: attrName, type: nodeBackend().binding.TF_ATTR_TYPE, @@ -2019,7 +2019,7 @@ export function createTypeOpAttr( * Tensors. */ export function createTensorsTypeOpAttr( - attrName: string, tensors: tfc.Tensor|tfc.Tensor[]) { + attrName: string, tensors: tf.Tensor|tf.Tensor[]) { if (isNullOrUndefined(tensors)) { throw new Error('Invalid input tensors value.'); } @@ -2031,7 +2031,7 @@ export function createTensorsTypeOpAttr( } /** Returns the dtype number for a single or list of input Tensors. 
*/ -function getTFDTypeForInputs(tensors: tfc.Tensor|tfc.Tensor[]): number { +function getTFDTypeForInputs(tensors: tf.Tensor|tf.Tensor[]): number { if (isNullOrUndefined(tensors)) { throw new Error('Invalid input tensors value.'); } @@ -2046,8 +2046,8 @@ function getTFDTypeForInputs(tensors: tfc.Tensor|tfc.Tensor[]): number { } export function ensureTensorflowBackend() { - tfc.util.assert( - tfc.getBackend() === 'tensorflow', + tf.util.assert( + tf.getBackend() === 'tensorflow', () => `Expect the current backend to be "tensorflow", but got "${ - tfc.getBackend()}"`); + tf.getBackend()}"`); } diff --git a/tfjs-node/src/nodejs_kernel_backend_test.ts b/tfjs-node/src/nodejs_kernel_backend_test.ts index 810dfbe051d..44653247b87 100644 --- a/tfjs-node/src/nodejs_kernel_backend_test.ts +++ b/tfjs-node/src/nodejs_kernel_backend_test.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import * as tf from '@tensorflow/tfjs-core'; +import * as tf from '@tensorflow/tfjs'; import {createTensorsTypeOpAttr, createTypeOpAttr, ensureTensorflowBackend, getTFDType, nodeBackend, NodeJSKernelBackend} from './nodejs_kernel_backend'; describe('delayed upload', () => { diff --git a/tfjs-node/src/saved_model_test.ts b/tfjs-node/src/saved_model_test.ts index d0a9b903fe7..830ebdfb630 100644 --- a/tfjs-node/src/saved_model_test.ts +++ b/tfjs-node/src/saved_model_test.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {NamedTensorMap, test_util} from '@tensorflow/tfjs-core'; +import {NamedTensorMap, test_util} from '@tensorflow/tfjs'; import * as tf from './index'; import {nodeBackend} from './nodejs_kernel_backend'; import {getEnumKeyFromValue, getInputAndOutputNodeNameFromMetaGraphInfo, readSavedModelProto} from './saved_model'; diff --git a/tfjs-node/src/tfjs_binding.ts b/tfjs-node/src/tfjs_binding.ts index a01c46c9310..d4120b0f9e5 100644 --- a/tfjs-node/src/tfjs_binding.ts +++ b/tfjs-node/src/tfjs_binding.ts @@ -15,7 +15,7 @@ * ============================================================================= */ -import {backend_util} from '@tensorflow/tfjs-core'; +import {backend_util} from '@tensorflow/tfjs'; export declare class TensorMetadata { id: number;
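Not part of the patch above: a minimal sketch, under stated assumptions, of the consumer-facing pattern this change converges on, where tf.layers, tf.train and tf.io are all pulled from the single union API that @tensorflow/tfjs-node re-exports, and model save/load goes through the file:// handler registered in src/io/file_system.ts. The path /tmp/tfjs-demo and the tiny one-unit model are illustrative only, not taken from the patch.

import * as tf from '@tensorflow/tfjs-node';

async function main() {
  // tf.sequential / tf.layers / tf.train come from the union API re-exported
  // by @tensorflow/tfjs-node, mirroring the save/load round trip exercised in
  // file_system_test.ts above.
  const model = tf.sequential();
  model.add(tf.layers.dense(
      {units: 1, kernelInitializer: 'zeros', inputShape: [1]}));
  model.compile({loss: 'meanSquaredError', optimizer: tf.train.adam(2.5e-2)});

  const xs = tf.tensor2d([1, 2, 3, 4], [4, 1]);
  const ys = tf.tensor2d([-1, -3, -5, -7], [4, 1]);
  await model.fit(xs, ys, {epochs: 2, verbose: 0});

  // file:// URLs are routed to the NodeFileSystem handler shown above; the
  // directory is created on save and model.json plus weights.bin are written
  // into it. '/tmp/tfjs-demo' is a hypothetical destination.
  await model.save('file:///tmp/tfjs-demo');
  const restored = await tf.loadLayersModel('file:///tmp/tfjs-demo/model.json');
  (restored.predict(tf.tensor2d([5], [1, 1])) as tf.Tensor).print();
}

main();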