From f7669447009069a6fd74675e09753a7158cfa72e Mon Sep 17 00:00:00 2001
From: David Dengg
Date: Wed, 3 Jun 2020 09:19:53 +0200
Subject: [PATCH 1/2] Adds loadSync to GraphModel

---
 tfjs-converter/src/executor/graph_model.ts | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

diff --git a/tfjs-converter/src/executor/graph_model.ts b/tfjs-converter/src/executor/graph_model.ts
index 2bbda896651..c640717e75f 100644
--- a/tfjs-converter/src/executor/graph_model.ts
+++ b/tfjs-converter/src/executor/graph_model.ts
@@ -107,6 +107,7 @@ export class GraphModel implements InferenceModel {
    * Loads the model and weight files, construct the in memory weight map and
    * compile the inference graph.
    */
+
   async load(): Promise<boolean> {
     this.findIOHandler();
     if (this.handler.load == null) {
@@ -114,7 +115,18 @@
           'Cannot proceed with model loading because the IOHandler provided ' +
           'does not have the `load` method implemented.');
     }
-    this.artifacts = await this.handler.load();
+    const artifacts = await this.handler.load();
+
+    return this.loadSync(artifacts);
+  }
+
+  /**
+   * Synchronously construct the in memory weight map and
+   * compile the inference graph.
+   @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}
+   */
+  loadSync(artifacts:io.ModelArtifacts) {
+    this.artifacts = artifacts;
     const graph = this.artifacts.modelTopology as tensorflow.IGraphDef;
     let signature = {};
     if (this.artifacts.userDefinedMetadata != null) {

From 948b97b0876fb06f14786a530688c3efedd8330f Mon Sep 17 00:00:00 2001
From: davlhd
Date: Thu, 4 Jun 2020 08:28:14 +0200
Subject: [PATCH 2/2] Formatting changes

---
 tfjs-converter/src/executor/graph_model.ts | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/tfjs-converter/src/executor/graph_model.ts b/tfjs-converter/src/executor/graph_model.ts
index c640717e75f..9a47af0da1a 100644
--- a/tfjs-converter/src/executor/graph_model.ts
+++ b/tfjs-converter/src/executor/graph_model.ts
@@ -107,7 +107,6 @@ export class GraphModel implements InferenceModel {
    * Loads the model and weight files, construct the in memory weight map and
    * compile the inference graph.
    */
-
   async load(): Promise<boolean> {
     this.findIOHandler();
     if (this.handler.load == null) {
@@ -123,8 +122,8 @@ export class GraphModel implements InferenceModel {
   /**
    * Synchronously construct the in memory weight map and
    * compile the inference graph.
-   @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}
    */
+  /** @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true} */
   loadSync(artifacts:io.ModelArtifacts) {
     this.artifacts = artifacts;
     const graph = this.artifacts.modelTopology as tensorflow.IGraphDef;
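
Usage sketch (not part of the patch): the series splits model loading so that
load() only performs the asynchronous artifact fetch while loadSync() builds
the weight map and compiles the graph. The snippet below shows one way the new
method could be driven from pre-fetched artifacts. The helper names, the URL,
and the dummy input shape are illustrative; tf.io.http and tf.loadGraphModel
are existing TF.js APIs, and constructing a GraphModel directly (instead of via
tf.loadGraphModel) is assumed here only to exercise the synchronous path.

import * as tf from '@tensorflow/tfjs';

// Fetch the model artifacts once, up front, with the standard async HTTP
// IO handler.
async function fetchArtifacts(modelUrl: string): Promise<tf.io.ModelArtifacts> {
  const handler = tf.io.http(modelUrl);
  if (handler.load == null) {
    throw new Error('The IOHandler does not implement `load`.');
  }
  return handler.load();
}

// Later, build the in-memory weight map and compile the inference graph
// synchronously from the cached artifacts, without another await.
// Note: GraphModel instances are normally created via tf.loadGraphModel();
// direct construction here only illustrates the new synchronous path.
function buildModelSync(modelUrl: string,
                        artifacts: tf.io.ModelArtifacts): tf.GraphModel {
  const model = new tf.GraphModel(modelUrl);
  model.loadSync(artifacts);
  return model;
}

async function main() {
  const url = 'https://example.com/model.json';  // placeholder URL
  const artifacts = await fetchArtifacts(url);
  const model = buildModelSync(url, artifacts);
  // Dummy input; the shape depends on the actual model.
  const output = model.predict(tf.zeros([1, 224, 224, 3])) as tf.Tensor;
  output.print();
}

main();

Keeping the fetch in load() and the graph compilation in loadSync() means
callers that already hold artifacts in memory (for example, bundled with an
application) can construct a usable model without awaiting anything.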