tflite_web_model_runner.ts
/**
* @license
* Copyright 2021 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/

import {BaseTaskLibrary} from './common';

/** TFLiteWebModelRunner class type. */
export declare interface TFLiteWebModelRunnerClass {
  /**
   * The factory function to create a TFLiteWebModelRunner instance.
   *
   * @param model The path to load the TFLite model from, or the model content
   *     in memory.
   * @param options Available options.
   */
  create(model: string|ArrayBuffer, options: TFLiteWebModelRunnerOptions):
      Promise<TFLiteWebModelRunner>;
}
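
// Illustrative usage sketch (not part of the original declarations): how the
// factory above might be used. It assumes a concrete implementation of
// TFLiteWebModelRunnerClass is provided by the hosting TFLite Web API
// runtime, and the model URL is a placeholder.
async function exampleCreateRunner(runnerClass: TFLiteWebModelRunnerClass):
    Promise<TFLiteWebModelRunner> {
  // The model can be given as a path/URL string or as an in-memory
  // ArrayBuffer holding the .tflite file content.
  return runnerClass.create(
      'https://example.com/model.tflite', {numThreads: 2});
}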

/**
 * The main TFLiteWebModelRunner class interface.
 *
 * It is a wrapper around the TFLite Interpreter. See
 * https://www.tensorflow.org/lite/guide/inference for more info about related
 * concepts.
 */
export declare interface TFLiteWebModelRunner extends BaseTaskLibrary {
  /** Gets model inputs. */
  getInputs(): TFLiteWebModelRunnerTensorInfo[];

  /** Gets model outputs. */
  getOutputs(): TFLiteWebModelRunnerTensorInfo[];

  /**
   * Runs inference.
   *
   * @return Whether the inference is successful or not.
   */
  infer(): boolean;

  /**
   * Gets per-node profiling results.
   *
   * This is only useful when TFLiteWebModelRunnerOptions.enableProfiling is
   * set to true.
   */
  getProfilingResults(): ProfileItem[];

  /**
   * Gets the profiling summary.
   *
   * This is only useful when TFLiteWebModelRunnerOptions.enableProfiling is
   * set to true.
   */
  getProfilingSummary(): string;
}
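
// Illustrative usage sketch (not part of the original declarations): feeding
// an input tensor, running inference, and reading the first output. The
// Float32Array casts assume a model whose input and output tensors are
// float32.
function exampleRunInference(
    runner: TFLiteWebModelRunner, input: Float32Array): Float32Array|null {
  // data() exposes the underlying buffer directly, so writing into it sets
  // the input values for the next call to infer().
  (runner.getInputs()[0].data() as Float32Array).set(input);
  if (!runner.infer()) {
    return null;
  }
  // Copy the output, since the returned view points at the runner's own
  // buffer.
  return new Float32Array(runner.getOutputs()[0].data() as Float32Array);
}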

/** A single per-node profiling entry. */
export declare interface ProfileItem {
  /** The type of the node, e.g. "CONV_2D". */
  nodeType: string;
  /** The name of the node, e.g. "MobilenetV1/MobilenetV1/Conv2d_0/Relu6". */
  nodeName: string;
  /** The execution time (in ms) of the node. */
  nodeExecMs: number;
}

/** Options for TFLiteWebModelRunner. */
export declare interface TFLiteWebModelRunnerOptions {
  /**
   * Number of threads to use when running inference.
   *
   * Defaults to the number of physical CPU cores, or -1 if WASM
   * multi-threading is not supported by the user's browser.
   */
  numThreads?: number;

  /**
   * Whether to enable profiling.
   *
   * Defaults to false. After it is enabled, the profiling results can be
   * retrieved by calling TFLiteWebModelRunner.getProfilingResults or
   * TFLiteWebModelRunner.getProfilingSummary. See their comments for more
   * details.
   */
  enableProfiling?: boolean;

  /**
   * Maximum number of entries that the profiler can keep.
   *
   * Defaults to 1024.
   */
  maxProfilingBufferEntries?: number;
}
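
// Illustrative usage sketch (not part of the original declarations): options
// that enable the profiler (the thread count and buffer size are arbitrary
// example values), plus a helper that prints the per-node timings collected
// after inference has run.
const exampleProfilingOptions: TFLiteWebModelRunnerOptions = {
  numThreads: 4,
  enableProfiling: true,
  maxProfilingBufferEntries: 2048,
};

function examplePrintProfile(runner: TFLiteWebModelRunner): void {
  for (const item of runner.getProfilingResults()) {
    console.log(`${item.nodeType} ${item.nodeName}: ${item.nodeExecMs} ms`);
  }
  console.log(runner.getProfilingSummary());
}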

/** Types of TFLite tensor data. */
export type TFLiteDataType =
    'int8'|'uint8'|'bool'|'int16'|'int32'|'uint32'|'float32'|'float64';

/** Stores metadata for a TFLite tensor. */
export declare interface TFLiteWebModelRunnerTensorInfo {
  /** The id of the tensor (generated by the TFLite runtime). */
  id: number;
  /** The TFLite data type of the tensor. */
  dataType: TFLiteDataType;
  /** The name of the TFLite tensor. */
  name: string;
  /** The shape of the tensor in string form, e.g. "2,3,5". */
  shape: string;
  /** Gets direct access to the underlying data buffer. */
  data(): Int8Array|Uint8Array|Int16Array|Int32Array|Uint32Array|Float32Array
      |Float64Array;
}
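
// Illustrative usage sketch (not part of the original declarations): parsing
// the comma-separated shape string (e.g. "1,224,224,3") into numbers and
// computing a tensor's element count from it.
function exampleTensorShape(info: TFLiteWebModelRunnerTensorInfo): number[] {
  return info.shape.split(',').map(Number);
}

function exampleTensorElementCount(
    info: TFLiteWebModelRunnerTensorInfo): number {
  return exampleTensorShape(info).reduce((count, dim) => count * dim, 1);
}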