Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 8 additions & 8 deletions tfjs-backend-wasm/src/cc/kernels/CropAndResize.cc
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ namespace {
void interpolate_nearest(float* out_buf_ptr, const float* images_buf,
std::vector<int> images_strides, int crop_width,
int image_width, int image_width_m1, int num_channels,
float extrapolation_value, int box_ind, float y_ind,
float extrapolation_value, int box_offset, float y_ind,
float width_scale, float x1, float x2) {
for (int x = 0; x < crop_width; ++x) {
const float x_ind = (crop_width > 1) ? x1 * image_width_m1 + x * width_scale
Expand All @@ -52,7 +52,7 @@ void interpolate_nearest(float* out_buf_ptr, const float* images_buf,
float closest_y = round(y_ind);
for (int c = 0; c < num_channels; ++c) {
const int in_ind = c + closest_x * images_strides[2] +
closest_y * images_strides[1] + box_ind;
closest_y * images_strides[1] + box_offset;
*out_buf_ptr = images_buf[in_ind];
out_buf_ptr++;
}
Expand Down Expand Up @@ -123,7 +123,7 @@ void CropAndResize(int images_id, int boxes_id, int box_ind_id, int num_boxes,
continue;
}

const int box_ind = *box_ind_buf * images_strides[0];
const int box_offset = *box_ind_buf * images_strides[0];

const float height_scale =
(crop_height > 1) ? (y2 - y1) * image_height_m1 / (crop_height - 1) : 0;
Expand Down Expand Up @@ -175,22 +175,22 @@ void CropAndResize(int images_id, int boxes_id, int box_ind_id, int num_boxes,

if (should_memcpy) {
int y_ind_int = y_ind;
images_buf += (y_ind_int * images_strides[1] + box_ind);

memcpy(out_buf_ptr, images_buf, sizeof(float) * crop_width);
int offset = box_offset + y_ind_int * images_strides[1];
memcpy(out_buf_ptr, images_buf + offset,
sizeof(float) * crop_width * num_channels);
continue;
}

if (method == InterpolationMethod::BILINEAR) {
tfjs::wasm::interpolate_bilinear(
out_buf_ptr, images_buf, images_strides, crop_width, image_width,
image_width_m1, image_height_m1, num_channels, should_extrapolate,
extrapolation_value, box_ind, y_ind, width_scale, x1, x2);
extrapolation_value, box_offset, y_ind, width_scale, x1, x2);

} else {
interpolate_nearest(out_buf_ptr, images_buf, images_strides, crop_width,
image_width, image_width_m1, num_channels,
extrapolation_value, box_ind, y_ind, width_scale,
extrapolation_value, box_offset, y_ind, width_scale,
x1, x2);
}
}
Expand Down
4 changes: 3 additions & 1 deletion tfjs-core/benchmarks/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -88,9 +88,11 @@ <h2>TensorFlow.js Model Benchmark</h2>
<script src="https://unpkg.com/@tensorflow/tfjs-layers@latest/dist/tf-layers.js"></script>
<script src="https://unpkg.com/@tensorflow/tfjs-converter@latest/dist/tf-converter.js"></script>
<script src="https://unpkg.com/@tensorflow/tfjs-backend-wasm@latest/dist/tf-backend-wasm.js"></script>
<script src="https://unpkg.com/@tensorflow/tfjs-automl@latest/dist/tf-automl.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/universal-sentence-encoder"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/posenet@2"></script>
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/body-pix@2"></script>

<script src="./modelConfig.js"></script>
<script src="./util.js"></script>
<script>
Expand Down Expand Up @@ -214,7 +216,7 @@ <h2>TensorFlow.js Model Benchmark</h2>
const start = performance.now();
let res = predict(model);
if (res instanceof Promise) {
await res;
res = await res;
}

if (res instanceof tf.Tensor) {
Expand Down
31 changes: 28 additions & 3 deletions tfjs-core/benchmarks/modelConfig.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,8 @@ const sentences = [
const benchmarks = {
'mobilenet_v2': {
load: async () => {
const url = 'https://storage.googleapis.com/learnjs-data/mobilenet_v2_100_fused/model.json';
const url =
'https://storage.googleapis.com/learnjs-data/mobilenet_v2_100_fused/model.json';
return tf.loadGraphModel(url);
},
predictFunc: () => {
Expand All @@ -97,7 +98,8 @@ const benchmarks = {
},
'face_detector': {
load: async () => {
const url = 'https://storage.googleapis.com/learnjs-data/face_detector_front/model.json';
const url =
'https://storage.googleapis.com/learnjs-data/face_detector_front/model.json';
return tf.loadGraphModel(url);
},
predictFunc: () => {
Expand All @@ -107,6 +109,28 @@ const benchmarks = {
};
},
},
'AutoML Image': {
load: async () => {
const url =
'https://storage.googleapis.com/tfjs-testing/tfjs-automl/img_classification/model.json';
return tf.automl.loadImageClassification(url);
},
predictFunc: () => {
const zeros = tf.zeros([224, 224, 3]);
return model => model.classify(zeros);
}
},
'AutoML Object': {
load: async () => {
const url =
'https://storage.googleapis.com/tfjs-testing/tfjs-automl/object_detection/model.json';
return tf.automl.loadObjectDetection(url);
},
predictFunc: () => {
const zeros = tf.zeros([224, 224, 3]);
return model => model.detect(zeros);
}
},
'USE - batchsize 30': {
load: async () => {
return use.load();
Expand Down Expand Up @@ -160,7 +184,8 @@ const benchmarks = {
},
};

const imageBucket = 'https://storage.googleapis.com/tfjs-models/assets/posenet/';
const imageBucket =
'https://storage.googleapis.com/tfjs-models/assets/posenet/';
async function loadImage(imagePath) {
const image = new Image();
const promise = new Promise((resolve, reject) => {
Expand Down
27 changes: 27 additions & 0 deletions tfjs-core/src/ops/image_ops_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,33 @@ describeWithFlags('cropAndResize', ALL_ENVS, () => {
expect(output.dtype).toBe('float32');
expectArraysClose(await output.data(), [2.5]);
});

it('5x5-bilinear, no change in shape', async () => {
  // Cropping the entire image and resizing back to the source dimensions
  // must be an identity operation.
  const source: tf.Tensor4D = tf.ones([1, 5, 5, 3]);
  const fullImageBox: tf.Tensor2D = tf.tensor2d([0, 0, 1, 1], [1, 4]);
  const batchIndex: tf.Tensor1D = tf.tensor1d([0], 'int32');

  const result = tf.image.cropAndResize(
      source, fullImageBox, batchIndex, [5, 5], 'bilinear', 0);

  expect(result.shape).toEqual([1, 5, 5, 3]);
  expect(result.dtype).toBe('float32');
  expectArraysClose(await result.data(), await source.data());
});

it('5x5-bilinear, just a crop, no resize', async () => {
  // A quarter crop of an all-ones image, emitted at its native crop size,
  // should still be all ones.
  const source: tf.Tensor4D = tf.ones([1, 6, 6, 3]);
  const quarterBox: tf.Tensor2D = tf.tensor2d([0.5, 0.5, 1, 1], [1, 4]);
  const batchIndex: tf.Tensor1D = tf.tensor1d([0], 'int32');

  const result = tf.image.cropAndResize(
      source, quarterBox, batchIndex, [3, 3], 'bilinear', 0);

  expect(result.shape).toEqual([1, 3, 3, 3]);
  expect(result.dtype).toBe('float32');
  expectArraysClose(await result.data(), await tf.ones([1, 3, 3, 3]).data());
});

it('1x1-nearest', async () => {
const image: tf.Tensor4D = tf.tensor4d([1, 2, 3, 4], [1, 2, 2, 1]);
const boxes: tf.Tensor2D = tf.tensor2d([0, 0, 1, 1], [1, 4]);
Expand Down
11 changes: 11 additions & 0 deletions tfjs-core/src/ops/resize_bilinear_test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,17 @@ describeWithFlags('resizeBilinear', ALL_ENVS, () => {
await output.data(), [2, 2, 2, 10 / 3, 10 / 3, 10 / 3, 4, 4, 4]);
});

it('5x5-bilinear, no change in shape', async () => {
  // Resizing to the input's own spatial dimensions must leave the
  // tensor's values untouched.
  const source: tf.Tensor4D = tf.ones([1, 5, 5, 3]);

  const alignCorners = false;
  const resized = tf.image.resizeBilinear(source, [5, 5], alignCorners);

  expect(resized.dtype).toBe('float32');
  expect(resized.shape).toEqual([1, 5, 5, 3]);
  expectArraysClose(await resized.data(), await source.data());
});

it('simple alignCorners=true', async () => {
const input = tf.tensor3d([2, 2, 4, 4], [2, 2, 1]);
const output = input.resizeBilinear([3, 3], true);
Expand Down