Add attention_x and attention_y as output to attention based crop #3470

Merged · 5 commits · Jan 16, 2023
1 change: 1 addition & 0 deletions lib/output.js
@@ -61,6 +61,7 @@ const bitdepthFromColourCount = (colours) => 1 << 31 - Math.clz32(Math.ceil(Math
* `info` contains the output image `format`, `size` (bytes), `width`, `height`,
* `channels` and `premultiplied` (indicating if premultiplication was used).
* When using a crop strategy also contains `cropOffsetLeft` and `cropOffsetTop`.
* When using the attention crop strategy, also contains the centre of the cropped region in the fields `attentionX` and `attentionY`.
* May also contain `textAutofitDpi` (dpi the font was rendered at) if image was created from text.
* @returns {Promise<Object>} - when no callback is provided
* @throws {Error} Invalid parameters
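
For context, a minimal usage sketch of the documented `info` fields when cropping with the attention strategy (the input path and dimensions are illustrative, not taken from this PR):

const sharp = require('sharp');

// Cover-crop using the attention strategy and read back the reported
// crop offset and centre of attention from the info object.
sharp('input.jpg') // illustrative input path
  .resize(320, 320, { fit: 'cover', position: sharp.strategy.attention })
  .toBuffer((err, data, info) => {
    if (err) throw err;
    console.log(info.cropOffsetLeft, info.cropOffsetTop);
    console.log(info.attentionX, info.attentionY); // added by this PR for attention-based crops
  });
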
16 changes: 15 additions & 1 deletion src/pipeline.cc
@@ -456,6 +456,7 @@ class PipelineWorker : public Napi::AsyncWorker {
// Gravity-based crop
int left;
int top;

std::tie(left, top) = sharp::CalculateCrop(
inputWidth, inputHeight, baton->width, baton->height, baton->position);
int width = std::min(inputWidth, baton->width);
@@ -466,16 +467,25 @@
left, top, width, height, nPages, &targetPageHeight)
: image.extract_area(left, top, width, height);
} else {
int attention_x;
int attention_y;

// Attention-based or Entropy-based crop
MultiPageUnsupported(nPages, "Resize strategy");
image = image.tilecache(VImage::option()
->set("access", VIPS_ACCESS_RANDOM)
->set("threaded", TRUE));

image = image.smartcrop(baton->width, baton->height, VImage::option()
->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION));
->set("interesting", baton->position == 16 ? VIPS_INTERESTING_ENTROPY : VIPS_INTERESTING_ATTENTION)
->set("attention_x", &attention_x)
->set("attention_y", &attention_y));
baton->hasCropOffset = true;
baton->cropOffsetLeft = static_cast<int>(image.xoffset());
baton->cropOffsetTop = static_cast<int>(image.yoffset());
baton->hasAttentionCenter = true;
baton->attentionX = static_cast<int>(attention_x * jpegShrinkOnLoad / scale);
baton->attentionY = static_cast<int>(attention_y * jpegShrinkOnLoad / scale);
}
}
}
@@ -1198,6 +1208,10 @@ class PipelineWorker : public Napi::AsyncWorker {
info.Set("cropOffsetLeft", static_cast<int32_t>(baton->cropOffsetLeft));
info.Set("cropOffsetTop", static_cast<int32_t>(baton->cropOffsetTop));
}
if (baton->hasAttentionCenter) {
info.Set("attentionX", static_cast<int32_t>(baton->attentionX));
info.Set("attentionY", static_cast<int32_t>(baton->attentionY));
}
if (baton->trimThreshold > 0.0) {
info.Set("trimOffsetLeft", static_cast<int32_t>(baton->trimOffsetLeft));
info.Set("trimOffsetTop", static_cast<int32_t>(baton->trimOffsetTop));
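
A note on the scaling above: libvips reports `attention_x`/`attention_y` against the working image it smart-cropped, which has already been shrunk on load and resized, so multiplying by `jpegShrinkOnLoad` and dividing by `scale` appears to map the centre back into the coordinate space of the original input. A rough JavaScript sketch of that arithmetic, with illustrative names and numbers (assuming `scale` is the resize factor applied after decode):

// Map an attention coordinate from the resized working image back to the
// original input image, mirroring the C++ expression above.
const attentionToInput = (attentionWorking, jpegShrinkOnLoad, scale) =>
  Math.trunc((attentionWorking * jpegShrinkOnLoad) / scale);

// e.g. a centre at x = 85 in the working image, with shrink-on-load 4 and a
// post-decode resize factor of 0.58, maps to x = 586 in the original image.
console.log(attentionToInput(85, 4, 0.58)); // 586
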
6 changes: 6 additions & 0 deletions src/pipeline.h
@@ -74,6 +74,9 @@ struct PipelineBaton {
bool hasCropOffset;
int cropOffsetLeft;
int cropOffsetTop;
bool hasAttentionCenter;
int attentionX;
int attentionY;
bool premultiplied;
bool tileCentre;
bool fastShrinkOnLoad;
@@ -236,6 +239,9 @@ struct PipelineBaton {
hasCropOffset(false),
cropOffsetLeft(0),
cropOffsetTop(0),
hasAttentionCenter(false),
attentionX(0),
attentionY(0),
premultiplied(false),
tintA(128.0),
tintB(128.0),
Binary file added test/fixtures/expected/crop-strategy.webp
24 changes: 24 additions & 0 deletions test/unit/resize-cover.js
@@ -376,6 +376,8 @@ describe('Resize fit=cover', function () {
assert.strictEqual(320, info.height);
assert.strictEqual(-107, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
assert.strictEqual(588, info.attentionX);
assert.strictEqual(640, info.attentionY);
fixtures.assertSimilar(fixtures.expected('crop-strategy-attention.jpg'), data, done);
});
});
@@ -394,10 +396,32 @@ describe('Resize fit=cover', function () {
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(0, info.cropOffsetTop);
assert.strictEqual(0, info.attentionX);
assert.strictEqual(0, info.attentionY);
fixtures.assertSimilar(fixtures.expected('crop-strategy.png'), data, done);
});
});

it('WebP', function (done) {
sharp(fixtures.inputWebP)
.resize(320, 80, {
fit: 'cover',
position: sharp.strategy.attention
})
.toBuffer(function (err, data, info) {
if (err) throw err;
assert.strictEqual('webp', info.format);
assert.strictEqual(3, info.channels);
assert.strictEqual(320, info.width);
assert.strictEqual(80, info.height);
assert.strictEqual(0, info.cropOffsetLeft);
assert.strictEqual(-161, info.cropOffsetTop);
assert.strictEqual(288, info.attentionX);
assert.strictEqual(745, info.attentionY);
fixtures.assertSimilar(fixtures.expected('crop-strategy.webp'), data, done);
});
});

it('supports the strategy passed as a string', function (done) {
sharp(fixtures.inputPngWithTransparency)
.resize(320, 80, {