🐛 FIX: Only auto decompress response stream on args.compressed=true (#…
fengmk2 committed Oct 15, 2022
1 parent d3f6809 commit d5270f1
Showing 5 changed files with 140 additions and 4 deletions.
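In short, compressed: true now controls two things: whether the request carries an accept-encoding: gzip, br header, and whether a streamed response body (streaming: true or writeStream) is decompressed automatically. A minimal usage sketch of the new behavior, assuming the urllib v3 API exercised by the tests below (the URL is a placeholder):

import urllib from 'urllib';

// Opt in: urllib sends `accept-encoding: gzip, br` and hands back an
// already-decompressed stream in `response.res`.
const decoded = await urllib.request('https://example.com/resource.txt', {
  streaming: true,
  compressed: true,
});

// Default after this commit: no accept-encoding header is sent, the raw
// stream keeps its content-encoding, and the caller decides how to decode it.
const raw = await urllib.request('https://example.com/resource.txt', {
  streaming: true,
});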
5 changes: 3 additions & 2 deletions src/HttpClient.ts
@@ -462,7 +462,8 @@ export class HttpClient extends EventEmitter {
timing,
socket: socketInfo,
};
if (isCompressedContent) {
// only auto decompress on request args.compressed = true
if (args.compressed === true && isCompressedContent) {
// gzip or br
const decoder = contentEncoding === 'gzip' ? createGunzip() : createBrotliDecompress();
responseBodyStream = Object.assign(pipeline(response.body, decoder, noop), meta);
@@ -472,7 +473,7 @@ export class HttpClient extends EventEmitter {
} else if (args.writeStream) {
// streaming mode will disable retry
args.retry = 0;
if (isCompressedContent) {
if (args.compressed === true && isCompressedContent) {
const decoder = contentEncoding === 'gzip' ? createGunzip() : createBrotliDecompress();
await pipelinePromise(response.body, decoder, args.writeStream);
} else {
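The hunks above gate the existing decompression path behind args.compressed === true. For reference, a self-contained sketch of the pattern that path uses, built only from Node's zlib and stream modules (decodeBody is an illustrative helper, not part of urllib):

import { createGunzip, createBrotliDecompress } from 'zlib';
import { pipeline, Readable } from 'stream';

// Mirrors the diff: pick a decoder from the content-encoding value
// ('gzip' or 'br') and pipe the raw body through it. pipeline() returns
// the last stream, so callers read decoded bytes from the return value.
function decodeBody(body: Readable, contentEncoding: string) {
  const decoder = contentEncoding === 'gzip' ? createGunzip() : createBrotliDecompress();
  return pipeline(body, decoder, err => {
    if (err) body.destroy(err);
  });
}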
33 changes: 33 additions & 0 deletions test/index.test.ts
@@ -133,6 +133,26 @@ describe('index.test.ts', () => {
message: 'mock 400 bad request',
});

mockPool.intercept({
path: '/bar',
method: 'GET',
query: {
q: '1',
},
}).reply(200, {
message: 'mock bar with q=1',
});

mockPool.intercept({
path: '/bar',
method: 'GET',
query: {
q: '2',
},
}).reply(200, {
message: 'mock bar with q=2',
});

mockPool.intercept({
path: /\.tgz$/,
method: 'GET',
@@ -147,6 +167,19 @@ describe('index.test.ts', () => {
assert.equal(response.status, 400);
assert.deepEqual(response.data, { message: 'mock 400 bad request' });

response = await urllib.request(`${_url}bar?q=1`, {
method: 'GET',
dataType: 'json',
});
assert.equal(response.status, 200);
assert.deepEqual(response.data, { message: 'mock bar with q=1' });
response = await urllib.request(`${_url}bar?q=2`, {
method: 'GET',
dataType: 'json',
});
assert.equal(response.status, 200);
assert.deepEqual(response.data, { message: 'mock bar with q=2' });

response = await urllib.request(`${_url}download/foo.tgz`, {
method: 'GET',
dataType: 'json',
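The new intercepts above match on query strings, so the same path can be mocked with different responses per query. For context, a sketch of how a mockPool like this is usually created with undici's MockAgent (the origin and the suite's actual setup are assumptions; that code is outside this diff):

import { MockAgent, setGlobalDispatcher } from 'undici';

// Route all requests through a mock agent so no real network calls are made.
const mockAgent = new MockAgent();
mockAgent.disableNetConnect();
setGlobalDispatcher(mockAgent);
// Intercepts registered on this pool only apply to the given origin.
const mockPool = mockAgent.get('http://example.com');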
13 changes: 13 additions & 0 deletions test/options.compressed.test.ts
@@ -29,6 +29,19 @@ describe('options.compressed.test.ts', () => {
await cleanup();
});

it('should default compressed = false', async () => {
const response = await urllib.request(`${_url}brotli`, {
dataType: 'text',
});
assert.equal(response.status, 200);
assert.equal(response.headers['content-encoding'], 'br');
// console.log(response.headers);
const requestHeaders = JSON.parse(response.headers['x-request-headers'] as string);
assert(!requestHeaders['accept-encoding'],
`should not contains accept-encoding header: ${requestHeaders['accept-encoding']}`);
assert.match(response.data, /export async function startServer/);
});

it('should deflate content when server accept brotli', async () => {
const response = await urllib.request(`${_url}brotli`, {
dataType: 'text',
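The new "should default compressed = false" test asserts against an x-request-headers response header, so the fixture server must echo the incoming request headers and serve a brotli-encoded body regardless of accept-encoding. A rough sketch of such an endpoint (the real test/fixtures/server implementation is not shown in this diff, so the details here are assumptions):

import { createServer } from 'http';
import { createReadStream } from 'fs';
import { createBrotliCompress } from 'zlib';

// Hypothetical fixture: always brotli-compresses the payload and echoes the
// request headers so tests can assert what the client actually sent.
createServer((req, res) => {
  res.setHeader('x-request-headers', JSON.stringify(req.headers));
  res.setHeader('content-encoding', 'br');
  createReadStream('./test/fixtures/server.ts').pipe(createBrotliCompress()).pipe(res);
}).listen(7001);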
62 changes: 61 additions & 1 deletion test/options.streaming.test.ts
@@ -1,6 +1,7 @@
import { describe, it, beforeAll, afterAll } from 'vitest';
import { strict as assert } from 'assert';
import { isReadable, Readable } from 'stream';
import { isReadable, Readable, pipeline } from 'stream';
import { createBrotliDecompress } from 'zlib';
import urllib from '../src';
import { startServer } from './fixtures/server';
import { readableToBytes } from './utils';
@@ -39,6 +40,65 @@ describe('options.streaming.test.ts', () => {
assert.equal(data.requestBody, '');
});

it('should work on streaming=true and compressed=true/false', async () => {
let response = await urllib.request(`${_url}brotli`, {
streaming: true,
compressed: true,
});
assert.equal(response.status, 200);
assert.equal(response.headers['content-encoding'], 'br');
// console.log(response.headers);
let requestHeaders = JSON.parse(response.headers['x-request-headers'] as string);
assert.equal(requestHeaders['accept-encoding'], 'gzip, br');
assert.equal(response.data, null);
// console.log(response.res);
// response.res stream is decompressed
isReadable && assert(isReadable(response.res as any));
let bytes = await readableToBytes(response.res as Readable);
let data = bytes.toString();
assert.match(data, /export async function startServer/);

response = await urllib.request(`${_url}brotli`, {
streaming: true,
// compressed: false,
});
assert.equal(response.status, 200);
assert.equal(response.headers['content-encoding'], 'br');
// console.log(response.headers);
requestHeaders = JSON.parse(response.headers['x-request-headers'] as string);
assert(!requestHeaders['accept-encoding'],
`should not contains accept-encoding header: ${requestHeaders['accept-encoding']}`);
assert.equal(response.data, null);
// console.log(response.res);
// response.res stream is not decompressed
isReadable && assert(isReadable(response.res as any));
let decoder = createBrotliDecompress();
bytes = await readableToBytes(pipeline(response.res as Readable, decoder, () => {}));
data = bytes.toString();
assert.match(data, /export async function startServer/);

response = await urllib.request(`${_url}brotli`, {
streaming: true,
compressed: false,
headers: {
'accept-encoding': 'gzip, deflate, br',
},
});
assert.equal(response.status, 200);
assert.equal(response.headers['content-encoding'], 'br');
// console.log(response.headers);
requestHeaders = JSON.parse(response.headers['x-request-headers'] as string);
assert.equal(requestHeaders['accept-encoding'], 'gzip, deflate, br');
assert.equal(response.data, null);
// console.log(response.res);
// response.res stream is not decompressed
isReadable && assert(isReadable(response.res as any));
decoder = createBrotliDecompress();
bytes = await readableToBytes(pipeline(response.res as Readable, decoder, () => {}));
data = bytes.toString();
assert.match(data, /export async function startServer/);
});

it('should get big streaming response', async () => {
const response = await urllib.request(`${_url}mock-bytes?size=1024102400`, {
streaming: true,
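The streaming assertions above drain the raw stream with readableToBytes, which is imported from ./utils but not included in this diff. A plausible implementation, for reference (an assumption about the helper, not its actual source):

import { Readable } from 'stream';

// Drains a readable stream and concatenates its chunks into a single Buffer.
export async function readableToBytes(stream: Readable): Promise<Buffer> {
  const chunks: Buffer[] = [];
  for await (const chunk of stream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks);
}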
31 changes: 30 additions & 1 deletion test/options.writeStream.test.ts
@@ -2,7 +2,8 @@ import { describe, it, beforeAll, afterAll, beforeEach, afterEach } from 'vitest';
import { strict as assert } from 'assert';
import { createWriteStream } from 'fs';
import { join } from 'path';
import { stat } from 'fs/promises';
import { gunzipSync } from 'zlib';
import { stat, readFile } from 'fs/promises';
import urllib from '../src';
import { startServer } from './fixtures/server';
import { createTempfile, sleep } from './utils';
@@ -44,6 +45,34 @@ describe('options.writeStream.test.ts', () => {
assert.equal(stats.size, 1024123);
});

it('should work with compressed=true/false', async () => {
let writeStream = createWriteStream(tmpfile);
let response = await urllib.request(`${_url}gzip`, {
writeStream,
compressed: true,
});
assert.equal(response.status, 200);
assert.equal(response.headers['content-type'], undefined);
assert.equal(response.data, null);
// console.log(response.headers);
// writeStream is decompressed
let data = await readFile(tmpfile, 'utf-8');
assert.match(data, /export async function startServer/);

writeStream = createWriteStream(tmpfile);
response = await urllib.request(`${_url}gzip`, {
writeStream,
compressed: false,
});
assert.equal(response.status, 200);
assert.equal(response.headers['content-type'], undefined);
assert.equal(response.data, null);
// console.log(response.headers);
// writeStream is not decompressed
data = gunzipSync(await readFile(tmpfile)).toString();
assert.match(data, /export async function startServer/);
});

it('should close writeStream when request timeout', async () => {
const writeStream = createWriteStream(tmpfile);
assert.equal(writeStream.destroyed, false);
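As the second half of the compressed=true/false test shows, leaving compressed off now writes the still-gzipped bytes to disk, which the test then verifies with gunzipSync. For larger downloads, a streaming follow-up step avoids buffering the whole file in memory; a sketch under the same assumption of a server that compresses regardless of accept-encoding (URL and paths are placeholders):

import { createReadStream, createWriteStream } from 'fs';
import { createGunzip } from 'zlib';
import { pipeline } from 'stream/promises';
import urllib from 'urllib';

// Step 1: download the raw, still-gzipped bytes to disk (compressed is unset).
await urllib.request('http://localhost:7001/gzip', {
  writeStream: createWriteStream('/tmp/payload.gz'),
});
// Step 2: decompress without holding the whole file in memory.
await pipeline(
  createReadStream('/tmp/payload.gz'),
  createGunzip(),
  createWriteStream('/tmp/payload.txt'),
);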
