
Commit

Merge 6253b2e into 5fd68dd
exogen committed Jun 12, 2019
2 parents 5fd68dd + 6253b2e commit 60d169e
Showing 3 changed files with 102 additions and 23 deletions.
7 changes: 7 additions & 0 deletions demo/pages/index.js
@@ -1,4 +1,5 @@
import baseFetch from "isomorphic-unfetch";
import Link from "next/link";

/**
 * Return a `fetch` implementation and a function that will return the full HAR
@@ -170,6 +171,12 @@ export default function DemoPage({ harData }) {
          </a>
        </div>
      ) : null}
      <p>
        <Link href="/">
          <a>Send requests in browser</a>
        </Link>{" "}
        for comparison
      </p>
      <pre>{JSON.stringify(harData, null, 2)}</pre>
    </main>
  );
90 changes: 67 additions & 23 deletions index.js
@@ -5,9 +5,19 @@ const querystring = require("querystring");
const generateId = require("nanoid");
const cookie = require("cookie");
const setCookie = require("set-cookie-parser");
const {
  name: packageName,
  version: packageVersion
} = require("./package.json");

const headerName = "x-har-request-id";

function getDuration(a, b) {
  const seconds = b[0] - a[0];
  const nanoseconds = b[1] - a[1];
  return seconds * 1000 + nanoseconds / 1e6;
}

function handleRequest(harEntryMap, request, options) {
  if (!options || typeof options !== "object") {
    throw new Error("Unsupported Node.js Agent implementation");
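Aside (not part of the diff): the new getDuration helper turns a pair of process.hrtime() tuples into a millisecond duration, which is how the send/wait/receive timings later in this file are computed. A minimal sketch of the arithmetic, with made-up values:

// Illustration only – mirrors the getDuration math above.
const start = [5000, 200000000]; // a process.hrtime() tuple: [seconds, nanoseconds]
const sent = [5000, 700000000];  // a later process.hrtime() tuple
const seconds = sent[0] - start[0];     // 0
const nanoseconds = sent[1] - start[1]; // 500000000
const milliseconds = seconds * 1000 + nanoseconds / 1e6; // 500 ms
console.log(milliseconds);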
@@ -30,13 +40,13 @@ function handleRequest(harEntryMap, request, options) {
  }

  const now = Date.now();
  const startTime = process.hrtime();
  const url = new URL(options.url || options.href); // Depends on Node version?

  const entry = {
    _parent: parentEntry,
    _timestamps: {
      start: now,
      sent: now
      start: startTime
    },
    startedDateTime: new Date(now).toISOString(),
    cache: {
@@ -66,10 +76,6 @@ function handleRequest(harEntryMap, request, options) {
      })),
      headersSize: -1,
      bodySize: -1
    },
    response: {
      headersSize: -1,
      bodySize: 0
    }
  };

@@ -109,7 +115,7 @@ function handleRequest(harEntryMap, request, options) {

  if (requestBody != null) {
    // Works for both buffers and strings.
    entry.request.bodySize = requestBody.length;
    entry.request.bodySize = Buffer.byteLength(requestBody);

    let mimeType;
    for (const name in headers) {
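Aside (not part of the diff): switching from requestBody.length to Buffer.byteLength(requestBody) matters when the body contains multi-byte characters, where the character count understates the bytes actually sent. For example:

// Illustration only – character count vs. byte count for a UTF-8 body.
const requestBody = '{"name":"café"}';
console.log(requestBody.length);             // 15 characters
console.log(Buffer.byteLength(requestBody)); // 16 bytes – "é" is two bytes in UTF-8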
@@ -135,12 +141,16 @@ function handleRequest(harEntryMap, request, options) {
    return _end.call(this, ...args);
  };

  request.on("finish", () => {
    entry._timestamps.sent = process.hrtime();
  });

  request.on("response", response => {
    entry._timestamps.firstByte = Date.now();
    entry._timestamps.firstByte = process.hrtime();
    harEntryMap.set(requestId, entry);
    const httpVersion = `HTTP/${response.httpVersion}`;

    // Populate request info that isn't available until now.
    const httpVersion = `HTTP/${response.httpVersion}`;
    entry.request.httpVersion = httpVersion;

    entry.response = {
@@ -157,6 +167,21 @@
      headersSize: -1,
      bodySize: -1
    };

    // Detect supported compression encodings.
    const compressed = /^(gzip|compress|deflate|br)$/.test(
      response.headers["content-encoding"]
    );

    if (compressed) {
      entry._compressed = true;
      response.on("data", chunk => {
        if (entry.response.bodySize === -1) {
          entry.response.bodySize = 0;
        }
        entry.response.bodySize += Buffer.byteLength(chunk);
      });
    }
  });
}
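Aside (not part of the diff): for compressed responses, bodySize is now accumulated from the raw, still-encoded chunks read off the socket, while content.size (set further down) records the decoded text length; content.compression is simply their difference. A rough sketch of that relationship, with hypothetical numbers:

// Illustration only – how the HAR response fields relate for a gzip/deflate/br response.
const contentSize = 10000; // entry.response.content.size – decoded body length in bytes
const bodySize = 2500;     // entry.response.bodySize – encoded bytes summed from "data" chunks
const compression = contentSize - bodySize; // entry.response.content.compression === 7500
console.log(compression);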

@@ -356,14 +381,19 @@ function withHar(baseFetch, defaults = {}) {
      const text = await response.text();

      const { _timestamps: time } = entry;
      time.received = Date.now();
      time.received = process.hrtime();

      const parents = [];
      let parent = entry._parent;
      while (parent) {
        parents.unshift(parent);
        parent = parent._parent;
      }
      let child = entry;
      do {
        let parent = child._parent;
        // Remove linked parent references as they're flattened.
        delete child._parent;
        if (parent) {
          parents.unshift(parent);
        }
        child = parent;
      } while (child);

      // In some versions of `node-fetch`, the returned `response` is actually
      // an instance of `Body`, not `Response`, and the `Body` class does not
@@ -395,16 +425,30 @@ function withHar(baseFetch, defaults = {}) {
      // Allow grouping by pages.
      entry.pageref = harPageRef || "page_1";
      parents.forEach(parent => {
        parent.pageref = harPageRef || "page_1";
        parent.pageref = entry.pageref;
      });
      // Response content info.
      const bodySize = Buffer.byteLength(text);
      entry.response.content.text = text;
      entry.response.content.size = text.length;
      entry.response.bodySize = text.length;
      entry.response.content.size = bodySize;
      if (entry._compressed) {
        if (entry.response.bodySize !== -1) {
          entry.response.content.compression =
            entry.response.content.size - entry.response.bodySize;
        }
      } else {
        entry.response.bodySize = bodySize;
      }
      // Finalize timing info.
      entry.timings.send = time.sent - time.start;
      entry.timings.wait = time.firstByte - time.sent;
      entry.timings.receive = time.received - time.firstByte;
      if (time.sent == null) {
        time.sent = time.start;
      }
      entry.timings.send = getDuration(time.start, time.sent);
      entry.timings.wait = Math.max(
        getDuration(time.sent, time.firstByte),
        0
      );
      entry.timings.receive = getDuration(time.firstByte, time.received);
      entry.time =
        entry.timings.blocked +
        entry.timings.send +
@@ -439,8 +483,8 @@ function createHarLog(entries = [], pageInfo = {}) {
    log: {
      version: "1.2",
      creator: {
        name: "node-fetch-har",
        version: "0.4"
        name: packageName,
        version: packageVersion
      },
      pages: [
        Object.assign(
28 changes: 28 additions & 0 deletions test.js
@@ -417,6 +417,34 @@ fragment TypeRef on __Type {
      ]
    });
  });

  it("supports compression savings detection (gzip)", async () => {
    const fetch = withHar(baseFetch);
    const response = await fetch("https://postman-echo.com/gzip");
    const body = await response.json();
    expect(body.gzipped).toBe(true);
    expect(response.harEntry.response.bodySize).toBeLessThan(
      response.harEntry.response.content.size
    );
    expect(response.harEntry.response.content.compression).toBe(
      response.harEntry.response.content.size -
        response.harEntry.response.bodySize
    );
  });

  it("supports compression savings detection (deflate)", async () => {
    const fetch = withHar(baseFetch);
    const response = await fetch("https://postman-echo.com/deflate");
    const body = await response.json();
    expect(body.deflated).toBe(true);
    expect(response.harEntry.response.bodySize).toBeLessThan(
      response.harEntry.response.content.size
    );
    expect(response.harEntry.response.content.compression).toBe(
      response.harEntry.response.content.size -
        response.harEntry.response.bodySize
    );
  });
});

it("reports entries with the onHarEntry option", async () => {
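Aside (not part of the diff): the new tests above exercise the wrapper end to end against postman-echo.com. A minimal usage sketch, assuming withHar and createHarLog are the module's exports (the import style and names are illustrative, not confirmed by this diff):

const baseFetch = require("isomorphic-unfetch");
const { withHar, createHarLog } = require("node-fetch-har"); // assumed export names

async function main() {
  const fetch = withHar(baseFetch);
  const response = await fetch("https://postman-echo.com/gzip");
  // The wrapper attaches the HAR entry for this request to the response.
  const { harEntry } = response;
  console.log(harEntry.timings, harEntry.response.bodySize);
  // Entries can be collected into a complete HAR log.
  const har = createHarLog([harEntry]);
  console.log(JSON.stringify(har, null, 2));
}

main();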
