diff --git a/build/build-treemap.js b/build/build-treemap.js
index bbc1e9f45b6b..c614dd8541ec 100644
--- a/build/build-treemap.js
+++ b/build/build-treemap.js
@@ -61,10 +61,12 @@ async function run() {
       fs.readFileSync(require.resolve('tabulator-tables/dist/js/modules/sort.js'), 'utf8'),
       fs.readFileSync(require.resolve('tabulator-tables/dist/js/modules/format.js'), 'utf8'),
       fs.readFileSync(require.resolve('tabulator-tables/dist/js/modules/resize_columns.js'), 'utf8'),
+      fs.readFileSync(require.resolve('pako/dist/pako_inflate.js'), 'utf-8'),
       /* eslint-enable max-len */
       buildStrings(),
       {path: '../../lighthouse-core/report/html/renderer/logger.js'},
       {path: '../../lighthouse-core/report/html/renderer/i18n.js'},
+      {path: '../../lighthouse-core/report/html/renderer/text-encoding.js'},
       {path: '../../lighthouse-viewer/app/src/drag-and-drop.js'},
       {path: '../../lighthouse-viewer/app/src/github-api.js'},
       {path: '../../lighthouse-viewer/app/src/firebase-auth.js'},
diff --git a/lighthouse-core/report/html/html-report-assets.js b/lighthouse-core/report/html/html-report-assets.js
index 32863013a395..af2ef7c8afcb 100644
--- a/lighthouse-core/report/html/html-report-assets.js
+++ b/lighthouse-core/report/html/html-report-assets.js
@@ -23,6 +23,7 @@ const REPORT_JAVASCRIPT = [
   fs.readFileSync(__dirname + '/renderer/pwa-category-renderer.js', 'utf8'),
   fs.readFileSync(__dirname + '/renderer/report-renderer.js', 'utf8'),
   fs.readFileSync(__dirname + '/renderer/i18n.js', 'utf8'),
+  fs.readFileSync(__dirname + '/renderer/text-encoding.js', 'utf8'),
 ].join(';\n');
 const REPORT_CSS = fs.readFileSync(__dirname + '/report-styles.css', 'utf8');
 const REPORT_TEMPLATES = fs.readFileSync(__dirname + '/templates.html', 'utf8');
diff --git a/lighthouse-core/report/html/renderer/psi.js b/lighthouse-core/report/html/renderer/psi.js
index f0b3561e5f7c..de6248bcc5e2 100644
--- a/lighthouse-core/report/html/renderer/psi.js
+++ b/lighthouse-core/report/html/renderer/psi.js
@@ -119,7 +119,7 @@ function prepareLabData(LHResult, document) {
       container: reportEl.querySelector('.lh-audit-group--metrics'),
       text: Util.i18n.strings.viewTreemapLabel,
       icon: 'treemap',
-      onClick: () => ReportUIFeatures.openTreemap(lhResult, 'url'),
+      onClick: () => ReportUIFeatures.openTreemap(lhResult),
     });
   }
 };
diff --git a/lighthouse-core/report/html/renderer/report-ui-features.js b/lighthouse-core/report/html/renderer/report-ui-features.js
index 04c9f3d7e688..7b229fbdadfb 100644
--- a/lighthouse-core/report/html/renderer/report-ui-features.js
+++ b/lighthouse-core/report/html/renderer/report-ui-features.js
@@ -23,7 +23,7 @@
  * the report.
  */
 
-/* globals getFilenamePrefix Util ElementScreenshotRenderer */
+/* globals getFilenamePrefix Util TextEncoding ElementScreenshotRenderer */
 
 /** @typedef {import('./dom')} DOM */
 
@@ -157,8 +157,7 @@ class ReportUIFeatures {
       this.addButton({
         text: Util.i18n.strings.viewTreemapLabel,
         icon: 'treemap',
-        onClick: () => ReportUIFeatures.openTreemap(
-          this.json, this._dom.isDevTools() ? 'url' : 'postMessage'),
+        onClick: () => ReportUIFeatures.openTreemap(this.json),
       });
     }
 
@@ -535,27 +534,34 @@
   }
 
   /**
-   * Opens a new tab to the online viewer and sends the local page's JSON results
-   * to the online viewer using postMessage.
+   * The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly.
    * @param {LH.Result} json
    * @protected
    */
-  static openTabAndSendJsonReportToViewer(json) {
-    // The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly
+  static computeWindowNameSuffix(json) {
     // @ts-ignore - If this is a v2 LHR, use old `generatedTime`.
     const fallbackFetchTime = /** @type {string} */ (json.generatedTime);
     const fetchTime = json.fetchTime || fallbackFetchTime;
-    const windowName = `${json.lighthouseVersion}-${json.requestedUrl}-${fetchTime}`;
+    return `${json.lighthouseVersion}-${json.requestedUrl}-${fetchTime}`;
+  }
+
+  /**
+   * Opens a new tab to the online viewer and sends the local page's JSON results
+   * to the online viewer using postMessage.
+   * @param {LH.Result} json
+   * @protected
+   */
+  static openTabAndSendJsonReportToViewer(json) {
+    const windowName = 'viewer-' + this.computeWindowNameSuffix(json);
     const url = getAppsOrigin() + '/viewer/';
     ReportUIFeatures.openTabAndSendData({lhr: json}, url, windowName);
   }
 
   /**
-   * Opens a new tab to the treemap app and sends the JSON results using postMessage.
+   * Opens a new tab to the treemap app and sends the JSON results using URL.fragment
    * @param {LH.Result} json
-   * @param {'postMessage'|'url'} method
    */
-  static openTreemap(json, method = 'postMessage') {
+  static openTreemap(json) {
     const treemapData = json.audits['script-treemap-data'].details;
     if (!treemapData) {
       throw new Error('no script treemap data found');
@@ -575,13 +581,9 @@
       },
     };
     const url = getAppsOrigin() + '/treemap/';
-    const windowName = `treemap-${json.requestedUrl}`;
+    const windowName = 'treemap-' + this.computeWindowNameSuffix(json);
 
-    if (method === 'postMessage') {
-      ReportUIFeatures.openTabAndSendData(treemapOptions, url, windowName);
-    } else {
-      ReportUIFeatures.openTabWithUrlData(treemapOptions, url, windowName);
-    }
+    ReportUIFeatures.openTabWithUrlData(treemapOptions, url, windowName);
   }
 
   /**
@@ -607,7 +609,6 @@
       }
     });
 
-    // The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly
     const popup = window.open(url, windowName);
   }
 
@@ -618,23 +619,14 @@
    * @param {string} windowName
    * @protected
    */
-  static openTabWithUrlData(data, url_, windowName) {
+  static async openTabWithUrlData(data, url_, windowName) {
     const url = new URL(url_);
-    url.hash = toBinary(JSON.stringify(data));
-
-    // The popup's window.name is keyed by version+url+fetchTime, so we reuse/select tabs correctly
+    const gzip = Boolean(window.CompressionStream);
+    url.hash = await TextEncoding.toBase64(JSON.stringify(data), {
+      gzip,
+    });
+    if (gzip) url.searchParams.set('gzip', '1');
     window.open(url.toString(), windowName);
-
-    /**
-     * @param {string} string
-     */
-    function toBinary(string) {
-      const codeUnits = new Uint16Array(string.length);
-      for (let i = 0; i < codeUnits.length; i++) {
-        codeUnits[i] = string.charCodeAt(i);
-      }
-      return btoa(String.fromCharCode(...new Uint8Array(codeUnits.buffer)));
-    }
   }
 
   /**
diff --git a/lighthouse-core/report/html/renderer/text-encoding.js b/lighthouse-core/report/html/renderer/text-encoding.js
new file mode 100644
index 000000000000..4386eff0fd55
--- /dev/null
+++ b/lighthouse-core/report/html/renderer/text-encoding.js
@@ -0,0 +1,78 @@
+/**
+ * @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+ */
+'use strict';
+
+/* global self btoa atob window CompressionStream Response */
+
+const btoa_ = typeof btoa !== 'undefined' ?
+  btoa :
+  /** @param {string} str */
+  (str) => Buffer.from(str).toString('base64');
+const atob_ = typeof atob !== 'undefined' ?
+  atob :
+  /** @param {string} str */
+  (str) => Buffer.from(str, 'base64').toString();
+
+/**
+ * Takes an UTF-8 string and returns a base64 encoded string.
+ * If gzip is true, the UTF-8 bytes are gzipped before base64'd, using
+ * CompressionStream (currently only in Chrome), falling back to pako
+ * (which is only used to encode in our Node tests).
+ * @param {string} string
+ * @param {{gzip: boolean}} options
+ * @return {Promise<string>}
+ */
+async function toBase64(string, options) {
+  let bytes = new TextEncoder().encode(string);
+
+  if (options.gzip) {
+    if (typeof CompressionStream !== 'undefined') {
+      const cs = new CompressionStream('gzip');
+      const writer = cs.writable.getWriter();
+      writer.write(bytes);
+      writer.close();
+      const compAb = await new Response(cs.readable).arrayBuffer();
+      bytes = new Uint8Array(compAb);
+    } else {
+      /** @type {import('pako')=} */
+      const pako = window.pako;
+      bytes = pako.gzip(string);
+    }
+  }
+
+  let binaryString = '';
+  // This is ~25% faster than building the string one character at a time.
+  // https://jsbench.me/2gkoxazvjl
+  const chunkSize = 5000;
+  for (let i = 0; i < bytes.length; i += chunkSize) {
+    binaryString += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
+  }
+  return btoa_(binaryString);
+}
+
+/**
+ * @param {string} encoded
+ * @param {{gzip: boolean}} options
+ * @return {string}
+ */
+function fromBase64(encoded, options) {
+  const binaryString = atob_(encoded);
+  const bytes = Uint8Array.from(binaryString, c => c.charCodeAt(0));
+
+  if (options.gzip) {
+    /** @type {import('pako')=} */
+    const pako = window.pako;
+    return pako.ungzip(bytes, {to: 'string'});
+  } else {
+    return new TextDecoder().decode(bytes);
+  }
+}
+
+if (typeof module !== 'undefined' && module.exports) {
+  module.exports = {toBase64, fromBase64};
+} else {
+  self.TextEncoding = {toBase64, fromBase64};
+}
diff --git a/lighthouse-core/test/report/html/renderer/text-encoding-test.js b/lighthouse-core/test/report/html/renderer/text-encoding-test.js
new file mode 100644
index 000000000000..493e1b26b28c
--- /dev/null
+++ b/lighthouse-core/test/report/html/renderer/text-encoding-test.js
@@ -0,0 +1,41 @@
+/**
+ * @license Copyright 2021 The Lighthouse Authors. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+ */
+'use strict';
+
+const TextEncoding = require('../../../../report/html/renderer/text-encoding.js');
+
+/* eslint-env jest */
+
+describe('TextEncoding', () => {
+  beforeAll(() => {
+    global.window = {pako: require('pako')};
+  });
+
+  afterAll(() => {
+    global.window = undefined;
+  });
+
+  /** @param {string} str */
+  async function test(str) {
+    for (const gzip of [false, true]) {
+      const binary = await TextEncoding.toBase64(str, {gzip});
+      const roundtrip = TextEncoding.fromBase64(binary, {gzip});
+      expect(roundtrip.length).toEqual(str.length);
+      expect(roundtrip).toEqual(str);
+    }
+  }
+
+  it('works', async () => {
+    await test('');
+    await test('hello');
+    await test('😃');
+    await test('{åß∂œ∑´}');
+    await test('Some examples of emoji are 😃, 🧘🏻‍♂️, 🌍, 🍞, 🚗, 📞, 🎉, ♥️, 🏆, and 🏁.');
+    await test('.'.repeat(125183));
+    await test('😃'.repeat(125183));
+    await test(JSON.stringify(require('../../../../../lighthouse-treemap/app/debug.json')));
+  });
+});
diff --git a/lighthouse-treemap/app/src/main.js b/lighthouse-treemap/app/src/main.js
index 6b9e9e67f0f7..ede9f55bd55c 100644
--- a/lighthouse-treemap/app/src/main.js
+++ b/lighthouse-treemap/app/src/main.js
@@ -9,7 +9,7 @@
 
 /* eslint-env browser */
 
-/* globals I18n webtreemap strings TreemapUtil Tabulator Cell Row DragAndDrop Logger GithubApi */
+/* globals I18n webtreemap strings TreemapUtil TextEncoding Tabulator Cell Row DragAndDrop Logger GithubApi */
 
 const DUPLICATED_MODULES_IGNORE_THRESHOLD = 1024;
 const DUPLICATED_MODULES_IGNORE_ROOT_RATIO = 0.01;
@@ -884,22 +884,13 @@
   }
 }
 
-/**
- * @param {string} encoded
- */
-function fromBinary(encoded) {
-  const binary = atob(encoded);
-  const bytes = new Uint8Array(binary.length);
-  for (let i = 0; i < bytes.length; i++) {
-    bytes[i] = binary.charCodeAt(i);
-  }
-  return String.fromCharCode(...new Uint16Array(bytes.buffer));
-}
-
 async function main() {
   const app = new LighthouseTreemap();
   const queryParams = new URLSearchParams(window.location.search);
-  const hashParams = location.hash ? JSON.parse(fromBinary(location.hash.substr(1))) : {};
+  const gzip = queryParams.get('gzip') === '1';
+  const hashParams = location.hash ?
+    JSON.parse(TextEncoding.fromBase64(location.hash.substr(1), {gzip})) :
+    {};
   /** @type {Record<string, any>} */
   const params = {
     ...Object.fromEntries(queryParams.entries()),
@@ -912,6 +903,11 @@
   } else if ('debug' in params) {
     const response = await fetch('debug.json');
     app.init(await response.json());
+  } else if (params.lhr) {
+    const options = {
+      lhr: params.lhr,
+    };
+    app.init(options);
   } else if (params.gist) {
     let json;
     let options;
@@ -923,6 +919,7 @@
     }
     if (options) app.init(options);
   } else {
+    // TODO: remove for v8.
     window.addEventListener('message', e => {
       if (e.source !== self.opener) return;
 
@@ -938,6 +935,7 @@
     });
   }
 
+  // TODO: remove for v8.
   // If the page was opened as a popup, tell the opening window we're ready.
   if (self.opener && !self.opener.closed) {
     self.opener.postMessage({opened: true}, '*');
diff --git a/lighthouse-treemap/test/treemap-test-pptr.js b/lighthouse-treemap/test/treemap-test-pptr.js
index f386807aea41..1a9ae72f7558 100644
--- a/lighthouse-treemap/test/treemap-test-pptr.js
+++ b/lighthouse-treemap/test/treemap-test-pptr.js
@@ -9,6 +9,7 @@
 
 /* global document, window */
 
+const fs = require('fs');
 const puppeteer = require('puppeteer');
 const {server} = require('../../lighthouse-cli/test/fixtures/static-server.js');
 const portNumber = 10200;
@@ -42,7 +43,11 @@
   });
 
   beforeEach(async () => {
-    if (!browser) browser = await puppeteer.launch({headless: true});
+    if (!browser) {
+      browser = await puppeteer.launch({
+        headless: true,
+      });
+    }
     page = await browser.newPage();
     page.on('pageerror', pageError => pageErrors.push(pageError));
   });
@@ -66,6 +71,7 @@
       expect(options.lhr.requestedUrl).toBe(debugOptions.lhr.requestedUrl);
     });
 
+    // TODO: remove for v8
     async function loadFromPostMessage(options) {
       const openerPage = await browser.newPage();
       await openerPage.evaluate((treemapUrl, options) => {
@@ -87,16 +93,52 @@
 
     it('from window postMessage', async () => {
       await loadFromPostMessage(debugOptions);
-      const options = await page.evaluate(() => window.__treemapOptions);
-      expect(options.lhr.requestedUrl).toBe(debugOptions.lhr.requestedUrl);
+      const optionsInPage = await page.evaluate(() => window.__treemapOptions);
+      expect(optionsInPage.lhr.requestedUrl).toBe(debugOptions.lhr.requestedUrl);
     });
 
     it('handles errors', async () => {
      await loadFromPostMessage({});
-      const options = await page.evaluate(() => window.__treemapOptions);
-      expect(options).toBeUndefined();
+      const optionsInPage = await page.evaluate(() => window.__treemapOptions);
+      expect(optionsInPage).toBeUndefined();
       const error = await page.evaluate(() => document.querySelector('#lh-log').textContent);
       expect(error).toBe('Error: Invalid options');
     });
+
+    it('from encoded fragment (gzip)', async () => {
+      const options = JSON.parse(JSON.stringify(debugOptions));
+      options.lhr.requestedUrl += '😃😃😃';
+      const json = JSON.stringify(options);
+      const encoded = await page.evaluate(`
+        ${fs.readFileSync(
+          require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')}
+        TextEncoding.toBase64(${JSON.stringify(json)}, {gzip: true});
+      `);
+
+      await page.goto(`${treemapUrl}?gzip=1#${encoded}`);
+      await page.waitForFunction(
+        () => window.__treemapOptions || document.body.textContent.startsWith('Error'));
+
+      const optionsInPage = await page.evaluate(() => window.__treemapOptions);
+      expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl);
+    });
+
+    it('from encoded fragment (no gzip)', async () => {
+      const options = JSON.parse(JSON.stringify(debugOptions));
+      options.lhr.requestedUrl += '😃😃😃';
+      const json = JSON.stringify(options);
+      const encoded = await page.evaluate(`
+        ${fs.readFileSync(
+          require.resolve('../../lighthouse-core/report/html/renderer/text-encoding.js'), 'utf-8')}
+        TextEncoding.toBase64(${JSON.stringify(json)}, {gzip: false});
+      `);
+
+      await page.goto(`${treemapUrl}#${encoded}`);
+      await page.waitForFunction(
+        () => window.__treemapOptions || document.body.textContent.startsWith('Error'));
+
+      const optionsInPage = await page.evaluate(() => window.__treemapOptions);
+      expect(optionsInPage.lhr.requestedUrl).toBe(options.lhr.requestedUrl);
+    });
   });
 });
diff --git a/lighthouse-treemap/types/treemap.d.ts b/lighthouse-treemap/types/treemap.d.ts
index 392a543c10f5..4c44d77b31f0 100644
--- a/lighthouse-treemap/types/treemap.d.ts
+++ b/lighthouse-treemap/types/treemap.d.ts
@@ -1,4 +1,5 @@
 import _TreemapUtil = require('../app/src/util.js');
+import _TextEncoding = require('../../lighthouse-core/report/html/renderer/text-encoding.js');
 import _DragAndDrop = require('../../lighthouse-viewer/app/src/drag-and-drop.js');
 import _FirebaseAuth = require('../../lighthouse-viewer/app/src/firebase-auth.js');
 import _GithubApi = require('../../lighthouse-viewer/app/src/github-api.js');
@@ -36,6 +37,7 @@ declare global {
     sort(data: any): void;
   };
   var TreemapUtil: typeof _TreemapUtil;
+  var TextEncoding: typeof _TextEncoding;
   var Logger: typeof _Logger;
   var DragAndDrop: typeof _DragAndDrop;
   var GithubApi: typeof _GithubApi;
diff --git a/package.json b/package.json
index eaa958fa73e7..4d7a46836512 100644
--- a/package.json
+++ b/package.json
@@ -107,6 +107,7 @@
     "@types/lodash.isequal": "^4.5.2",
     "@types/lodash.set": "^4.3.6",
     "@types/node": "*",
+    "@types/pako": "^1.0.1",
     "@types/raven": "^2.5.1",
     "@types/resize-observer-browser": "^0.1.1",
     "@types/semver": "^5.5.0",
@@ -148,6 +149,7 @@
     "node-fetch": "^2.6.1",
     "npm-run-posix-or-windows": "^2.0.2",
     "package-json-versionify": "^1.0.4",
+    "pako": "^2.0.3",
     "prettier": "^1.14.3",
     "pretty-json-stringify": "^0.0.2",
     "puppeteer": "^9.1.1",
diff --git a/types/html-renderer.d.ts b/types/html-renderer.d.ts
index daf4c75ed4ff..4d03fef5e601 100644
--- a/types/html-renderer.d.ts
+++ b/types/html-renderer.d.ts
@@ -16,6 +16,7 @@ import _PwaCategoryRenderer = require('../lighthouse-core/report/html/renderer/pwa-category-renderer.js');
 import _ReportRenderer = require('../lighthouse-core/report/html/renderer/report-renderer.js');
 import _ReportUIFeatures = require('../lighthouse-core/report/html/renderer/report-ui-features.js');
 import _Util = require('../lighthouse-core/report/html/renderer/util.js');
+import _TextEncoding = require('../lighthouse-core/report/html/renderer/text-encoding.js');
 import _prepareLabData = require('../lighthouse-core/report/html/renderer/psi.js');
 import _FileNamer = require('../lighthouse-core/lib/file-namer.js');
 
@@ -33,7 +34,16 @@
   var ReportRenderer: typeof _ReportRenderer;
   var ReportUIFeatures: typeof _ReportUIFeatures;
   var Util: typeof _Util;
+  var TextEncoding: typeof _TextEncoding;
   var prepareLabData: typeof _prepareLabData;
+  var CompressionStream: {
+    prototype: CompressionStream,
+    new (format: string): CompressionStream,
+  };
+
+  interface CompressionStream extends GenericTransformStream {
+    readonly format: string;
+  }
 
   interface Window {
     CategoryRenderer: typeof _CategoryRenderer;
diff --git a/yarn.lock b/yarn.lock
index 5a697c4cebd2..8cd37b3f8a4a 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -878,6 +878,11 @@
   resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e"
   integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA==
 
+"@types/pako@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/@types/pako/-/pako-1.0.1.tgz#33b237f3c9aff44d0f82fe63acffa4a365ef4a61"
+  integrity sha512-GdZbRSJ3Cv5fiwT6I0SQ3ckeN2PWNqxd26W9Z2fCK1tGrrasGy4puvNFtnddqH9UJFMQYXxEuuB7B8UK+LLwSg==
+
 "@types/prettier@^2.0.0":
   version "2.2.3"
   resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.2.3.tgz#ef65165aea2924c9359205bf748865b8881753c0"
@@ -6140,6 +6145,11 @@ package-json@^6.3.0:
     registry-url "^5.0.0"
     semver "^6.2.0"
 
+pako@^2.0.3:
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/pako/-/pako-2.0.3.tgz#cdf475e31b678565251406de9e759196a0ea7a43"
+  integrity sha512-WjR1hOeg+kki3ZIOjaf4b5WVcay1jaliKSYiEaB1XzwhMQZJxRdQRv0V31EKBYlxb4T7SK3hjfc/jxyU64BoSw==
+
 pako@~1.0.5:
   version "1.0.8"
   resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.8.tgz#6844890aab9c635af868ad5fecc62e8acbba3ea4"