From f2f78ef7e3aea4990582ee3e68e733fdda807e25 Mon Sep 17 00:00:00 2001 From: Evan Wallace Date: Tue, 13 Dec 2022 22:51:46 -0500 Subject: [PATCH] add external imports to metafile fix #905 fix #1933 fix #1939 --- CHANGELOG.md | 73 +++++++++++++++++++ internal/ast/ast.go | 3 + internal/bundler/bundler.go | 11 +++ internal/bundler/linker.go | 54 +++++++++++--- .../bundler/snapshots/snapshots_loader.txt | 65 ++++++++++++++++- internal/css_printer/css_printer.go | 26 ++++++- internal/js_printer/js_printer.go | 34 ++++++--- lib/shared/types.ts | 2 + scripts/js-api-tests.js | 15 +++- 9 files changed, 254 insertions(+), 29 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 721c52d305a..f48531fc89c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,79 @@ * Leading and trailing `.` such as `0.` and `.0` * Numbers with a space after the `-` such as `- 1` +* Add external imports to metafile ([#905](https://github.com/evanw/esbuild/issues/905), [#1933](https://github.com/evanw/esbuild/issues/1933), [#1939](https://github.com/evanw/esbuild/issues/1939)) + + External imports now appear in `imports` arrays in the metafile (which is present when bundling with `metafile: true`) next to normal imports, but additionally have `external: true` to set them apart. This applies both to files in the `inputs` section and the `outputs` section. Here's an example: + + ```diff + { + "inputs": { + "style.css": { + "bytes": 83, + "imports": [ + + { + + "path": "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css", + + "kind": "import-rule", + + "external": true + + } + ] + }, + "app.js": { + "bytes": 100, + "imports": [ + + { + + "path": "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.min.js", + + "kind": "import-statement", + + "external": true + + }, + { + "path": "style.css", + "kind": "import-statement" + } + ] + } + }, + "outputs": { + "out/app.js": { + "imports": [ + + { + + "path": "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.min.js", + + "kind": "require-call", + + "external": true + + } + ], + "exports": [], + "entryPoint": "app.js", + "cssBundle": "out/app.css", + "inputs": { + "app.js": { + "bytesInOutput": 113 + }, + "style.css": { + "bytesInOutput": 0 + } + }, + "bytes": 528 + }, + "out/app.css": { + "imports": [ + + { + + "path": "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/css/bootstrap.min.css", + + "kind": "import-rule", + + "external": true + + } + ], + "inputs": { + "style.css": { + "bytesInOutput": 0 + } + }, + "bytes": 100 + } + } + } + ``` + ## 0.16.5 * Make it easy to exclude all packages from a bundle ([#1958](https://github.com/evanw/esbuild/issues/1958), [#1975](https://github.com/evanw/esbuild/issues/1975), [#2164](https://github.com/evanw/esbuild/issues/2164), [#2246](https://github.com/evanw/esbuild/issues/2246), [#2542](https://github.com/evanw/esbuild/issues/2542)) diff --git a/internal/ast/ast.go b/internal/ast/ast.go index 1f79bf78a1c..831a1e48b9f 100644 --- a/internal/ast/ast.go +++ b/internal/ast/ast.go @@ -116,6 +116,9 @@ const ( // If true, "assert { type: 'json' }" was present AssertTypeJSON + + // If true, do not generate "external": true in the metafile + ShouldNotBeExternalInMetafile ) func (flags ImportRecordFlags) Has(flag ImportRecordFlags) bool { diff --git a/internal/bundler/bundler.go b/internal/bundler/bundler.go index 7288b4ea090..cbdb9d59386 100644 --- a/internal/bundler/bundler.go +++ b/internal/bundler/bundler.go @@ -1819,6 +1819,17 @@ func (s *scanner) processScannedFiles(entryPointMeta 
[]graph.EntryPoint) []scann // Skip this import record if the previous resolver call failed resolveResult := result.resolveResults[importRecordIndex] if resolveResult == nil || !record.SourceIndex.IsValid() { + if s.options.NeedsMetafile { + if isFirstImport { + isFirstImport = false + sb.WriteString("\n ") + } else { + sb.WriteString(",\n ") + } + sb.WriteString(fmt.Sprintf("{\n \"path\": %s,\n \"kind\": %s,\n \"external\": true\n }", + helpers.QuoteForJSON(record.Path.Text, s.options.ASCIIOnly), + helpers.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly))) + } continue } diff --git a/internal/bundler/linker.go b/internal/bundler/linker.go index 60ae5ef5a3c..da01fc11298 100644 --- a/internal/bundler/linker.go +++ b/internal/bundler/linker.go @@ -874,6 +874,7 @@ func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) { otherChunkIndex := c.graph.Files[record.SourceIndex.GetIndex()].EntryPointChunkIndex record.Path.Text = chunks[otherChunkIndex].uniqueKey record.SourceIndex = ast.Index32{} + record.Flags |= ast.ShouldNotBeExternalInMetafile // Track this cross-chunk dynamic import so we make sure to // include its hash when we're calculating the hashes of all @@ -1228,6 +1229,7 @@ func (c *linkerContext) scanImportsAndExports() { record.Path.Text = otherRepr.AST.URLForCSS record.Path.Namespace = "" record.SourceIndex = ast.Index32{} + record.Flags |= ast.ShouldNotBeExternalInMetafile // Copy the additional files to the output directory additionalFiles = append(additionalFiles, otherFile.InputFile.AdditionalFiles...) @@ -1238,6 +1240,7 @@ func (c *linkerContext) scanImportsAndExports() { record.Path.Text = otherRepr.URLForCode record.Path.Namespace = "" record.CopySourceIndex = ast.Index32{} + record.Flags |= ast.ShouldNotBeExternalInMetafile // Copy the additional files to the output directory additionalFiles = append(additionalFiles, otherFile.InputFile.AdditionalFiles...) @@ -1255,6 +1258,7 @@ func (c *linkerContext) scanImportsAndExports() { record.Path.Text = otherRepr.URLForCode record.Path.Namespace = "" record.CopySourceIndex = ast.Index32{} + record.Flags |= ast.ShouldNotBeExternalInMetafile // Copy the additional files to the output directory additionalFiles = append(additionalFiles, otherFile.InputFile.AdditionalFiles...) 
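The linker hunks above set `ast.ShouldNotBeExternalInMetafile` on every import record whose path gets rewritten to point at something the bundle itself produces (cross-chunk imports, CSS/JS URL rewrites, copied files), so only records that genuinely stayed external end up reported with `"external": true`. Below is a minimal, self-contained sketch of that gating logic; the types, names, and helper here (`importRecord`, `metafileImportJSON`, the flag constant) are simplified stand-ins for illustration only, not the real esbuild internals — the real logic lives in `recordImportPathForMetafile` and `printPath` further down in this patch.

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// Hypothetical stand-ins for ast.ImportRecord and its flag bits; the names
// mirror the patch, but these simplified types are not the real ones.
type importRecordFlags uint8

const shouldNotBeExternalInMetafile importRecordFlags = 1 << 0

type importRecord struct {
	Path  string
	Kind  string // "import-statement", "require-call", "import-rule", "url-token", ...
	Flags importRecordFlags
}

// metafileImportJSON builds one entry of an "imports" array, the same way the
// printers append to jsonMetadataImports: a record the linker did NOT mark
// with shouldNotBeExternalInMetafile is assumed to have stayed external, so
// it gets an extra "external": true field.
func metafileImportJSON(record importRecord) string {
	external := ""
	if (record.Flags & shouldNotBeExternalInMetafile) == 0 {
		external = ",\n        \"external\": true"
	}
	path, _ := json.Marshal(record.Path) // stand-in for helpers.QuoteForJSON
	kind, _ := json.Marshal(record.Kind)
	return fmt.Sprintf("\n      {\n        \"path\": %s,\n        \"kind\": %s%s\n      }",
		path, kind, external)
}

func main() {
	records := []importRecord{
		// Never resolved to a source file, so the linker left the flag unset:
		// reported with "external": true.
		{Path: "extern-esm", Kind: "import-statement"},
		// Rewritten by the linker to point at another generated chunk, so the
		// flag is set and "external" is omitted.
		{Path: "out/chunk-3MN5TIYV.js", Kind: "import-statement", Flags: shouldNotBeExternalInMetafile},
	}
	var entries []string
	for _, r := range records {
		entries = append(entries, metafileImportJSON(r))
	}
	fmt.Printf("{\n  \"imports\": [%s\n  ]\n}\n", strings.Join(entries, ","))
}
```
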
@@ -4209,6 +4213,7 @@ func (c *linkerContext) generateCodeForFileInChunkJS( LineOffsetTables: lineOffsetTables, RequireOrImportMetaForSource: c.requireOrImportMetaForSource, MangledProps: c.mangledProps, + NeedsMetafile: c.options.NeedsMetafile, } tree := repr.AST tree.Directive = "" // This is handled elsewhere @@ -4826,6 +4831,7 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun // Also generate the cross-chunk binding code var crossChunkPrefix []byte var crossChunkSuffix []byte + var jsonMetadataImports []string { // Indent the file if everything is wrapped in an IIFE indent := 0 @@ -4838,18 +4844,22 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun MinifyIdentifiers: c.options.MinifyIdentifiers, MinifyWhitespace: c.options.MinifyWhitespace, MinifySyntax: c.options.MinifySyntax, + NeedsMetafile: c.options.NeedsMetafile, } crossChunkImportRecords := make([]ast.ImportRecord, len(chunk.crossChunkImports)) for i, chunkImport := range chunk.crossChunkImports { crossChunkImportRecords[i] = ast.ImportRecord{ - Kind: chunkImport.importKind, - Path: logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey}, + Kind: chunkImport.importKind, + Path: logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey}, + Flags: ast.ShouldNotBeExternalInMetafile, } } - crossChunkPrefix = js_printer.Print(js_ast.AST{ + crossChunkResult := js_printer.Print(js_ast.AST{ ImportRecords: crossChunkImportRecords, Parts: []js_ast.Part{{Stmts: chunkRepr.crossChunkPrefixStmts}}, - }, c.graph.Symbols, r, printOptions).JS + }, c.graph.Symbols, r, printOptions) + crossChunkPrefix = crossChunkResult.JS + jsonMetadataImports = crossChunkResult.JSONMetadataImports crossChunkSuffix = js_printer.Print(js_ast.AST{ Parts: []js_ast.Part{{Stmts: chunkRepr.crossChunkSuffixStmts}}, }, c.graph.Symbols, r, printOptions).JS @@ -4949,15 +4959,23 @@ func (c *linkerContext) generateChunkJS(chunks []chunkInfo, chunkIndex int, chun // Print imports isFirstMeta := true jMeta.AddString("{\n \"imports\": [") - for _, chunkImport := range chunk.crossChunkImports { + for _, json := range jsonMetadataImports { if isFirstMeta { isFirstMeta = false } else { jMeta.AddString(",") } - jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s,\n \"kind\": %s\n }", - helpers.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey, Namespace: "file"}), c.options.ASCIIOnly), - helpers.QuoteForJSON(chunkImport.importKind.StringForMetafile(), c.options.ASCIIOnly))) + jMeta.AddString(json) + } + for _, compileResult := range compileResults { + for _, json := range compileResult.JSONMetadataImports { + if isFirstMeta { + isFirstMeta = false + } else { + jMeta.AddString(",") + } + jMeta.AddString(json) + } } if !isFirstMeta { jMeta.AddString("\n ") @@ -5330,6 +5348,7 @@ func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chu AddSourceMappings: addSourceMappings, InputSourceMap: inputSourceMap, LineOffsetTables: lineOffsetTables, + NeedsMetafile: c.options.NeedsMetafile, } compileResult.PrintResult = css_printer.Print(asts[i], cssOptions) compileResult.sourceIndex = sourceIndex @@ -5352,6 +5371,7 @@ func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chu } // Generate any prefix rules now + var jsonMetadataImports []string { tree := css_ast.AST{} @@ -5383,7 +5403,9 @@ func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chu result := css_printer.Print(tree, css_printer.Options{ MinifyWhitespace: 
c.options.MinifyWhitespace, ASCIIOnly: c.options.ASCIIOnly, + NeedsMetafile: c.options.NeedsMetafile, }) + jsonMetadataImports = result.JSONMetadataImports if len(result.CSS) > 0 { prevOffset.AdvanceBytes(result.CSS) j.AddBytes(result.CSS) @@ -5397,15 +5419,23 @@ func (c *linkerContext) generateChunkCSS(chunks []chunkInfo, chunkIndex int, chu if c.options.NeedsMetafile { isFirstMeta := true jMeta.AddString("{\n \"imports\": [") - for _, chunkImport := range chunk.crossChunkImports { + for _, json := range jsonMetadataImports { if isFirstMeta { isFirstMeta = false } else { jMeta.AddString(",") } - jMeta.AddString(fmt.Sprintf("\n {\n \"path\": %s,\n \"kind\": %s\n }", - helpers.QuoteForJSON(c.res.PrettyPath(logger.Path{Text: chunks[chunkImport.chunkIndex].uniqueKey, Namespace: "file"}), c.options.ASCIIOnly), - helpers.QuoteForJSON(chunkImport.importKind.StringForMetafile(), c.options.ASCIIOnly))) + jMeta.AddString(json) + } + for _, compileResult := range compileResults { + for _, json := range compileResult.JSONMetadataImports { + if isFirstMeta { + isFirstMeta = false + } else { + jMeta.AddString(",") + } + jMeta.AddString(json) + } } if !isFirstMeta { jMeta.AddString("\n ") diff --git a/internal/bundler/snapshots/snapshots_loader.txt b/internal/bundler/snapshots/snapshots_loader.txt index 0c66095aba1..e48312953e5 100644 --- a/internal/bundler/snapshots/snapshots_loader.txt +++ b/internal/bundler/snapshots/snapshots_loader.txt @@ -1087,6 +1087,11 @@ d { "project/entry.js": { "bytes": 333, "imports": [ + { + "path": "extern-esm", + "kind": "import-statement", + "external": true + }, { "path": "project/esm.js", "kind": "import-statement" @@ -1103,6 +1108,11 @@ d { "path": "project/copy.copy", "kind": "import-statement" }, + { + "path": "extern-cjs", + "kind": "require-call", + "external": true + }, { "path": "project/cjs.js", "kind": "require-call" @@ -1120,6 +1130,11 @@ d { "project/entry.css": { "bytes": 180, "imports": [ + { + "path": "extern.css", + "kind": "import-rule", + "external": true + }, { "path": "project/inline.svg", "kind": "url-token" @@ -1131,6 +1146,11 @@ d { { "path": "project/copy.copy", "kind": "url-token" + }, + { + "path": "extern.png", + "kind": "url-token", + "external": true } ] } @@ -1159,12 +1179,26 @@ d { "out/entry.js": { "imports": [ { - "path": "out/dynamic-4QVDQQPM.js", - "kind": "dynamic-import" + "path": "out/chunk-3MN5TIYV.js", + "kind": "import-statement" }, { - "path": "out/chunk-3MN5TIYV.js", + "path": "extern-esm", + "kind": "import-statement", + "external": true + }, + { + "path": "out/copy-O3Y5SCJE.copy", "kind": "import-statement" + }, + { + "path": "extern-cjs", + "kind": "require-call", + "external": true + }, + { + "path": "out/dynamic-4QVDQQPM.js", + "kind": "dynamic-import" } ], "exports": [ @@ -1218,7 +1252,30 @@ d { "bytes": 38 }, "out/entry.css": { - "imports": [], + "imports": [ + { + "path": "extern.css", + "kind": "import-rule", + "external": true + }, + { + "path": "data:image/svg+xml,", + "kind": "url-token" + }, + { + "path": "out/file-NVISQQTV.file", + "kind": "url-token" + }, + { + "path": "out/copy-O3Y5SCJE.copy", + "kind": "url-token" + }, + { + "path": "extern.png", + "kind": "url-token", + "external": true + } + ], "entryPoint": "project/entry.css", "inputs": { "project/entry.css": { diff --git a/internal/css_printer/css_printer.go b/internal/css_printer/css_printer.go index 7ac83c0b76f..86523f600a2 100644 --- a/internal/css_printer/css_printer.go +++ b/internal/css_printer/css_printer.go @@ -21,6 +21,7 @@ type printer struct { 
importRecords []ast.ImportRecord css []byte extractedLegalComments map[string]bool + jsonMetadataImports []string builder sourcemap.ChunkBuilder } @@ -39,12 +40,18 @@ type Options struct { SourceMap config.SourceMap AddSourceMappings bool LegalComments config.LegalComments + NeedsMetafile bool } type PrintResult struct { CSS []byte ExtractedLegalComments map[string]bool - SourceMapChunk sourcemap.Chunk + JSONMetadataImports []string + + // This source map chunk just contains the VLQ-encoded offsets for the "CSS" + // field above. It's not a full source map. The bundler will be joining many + // source map chunks together to form the final source map. + SourceMapChunk sourcemap.Chunk } func Print(tree css_ast.AST, options Options) PrintResult { @@ -59,6 +66,7 @@ func Print(tree css_ast.AST, options Options) PrintResult { result := PrintResult{ CSS: p.css, ExtractedLegalComments: p.extractedLegalComments, + JSONMetadataImports: p.jsonMetadataImports, } if options.SourceMap != config.SourceMapNone { // This is expensive. Only do this if it's necessary. For example, skipping @@ -69,6 +77,20 @@ func Print(tree css_ast.AST, options Options) PrintResult { return result } +func (p *printer) recordImportPathForMetafile(importRecordIndex uint32) { + if p.options.NeedsMetafile { + record := p.importRecords[importRecordIndex] + external := "" + if (record.Flags & ast.ShouldNotBeExternalInMetafile) == 0 { + external = ",\n \"external\": true" + } + p.jsonMetadataImports = append(p.jsonMetadataImports, fmt.Sprintf("\n {\n \"path\": %s,\n \"kind\": %s%s\n }", + helpers.QuoteForJSON(record.Path.Text, p.options.ASCIIOnly), + helpers.QuoteForJSON(record.Kind.StringForMetafile(), p.options.ASCIIOnly), + external)) + } +} + func (p *printer) printRule(rule css_ast.Rule, indent int32, omitTrailingSemicolon bool) { if r, ok := rule.Data.(*css_ast.RComment); ok { switch p.options.LegalComments { @@ -110,6 +132,7 @@ func (p *printer) printRule(rule css_ast.Rule, indent int32, omitTrailingSemicol p.print("@import ") } p.printQuoted(p.importRecords[r.ImportRecordIndex].Path.Text) + p.recordImportPathForMetafile(r.ImportRecordIndex) p.printTokens(r.ImportConditions, printTokensOpts{}) p.print(";") @@ -783,6 +806,7 @@ func (p *printer) printTokens(tokens []css_ast.Token, opts printTokensOpts) bool p.print("url(") p.printQuotedWithQuote(text, bestQuoteCharForString(text, true)) p.print(")") + p.recordImportPathForMetafile(t.ImportRecordIndex) default: p.print(t.Text) diff --git a/internal/js_printer/js_printer.go b/internal/js_printer/js_printer.go index 1cbacbd3010..94aaaf694eb 100644 --- a/internal/js_printer/js_printer.go +++ b/internal/js_printer/js_printer.go @@ -336,6 +336,7 @@ type printer struct { callTarget js_ast.E extractedLegalComments map[string]bool js []byte + jsonMetadataImports []string options Options builder sourcemap.ChunkBuilder stmtStart int @@ -1121,8 +1122,7 @@ func (p *printer) printRequireOrImportExpr( } p.print("(") - p.addSourceMapping(record.Range.Loc) - p.printQuotedUTF8(record.Path.Text, true /* allowBacktick */) + p.printPath(importRecordIndex, ast.ImportRequire) p.print(")") // Finish the call to "__toESM()" @@ -1138,11 +1138,13 @@ func (p *printer) printRequireOrImportExpr( } // External "import()" + kind := ast.ImportDynamic if !p.options.UnsupportedFeatures.Has(compat.DynamicImport) { p.printSpaceBeforeIdentifier() p.print("import(") defer p.print(")") } else { + kind = ast.ImportRequire p.printSpaceBeforeIdentifier() p.print("Promise.resolve()") p.printDotThenPrefix() @@ -1183,7 
+1185,7 @@ func (p *printer) printRequireOrImportExpr( p.printIndent() } p.addSourceMapping(record.Range.Loc) - p.printQuotedUTF8(record.Path.Text, true /* allowBacktick */) + p.printPath(importRecordIndex, kind) if !p.options.UnsupportedFeatures.Has(compat.DynamicImport) { p.printImportCallAssertions(record.Assertions) } @@ -1925,7 +1927,7 @@ func (p *printer) printExpr(expr js_ast.Expr, level js_ast.L, flags printExprFla p.printSpaceBeforeIdentifier() p.addSourceMapping(expr.Loc) p.print("require.resolve(") - p.printQuotedUTF8(p.importRecords[e.ImportRecordIndex].Path.Text, true /* allowBacktick */) + p.printPath(e.ImportRecordIndex, ast.ImportRequireResolve) p.print(")") if wrap { p.print(")") @@ -3104,17 +3106,28 @@ func (p *printer) printIndentedComment(text string) { } } -func (p *printer) printPath(importRecordIndex uint32) { +func (p *printer) printPath(importRecordIndex uint32, importKind ast.ImportKind) { record := p.importRecords[importRecordIndex] p.addSourceMapping(record.Range.Loc) p.printQuotedUTF8(record.Path.Text, false /* allowBacktick */) + if p.options.NeedsMetafile { + external := "" + if (record.Flags & ast.ShouldNotBeExternalInMetafile) == 0 { + external = ",\n \"external\": true" + } + p.jsonMetadataImports = append(p.jsonMetadataImports, fmt.Sprintf("\n {\n \"path\": %s,\n \"kind\": %s%s\n }", + helpers.QuoteForJSON(record.Path.Text, p.options.ASCIIOnly), + helpers.QuoteForJSON(importKind.StringForMetafile(), p.options.ASCIIOnly), + external)) + } + // Just omit import assertions if they aren't supported if p.options.UnsupportedFeatures.Has(compat.ImportAssertions) { return } - if record.Assertions != nil { + if record.Assertions != nil && importKind == ast.ImportStmt { p.printSpace() p.print("assert") p.printSpace() @@ -3311,7 +3324,7 @@ func (p *printer) printStmt(stmt js_ast.Stmt, flags printStmtFlags) { } p.print("from") p.printSpace() - p.printPath(s.ImportRecordIndex) + p.printPath(s.ImportRecordIndex, ast.ImportStmt) p.printSemicolonAfterStatement() case *js_ast.SExportClause: @@ -3406,7 +3419,7 @@ func (p *printer) printStmt(stmt js_ast.Stmt, flags printStmtFlags) { p.printSpace() p.print("from") p.printSpace() - p.printPath(s.ImportRecordIndex) + p.printPath(s.ImportRecordIndex, ast.ImportStmt) p.printSemicolonAfterStatement() case *js_ast.SLocal: @@ -3736,7 +3749,7 @@ func (p *printer) printStmt(stmt js_ast.Stmt, flags printStmtFlags) { p.printSpace() } - p.printPath(s.ImportRecordIndex) + p.printPath(s.ImportRecordIndex, ast.ImportStmt) p.printSemicolonAfterStatement() case *js_ast.SBlock: @@ -3875,6 +3888,7 @@ type Options struct { LegalComments config.LegalComments SourceMap config.SourceMap AddSourceMappings bool + NeedsMetafile bool } type RequireOrImportMeta struct { @@ -3889,6 +3903,7 @@ type RequireOrImportMeta struct { type PrintResult struct { JS []byte ExtractedLegalComments map[string]bool + JSONMetadataImports []string // This source map chunk just contains the VLQ-encoded offsets for the "JS" // field above. It's not a full source map. 
The bundler will be joining many @@ -3935,6 +3950,7 @@ func Print(tree js_ast.AST, symbols js_ast.SymbolMap, r renamer.Renamer, options result := PrintResult{ JS: p.js, + JSONMetadataImports: p.jsonMetadataImports, ExtractedLegalComments: p.extractedLegalComments, } if options.SourceMap != config.SourceMapNone { diff --git a/lib/shared/types.ts b/lib/shared/types.ts index 858aa871d6c..574aea58eb3 100644 --- a/lib/shared/types.ts +++ b/lib/shared/types.ts @@ -439,6 +439,7 @@ export interface Metafile { imports: { path: string kind: ImportKind + external?: boolean }[] } } @@ -453,6 +454,7 @@ export interface Metafile { imports: { path: string kind: ImportKind + external?: boolean }[] exports: string[] entryPoint?: string diff --git a/scripts/js-api-tests.js b/scripts/js-api-tests.js index 61daed0862d..d30728bbe30 100644 --- a/scripts/js-api-tests.js +++ b/scripts/js-api-tests.js @@ -1228,9 +1228,9 @@ body { ]) assert.deepStrictEqual(json.outputs[outEntry].imports, [ + { path: outChunk, kind: 'import-statement' }, { path: outImport1, kind: 'dynamic-import' }, { path: outImport2, kind: 'dynamic-import' }, - { path: outChunk, kind: 'import-statement' }, ]) assert.deepStrictEqual(json.outputs[outImport1].imports, [{ path: outChunk, kind: 'import-statement' }]) assert.deepStrictEqual(json.outputs[outImport2].imports, [{ path: outChunk, kind: 'import-statement' }]) @@ -1434,7 +1434,13 @@ body { // Check inputs assert.deepStrictEqual(json, { inputs: { - [makePath(entry)]: { bytes: 98, imports: [{ path: makePath(imported), kind: 'import-rule' }] }, + [makePath(entry)]: { + bytes: 98, + imports: [ + { path: makePath(imported), kind: 'import-rule' }, + { external: true, kind: 'url-token', path: 'https://example.com/external.png' }, + ] + }, [makePath(image)]: { bytes: 8, imports: [] }, [makePath(imported)]: { bytes: 48, imports: [{ path: makePath(image), kind: 'url-token' }] }, }, @@ -1442,7 +1448,10 @@ body { [makePath(output)]: { bytes: 253, entryPoint: makePath(entry), - imports: [], + imports: [ + { kind: 'url-token', path: 'data:image/png,an image' }, + { external: true, kind: 'url-token', path: 'https://example.com/external.png' }, + ], inputs: { [makePath(entry)]: { bytesInOutput: 62 }, [makePath(imported)]: { bytesInOutput: 51 },
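For completeness, here is a consumer-side sketch of the new field. The metafile is plain JSON, so once a build has produced one (e.g. with `metafile: true`, as in the CHANGELOG example above), external imports can be picked out by checking the optional `external` flag. The struct shapes below mirror the `imports` entries from `lib/shared/types.ts` but are defined here only for this example.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Only the slice of the metafile this example reads; field names follow the
// "imports" shape from lib/shared/types.ts.
type metafileImport struct {
	Path     string `json:"path"`
	Kind     string `json:"kind"`
	External bool   `json:"external"` // new in this change; omitted means false
}

type metafileEntry struct {
	Imports []metafileImport `json:"imports"`
}

type metafile struct {
	Inputs  map[string]metafileEntry `json:"inputs"`
	Outputs map[string]metafileEntry `json:"outputs"`
}

func main() {
	// A trimmed-down metafile shaped like the CHANGELOG example above.
	raw := `{
	  "inputs": {
	    "app.js": {
	      "imports": [
	        {"path": "https://cdn.jsdelivr.net/npm/bootstrap@5.2.3/dist/js/bootstrap.min.js", "kind": "import-statement", "external": true},
	        {"path": "style.css", "kind": "import-statement"}
	      ]
	    }
	  },
	  "outputs": {}
	}`

	var meta metafile
	if err := json.Unmarshal([]byte(raw), &meta); err != nil {
		panic(err)
	}

	// Report every import that stayed external to the bundle.
	for name, input := range meta.Inputs {
		for _, imp := range input.Imports {
			if imp.External {
				fmt.Printf("%s -> external %s (%s)\n", name, imp.Path, imp.Kind)
			}
		}
	}
}
```

For this input the sketch prints a single line noting that `app.js` imports the Bootstrap script externally, while the bundled `style.css` import is skipped because its entry has no `external` field.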