add hash to outputs in metafile #3530

Open · wants to merge 1 commit into main
13 changes: 12 additions & 1 deletion internal/bundler/bundler.go
@@ -10,6 +10,7 @@ import (
"bytes"
"encoding/base32"
"encoding/base64"
"encoding/binary"
"fmt"
"math/rand"
"net/http"
@@ -2572,6 +2573,8 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
Ext: &templateExt,
})) + ext

bytesHash := GenerateOutputFileHash(bytes)

// Optionally add metadata about the file
var jsonMetadataChunk string
if s.options.NeedsMetafile {
@@ -2580,16 +2583,18 @@ func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scann
len(bytes),
)
jsonMetadataChunk = fmt.Sprintf(
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": %s,\n \"bytes\": %d\n }",
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": %s,\n \"bytes\": %d,\n \"hash\": \"%s\"\n }",
inputs,
len(bytes),
bytesHash,
)
}

// Generate the additional file to copy into the output directory
result.file.inputFile.AdditionalFiles = []graph.OutputFile{{
AbsPath: s.fs.Join(s.options.AbsOutputDir, relPath),
Contents: bytes,
Hash: bytesHash,
JSONMetadataChunk: jsonMetadataChunk,
}}
}
@@ -3312,3 +3317,9 @@ func sanitizeFilePathForVirtualModulePath(path string) string {
// avoid forbidden file names such as ".." since ".js" is a valid file name.
return sb.String()
}

func GenerateOutputFileHash(bytes []byte) string {
var hashBytes [8]byte
binary.LittleEndian.PutUint64(hashBytes[:], xxhash.Sum64(bytes))
return base64.RawStdEncoding.EncodeToString(hashBytes[:])
}
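
For reference, the new GenerateOutputFileHash helper hashes the output bytes with xxhash64, serializes the 64-bit digest little-endian, and base64-encodes it without padding, which yields the 11-character strings seen in the snapshot diffs below (e.g. "br5jNYccCrA"). A self-contained sketch of the same computation, assuming the public github.com/cespare/xxhash/v2 module stands in for esbuild's vendored internal/xxhash package:

package main

import (
	"encoding/base64"
	"encoding/binary"
	"fmt"

	"github.com/cespare/xxhash/v2"
)

// generateOutputFileHash mirrors the helper added above: an xxhash64
// digest of the file contents, serialized little-endian, then encoded
// as unpadded base64.
func generateOutputFileHash(contents []byte) string {
	var hashBytes [8]byte
	binary.LittleEndian.PutUint64(hashBytes[:], xxhash.Sum64(contents))
	return base64.RawStdEncoding.EncodeToString(hashBytes[:])
}

func main() {
	// 8 digest bytes always encode to exactly 11 base64 characters.
	fmt.Println(generateOutputFileHash([]byte("console.log(\"foo\");\n")))
}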
9 changes: 6 additions & 3 deletions internal/bundler_tests/snapshots/snapshots_css.txt
Original file line number Diff line number Diff line change
@@ -3395,7 +3395,8 @@ console.log("bar");
"bytesInOutput": 20
}
},
"bytes": 36
"bytes": 36,
"hash": "br5jNYccCrA"
},
"out/css/DIO3TRUB.css": {
"imports": [],
@@ -3404,7 +3405,8 @@ console.log("bar");
"bytesInOutput": 23
}
},
"bytes": 40
"bytes": 40,
"hash": "pQk0CKFT9YM"
},
"out/js/MA6C7ZBK.js": {
"imports": [],
@@ -3419,7 +3421,8 @@ console.log("bar");
"bytesInOutput": 20
}
},
"bytes": 36
"bytes": 36,
"hash": "W/iiZ007tkU"
}
}
}
51 changes: 34 additions & 17 deletions internal/bundler_tests/snapshots/snapshots_default.txt
Original file line number Diff line number Diff line change
@@ -4191,7 +4191,8 @@ x = [data_default, data_default, data_default2];
"bytesInOutput": 49
}
},
"bytes": 210
"bytes": 210,
"hash": "gbBJ/xD/zGg"
}
}
}
@@ -4270,7 +4271,8 @@ a {
"bytesInOutput": 148
}
},
"bytes": 148
"bytes": 148,
"hash": "+6xuYxf/kAo"
},
"out/entry.css": {
"imports": [
@@ -4301,7 +4303,8 @@ a {
"bytesInOutput": 65
}
},
"bytes": 98
"bytes": 98,
"hash": "DqD6C877XPw"
}
}
}
@@ -4505,7 +4508,8 @@ d {
"bytesInOutput": 4
}
},
"bytes": 4
"bytes": 4,
"hash": "aGYVdUIoUW0"
},
"out/copy-O3Y5SCJE.copy": {
"imports": [],
@@ -4515,7 +4519,8 @@ d {
"bytesInOutput": 4
}
},
"bytes": 4
"bytes": 4,
"hash": "WHi8JAnZ8XY"
},
"out/entry.js": {
"imports": [
@@ -4567,7 +4572,8 @@ d {
"bytesInOutput": 43
}
},
"bytes": 642
"bytes": 642,
"hash": "rHP7YPDyyRg"
},
"out/dynamic-TGITTCVZ.js": {
"imports": [
@@ -4585,7 +4591,8 @@ d {
"bytesInOutput": 25
}
},
"bytes": 119
"bytes": 119,
"hash": "cj974rDz0MQ"
},
"out/chunk-WXLYCZIT.js": {
"imports": [],
@@ -4594,7 +4601,8 @@ d {
"__require"
],
"inputs": {},
"bytes": 38
"bytes": 38,
"hash": "c4FAahkR2Qs"
},
"out/entry.css": {
"imports": [
@@ -4627,7 +4635,8 @@ d {
"bytesInOutput": 187
}
},
"bytes": 234
"bytes": 234,
"hash": "3QK6r342r5w"
}
}
}
@@ -4734,7 +4743,8 @@ a {
"bytesInOutput": 0
}
},
"bytes": 0
"bytes": 0,
"hash": "menYUTfbRu8"
},
"out/bytesInOutput should be at least 99 (1).js": {
"imports": [
@@ -4753,7 +4763,8 @@ a {
"bytesInOutput": 24
}
},
"bytes": 330
"bytes": 330,
"hash": "E7nBKlvM0YE"
},
"out/222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222222-55DNWN2R.copy": {
"imports": [],
@@ -4763,7 +4774,8 @@ a {
"bytesInOutput": 0
}
},
"bytes": 0
"bytes": 0,
"hash": "menYUTfbRu8"
},
"out/bytesInOutput should be at least 99 (2).js": {
"imports": [
@@ -4779,7 +4791,8 @@ a {
"bytesInOutput": 149
}
},
"bytes": 203
"bytes": 203,
"hash": "GICfQTR1+uE"
},
"out/bytesInOutput should be at least 99 (3).js": {
"imports": [
@@ -4795,7 +4808,8 @@ a {
"bytesInOutput": 143
}
},
"bytes": 197
"bytes": 197,
"hash": "x6ltC//xHBY"
},
"out/333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333333-DH3FVEAA.js": {
"imports": [],
@@ -4806,7 +4820,8 @@ a {
"bytesInOutput": 0
}
},
"bytes": 0
"bytes": 0,
"hash": "menYUTfbRu8"
},
"out/444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444444-55DNWN2R.file": {
"imports": [],
@@ -4816,7 +4831,8 @@ a {
"bytesInOutput": 0
}
},
"bytes": 0
"bytes": 0,
"hash": "menYUTfbRu8"
},
"out/bytesInOutput should be at least 99.css": {
"imports": [
@@ -4831,7 +4847,8 @@ a {
"bytesInOutput": 144
}
},
"bytes": 198
"bytes": 198,
"hash": "OrtofXGpbWg"
}
}
}
12 changes: 8 additions & 4 deletions internal/bundler_tests/snapshots/snapshots_loader.txt
@@ -58,7 +58,8 @@ a {
"imports": [],
"exports": [],
"inputs": {},
"bytes": 203
"bytes": 203,
"hash": "Bm4aUvCX2dw"
},
"entry.css": {
"imports": [
@@ -74,7 +75,8 @@ a {
"bytesInOutput": 27
}
},
"bytes": 43
"bytes": 43,
"hash": "UXY2JikiRN4"
}
}
}
@@ -158,7 +160,8 @@ console.log(ns, import_c.default, void 0);
"imports": [],
"exports": [],
"inputs": {},
"bytes": 377
"bytes": 377,
"hash": "W0RIyQIh2TA"
},
"entry.js": {
"imports": [],
@@ -175,7 +178,8 @@ console.log(ns, import_c.default, void 0);
"bytesInOutput": 111
}
},
"bytes": 253
"bytes": 253,
"hash": "biWEeH6oSCs"
}
}
}
1 change: 1 addition & 0 deletions internal/graph/input.go
@@ -42,6 +42,7 @@ type OutputFile struct {

AbsPath string
Contents []byte
Hash string
IsExecutable bool
}

22 changes: 14 additions & 8 deletions internal/linker/linker.go
@@ -112,7 +112,7 @@ type chunkInfo struct {
waitForIsolatedHash func() []byte

// Other fields relating to the output file for this chunk
jsonMetadataChunkCallback func(finalOutputSize int) helpers.Joiner
jsonMetadataChunkCallback func(finalOutputSize int, finalOutputHash string) helpers.Joiner
outputSourceMap sourcemap.SourceMapPieces

// When this chunk is initially generated in isolation, the output pieces
@@ -701,11 +701,13 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
}

// Write the external legal comments file
legalCommentsHash := bundler.GenerateOutputFileHash(chunk.externalLegalComments)
outputFiles = append(outputFiles, graph.OutputFile{
AbsPath: c.fs.Join(c.options.AbsOutputDir, finalRelPathForLegalComments),
Contents: chunk.externalLegalComments,
Hash: legalCommentsHash,
JSONMetadataChunk: fmt.Sprintf(
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d\n }", len(chunk.externalLegalComments)),
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d,\n \"hash\": \"%s\"\n }", len(chunk.externalLegalComments), legalCommentsHash),
})
}

@@ -738,22 +740,25 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
// Potentially write the external source map file
switch c.options.SourceMap {
case config.SourceMapLinkedWithComment, config.SourceMapInlineAndExternal, config.SourceMapExternalWithoutComment:
outputSourceMapHash := bundler.GenerateOutputFileHash(outputSourceMap)
outputFiles = append(outputFiles, graph.OutputFile{
AbsPath: c.fs.Join(c.options.AbsOutputDir, finalRelPathForSourceMap),
Contents: outputSourceMap,
Hash: outputSourceMapHash,
JSONMetadataChunk: fmt.Sprintf(
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d\n }", len(outputSourceMap)),
"{\n \"imports\": [],\n \"exports\": [],\n \"inputs\": {},\n \"bytes\": %d,\n \"hash\": \"%s\"\n }", len(outputSourceMap), outputSourceMapHash),
})
}
}

// Finalize the output contents
outputContents := outputContentsJoiner.Done()
outputHash := bundler.GenerateOutputFileHash(outputContents)

// Path substitution for the JSON metadata
var jsonMetadataChunk string
if c.options.NeedsMetafile {
jsonMetadataChunkPieces := c.breakJoinerIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents)))
jsonMetadataChunkPieces := c.breakJoinerIntoPieces(chunk.jsonMetadataChunkCallback(len(outputContents), outputHash))
jsonMetadataChunkBytes, _ := c.substituteFinalPaths(jsonMetadataChunkPieces, func(finalRelPathForImport string) string {
return resolver.PrettyPath(c.fs, logger.Path{Text: c.fs.Join(c.options.AbsOutputDir, finalRelPathForImport), Namespace: "file"})
})
@@ -764,6 +769,7 @@ func (c *linkerContext) generateChunksInParallel(additionalFiles []graph.OutputF
outputFiles = append(outputFiles, graph.OutputFile{
AbsPath: c.fs.Join(c.options.AbsOutputDir, chunk.finalRelPath),
Contents: outputContents,
Hash: outputHash,
JSONMetadataChunk: jsonMetadataChunk,
IsExecutable: chunk.isExecutable,
})
@@ -5886,7 +5892,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
}
pieces[i] = outputs
}
chunk.jsonMetadataChunkCallback = func(finalOutputSize int) helpers.Joiner {
chunk.jsonMetadataChunkCallback = func(finalOutputSize int, finalOutputHash string) helpers.Joiner {
finalRelDir := c.fs.Dir(chunk.finalRelPath)
for i, sourceIndex := range metaOrder {
if i > 0 {
@@ -5903,7 +5909,7 @@ func (c *linkerContext) generateChunkJS(chunkIndex int, chunkWaitGroup *sync.Wai
if len(metaOrder) > 0 {
jMeta.AddString("\n ")
}
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d\n }", finalOutputSize))
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d,\n \"hash\": \"%s\"\n }", finalOutputSize, finalOutputHash))
return jMeta
}
}
@@ -6328,7 +6334,7 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
for i, compileResult := range compileResults {
pieces[i] = c.breakOutputIntoPieces(compileResult.CSS)
}
chunk.jsonMetadataChunkCallback = func(finalOutputSize int) helpers.Joiner {
chunk.jsonMetadataChunkCallback = func(finalOutputSize int, finalOutputHash string) helpers.Joiner {
finalRelDir := c.fs.Dir(chunk.finalRelPath)
isFirst := true
for i, compileResult := range compileResults {
@@ -6347,7 +6353,7 @@ func (c *linkerContext) generateChunkCSS(chunkIndex int, chunkWaitGroup *sync.Wa
if len(compileResults) > 0 {
jMeta.AddString("\n ")
}
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d\n }", finalOutputSize))
jMeta.AddString(fmt.Sprintf("},\n \"bytes\": %d,\n \"hash\": \"%s\"\n }", finalOutputSize, finalOutputHash))
return jMeta
}
}
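
Note the design choice in the linker.go changes: the per-chunk JSON metadata tail is produced by a stored callback, because the chunk's final size is only known after the output joiner runs, and the callback never sees the final bytes, so the hash must be passed in alongside the size rather than computed inside. A minimal standalone sketch of that pattern, with illustrative names rather than esbuild's:

package main

import "fmt"

// chunk mirrors just the relevant shape of linker.go's chunkInfo: the
// metadata tail is deferred behind a closure because the final output
// size and hash are unknown while the chunk is generated in isolation.
type chunk struct {
	jsonMetadataTail func(finalOutputSize int, finalOutputHash string) string
}

func main() {
	c := chunk{
		jsonMetadataTail: func(finalOutputSize int, finalOutputHash string) string {
			return fmt.Sprintf("},\n  \"bytes\": %d,\n  \"hash\": %q\n}", finalOutputSize, finalOutputHash)
		},
	}

	// Later, once the joiner has produced the final contents, both
	// values exist and the tail can be emitted:
	contents := []byte("console.log(1);\n")
	fmt.Println(c.jsonMetadataTail(len(contents), "exampleHash"))
}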
1 change: 1 addition & 0 deletions lib/shared/types.ts
@@ -477,6 +477,7 @@ export interface Metafile {
outputs: {
[path: string]: {
bytes: number
hash: string
inputs: {
[path: string]: {
bytesInOutput: number
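
With the type addition above, metafile consumers can read a hash per output file. A usage sketch via esbuild's Go API (pkg/api), assuming a build of esbuild that includes this change; the entry point name is hypothetical and the metafile struct below is a hand-rolled subset of the metafile JSON schema, not a type esbuild exports:

package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/evanw/esbuild/pkg/api"
)

// metafileOutputs decodes only the "outputs" section of the metafile,
// including the "hash" field added by this PR.
type metafileOutputs struct {
	Outputs map[string]struct {
		Bytes int    `json:"bytes"`
		Hash  string `json:"hash"`
	} `json:"outputs"`
}

func main() {
	result := api.Build(api.BuildOptions{
		EntryPoints: []string{"entry.js"}, // hypothetical entry point
		Bundle:      true,
		Outdir:      "out",
		Metafile:    true,
	})
	if len(result.Errors) > 0 {
		log.Fatalf("build failed: %v", result.Errors)
	}

	var meta metafileOutputs
	if err := json.Unmarshal([]byte(result.Metafile), &meta); err != nil {
		log.Fatal(err)
	}
	for path, output := range meta.Outputs {
		fmt.Printf("%s: %d bytes, hash %s\n", path, output.Bytes, output.Hash)
	}
}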