/
linker.go
4325 lines (3780 loc) · 148 KB
/
linker.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
package bundler
import (
"bytes"
"encoding/base64"
"fmt"
"path"
"sort"
"strings"
"sync"
"github.com/evanw/esbuild/internal/ast"
"github.com/evanw/esbuild/internal/compat"
"github.com/evanw/esbuild/internal/config"
"github.com/evanw/esbuild/internal/css_ast"
"github.com/evanw/esbuild/internal/css_printer"
"github.com/evanw/esbuild/internal/fs"
"github.com/evanw/esbuild/internal/js_ast"
"github.com/evanw/esbuild/internal/js_lexer"
"github.com/evanw/esbuild/internal/js_printer"
"github.com/evanw/esbuild/internal/logger"
"github.com/evanw/esbuild/internal/renamer"
"github.com/evanw/esbuild/internal/resolver"
"github.com/evanw/esbuild/internal/runtime"
"github.com/evanw/esbuild/internal/sourcemap"
)
// bitSet is a fixed-capacity set of bits backed by a byte slice. It is used
// to record which entry points can reach a given part or chunk.
type bitSet struct {
	entries []byte
}

// newBitSet allocates a bitSet able to hold bitCount bits, rounding the
// backing storage up to a whole number of bytes.
func newBitSet(bitCount uint) bitSet {
	return bitSet{entries: make([]byte, (bitCount+7)/8)}
}

// hasBit reports whether the given bit is set.
func (bs bitSet) hasBit(bit uint) bool {
	mask := byte(1) << (bit & 7)
	return bs.entries[bit>>3]&mask != 0
}

// setBit turns the given bit on.
func (bs bitSet) setBit(bit uint) {
	bs.entries[bit>>3] |= byte(1) << (bit & 7)
}

// equals reports whether both sets have identical contents.
func (bs bitSet) equals(other bitSet) bool {
	return bytes.Equal(bs.entries, other.entries)
}

// copyFrom overwrites this set's contents with those of "other".
func (bs bitSet) copyFrom(other bitSet) {
	copy(bs.entries, other.entries)
}

// bitwiseOrWith unions "other" into this set in place.
func (bs *bitSet) bitwiseOrWith(other bitSet) {
	for i := 0; i < len(bs.entries); i++ {
		bs.entries[i] |= other.entries[i]
	}
}
// linkerContext holds all state for a single linking operation. The scan-phase
// inputs ("files" and the symbol map) are cloned on construction (see
// "newLinkerContext") so that multiple linking operations can run in parallel
// over the same input files without mutating them.
type linkerContext struct {
	options     *config.Options
	log         logger.Log
	fs          fs.FS
	res         resolver.Resolver
	symbols     js_ast.SymbolMap
	entryPoints []uint32 // Source indices of the entry point files
	files       []file   // Cloned copies of the scan-phase files
	hasErrors   bool     // Set by "addRangeError"; "link" bails out early if true

	// This helps avoid an infinite loop when matching imports to exports
	cycleDetector []importTracker

	// We should avoid traversing all files in the bundle, because the linker
	// should be able to run a linking operation on a large bundle where only
	// a few files are needed (e.g. an incremental compilation scenario). This
	// holds all files that could possibly be reached through the entry points.
	// If you need to iterate over all files in the linking operation, iterate
	// over this array. This array is also sorted in a deterministic ordering
	// to help ensure deterministic builds (source indices are random).
	reachableFiles []uint32

	// This maps from unstable source index to stable reachable file index. This
	// is useful as a deterministic key for sorting if you need to sort something
	// containing a source index (such as "js_ast.Ref" symbol references).
	stableSourceIndices []uint32

	// We may need to refer to the CommonJS "module" symbol for exports
	unboundModuleRef js_ast.Ref
}
// This contains linker-specific metadata corresponding to a "file" struct
// from the initial scan phase of the bundler. It's separated out because it's
// conceptually only used for a single linking operation and because multiple
// linking operations may be happening in parallel with different metadata for
// the same file.
type fileMeta struct {
	// Parallel to the file's "Parts" array: one entry of linker metadata per
	// part, indexed by part index.
	partMeta []partMeta

	// This is the index to the automatically-generated part containing code that
	// calls "__export(exports, { ... getters ... })". This is used to generate
	// getters on an exports object for ES6 export statements, and is both for
	// ES6 star imports and CommonJS-style modules.
	nsExportPartIndex uint32

	// The index of the automatically-generated part containing export statements
	// for every export in the entry point. This also contains the call to the
	// require wrapper for CommonJS-style entry points.
	entryPointExportPartIndex *uint32

	// This is only for TypeScript files. If an import symbol is in this map, it
	// means the import couldn't be found and doesn't actually exist. This is not
	// an error in TypeScript because the import is probably just a type.
	//
	// Normally we remove all unused imports for TypeScript files during parsing,
	// which automatically removes type-only imports. But there are certain re-
	// export situations where it's impossible to tell if an import is a type or
	// not:
	//
	//   import {typeOrNotTypeWhoKnows} from 'path';
	//   export {typeOrNotTypeWhoKnows};
	//
	// Really people should be using the TypeScript "isolatedModules" flag with
	// bundlers like this one that compile TypeScript files independently without
	// type checking. That causes the TypeScript type checker to emit the error
	// "Re-exporting a type when the '--isolatedModules' flag is provided requires
	// using 'export type'." But we try to be robust to such code anyway.
	isProbablyTypeScriptType map[js_ast.Ref]bool

	// Imports are matched with exports in a separate pass from when the matched
	// exports are actually bound to the imports. Here "binding" means adding non-
	// local dependencies on the parts in the exporting file that declare the
	// exported symbol to all parts in the importing file that use the imported
	// symbol.
	//
	// This must be a separate pass because of the "probably TypeScript type"
	// check above. We can't generate the part for the export namespace until
	// we've matched imports with exports because the generated code must omit
	// type-only imports in the export namespace code. And we can't bind exports
	// to imports until the part for the export namespace is generated since that
	// part needs to participate in the binding.
	//
	// This array holds the deferred imports to bind so the pass can be split
	// into two separate passes.
	importsToBind map[js_ast.Ref]importToBind

	// If true, the module must be bundled CommonJS-style like this:
	//
	//   // foo.ts
	//   let require_foo = __commonJS((exports, module) => {
	//     ...
	//   });
	//
	//   // bar.ts
	//   let foo = flag ? require_foo() : null;
	//
	cjsWrap bool

	// If true, all exports must be reached via property accesses off a call to
	// the CommonJS wrapper for this module. In addition, all ES6 exports for
	// this module must be added as getters to the CommonJS "exports" object.
	cjsStyleExports bool

	// If true, the "__export(exports, { ... })" call will be force-included even
	// if there are no parts that reference "exports". Otherwise this call will
	// be removed due to the tree shaking pass. This is used when for entry point
	// files when code related to the current output format needs to reference
	// the "exports" variable.
	forceIncludeExportsForEntryPoint bool

	// This is set when we need to pull in the "__export" symbol in to the part
	// at "nsExportPartIndex". This can't be done in "createExportsForFile"
	// because of concurrent map hazards. Instead, it must be done later.
	needsExportSymbolFromRuntime bool

	// The index of the automatically-generated part used to represent the
	// CommonJS wrapper. This part is empty and is only useful for tree shaking
	// and code splitting. The CommonJS wrapper can't be inserted into the part
	// because the wrapper contains other parts, which can't be represented by
	// the current part system.
	cjsWrapperPartIndex *uint32

	// This includes both named exports and re-exports.
	//
	// Named exports come from explicit export statements in the original file,
	// and are copied from the "NamedExports" field in the AST.
	//
	// Re-exports come from other files and are the result of resolving export
	// star statements (i.e. "export * from 'foo'").
	resolvedExports map[string]exportData

	// Never iterate over "resolvedExports" directly. Instead, iterate over this
	// array. Some exports in that map aren't meant to end up in generated code.
	// This array excludes these exports and is also sorted, which avoids non-
	// determinism due to random map iteration order.
	sortedAndFilteredExportAliases []string
}
// importToBind identifies the symbol (and the file that declares it) that an
// import was resolved to. See "fileMeta.importsToBind" for how these are used.
type importToBind struct {
	sourceIndex uint32
	ref         js_ast.Ref
}
// exportData describes a single resolved export of a file, including enough
// information to detect ambiguous matches from "export *" resolution.
type exportData struct {
	// The symbol that this export resolves to
	ref js_ast.Ref

	// Export star resolution happens first before import resolution. That means
	// it cannot yet determine if duplicate names from export star resolution are
	// ambiguous (point to different symbols) or not (point to the same symbol).
	// This issue can happen in the following scenario:
	//
	//   // entry.js
	//   export * from './a'
	//   export * from './b'
	//
	//   // a.js
	//   export * from './c'
	//
	//   // b.js
	//   export {x} from './c'
	//
	//   // c.js
	//   export let x = 1, y = 2
	//
	// In this case "entry.js" should have two exports "x" and "y", neither of
	// which are ambiguous. To handle this case, ambiguity resolution must be
	// deferred until import resolution time. That is done using this array.
	potentiallyAmbiguousExportStarRefs []importToBind

	// This is the file that the named export above came from. This will be
	// different from the file that contains this object if this is a re-export.
	sourceIndex uint32

	// Exports from export stars are shadowed by other exports. This flag helps
	// implement this behavior.
	isFromExportStar bool
}
// This contains linker-specific metadata corresponding to a "js_ast.Part" struct
// from the initial scan phase of the bundler. It's separated out because it's
// conceptually only used for a single linking operation and because multiple
// linking operations may be happening in parallel with different metadata for
// the same part in the same file.
type partMeta struct {
	// This holds all entry points that can reach this part. It will be used to
	// assign this part to a chunk. Note that elsewhere (e.g. in
	// "computeCrossChunkDependencies") these bits are compared by using the raw
	// backing bytes as a string key.
	entryBits bitSet

	// If present, this is a circular doubly-linked list of all other parts in
	// this file that need to be in the same chunk as this part to avoid cross-
	// chunk assignments, which are not allowed in ES6 modules.
	//
	// This used to be an array but that was generating lots of allocations.
	// Changing this to a circular doubly-linked list was a substantial speedup.
	prevSibling uint32
	nextSibling uint32

	// These are dependencies that come from other files via import statements.
	nonLocalDependencies []partRef
}
// partRef identifies one specific part within one specific file.
type partRef struct {
	sourceIndex uint32
	partIndex   uint32
}
// partRange identifies a contiguous range of part indices within a single
// file. (Whether "partIndexEnd" is inclusive or exclusive is not visible in
// this part of the file — presumably exclusive, following Go convention;
// confirm against "chunkFileOrder".)
type partRange struct {
	sourceIndex    uint32
	partIndexBegin uint32
	partIndexEnd   uint32
}
// chunkInfo describes a single output chunk: which files and parts belong to
// it, where it will be written, and its format-specific representation.
type chunkInfo struct {
	// The path of this chunk's directory relative to the output directory. Note:
	// this must have OS-independent path separators (i.e. '/' not '\').
	relDir string

	// The name of this chunk. This is initially empty for non-entry point chunks
	// because the file name contains a hash of the file contents, which haven't
	// been generated yet. Don't access this directly. Instead call "relPath()"
	// which first checks that the base name is not empty.
	baseNameOrEmpty string

	// The set of files (keyed by source index) that have at least one part
	// assigned to this chunk
	filesWithPartsInChunk map[uint32]bool

	// Deterministic orderings computed by "chunkFileOrder" (see
	// "generateChunksInParallel")
	filesInChunkInOrder []uint32
	partsInChunkInOrder []partRange

	// A part belongs to this chunk when its "partMeta.entryBits" match these
	// bits exactly
	entryBits bitSet

	// This information is only useful if "isEntryPoint" is true
	isEntryPoint  bool
	sourceIndex   uint32 // An index into "c.sources"
	entryPointBit uint   // An index into "c.entryPoints"

	// For code splitting: indices of the other chunks this chunk imports
	crossChunkImports []uint32

	// This is the representation-specific information
	repr chunkRepr
}
// chunkRepr is the representation-specific (JS vs. CSS) half of a chunk.
// "generate" starts generating the chunk without its cross-chunk dependencies
// and returns a continuation that finishes generation once the import paths
// of the other chunks are known (see "generateChunksInParallel").
type chunkRepr interface {
	generate(c *linkerContext, chunk *chunkInfo) func(crossChunkImportRecords []ast.ImportRecord) []OutputFile
}
// chunkReprJS is the JavaScript-specific chunk representation.
type chunkReprJS struct {
	// For code splitting: import statements prepended to the chunk and export
	// statements appended to it, generated by "computeCrossChunkDependencies"
	crossChunkPrefixStmts []js_ast.Stmt
	crossChunkSuffixStmts []js_ast.Stmt

	// Maps symbols declared in this chunk to the alias they are exported under
	exportsToOtherChunks map[js_ast.Ref]string

	// Maps the index of an imported chunk to the items imported from it. An
	// entry with no items still forces an import of that chunk for side effects.
	importsFromOtherChunks map[uint32]crossChunkImportItemArray
}
// chunkReprCSS is the CSS-specific chunk representation. It carries no extra
// state; CSS chunks need only the shared fields on "chunkInfo".
type chunkReprCSS struct {
}
// relPath returns the path of this chunk relative to the output directory.
// Note: this must have OS-independent path separators (i.e. '/' not '\').
// It panics if the chunk's base name has not been filled in yet.
func (chunk *chunkInfo) relPath() string {
	base := chunk.baseNameOrEmpty
	if base == "" {
		panic("Internal error")
	}
	return path.Join(chunk.relDir, base)
}
// newLinkerContext builds the state for one linking operation. It deep-clones
// every piece of scan-phase data that linking will mutate (symbols, parts,
// import records, maps, the module scope) so that the input "files" remain
// untouched and several linking operations can run concurrently over them.
func newLinkerContext(
	options *config.Options,
	log logger.Log,
	fs fs.FS,
	res resolver.Resolver,
	files []file,
	entryPoints []uint32,
) linkerContext {
	// Clone information about symbols and files so we don't mutate the input data
	c := linkerContext{
		options:        options,
		log:            log,
		fs:             fs,
		res:            res,
		entryPoints:    append([]uint32{}, entryPoints...),
		files:          make([]file, len(files)),
		symbols:        js_ast.NewSymbolMap(len(files)),
		reachableFiles: findReachableFiles(files, entryPoints),
	}

	// Clone various things since we may mutate them later. Only reachable files
	// are cloned; unreachable entries in "c.files" stay zero-valued.
	for _, sourceIndex := range c.reachableFiles {
		file := files[sourceIndex]
		switch repr := file.repr.(type) {
		case *reprJS:
			// Clone the representation (a shallow copy so the field
			// reassignments below don't write through to the original)
			{
				clone := *repr
				repr = &clone
				file.repr = repr
			}

			// Clone the symbol map. Symbols are moved into "c.symbols" and the
			// AST's own copy is cleared so there is a single source of truth.
			fileSymbols := append([]js_ast.Symbol{}, repr.ast.Symbols...)
			c.symbols.Outer[sourceIndex] = fileSymbols
			repr.ast.Symbols = nil

			// Clone the parts (including each part's symbol-use map, which the
			// linker mutates when binding imports to exports)
			repr.ast.Parts = append([]js_ast.Part{}, repr.ast.Parts...)
			for i, part := range repr.ast.Parts {
				clone := make(map[js_ast.Ref]js_ast.SymbolUse, len(part.SymbolUses))
				for ref, uses := range part.SymbolUses {
					clone[ref] = uses
				}
				repr.ast.Parts[i].SymbolUses = clone
			}

			// Clone the import records
			repr.ast.ImportRecords = append([]ast.ImportRecord{}, repr.ast.ImportRecords...)

			// Clone the import map
			namedImports := make(map[js_ast.Ref]js_ast.NamedImport, len(repr.ast.NamedImports))
			for k, v := range repr.ast.NamedImports {
				namedImports[k] = v
			}
			repr.ast.NamedImports = namedImports

			// Clone the export map. Each named export starts out resolved to
			// its own file; re-exports are filled in later.
			resolvedExports := make(map[string]exportData)
			for alias, name := range repr.ast.NamedExports {
				resolvedExports[alias] = exportData{
					ref:         name.Ref,
					sourceIndex: sourceIndex,
				}
			}

			// Clone the top-level symbol-to-parts map
			topLevelSymbolToParts := make(map[js_ast.Ref][]uint32)
			for ref, parts := range repr.ast.TopLevelSymbolToParts {
				topLevelSymbolToParts[ref] = parts
			}
			repr.ast.TopLevelSymbolToParts = topLevelSymbolToParts

			// Clone the top-level scope so we can generate more variables
			// (note: "new" here shadows the builtin of the same name)
			{
				new := &js_ast.Scope{}
				*new = *repr.ast.ModuleScope
				new.Generated = append([]js_ast.Ref{}, new.Generated...)
				repr.ast.ModuleScope = new
			}

			// Also associate some default metadata with the file. A file gets
			// CommonJS-style exports if it uses CommonJS features, if direct
			// "eval" could observe the module scope while bundling, or if its
			// lazy export can't be expressed with ES6 syntax in the output.
			repr.meta = fileMeta{
				cjsStyleExports: repr.ast.HasCommonJSFeatures() ||
					(options.Mode == config.ModeBundle && repr.ast.ModuleScope.ContainsDirectEval) ||
					(repr.ast.HasLazyExport && (c.options.Mode == config.ModePassThrough ||
						(c.options.Mode == config.ModeConvertFormat && !c.options.OutputFormat.KeepES6ImportExportSyntax()))),
				partMeta:                 make([]partMeta, len(repr.ast.Parts)),
				resolvedExports:          resolvedExports,
				isProbablyTypeScriptType: make(map[js_ast.Ref]bool),
				importsToBind:            make(map[js_ast.Ref]importToBind),
			}

		case *reprCSS:
			// Clone the representation (shallow copy, as for JS above)
			{
				clone := *repr
				repr = &clone
				file.repr = repr
			}

			// Clone the import records
			repr.ast.ImportRecords = append([]ast.ImportRecord{}, repr.ast.ImportRecords...)
		}

		// All files start off as far as possible from an entry point
		file.distanceFromEntryPoint = ^uint32(0)

		// Update the file in our copy of the file array
		c.files[sourceIndex] = file
	}

	// Create a way to convert source indices to a stable ordering
	c.stableSourceIndices = make([]uint32, len(c.files))
	for stableIndex, sourceIndex := range c.reachableFiles {
		c.stableSourceIndices[sourceIndex] = uint32(stableIndex)
	}

	// Mark all entry points so we don't add them again for import() expressions
	for _, sourceIndex := range entryPoints {
		file := &c.files[sourceIndex]
		file.isEntryPoint = true

		// Entry points with ES6 exports must generate an exports object when
		// targeting non-ES6 formats. Note that the IIFE format only needs this
		// when the global name is present, since that's the only way the exports
		// can actually be observed externally.
		if repr, ok := file.repr.(*reprJS); ok && repr.ast.HasES6Exports && (options.OutputFormat == config.FormatCommonJS ||
			(options.OutputFormat == config.FormatIIFE && len(options.GlobalName) > 0)) {
			repr.ast.UsesExportsRef = true
			repr.meta.forceIncludeExportsForEntryPoint = true
		}
	}

	// Allocate a new unbound symbol called "module" in case we need it later.
	// It lives in the runtime file's symbol table and scope.
	if c.options.OutputFormat == config.FormatCommonJS {
		runtimeSymbols := &c.symbols.Outer[runtime.SourceIndex]
		runtimeScope := c.files[runtime.SourceIndex].repr.(*reprJS).ast.ModuleScope
		c.unboundModuleRef = js_ast.Ref{OuterIndex: runtime.SourceIndex, InnerIndex: uint32(len(*runtimeSymbols))}
		runtimeScope.Generated = append(runtimeScope.Generated, c.unboundModuleRef)
		*runtimeSymbols = append(*runtimeSymbols, js_ast.Symbol{
			Kind:         js_ast.SymbolUnbound,
			OriginalName: "module",
			Link:         js_ast.InvalidRef,
		})
	} else {
		c.unboundModuleRef = js_ast.InvalidRef
	}

	return c
}
// findReachableFiles returns every file transitively reachable from the entry
// points (plus the runtime), in a deterministic postorder: each file appears
// after all of its dependencies. The order is deterministic given that the
// entry point order is deterministic, since import record order within a
// given file is deterministic.
func findReachableFiles(files []file, entryPoints []uint32) []uint32 {
	seen := make(map[uint32]bool)
	var postorder []uint32
	var visit func(uint32)

	// Include this file and all files it imports
	visit = func(sourceIndex uint32) {
		if seen[sourceIndex] {
			return
		}
		seen[sourceIndex] = true
		file := &files[sourceIndex]

		// A JS file may have an associated CSS file; traverse that too
		if repr, ok := file.repr.(*reprJS); ok && repr.cssSourceIndex != nil {
			visit(*repr.cssSourceIndex)
		}

		// Traverse every import that resolved to a file in the bundle
		for _, record := range *file.repr.importRecords() {
			if record.SourceIndex != nil {
				visit(*record.SourceIndex)
			}
		}

		// Postorder: a file is appended only after its dependencies
		postorder = append(postorder, sourceIndex)
	}

	// The runtime is always included in case it's needed
	visit(runtime.SourceIndex)

	// Include all files reachable from any entry point
	for _, entryPoint := range entryPoints {
		visit(entryPoint)
	}
	return postorder
}
// addRangeError reports an error at the given source range and records that
// linking has failed so "link" can bail out before generating any chunks.
func (c *linkerContext) addRangeError(source logger.Source, r logger.Range, text string) {
	c.log.AddRangeError(&source, r, text)
	c.hasErrors = true
}
// addPartToFile appends "part" (with its linker metadata "partMeta") to the
// JS file at "sourceIndex", filling in any zero-valued optional fields with
// usable defaults, and returns the index of the newly-added part.
func (c *linkerContext) addPartToFile(sourceIndex uint32, part js_ast.Part, partMeta partMeta) uint32 {
	// Callers may leave these nil; make them usable
	if part.LocalDependencies == nil {
		part.LocalDependencies = make(map[uint32]bool)
	}
	if part.SymbolUses == nil {
		part.SymbolUses = make(map[js_ast.Ref]js_ast.SymbolUse)
	}
	if partMeta.entryBits.entries == nil {
		partMeta.entryBits = newBitSet(uint(len(c.entryPoints)))
	}

	repr := c.files[sourceIndex].repr.(*reprJS)
	newIndex := uint32(len(repr.ast.Parts))

	// A new part starts out as a sibling ring containing only itself
	partMeta.prevSibling = newIndex
	partMeta.nextSibling = newIndex

	repr.ast.Parts = append(repr.ast.Parts, part)
	repr.meta.partMeta = append(repr.meta.partMeta, partMeta)
	return newIndex
}
// link runs the whole linking pipeline in order: match imports with exports,
// mark reachable parts, assign parts to chunks, compute cross-chunk
// dependencies, and generate all chunks in parallel. Returns the generated
// output files, or an empty slice if any errors were reported while scanning.
func (c *linkerContext) link() []OutputFile {
	c.scanImportsAndExports()

	// Stop now if there were errors
	if c.hasErrors {
		return []OutputFile{}
	}

	c.markPartsReachableFromEntryPoints()
	c.handleCrossChunkAssignments()

	// In pass-through mode, keep entry point exports from being renamed
	// (presumably so the output preserves the original exported names — the
	// helper's implementation isn't visible in this part of the file)
	if c.options.Mode == config.ModePassThrough {
		for _, entryPoint := range c.entryPoints {
			c.preventExportsFromBeingRenamed(entryPoint)
		}
	}

	chunks := c.computeChunks()
	c.computeCrossChunkDependencies(chunks)

	// Make sure calls to "js_ast.FollowSymbols()" in parallel goroutines after this
	// won't hit concurrent map mutation hazards
	js_ast.FollowAllSymbols(c.symbols)

	return c.generateChunksInParallel(chunks)
}
// generateChunksInParallel finalizes the file/part ordering of each chunk
// (possibly creating sibling CSS chunks for JS chunks that import CSS),
// then generates every chunk on its own goroutine. Chunks that import other
// chunks wait for their dependencies first, because imported chunk paths
// contain content hashes that only exist once those chunks are generated.
// Results are joined in chunk order for determinism.
func (c *linkerContext) generateChunksInParallel(chunks []chunkInfo) []OutputFile {
	// Determine the order of files within the chunk ahead of time. This may
	// generate additional CSS chunks from JS chunks that import CSS files.
	{
		// Iterate over a snapshot since "chunks" may grow during the loop
		originalChunks := chunks
		for i, chunk := range originalChunks {
			js, jsParts, css := c.chunkFileOrder(&chunk)

			switch chunk.repr.(type) {
			case *chunkReprJS:
				chunks[i].filesInChunkInOrder = js
				chunks[i].partsInChunkInOrder = jsParts

				// If JS files include CSS files, make a sibling chunk for the CSS
				if len(css) > 0 {
					baseNameOrEmpty := chunk.baseNameOrEmpty
					if baseNameOrEmpty != "" {
						// Swap the ".js" extension for the CSS one
						if js := c.options.OutputExtensionJS; strings.HasSuffix(baseNameOrEmpty, js) {
							baseNameOrEmpty = baseNameOrEmpty[:len(baseNameOrEmpty)-len(js)]
						}
						baseNameOrEmpty += c.options.OutputExtensionCSS
					}
					chunks = append(chunks, chunkInfo{
						filesInChunkInOrder:   css,
						entryBits:             chunk.entryBits,
						isEntryPoint:          chunk.isEntryPoint,
						sourceIndex:           chunk.sourceIndex,
						entryPointBit:         chunk.entryPointBit,
						relDir:                chunk.relDir,
						baseNameOrEmpty:       baseNameOrEmpty,
						filesWithPartsInChunk: make(map[uint32]bool),
						repr:                  &chunkReprCSS{},
					})
				}

			case *chunkReprCSS:
				chunks[i].filesInChunkInOrder = css
			}
		}
	}

	// We want to process chunks with as much parallelism as possible. However,
	// content hashing means chunks that import other chunks must be completed
	// after the imported chunks are completed because the import paths contain
	// the content hash. It's only safe to process a chunk when the dependency
	// count reaches zero.
	type ordering struct {
		// The WaitGroup counts this chunk's unfinished dependencies
		dependencies sync.WaitGroup
		dependents   []uint32
	}
	chunkOrdering := make([]ordering, len(chunks))
	for chunkIndex, chunk := range chunks {
		chunkOrdering[chunkIndex].dependencies.Add(len(chunk.crossChunkImports))
		for _, otherChunkIndex := range chunk.crossChunkImports {
			dependents := &chunkOrdering[otherChunkIndex].dependents
			*dependents = append(*dependents, uint32(chunkIndex))
		}
	}

	// Check for loops in the dependency graph since they cause a deadlock
	var check func(int, []int)
	check = func(chunkIndex int, path []int) {
		for _, otherChunkIndex := range path {
			if chunkIndex == otherChunkIndex {
				panic("Internal error: Chunk import graph contains a cycle")
			}
		}
		path = append(path, chunkIndex)
		for _, otherChunkIndex := range chunks[chunkIndex].crossChunkImports {
			check(int(otherChunkIndex), path)
		}
	}
	for i := range chunks {
		check(i, nil)
	}

	results := make([][]OutputFile, len(chunks))
	resultsWaitGroup := sync.WaitGroup{}
	resultsWaitGroup.Add(len(chunks))

	// Generate each chunk on a separate goroutine
	for i := range chunks {
		go func(i int) {
			chunk := &chunks[i]
			order := &chunkOrdering[i]

			// Start generating the chunk without dependencies, but stop when
			// dependencies are needed. This returns a callback that is called
			// later to resume generating the chunk once dependencies are known.
			resume := chunk.repr.generate(c, chunk)

			// Wait for all dependencies to be resolved first
			order.dependencies.Wait()

			// Fill in the cross-chunk import records now that the paths are known
			// (note: this inner "i" shadows the goroutine's chunk index)
			crossChunkImportRecords := make([]ast.ImportRecord, len(chunk.crossChunkImports))
			for i, otherChunkIndex := range chunk.crossChunkImports {
				crossChunkImportRecords[i] = ast.ImportRecord{
					Kind: ast.ImportStmt,
					Path: logger.Path{Text: c.pathBetweenChunks(chunk.relDir, chunks[otherChunkIndex].relPath())},
				}
			}

			// Generate the chunk
			results[i] = resume(crossChunkImportRecords)

			// Wake up any dependents now that we're done
			for _, chunkIndex := range order.dependents {
				chunkOrdering[chunkIndex].dependencies.Done()
			}
			resultsWaitGroup.Done()
		}(i)
	}

	// Join the results in chunk order for determinism
	resultsWaitGroup.Wait()
	var outputFiles []OutputFile
	for _, group := range results {
		outputFiles = append(outputFiles, group...)
	}
	return outputFiles
}
// pathBetweenChunks returns the import path to use for referencing the chunk
// at "toRelPath" from a chunk located in "fromRelDir". If a public path is
// configured the result is absolute; otherwise it is a "./"- or "../"-prefixed
// relative path with forward slashes. On failure an error is logged and ""
// is returned.
func (c *linkerContext) pathBetweenChunks(fromRelDir string, toRelPath string) string {
	// An explicitly-configured public path takes precedence
	if publicPath := c.options.PublicPath; publicPath != "" {
		return publicPath + toRelPath
	}

	// Otherwise, compute a relative path
	rel, ok := c.fs.Rel(fromRelDir, toRelPath)
	if !ok {
		c.log.AddError(nil, logger.Loc{},
			fmt.Sprintf("Cannot traverse from directory %q to chunk %q", fromRelDir, toRelPath))
		return ""
	}

	// Make sure to always use forward slashes, even on Windows
	rel = strings.ReplaceAll(rel, "\\", "/")

	// A bare name would be interpreted as a package path instead of a relative
	// path, so force a relative prefix
	if !strings.HasPrefix(rel, "./") && !strings.HasPrefix(rel, "../") {
		rel = "./" + rel
	}
	return rel
}
// computeCrossChunkDependencies wires chunks together for code splitting.
// It runs in three passes: (1) in parallel, find which top-level symbols each
// chunk uses and which chunk declares each symbol; (2) turn cross-chunk uses
// into export lists on the declaring chunks; (3) turn them into import
// statements on the using chunks, filling in "chunk.crossChunkImports".
// Only the ES module output format is supported here; other formats panic.
func (c *linkerContext) computeCrossChunkDependencies(chunks []chunkInfo) {
	if len(chunks) < 2 {
		// No need to compute cross-chunk dependencies if there can't be any
		return
	}

	type chunkMeta struct {
		imports map[js_ast.Ref]bool
		exports map[js_ast.Ref]bool
	}
	chunkMetas := make([]chunkMeta, len(chunks))

	// For each chunk, see what symbols it uses from other chunks. Do this in
	// parallel because it's the most expensive part of this function.
	waitGroup := sync.WaitGroup{}
	waitGroup.Add(len(chunks))
	for chunkIndex, chunk := range chunks {
		go func(chunkIndex int, chunk chunkInfo) {
			// A part belongs to this chunk when its entry bits match exactly;
			// the raw bit-set bytes serve as the comparison key
			chunkKey := string(chunk.entryBits.entries)
			imports := make(map[js_ast.Ref]bool)
			// Each goroutine writes only its own slice element, so this is safe
			chunkMetas[chunkIndex] = chunkMeta{imports: imports, exports: make(map[js_ast.Ref]bool)}

			// Go over each file in this chunk
			for sourceIndex := range chunk.filesWithPartsInChunk {
				// Go over each part in this file that's marked for inclusion in this chunk
				switch repr := c.files[sourceIndex].repr.(type) {
				case *reprJS:
					for partIndex, partMeta := range repr.meta.partMeta {
						if string(partMeta.entryBits.entries) != chunkKey {
							continue
						}
						part := &repr.ast.Parts[partIndex]

						// Rewrite external dynamic imports to point to the chunk for that entry point
						for _, importRecordIndex := range part.ImportRecordIndices {
							record := &repr.ast.ImportRecords[importRecordIndex]
							if record.SourceIndex != nil && c.isExternalDynamicImport(record) {
								record.Path.Text = c.pathBetweenChunks(chunk.relDir, c.files[*record.SourceIndex].entryPointRelPath)
								record.SourceIndex = nil
							}
						}

						// Remember what chunk each top-level symbol is declared in. Symbols
						// with multiple declarations such as repeated "var" statements with
						// the same name should already be marked as all being in a single
						// chunk. In that case this will overwrite the same value below which
						// is fine.
						for _, declared := range part.DeclaredSymbols {
							if declared.IsTopLevel {
								// The bitwise complement is stored so that the zero value
								// decodes (via "^") to ^uint32(0), i.e. "no chunk"
								c.symbols.Get(declared.Ref).ChunkIndex = ^uint32(chunkIndex)
							}
						}

						// Record each symbol used in this part. This will later be matched up
						// with our map of which chunk a given symbol is declared in to
						// determine if the symbol needs to be imported from another chunk.
						for ref := range part.SymbolUses {
							symbol := c.symbols.Get(ref)

							// Ignore unbound symbols, which don't have declarations
							if symbol.Kind == js_ast.SymbolUnbound {
								continue
							}

							// Ignore symbols that are going to be replaced by undefined
							if symbol.ImportItemStatus == js_ast.ImportItemMissing {
								continue
							}

							// If this is imported from another file, follow the import
							// reference and reference the symbol in that file instead
							if importToBind, ok := repr.meta.importsToBind[ref]; ok {
								ref = importToBind.ref
								symbol = c.symbols.Get(ref)
							}

							// If this is an ES6 import from a CommonJS file, it will become a
							// property access off the namespace symbol instead of a bare
							// identifier. In that case we want to pull in the namespace symbol
							// instead. The namespace symbol stores the result of "require()".
							if symbol.NamespaceAlias != nil {
								ref = symbol.NamespaceAlias.NamespaceRef
							}

							// We must record this relationship even for symbols that are not
							// imports. Due to code splitting, the definition of a symbol may
							// be moved to a separate chunk than the use of a symbol even if
							// the definition and use of that symbol are originally from the
							// same source file.
							imports[ref] = true
						}
					}
				}
			}
			waitGroup.Done()
		}(chunkIndex, chunk)
	}
	waitGroup.Wait()

	// Mark imported symbols as exported in the chunk from which they are declared
	for chunkIndex := range chunks {
		chunk := &chunks[chunkIndex]
		repr, ok := chunk.repr.(*chunkReprJS)
		if !ok {
			continue
		}

		// Find all uses in this chunk of symbols from other chunks
		repr.importsFromOtherChunks = make(map[uint32]crossChunkImportItemArray)
		for importRef := range chunkMetas[chunkIndex].imports {
			// Ignore uses that aren't top-level symbols ("^" decodes the
			// complement stored above; ^uint32(0) means "never declared")
			otherChunkIndex := ^c.symbols.Get(importRef).ChunkIndex
			if otherChunkIndex != ^uint32(0) && otherChunkIndex != uint32(chunkIndex) {
				repr.importsFromOtherChunks[otherChunkIndex] =
					append(repr.importsFromOtherChunks[otherChunkIndex], crossChunkImportItem{ref: importRef})
				chunkMetas[otherChunkIndex].exports[importRef] = true
			}
		}

		// If this is an entry point, make sure we import all chunks belonging to
		// this entry point, even if there are no imports. We need to make sure
		// these chunks are evaluated for their side effects too.
		if chunk.isEntryPoint {
			for otherChunkIndex, otherChunk := range chunks {
				if chunkIndex != otherChunkIndex && otherChunk.entryBits.hasBit(chunk.entryPointBit) {
					// This read-then-write inserts a map entry (possibly with a nil
					// item list), which is enough to force a bare import below
					imports := repr.importsFromOtherChunks[uint32(otherChunkIndex)]
					repr.importsFromOtherChunks[uint32(otherChunkIndex)] = imports
				}
			}
		}
	}

	// Generate cross-chunk exports. These must be computed before cross-chunk
	// imports because of export alias renaming, which must consider all export
	// aliases simultaneously to avoid collisions.
	for chunkIndex := range chunks {
		chunk := &chunks[chunkIndex]
		repr, ok := chunk.repr.(*chunkReprJS)
		if !ok {
			continue
		}

		repr.exportsToOtherChunks = make(map[js_ast.Ref]string)
		switch c.options.OutputFormat {
		case config.FormatESModule:
			r := renamer.ExportRenamer{}
			var items []js_ast.ClauseItem
			for _, export := range c.sortedCrossChunkExportItems(chunkMetas[chunkIndex].exports) {
				var alias string
				if c.options.MinifyIdentifiers {
					alias = r.NextMinifiedName()
				} else {
					alias = r.NextRenamedName(c.symbols.Get(export.ref).OriginalName)
				}
				items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: export.ref}, Alias: alias})
				repr.exportsToOtherChunks[export.ref] = alias
			}
			if len(items) > 0 {
				// "export {a, b as c}" appended to the end of the chunk
				repr.crossChunkSuffixStmts = []js_ast.Stmt{{Data: &js_ast.SExportClause{
					Items: items,
				}}}
			}

		default:
			panic("Internal error")
		}
	}

	// Generate cross-chunk imports. These must be computed after cross-chunk
	// exports because the export aliases must already be finalized so they can
	// be embedded in the generated import statements.
	for chunkIndex := range chunks {
		chunk := &chunks[chunkIndex]
		repr, ok := chunk.repr.(*chunkReprJS)
		if !ok {
			continue
		}

		var crossChunkImports []uint32
		var crossChunkPrefixStmts []js_ast.Stmt

		for _, crossChunkImport := range c.sortedCrossChunkImports(chunks, repr.importsFromOtherChunks) {
			switch c.options.OutputFormat {
			case config.FormatESModule:
				var items []js_ast.ClauseItem
				for _, item := range crossChunkImport.sortedImportItems {
					items = append(items, js_ast.ClauseItem{Name: js_ast.LocRef{Ref: item.ref}, Alias: item.exportAlias})
				}
				importRecordIndex := uint32(len(crossChunkImports))
				crossChunkImports = append(crossChunkImports, crossChunkImport.chunkIndex)
				if len(items) > 0 {
					// "import {a, b} from './chunk.js'"
					crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{
						Items:             &items,
						ImportRecordIndex: importRecordIndex,
					}})
				} else {
					// "import './chunk.js'"
					crossChunkPrefixStmts = append(crossChunkPrefixStmts, js_ast.Stmt{Data: &js_ast.SImport{
						ImportRecordIndex: importRecordIndex,
					}})
				}

			default:
				panic("Internal error")
			}
		}

		chunk.crossChunkImports = crossChunkImports
		repr.crossChunkPrefixStmts = crossChunkPrefixStmts
	}
}
// crossChunkImport records everything one chunk imports from a single other
// chunk, plus a key used to order the generated import statements.
type crossChunkImport struct {
	// Index into the "chunks" array of the chunk being imported from.
	chunkIndex uint32

	// Built from the source chunk's entry bits in "sortedCrossChunkImports";
	// only used to sort cross-chunk imports deterministically between builds.
	sortingKey string

	// The individual imported symbols, sorted by their export alias.
	sortedImportItems crossChunkImportItemArray
}
// This type exists only to satisfy "sort.Interface" so we can use Go's
// native sort function on a slice of cross-chunk imports.
type crossChunkImportArray []crossChunkImport

func (arr crossChunkImportArray) Len() int { return len(arr) }

func (arr crossChunkImportArray) Swap(i int, j int) {
	arr[i], arr[j] = arr[j], arr[i]
}

// Imports are ordered by their pre-computed sorting key so the output is
// deterministic between builds.
func (arr crossChunkImportArray) Less(i int, j int) bool {
	left, right := arr[i], arr[j]
	return left.sortingKey < right.sortingKey
}
// Sort cross-chunk imports by chunk name for determinism. This takes the
// unordered "importsFromOtherChunks" map, resolves each import's export alias
// from the chunk that declares it, and returns a deterministically-ordered
// array of imports (both across chunks and within each chunk's item list).
func (c *linkerContext) sortedCrossChunkImports(chunks []chunkInfo, importsFromOtherChunks map[uint32]crossChunkImportItemArray) crossChunkImportArray {
	sorted := make(crossChunkImportArray, 0, len(importsFromOtherChunks))

	for sourceChunkIndex, items := range importsFromOtherChunks {
		sourceChunk := &chunks[sourceChunkIndex]

		// Fill in each item's export alias from the owning chunk, then sort
		// the imports from this single chunk by alias for determinism
		aliases := sourceChunk.repr.(*chunkReprJS).exportsToOtherChunks
		for i := range items {
			items[i].exportAlias = aliases[items[i].ref]
		}
		sort.Sort(items)

		sorted = append(sorted, crossChunkImport{
			chunkIndex:        sourceChunkIndex,
			sortingKey:        string(sourceChunk.entryBits.entries),
			sortedImportItems: items,
		})
	}

	// Order the per-chunk entries themselves by sorting key
	sort.Sort(sorted)
	return sorted
}
// crossChunkImportItem is a single symbol imported from another chunk.
type crossChunkImportItem struct {
	// The symbol being imported.
	ref js_ast.Ref

	// The alias under which the declaring chunk exports this symbol. This is
	// filled in from "exportsToOtherChunks" in "sortedCrossChunkImports".
	exportAlias string
}
// This type exists only to satisfy "sort.Interface" so we can use Go's
// native sort function on a slice of cross-chunk import items.
type crossChunkImportItemArray []crossChunkImportItem

func (items crossChunkImportItemArray) Len() int { return len(items) }

func (items crossChunkImportItemArray) Swap(i int, j int) {
	items[i], items[j] = items[j], items[i]
}

// Items are ordered by export alias so the generated import clause is
// deterministic between builds.
func (items crossChunkImportItemArray) Less(i int, j int) bool {
	left, right := items[i], items[j]
	return left.exportAlias < right.exportAlias
}
// crossChunkExportItem is a single symbol exported by a chunk for use by
// other chunks.
type crossChunkExportItem struct {
	// The symbol being exported.
	ref js_ast.Ref

	// NOTE(review): presumably the path of the source file that declared the
	// symbol, kept so exports can be sorted stably (see the comparator that
	// mentions OuterIndex/source index instability) — confirm against callers.
	keyPath logger.Path
}
// This type exists only to satisfy "sort.Interface" so we can use Go's
// native sort function on a slice of cross-chunk export items.
type crossChunkExportItemArray []crossChunkExportItem

func (items crossChunkExportItemArray) Len() int { return len(items) }

func (items crossChunkExportItemArray) Swap(i int, j int) {
	items[i], items[j] = items[j], items[i]
}
func (a crossChunkExportItemArray) Less(i int, j int) bool {
ai := a[i]
aj := a[j]
// The sort order here is arbitrary but needs to be consistent between builds.
// The InnerIndex should be stable because the parser for a single file is
// single-threaded and deterministically assigns out InnerIndex values
// sequentially. But the OuterIndex (i.e. source index) should be unstable
// because the main thread assigns out source index values sequentially to