Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

implement a copy loader #2320

Merged
merged 1 commit into from
Jun 15, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
6 changes: 6 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,12 @@

## Unreleased

* Add a `copy` loader ([#2255](https://github.com/evanw/esbuild/issues/2255))

You can configure the "loader" for a specific file extension in esbuild, which is a way of telling esbuild how it should treat that file. For example, the `text` loader means the file is imported as a string while the `binary` loader means the file is imported as a `Uint8Array`. If you wanted the imported file to stay a separate file, previously the only option was the `file` loader (which is intended to be similar to Webpack's [`file-loader`](https://v4.webpack.js.org/loaders/file-loader/) package). This loader copies the file to the output directory and imports the path to that output file as a string. This is useful for a web application because you can refer to resources such as `.png` images by importing them for their URL. However, it's not helpful if you need the imported file to stay a separate file but to still behave the way it normally would when the code is run without bundling.

With this release, there is now a new loader called `copy` that copies the loaded file to the output directory and then rewrites the path of the import statement or `require()` call to point to the copied file instead of the original file. This will automatically add a content hash to the output name by default (which can be configured with the `--asset-names=` setting). You can use this by specifying `copy` for a specific file extension, such as with `--loader:.png=copy`.

* Fix a regression in arrow function lowering ([#2302](https://github.com/evanw/esbuild/pull/2302))

This release fixes a regression with lowering arrow functions to function expressions in ES5. This feature was introduced in version 0.7.2 and regressed in version 0.14.30.
Expand Down
2 changes: 1 addition & 1 deletion cmd/esbuild/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ var helpText = func(colors logger.Colors) string {
is browser and cjs when platform is node)
--loader:X=L Use loader L to load file extension X, where L is
one of: js | jsx | ts | tsx | css | json | text |
base64 | file | dataurl | binary
base64 | file | dataurl | binary | copy
--minify Minify the output (sets all --minify-* flags)
--outdir=... The output directory (for multiple entry points)
--outfile=... The output file (for one entry point)
Expand Down
6 changes: 5 additions & 1 deletion internal/ast/ast.go
Original file line number Diff line number Diff line change
Expand Up @@ -126,9 +126,13 @@ type ImportRecord struct {
ErrorHandlerLoc logger.Loc

// The resolved source index for an internal import (within the bundle) or
// nil for an external import (not included in the bundle)
// invalid for an external import (not included in the bundle)
SourceIndex Index32

// Files imported via the "copy" loader use this instead of "SourceIndex"
// because they are sort of like external imports, and are not bundled.
CopySourceIndex Index32

Flags ImportRecordFlags
Kind ImportKind
}
Expand Down
81 changes: 58 additions & 23 deletions internal/bundler/bundler.go
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,18 @@ func parseFile(args parseArgs) {
result.ok = true

// Mark that this file is from the "file" loader
result.file.inputFile.UniqueKeyForFileLoader = uniqueKey
result.file.inputFile.UniqueKeyForAdditionalFile = uniqueKey

case config.LoaderCopy:
uniqueKey := fmt.Sprintf("%sA%08d", args.uniqueKeyPrefix, args.sourceIndex)
uniqueKeyPath := uniqueKey + source.KeyPath.IgnoredSuffix
result.file.inputFile.Repr = &graph.CopyRepr{
URLForCode: uniqueKeyPath,
}
result.ok = true

// Mark that this file is from the "copy" loader
result.file.inputFile.UniqueKeyForAdditionalFile = uniqueKey

default:
var message string
Expand Down Expand Up @@ -358,9 +369,8 @@ func parseFile(args parseArgs) {

// Run the resolver on the parse thread so it's not run on the main thread.
// That way the main thread isn't blocked if the resolver takes a while.
if args.options.Mode == config.ModeBundle && !args.skipResolve {
if recordsPtr := result.file.inputFile.Repr.ImportRecords(); args.options.Mode == config.ModeBundle && !args.skipResolve && recordsPtr != nil {
// Clone the import records because they will be mutated later
recordsPtr := result.file.inputFile.Repr.ImportRecords()
records := append([]ast.ImportRecord{}, *recordsPtr...)
*recordsPtr = records
result.resolveResults = make([]*resolver.ResolveResult, len(records))
Expand Down Expand Up @@ -1126,7 +1136,7 @@ func ScanBundle(
s.preprocessInjectedFiles()
entryPointMeta := s.addEntryPoints(entryPoints)
s.scanAllDependencies()
files := s.processScannedFiles()
files := s.processScannedFiles(entryPointMeta)

return Bundle{
fs: fs,
Expand Down Expand Up @@ -1643,8 +1653,8 @@ func (s *scanner) scanAllDependencies() {
}

// Don't try to resolve paths if we're not bundling
if s.options.Mode == config.ModeBundle {
records := *result.file.inputFile.Repr.ImportRecords()
if recordsPtr := result.file.inputFile.Repr.ImportRecords(); s.options.Mode == config.ModeBundle && recordsPtr != nil {
records := *recordsPtr
for importRecordIndex := range records {
record := &records[importRecordIndex]

Expand Down Expand Up @@ -1690,12 +1700,18 @@ func (s *scanner) scanAllDependencies() {
}
}

func (s *scanner) processScannedFiles() []scannerFile {
func (s *scanner) processScannedFiles(entryPointMeta []graph.EntryPoint) []scannerFile {
s.timer.Begin("Process scanned files")
defer s.timer.End("Process scanned files")

// Build a set of entry point source indices for quick lookup
entryPointSourceIndices := make(map[uint32]bool, len(entryPointMeta))
for _, meta := range entryPointMeta {
entryPointSourceIndices[meta.SourceIndex] = true
}

// Now that all files have been scanned, process the final file import records
for i, result := range s.results {
for sourceIndex, result := range s.results {
if !result.ok {
continue
}
Expand All @@ -1710,8 +1726,8 @@ func (s *scanner) processScannedFiles() []scannerFile {
}

// Don't try to resolve paths if we're not bundling
if s.options.Mode == config.ModeBundle {
records := *result.file.inputFile.Repr.ImportRecords()
if recordsPtr := result.file.inputFile.Repr.ImportRecords(); s.options.Mode == config.ModeBundle && recordsPtr != nil {
records := *recordsPtr
tracker := logger.MakeLineColumnTracker(&result.file.inputFile.Source)

for importRecordIndex := range records {
Expand Down Expand Up @@ -1743,6 +1759,7 @@ func (s *scanner) processScannedFiles() []scannerFile {
}

// Generate metadata about each import
otherFile := &s.results[record.SourceIndex.GetIndex()].file
if s.options.NeedsMetafile {
if isFirstImport {
isFirstImport = false
Expand All @@ -1751,14 +1768,13 @@ func (s *scanner) processScannedFiles() []scannerFile {
sb.WriteString(",\n ")
}
sb.WriteString(fmt.Sprintf("{\n \"path\": %s,\n \"kind\": %s\n }",
js_printer.QuoteForJSON(s.results[record.SourceIndex.GetIndex()].file.inputFile.Source.PrettyPath, s.options.ASCIIOnly),
js_printer.QuoteForJSON(otherFile.inputFile.Source.PrettyPath, s.options.ASCIIOnly),
js_printer.QuoteForJSON(record.Kind.StringForMetafile(), s.options.ASCIIOnly)))
}

switch record.Kind {
case ast.ImportAt, ast.ImportAtConditional:
// Using a JavaScript file with CSS "@import" is not allowed
otherFile := &s.results[record.SourceIndex.GetIndex()].file
if _, ok := otherFile.inputFile.Repr.(*graph.JSRepr); ok {
s.log.AddError(&tracker, record.Range,
fmt.Sprintf("Cannot import %q into a CSS file", otherFile.inputFile.Source.PrettyPath))
Expand All @@ -1769,7 +1785,6 @@ func (s *scanner) processScannedFiles() []scannerFile {

case ast.ImportURL:
// Using a JavaScript or CSS file with CSS "url()" is not allowed
otherFile := &s.results[record.SourceIndex.GetIndex()].file
switch otherRepr := otherFile.inputFile.Repr.(type) {
case *graph.CSSRepr:
s.log.AddError(&tracker, record.Range,
Expand All @@ -1783,11 +1798,18 @@ func (s *scanner) processScannedFiles() []scannerFile {
}
}

// If the imported file uses the "copy" loader, then move it from
// "SourceIndex" to "CopySourceIndex" so we don't end up bundling it.
if _, ok := otherFile.inputFile.Repr.(*graph.CopyRepr); ok {
record.CopySourceIndex = record.SourceIndex
record.SourceIndex = ast.Index32{}
continue
}

// If an import from a JavaScript file targets a CSS file, generate a
// JavaScript stub to ensure that JavaScript files only ever import
// other JavaScript files.
if _, ok := result.file.inputFile.Repr.(*graph.JSRepr); ok {
otherFile := &s.results[record.SourceIndex.GetIndex()].file
if css, ok := otherFile.inputFile.Repr.(*graph.CSSRepr); ok {
if s.options.WriteToStdout {
s.log.AddError(&tracker, record.Range,
Expand Down Expand Up @@ -1879,14 +1901,23 @@ func (s *scanner) processScannedFiles() []scannerFile {

result.file.jsonMetadataChunk = sb.String()

// If this file is from the "file" loader, generate an additional file
if result.file.inputFile.UniqueKeyForFileLoader != "" {
// If this file is from the "file" or "copy" loaders, generate an additional file
if result.file.inputFile.UniqueKeyForAdditionalFile != "" {
bytes := []byte(result.file.inputFile.Source.Contents)
template := s.options.AssetPathTemplate

// Use the entry path template instead of the asset path template if this
// file is an entry point and uses the "copy" loader. With the "file" loader
// the JS stub is the entry point, but with the "copy" loader the file is
// the entry point itself.
if result.file.inputFile.Loader == config.LoaderCopy && entryPointSourceIndices[uint32(sourceIndex)] {
template = s.options.EntryPathTemplate
}

// Add a hash to the file name to prevent multiple files with the same name
// but different contents from colliding
var hash string
if config.HasPlaceholder(s.options.AssetPathTemplate, config.HashPlaceholder) {
if config.HasPlaceholder(template, config.HashPlaceholder) {
h := xxhash.New()
h.Write(bytes)
hash = hashForFileName(h.Sum(nil))
Expand All @@ -1902,9 +1933,9 @@ func (s *scanner) processScannedFiles() []scannerFile {
/* customFilePath */ "",
)

// Apply the asset path template
// Apply the path template
templateExt := strings.TrimPrefix(originalExt, ".")
relPath := config.TemplateToString(config.SubstituteTemplate(s.options.AssetPathTemplate, config.PathPlaceholders{
relPath := config.TemplateToString(config.SubstituteTemplate(template, config.PathPlaceholders{
Dir: &dir,
Name: &base,
Hash: &hash,
Expand Down Expand Up @@ -1933,7 +1964,7 @@ func (s *scanner) processScannedFiles() []scannerFile {
}}
}

s.results[i] = result
s.results[sourceIndex] = result
}

// The linker operates on an array of files, so construct that now. This
Expand Down Expand Up @@ -2253,9 +2284,13 @@ func findReachableFiles(files []graph.InputFile, entryPoints []graph.EntryPoint)
if repr, ok := file.Repr.(*graph.JSRepr); ok && repr.CSSSourceIndex.IsValid() {
visit(repr.CSSSourceIndex.GetIndex())
}
for _, record := range *file.Repr.ImportRecords() {
if record.SourceIndex.IsValid() {
visit(record.SourceIndex.GetIndex())
if recordsPtr := file.Repr.ImportRecords(); recordsPtr != nil {
for _, record := range *recordsPtr {
if record.SourceIndex.IsValid() {
visit(record.SourceIndex.GetIndex())
} else if record.CopySourceIndex.IsValid() {
visit(record.CopySourceIndex.GetIndex())
}
}
}

Expand Down
122 changes: 122 additions & 0 deletions internal/bundler/bundler_loader_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -959,3 +959,125 @@ func TestLoaderDataURLUnknownMIME(t *testing.T) {
},
})
}

// TestLoaderCopyWithBundleFromJS bundles a JavaScript entry point that
// imports an asset whose ".file" extension is mapped to the "copy" loader.
// This is a snapshot test: it exercises the path where a JS import record
// resolves to a copy-loaded file in bundle mode.
func TestLoaderCopyWithBundleFromJS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import x from "../assets/some.file"
				console.log(x)
			`,
			// The asset itself; the copy loader should emit it as a separate
			// output file rather than bundling its contents.
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".file": config.LoaderCopy,
			},
		},
	})
}

// TestLoaderCopyWithBundleFromCSS bundles a CSS entry point that references
// an asset via "url()" where the asset's ".file" extension is mapped to the
// "copy" loader. Covers the CSS import-record path for copy-loaded files.
func TestLoaderCopyWithBundleFromCSS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.css": `
				body {
					background: url(../assets/some.file);
				}
			`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{"/Users/user/project/src/entry.css"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".css":  config.LoaderCSS,
				".file": config.LoaderCopy,
			},
		},
	})
}

// TestLoaderCopyWithBundleEntryPoint uses the same copy-loaded asset both as
// an import target (from JS and from CSS) and as an entry point in its own
// right. When the copy-loaded file is itself an entry point, the bundler uses
// the entry path template rather than the asset path template for its output
// name (see the entry-point check in processScannedFiles).
func TestLoaderCopyWithBundleEntryPoint(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js": `
				import x from "../assets/some.file"
				console.log(x)
			`,
			"/Users/user/project/src/entry.css": `
				body {
					background: url(../assets/some.file);
				}
			`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{
			"/Users/user/project/src/entry.js",
			"/Users/user/project/src/entry.css",
			// The copy-loaded file is deliberately also an entry point here.
			"/Users/user/project/assets/some.file",
		},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".css":  config.LoaderCSS,
				".file": config.LoaderCopy,
			},
		},
	})
}

// TestLoaderCopyWithTransform runs the copy loader in pass-through mode
// (no bundling), with the copy-loaded file given directly as an entry point
// alongside a normal JS entry point.
func TestLoaderCopyWithTransform(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js":     `console.log('entry')`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{
			"/Users/user/project/src/entry.js",
			"/Users/user/project/assets/some.file",
		},
		options: config.Options{
			Mode:          config.ModePassThrough,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".file": config.LoaderCopy,
			},
		},
	})
}

// TestLoaderCopyWithFormat runs the copy loader in format-conversion mode
// (ModeConvertFormat with an IIFE output format) rather than bundle mode,
// with the copy-loaded file given directly as an entry point.
func TestLoaderCopyWithFormat(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/Users/user/project/src/entry.js":     `console.log('entry')`,
			"/Users/user/project/assets/some.file": `stuff`,
		},
		entryPaths: []string{
			"/Users/user/project/src/entry.js",
			"/Users/user/project/assets/some.file",
		},
		options: config.Options{
			Mode:          config.ModeConvertFormat,
			OutputFormat:  config.FormatIIFE,
			AbsOutputBase: "/Users/user/project",
			AbsOutputDir:  "/out",
			ExtensionToLoader: map[string]config.Loader{
				".js":   config.LoaderJS,
				".file": config.LoaderCopy,
			},
		},
	})
}