diff --git a/.github/workflows/turborepo-release.yml b/.github/workflows/turborepo-release.yml index a9703eded8039..a9822f537e5ea 100644 --- a/.github/workflows/turborepo-release.yml +++ b/.github/workflows/turborepo-release.yml @@ -61,7 +61,8 @@ jobs: outputs: stage-branch: "${{ steps.stage.outputs.STAGE_BRANCH }}" - smoke-test: + go-smoke-test: + name: Go Unit Tests runs-on: ubuntu-latest needs: [stage] steps: @@ -75,13 +76,49 @@ jobs: with: github-token: "${{ secrets.GITHUB_TOKEN }}" target: ${{ matrix.os.name }} - - name: Run Unit Tests + - name: Run Go Unit Tests run: turbo run test --filter=cli --color + rust-smoke-test: + name: Rust Unit Tests + runs-on: ubuntu-latest + needs: [stage] + steps: + - name: Show Stage Commit + run: echo "${{ needs.stage.outputs.stage-branch }}" + - uses: actions/checkout@v3 + with: + ref: ${{ needs.stage.outputs.stage-branch }} + - name: Build turborepo CLI from source + uses: ./.github/actions/setup-turborepo-environment + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + target: ${{ matrix.os.name }} + - name: Run Rust Unit Tests + run: cargo tr-test + + js-smoke-test: + name: JS Package Tests + runs-on: ubuntu-latest + needs: [stage] + steps: + - name: Show Stage Commit + run: echo "${{ needs.stage.outputs.stage-branch }}" + - uses: actions/checkout@v3 + with: + ref: ${{ needs.stage.outputs.stage-branch }} + - name: Build turborepo CLI from source + uses: ./.github/actions/setup-turborepo-environment + with: + github-token: "${{ secrets.GITHUB_TOKEN }}" + target: ${{ matrix.os.name }} + - name: Run JS Package Tests + run: turbo run check-types test --filter="./packages/*" --filter="\!@vercel/*" --color + build-go-darwin: name: "Build Go for macOS" runs-on: macos-latest - needs: [stage, smoke-test] + needs: [stage, go-smoke-test, rust-smoke-test, js-smoke-test] steps: - name: Show Stage Commit run: echo "${{ needs.stage.outputs.stage-branch }}" @@ -118,7 +155,7 @@ jobs: build-go-cross: name: "Build Go for Windows and 
Linux" runs-on: ubuntu-latest - needs: [stage, smoke-test] + needs: [stage, go-smoke-test, rust-smoke-test, js-smoke-test] container: image: docker://ghcr.io/vercel/turbo-cross:v1.18.5 steps: @@ -165,7 +202,7 @@ jobs: build-rust: name: "Build Rust" - needs: [stage, smoke-test] + needs: [stage, go-smoke-test, rust-smoke-test, js-smoke-test] strategy: fail-fast: false matrix: diff --git a/.prettierignore b/.prettierignore index 0964dfb8a7e78..8a36bfdd1a555 100644 --- a/.prettierignore +++ b/.prettierignore @@ -25,3 +25,6 @@ crates/turbopack-ecmascript/tests/analyzer/graph crates/turbopack-ecmascript/tests/tree-shaker crates/next-transform-strip-page-exports/tests crates/next-transform-dynamic/tests + +# generators +*.hbs diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7f5cfa69a900c..cfd92d47e03f9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -15,6 +15,7 @@ Thanks for your interest in contributing to Turbo! - [Updating `turbo`](#updating-turbo) - [Manually testing `turbo`](#manually-testing-turbo) - [Publishing `turbo` to the npm registry](#publishing-turbo-to-the-npm-registry) + - [Creating a new release blog post](#creating-a-new-release-blog-post) - [Adding A New Crate](#adding-a-new-crate) - [Contributing to Turbopack](#contributing-to-turbopack) - [Turbopack Architecture](#turbopack-architecture) @@ -160,6 +161,24 @@ These lists are by no means exhaustive. Feel free to add to them with other stra See [the publishing guide](./release.md#release-turborepo). +## Creating a new release blog post + +Creating a new release post can be done via a turborepo generator. Run the following command from anywhere within the repo: + +```bash +turbo generate run "blog - release post" +``` + +This will walk you through creating a new blog post from start to finish. 
+ +NOTE: If you would like to update the stats (github stars / npm downloads / time saved) for an existing blog post that has yet to be published (useful if time has passed since the blog post was created, and up to date stats are required before publishing) - run: + +```bash +turbo generate run "blog - update release post stats" +``` + +and choose the blog post you would like to update. + ## Adding A New Crate When adding a new crate to the repo, it is essential that it is included/excluded from the diff --git a/Cargo.lock b/Cargo.lock index aa4bc265938a3..fadaef7b054a7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -197,9 +197,9 @@ dependencies = [ [[package]] name = "ast_node" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52f7fd7740c5752c16281a1c1f9442b1e69ba41738acde85dc604aaf3ce41890" +checksum = "c704e2f6ee1a98223f5a7629a6ef0f3decb3b552ed282889dc957edff98ce1e6" dependencies = [ "pmutil", "proc-macro2", @@ -668,9 +668,9 @@ dependencies = [ [[package]] name = "binding_macros" -version = "0.49.41" +version = "0.50.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7148cb5385a7aba0e7b54d188b91d452a5a2c9f51abb982df2c7194b737005c" +checksum = "558a7f9d50a611bf724d521bfa09972259b5e828b22a522680a29f796348f4ed" dependencies = [ "anyhow", "console_error_panic_hook", @@ -2193,13 +2193,13 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0" +checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" dependencies = [ "errno-dragonfly", "libc", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -3553,9 +3553,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.142" +version = "0.2.143" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "6a987beff54b60ffa6d51982e1aa1146bc42f19bd26be28b0586f252fccf5317" +checksum = "edc207893e85c5d6be840e969b496b53d94cec8be2d501b214f50daa97fa8024" [[package]] name = "libfuzzer-sys" @@ -3773,9 +3773,9 @@ dependencies = [ [[package]] name = "mdxjs" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9bef082151ac4aba3884306e47fd2c1afcc2e208a9cb9a67c4ecfb96bb5d0c" +checksum = "c88a71be094e8cf4f13b62e6ba304472332f8890e7cdf15098dc6512b812fdab" dependencies = [ "markdown", "serde", @@ -4006,9 +4006,9 @@ dependencies = [ [[package]] name = "modularize_imports" -version = "0.27.7" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8907a5e244f284ed9435687cfdfe0446a6ddeabc3948c26323ecd3389958d26" +checksum = "8655bebb1bba9b4ece2a07c24dc1794fa0b3996f45de13d0d0673f1491369924" dependencies = [ "convert_case 0.5.0", "handlebars", @@ -6423,9 +6423,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "styled_components" -version = "0.54.7" +version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ac87c3150a2b7d834d1469d4b06d05fbe111b6378ccfab98f58f357be062417" +checksum = "335d6e59c6c19a92fa8169acec235c40e5fbae41d2b58de39b550268827a4aac" dependencies = [ "Inflector", "once_cell", @@ -6437,9 +6437,9 @@ dependencies = [ [[package]] name = "styled_jsx" -version = "0.31.7" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88365f6c83fa4bfb4b29a75ca1d353d940f4ac44a535907467ad624bfd90f24d" +checksum = "f90351bac51f52a2283c0338871d1a8471d500223b0493044aa9314e8e7e73f6" dependencies = [ "easy-error", "swc_core", @@ -6482,9 +6482,9 @@ dependencies = [ [[package]] name = "swc" -version = "0.260.41" +version = "0.261.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5244c29808af3900e02287da1b5a781d9feca5a332c205932abae074e7d2d40a" +checksum = "4ccfd95a68272ef53f41b42ad0d24dab8c29a92b1eea9bd1cea822fc8419b341" dependencies = [ "ahash 0.7.6", "anyhow", @@ -6545,10 +6545,11 @@ dependencies = [ [[package]] name = "swc_atoms" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "593c2f3e4cea60ddc4179ed731cabebe7eacec209d9e76a3bbcff4b2b020e3f5" +checksum = "5c17c2810ab8281e81fd88e7a4356efbf56481087bf801baa84e757316a4564d" dependencies = [ + "bytecheck", "once_cell", "rkyv", "rustc-hash", @@ -6560,9 +6561,9 @@ dependencies = [ [[package]] name = "swc_bundler" -version = "0.213.31" +version = "0.214.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "715835a6f035f1bccf40fb1f8afa95c6d76107f5f1d27735b50490b90685699c" +checksum = "5c7c14fccf046cca0de34bd21f5be4840a5ff6f827a8916cc594b241f7fca767" dependencies = [ "ahash 0.7.6", "anyhow", @@ -6607,15 +6608,16 @@ dependencies = [ [[package]] name = "swc_common" -version = "0.31.5" +version = "0.31.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f876f826866e402da364d77aa97448fdf67cb4aeb6a5f1c0de39cacf35aa89a" +checksum = "5e727f62843d9383511b7844ec3c28a56e416331ec564af3e6d4aafa0190429d" dependencies = [ "ahash 0.7.6", "anyhow", "ast_node", "atty", "better_scoped_tls", + "bytecheck", "cfg-if 1.0.0", "either", "from_variant", @@ -6665,9 +6667,9 @@ dependencies = [ [[package]] name = "swc_core" -version = "0.75.41" +version = "0.76.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c604661086151ef0cfe2dff7740cd5fbdd06685d2eb03367024c3990d214b168" +checksum = "6bc618c58129b1e311b55457a26c8d0fcdccf028156021ddb2bd4e0673c27222" dependencies = [ "binding_macros", "swc", @@ -6710,9 +6712,9 @@ dependencies = [ [[package]] name = "swc_css_ast" -version = "0.137.5" +version = "0.137.9" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2fadc4b9192ee616e7cba2c1612ae79a616546e23856ab23edb5db9c43e9612a" +checksum = "b09d67e21eb2f2d6287502d7098c91e43683172836b76e3f09a7b0aaedbe18f1" dependencies = [ "is-macro", "serde", @@ -6723,9 +6725,9 @@ dependencies = [ [[package]] name = "swc_css_codegen" -version = "0.147.6" +version = "0.147.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d27c1d2b7ebf21cc5cfe5e7b3d7e08a038a5aa7fc93ea263aa4bd7dc0641bca" +checksum = "c353568b45510c8756d98715ea5154c888ada4aafa9089ef52023f993fa11c26" dependencies = [ "auto_impl", "bitflags 2.2.1", @@ -6753,9 +6755,9 @@ dependencies = [ [[package]] name = "swc_css_compat" -version = "0.23.6" +version = "0.23.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b8dbb86390644c9c6d230f294d5cf528964d84206a65bd48b08b1875b2ef0d4" +checksum = "21a3cdfeaa3590122cbe0d796263b13ac184ebb217624c36dda8398eeae17420" dependencies = [ "bitflags 2.2.1", "once_cell", @@ -6770,9 +6772,9 @@ dependencies = [ [[package]] name = "swc_css_modules" -version = "0.25.6" +version = "0.25.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "650f2adbf58150567564846accb31a22278735192e9f40a6dc9e319cfc1ede99" +checksum = "0a75ba7e362d0a3ac7d95a51e267add2ca59d526a4cc7cb2c2426a4740c9c7ab" dependencies = [ "rustc-hash", "serde", @@ -6786,9 +6788,9 @@ dependencies = [ [[package]] name = "swc_css_parser" -version = "0.146.6" +version = "0.146.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a14a42f39156a50d34f1e3d351c534b4929b27b7b47135055fa2a9a06649ea7" +checksum = "e2ca0ad85a9bcc293b4bbc3472459f794376d165ace1f5c103cc983b96b845e5" dependencies = [ "bitflags 2.2.1", "lexical", @@ -6800,9 +6802,9 @@ dependencies = [ [[package]] name = "swc_css_prefixer" -version = "0.149.7" +version = "0.149.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6c12cf20c0e40ea4793ac3d23a04969e9badfc3f9cc478c562f60f1e302fd80f" +checksum = "6b95feb3eb0cd11379f4153894b276469ca6499dc931ec6b62dce38238cf6f72" dependencies = [ "once_cell", "preset_env_base", @@ -6817,9 +6819,9 @@ dependencies = [ [[package]] name = "swc_css_utils" -version = "0.134.5" +version = "0.134.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6360cbced8a80618320b5d143299bb8c9684c71a46facc01fe648b198bf0200d" +checksum = "4d7db9b3b4439c536f8a32e14c577e707d34945966e93e4e51f8c1280b100324" dependencies = [ "once_cell", "serde", @@ -6832,9 +6834,9 @@ dependencies = [ [[package]] name = "swc_css_visit" -version = "0.136.5" +version = "0.136.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e6749283744944ee224fd0199ae218fdc4c3b3d1b51e176ee43caf47ae0d67a" +checksum = "8b1b04f1a4aaeeefe555d6d2876a897843410fff183fd274aa823f179a87bac6" dependencies = [ "serde", "swc_atoms", @@ -6845,11 +6847,12 @@ dependencies = [ [[package]] name = "swc_ecma_ast" -version = "0.103.5" +version = "0.104.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1087786027e9e938588c0a66d45d1044a5e2285922b0928e023303f03a60900f" +checksum = "450c4c5cf7e678dafe5de55bb2bf09204ad4421a3238a370e9d200b20d24c69c" dependencies = [ "bitflags 2.2.1", + "bytecheck", "is-macro", "num-bigint", "rkyv", @@ -6863,9 +6866,9 @@ dependencies = [ [[package]] name = "swc_ecma_codegen" -version = "0.138.15" +version = "0.139.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "807e07eda1b7f45971ec9a65c9ac032bf53acd0b02dae4456bbcfef3d47da95c" +checksum = "47f9ffdfbd816a80b90980a835dd47b3592a533d3a5e7453d8113645df7067fe" dependencies = [ "memchr", "num-bigint", @@ -6895,9 +6898,9 @@ dependencies = [ [[package]] name = "swc_ecma_ext_transforms" -version = "0.102.12" +version = "0.103.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d7a2bdc37df46e96aedfbb422fc44f48ca3ee69109e8fe0f130d8454e67dce59" +checksum = "bc792bfd2ec5fdade86da3197745641a635bce6cb8cbdc7b7c438047c6a87807" dependencies = [ "phf", "swc_atoms", @@ -6909,9 +6912,9 @@ dependencies = [ [[package]] name = "swc_ecma_lints" -version = "0.81.18" +version = "0.82.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb9588ec5320276cda7f4770dcedf00224014702b7b314c1c27590f74247ad3d" +checksum = "8abe0bb69b427a24f7a0225465e1644d9668cd3abae36893787b787e3209cea7" dependencies = [ "ahash 0.7.6", "auto_impl", @@ -6930,9 +6933,9 @@ dependencies = [ [[package]] name = "swc_ecma_loader" -version = "0.43.7" +version = "0.43.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b853be4b7380384dc3bac5564697c1ba30b47eff94b81449567032fa44d3d0c" +checksum = "6c414e3f97521776589995a575b6187eeef2f67876569d3b61ab4542fbbf64e1" dependencies = [ "ahash 0.7.6", "anyhow", @@ -6952,9 +6955,9 @@ dependencies = [ [[package]] name = "swc_ecma_minifier" -version = "0.180.31" +version = "0.181.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e31075982125fca1d7e023d05aa779f4cb566494fbaba1a0cf5db4ef1a573b4a" +checksum = "0fb09840fcbe65459e2c08027deb8856aa5a4b42a235b0698a1fc3d4448ee4b1" dependencies = [ "ahash 0.7.6", "arrayvec 0.7.2", @@ -6988,9 +6991,9 @@ dependencies = [ [[package]] name = "swc_ecma_parser" -version = "0.133.12" +version = "0.134.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fa9f6dab04b0e4d148fab7069ecb896ae6e9a3e3ec94a493d52d1d16a780cda" +checksum = "307ff662ba0ce202ae397e75c37e89ff8ec338e76ffab1973414b9cb98641892" dependencies = [ "either", "lexical", @@ -7008,9 +7011,9 @@ dependencies = [ [[package]] name = "swc_ecma_preset_env" -version = "0.194.28" +version = "0.195.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0a377e83fbf99e2fa78126d5d5db2c93e75c0c348e5b06e55460137f32b3640" +checksum 
= "649408dbf1fd8a40061bab90499545956cb5234756305279fddd86f23b0dae73" dependencies = [ "ahash 0.7.6", "anyhow", @@ -7033,9 +7036,9 @@ dependencies = [ [[package]] name = "swc_ecma_quote_macros" -version = "0.44.12" +version = "0.45.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d225ec68bc21334b840d9a0c9f20d26a8fa2854708d549ef8572be54c9b033" +checksum = "f3e13fd2762b355a5a24e0cd44c92e34d83b592104618fa2240653482b81480f" dependencies = [ "anyhow", "pmutil", @@ -7063,9 +7066,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms" -version = "0.217.27" +version = "0.218.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dea5aef62b3ecbc1ea2557c27c500ed9b452abaf13d6142ce8bc553493341086" +checksum = "c89fc45152e5f46e8202ce0ce614ea2fedb48f45d6807de683592ad03abc0fbe" dependencies = [ "swc_atoms", "swc_common", @@ -7083,9 +7086,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_base" -version = "0.126.18" +version = "0.127.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96b6521512d072b082071a569d1aaad638bab56b9a18c9d88edc436055fe13ca" +checksum = "9e121602717fb551f898ccfb4e39ac4b86ff9126e1859d1869a75edf7d66f891" dependencies = [ "better_scoped_tls", "bitflags 2.2.1", @@ -7107,9 +7110,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_classes" -version = "0.115.18" +version = "0.116.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "959c65d67ba1bbb0f7b70042e0d75e92e7c68df9d98d3e3992ccbdd4e5c4fa2e" +checksum = "71df7d0eff27a91708ec6c2e57f0df5fc54ae980902da1a8fe19c480bd8da1bd" dependencies = [ "swc_atoms", "swc_common", @@ -7121,9 +7124,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_compat" -version = "0.152.19" +version = "0.153.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae0a611e9693f61e7e55413665a39b93e2af784acb1a77d4badeaadca64d35ce" +checksum = 
"a99ddc75f3814735eee4c7b959e33fa657eb857031e4c88731adbcf5e828762b" dependencies = [ "ahash 0.7.6", "arrayvec 0.7.2", @@ -7161,9 +7164,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_module" -version = "0.169.23" +version = "0.170.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5a0fc4624b1b07ca4928e3897888367c65deb1777a163b26c7a2e169160277" +checksum = "d3ec58c9a0bd37f2fe4a4e9c2d6ae4a94ca4b6d1f8475286c2d50b452c47a23b" dependencies = [ "Inflector", "ahash 0.7.6", @@ -7189,9 +7192,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_optimization" -version = "0.186.27" +version = "0.187.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74fe6203e8397ab483fe8f1bd676dce5a6e493b10ad287243cf740a08e3ec315" +checksum = "da30d614175de1db66d9440116fe76e88511be20e7eaed3227c92e0cf4868ea8" dependencies = [ "ahash 0.7.6", "dashmap", @@ -7215,9 +7218,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_proposal" -version = "0.160.21" +version = "0.161.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ed9af008da8b354ed0c27a910532163a3734dfbb5dab0a460ff2ebd6ebd7004" +checksum = "b5dd0f43cdb935ef9a3e2238b214e85d9902614b969b78be08d485f8d19c35c1" dependencies = [ "either", "rustc-hash", @@ -7235,9 +7238,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_react" -version = "0.172.25" +version = "0.173.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d393985bafe0680c03d3d016b4172fcb293499f97e82418d63673a11310029c3" +checksum = "508b91041606bb06e9d9d2ec01eedf2ee551708441a123d0bea0e2b114af5623" dependencies = [ "ahash 0.7.6", "base64 0.13.1", @@ -7261,9 +7264,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_testing" -version = "0.129.18" +version = "0.130.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3109919ceca5dc1a89721a2d3ed43917f7fc53eeb87433a020a810286b85ba0" +checksum 
= "278d60c82245e67cea4de48565aee5201f401be5da0a337936236c298f570fe7" dependencies = [ "ansi_term", "anyhow", @@ -7287,9 +7290,9 @@ dependencies = [ [[package]] name = "swc_ecma_transforms_typescript" -version = "0.176.26" +version = "0.177.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70968fd6bafd8511037ecadb3f89bfe97c3e8c4560dd04a5a291679a77eee84a" +checksum = "ee601491ed61add4d1fc93367416c5b551978c33d8115f4d23d90827645902ef" dependencies = [ "serde", "swc_atoms", @@ -7303,9 +7306,9 @@ dependencies = [ [[package]] name = "swc_ecma_usage_analyzer" -version = "0.12.12" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5cc3026867838b0ed45d7b341973beac5b1154c66d47963a328f7f373343d03" +checksum = "fd623fa7f37ea9375d145c27d96947fc4242e7713f8c5378b9bddeddf98a2a05" dependencies = [ "ahash 0.7.6", "indexmap", @@ -7321,9 +7324,9 @@ dependencies = [ [[package]] name = "swc_ecma_utils" -version = "0.116.12" +version = "0.117.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "547cce84256af2ce8b9ee44669872dc514c9e1fe737ab1bd517c4bd21038b7d8" +checksum = "1c124202a0e27efc8f71fb7ed95c7634585f2eeb86d1ef3fc5c9f0eef1a06762" dependencies = [ "indexmap", "num_cpus", @@ -7340,9 +7343,9 @@ dependencies = [ [[package]] name = "swc_ecma_visit" -version = "0.89.5" +version = "0.90.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4490a5ed042234d72986e1a0c8afb54291fcf82b42af78ea72507b52bcbe13dd" +checksum = "1006ae19756e69e1329c6be92ae1843d6a857e3f2912efbc70d08d9a0522dc67" dependencies = [ "num-bigint", "swc_atoms", @@ -7354,9 +7357,9 @@ dependencies = [ [[package]] name = "swc_emotion" -version = "0.30.7" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06838d609bf7d97d834b06ed2f6c32a593160e9c44977ace88038e249f2e9b43" +checksum = "0dfeb6d1c29a115d3ce2ad4e22b1867c14e6e0ca51e0742a481c9135b0802a46" 
dependencies = [ "base64 0.13.1", "byteorder", @@ -7384,9 +7387,9 @@ dependencies = [ [[package]] name = "swc_error_reporters" -version = "0.15.5" +version = "0.15.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3afbf2e52ddce38da1ee204252f7b9019a12176ca73aaa0fd0c36ded1ecbec7d" +checksum = "4ad32cbdb779bcae0f5f178347fd2d984ccc8393ea0dbcde351be0a8a2370ea2" dependencies = [ "anyhow", "miette 4.7.1", @@ -7397,9 +7400,9 @@ dependencies = [ [[package]] name = "swc_fast_graph" -version = "0.19.5" +version = "0.19.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95683baee47d2cbf10e0bf8ad14d4e8f6160674d9eb96b3ab560aa39fa37ccdc" +checksum = "3c41b07bb4ffefcc70272a2f1e3d4e1e1b6b5bd505b68040e28f79ad8d35ec9f" dependencies = [ "indexmap", "petgraph", @@ -7409,9 +7412,9 @@ dependencies = [ [[package]] name = "swc_graph_analyzer" -version = "0.20.6" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4812745a05bf856948b7ada6f1f1f8d2c4be0afa060844532798165b4c76416e" +checksum = "d1bdd413c31882cb8f54e70dcba20e175c36a499456f4f739feb92cc2c012034" dependencies = [ "ahash 0.7.6", "auto_impl", @@ -7444,9 +7447,9 @@ dependencies = [ [[package]] name = "swc_node_comments" -version = "0.18.5" +version = "0.18.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d3be08cc983291059f7a16a62ff297bdb83c1282cfe876416fd52b3466e791" +checksum = "80423f9f4f85218ca177c702b54b9fb66c3bd707430ccd78b581a8fdb2d8022f" dependencies = [ "ahash 0.7.6", "dashmap", @@ -7470,9 +7473,9 @@ dependencies = [ [[package]] name = "swc_plugin_proxy" -version = "0.32.5" +version = "0.33.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd5b2880508aedc964f4f52a4debc07c4b48212b45b44522d651c701b1a4d84" +checksum = "622c0b4d7708c3528fbe99f1daf97ec6c3770798f945f561b8e3f722d0db99de" dependencies = [ "better_scoped_tls", "rkyv", @@ -7484,9 +7487,9 @@ 
dependencies = [ [[package]] name = "swc_plugin_runner" -version = "0.94.19" +version = "0.95.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9290518378028a7d0cf1a8ca52f5b0934c44bc0862155b6a52ec1c97a378601b" +checksum = "791ba2be2bfbc127d4f1b9738a07ae6f07a2c3f5c97e3de32abe686dc2ed6407" dependencies = [ "anyhow", "enumset", @@ -7506,9 +7509,9 @@ dependencies = [ [[package]] name = "swc_relay" -version = "0.2.7" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaa02b6c66a7de1fe196d1787a5378a5fb91c67ec7acccd76052d6ec389b6d16" +checksum = "833baec21649cbf29221d0935090aac77d4268cc1923129b2dfac2dba6c7a415" dependencies = [ "once_cell", "regex", @@ -7521,9 +7524,9 @@ dependencies = [ [[package]] name = "swc_timer" -version = "0.19.6" +version = "0.19.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d525672140610b0797da5ee7a3f5bcb0dbf13940c84d63c9f966c0239f26cb7" +checksum = "4554d113671c1c960ebca0136b552e4b1308444b50e5ddd33dd5ad0bd086faa4" dependencies = [ "tracing", ] @@ -7764,9 +7767,9 @@ dependencies = [ [[package]] name = "testing" -version = "0.33.6" +version = "0.33.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77e5fcdfc6805d181431333b850f9fde170f654148e29ba97fd8033c7814e665" +checksum = "9f8aa132535a499b7d18cbf3a0ec31e7b66a496c118fec6a6e9ccdb42eeffe25" dependencies = [ "ansi_term", "difference", @@ -9300,9 +9303,9 @@ dependencies = [ name = "turborepo-scm" version = "0.1.0" dependencies = [ + "anyhow", "dunce", "git2 0.16.1", - "nom", "tempfile", "thiserror", "turbopath", @@ -9314,8 +9317,8 @@ version = "1.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ - "cfg-if 1.0.0", - "rand 0.8.5", + "cfg-if 0.1.10", + "rand 0.4.6", "static_assertions", ] @@ -9719,9 +9722,9 @@ dependencies = [ [[package]] name = 
"wai-bindgen-wasmer" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9367b98b4849e8910720d2b4e9ce3d35bbfa3b6120154d455b57416bd0bf6f0f" +checksum = "2e1e0eda6f3b18f1b630eabc3d82b3b8ca74749b89e73b7bba0999726ebfae04" dependencies = [ "anyhow", "bitflags 1.3.2", @@ -9904,9 +9907,9 @@ dependencies = [ [[package]] name = "wasmer" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8472807bd8d5062aef0a35681e1b6fbaed8fea88d97910870741100c03b8fef3" +checksum = "78caedecd8cb71ed47ccca03b68d69414a3d278bb031e6f93f15759344efdd52" dependencies = [ "bytes", "cfg-if 1.0.0", @@ -9914,6 +9917,7 @@ dependencies = [ "indexmap", "js-sys", "more-asserts", + "rustc-demangle", "serde", "serde-wasm-bindgen", "target-lexicon", @@ -9925,16 +9929,17 @@ dependencies = [ "wasmer-derive", "wasmer-types", "wasmer-vm", - "wasmparser", + "wasmparser 0.83.0", + "wasmparser 0.95.0", "wat", "winapi 0.3.9", ] [[package]] name = "wasmer-cache" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c8dcf5d253d30a2736b1bd876e09eb64bd1d7ed2b464a9288772cc797aa36c0" +checksum = "7f0de969b05cc3c11196beeb46e5868a3712a187d777ee94113f7258c2ec121c" dependencies = [ "blake3", "hex", @@ -9944,9 +9949,9 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e322cdfb8ed189d92cbb5c34acb319c0f04fb9799ed68e127717f255f8b246a" +checksum = "726a8450541af4a57c34af7b6973fdbfc79f896cc7e733429577dfd1d1687180" dependencies = [ "backtrace", "cfg-if 1.0.0", @@ -9957,20 +9962,19 @@ dependencies = [ "memmap2", "more-asserts", "region", - "rustc-demangle", "smallvec", "thiserror", "wasmer-types", "wasmer-vm", - "wasmparser", + "wasmparser 0.95.0", "winapi 0.3.9", ] [[package]] name = "wasmer-compiler-cranelift" -version = 
"3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5240177aca0d8322c890d17d4b1b87f23ccb45340f616f384655aaba18f51bd" +checksum = "a1e5633f90f372563ebbdf3f9799c7b29ba11c90e56cf9b54017112d2e656c95" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -9987,9 +9991,9 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb6858f330764b1041e68d3c824970064d2fbd8e27704180289fd248ff892c48" +checksum = "97901fdbaae383dbb90ea162cc3a76a9fa58ac39aec7948b4c0b9bbef9307738" dependencies = [ "proc-macro-error", "proc-macro2", @@ -9999,9 +10003,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d83345e3335fb9b21be6c394bc3d712522447bc8750db8a40ac9170627e3de35" +checksum = "67f1f2839f4f61509550e4ddcd0e658e19f3af862b51c79fda15549d735d659b" dependencies = [ "bytecheck", "enum-iterator 0.7.0", @@ -10015,9 +10019,9 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "3.2.1" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ca3cf9a2bb5919ae048231972440767efad7a693afaeb41332ab0796be1c884" +checksum = "043118ec4f16d1714fed3aab758b502b864bd865e1d5188626c9ad290100563f" dependencies = [ "backtrace", "cc", @@ -10042,9 +10046,9 @@ dependencies = [ [[package]] name = "wasmer-wasix" -version = "0.3.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "511532f5e07542a767eb56cb6812a381b86aad5f0a2848baae80a6db44e3b1e1" +checksum = "c216facb6a1aae257e38f2018a27b270765aa9d386166e28afecd4004c306cbc" dependencies = [ "anyhow", "async-trait", @@ -10092,9 +10096,9 @@ dependencies = [ [[package]] name = "wasmer-wasix-types" -version = "0.3.1" +version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eec8e2f60e476535824438dd23ce1ed52e86c3a9fc5d67a60c899f24dfa6dde" +checksum = "a34aaac6706d29f89a771f2a58bd7e93628ef65344a39d993bdd717c62aafc27" dependencies = [ "anyhow", "bitflags 1.3.2", @@ -10112,6 +10116,12 @@ dependencies = [ "wasmer-types", ] +[[package]] +name = "wasmparser" +version = "0.83.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "718ed7c55c2add6548cca3ddd6383d738cd73b892df400e96b9aa876f0141d7a" + [[package]] name = "wasmparser" version = "0.95.0" diff --git a/Cargo.toml b/Cargo.toml index 7cdfd196ff233..d34beb647cc17 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -61,14 +61,14 @@ opt-level = 3 [workspace.dependencies] # Keep consistent with preset_env_base through swc_core browserslist-rs = { version = "0.12.2" } -mdxjs = { version = "0.1.11" } -modularize_imports = { version = "0.27.7" } -styled_components = { version = "0.54.7" } -styled_jsx = { version = "0.31.7" } -swc_core = { version = "0.75.41" } -swc_emotion = { version = "0.30.7" } -swc_relay = { version = "0.2.7" } -testing = { version = "0.33.6" } +mdxjs = { version = "0.1.12" } +modularize_imports = { version = "0.29.0" } +styled_components = { version = "0.56.0" } +styled_jsx = { version = "0.33.0" } +swc_core = { version = "0.76.6" } +swc_emotion = { version = "0.32.0" } +swc_relay = { version = "0.4.0" } +testing = { version = "0.33.10" } auto-hash-map = { path = "crates/turbo-tasks-auto-hash-map" } node-file-trace = { path = "crates/node-file-trace", default-features = false } diff --git a/cli/Makefile b/cli/Makefile index 90db191661e67..a4914378e3b5c 100644 --- a/cli/Makefile +++ b/cli/Makefile @@ -168,12 +168,14 @@ cmd/turbo/version.go: ../version.txt mv cmd/turbo/version.go.txt cmd/turbo/version.go build: install - cd $(CLI_DIR)/../ && pnpm install --filter=create-turbo && pnpm turbo build --filter=create-turbo... 
- cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/codemod && pnpm turbo build --filter=@turbo/codemod... - cd $(CLI_DIR)/../ && pnpm install --filter=turbo-ignore && pnpm turbo build --filter=turbo-ignore... - cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/workspaces && pnpm turbo build --filter=@turbo/workspaces... - cd $(CLI_DIR)/../ && pnpm install --filter=eslint-plugin-turbo && pnpm turbo build --filter=eslint-plugin-turbo... - cd $(CLI_DIR)/../ && pnpm install --filter=eslint-config-turbo && pnpm turbo build --filter=eslint-config-turbo... + cd $(CLI_DIR)/../ && pnpm build:turbo + cd $(CLI_DIR)/../ && pnpm install --filter=create-turbo && pnpm turbo-prebuilt build --filter=create-turbo... + cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/codemod && pnpm turbo-prebuilt build --filter=@turbo/codemod... + cd $(CLI_DIR)/../ && pnpm install --filter=turbo-ignore && pnpm turbo-prebuilt build --filter=turbo-ignore... + cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/workspaces && pnpm turbo-prebuilt build --filter=@turbo/workspaces... + cd $(CLI_DIR)/../ && pnpm install --filter=@turbo/gen && pnpm turbo-prebuilt build --filter=@turbo/gen... + cd $(CLI_DIR)/../ && pnpm install --filter=eslint-plugin-turbo && pnpm turbo-prebuilt build --filter=eslint-plugin-turbo... + cd $(CLI_DIR)/../ && pnpm install --filter=eslint-config-turbo && pnpm turbo-prebuilt build --filter=eslint-config-turbo... 
.PHONY: prepublish prepublish: compile-protos cmd/turbo/version.go @@ -239,6 +241,7 @@ stage-release: cmd/turbo/version.go cd $(CLI_DIR)/../packages/turbo-codemod && pnpm version "$(TURBO_VERSION)" --allow-same-version cd $(CLI_DIR)/../packages/turbo-ignore && pnpm version "$(TURBO_VERSION)" --allow-same-version cd $(CLI_DIR)/../packages/turbo-workspaces && pnpm version "$(TURBO_VERSION)" --allow-same-version + cd $(CLI_DIR)/../packages/turbo-gen && pnpm version "$(TURBO_VERSION)" --allow-same-version cd $(CLI_DIR)/../packages/eslint-plugin-turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version cd $(CLI_DIR)/../packages/eslint-config-turbo && pnpm version "$(TURBO_VERSION)" --allow-same-version @@ -266,6 +269,7 @@ publish-turbo: clean build cd $(CLI_DIR)/../packages/turbo-codemod && pnpm pack --pack-destination=$(CLI_DIR)/../ cd $(CLI_DIR)/../packages/turbo-ignore && pnpm pack --pack-destination=$(CLI_DIR)/../ cd $(CLI_DIR)/../packages/turbo-workspaces && pnpm pack --pack-destination=$(CLI_DIR)/../ + cd $(CLI_DIR)/../packages/turbo-gen && pnpm pack --pack-destination=$(CLI_DIR)/../ cd $(CLI_DIR)/../packages/eslint-plugin-turbo && pnpm pack --pack-destination=$(CLI_DIR)/../ cd $(CLI_DIR)/../packages/eslint-config-turbo && pnpm pack --pack-destination=$(CLI_DIR)/../ @@ -277,6 +281,7 @@ ifneq ($(SKIP_PUBLISH),--skip-publish) npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-codemod-$(TURBO_VERSION).tgz npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-ignore-$(TURBO_VERSION).tgz npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-workspaces-$(TURBO_VERSION).tgz + npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../turbo-gen-$(TURBO_VERSION).tgz npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../eslint-plugin-turbo-$(TURBO_VERSION).tgz npm publish -ddd --tag $(TURBO_TAG) $(CLI_DIR)/../eslint-config-turbo-$(TURBO_VERSION).tgz endif diff --git a/cli/cmd/turbo/version.go b/cli/cmd/turbo/version.go index a448da6f6913d..49d1fecb28795 100644 --- 
a/cli/cmd/turbo/version.go +++ b/cli/cmd/turbo/version.go @@ -1,3 +1,3 @@ package main -const turboVersion = "1.9.4-canary.11" +const turboVersion = "1.9.6" diff --git a/cli/internal/cache/cache.go b/cli/internal/cache/cache.go index 8b74272ed10dd..e5e24c1a9d133 100644 --- a/cli/internal/cache/cache.go +++ b/cli/internal/cache/cache.go @@ -129,7 +129,7 @@ func newSyncCache(opts Opts, repoRoot turbopath.AbsoluteSystemPath, client clien } if useHTTPCache { - implementation := newHTTPCache(opts, client, recorder) + implementation := newHTTPCache(opts, client, recorder, repoRoot) cacheImplementations = append(cacheImplementations, implementation) } diff --git a/cli/internal/cache/cache_http.go b/cli/internal/cache/cache_http.go index 1d345bf57ae8b..f335a954949bc 100644 --- a/cli/internal/cache/cache_http.go +++ b/cli/internal/cache/cache_http.go @@ -20,6 +20,7 @@ import ( "github.com/DataDog/zstd" "github.com/vercel/turbo/cli/internal/analytics" + "github.com/vercel/turbo/cli/internal/cacheitem" "github.com/vercel/turbo/cli/internal/tarpatch" "github.com/vercel/turbo/cli/internal/turbopath" ) @@ -251,102 +252,9 @@ func (cache *httpCache) retrieve(hash string) (bool, []turbopath.AnchoredSystemP return true, files, duration, nil } -// restoreTar returns posix-style repo-relative paths of the files it -// restored. In the future, these should likely be repo-relative system paths -// so that they are suitable for being fed into cache.Put for other caches. -// For now, I think this is working because windows also accepts /-delimited paths. 
func restoreTar(root turbopath.AbsoluteSystemPath, reader io.Reader) ([]turbopath.AnchoredSystemPath, error) { - files := []turbopath.AnchoredSystemPath{} - missingLinks := []*tar.Header{} - zr := zstd.NewReader(reader) - var closeError error - defer func() { closeError = zr.Close() }() - tr := tar.NewReader(zr) - for { - hdr, err := tr.Next() - if err != nil { - if err == io.EOF { - for _, link := range missingLinks { - err := restoreSymlink(root, link, true) - if err != nil { - return nil, err - } - } - - return files, closeError - } - return nil, err - } - // hdr.Name is always a posix-style path - // FIXME: THIS IS A BUG. - restoredName := turbopath.AnchoredUnixPath(hdr.Name) - files = append(files, restoredName.ToSystemPath()) - filename := restoredName.ToSystemPath().RestoreAnchor(root) - if isChild, err := root.ContainsPath(filename); err != nil { - return nil, err - } else if !isChild { - return nil, fmt.Errorf("cannot untar file to %v", filename) - } - switch hdr.Typeflag { - case tar.TypeDir: - if err := filename.MkdirAll(0775); err != nil { - return nil, err - } - case tar.TypeReg: - if dir := filename.Dir(); dir != "." 
{ - if err := dir.MkdirAll(0775); err != nil { - return nil, err - } - } - if f, err := filename.OpenFile(os.O_WRONLY|os.O_TRUNC|os.O_CREATE, os.FileMode(hdr.Mode)); err != nil { - return nil, err - } else if _, err := io.Copy(f, tr); err != nil { - return nil, err - } else if err := f.Close(); err != nil { - return nil, err - } - case tar.TypeSymlink: - if err := restoreSymlink(root, hdr, false); errors.Is(err, errNonexistentLinkTarget) { - missingLinks = append(missingLinks, hdr) - } else if err != nil { - return nil, err - } - default: - log.Printf("Unhandled file type %d for %s", hdr.Typeflag, hdr.Name) - } - } -} - -var errNonexistentLinkTarget = errors.New("the link target does not exist") - -func restoreSymlink(root turbopath.AbsoluteSystemPath, hdr *tar.Header, allowNonexistentTargets bool) error { - // Note that hdr.Linkname is really the link target - relativeLinkTarget := filepath.FromSlash(hdr.Linkname) - linkFilename := root.UntypedJoin(hdr.Name) - if err := linkFilename.EnsureDir(); err != nil { - return err - } - - // TODO: check if this is an absolute path, or if we even care - linkTarget := linkFilename.Dir().UntypedJoin(relativeLinkTarget) - if _, err := linkTarget.Lstat(); err != nil { - if os.IsNotExist(err) { - if !allowNonexistentTargets { - return errNonexistentLinkTarget - } - // if we're allowing nonexistent link targets, proceed to creating the link - } else { - return err - } - } - // Ensure that the link we're about to create doesn't already exist - if err := linkFilename.Remove(); err != nil && !errors.Is(err, os.ErrNotExist) { - return err - } - if err := linkFilename.Symlink(relativeLinkTarget); err != nil { - return err - } - return nil + cache := cacheitem.FromReader(reader, true) + return cache.Restore(root) } func (cache *httpCache) Clean(_ turbopath.AbsoluteSystemPath) { @@ -359,12 +267,13 @@ func (cache *httpCache) CleanAll() { func (cache *httpCache) Shutdown() {} -func newHTTPCache(opts Opts, client client, recorder 
analytics.Recorder) *httpCache { +func newHTTPCache(opts Opts, client client, recorder analytics.Recorder, repoRoot turbopath.AbsoluteSystemPath) *httpCache { return &httpCache{ writable: true, client: client, requestLimiter: make(limiter, 20), recorder: recorder, + repoRoot: repoRoot, signerVerifier: &ArtifactSignatureAuthentication{ // TODO(Gaspar): this should use RemoteCacheOptions.TeamId once we start // enforcing team restrictions for repositories. diff --git a/cli/internal/cache/cache_http_test.go b/cli/internal/cache/cache_http_test.go index d1879316a3e76..7883e358b7ad3 100644 --- a/cli/internal/cache/cache_http_test.go +++ b/cli/internal/cache/cache_http_test.go @@ -81,7 +81,7 @@ func makeValidTar(t *testing.T) *bytes.Buffer { // my-pkg h := &tar.Header{ Name: "my-pkg/", - Mode: int64(0644), + Mode: int64(0755), Typeflag: tar.TypeDir, } if err := tw.WriteHeader(h); err != nil { @@ -182,7 +182,7 @@ func TestRestoreTar(t *testing.T) { expectedFiles := []turbopath.AnchoredSystemPath{ turbopath.AnchoredUnixPath("extra-file").ToSystemPath(), - turbopath.AnchoredUnixPath("my-pkg/").ToSystemPath(), + turbopath.AnchoredUnixPath("my-pkg").ToSystemPath(), turbopath.AnchoredUnixPath("my-pkg/some-file").ToSystemPath(), turbopath.AnchoredUnixPath("my-pkg/link-to-extra-file").ToSystemPath(), turbopath.AnchoredUnixPath("my-pkg/broken-link").ToSystemPath(), diff --git a/cli/internal/cacheitem/cacheitem.go b/cli/internal/cacheitem/cacheitem.go index 2fb2c3b41cc51..62d8964208456 100644 --- a/cli/internal/cacheitem/cacheitem.go +++ b/cli/internal/cacheitem/cacheitem.go @@ -7,7 +7,6 @@ import ( "crypto/sha512" "errors" "io" - "os" "github.com/vercel/turbo/cli/internal/turbopath" ) @@ -32,7 +31,7 @@ type CacheItem struct { tw *tar.Writer zw io.WriteCloser fileBuffer *bufio.Writer - handle *os.File + handle io.Reader compressed bool } @@ -57,9 +56,14 @@ func (ci *CacheItem) Close() error { } if ci.handle != nil { - if err := ci.handle.Close(); err != nil { - return err + 
closer, isCloser := ci.handle.(io.Closer) + + if isCloser { + if err := closer.Close(); err != nil { + return err + } } + } return nil diff --git a/cli/internal/cacheitem/create.go b/cli/internal/cacheitem/create.go index ce5b1c8ac2718..452f63ae6be60 100644 --- a/cli/internal/cacheitem/create.go +++ b/cli/internal/cacheitem/create.go @@ -36,7 +36,12 @@ func Create(path turbopath.AbsoluteSystemPath) (*CacheItem, error) { // Wires all the writers end-to-end: // tar.Writer -> zstd.Writer -> fileBuffer -> file func (ci *CacheItem) init() { - fileBuffer := bufio.NewWriterSize(ci.handle, 2^20) // Flush to disk in 1mb chunks. + writer, isWriter := ci.handle.(io.Writer) + if !isWriter { + panic("can't write to this cache item") + } + + fileBuffer := bufio.NewWriterSize(writer, 2^20) // Flush to disk in 1mb chunks. var tw *tar.Writer if ci.compressed { diff --git a/cli/internal/cacheitem/restore.go b/cli/internal/cacheitem/restore.go index 347b99646c9ae..a2d8c31c2b640 100644 --- a/cli/internal/cacheitem/restore.go +++ b/cli/internal/cacheitem/restore.go @@ -14,6 +14,14 @@ import ( "github.com/vercel/turbo/cli/internal/turbopath" ) +// FromReader returns an existing CacheItem at the specified path. +func FromReader(reader io.Reader, compressed bool) *CacheItem { + return &CacheItem{ + handle: reader, + compressed: compressed, + } +} + // Open returns an existing CacheItem at the specified path. 
func Open(path turbopath.AbsoluteSystemPath) (*CacheItem, error) { handle, err := sequential.OpenFile(path.ToString(), os.O_RDONLY, 0777) diff --git a/cli/internal/ci/vendors.go b/cli/internal/ci/vendors.go index f619140bc8ef4..66fdc2cf22946 100644 --- a/cli/internal/ci/vendors.go +++ b/cli/internal/ci/vendors.go @@ -21,6 +21,9 @@ type Vendor struct { // The name of the environment variable that contains the current checked out branch BranchEnvVar string + + // The name of the environment variable that contains the user using turbo + UsernameEnvVar string } // Vendors is a list of common CI/CD vendors (from https://github.com/watson/ci-info/blob/master/vendors.json) @@ -112,12 +115,14 @@ var Vendors = []Vendor{ Constant: "EAS", Env: vendorEnvs{Any: []string{"EAS_BUILD"}}, }, + // https://docs.github.com/en/actions/learn-github-actions/variables#default-environment-variables { - Name: "GitHub Actions", - Constant: "GITHUB_ACTIONS", - Env: vendorEnvs{Any: []string{"GITHUB_ACTIONS"}}, - ShaEnvVar: "GITHUB_SHA", - BranchEnvVar: "GITHUB_REF_NAME", + Name: "GitHub Actions", + Constant: "GITHUB_ACTIONS", + Env: vendorEnvs{Any: []string{"GITHUB_ACTIONS"}}, + ShaEnvVar: "GITHUB_SHA", + BranchEnvVar: "GITHUB_REF_NAME", + UsernameEnvVar: "GITHUB_ACTOR", }, { Name: "GitLab CI", @@ -231,12 +236,14 @@ var Vendors = []Vendor{ Constant: "TRAVIS", Env: vendorEnvs{Any: []string{"TRAVIS"}}, }, + // https://vercel.com/docs/concepts/projects/environment-variables/system-environment-variables { - Name: "Vercel", - Constant: "VERCEL", - Env: vendorEnvs{Any: []string{"NOW_BUILDER", "VERCEL"}}, - ShaEnvVar: "VERCEL_GIT_COMMIT_SHA", - BranchEnvVar: "VERCEL_GIT_COMMIT_REF", + Name: "Vercel", + Constant: "VERCEL", + Env: vendorEnvs{Any: []string{"NOW_BUILDER", "VERCEL"}}, + ShaEnvVar: "VERCEL_GIT_COMMIT_SHA", + BranchEnvVar: "VERCEL_GIT_COMMIT_REF", + UsernameEnvVar: "VERCEL_GIT_COMMIT_AUTHOR_LOGIN", }, { Name: "Visual Studio App Center", diff --git a/cli/internal/ffi/bindings.h 
b/cli/internal/ffi/bindings.h index 61010332dceb1..923145d3100ef 100644 --- a/cli/internal/ffi/bindings.h +++ b/cli/internal/ffi/bindings.h @@ -18,8 +18,6 @@ struct Buffer previous_content(struct Buffer buffer); struct Buffer recursive_copy(struct Buffer buffer); -struct Buffer get_package_file_hashes_from_git_index(struct Buffer buffer); - struct Buffer transitive_closure(struct Buffer buf); struct Buffer subgraph(struct Buffer buf); diff --git a/cli/internal/ffi/ffi.go b/cli/internal/ffi/ffi.go index 0a05db77a7251..b59846fa73bb0 100644 --- a/cli/internal/ffi/ffi.go +++ b/cli/internal/ffi/ffi.go @@ -313,26 +313,3 @@ func GlobalChange(packageManager string, prevContents []byte, currContents []byt return resp.GetGlobalChange() } - -// GetPackageFileHashesFromGitIndex proxies to rust to use git to hash the files in a package. -// It does not support additional files, it just hashes the non-ignored files in the package. -func GetPackageFileHashesFromGitIndex(rootPath string, packagePath string) (map[string]string, error) { - req := ffi_proto.GetPackageFileHashesFromGitIndexRequest{ - TurboRoot: rootPath, - PackagePath: packagePath, - } - reqBuf := Marshal(&req) - resBuf := C.get_package_file_hashes_from_git_index(reqBuf) - reqBuf.Free() - - resp := ffi_proto.GetPackageFileHashesFromGitIndexResponse{} - if err := Unmarshal(resBuf, resp.ProtoReflect().Interface()); err != nil { - panic(err) - } - - if err := resp.GetError(); err != "" { - return nil, errors.New(err) - } - hashes := resp.GetHashes() - return hashes.GetHashes(), nil -} diff --git a/cli/internal/ffi/proto/messages.pb.go b/cli/internal/ffi/proto/messages.pb.go index 2232633f114bd..553b205c07904 100644 --- a/cli/internal/ffi/proto/messages.pb.go +++ b/cli/internal/ffi/proto/messages.pb.go @@ -1662,190 +1662,6 @@ func (x *RecursiveCopyResponse) GetError() string { return "" } -type GetPackageFileHashesFromGitIndexRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields 
protoimpl.UnknownFields - - TurboRoot string `protobuf:"bytes,1,opt,name=turbo_root,json=turboRoot,proto3" json:"turbo_root,omitempty"` - PackagePath string `protobuf:"bytes,2,opt,name=package_path,json=packagePath,proto3" json:"package_path,omitempty"` -} - -func (x *GetPackageFileHashesFromGitIndexRequest) Reset() { - *x = GetPackageFileHashesFromGitIndexRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[26] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetPackageFileHashesFromGitIndexRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetPackageFileHashesFromGitIndexRequest) ProtoMessage() {} - -func (x *GetPackageFileHashesFromGitIndexRequest) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[26] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetPackageFileHashesFromGitIndexRequest.ProtoReflect.Descriptor instead. 
-func (*GetPackageFileHashesFromGitIndexRequest) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{26} -} - -func (x *GetPackageFileHashesFromGitIndexRequest) GetTurboRoot() string { - if x != nil { - return x.TurboRoot - } - return "" -} - -func (x *GetPackageFileHashesFromGitIndexRequest) GetPackagePath() string { - if x != nil { - return x.PackagePath - } - return "" -} - -type FileHashes struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Hashes map[string]string `protobuf:"bytes,1,rep,name=hashes,proto3" json:"hashes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` -} - -func (x *FileHashes) Reset() { - *x = FileHashes{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[27] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *FileHashes) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*FileHashes) ProtoMessage() {} - -func (x *FileHashes) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[27] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use FileHashes.ProtoReflect.Descriptor instead. 
-func (*FileHashes) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{27} -} - -func (x *FileHashes) GetHashes() map[string]string { - if x != nil { - return x.Hashes - } - return nil -} - -type GetPackageFileHashesFromGitIndexResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - // Types that are assignable to Response: - // *GetPackageFileHashesFromGitIndexResponse_Hashes - // *GetPackageFileHashesFromGitIndexResponse_Error - Response isGetPackageFileHashesFromGitIndexResponse_Response `protobuf_oneof:"response"` -} - -func (x *GetPackageFileHashesFromGitIndexResponse) Reset() { - *x = GetPackageFileHashesFromGitIndexResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_turborepo_ffi_messages_proto_msgTypes[28] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetPackageFileHashesFromGitIndexResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetPackageFileHashesFromGitIndexResponse) ProtoMessage() {} - -func (x *GetPackageFileHashesFromGitIndexResponse) ProtoReflect() protoreflect.Message { - mi := &file_turborepo_ffi_messages_proto_msgTypes[28] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetPackageFileHashesFromGitIndexResponse.ProtoReflect.Descriptor instead. 
-func (*GetPackageFileHashesFromGitIndexResponse) Descriptor() ([]byte, []int) { - return file_turborepo_ffi_messages_proto_rawDescGZIP(), []int{28} -} - -func (m *GetPackageFileHashesFromGitIndexResponse) GetResponse() isGetPackageFileHashesFromGitIndexResponse_Response { - if m != nil { - return m.Response - } - return nil -} - -func (x *GetPackageFileHashesFromGitIndexResponse) GetHashes() *FileHashes { - if x, ok := x.GetResponse().(*GetPackageFileHashesFromGitIndexResponse_Hashes); ok { - return x.Hashes - } - return nil -} - -func (x *GetPackageFileHashesFromGitIndexResponse) GetError() string { - if x, ok := x.GetResponse().(*GetPackageFileHashesFromGitIndexResponse_Error); ok { - return x.Error - } - return "" -} - -type isGetPackageFileHashesFromGitIndexResponse_Response interface { - isGetPackageFileHashesFromGitIndexResponse_Response() -} - -type GetPackageFileHashesFromGitIndexResponse_Hashes struct { - Hashes *FileHashes `protobuf:"bytes,1,opt,name=hashes,proto3,oneof"` -} - -type GetPackageFileHashesFromGitIndexResponse_Error struct { - Error string `protobuf:"bytes,2,opt,name=error,proto3,oneof"` -} - -func (*GetPackageFileHashesFromGitIndexResponse_Hashes) isGetPackageFileHashesFromGitIndexResponse_Response() { -} - -func (*GetPackageFileHashesFromGitIndexResponse_Error) isGetPackageFileHashesFromGitIndexResponse_Response() { -} - var File_turborepo_ffi_messages_proto protoreflect.FileDescriptor var file_turborepo_ffi_messages_proto_rawDesc = []byte{ @@ -2027,32 +1843,10 @@ var file_turborepo_ffi_messages_proto_rawDesc = []byte{ 0x76, 0x65, 0x43, 0x6f, 0x70, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x22, 0x6b, 0x0a, 0x27, 0x47, 0x65, 0x74, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, - 0x65, 0x46, 0x69, 0x6c, 0x65, 0x48, 
0x61, 0x73, 0x68, 0x65, 0x73, 0x46, 0x72, 0x6f, 0x6d, 0x47, - 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, - 0x0a, 0x0a, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x74, 0x75, 0x72, 0x62, 0x6f, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x21, 0x0a, - 0x0c, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x50, 0x61, 0x74, 0x68, - 0x22, 0x78, 0x0a, 0x0a, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x2f, - 0x0a, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x17, - 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x2e, 0x48, 0x61, 0x73, 0x68, - 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x1a, - 0x39, 0x0a, 0x0b, 0x48, 0x61, 0x73, 0x68, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, - 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x75, 0x0a, 0x28, 0x47, 0x65, - 0x74, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, 0x68, - 0x65, 0x73, 0x46, 0x72, 0x6f, 0x6d, 0x47, 0x69, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x25, 0x0a, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x48, 0x61, 0x73, - 0x68, 0x65, 0x73, 0x48, 0x00, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x16, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 
0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x2a, 0x24, 0x0a, 0x0e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, 0x6e, 0x61, - 0x67, 0x65, 0x72, 0x12, 0x07, 0x0a, 0x03, 0x4e, 0x50, 0x4d, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, - 0x42, 0x45, 0x52, 0x52, 0x59, 0x10, 0x01, 0x42, 0x0b, 0x5a, 0x09, 0x66, 0x66, 0x69, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x72, 0x6f, 0x72, 0x2a, 0x24, 0x0a, 0x0e, 0x50, 0x61, 0x63, 0x6b, 0x61, 0x67, 0x65, 0x4d, 0x61, + 0x6e, 0x61, 0x67, 0x65, 0x72, 0x12, 0x07, 0x0a, 0x03, 0x4e, 0x50, 0x4d, 0x10, 0x00, 0x12, 0x09, + 0x0a, 0x05, 0x42, 0x45, 0x52, 0x52, 0x59, 0x10, 0x01, 0x42, 0x0b, 0x5a, 0x09, 0x66, 0x66, 0x69, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2068,68 +1862,62 @@ func file_turborepo_ffi_messages_proto_rawDescGZIP() []byte { } var file_turborepo_ffi_messages_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 33) +var file_turborepo_ffi_messages_proto_msgTypes = make([]protoimpl.MessageInfo, 29) var file_turborepo_ffi_messages_proto_goTypes = []interface{}{ - (PackageManager)(0), // 0: PackageManager - (*TurboDataDirResp)(nil), // 1: TurboDataDirResp - (*GlobReq)(nil), // 2: GlobReq - (*GlobResp)(nil), // 3: GlobResp - (*GlobRespList)(nil), // 4: GlobRespList - (*ChangedFilesReq)(nil), // 5: ChangedFilesReq - (*ChangedFilesResp)(nil), // 6: ChangedFilesResp - (*ChangedFilesList)(nil), // 7: ChangedFilesList - (*PreviousContentReq)(nil), // 8: PreviousContentReq - (*PreviousContentResp)(nil), // 9: PreviousContentResp - (*PackageDependency)(nil), // 10: PackageDependency - (*PackageDependencyList)(nil), // 11: PackageDependencyList - (*WorkspaceDependencies)(nil), // 12: WorkspaceDependencies - (*TransitiveDepsRequest)(nil), // 13: TransitiveDepsRequest - (*TransitiveDepsResponse)(nil), // 14: TransitiveDepsResponse - (*AdditionalBerryData)(nil), // 15: 
AdditionalBerryData - (*LockfilePackage)(nil), // 16: LockfilePackage - (*LockfilePackageList)(nil), // 17: LockfilePackageList - (*SubgraphRequest)(nil), // 18: SubgraphRequest - (*SubgraphResponse)(nil), // 19: SubgraphResponse - (*PatchesRequest)(nil), // 20: PatchesRequest - (*PatchesResponse)(nil), // 21: PatchesResponse - (*Patches)(nil), // 22: Patches - (*GlobalChangeRequest)(nil), // 23: GlobalChangeRequest - (*GlobalChangeResponse)(nil), // 24: GlobalChangeResponse - (*RecursiveCopyRequest)(nil), // 25: RecursiveCopyRequest - (*RecursiveCopyResponse)(nil), // 26: RecursiveCopyResponse - (*GetPackageFileHashesFromGitIndexRequest)(nil), // 27: GetPackageFileHashesFromGitIndexRequest - (*FileHashes)(nil), // 28: FileHashes - (*GetPackageFileHashesFromGitIndexResponse)(nil), // 29: GetPackageFileHashesFromGitIndexResponse - nil, // 30: WorkspaceDependencies.DependenciesEntry - nil, // 31: TransitiveDepsRequest.WorkspacesEntry - nil, // 32: AdditionalBerryData.ResolutionsEntry - nil, // 33: FileHashes.HashesEntry + (PackageManager)(0), // 0: PackageManager + (*TurboDataDirResp)(nil), // 1: TurboDataDirResp + (*GlobReq)(nil), // 2: GlobReq + (*GlobResp)(nil), // 3: GlobResp + (*GlobRespList)(nil), // 4: GlobRespList + (*ChangedFilesReq)(nil), // 5: ChangedFilesReq + (*ChangedFilesResp)(nil), // 6: ChangedFilesResp + (*ChangedFilesList)(nil), // 7: ChangedFilesList + (*PreviousContentReq)(nil), // 8: PreviousContentReq + (*PreviousContentResp)(nil), // 9: PreviousContentResp + (*PackageDependency)(nil), // 10: PackageDependency + (*PackageDependencyList)(nil), // 11: PackageDependencyList + (*WorkspaceDependencies)(nil), // 12: WorkspaceDependencies + (*TransitiveDepsRequest)(nil), // 13: TransitiveDepsRequest + (*TransitiveDepsResponse)(nil), // 14: TransitiveDepsResponse + (*AdditionalBerryData)(nil), // 15: AdditionalBerryData + (*LockfilePackage)(nil), // 16: LockfilePackage + (*LockfilePackageList)(nil), // 17: LockfilePackageList + (*SubgraphRequest)(nil), 
// 18: SubgraphRequest + (*SubgraphResponse)(nil), // 19: SubgraphResponse + (*PatchesRequest)(nil), // 20: PatchesRequest + (*PatchesResponse)(nil), // 21: PatchesResponse + (*Patches)(nil), // 22: Patches + (*GlobalChangeRequest)(nil), // 23: GlobalChangeRequest + (*GlobalChangeResponse)(nil), // 24: GlobalChangeResponse + (*RecursiveCopyRequest)(nil), // 25: RecursiveCopyRequest + (*RecursiveCopyResponse)(nil), // 26: RecursiveCopyResponse + nil, // 27: WorkspaceDependencies.DependenciesEntry + nil, // 28: TransitiveDepsRequest.WorkspacesEntry + nil, // 29: AdditionalBerryData.ResolutionsEntry } var file_turborepo_ffi_messages_proto_depIdxs = []int32{ 4, // 0: GlobResp.files:type_name -> GlobRespList 7, // 1: ChangedFilesResp.files:type_name -> ChangedFilesList 10, // 2: PackageDependencyList.list:type_name -> PackageDependency - 30, // 3: WorkspaceDependencies.dependencies:type_name -> WorkspaceDependencies.DependenciesEntry + 27, // 3: WorkspaceDependencies.dependencies:type_name -> WorkspaceDependencies.DependenciesEntry 0, // 4: TransitiveDepsRequest.package_manager:type_name -> PackageManager - 31, // 5: TransitiveDepsRequest.workspaces:type_name -> TransitiveDepsRequest.WorkspacesEntry + 28, // 5: TransitiveDepsRequest.workspaces:type_name -> TransitiveDepsRequest.WorkspacesEntry 15, // 6: TransitiveDepsRequest.resolutions:type_name -> AdditionalBerryData 12, // 7: TransitiveDepsResponse.dependencies:type_name -> WorkspaceDependencies - 32, // 8: AdditionalBerryData.resolutions:type_name -> AdditionalBerryData.ResolutionsEntry + 29, // 8: AdditionalBerryData.resolutions:type_name -> AdditionalBerryData.ResolutionsEntry 16, // 9: LockfilePackageList.list:type_name -> LockfilePackage 0, // 10: SubgraphRequest.package_manager:type_name -> PackageManager 15, // 11: SubgraphRequest.resolutions:type_name -> AdditionalBerryData 0, // 12: PatchesRequest.package_manager:type_name -> PackageManager 22, // 13: PatchesResponse.patches:type_name -> Patches 0, // 14: 
GlobalChangeRequest.package_manager:type_name -> PackageManager - 33, // 15: FileHashes.hashes:type_name -> FileHashes.HashesEntry - 28, // 16: GetPackageFileHashesFromGitIndexResponse.hashes:type_name -> FileHashes - 17, // 17: WorkspaceDependencies.DependenciesEntry.value:type_name -> LockfilePackageList - 11, // 18: TransitiveDepsRequest.WorkspacesEntry.value:type_name -> PackageDependencyList - 19, // [19:19] is the sub-list for method output_type - 19, // [19:19] is the sub-list for method input_type - 19, // [19:19] is the sub-list for extension type_name - 19, // [19:19] is the sub-list for extension extendee - 0, // [0:19] is the sub-list for field type_name + 17, // 15: WorkspaceDependencies.DependenciesEntry.value:type_name -> LockfilePackageList + 11, // 16: TransitiveDepsRequest.WorkspacesEntry.value:type_name -> PackageDependencyList + 17, // [17:17] is the sub-list for method output_type + 17, // [17:17] is the sub-list for method input_type + 17, // [17:17] is the sub-list for extension type_name + 17, // [17:17] is the sub-list for extension extendee + 0, // [0:17] is the sub-list for field type_name } func init() { file_turborepo_ffi_messages_proto_init() } @@ -2450,42 +2238,6 @@ func file_turborepo_ffi_messages_proto_init() { return nil } } - file_turborepo_ffi_messages_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPackageFileHashesFromGitIndexRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FileHashes); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_turborepo_ffi_messages_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetPackageFileHashesFromGitIndexResponse); i 
{ - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } } file_turborepo_ffi_messages_proto_msgTypes[2].OneofWrappers = []interface{}{ (*GlobResp_Files)(nil), @@ -2515,17 +2267,13 @@ func file_turborepo_ffi_messages_proto_init() { (*PatchesResponse_Error)(nil), } file_turborepo_ffi_messages_proto_msgTypes[25].OneofWrappers = []interface{}{} - file_turborepo_ffi_messages_proto_msgTypes[28].OneofWrappers = []interface{}{ - (*GetPackageFileHashesFromGitIndexResponse_Hashes)(nil), - (*GetPackageFileHashesFromGitIndexResponse_Error)(nil), - } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_turborepo_ffi_messages_proto_rawDesc, NumEnums: 1, - NumMessages: 33, + NumMessages: 29, NumExtensions: 0, NumServices: 0, }, diff --git a/cli/internal/hashing/package_deps_hash.go b/cli/internal/hashing/package_deps_hash.go index dfa972585729d..c51c056b9dcf7 100644 --- a/cli/internal/hashing/package_deps_hash.go +++ b/cli/internal/hashing/package_deps_hash.go @@ -28,6 +28,48 @@ type PackageDepsOptions struct { InputPatterns []string } +func getPackageFileHashesFromGitIndex(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { + var result map[turbopath.AnchoredUnixPath]string + absolutePackagePath := packagePath.RestoreAnchor(rootPath) + + // Get the state of the git index. + gitLsTreeOutput, err := gitLsTree(absolutePackagePath) + if err != nil { + return nil, fmt.Errorf("could not get git hashes for files in package %s: %w", packagePath, err) + } + result = gitLsTreeOutput + + // Update the with the state of the working directory. 
+ // The paths returned from this call are anchored at the package directory + gitStatusOutput, err := gitStatus(absolutePackagePath) + if err != nil { + return nil, fmt.Errorf("Could not get git hashes from git status: %v", err) + } + + // Review status output to identify the delta. + var filesToHash []turbopath.AnchoredSystemPath + for filePath, status := range gitStatusOutput { + if status.isDelete() { + delete(result, filePath) + } else { + filesToHash = append(filesToHash, filePath.ToSystemPath()) + } + } + + // Get the hashes for any modified files in the working directory. + hashes, err := GetHashesForFiles(absolutePackagePath, filesToHash) + if err != nil { + return nil, err + } + + // Zip up file paths and hashes together + for filePath, hash := range hashes { + result[filePath] = hash + } + + return result, nil +} + func safeCompileIgnoreFile(filepath turbopath.AbsoluteSystemPath) (*gitignore.GitIgnore, error) { if filepath.FileExists() { return gitignore.CompileIgnoreFile(filepath.ToString()) @@ -454,3 +496,61 @@ type statusCode struct { func (s statusCode) isDelete() bool { return s.x == "D" || s.y == "D" } + +// gitStatus returns a map of paths to their `git` status code. This can be used to identify what should +// be done with files that do not currently match what is in the index. +// +// Note: `git status -z`'s relative path results are relative to the repository's location. +// We need to calculate where the repository's location is in order to determine what the full path is +// before we can return those paths relative to the calling directory, normalizing to the behavior of +// `ls-files` and `ls-tree`. 
+func gitStatus(rootPath turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]statusCode, error) { + cmd := exec.Command( + "git", // Using `git` from $PATH, + "status", // tell me about the status of the working tree, + "--untracked-files", // including information about untracked files, + "--no-renames", // do not detect renames, + "-z", // with each file path relative to the repository root and \000-terminated, + "--", // and any additional argument you see is a path, promise. + ) + cmd.Args = append(cmd.Args, ".") // Operate in the current directory instead of the root of the working tree. + cmd.Dir = rootPath.ToString() // Include files only from this directory. + + entries, err := runGitCommand(cmd, "status", gitoutput.NewStatusReader) + if err != nil { + return nil, err + } + + output := make(map[turbopath.AnchoredUnixPath]statusCode, len(entries)) + convertedRootPath := turbopath.AbsoluteSystemPathFromUpstream(rootPath.ToString()) + + traversePath, err := memoizedGetTraversePath(convertedRootPath) + if err != nil { + return nil, err + } + + for _, entry := range entries { + statusEntry := gitoutput.StatusEntry(entry) + // Anchored at repository. 
+ pathFromStatus := turbopath.AnchoredUnixPathFromUpstream(statusEntry.GetField(gitoutput.Path)) + var outputPath turbopath.AnchoredUnixPath + + if len(traversePath) > 0 { + repositoryPath := convertedRootPath.Join(traversePath.ToSystemPath()) + fileFullPath := pathFromStatus.ToSystemPath().RestoreAnchor(repositoryPath) + + relativePath, err := fileFullPath.RelativeTo(convertedRootPath) + if err != nil { + return nil, err + } + + outputPath = relativePath.ToUnixPath() + } else { + outputPath = pathFromStatus + } + + output[outputPath] = statusCode{x: statusEntry.GetField(gitoutput.StatusX), y: statusEntry.GetField(gitoutput.StatusY)} + } + + return output, nil +} diff --git a/cli/internal/hashing/package_deps_hash_go.go b/cli/internal/hashing/package_deps_hash_go.go deleted file mode 100644 index 46e5db6a65774..0000000000000 --- a/cli/internal/hashing/package_deps_hash_go.go +++ /dev/null @@ -1,112 +0,0 @@ -//go:build go || !rust -// +build go !rust - -package hashing - -import ( - "fmt" - "os/exec" - - "github.com/vercel/turbo/cli/internal/encoding/gitoutput" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func getPackageFileHashesFromGitIndex(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - var result map[turbopath.AnchoredUnixPath]string - absolutePackagePath := packagePath.RestoreAnchor(rootPath) - - // Get the state of the git index. - gitLsTreeOutput, err := gitLsTree(absolutePackagePath) - if err != nil { - return nil, fmt.Errorf("could not get git hashes for files in package %s: %w", packagePath, err) - } - result = gitLsTreeOutput - - // Update the with the state of the working directory. 
- // The paths returned from this call are anchored at the package directory - gitStatusOutput, err := gitStatus(absolutePackagePath) - if err != nil { - return nil, fmt.Errorf("Could not get git hashes from git status: %v", err) - } - - // Review status output to identify the delta. - var filesToHash []turbopath.AnchoredSystemPath - for filePath, status := range gitStatusOutput { - if status.isDelete() { - delete(result, filePath) - } else { - filesToHash = append(filesToHash, filePath.ToSystemPath()) - } - } - - // Get the hashes for any modified files in the working directory. - hashes, err := GetHashesForFiles(absolutePackagePath, filesToHash) - if err != nil { - return nil, err - } - - // Zip up file paths and hashes together - for filePath, hash := range hashes { - result[filePath] = hash - } - - return result, nil -} - -// gitStatus returns a map of paths to their `git` status code. This can be used to identify what should -// be done with files that do not currently match what is in the index. -// -// Note: `git status -z`'s relative path results are relative to the repository's location. -// We need to calculate where the repository's location is in order to determine what the full path is -// before we can return those paths relative to the calling directory, normalizing to the behavior of -// `ls-files` and `ls-tree`. -func gitStatus(rootPath turbopath.AbsoluteSystemPath) (map[turbopath.AnchoredUnixPath]statusCode, error) { - cmd := exec.Command( - "git", // Using `git` from $PATH, - "status", // tell me about the status of the working tree, - "--untracked-files", // including information about untracked files, - "--no-renames", // do not detect renames, - "-z", // with each file path relative to the repository root and \000-terminated, - "--", // and any additional argument you see is a path, promise. - ) - cmd.Args = append(cmd.Args, ".") // Operate in the current directory instead of the root of the working tree. 
- cmd.Dir = rootPath.ToString() // Include files only from this directory. - - entries, err := runGitCommand(cmd, "status", gitoutput.NewStatusReader) - if err != nil { - return nil, err - } - - output := make(map[turbopath.AnchoredUnixPath]statusCode, len(entries)) - convertedRootPath := turbopath.AbsoluteSystemPathFromUpstream(rootPath.ToString()) - - traversePath, err := memoizedGetTraversePath(convertedRootPath) - if err != nil { - return nil, err - } - - for _, entry := range entries { - statusEntry := gitoutput.StatusEntry(entry) - // Anchored at repository. - pathFromStatus := turbopath.AnchoredUnixPathFromUpstream(statusEntry.GetField(gitoutput.Path)) - var outputPath turbopath.AnchoredUnixPath - - if len(traversePath) > 0 { - repositoryPath := convertedRootPath.Join(traversePath.ToSystemPath()) - fileFullPath := pathFromStatus.ToSystemPath().RestoreAnchor(repositoryPath) - - relativePath, err := fileFullPath.RelativeTo(convertedRootPath) - if err != nil { - return nil, err - } - - outputPath = relativePath.ToUnixPath() - } else { - outputPath = pathFromStatus - } - - output[outputPath] = statusCode{x: statusEntry.GetField(gitoutput.StatusX), y: statusEntry.GetField(gitoutput.StatusY)} - } - - return output, nil -} diff --git a/cli/internal/hashing/package_deps_hash_rust.go b/cli/internal/hashing/package_deps_hash_rust.go deleted file mode 100644 index 4f5aa1dd13ae9..0000000000000 --- a/cli/internal/hashing/package_deps_hash_rust.go +++ /dev/null @@ -1,22 +0,0 @@ -//go:build rust -// +build rust - -package hashing - -import ( - "github.com/vercel/turbo/cli/internal/ffi" - "github.com/vercel/turbo/cli/internal/turbopath" -) - -func getPackageFileHashesFromGitIndex(rootPath turbopath.AbsoluteSystemPath, packagePath turbopath.AnchoredSystemPath) (map[turbopath.AnchoredUnixPath]string, error) { - rawHashes, err := ffi.GetPackageFileHashesFromGitIndex(rootPath.ToString(), packagePath.ToString()) - if err != nil { - return nil, err - } - - hashes := 
make(map[turbopath.AnchoredUnixPath]string, len(rawHashes)) - for rawPath, hash := range rawHashes { - hashes[turbopath.AnchoredUnixPathFromUpstream(rawPath)] = hash - } - return hashes, nil -} diff --git a/cli/internal/prune/prune.go b/cli/internal/prune/prune.go index 65555673d894f..e61032105f298 100644 --- a/cli/internal/prune/prune.go +++ b/cli/internal/prune/prune.go @@ -4,6 +4,7 @@ import ( "bufio" "fmt" "os" + "path/filepath" "strings" "github.com/vercel/turbo/cli/internal/cmdutil" @@ -69,7 +70,12 @@ func (p *prune) prune(opts *turbostate.PrunePayload, packageManagerName string) if err != nil { return errors.Wrap(err, "could not construct graph") } - outDir := p.base.RepoRoot.UntypedJoin(opts.OutputDir) + var outDir turbopath.AbsoluteSystemPath + if filepath.IsAbs(opts.OutputDir) { + outDir = turbopath.AbsoluteSystemPathFromUpstream(opts.OutputDir) + } else { + outDir = p.base.RepoRoot.UntypedJoin(opts.OutputDir) + } fullDir := outDir if opts.Docker { fullDir = fullDir.UntypedJoin("full") diff --git a/cli/internal/runsummary/format_json.go b/cli/internal/runsummary/format_json.go index e7530949c6aa7..329964bd74144 100644 --- a/cli/internal/runsummary/format_json.go +++ b/cli/internal/runsummary/format_json.go @@ -73,5 +73,6 @@ type nonMonorepoRunSummary struct { FrameworkInference bool `json:"frameworkInference"` ExecutionSummary *executionSummary `json:"execution,omitempty"` Tasks []*TaskSummary `json:"tasks"` + User string `json:"user"` SCM *scmState `json:"scm"` } diff --git a/cli/internal/runsummary/run_summary.go b/cli/internal/runsummary/run_summary.go index ab870823e2066..b1770204ce4ba 100644 --- a/cli/internal/runsummary/run_summary.go +++ b/cli/internal/runsummary/run_summary.go @@ -11,7 +11,9 @@ import ( "github.com/mitchellh/cli" "github.com/segmentio/ksuid" + "github.com/vercel/turbo/cli/internal/ci" "github.com/vercel/turbo/cli/internal/client" + "github.com/vercel/turbo/cli/internal/env" "github.com/vercel/turbo/cli/internal/spinner" 
"github.com/vercel/turbo/cli/internal/turbopath" "github.com/vercel/turbo/cli/internal/util" @@ -68,6 +70,7 @@ type RunSummary struct { FrameworkInference bool `json:"frameworkInference"` ExecutionSummary *executionSummary `json:"execution,omitempty"` Tasks []*TaskSummary `json:"tasks"` + User string `json:"user"` SCM *scmState `json:"scm"` } @@ -100,6 +103,7 @@ func NewRunSummary( executionSummary := newExecutionSummary(synthesizedCommand, repoPath, startAt, profile) + envVars := env.GetEnvMap() return Meta{ RunSummary: &RunSummary{ ID: ksuid.New(), @@ -111,7 +115,8 @@ func NewRunSummary( FrameworkInference: runOpts.FrameworkInference, Tasks: []*TaskSummary{}, GlobalHashSummary: globalHashSummary, - SCM: getSCMState(repoRoot), + SCM: getSCMState(envVars, repoRoot), + User: getUser(envVars, repoRoot), }, ui: ui, runType: runType, @@ -329,3 +334,14 @@ func (rsm *Meta) postTaskSummaries(runID string) []error { return nil } + +func getUser(envVars env.EnvironmentVariableMap, dir turbopath.AbsoluteSystemPath) string { + var username string + + if ci.IsCi() { + vendor := ci.Info() + username = envVars[vendor.UsernameEnvVar] + } + + return username +} diff --git a/cli/internal/runsummary/scm_summary.go b/cli/internal/runsummary/scm_summary.go index 217e05a365ae0..e38b23d79a7f2 100644 --- a/cli/internal/runsummary/scm_summary.go +++ b/cli/internal/runsummary/scm_summary.go @@ -16,16 +16,15 @@ type scmState struct { // getSCMState returns the sha and branch when in a git repo // Otherwise it should return empty strings right now. // We my add handling of other scms and non-git tracking in the future. 
-func getSCMState(dir turbopath.AbsoluteSystemPath) *scmState { - allEnvVars := env.GetEnvMap() +func getSCMState(envVars env.EnvironmentVariableMap, dir turbopath.AbsoluteSystemPath) *scmState { state := &scmState{Type: "git"} // If we're in CI, try to get the values we need from environment variables if ci.IsCi() { vendor := ci.Info() - state.Sha = allEnvVars[vendor.ShaEnvVar] - state.Branch = allEnvVars[vendor.BranchEnvVar] + state.Sha = envVars[vendor.ShaEnvVar] + state.Branch = envVars[vendor.BranchEnvVar] } // Otherwise fallback to using `git` diff --git a/cli/internal/runsummary/spaces.go b/cli/internal/runsummary/spaces.go index 88298a2a3efc6..f197fa8b7242c 100644 --- a/cli/internal/runsummary/spaces.go +++ b/cli/internal/runsummary/spaces.go @@ -28,9 +28,7 @@ type spacesRunPayload struct { Client spacesClientSummary `json:"client"` // Details about the turbo client GitBranch string `json:"gitBranch"` GitSha string `json:"gitSha"` - - // TODO: we need to add these in - // originationUser string + User string `json:"originationUser,omitempty"` } // spacesCacheStatus is the same as TaskCacheSummary so we can convert @@ -74,6 +72,7 @@ func (rsm *Meta) newSpacesRunCreatePayload() *spacesRunPayload { Context: context, GitBranch: rsm.RunSummary.SCM.Branch, GitSha: rsm.RunSummary.SCM.Sha, + User: rsm.RunSummary.User, Client: spacesClientSummary{ ID: "turbo", Name: "Turbo", diff --git a/crates/turbo-tasks/src/debug/vdbg.rs b/crates/turbo-tasks/src/debug/vdbg.rs index 36d9ef5798884..df737057d6391 100644 --- a/crates/turbo-tasks/src/debug/vdbg.rs +++ b/crates/turbo-tasks/src/debug/vdbg.rs @@ -15,33 +15,48 @@ macro_rules! 
vdbg { () => { eprintln!("[{}:{}]", file!(), line!()) }; - ($val:expr ; depth = $depth:expr) => { - // Use of `match` here is intentional because it affects the lifetimes - // of temporaries - https://stackoverflow.com/a/48732525/1063961 - match $val { - tmp => { - $crate::macro_helpers::spawn_detached(async move { - use $crate::debug::ValueDebugFormat; - eprintln!( - "[{}:{}] {} = {}", - file!(), - line!(), - stringify!($val), - (&tmp).value_debug_format($depth).try_to_string().await?, - ); - Ok(()) - }); - tmp - } + + (__init $depth:expr ; $($val:expr),* ) => { + { + use $crate::debug::ValueDebugFormat; + let depth = $depth; + $crate::macro_helpers::spawn_detached(async move { + $crate::vdbg!(__expand depth ; [ $($val),* ] []); + Ok(()) + }); + ($($val),*) } }; - ($($val:expr),+ ; depth = $depth:expr) => { - ($(vdbg!($val ; depth = $depth)),+,) + + (__expand $depth:ident ; [ $val:expr $(, $rest:expr )* ] [ $($tt:tt)* ]) => { + let valstr = stringify!($val); + let valdbg = (&$val).value_debug_format($depth).try_to_string().await?; + $crate::vdbg!(__expand $depth ; [ $($rest),* ] [ $($tt)* valstr valdbg ]); }; - ($val:expr $(,)?) => { - vdbg!($val ; depth = usize::MAX) + (__expand $depth:ident ; [] [ $( $valstr:ident $valdbg:ident )* ]) => { + // By pre-awaiting, then printing everything at once, we ensure that the + // output won't be interleaved with output from other threads, and that + // it will always appear in the order that the macro was invoked. + eprint!( + $crate::vdbg!(__repeat "[{file}:{line}] {} = {}\n" $($valstr)*), + $( + $valstr, + $valdbg, + )* + file = file!(), + line = line!(), + ); + }; + + // Sub-macro for repeating a string N times, where N is controlled by the number of identifiers + // passed to the macro. 
+ (__repeat $str:literal $x:ident $($rest:ident)*) => { concat!($str, $crate::vdbg!(__repeat $str $($rest)*)) }; + (__repeat $str:literal) => { "" }; + + ($($val:expr),* ; depth = $depth:expr) => { + $crate::vdbg!(__init $depth ; $($val),*) }; ($($val:expr),+ $(,)?) => { - ($(vdbg!($val)),+,) + $crate::vdbg!(__init usize::MAX ; $($val),*) }; } diff --git a/crates/turbopack-cli/src/dev/web_entry_source.rs b/crates/turbopack-cli/src/dev/web_entry_source.rs index 4dd239591be9f..ba908031d4724 100644 --- a/crates/turbopack-cli/src/dev/web_entry_source.rs +++ b/crates/turbopack-cli/src/dev/web_entry_source.rs @@ -7,17 +7,14 @@ use turbo_tasks_fs::{FileSystem, FileSystemPathVc}; use turbopack::{ condition::ContextCondition, ecmascript::EcmascriptModuleAssetVc, - module_options::{ - JsxTransformOptions, ModuleOptionsContext, ModuleOptionsContextVc, - StyledComponentsTransformConfigVc, - }, + module_options::{JsxTransformOptions, ModuleOptionsContext, ModuleOptionsContextVc}, resolve_options_context::{ResolveOptionsContext, ResolveOptionsContextVc}, transition::TransitionsByNameVc, ModuleAssetContextVc, }; use turbopack_cli_utils::runtime_entry::{RuntimeEntriesVc, RuntimeEntry}; use turbopack_core::{ - chunk::{ChunkableAsset, ChunkableAssetVc, ChunkingContext, ChunkingContextVc}, + chunk::{ChunkableAssetVc, ChunkingContextVc}, compile_time_defines, compile_time_info::{CompileTimeDefinesVc, CompileTimeInfo, CompileTimeInfoVc}, context::AssetContextVc, @@ -35,7 +32,9 @@ use turbopack_dev_server::{ html::DevHtmlAssetVc, source::{asset_graph::AssetGraphContentSourceVc, ContentSourceVc}, }; -use turbopack_ecmascript_plugins::transform::emotion::EmotionTransformConfigVc; +use turbopack_ecmascript_plugins::transform::{ + emotion::EmotionTransformConfigVc, styled_components::StyledComponentsTransformConfigVc, +}; use turbopack_node::execution_context::ExecutionContextVc; use crate::embed_js::embed_file_path; @@ -109,10 +108,17 @@ async fn get_client_module_options_context( .await? 
.is_found(); + let enable_jsx = Some( + JsxTransformOptions { + react_refresh: enable_react_refresh, + ..Default::default() + } + .cell(), + ); + let module_options_context = ModuleOptionsContext { - enable_jsx: Some(JsxTransformOptions::default().cell()), + enable_jsx, enable_emotion: Some(EmotionTransformConfigVc::default()), - enable_react_refresh, enable_styled_components: Some(StyledComponentsTransformConfigVc::default()), enable_styled_jsx: true, enable_postcss_transform: Some(Default::default()), diff --git a/crates/turbopack-core/src/resolve/mod.rs b/crates/turbopack-core/src/resolve/mod.rs index 9fc206d1ddad3..a0666aa735c50 100644 --- a/crates/turbopack-core/src/resolve/mod.rs +++ b/crates/turbopack-core/src/resolve/mod.rs @@ -758,13 +758,20 @@ async fn resolve_internal( path.push(ext.clone().into()); patterns.push(path); } - let new_pat = Pattern::alternatives(patterns); - resolve_internal( - context, - RequestVc::raw(Value::new(new_pat), *force_in_context), - options, - ) + // This ensures the order of the patterns (extensions) is + // preserved, `Pattern::Alternatives` inside a `Request::Raw` does not preserve + // the order + let mut results = Vec::new(); + for pattern in patterns { + results.push(resolve_internal( + context, + RequestVc::raw(Value::new(pattern), *force_in_context), + options, + )); + } + + merge_results(results) } Request::Module { module, diff --git a/crates/turbopack-dev/js/src/runtime.js b/crates/turbopack-dev/js/src/runtime.js index d13e717155e95..e322c55b3db85 100644 --- a/crates/turbopack-dev/js/src/runtime.js +++ b/crates/turbopack-dev/js/src/runtime.js @@ -482,8 +482,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. 
+ try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -501,11 +502,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -530,21 +531,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1418,6 +1424,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs b/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs index 4d2a4847d455d..65f15c5a35689 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/directives/server.rs @@ -13,6 +13,7 @@ use super::is_server_module; pub struct ServerDirectiveTransformer { // ServerDirective is not implemented yet and always reports an issue. // We don't have to pass a valid transition name yet, but the API is prepared. + #[allow(unused)] transition_name: StringVc, } diff --git a/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs b/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs index 3938e12cfb9fc..2215b3fbcaece 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/emotion.rs @@ -116,13 +116,3 @@ impl CustomTransformer for EmotionTransformer { Ok(()) } } - -pub async fn build_emotion_transformer( - config: &Option, -) -> Result>> { - Ok(if let Some(config) = config { - EmotionTransformer::new(&*config.await?).map(Box::new) - } else { - None - }) -} diff --git a/crates/turbopack-ecmascript-plugins/src/transform/relay.rs b/crates/turbopack-ecmascript-plugins/src/transform/relay.rs index 1f2ad64a9df1d..52161b1ef078f 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/relay.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/relay.rs @@ -2,6 +2,7 @@ use std::path::PathBuf; use anyhow::Result; use async_trait::async_trait; +use serde::{Deserialize, Serialize}; 
use swc_core::{ common::{util::take::Take, FileName}, ecma::{ @@ -9,16 +10,46 @@ use swc_core::{ visit::FoldWith, }, }; +use swc_relay::RelayLanguageConfig; +use turbo_tasks::trace::TraceRawVcs; use turbopack_ecmascript::{CustomTransformer, TransformContext}; +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)] +#[serde(rename_all = "camelCase")] +pub struct RelayConfig { + pub src: String, + pub artifact_directory: Option, + pub language: Option, +} + +#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)] +#[serde(rename_all = "lowercase")] +pub enum RelayLanguage { + TypeScript, + Flow, + JavaScript, +} + #[derive(Debug)] pub struct RelayTransformer { config: swc_relay::Config, } impl RelayTransformer { - pub fn new(config: swc_relay::Config) -> Self { - Self { config } + pub fn new(config: &RelayConfig) -> Self { + let options = swc_relay::Config { + artifact_directory: config.artifact_directory.as_ref().map(PathBuf::from), + language: config.language.as_ref().map_or( + RelayLanguageConfig::TypeScript, + |v| match v { + RelayLanguage::JavaScript => RelayLanguageConfig::JavaScript, + RelayLanguage::TypeScript => RelayLanguageConfig::TypeScript, + RelayLanguage::Flow => RelayLanguageConfig::Flow, + }, + ), + ..Default::default() + }; + Self { config: options } } } diff --git a/crates/turbopack-ecmascript-plugins/src/transform/styled_components.rs b/crates/turbopack-ecmascript-plugins/src/transform/styled_components.rs index 43403779d5135..bedb6886db421 100644 --- a/crates/turbopack-ecmascript-plugins/src/transform/styled_components.rs +++ b/crates/turbopack-ecmascript-plugins/src/transform/styled_components.rs @@ -4,18 +4,86 @@ use anyhow::Result; use async_trait::async_trait; use swc_core::{ common::FileName, - ecma::{ast::Program, visit::VisitMutWith}, + ecma::{ast::Program, atoms::JsWord, visit::VisitMutWith}, }; use turbopack_ecmascript::{CustomTransformer, TransformContext}; +#[turbo_tasks::value(transparent)] +pub struct 
OptionStyledComponentsTransformConfig(Option); + +#[turbo_tasks::value(shared)] +#[derive(Clone, Debug)] +#[serde(rename_all = "camelCase")] +pub struct StyledComponentsTransformConfig { + pub display_name: bool, + pub ssr: bool, + pub file_name: bool, + pub top_level_import_paths: Vec, + pub meaningless_file_names: Vec, + pub css_prop: bool, + pub namespace: Option, +} + +impl Default for StyledComponentsTransformConfig { + fn default() -> Self { + StyledComponentsTransformConfig { + display_name: true, + ssr: true, + file_name: true, + top_level_import_paths: vec![], + meaningless_file_names: vec!["index".to_string()], + css_prop: true, + namespace: None, + } + } +} + +#[turbo_tasks::value_impl] +impl StyledComponentsTransformConfigVc { + #[turbo_tasks::function] + pub fn default() -> Self { + Self::cell(Default::default()) + } +} + +impl Default for StyledComponentsTransformConfigVc { + fn default() -> Self { + Self::default() + } +} + #[derive(Debug)] pub struct StyledComponentsTransformer { config: styled_components::Config, } impl StyledComponentsTransformer { - pub fn new(config: styled_components::Config) -> Self { - Self { config } + pub fn new(config: &StyledComponentsTransformConfig) -> Self { + let mut options = styled_components::Config { + display_name: config.display_name, + ssr: config.ssr, + file_name: config.file_name, + css_prop: config.css_prop, + ..Default::default() + }; + + if let Some(namespace) = &config.namespace { + options.namespace = namespace.clone(); + } + + let top_level_import_paths = &config.top_level_import_paths; + if !top_level_import_paths.is_empty() { + options.top_level_import_paths = top_level_import_paths + .iter() + .map(|s| JsWord::from(s.clone())) + .collect(); + } + let meaningless_file_names = &config.meaningless_file_names; + if !meaningless_file_names.is_empty() { + options.meaningless_file_names = meaningless_file_names.clone(); + } + + Self { config: options } } } diff --git 
a/crates/turbopack-ecmascript/src/parse.rs b/crates/turbopack-ecmascript/src/parse.rs index cd86c0e2c7265..10f28764f6ca4 100644 --- a/crates/turbopack-ecmascript/src/parse.rs +++ b/crates/turbopack-ecmascript/src/parse.rs @@ -267,6 +267,7 @@ async fn parse_content( allow_super_outside_method: true, allow_return_outside_function: true, auto_accessors: true, + using_decl: true, }), EcmascriptModuleAssetType::Typescript | EcmascriptModuleAssetType::TypescriptWithTypes => { diff --git a/crates/turbopack-ecmascript/src/references/mod.rs b/crates/turbopack-ecmascript/src/references/mod.rs index 3a78abcc4c82d..97167654322fb 100644 --- a/crates/turbopack-ecmascript/src/references/mod.rs +++ b/crates/turbopack-ecmascript/src/references/mod.rs @@ -2298,6 +2298,12 @@ fn for_each_ident_in_decl(decl: &Decl, f: &mut impl FnMut(String)) { .iter() .for_each(|VarDeclarator { name, .. }| for_each_ident_in_pat(name, f)); } + Decl::Using(using_decl) => { + let decls = &*using_decl.decls; + decls + .iter() + .for_each(|VarDeclarator { name, .. 
}| for_each_ident_in_pat(name, f)); + } Decl::TsInterface(_) | Decl::TsTypeAlias(_) | Decl::TsEnum(_) | Decl::TsModule(_) => { // ignore typescript for code generation } diff --git a/crates/turbopack-ecmascript/src/transform/mod.rs b/crates/turbopack-ecmascript/src/transform/mod.rs index 5bf97f1508c98..ccd1dc496ab2e 100644 --- a/crates/turbopack-ecmascript/src/transform/mod.rs +++ b/crates/turbopack-ecmascript/src/transform/mod.rs @@ -1,6 +1,3 @@ -mod server_to_client_proxy; -mod util; - use std::{fmt::Debug, hash::Hash, path::PathBuf, sync::Arc}; use anyhow::Result; @@ -9,16 +6,15 @@ use swc_core::{ base::SwcComments, common::{chain, util::take::Take, FileName, Mark, SourceMap}, ecma::{ - ast::{Module, ModuleItem, Program}, + ast::{Module, ModuleItem, Program, Script}, atoms::JsWord, preset_env::{self, Targets}, transforms::{ - base::{feature::FeatureFlag, helpers::inject_helpers, resolver, Assumptions}, + base::{feature::FeatureFlag, helpers::inject_helpers, Assumptions}, react::react, }, visit::{FoldWith, VisitMutWith}, }, - quote, }; use turbo_tasks::primitives::{OptionStringVc, StringVc, StringsVc}; use turbo_tasks_fs::FileSystemPathVc; @@ -27,16 +23,9 @@ use turbopack_core::{ issue::{Issue, IssueSeverity, IssueSeverityVc, IssueVc}, }; -use self::{ - server_to_client_proxy::create_proxy_module, - util::{is_client_module, is_server_module}, -}; - #[turbo_tasks::value(serialization = "auto_for_input")] #[derive(Debug, Clone, PartialOrd, Ord, Hash)] pub enum EcmascriptInputTransform { - ClientDirective(StringVc), - ServerDirective(StringVc), CommonJs, Plugin(TransformPluginVc), PresetEnv(EnvironmentVc), @@ -148,7 +137,6 @@ impl EcmascriptInputTransform { source_map, top_level_mark, unresolved_mark, - file_name_str, file_name_hash, file_path, .. 
@@ -221,7 +209,22 @@ impl EcmascriptInputTransform { ..Default::default() }; - let module_program = unwrap_module_program(program); + let module_program = std::mem::replace(program, Program::Module(Module::dummy())); + + let module_program = if let Program::Script(Script { + span, + mut body, + shebang, + }) = module_program + { + Program::Module(Module { + span, + body: body.drain(..).map(|stmt| ModuleItem::Stmt(stmt)).collect(), + shebang, + }) + } else { + module_program + }; *program = module_program.fold_with(&mut chain!( preset_env::preset_env( @@ -311,31 +314,6 @@ impl EcmascriptInputTransform { inject_helpers(unresolved_mark) )); } - // [TODO]: WEB-940 - use ClientDirectiveTransformer in next-swc - EcmascriptInputTransform::ClientDirective(transition_name) => { - if is_client_module(program) { - let transition_name = &*transition_name.await?; - *program = create_proxy_module(transition_name, &format!("./{file_name_str}")); - program.visit_mut_with(&mut resolver(unresolved_mark, top_level_mark, false)); - } - } - // [TODO]: WEB-940 - use ServerDirectiveTransformer in next-swc - EcmascriptInputTransform::ServerDirective(_transition_name) => { - if is_server_module(program) { - let stmt = quote!( - "throw new Error('Server actions (\"use server\") are not yet supported in \ - Turbopack');" as Stmt - ); - match program { - Program::Module(m) => m.body = vec![ModuleItem::Stmt(stmt)], - Program::Script(s) => s.body = vec![stmt], - } - UnsupportedServerActionIssue { context: file_path } - .cell() - .as_issue() - .emit(); - } - } EcmascriptInputTransform::Plugin(transform) => { transform.await?.transform(program, ctx).await? 
} @@ -355,21 +333,6 @@ pub fn remove_shebang(program: &mut Program) { } } -fn unwrap_module_program(program: &mut Program) -> Program { - match program { - Program::Module(module) => Program::Module(module.take()), - Program::Script(s) => Program::Module(Module { - span: s.span, - body: s - .body - .iter() - .map(|stmt| ModuleItem::Stmt(stmt.clone())) - .collect(), - shebang: s.shebang.clone(), - }), - } -} - #[turbo_tasks::value(shared)] pub struct UnsupportedServerActionIssue { pub context: FileSystemPathVc, diff --git a/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs b/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs deleted file mode 100644 index fbdb7f7cb7471..0000000000000 --- a/crates/turbopack-ecmascript/src/transform/server_to_client_proxy.rs +++ /dev/null @@ -1,53 +0,0 @@ -use swc_core::{ - common::DUMMY_SP, - ecma::{ - ast::{ - Expr, ExprStmt, Ident, ImportDecl, ImportDefaultSpecifier, ImportSpecifier, - KeyValueProp, Lit, Module, ModuleDecl, ModuleItem, ObjectLit, Program, Prop, PropName, - PropOrSpread, Stmt, Str, - }, - utils::private_ident, - }, - quote, -}; - -use crate::references::TURBOPACK_HELPER; - -#[deprecated(note = "use Client/ServerDirectiveTransformer instead")] -pub fn create_proxy_module(transition_name: &str, target_import: &str) -> Program { - let ident = private_ident!("createProxy"); - Program::Module(Module { - body: vec![ - ModuleItem::Stmt(Stmt::Expr(ExprStmt { - expr: Box::new(Expr::Lit(Lit::Str(Str { - value: format!("TURBOPACK {{ transition: {transition_name} }}").into(), - raw: None, - span: DUMMY_SP, - }))), - span: DUMMY_SP, - })), - ModuleItem::ModuleDecl(ModuleDecl::Import(ImportDecl { - specifiers: vec![ImportSpecifier::Default(ImportDefaultSpecifier { - local: ident.clone(), - span: DUMMY_SP, - })], - src: Box::new(target_import.into()), - type_only: false, - asserts: Some(Box::new(ObjectLit { - span: DUMMY_SP, - props: vec![PropOrSpread::Prop(Box::new(Prop::KeyValue(KeyValueProp { 
- key: PropName::Ident(Ident::new(TURBOPACK_HELPER.into(), DUMMY_SP)), - value: Box::new(Expr::Lit(true.into())), - })))], - })), - span: DUMMY_SP, - })), - ModuleItem::Stmt(quote!( - "__turbopack_export_namespace__($proxy);" as Stmt, - proxy = ident, - )), - ], - shebang: None, - span: DUMMY_SP, - }) -} diff --git a/crates/turbopack-ecmascript/src/transform/util.rs b/crates/turbopack-ecmascript/src/transform/util.rs deleted file mode 100644 index a08dac45dcfac..0000000000000 --- a/crates/turbopack-ecmascript/src/transform/util.rs +++ /dev/null @@ -1,35 +0,0 @@ -use swc_core::ecma::ast::{Lit, Program}; - -macro_rules! has_directive { - ($stmts:expr, $name:literal) => { - $stmts - .map(|item| { - if let Lit::Str(str) = item?.as_expr()?.expr.as_lit()? { - Some(str) - } else { - None - } - }) - .take_while(Option::is_some) - .map(Option::unwrap) - .any(|s| &*s.value == $name) - }; -} - -pub fn is_client_module(program: &Program) -> bool { - match program { - Program::Module(m) => { - has_directive!(m.body.iter().map(|item| item.as_stmt()), "use client") - } - Program::Script(s) => has_directive!(s.body.iter().map(Some), "use client"), - } -} - -pub fn is_server_module(program: &Program) -> bool { - match program { - Program::Module(m) => { - has_directive!(m.body.iter().map(|item| item.as_stmt()), "use server") - } - Program::Script(s) => has_directive!(s.body.iter().map(Some), "use server"), - } -} diff --git a/crates/turbopack-mdx/src/lib.rs b/crates/turbopack-mdx/src/lib.rs index 6622d48bb4978..ae09eb867e616 100644 --- a/crates/turbopack-mdx/src/lib.rs +++ b/crates/turbopack-mdx/src/lib.rs @@ -40,6 +40,7 @@ pub struct MdxTransformOptions { pub preserve_jsx: bool, pub jsx_runtime: Option, pub jsx_import_source: Option, + pub provider_import_source: Option, } impl Default for MdxTransformOptions { @@ -49,6 +50,7 @@ impl Default for MdxTransformOptions { preserve_jsx: false, jsx_runtime: None, jsx_import_source: None, + provider_import_source: None, } } } @@ -108,6 
+110,7 @@ async fn into_ecmascript_module_asset( let options = Options { development: transform_options.development, + provider_import_source: transform_options.provider_import_source.clone(), jsx: transform_options.preserve_jsx, // true means 'preserve' jsx syntax. jsx_runtime, jsx_import_source: transform_options diff --git a/crates/turbopack-tests/tests/snapshot.rs b/crates/turbopack-tests/tests/snapshot.rs index 8a818f8cc733f..e764da96200d3 100644 --- a/crates/turbopack-tests/tests/snapshot.rs +++ b/crates/turbopack-tests/tests/snapshot.rs @@ -19,10 +19,10 @@ use turbo_tasks_fs::{ use turbo_tasks_memory::MemoryBackend; use turbopack::{ condition::ContextCondition, - ecmascript::EcmascriptModuleAssetVc, + ecmascript::{EcmascriptModuleAssetVc, TransformPluginVc}, module_options::{ - JsxTransformOptions, JsxTransformOptionsVc, ModuleOptionsContext, - StyledComponentsTransformConfigVc, + CustomEcmascriptTransformPlugins, CustomEcmascriptTransformPluginsVc, JsxTransformOptions, + JsxTransformOptionsVc, ModuleOptionsContext, }, resolve_options_context::ResolveOptionsContext, transition::TransitionsByNameVc, @@ -43,7 +43,10 @@ use turbopack_core::{ source_asset::SourceAssetVc, }; use turbopack_dev::DevChunkingContextVc; -use turbopack_ecmascript_plugins::transform::emotion::EmotionTransformConfig; +use turbopack_ecmascript_plugins::transform::{ + emotion::{EmotionTransformConfig, EmotionTransformer}, + styled_components::StyledComponentsTransformConfigVc, +}; use turbopack_env::ProcessEnvAssetVc; use turbopack_test_utils::snapshot::{diff, expected, matches_expected, snapshot_issues}; @@ -213,6 +216,18 @@ async fn run_test(resource: &str) -> Result { } .cell(), )], + custom_ecma_transform_plugins: Some(CustomEcmascriptTransformPluginsVc::cell( + CustomEcmascriptTransformPlugins { + source_transforms: vec![TransformPluginVc::cell(Box::new( + EmotionTransformer::new(&EmotionTransformConfig { + sourcemap: Some(false), + ..Default::default() + }) + .unwrap(), + ))], + 
output_transforms: vec![], + }, + )), ..Default::default() } .into(), diff --git a/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js b/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js index 9faac2c50aeec..67e3df5963889 100644 --- a/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js +++ b/crates/turbopack-tests/tests/snapshot/basic/async_chunk/output/crates_turbopack-tests_tests_snapshot_basic_async_chunk_input_index_0d348e.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. 
- registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js b/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js index ee4075eb29004..35770e1121614 100644 --- a/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js +++ b/crates/turbopack-tests/tests/snapshot/basic/chunked/output/crates_turbopack-tests_tests_snapshot_basic_chunked_input_index_e77e9f.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. 
+ try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js b/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js index a4847ad6eaff1..b3526d8ff9a51 100644 --- a/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js +++ b/crates/turbopack-tests/tests/snapshot/basic/shebang/output/crates_turbopack-tests_tests_snapshot_basic_shebang_input_index_b1f0c2.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js b/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js index 4487995094958..21a3fd426bba9 100644 --- a/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js +++ b/crates/turbopack-tests/tests/snapshot/comptime/define/output/crates_turbopack-tests_tests_snapshot_comptime_define_input_index_6b0d2b.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when 
the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js b/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js index c78ddb8403f55..93acd8dba7818 100644 --- a/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js +++ b/crates/turbopack-tests/tests/snapshot/css/absolute-uri-import/output/crates_turbopack-tests_tests_snapshot_css_absolute-uri-import_input_index_fa9a30.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js index b94e3db0e07d8..007b7a651beb4 100644 --- a/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js +++ b/crates/turbopack-tests/tests/snapshot/css/css/output/crates_turbopack-tests_tests_snapshot_css_css_input_index_37a138.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. 
+ try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js index cbb2213240007..9a40e3c27baf7 100644 --- a/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js +++ b/crates/turbopack-tests/tests/snapshot/emotion/emotion/output/crates_turbopack-tests_tests_snapshot_emotion_emotion_input_index_b080c4.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js b/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js index 048efca1a5c08..940fd535b1e8a 100644 --- a/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js +++ b/crates/turbopack-tests/tests/snapshot/env/env/output/crates_turbopack-tests_tests_snapshot_env_env_input_index_29a23f.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. 
+ try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js b/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js index ff834e2efc6e3..00d9330d5d92e 100644 --- a/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js +++ b/crates/turbopack-tests/tests/snapshot/evaluated_entrry/runtime_entry/output/a587c_tests_snapshot_evaluated_entrry_runtime_entry_input_index_f59cc7.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js b/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js index f0c25a2106a57..b5e8880764033 100644 --- a/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js +++ b/crates/turbopack-tests/tests/snapshot/example/example/output/crates_turbopack-tests_tests_snapshot_example_example_input_index_78b6bf.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when 
the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js index 764c6d18f653c..076674a4c8ff5 100644 --- a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js +++ b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-2/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-2_input_index_289ae7.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js index 77ae01e426dca..5f9df13220446 100644 --- a/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js +++ b/crates/turbopack-tests/tests/snapshot/export-alls/cjs-script/output/crates_turbopack-tests_tests_snapshot_export-alls_cjs-script_input_index_3e96b7.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) 
=> { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js b/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js index 96214f1fda13b..be59d90b341b6 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/cjs/output/crates_turbopack-tests_tests_snapshot_import-meta_cjs_input_index_537553.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js index 756b271c5eef6..d6f027efb234b 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm-multiple/output/79fb1_turbopack-tests_tests_snapshot_import-meta_esm-multiple_input_index_c00392.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - 
runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js index 85b62498eb3da..c9b2cfbc731b7 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm-mutable/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-mutable_input_index_6c9201.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js index 5a856f3271654..19af0fa2736fb 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm-object/output/crates_turbopack-tests_tests_snapshot_import-meta_esm-object_input_index_6fcf7d.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) 
=> { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js b/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js index f03d318c58144..54ba2a78ff9a1 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/esm/output/crates_turbopack-tests_tests_snapshot_import-meta_esm_input_index_c4c88a.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js b/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js index 0eeaa505be03d..e8716224cd143 100644 --- a/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js +++ b/crates/turbopack-tests/tests/snapshot/import-meta/url/output/crates_turbopack-tests_tests_snapshot_import-meta_url_input_index_988b57.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when 
the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js b/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js index 0c2e44afe9be8..72e7915a5132e 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js +++ b/crates/turbopack-tests/tests/snapshot/imports/dynamic/output/crates_turbopack-tests_tests_snapshot_imports_dynamic_input_index_45c162.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js b/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js index cab2b1da5f1a1..10f02270be3c2 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js +++ b/crates/turbopack-tests/tests/snapshot/imports/json/output/crates_turbopack-tests_tests_snapshot_imports_json_input_index_961ae2.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a 
runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js b/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js new file mode 100644 index 0000000000000..c0dc83d3308ac --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js @@ -0,0 +1,6 @@ +import posts from "./posts"; + +console.log(posts.js); +if (!posts.js) { + process.exit(1); +} diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.json b/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.json new file mode 100644 index 0000000000000..d8351c5dda596 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.json @@ -0,0 +1,3 @@ +{ + "js": false +} diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.ts b/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.ts new file mode 100644 index 0000000000000..5ec2a6d7e517d --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.ts @@ -0,0 +1,3 @@ +export default { + js: true, +}; diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_8ff67b.js b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_8ff67b.js new file mode 100644 index 0000000000000..41a2752901868 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_8ff67b.js @@ -0,0 +1,11 @@ +(globalThis.TURBOPACK = globalThis.TURBOPACK || []).push([ + 
"output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_8ff67b.js", + {}, +]); +(globalThis.TURBOPACK_CHUNK_LISTS = globalThis.TURBOPACK_CHUNK_LISTS || []).push({ + "path": "output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_8ff67b.js", + "chunks": [ + "output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js" + ], + "source": "entry" +}); diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js new file mode 100644 index 0000000000000..b750b8e5a20b0 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js @@ -0,0 +1,1738 @@ +(globalThis.TURBOPACK = globalThis.TURBOPACK || []).push([ + "output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js", + {}, + {"otherChunks":[{"path":"output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js","included":["[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js (ecmascript)"]}],"runtimeModuleIds":["[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js (ecmascript)"]} +]); +(() => { +if (!Array.isArray(globalThis.TURBOPACK)) { + return; +} +/* eslint-disable @next/next/no-assign-module-variable */ + +/** @typedef {import('../types').ChunkRegistration} ChunkRegistration */ +/** @typedef {import('../types').ModuleFactory} ModuleFactory */ + +/** @typedef {import('../types').ChunkPath} ChunkPath */ +/** @typedef {import('../types').ModuleId} ModuleId */ +/** @typedef {import('../types').GetFirstModuleChunk} GetFirstModuleChunk */ +/** @typedef {import('../types').ChunkList} ChunkList */ + +/** @typedef {import('../types').Module} Module */ +/** @typedef 
{import('../types').ChunkData} ChunkData */ +/** @typedef {import('../types').SourceInfo} SourceInfo */ +/** @typedef {import('../types').SourceType} SourceType */ +/** @typedef {import('../types').SourceType.Runtime} SourceTypeRuntime */ +/** @typedef {import('../types').SourceType.Parent} SourceTypeParent */ +/** @typedef {import('../types').SourceType.Update} SourceTypeUpdate */ +/** @typedef {import('../types').Exports} Exports */ +/** @typedef {import('../types').EsmNamespaceObject} EsmNamespaceObject */ +/** @typedef {import('../types').RequireContext} RequireContext */ +/** @typedef {import('../types').RequireContextMap} RequireContextMap */ + +/** @typedef {import('../types').RefreshHelpers} RefreshHelpers */ +/** @typedef {import('../types').RefreshContext} RefreshContext */ +/** @typedef {import('../types/hot').Hot} Hot */ +/** @typedef {import('../types/hot').HotData} HotData */ +/** @typedef {import('../types/hot').AcceptCallback} AcceptCallback */ +/** @typedef {import('../types/hot').AcceptErrorHandler} AcceptErrorHandler */ +/** @typedef {import('../types/hot').HotState} HotState */ +/** @typedef {import('../types/protocol').PartialUpdate} PartialUpdate */ +/** @typedef {import('../types/protocol').ChunkListUpdate} ChunkListUpdate */ +/** @typedef {import('../types/protocol').EcmascriptMergedUpdate} EcmascriptMergedUpdate */ +/** @typedef {import('../types/protocol').EcmascriptMergedChunkUpdate} EcmascriptMergedChunkUpdate */ +/** @typedef {import('../types/protocol').EcmascriptModuleEntry} EcmascriptModuleEntry */ + +/** @typedef {import('../types/runtime').ModuleEffect} ModuleEffect */ + +/** @type {Object.} */ +const moduleFactories = { __proto__: null }; +/** @type {Object.} */ +const moduleCache = { __proto__: null }; +/** + * Maps module IDs to persisted data between executions of their hot module + * implementation (`hot.data`). + * + * @type {Map} + */ +const moduleHotData = new Map(); +/** + * Maps module instances to their hot module state. 
+ * + * @type {Map} + */ +const moduleHotState = new Map(); +/** + * Module IDs that are instantiated as part of the runtime of a chunk. + * + * @type {Set} + */ +const runtimeModules = new Set(); +/** + * Map from module ID to the chunks that contain this module. + * + * In HMR, we need to keep track of which modules are contained in which so + * chunks. This is so we don't eagerly dispose of a module when it is removed + * from chunk A, but still exists in chunk B. + * + * @type {Map>} + */ +const moduleChunksMap = new Map(); +/** + * Map from chunk path to all modules it contains. + * @type {Map>} + */ +const chunkModulesMap = new Map(); +/** + * Chunk lists that contain a runtime. When these chunk lists receive an update + * that can't be reconciled with the current state of the page, we need to + * reload the runtime entirely. + * @type {Set} + */ +const runtimeChunkLists = new Set(); +/** + * Map from chunk list to the chunk paths it contains. + * @type {Map>} + */ +const chunkListChunksMap = new Map(); +/** + * Map from chunk path to the chunk lists it belongs to. 
+ * @type {Map>} + */ +const chunkChunkListsMap = new Map(); + +const hOP = Object.prototype.hasOwnProperty; + +const toStringTag = typeof Symbol !== "undefined" && Symbol.toStringTag; + +/** + * @param {any} obj + * @param {PropertyKey} name + * @param {PropertyDescriptor & ThisType} options + */ +function defineProp(obj, name, options) { + if (!hOP.call(obj, name)) Object.defineProperty(obj, name, options); +} + +/** + * Adds the getters to the exports object + * + * @param {Exports} exports + * @param {Record any>} getters + */ +function esm(exports, getters) { + defineProp(exports, "__esModule", { value: true }); + if (toStringTag) defineProp(exports, toStringTag, { value: "Module" }); + for (const key in getters) { + defineProp(exports, key, { get: getters[key], enumerable: true }); + } +} + +/** + * Makes the module an ESM with exports + * + * @param {Module} module + * @param {Record any>} getters + */ +function makeEsm(module, getters) { + esm((module.namespaceObject = module.exports), getters); +} + +/** + * Adds the getters to the exports object + * + * @param {Exports} exports + * @param {Record} props + */ +function cjs(exports, props) { + for (const key in props) { + defineProp(exports, key, { get: () => props[key], enumerable: true }); + } +} + +/** + * @param {Module} module + * @param {any} value + */ +function exportValue(module, value) { + module.exports = value; +} + +/** + * @param {Module} module + * @param {any} namespace + */ +function exportNamespace(module, namespace) { + module.exports = module.namespaceObject = namespace; +} + +/** + * @param {Record} obj + * @param {string} key + */ +function createGetter(obj, key) { + return () => obj[key]; +} + +/** + * @param {any} obj + * @returns {any} prototype of the object + */ +const getProto = Object.getPrototypeOf + ? 
(obj) => Object.getPrototypeOf(obj) + : (obj) => obj.__proto__; + +/** Prototypes that are not expanded for exports */ +const LEAF_PROTOTYPES = [null, getProto({}), getProto([]), getProto(getProto)]; + +/** + * @param {Exports} raw + * @param {EsmNamespaceObject} ns + * @param {boolean} [allowExportDefault] false: will have the raw module as default export, true: will have the default property as default export + */ +function interopEsm(raw, ns, allowExportDefault) { + /** @type {Object. any>} */ + const getters = { __proto__: null }; + for ( + let current = raw; + (typeof current === "object" || typeof current === "function") && + !LEAF_PROTOTYPES.includes(current); + current = getProto(current) + ) { + for (const key of Object.getOwnPropertyNames(current)) { + getters[key] = createGetter(raw, key); + } + } + if (!(allowExportDefault && "default" in getters)) { + getters["default"] = () => raw; + } + esm(ns, getters); +} + +/** + * @param {Module} sourceModule + * @param {ModuleId} id + * @returns {EsmNamespaceObject} + */ +function esmImport(sourceModule, id) { + const module = getOrInstantiateModuleFromParent(id, sourceModule); + if (module.error) throw module.error; + if (module.namespaceObject) return module.namespaceObject; + const raw = module.exports; + const ns = (module.namespaceObject = {}); + interopEsm(raw, ns, raw.__esModule); + return ns; +} + +/** + * @param {Module} sourceModule + * @param {ModuleId} id + * @returns {Exports} + */ +function commonJsRequire(sourceModule, id) { + const module = getOrInstantiateModuleFromParent(id, sourceModule); + if (module.error) throw module.error; + return module.exports; +} + +/** + * @param {Module} sourceModule + * @param {RequireContextMap} map + * @returns {RequireContext} + */ +function requireContext(sourceModule, map) { + /** + * @param {ModuleId} id + * @returns {Exports} + */ + function requireContext(id) { + const entry = map[id]; + + if (!entry) { + throw new Error( + `module ${id} is required from a 
require.context, but is not in the context` + ); + } + + return entry.internal + ? commonJsRequire(sourceModule, entry.id()) + : externalRequire(entry.id(), false); + } + + /** + * @returns {ModuleId[]} + */ + requireContext.keys = () => { + return Object.keys(map); + }; + + /** + * @param {ModuleId} id + * @returns {ModuleId} + */ + requireContext.resolve = (id) => { + const entry = map[id]; + + if (!entry) { + throw new Error( + `module ${id} is resolved from a require.context, but is not in the context` + ); + } + + return entry.id(); + }; + + return requireContext; +} + +/** + * @param {ModuleId} id + * @param {boolean} esm + * @returns {Exports | EsmNamespaceObject} + */ +function externalRequire(id, esm) { + let raw; + try { + raw = require(id); + } catch (err) { + // TODO(alexkirsz) This can happen when a client-side module tries to load + // an external module we don't provide a shim for (e.g. querystring, url). + // For now, we fail semi-silently, but in the future this should be a + // compilation error. 
+ throw new Error(`Failed to load external module ${id}: ${err}`); + } + if (!esm) { + return raw; + } + const ns = {}; + interopEsm(raw, ns, raw.__esModule); + return ns; +} +externalRequire.resolve = (name, opt) => { + return require.resolve(name, opt); +}; + +/** @type {Map | true>} */ +const availableModules = new Map(); + +/** @type {Map | true>} */ +const availableModuleChunks = new Map(); + +/** + * @param {SourceInfo} source + * @param {ChunkData} chunkData + * @returns {Promise} + */ +async function loadChunk(source, chunkData) { + if (typeof chunkData === "string") { + return loadChunkPath(source, chunkData); + } + + const includedList = chunkData.included || []; + const modulesPromises = includedList.map((included) => { + if (moduleFactories[included]) return true; + return availableModules.get(included); + }); + if (modulesPromises.length > 0 && modulesPromises.every((p) => p)) { + // When all included items are already loaded or loading, we can skip loading ourselves + return Promise.all(modulesPromises); + } + + const includedModuleChunksList = chunkData.moduleChunks || []; + const moduleChunksPromises = includedModuleChunksList + .map((included) => { + // TODO(alexkirsz) Do we need this check? + // if (moduleFactories[included]) return true; + return availableModuleChunks.get(included); + }) + .filter((p) => p); + + let promise; + if (moduleChunksPromises.length > 0) { + // Some module chunks are already loaded or loading. 
+ + if (moduleChunksPromises.length == includedModuleChunksList.length) { + // When all included module chunks are already loaded or loading, we can skip loading ourselves + return Promise.all(moduleChunksPromises); + } + + const moduleChunksToLoad = new Set(); + for (const moduleChunk of includedModuleChunksList) { + if (!availableModuleChunks.has(moduleChunk)) { + moduleChunksToLoad.add(moduleChunk); + } + } + + for (const moduleChunkToLoad of moduleChunksToLoad) { + const promise = loadChunkPath(source, moduleChunkToLoad); + + availableModuleChunks.set(moduleChunkToLoad, promise); + + moduleChunksPromises.push(promise); + } + + promise = Promise.all(moduleChunksPromises); + } else { + promise = loadChunkPath(source, chunkData.path); + + // Mark all included module chunks as loading if they are not already loaded or loading. + for (const includedModuleChunk of includedModuleChunksList) { + if (!availableModuleChunks.has(includedModuleChunk)) { + availableModuleChunks.set(includedModuleChunk, promise); + } + } + } + + for (const included of includedList) { + if (!availableModules.has(included)) { + // It might be better to race old and new promises, but it's rare that the new promise will be faster than a request started earlier. + // In production it's even more rare, because the chunk optimization tries to deduplicate modules anyway. 
+ availableModules.set(included, promise); + } + } + + return promise; +} + +/** + * @param {SourceInfo} source + * @param {ChunkPath} chunkPath + * @returns {Promise} + */ +async function loadChunkPath(source, chunkPath) { + try { + await BACKEND.loadChunk(chunkPath, source); + } catch (error) { + let loadReason; + switch (source.type) { + case SourceTypeRuntime: + loadReason = `as a runtime dependency of chunk ${source.chunkPath}`; + break; + case SourceTypeParent: + loadReason = `from module ${source.parentId}`; + break; + case SourceTypeUpdate: + loadReason = "from an HMR update"; + break; + } + throw new Error( + `Failed to load chunk ${chunkPath} ${loadReason}${ + error ? `: ${error}` : "" + }` + ); + } +} + +/** @type {SourceTypeRuntime} */ +const SourceTypeRuntime = 0; +/** @type {SourceTypeParent} */ +const SourceTypeParent = 1; +/** @type {SourceTypeUpdate} */ +const SourceTypeUpdate = 2; + +/** + * + * @param {ModuleId} id + * @param {SourceInfo} source + * @returns {Module} + */ +function instantiateModule(id, source) { + /** @type {ModuleFactory} */ + const moduleFactory = moduleFactories[id]; + if (typeof moduleFactory !== "function") { + // This can happen if modules incorrectly handle HMR disposes/updates, + // e.g. when they keep a `setTimeout` around which still executes old code + // and contains e.g. a `require("something")` call. + let instantiationReason; + switch (source.type) { + case SourceTypeRuntime: + instantiationReason = `as a runtime entry of chunk ${source.chunkPath}`; + break; + case SourceTypeParent: + instantiationReason = `because it was required from module ${source.parentId}`; + break; + case SourceTypeUpdate: + instantiationReason = "because of an HMR update"; + break; + } + throw new Error( + `Module ${id} was instantiated ${instantiationReason}, but the module factory is not available. 
It might have been deleted in an HMR update.` + ); + } + + const hotData = moduleHotData.get(id); + const { hot, hotState } = createModuleHot(hotData); + + /** @type {Module} */ + const module = { + exports: {}, + error: undefined, + loaded: false, + id, + parents: undefined, + children: [], + namespaceObject: undefined, + hot, + }; + moduleCache[id] = module; + moduleHotState.set(module, hotState); + + switch (source.type) { + case SourceTypeRuntime: + runtimeModules.add(id); + module.parents = []; + break; + case SourceTypeParent: + // No need to add this module as a child of the parent module here, this + // has already been taken care of in `getOrInstantiateModuleFromParent`. + module.parents = [source.parentId]; + break; + case SourceTypeUpdate: + module.parents = source.parents || []; + break; + } + + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { + moduleFactory.call(module.exports, { + e: module.exports, + r: commonJsRequire.bind(null, module), + x: externalRequire, + f: requireContext.bind(null, module), + i: esmImport.bind(null, module), + s: makeEsm.bind(null, module), + j: cjs.bind(null, module.exports), + v: exportValue.bind(null, module), + n: exportNamespace.bind(null, module), + m: module, + c: moduleCache, + l: loadChunk.bind(null, { type: SourceTypeParent, parentId: id }), + g: globalThis, + k: refresh, + __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), + }); + }); + } catch (error) { + module.error = error; + throw error; + } + + module.loaded = true; + if (module.namespaceObject && module.exports !== module.namespaceObject) { + // in case of a circular dependency: cjs1 -> esm2 -> cjs1 + interopEsm(module.exports, module.namespaceObject); + } + + return module; +} + +/** + * NOTE(alexkirsz) Webpack has an "module execution" interception hook that + * Next.js' React Refresh runtime hooks into to add module context to the + * refresh registry. 
+ * + * @param {Module} module + * @param {(ctx: RefreshContext) => void} executeModule + */ +function runModuleExecutionHooks(module, executeModule) { + const cleanupReactRefreshIntercept = + typeof globalThis.$RefreshInterceptModuleExecution$ === "function" + ? globalThis.$RefreshInterceptModuleExecution$(module.id) + : () => {}; + + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); + + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); + } +} + +/** + * Retrieves a module from the cache, or instantiate it if it is not cached. + * + * @param {ModuleId} id + * @param {Module} sourceModule + * @returns {Module} + */ +function getOrInstantiateModuleFromParent(id, sourceModule) { + if (!sourceModule.hot.active) { + console.warn( + `Unexpected import of module ${id} from module ${sourceModule.id}, which was deleted by an HMR update` + ); + } + + const module = moduleCache[id]; + + if (sourceModule.children.indexOf(id) === -1) { + sourceModule.children.push(id); + } + + if (module) { + if (module.parents.indexOf(sourceModule.id) === -1) { + module.parents.push(sourceModule.id); + } + + return module; + } + + return instantiateModule(id, { + type: SourceTypeParent, + parentId: sourceModule.id, + }); +} + +/** + * This is adapted from https://github.com/vercel/next.js/blob/3466862d9dc9c8bb3131712134d38757b918d1c0/packages/react-refresh-utils/internal/ReactRefreshModule.runtime.ts + * + * @param {Module} module + * @param {RefreshHelpers} helpers + */ +function registerExportsAndSetupBoundaryForReactRefresh(module, helpers) { + const currentExports = module.exports; + const prevExports 
= module.hot.data.prevExports ?? null; + + helpers.registerExportsForReactRefresh(currentExports, module.id); + + // A module can be accepted automatically based on its exports, e.g. when + // it is a Refresh Boundary. + if (helpers.isReactRefreshBoundary(currentExports)) { + // Save the previous exports on update so we can compare the boundary + // signatures. + module.hot.dispose((data) => { + data.prevExports = currentExports; + }); + // Unconditionally accept an update to this module, we'll check if it's + // still a Refresh Boundary later. + module.hot.accept(); + + // This field is set when the previous version of this module was a + // Refresh Boundary, letting us know we need to check for invalidation or + // enqueue an update. + if (prevExports !== null) { + // A boundary can become ineligible if its exports are incompatible + // with the previous exports. + // + // For example, if you add/remove/change exports, we'll want to + // re-execute the importing modules, and force those components to + // re-render. Similarly, if you convert a class component to a + // function, we want to invalidate the boundary. + if ( + helpers.shouldInvalidateReactRefreshBoundary( + prevExports, + currentExports + ) + ) { + module.hot.invalidate(); + } else { + helpers.scheduleUpdate(); + } + } + } else { + // Since we just executed the code for the module, it's possible that the + // new exports made it ineligible for being a boundary. + // We only care about the case when we were _previously_ a boundary, + // because we already accepted this update (accidental side effect). 
+ const isNoLongerABoundary = prevExports !== null; + if (isNoLongerABoundary) { + module.hot.invalidate(); + } + } +} + +/** + * @param {ModuleId[]} dependencyChain + * @returns {string} + */ +function formatDependencyChain(dependencyChain) { + return `Dependency chain: ${dependencyChain.join(" -> ")}`; +} + +/** + * @param {EcmascriptModuleEntry} entry + * @returns {ModuleFactory} + * @private + */ +function _eval({ code, url, map }) { + code += `\n\n//# sourceURL=${location.origin}${url}`; + if (map) code += `\n//# sourceMappingURL=${map}`; + return eval(code); +} + +/** + * @param {Map} added + * @param {Map} modified + * @returns {{outdatedModules: Set, newModuleFactories: Map}} + */ +function computeOutdatedModules(added, modified) { + const outdatedModules = new Set(); + const newModuleFactories = new Map(); + + for (const [moduleId, entry] of added) { + if (entry != null) { + newModuleFactories.set(moduleId, _eval(entry)); + } + } + + for (const [moduleId, entry] of modified) { + const effect = getAffectedModuleEffects(moduleId); + + switch (effect.type) { + case "unaccepted": + throw new Error( + `cannot apply update: unaccepted module. ${formatDependencyChain( + effect.dependencyChain + )}.` + ); + case "self-declined": + throw new Error( + `cannot apply update: self-declined module. ${formatDependencyChain( + effect.dependencyChain + )}.` + ); + case "accepted": + newModuleFactories.set(moduleId, _eval(entry)); + for (const outdatedModuleId of effect.outdatedModules) { + outdatedModules.add(outdatedModuleId); + } + break; + // TODO(alexkirsz) Dependencies: handle dependencies effects. 
+ } + } + + return { outdatedModules, newModuleFactories }; +} + +/** + * @param {Iterable} outdatedModules + * @returns {{ moduleId: ModuleId, errorHandler: true | Function }[]} + */ +function computeOutdatedSelfAcceptedModules(outdatedModules) { + const outdatedSelfAcceptedModules = []; + for (const moduleId of outdatedModules) { + const module = moduleCache[moduleId]; + const hotState = moduleHotState.get(module); + if (module && hotState.selfAccepted && !hotState.selfInvalidated) { + outdatedSelfAcceptedModules.push({ + moduleId, + errorHandler: hotState.selfAccepted, + }); + } + } + return outdatedSelfAcceptedModules; +} + +/** + * Adds, deletes, and moves modules between chunks. This must happen before the + * dispose phase as it needs to know which modules were removed from all chunks, + * which we can only compute *after* taking care of added and moved modules. + * + * @param {Map>} chunksAddedModules + * @param {Map>} chunksDeletedModules + * @returns {{ disposedModules: Set }} + */ +function updateChunksPhase(chunksAddedModules, chunksDeletedModules) { + for (const [chunkPath, addedModuleIds] of chunksAddedModules) { + for (const moduleId of addedModuleIds) { + addModuleToChunk(moduleId, chunkPath); + } + } + + const disposedModules = new Set(); + for (const [chunkPath, addedModuleIds] of chunksDeletedModules) { + for (const moduleId of addedModuleIds) { + if (removeModuleFromChunk(moduleId, chunkPath)) { + disposedModules.add(moduleId); + } + } + } + + return { disposedModules }; +} + +/** + * @param {Iterable} outdatedModules + * @param {Set} disposedModules + * @return {{ outdatedModuleParents: Map> }} + */ +function disposePhase(outdatedModules, disposedModules) { + for (const moduleId of outdatedModules) { + disposeModule(moduleId, "replace"); + } + + for (const moduleId of disposedModules) { + disposeModule(moduleId, "clear"); + } + + // Removing modules from the module cache is a separate step. 
+ // We also want to keep track of previous parents of the outdated modules. + const outdatedModuleParents = new Map(); + for (const moduleId of outdatedModules) { + const oldModule = moduleCache[moduleId]; + outdatedModuleParents.set(moduleId, oldModule?.parents); + delete moduleCache[moduleId]; + } + + // TODO(alexkirsz) Dependencies: remove outdated dependency from module + // children. + + return { outdatedModuleParents }; +} + +/** + * Disposes of an instance of a module. + * + * Returns the persistent hot data that should be kept for the next module + * instance. + * + * NOTE: mode = "replace" will not remove modules from the moduleCache. + * This must be done in a separate step afterwards. + * This is important because all modules need to be diposed to update the + * parent/child relationships before they are actually removed from the moduleCache. + * If this would be done in this method, following disposeModulecalls won't find + * the module from the module id in the cache. + * + * @param {ModuleId} moduleId + * @param {"clear" | "replace"} mode + */ +function disposeModule(moduleId, mode) { + const module = moduleCache[moduleId]; + if (!module) { + return; + } + + const hotState = moduleHotState.get(module); + const data = {}; + + // Run the `hot.dispose` handler, if any, passing in the persistent + // `hot.data` object. + for (const disposeHandler of hotState.disposeHandlers) { + disposeHandler(data); + } + + // This used to warn in `getOrInstantiateModuleFromParent` when a disposed + // module is still importing other modules. + module.hot.active = false; + + moduleHotState.delete(module); + + // TODO(alexkirsz) Dependencies: delete the module from outdated deps. + + // Remove the disposed module from its children's parents list. + // It will be added back once the module re-instantiates and imports its + // children again. 
+ for (const childId of module.children) { + const child = moduleCache[childId]; + if (!child) { + continue; + } + + const idx = child.parents.indexOf(module.id); + if (idx >= 0) { + child.parents.splice(idx, 1); + } + } + + switch (mode) { + case "clear": + delete moduleCache[module.id]; + moduleHotData.delete(module.id); + break; + case "replace": + moduleHotData.set(module.id, data); + break; + default: + invariant(mode, (mode) => `invalid mode: ${mode}`); + } +} + +/** + * + * @param {{ moduleId: ModuleId, errorHandler: true | Function }[]} outdatedSelfAcceptedModules + * @param {Map} newModuleFactories + * @param {Map>} outdatedModuleParents + */ +function applyPhase( + outdatedSelfAcceptedModules, + newModuleFactories, + outdatedModuleParents +) { + // Update module factories. + for (const [moduleId, factory] of newModuleFactories.entries()) { + moduleFactories[moduleId] = factory; + } + + // TODO(alexkirsz) Run new runtime entries here. + + // TODO(alexkirsz) Dependencies: call accept handlers for outdated deps. + + // Re-instantiate all outdated self-accepted modules. + for (const { moduleId, errorHandler } of outdatedSelfAcceptedModules) { + try { + instantiateModule(moduleId, { + type: SourceTypeUpdate, + parents: outdatedModuleParents.get(moduleId), + }); + } catch (err) { + if (typeof errorHandler === "function") { + try { + errorHandler(err, { moduleId, module: moduleCache[moduleId] }); + } catch (_) { + // Ignore error. + } + } + } + } +} + +/** + * Utility function to ensure all variants of an enum are handled. 
+ * @param {never} never + * @param {(arg: any) => string} computeMessage + * @returns {never} + */ +function invariant(never, computeMessage) { + throw new Error(`Invariant: ${computeMessage(never)}`); +} + +/** + * + * @param {ChunkPath} chunkListPath + * @param {PartialUpdate} update + */ +function applyUpdate(chunkListPath, update) { + switch (update.type) { + case "ChunkListUpdate": + applyChunkListUpdate(chunkListPath, update); + break; + default: + invariant(update, (update) => `Unknown update type: ${update.type}`); + } +} + +/** + * + * @param {ChunkPath} chunkListPath + * @param {ChunkListUpdate} update + */ +function applyChunkListUpdate(chunkListPath, update) { + if (update.merged != null) { + for (const merged of update.merged) { + switch (merged.type) { + case "EcmascriptMergedUpdate": + applyEcmascriptMergedUpdate(chunkListPath, merged); + break; + default: + invariant(merged, (merged) => `Unknown merged type: ${merged.type}`); + } + } + } + + if (update.chunks != null) { + for (const [chunkPath, chunkUpdate] of Object.entries(update.chunks)) { + switch (chunkUpdate.type) { + case "added": + BACKEND.loadChunk(chunkPath, { type: SourceTypeUpdate }); + break; + case "total": + BACKEND.reloadChunk?.(chunkPath); + break; + case "deleted": + BACKEND.unloadChunk?.(chunkPath); + break; + case "partial": + invariant( + chunkUpdate.instruction, + (instruction) => + `Unknown partial instruction: ${JSON.stringify(instruction)}.` + ); + default: + invariant( + chunkUpdate, + (chunkUpdate) => `Unknown chunk update type: ${chunkUpdate.type}` + ); + } + } + } +} + +/** + * @param {ChunkPath} chunkPath + * @param {EcmascriptMergedUpdate} update + */ +function applyEcmascriptMergedUpdate(chunkPath, update) { + const { entries = {}, chunks = {} } = update; + const { added, modified, deleted, chunksAdded, chunksDeleted } = + computeChangedModules(entries, chunks); + const { outdatedModules, newModuleFactories } = computeOutdatedModules( + added, + modified + ); + const 
outdatedSelfAcceptedModules = + computeOutdatedSelfAcceptedModules(outdatedModules); + const { disposedModules } = updateChunksPhase(chunksAdded, chunksDeleted); + const { outdatedModuleParents } = disposePhase( + outdatedModules, + disposedModules + ); + applyPhase( + outdatedSelfAcceptedModules, + newModuleFactories, + outdatedModuleParents + ); +} + +/** + * @param {Record} entries + * @param {Record} updates + * @returns {{ + * added: Map, + * modified: Map, + * deleted: Set, + * chunksAdded: Map>, + * chunksDeleted: Map>, + * }} + */ +function computeChangedModules(entries, updates) { + const chunksAdded = new Map(); + const chunksDeleted = new Map(); + const added = new Map(); + const modified = new Map(); + const deleted = new Set(); + + for (const [chunkPath, mergedChunkUpdate] of Object.entries(updates)) { + switch (mergedChunkUpdate.type) { + case "added": { + const updateAdded = new Set(mergedChunkUpdate.modules); + for (const moduleId of updateAdded) { + added.set(moduleId, entries[moduleId]); + } + chunksAdded.set(chunkPath, updateAdded); + break; + } + case "deleted": { + // We could also use `mergedChunkUpdate.modules` here. 
+ const updateDeleted = new Set(chunkModulesMap.get(chunkPath)); + for (const moduleId of updateDeleted) { + deleted.add(moduleId); + } + chunksDeleted.set(chunkPath, updateDeleted); + break; + } + case "partial": { + const updateAdded = new Set(mergedChunkUpdate.added); + const updateDeleted = new Set(mergedChunkUpdate.deleted); + for (const moduleId of updateAdded) { + added.set(moduleId, entries[moduleId]); + } + for (const moduleId of updateDeleted) { + deleted.add([moduleId, chunkPath]); + } + chunksAdded.set(chunkPath, updateAdded); + chunksDeleted.set(chunkPath, updateDeleted); + break; + } + default: + invariant( + mergedChunkUpdate, + (mergedChunkUpdate) => + `Unknown merged chunk update type: ${mergedChunkUpdate.type}` + ); + } + } + + // If a module was added from one chunk and deleted from another in the same update, + // consider it to be modified, as it means the module was moved from one chunk to another + // AND has new code in a single update. + for (const moduleId of added.keys()) { + if (deleted.has(moduleId)) { + added.delete(moduleId); + deleted.delete(moduleId); + } + } + + for (const [moduleId, entry] of Object.entries(entries)) { + // Modules that haven't been added to any chunk but have new code are considered + // to be modified. + // This needs to be under the previous loop, as we need it to get rid of modules + // that were added and deleted in the same update. 
+ if (!added.has(moduleId)) { + modified.set(moduleId, entry); + } + } + + return { added, deleted, modified, chunksAdded, chunksDeleted }; +} + +/** + * + * @param {ModuleId} moduleId + * @returns {ModuleEffect} + */ +function getAffectedModuleEffects(moduleId) { + const outdatedModules = new Set(); + + /** @typedef {{moduleId?: ModuleId, dependencyChain: ModuleId[]}} QueueItem */ + + /** @type {QueueItem[]} */ + const queue = [ + { + moduleId, + dependencyChain: [], + }, + ]; + + while (queue.length > 0) { + const { moduleId, dependencyChain } = + /** @type {QueueItem} */ queue.shift(); + outdatedModules.add(moduleId); + + // We've arrived at the runtime of the chunk, which means that nothing + // else above can accept this update. + if (moduleId === undefined) { + return { + type: "unaccepted", + dependencyChain, + }; + } + + const module = moduleCache[moduleId]; + const hotState = moduleHotState.get(module); + + if ( + // The module is not in the cache. Since this is a "modified" update, + // it means that the module was never instantiated before. + !module || // The module accepted itself without invalidating globalThis. + // TODO is that right? + (hotState.selfAccepted && !hotState.selfInvalidated) + ) { + continue; + } + + if (hotState.selfDeclined) { + return { + type: "self-declined", + dependencyChain, + moduleId, + }; + } + + if (runtimeModules.has(moduleId)) { + queue.push({ + moduleId: undefined, + dependencyChain: [...dependencyChain, moduleId], + }); + continue; + } + + for (const parentId of module.parents) { + const parent = moduleCache[parentId]; + + if (!parent) { + // TODO(alexkirsz) Is this even possible? + continue; + } + + // TODO(alexkirsz) Dependencies: check accepted and declined + // dependencies here. 
+ + queue.push({ + moduleId: parentId, + dependencyChain: [...dependencyChain, moduleId], + }); + } + } + + return { + type: "accepted", + moduleId, + outdatedModules, + }; +} + +/** + * @param {ChunkPath} chunkListPath + * @param {import('../types/protocol').ServerMessage} update + */ +function handleApply(chunkListPath, update) { + switch (update.type) { + case "partial": { + // This indicates that the update is can be applied to the current state of the application. + applyUpdate(chunkListPath, update.instruction); + break; + } + case "restart": { + // This indicates that there is no way to apply the update to the + // current state of the application, and that the application must be + // restarted. + BACKEND.restart(); + break; + } + case "notFound": { + // This indicates that the chunk list no longer exists: either the dynamic import which created it was removed, + // or the page itself was deleted. + // If it is a dynamic import, we simply discard all modules that the chunk has exclusive access to. + // If it is a runtime chunk list, we restart the application. + if (runtimeChunkLists.has(chunkListPath)) { + BACKEND.restart(); + } else { + disposeChunkList(chunkListPath); + } + break; + } + default: + throw new Error(`Unknown update type: ${update.type}`); + } +} + +/** + * @param {HotData} [hotData] + * @returns {{hotState: HotState, hot: Hot}} + */ +function createModuleHot(hotData) { + /** @type {HotState} */ + const hotState = { + selfAccepted: false, + selfDeclined: false, + selfInvalidated: false, + disposeHandlers: [], + }; + + /** + * TODO(alexkirsz) Support full (dep, callback, errorHandler) form. 
+ * + * @param {string | string[] | AcceptErrorHandler} [dep] + * @param {AcceptCallback} [_callback] + * @param {AcceptErrorHandler} [_errorHandler] + */ + function accept(dep, _callback, _errorHandler) { + if (dep === undefined) { + hotState.selfAccepted = true; + } else if (typeof dep === "function") { + hotState.selfAccepted = dep; + } else { + throw new Error("unsupported `accept` signature"); + } + } + + /** @type {Hot} */ + const hot = { + // TODO(alexkirsz) This is not defined in the HMR API. It was used to + // decide whether to warn whenever an HMR-disposed module required other + // modules. We might want to remove it. + active: true, + + data: hotData ?? {}, + + accept: accept, + + decline: (dep) => { + if (dep === undefined) { + hotState.selfDeclined = true; + } else { + throw new Error("unsupported `decline` signature"); + } + }, + + dispose: (callback) => { + hotState.disposeHandlers.push(callback); + }, + + addDisposeHandler: (callback) => { + hotState.disposeHandlers.push(callback); + }, + + removeDisposeHandler: (callback) => { + const idx = hotState.disposeHandlers.indexOf(callback); + if (idx >= 0) { + hotState.disposeHandlers.splice(idx, 1); + } + }, + + invalidate: () => { + hotState.selfInvalidated = true; + // TODO(alexkirsz) The original HMR code had management-related code + // here. + }, + + // NOTE(alexkirsz) This is part of the management API, which we don't + // implement, but the Next.js React Refresh runtime uses this to decide + // whether to schedule an update. + status: () => "idle", + + // NOTE(alexkirsz) Since we always return "idle" for now, these are no-ops. + addStatusHandler: (_handler) => {}, + removeStatusHandler: (_handler) => {}, + }; + + return { hot, hotState }; +} + +/** + * Adds a module to a chunk. 
+ * + * @param {ModuleId} moduleId + * @param {ChunkPath} chunkPath + */ +function addModuleToChunk(moduleId, chunkPath) { + let moduleChunks = moduleChunksMap.get(moduleId); + if (!moduleChunks) { + moduleChunks = new Set([chunkPath]); + moduleChunksMap.set(moduleId, moduleChunks); + } else { + moduleChunks.add(chunkPath); + } + + let chunkModules = chunkModulesMap.get(chunkPath); + if (!chunkModules) { + chunkModules = new Set([moduleId]); + chunkModulesMap.set(chunkPath, chunkModules); + } else { + chunkModules.add(moduleId); + } +} + +/** + * Returns the first chunk that included a module. + * This is used by the Node.js backend, hence why it's marked as unused in this + * file. + * + * @type {GetFirstModuleChunk} + */ +function getFirstModuleChunk(moduleId) { + const moduleChunkPaths = moduleChunksMap.get(moduleId); + if (moduleChunkPaths == null) { + return null; + } + + return moduleChunkPaths.values().next().value; +} + +/** + * Removes a module from a chunk. Returns true there are no remaining chunks + * including this module. + * + * @param {ModuleId} moduleId + * @param {ChunkPath} chunkPath + * @returns {boolean} + */ +function removeModuleFromChunk(moduleId, chunkPath) { + const moduleChunks = moduleChunksMap.get(moduleId); + moduleChunks.delete(chunkPath); + + const chunkModules = chunkModulesMap.get(chunkPath); + chunkModules.delete(moduleId); + + const noRemainingModules = chunkModules.size === 0; + if (noRemainingModules) { + chunkModulesMap.delete(chunkPath); + } + + const noRemainingChunks = moduleChunks.size === 0; + if (noRemainingChunks) { + moduleChunksMap.delete(moduleId); + } + + return noRemainingChunks; +} + +/** + * Diposes of a chunk list and its corresponding exclusive chunks. + * + * @param {ChunkPath} chunkListPath + * @returns {boolean} Whether the chunk list was disposed of. 
+ */ +function disposeChunkList(chunkListPath) { + const chunkPaths = chunkListChunksMap.get(chunkListPath); + if (chunkPaths == null) { + return false; + } + chunkListChunksMap.delete(chunkListPath); + + for (const chunkPath of chunkPaths) { + const chunkChunkLists = chunkChunkListsMap.get(chunkPath); + chunkChunkLists.delete(chunkListPath); + + if (chunkChunkLists.size === 0) { + chunkChunkListsMap.delete(chunkPath); + disposeChunk(chunkPath); + } + } + + // We must also dispose of the chunk list's chunk itself to ensure it may + // be reloaded properly in the future. + BACKEND.unloadChunk(chunkListPath); + + return true; +} + +/** + * Disposes of a chunk and its corresponding exclusive modules. + * + * @param {ChunkPath} chunkPath + * @returns {boolean} Whether the chunk was disposed of. + */ +function disposeChunk(chunkPath) { + // This should happen whether or not the chunk has any modules in it. For instance, + // CSS chunks have no modules in them, but they still need to be unloaded. + BACKEND.unloadChunk(chunkPath); + + const chunkModules = chunkModulesMap.get(chunkPath); + if (chunkModules == null) { + return false; + } + chunkModules.delete(chunkPath); + + for (const moduleId of chunkModules) { + const moduleChunks = moduleChunksMap.get(moduleId); + moduleChunks.delete(chunkPath); + + const noRemainingChunks = moduleChunks.size === 0; + if (noRemainingChunks) { + moduleChunksMap.delete(moduleId); + disposeModule(moduleId, "clear"); + availableModules.delete(moduleId); + } + } + + return true; +} + +/** + * Instantiates a runtime module. + * + * @param {ModuleId} moduleId + * @param {ChunkPath} chunkPath + * @returns {Module} + */ +function instantiateRuntimeModule(moduleId, chunkPath) { + return instantiateModule(moduleId, { type: SourceTypeRuntime, chunkPath }); +} + +/** + * Gets or instantiates a runtime module. 
+ * + * @param {ModuleId} moduleId + * @param {ChunkPath} chunkPath + * @returns {Module} + */ +function getOrInstantiateRuntimeModule(moduleId, chunkPath) { + const module = moduleCache[moduleId]; + if (module) { + if (module.error) { + throw module.error; + } + return module; + } + + return instantiateModule(moduleId, { type: SourceTypeRuntime, chunkPath }); +} + +/** + * Returns the path of a chunk defined by its data. + * + * @param {ChunkData} chunkData + * @returns {ChunkPath} the chunk path + */ +function getChunkPath(chunkData) { + return typeof chunkData === "string" ? chunkData : chunkData.path; +} + +/** + * Subscribes to chunk list updates from the update server and applies them. + * + * @param {ChunkList} chunkList + */ +function registerChunkList(chunkList) { + globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS.push([ + chunkList.path, + handleApply.bind(null, chunkList.path), + ]); + + // Adding chunks to chunk lists and vice versa. + const chunks = new Set(chunkList.chunks.map(getChunkPath)); + chunkListChunksMap.set(chunkList.path, chunks); + for (const chunkPath of chunks) { + let chunkChunkLists = chunkChunkListsMap.get(chunkPath); + if (!chunkChunkLists) { + chunkChunkLists = new Set([chunkList.path]); + chunkChunkListsMap.set(chunkPath, chunkChunkLists); + } else { + chunkChunkLists.add(chunkList.path); + } + } + + if (chunkList.source === "entry") { + markChunkListAsRuntime(chunkList.path); + } +} + +/** + * Marks a chunk list as a runtime chunk list. There can be more than one + * runtime chunk list. For instance, integration tests can have multiple chunk + * groups loaded at runtime, each with its own chunk list. 
+ * + * @param {ChunkPath} chunkListPath + */ +function markChunkListAsRuntime(chunkListPath) { + runtimeChunkLists.add(chunkListPath); +} + +/** + * @param {ChunkRegistration} chunkRegistration + */ +function registerChunk([chunkPath, chunkModules, runtimeParams]) { + for (const [moduleId, moduleFactory] of Object.entries(chunkModules)) { + if (!moduleFactories[moduleId]) { + moduleFactories[moduleId] = moduleFactory; + } + addModuleToChunk(moduleId, chunkPath); + } + + return BACKEND.registerChunk(chunkPath, runtimeParams); +} + +globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS = + globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS || []; + +const chunkListsToRegister = globalThis.TURBOPACK_CHUNK_LISTS || []; +for (const chunkList of chunkListsToRegister) { + registerChunkList(chunkList); +} +globalThis.TURBOPACK_CHUNK_LISTS = { + push: (chunkList) => { + registerChunkList(chunkList); + }, +}; + +globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS = + globalThis.TURBOPACK_CHUNK_UPDATE_LISTENERS || []; +/** @typedef {import('../types/backend').RuntimeBackend} RuntimeBackend */ +/** @typedef {import('../types/runtime.dom').ChunkResolver} ChunkResolver */ +/** @typedef {import('../types').ChunkPath} ChunkPath */ +/** @typedef {import('../types').SourceInfo} SourceInfo */ + +/** @type {RuntimeBackend} */ +let BACKEND; + +(() => { + BACKEND = { + async registerChunk(chunkPath, params) { + const resolver = getOrCreateResolver(chunkPath); + resolver.resolve(); + + if (params == null) { + return; + } + + for (const otherChunkData of params.otherChunks) { + const otherChunkPath = getChunkPath(otherChunkData); + // Chunk might have started loading, so we want to avoid triggering another load. + getOrCreateResolver(otherChunkPath); + } + + // This waits for chunks to be loaded, but also marks included items as available. 
+ await Promise.all( + params.otherChunks.map((otherChunkData) => + loadChunk({ type: SourceTypeRuntime, chunkPath }, otherChunkData) + ) + ); + + if (params.runtimeModuleIds.length > 0) { + for (const moduleId of params.runtimeModuleIds) { + getOrInstantiateRuntimeModule(moduleId, chunkPath); + } + } + }, + + loadChunk(chunkPath, source) { + return doLoadChunk(chunkPath, source); + }, + + unloadChunk(chunkPath) { + deleteResolver(chunkPath); + + if (chunkPath.endsWith(".css")) { + const links = document.querySelectorAll(`link[href="/${chunkPath}"]`); + for (const link of Array.from(links)) { + link.remove(); + } + } else if (chunkPath.endsWith(".js")) { + // Unloading a JS chunk would have no effect, as it lives in the JS + // runtime once evaluated. + // However, we still want to remove the script tag from the DOM to keep + // the HTML somewhat consistent from the user's perspective. + const scripts = document.querySelectorAll( + `script[src="/${chunkPath}"]` + ); + for (const script of Array.from(scripts)) { + script.remove(); + } + } else { + throw new Error(`can't infer type of chunk from path ${chunkPath}`); + } + }, + + reloadChunk(chunkPath) { + return new Promise((resolve, reject) => { + if (!chunkPath.endsWith(".css")) { + reject(new Error("The DOM backend can only reload CSS chunks")); + return; + } + + const encodedChunkPath = chunkPath + .split("/") + .map((p) => encodeURIComponent(p)) + .join("/"); + + const previousLink = document.querySelector( + `link[rel=stylesheet][href^="/${encodedChunkPath}"]` + ); + + if (previousLink == null) { + reject(new Error(`No link element found for chunk ${chunkPath}`)); + return; + } + + const link = document.createElement("link"); + link.rel = "stylesheet"; + link.href = `/${encodedChunkPath}`; + link.onerror = () => { + reject(); + }; + link.onload = () => { + // First load the new CSS, then remove the old one. 
This prevents visible + // flickering that would happen in-between removing the previous CSS and + // loading the new one. + previousLink.remove(); + + // CSS chunks do not register themselves, and as such must be marked as + // loaded instantly. + resolve(); + }; + + // Make sure to insert the new CSS right after the previous one, so that + // its precedence is higher. + previousLink.parentElement.insertBefore(link, previousLink.nextSibling); + }); + }, + + restart: () => self.location.reload(), + }; + + /** + * Maps chunk paths to the corresponding resolver. + * + * @type {Map} + */ + const chunkResolvers = new Map(); + + /** + * @param {ChunkPath} chunkPath + * @returns {ChunkResolver} + */ + function getOrCreateResolver(chunkPath) { + let resolver = chunkResolvers.get(chunkPath); + if (!resolver) { + let resolve; + let reject; + const promise = new Promise((innerResolve, innerReject) => { + resolve = innerResolve; + reject = innerReject; + }); + resolver = { + resolved: false, + promise, + resolve: () => { + resolver.resolved = true; + resolve(); + }, + reject, + }; + chunkResolvers.set(chunkPath, resolver); + } + return resolver; + } + + function deleteResolver(chunkPath) { + chunkResolvers.delete(chunkPath); + } + + /** + * Loads the given chunk, and returns a promise that resolves once the chunk + * has been loaded. + * + * @param {ChunkPath} chunkPath + * @param {SourceInfo} source + */ + async function doLoadChunk(chunkPath, source) { + const resolver = getOrCreateResolver(chunkPath); + if (resolver.resolved) { + return resolver.promise; + } + + if (source.type === SourceTypeRuntime) { + // We don't need to load chunks references from runtime code, as they're already + // present in the DOM. + + if (chunkPath.endsWith(".css")) { + // CSS chunks do not register themselves, and as such must be marked as + // loaded instantly. 
+ resolver.resolve(); + } + + // We need to wait for JS chunks to register themselves within `registerChunk` + // before we can start instantiating runtime modules, hence the absence of + // `resolver.resolve()` in this branch. + + return resolver.promise; + } + + if (chunkPath.endsWith(".css")) { + const link = document.createElement("link"); + link.rel = "stylesheet"; + link.href = `/${chunkPath}`; + link.onerror = () => { + resolver.reject(); + }; + link.onload = () => { + // CSS chunks do not register themselves, and as such must be marked as + // loaded instantly. + resolver.resolve(); + }; + document.body.appendChild(link); + } else if (chunkPath.endsWith(".js")) { + const script = document.createElement("script"); + script.src = `/${chunkPath}`; + // We'll only mark the chunk as loaded once the script has been executed, + // which happens in `registerChunk`. Hence the absence of `resolve()` in + // this branch. + script.onerror = () => { + resolver.reject(); + }; + document.body.appendChild(script); + } else { + throw new Error(`can't infer type of chunk from path ${chunkPath}`); + } + + return resolver.promise; + } +})(); +const chunksToRegister = globalThis.TURBOPACK; +globalThis.TURBOPACK = { push: registerChunk }; +chunksToRegister.forEach(registerChunk); +})(); \ No newline at end of file diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js.map b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js.map new file mode 100644 index 0000000000000..f3efabbe00c39 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_9cd22f.js.map @@ -0,0 +1,4 @@ +{ + "version": 3, + "sections": [] +} diff --git 
a/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js new file mode 100644 index 0000000000000..5cac983b0de78 --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js @@ -0,0 +1,30 @@ +(globalThis.TURBOPACK = globalThis.TURBOPACK || []).push(["output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js", { + +"[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.json (json)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +__turbopack_export_value__(JSON.parse("{\"js\":false}")); +})()), +"[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.ts (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: __turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +__turbopack_esm__({ + "default": ()=>__TURBOPACK__default__export__ +}); +const __TURBOPACK__default__export__ = { + js: true +}; + +})()), +"[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js (ecmascript)": (({ r: __turbopack_require__, x: __turbopack_external_require__, f: __turbopack_require_context__, i: __turbopack_import__, s: __turbopack_esm__, v: __turbopack_export_value__, n: 
__turbopack_export_namespace__, c: __turbopack_cache__, l: __turbopack_load__, j: __turbopack_cjs__, k: __turbopack_refresh__, g: global, __dirname }) => (() => { + +var __TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$order$2f$input$2f$posts$2e$ts__$28$ecmascript$29$__ = __turbopack_import__("[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.ts (ecmascript)"); +"__TURBOPACK__ecmascript__hoisting__location__"; +; +console.log(__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$order$2f$input$2f$posts$2e$ts__$28$ecmascript$29$__["default"].js); +if (!__TURBOPACK__imported__module__$5b$project$5d2f$crates$2f$turbopack$2d$tests$2f$tests$2f$snapshot$2f$imports$2f$order$2f$input$2f$posts$2e$ts__$28$ecmascript$29$__["default"].js) { + process.exit(1); +} + +})()), +}]); + +//# sourceMappingURL=crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js.map diff --git a/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js.map b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js.map new file mode 100644 index 0000000000000..fb741105a811b --- /dev/null +++ b/crates/turbopack-tests/tests/snapshot/imports/order/output/crates_turbopack-tests_tests_snapshot_imports_order_input_index_b53fce.js.map @@ -0,0 +1,8 @@ +{ + "version": 3, + "sections": [ + {"offset": {"line": 8, "column": 0}, "map": {"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/posts.ts"],"sourcesContent":["export default {\n js: true,\n};\n"],"names":[],"mappings":";;;uCAAe;IACb,IAAI,IAAI;AACV"}}, + {"offset": {"line": 14, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}, + {"offset": {"line": 18, "column": 0}, "map": 
{"version":3,"sources":["/turbopack/[project]/crates/turbopack-tests/tests/snapshot/imports/order/input/index.js"],"sourcesContent":["import posts from \"./posts\";\n\nconsole.log(posts.js);\nif (!posts.js) {\n process.exit(1);\n}\n"],"names":[],"mappings":";;;AAEA,QAAQ,GAAG,CAAC,+KAAM,EAAE;AACpB,IAAI,CAAC,+KAAM,EAAE,EAAE;IACb,QAAQ,IAAI,CAAC;AACf,CAAC"}}, + {"offset": {"line": 25, "column": 0}, "map": {"version":3,"sources":[],"names":[],"mappings":"A"}}] +} diff --git a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js index 483ebe58ea515..4a119e898371a 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js +++ b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_cjs/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_cjs_input_index_f8412b.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js index fa3d7dd1f8cb5..45f6682203864 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js +++ b/crates/turbopack-tests/tests/snapshot/imports/resolve_error_esm/output/79fb1_turbopack-tests_tests_snapshot_imports_resolve_error_esm_input_index_0b3e45.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - 
runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js b/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js index bd82767967a96..2225c2e14d034 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js +++ b/crates/turbopack-tests/tests/snapshot/imports/static-and-dynamic/output/79fb1_turbopack-tests_tests_snapshot_imports_static-and-dynamic_input_index_ec8693.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js b/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js index f7ff37164dd04..152c7e714acd1 100644 --- a/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js +++ b/crates/turbopack-tests/tests/snapshot/imports/static/output/crates_turbopack-tests_tests_snapshot_imports_static_input_index_885269.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module 
encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js b/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js index 1faef93b5247d..1dd789077ec36 100644 --- a/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js +++ b/crates/turbopack-tests/tests/snapshot/node/node_protocol_external/output/79fb1_turbopack-tests_tests_snapshot_node_node_protocol_external_input_index_667edf.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js b/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js index dde419d9c2660..d8806e4c3cb4b 100644 --- a/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js +++ b/crates/turbopack-tests/tests/snapshot/styled_components/styled_components/output/a587c_tests_snapshot_styled_components_styled_components_input_index_afc482.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - 
runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js index 90df7488d675b..4a194d553e25a 100644 --- a/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js +++ b/crates/turbopack-tests/tests/snapshot/swc_transforms/mono_transforms/output/a587c_tests_snapshot_swc_transforms_mono_transforms_input_packages_app_index_4a3d65.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js index 415413abf8d6e..92c32027da470 100644 --- a/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js +++ b/crates/turbopack-tests/tests/snapshot/swc_transforms/preset_env/output/79fb1_turbopack-tests_tests_snapshot_swc_transforms_preset_env_input_index_9dcfd0.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - 
runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js b/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js index 7f96583a5436f..f31da5f173e91 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/jsconfig-baseurl/output/79fb1_turbopack-tests_tests_snapshot_typescript_jsconfig-baseurl_input_index_8f1e58.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js index 66e3b0df0eb2c..7675e4c7494ad 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-baseurl/output/a587c_tests_snapshot_typescript_tsconfig-baseurl_input_index.ts_0aa04e._.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - 
try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module-full-path/output/8562f_snapshot_typescript_tsconfig-extends-module-full-path_input_index.ts_a751eb._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module-full-path/output/8562f_snapshot_typescript_tsconfig-extends-module-full-path_input_index.ts_a751eb._.js index ae2bfdedff4fb..b96e1170da3e2 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module-full-path/output/8562f_snapshot_typescript_tsconfig-extends-module-full-path_input_index.ts_a751eb._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module-full-path/output/8562f_snapshot_typescript_tsconfig-extends-module-full-path_input_index.ts_a751eb._.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module/output/a587c_tests_snapshot_typescript_tsconfig-extends-module_input_index.ts_a662d4._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module/output/a587c_tests_snapshot_typescript_tsconfig-extends-module_input_index.ts_a662d4._.js index c1b244a7921f3..288c4b661fb0f 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module/output/a587c_tests_snapshot_typescript_tsconfig-extends-module_input_index.ts_a662d4._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-module/output/a587c_tests_snapshot_typescript_tsconfig-extends-module_input_index.ts_a662d4._.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { 
break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-relative-dir/output/a587c_tests_snapshot_typescript_tsconfig-extends-relative-dir_input_index.ts_be3d7b._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-relative-dir/output/a587c_tests_snapshot_typescript_tsconfig-extends-relative-dir_input_index.ts_be3d7b._.js index 526f49bfd1564..765269862787b 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-relative-dir/output/a587c_tests_snapshot_typescript_tsconfig-extends-relative-dir_input_index.ts_be3d7b._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-relative-dir/output/a587c_tests_snapshot_typescript_tsconfig-extends-relative-dir_input_index.ts_be3d7b._.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-without-ext/output/a587c_tests_snapshot_typescript_tsconfig-extends-without-ext_input_index.ts_38aae8._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-without-ext/output/a587c_tests_snapshot_typescript_tsconfig-extends-without-ext_input_index.ts_38aae8._.js index 2c3a7fbdde133..fa745333aa513 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-without-ext/output/a587c_tests_snapshot_typescript_tsconfig-extends-without-ext_input_index.ts_38aae8._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends-without-ext/output/a587c_tests_snapshot_typescript_tsconfig-extends-without-ext_input_index.ts_38aae8._.js @@ -491,8 +491,9 @@ 
function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. 
+ cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends/output/a587c_tests_snapshot_typescript_tsconfig-extends_input_index.ts_18c083._.js b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends/output/a587c_tests_snapshot_typescript_tsconfig-extends_input_index.ts_18c083._.js index 9f230f9ea0fcb..cfc5d9b63be4f 100644 --- a/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends/output/a587c_tests_snapshot_typescript_tsconfig-extends_input_index.ts_18c083._.js +++ b/crates/turbopack-tests/tests/snapshot/typescript/tsconfig-extends/output/a587c_tests_snapshot_typescript_tsconfig-extends_input_index.ts_18c083._.js @@ -491,8 +491,9 @@ function instantiateModule(id, source) { break; } - runModuleExecutionHooks(module, (refresh) => { - try { + // NOTE(alexkirsz) This can fail when the module encounters a runtime error. + try { + runModuleExecutionHooks(module, (refresh) => { moduleFactory.call(module.exports, { e: module.exports, r: commonJsRequire.bind(null, module), @@ -510,11 +511,11 @@ function instantiateModule(id, source) { k: refresh, __dirname: module.id.replace(/(^|\/)[\/]+$/, ""), }); - } catch (error) { - module.error = error; - throw error; - } - }); + }); + } catch (error) { + module.error = error; + throw error; + } module.loaded = true; if (module.namespaceObject && module.exports !== module.namespaceObject) { @@ -539,21 +540,26 @@ function runModuleExecutionHooks(module, executeModule) { ? 
globalThis.$RefreshInterceptModuleExecution$(module.id) : () => {}; - executeModule({ - register: globalThis.$RefreshReg$, - signature: globalThis.$RefreshSig$, - }); + try { + executeModule({ + register: globalThis.$RefreshReg$, + signature: globalThis.$RefreshSig$, + }); - if ("$RefreshHelpers$" in globalThis) { - // This pattern can also be used to register the exports of - // a module with the React Refresh runtime. - registerExportsAndSetupBoundaryForReactRefresh( - module, - globalThis.$RefreshHelpers$ - ); + if ("$RefreshHelpers$" in globalThis) { + // This pattern can also be used to register the exports of + // a module with the React Refresh runtime. + registerExportsAndSetupBoundaryForReactRefresh( + module, + globalThis.$RefreshHelpers$ + ); + } + } catch (e) { + throw e; + } finally { + // Always cleanup the intercept, even if module execution failed. + cleanupReactRefreshIntercept(); } - - cleanupReactRefreshIntercept(); } /** @@ -1427,6 +1433,9 @@ function instantiateRuntimeModule(moduleId, chunkPath) { function getOrInstantiateRuntimeModule(moduleId, chunkPath) { const module = moduleCache[moduleId]; if (module) { + if (module.error) { + throw module.error; + } return module; } diff --git a/crates/turbopack/src/lib.rs b/crates/turbopack/src/lib.rs index e904a8e065cda..12494649d9eb8 100644 --- a/crates/turbopack/src/lib.rs +++ b/crates/turbopack/src/lib.rs @@ -377,14 +377,20 @@ impl AssetContext for ModuleAssetContext { #[turbo_tasks::function] async fn resolve_options( - &self, + self_vc: ModuleAssetContextVc, origin_path: FileSystemPathVc, _reference_type: Value, ) -> Result { + let this = self_vc.await?; + let context = if let Some(transition) = this.transition { + transition.process_context(self_vc) + } else { + self_vc + }; // TODO move `apply_commonjs/esm_resolve_options` etc. 
to here Ok(resolve_options( origin_path.parent().resolve().await?, - self.resolve_options_context, + context.await?.resolve_options_context, )) } diff --git a/crates/turbopack/src/module_options/mod.rs b/crates/turbopack/src/module_options/mod.rs index 9876d5659ba6e..f85db4645c18d 100644 --- a/crates/turbopack/src/module_options/mod.rs +++ b/crates/turbopack/src/module_options/mod.rs @@ -16,9 +16,7 @@ use turbopack_core::{ use turbopack_css::{CssInputTransform, CssInputTransformsVc}; use turbopack_ecmascript::{ EcmascriptInputTransform, EcmascriptInputTransformsVc, EcmascriptOptions, SpecifiedModuleType, - TransformPluginVc, }; -use turbopack_ecmascript_plugins::transform::emotion::build_emotion_transformer; use turbopack_mdx::MdxTransformOptions; use turbopack_node::transforms::{postcss::PostCssTransformVc, webpack::WebpackLoadersVc}; @@ -64,8 +62,6 @@ impl ModuleOptionsVc { ) -> Result { let ModuleOptionsContext { enable_jsx, - ref enable_emotion, - enable_react_refresh, enable_styled_jsx, ref enable_styled_components, enable_types, @@ -77,8 +73,6 @@ impl ModuleOptionsVc { ref enable_postcss_transform, ref enable_webpack_loaders, preset_env_versions, - ref custom_ecmascript_app_transforms, - ref custom_ecmascript_transforms, ref custom_ecma_transform_plugins, ref custom_rules, execution_context, @@ -117,8 +111,6 @@ impl ModuleOptionsVc { }; let mut transforms = before_transform_plugins; - transforms.extend(custom_ecmascript_app_transforms.iter().cloned()); - transforms.extend(custom_ecmascript_transforms.iter().cloned()); // Order of transforms is important. e.g. if the React transform occurs before // Styled JSX, there won't be JSX nodes for Styled JSX to transform. @@ -126,12 +118,6 @@ impl ModuleOptionsVc { transforms.push(EcmascriptInputTransform::StyledJsx); } - if let Some(transformer) = build_emotion_transformer(enable_emotion).await? 
{ - transforms.push(EcmascriptInputTransform::Plugin(TransformPluginVc::cell( - transformer, - ))); - } - if let Some(enable_styled_components) = enable_styled_components { let styled_components_transform = &*enable_styled_components.await?; transforms.push(EcmascriptInputTransform::StyledComponents { @@ -152,7 +138,7 @@ impl ModuleOptionsVc { let jsx = enable_jsx.await?; transforms.push(EcmascriptInputTransform::React { - refresh: enable_react_refresh, + refresh: jsx.react_refresh, import_source: OptionStringVc::cell(jsx.import_source.clone()), runtime: OptionStringVc::cell(jsx.runtime.clone()), }); @@ -192,15 +178,13 @@ impl ModuleOptionsVc { None }; - let vendor_transforms = - EcmascriptInputTransformsVc::cell(custom_ecmascript_transforms.clone()); + let vendor_transforms = EcmascriptInputTransformsVc::cell(vec![]); let ts_app_transforms = if let Some(transform) = &ts_transform { - let mut base_transforms = if let Some(decorators_transform) = &decorators_transform { + let base_transforms = if let Some(decorators_transform) = &decorators_transform { vec![decorators_transform.clone(), transform.clone()] } else { vec![transform.clone()] }; - base_transforms.extend(custom_ecmascript_transforms.iter().cloned()); EcmascriptInputTransformsVc::cell( base_transforms .iter() @@ -430,7 +414,7 @@ impl ModuleOptionsVc { ), ]; - if enable_mdx || enable_mdx_rs { + if enable_mdx || enable_mdx_rs.is_some() { let (jsx_runtime, jsx_import_source) = if let Some(enable_jsx) = enable_jsx { let jsx = enable_jsx.await?; (jsx.runtime.clone(), jsx.import_source.clone()) @@ -438,11 +422,16 @@ impl ModuleOptionsVc { (None, None) }; + let mdx_options = enable_mdx_rs + .unwrap_or(MdxTransformModuleOptionsVc::default()) + .await?; + let mdx_transform_options = (MdxTransformOptions { development: true, preserve_jsx: false, jsx_runtime, jsx_import_source, + provider_import_source: mdx_options.provider_import_source.clone(), }) .cell(); diff --git 
a/crates/turbopack/src/module_options/module_options_context.rs b/crates/turbopack/src/module_options/module_options_context.rs index b361f5ebace46..0ce312236e188 100644 --- a/crates/turbopack/src/module_options/module_options_context.rs +++ b/crates/turbopack/src/module_options/module_options_context.rs @@ -2,8 +2,10 @@ use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use turbo_tasks::trace::TraceRawVcs; use turbopack_core::{environment::EnvironmentVc, resolve::options::ImportMappingVc}; -use turbopack_ecmascript::{EcmascriptInputTransform, TransformPluginVc}; -use turbopack_ecmascript_plugins::transform::emotion::EmotionTransformConfigVc; +use turbopack_ecmascript::TransformPluginVc; +use turbopack_ecmascript_plugins::transform::{ + emotion::EmotionTransformConfigVc, styled_components::StyledComponentsTransformConfigVc, +}; use turbopack_node::{ execution_context::ExecutionContextVc, transforms::webpack::WebpackLoaderConfigItemsVc, }; @@ -104,58 +106,14 @@ impl WebpackLoadersOptions { } } -// [TODO]: should enabled_react_refresh belong to this options? 
#[turbo_tasks::value(shared)] #[derive(Default, Clone, Debug)] pub struct JsxTransformOptions { + pub react_refresh: bool, pub import_source: Option, pub runtime: Option, } -#[turbo_tasks::value(transparent)] -pub struct OptionStyledComponentsTransformConfig(Option); - -#[turbo_tasks::value(shared)] -#[derive(Clone, Debug)] -#[serde(rename_all = "camelCase")] -pub struct StyledComponentsTransformConfig { - pub display_name: bool, - pub ssr: bool, - pub file_name: bool, - pub top_level_import_paths: Vec, - pub meaningless_file_names: Vec, - pub css_prop: bool, - pub namespace: Option, -} - -impl Default for StyledComponentsTransformConfig { - fn default() -> Self { - StyledComponentsTransformConfig { - display_name: true, - ssr: true, - file_name: true, - top_level_import_paths: vec![], - meaningless_file_names: vec!["index".to_string()], - css_prop: true, - namespace: None, - } - } -} - -#[turbo_tasks::value_impl] -impl StyledComponentsTransformConfigVc { - #[turbo_tasks::function] - pub fn default() -> Self { - Self::cell(Default::default()) - } -} - -impl Default for StyledComponentsTransformConfigVc { - fn default() -> Self { - Self::default() - } -} - /// Configuration options for the custom ecma transform to be applied. #[turbo_tasks::value(shared)] #[derive(Default, Clone)] @@ -170,55 +128,48 @@ pub struct CustomEcmascriptTransformPlugins { #[turbo_tasks::value(shared)] #[derive(Default, Clone)] +#[serde(default)] +pub struct MdxTransformModuleOptions { + /// The path to a module providing Components to mdx modules. + /// The provider must export a useMDXComponents, which is called to access + /// an object of components. 
+ pub provider_import_source: Option, +} + +#[turbo_tasks::value_impl] +impl MdxTransformModuleOptionsVc { + #[turbo_tasks::function] + pub fn default() -> Self { + Self::cell(Default::default()) + } +} + +#[turbo_tasks::value(shared)] +#[derive(Default, Clone)] +#[serde(default)] pub struct ModuleOptionsContext { - #[serde(default)] pub enable_jsx: Option, - #[serde(default)] pub enable_emotion: Option, - #[serde(default)] - pub enable_react_refresh: bool, - #[serde(default)] pub enable_styled_components: Option, - #[serde(default)] pub enable_styled_jsx: bool, - #[serde(default)] pub enable_postcss_transform: Option, - #[serde(default)] pub enable_webpack_loaders: Option, - #[serde(default)] pub enable_types: bool, - #[serde(default)] pub enable_typescript_transform: Option, - #[serde(default)] pub decorators: Option, - #[serde(default)] pub enable_mdx: bool, // [Note]: currently mdx, and mdx_rs have different configuration entrypoint from next.config.js, // however we might want to unify them in the future. - #[serde(default)] - pub enable_mdx_rs: bool, - #[serde(default)] + pub enable_mdx_rs: Option, pub preset_env_versions: Option, - #[deprecated(note = "use custom_ecma_transform_plugins instead")] - #[serde(default)] - pub custom_ecmascript_app_transforms: Vec, - #[deprecated(note = "use custom_ecma_transform_plugins instead")] - #[serde(default)] - pub custom_ecmascript_transforms: Vec, - #[serde(default)] pub custom_ecma_transform_plugins: Option, - #[serde(default)] /// Custom rules to be applied after all default rules. pub custom_rules: Vec, - #[serde(default)] pub execution_context: Option, - #[serde(default)] /// A list of rules to use a different module option context for certain /// context paths. The first matching is used. 
pub rules: Vec<(ContextCondition, ModuleOptionsContextVc)>, - #[serde(default)] pub placeholder_for_future_extensions: (), - #[serde(default)] pub enable_tree_shaking: bool, } diff --git a/crates/turborepo-ffi/messages.proto b/crates/turborepo-ffi/messages.proto index d4408d21daf5f..ce7b189a1f3a2 100644 --- a/crates/turborepo-ffi/messages.proto +++ b/crates/turborepo-ffi/messages.proto @@ -149,19 +149,3 @@ message RecursiveCopyRequest { message RecursiveCopyResponse { optional string error = 1; } - -message GetPackageFileHashesFromGitIndexRequest { - string turbo_root = 1; - string package_path = 2; -} - -message FileHashes { - map hashes = 1; -} - -message GetPackageFileHashesFromGitIndexResponse { - oneof response { - FileHashes hashes = 1; - string error = 2; - } -} diff --git a/crates/turborepo-ffi/src/lib.rs b/crates/turborepo-ffi/src/lib.rs index 5b788ba6334b3..f1d34380a0be8 100644 --- a/crates/turborepo-ffi/src/lib.rs +++ b/crates/turborepo-ffi/src/lib.rs @@ -4,10 +4,10 @@ //! and in ffi.go before modifying this file. 
mod lockfile; -use std::{collections::HashMap, mem::ManuallyDrop, path::PathBuf}; +use std::{mem::ManuallyDrop, path::PathBuf}; pub use lockfile::{patches, subgraph, transitive_closure}; -use turbopath::{AbsoluteSystemPathBuf, AnchoredSystemPathBuf}; +use turbopath::AbsoluteSystemPathBuf; mod proto { include!(concat!(env!("OUT_DIR"), "/_.rs")); @@ -163,87 +163,3 @@ pub extern "C" fn recursive_copy(buffer: Buffer) -> Buffer { }; response.into() } - -#[no_mangle] -pub extern "C" fn get_package_file_hashes_from_git_index(buffer: Buffer) -> Buffer { - let req: proto::GetPackageFileHashesFromGitIndexRequest = match buffer.into_proto() { - Ok(req) => req, - Err(err) => { - let resp = proto::GetPackageFileHashesFromGitIndexResponse { - response: Some( - proto::get_package_file_hashes_from_git_index_response::Response::Error( - err.to_string(), - ), - ), - }; - return resp.into(); - } - }; - - let turbo_root = match AbsoluteSystemPathBuf::new(req.turbo_root) { - Ok(turbo_root) => turbo_root, - Err(err) => { - let resp = proto::GetPackageFileHashesFromGitIndexResponse { - response: Some( - proto::get_package_file_hashes_from_git_index_response::Response::Error( - err.to_string(), - ), - ), - }; - return resp.into(); - } - }; - let package_path = match AnchoredSystemPathBuf::from_raw(req.package_path) { - Ok(package_path) => package_path, - Err(err) => { - let resp = proto::GetPackageFileHashesFromGitIndexResponse { - response: Some( - proto::get_package_file_hashes_from_git_index_response::Response::Error( - err.to_string(), - ), - ), - }; - return resp.into(); - } - }; - let response = match turborepo_scm::package_deps::get_package_file_hashes_from_git_index( - &turbo_root, - &package_path, - ) { - Ok(hashes) => { - let mut to_return = HashMap::new(); - for (filename, hash) in hashes { - let filename = match filename.as_str() { - Ok(s) => s.to_owned(), - Err(err) => { - let resp = proto::GetPackageFileHashesFromGitIndexResponse { - response: 
Some(proto::get_package_file_hashes_from_git_index_response::Response::Error(err.to_string())) - }; - return resp.into(); - } - }; - to_return.insert(filename, hash); - } - let file_hashes = proto::FileHashes { hashes: to_return }; - let resp = proto::GetPackageFileHashesFromGitIndexResponse { - response: Some( - proto::get_package_file_hashes_from_git_index_response::Response::Hashes( - file_hashes, - ), - ), - }; - resp - } - Err(err) => { - let resp = proto::GetPackageFileHashesFromGitIndexResponse { - response: Some( - proto::get_package_file_hashes_from_git_index_response::Response::Error( - err.to_string(), - ), - ), - }; - return resp.into(); - } - }; - response.into() -} diff --git a/crates/turborepo-lib/.gitignore b/crates/turborepo-lib/.gitignore deleted file mode 100644 index 1a7387072f811..0000000000000 --- a/crates/turborepo-lib/.gitignore +++ /dev/null @@ -1 +0,0 @@ -file_descriptor_set.bin diff --git a/crates/turborepo-lib/src/cli.rs b/crates/turborepo-lib/src/cli.rs index b7d5806bade68..48b01f95ce7e4 100644 --- a/crates/turborepo-lib/src/cli.rs +++ b/crates/turborepo-lib/src/cli.rs @@ -14,7 +14,7 @@ use tracing::{debug, error}; use turbopath::AbsoluteSystemPathBuf; use crate::{ - commands::{bin, daemon, link, login, logout, unlink, CommandBase}, + commands::{bin, daemon, generate, link, login, logout, unlink, CommandBase}, get_version, shim::{RepoMode, RepoState}, tracing::TurboSubscriber, @@ -264,6 +264,14 @@ pub enum Command { #[clap(long, value_enum, default_value_t = LinkTarget::RemoteCache)] target: LinkTarget, }, + /// Generate a new app / package + Generate { + #[clap(long, default_value_t = String::from("latest"), hide = true)] + tag: String, + #[clap(subcommand)] + #[serde(flatten)] + command: GenerateCommand, + }, /// Login to your Vercel account Login { #[clap(long = "sso-team")] @@ -300,6 +308,66 @@ pub enum Command { }, } +#[derive(Parser, Clone, Debug, Default, Serialize, PartialEq)] +pub struct GenerateCustomArgs { + /// The name of 
the generator to run + pub generator_name: Option, + /// Generator configuration file + #[clap(short = 'c', long)] + pub config: Option, + /// The root of your repository (default: directory with root turbo.json) + #[clap(short = 'r', long)] + pub root: Option, + /// Answers passed directly to generator + #[clap(short = 'a', long, value_delimiter = ' ', num_args = 1..)] + pub args: Vec, +} + +#[derive(Parser, Clone, Debug, Default, Serialize, PartialEq)] +pub struct GenerateAddArgs { + /// Name for the new workspace + #[clap(short = 'n', long)] + pub name: Option, + /// Generate an empty workspace + #[clap(short = 'b', long, conflicts_with = "copy", default_value_t = true)] + pub empty: bool, + /// Generate a workspace using an existing workspace as a template + #[clap(short = 'c', long, conflicts_with = "empty", default_value_t = false)] + pub copy: bool, + /// Where the new workspace should be created + #[clap(short = 'd', long)] + pub destination: Option, + /// The type of workspace to create + #[clap(short = 'w', long)] + pub what: Option, + /// The root of your repository (default: directory with root turbo.json) + #[clap(short = 'r', long)] + pub root: Option, + /// An example package to add. You can use a GitHub URL with any branch + /// and/or subdirectory. + #[clap(short = 'e', long)] + pub example: Option, + /// In a rare case, your GitHub URL might contain a branch name with a slash + /// (e.g. bug/fix-1) and the path to the example (e.g. foo/bar). 
In this + /// case, you must specify the path to the example separately: + /// --example-path foo/bar + #[clap(short = 'p', long)] + pub example_path: Option, + /// Do not filter available dependencies by the workspace type + #[clap(long, default_value_t = false)] + pub show_all_dependencies: bool, +} + +#[derive(Subcommand, Clone, Debug, Serialize, PartialEq)] +pub enum GenerateCommand { + /// Add a new package or app to your project + #[clap(name = "add", alias = "a")] + Add(GenerateAddArgs), + /// Run custom generators + #[clap(name = "run", alias = "r")] + Custom(GenerateCustomArgs), +} + #[derive(Parser, Clone, Debug, Default, Serialize, PartialEq)] pub struct RunArgs { /// Override the filesystem cache directory. @@ -567,6 +635,10 @@ pub async fn run( Ok(Payload::Rust(Ok(0))) } + Command::Generate { command, tag } => { + generate::run(command, tag)?; + Ok(Payload::Rust(Ok(0))) + } Command::Daemon { command, idle_time } => { let base = CommandBase::new(cli_args.clone(), repo_root, version, ui)?; diff --git a/crates/turborepo-lib/src/commands/generate.rs b/crates/turborepo-lib/src/commands/generate.rs new file mode 100644 index 0000000000000..88d4313ce4606 --- /dev/null +++ b/crates/turborepo-lib/src/commands/generate.rs @@ -0,0 +1,61 @@ +use std::process::{Command, Stdio}; + +use anyhow::Result; + +use crate::{child::spawn_child, cli::GenerateCommand}; + +fn verify_requirements() -> Result<()> { + let output = Command::new("npx") + .arg("--version") + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .status(); + + match output { + Ok(result) if result.success() => Ok(()), + _ => Err(anyhow::anyhow!( + "Unable to run generate - missing requirements (npx)" + )), + } +} + +fn call_turbo_gen(command: &str, tag: &String, raw_args: &str) -> Result { + let mut npx = Command::new("npx"); + npx.arg("--yes") + .arg(format!("@turbo/gen@{}", tag)) + .arg("raw") + .arg(command) + .args(["--json", raw_args]) + .stdout(Stdio::inherit()) + .stderr(Stdio::inherit()); + + let 
child = spawn_child(npx)?; + let exit_code = child.wait()?.code().unwrap_or(2); + Ok(exit_code) +} + +pub fn run(command: &GenerateCommand, tag: &String) -> Result<()> { + // ensure npx is available + verify_requirements()?; + + match command { + GenerateCommand::Add(args) => { + let mut add_args = args.clone(); + // example implies copy + if add_args.example.is_some() { + add_args.copy = true; + add_args.empty = false; + } + + // convert args to json + let raw_args = serde_json::to_string(&add_args)?; + call_turbo_gen("add", tag, &raw_args)?; + } + GenerateCommand::Custom(args) => { + let raw_args = serde_json::to_string(args)?; + call_turbo_gen("generate", tag, &raw_args)?; + } + }; + + Ok(()) +} diff --git a/crates/turborepo-lib/src/commands/mod.rs b/crates/turborepo-lib/src/commands/mod.rs index bab5ecd8cb7c6..f8443dd757263 100644 --- a/crates/turborepo-lib/src/commands/mod.rs +++ b/crates/turborepo-lib/src/commands/mod.rs @@ -17,6 +17,7 @@ use crate::{ pub(crate) mod bin; pub(crate) mod daemon; +pub(crate) mod generate; pub(crate) mod link; pub(crate) mod login; pub(crate) mod logout; diff --git a/crates/turborepo-scm/Cargo.toml b/crates/turborepo-scm/Cargo.toml index 466e6e11f5025..9c40f507ce844 100644 --- a/crates/turborepo-scm/Cargo.toml +++ b/crates/turborepo-scm/Cargo.toml @@ -7,9 +7,9 @@ license = "MPL-2.0" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +anyhow = { workspace = true } dunce = { workspace = true } git2 = { version = "0.16.1", default-features = false } -nom = "7.1.3" thiserror = { workspace = true } turbopath = { workspace = true } diff --git a/crates/turborepo-scm/fixtures/01-git-hash-object/.gitignore b/crates/turborepo-scm/fixtures/01-git-hash-object/.gitignore deleted file mode 100644 index d8e19507425ea..0000000000000 --- a/crates/turborepo-scm/fixtures/01-git-hash-object/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -"quote" -new*line diff --git 
a/crates/turborepo-scm/fixtures/01-git-hash-object/child/child.json b/crates/turborepo-scm/fixtures/01-git-hash-object/child/child.json deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/crates/turborepo-scm/fixtures/01-git-hash-object/child/grandchild/grandchild.json b/crates/turborepo-scm/fixtures/01-git-hash-object/child/grandchild/grandchild.json deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/crates/turborepo-scm/fixtures/01-git-hash-object/root.json b/crates/turborepo-scm/fixtures/01-git-hash-object/root.json deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/crates/turborepo-scm/src/hash_object.rs b/crates/turborepo-scm/src/hash_object.rs deleted file mode 100644 index f0797a731c38d..0000000000000 --- a/crates/turborepo-scm/src/hash_object.rs +++ /dev/null @@ -1,192 +0,0 @@ -use std::{ - io::{BufWriter, Read, Write}, - panic, - process::{Command, Stdio}, - thread, -}; - -use nom::{Finish, IResult}; -use turbopath::{AbsoluteSystemPathBuf, RelativeUnixPathBuf}; - -use crate::{package_deps::GitHashes, Error}; - -pub(crate) fn hash_objects( - pkg_path: &AbsoluteSystemPathBuf, - to_hash: Vec, - pkg_prefix: &RelativeUnixPathBuf, - hashes: &mut GitHashes, -) -> Result<(), Error> { - if to_hash.is_empty() { - return Ok(()); - } - let mut git = Command::new("git") - .args(["hash-object", "--stdin-paths"]) - .current_dir(pkg_path) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .stdin(Stdio::piped()) - .spawn()?; - { - let stdout = git - .stdout - .as_mut() - .ok_or_else(|| Error::git_error("failed to get stdout for git hash-object"))?; - // We take, rather than borrow, stdin so that we can drop it and force the - // underlying file descriptor to close, signalling the end of input. 
- let stdin: std::process::ChildStdin = git - .stdin - .take() - .ok_or_else(|| Error::git_error("failed to get stdin for git hash-object"))?; - let mut stderr = git - .stderr - .take() - .ok_or_else(|| Error::git_error("failed to get stderr for git hash-object"))?; - let result = read_object_hashes(stdout, stdin, &to_hash, pkg_prefix, hashes); - if let Err(err) = result { - let mut buf = String::new(); - let bytes_read = stderr.read_to_string(&mut buf)?; - if bytes_read > 0 { - // something failed with git, report that error - return Err(Error::git_error(buf)); - } - return Err(err); - } - } - git.wait()?; - Ok(()) -} - -const HASH_LEN: usize = 40; - -fn read_object_hashes( - mut reader: R, - writer: W, - to_hash: &Vec, - pkg_prefix: &RelativeUnixPathBuf, - hashes: &mut GitHashes, -) -> Result<(), Error> { - thread::scope(move |scope| -> Result<(), Error> { - let write_thread = scope.spawn(move || -> Result<(), Error> { - let mut writer = BufWriter::new(writer); - for path in to_hash { - path.write_escaped_bytes(&mut writer)?; - writer.write_all(&[b'\n'])?; - writer.flush()?; - } - // writer is dropped here, closing stdin - Ok(()) - }); - // Buffer size is HASH_LEN + 1 to account for the trailing \n - let mut buffer: [u8; HASH_LEN + 1] = [0; HASH_LEN + 1]; - for (i, filename) in to_hash.iter().enumerate() { - if i == to_hash.len() { - break; - } - reader.read_exact(&mut buffer)?; - { - let hash = parse_hash_object(&buffer)?; - let hash = String::from_utf8(hash.to_vec())?; - let path = filename.strip_prefix(pkg_prefix)?; - hashes.insert(path, hash); - } - } - match write_thread.join() { - // the error case is if the thread panic'd. In that case, we propagate - // the panic, since we aren't going to handle it. 
- Err(e) => panic::resume_unwind(e), - Ok(result) => result, - } - })?; - Ok(()) -} - -fn parse_hash_object(i: &[u8]) -> Result<&[u8], Error> { - match nom_parse_hash_object(i).finish() { - Ok((_, hash)) => Ok(hash), - Err(e) => Err(Error::git_error(format!( - "failed to parse git-hash-object {}", - String::from_utf8_lossy(e.input) - ))), - } -} - -fn nom_parse_hash_object(i: &[u8]) -> IResult<&[u8], &[u8]> { - let (i, hash) = nom::bytes::complete::take(HASH_LEN)(i)?; - let (i, _) = nom::bytes::complete::tag(&[b'\n'])(i)?; - Ok((i, hash)) -} - -#[cfg(test)] -mod test { - use turbopath::{AbsoluteSystemPathBuf, RelativeUnixPathBuf}; - - use super::hash_objects; - use crate::package_deps::{find_git_root, GitHashes}; - - #[test] - fn test_read_object_hashes() { - // Note that cwd can be different based on where the test suite is running from - // or if the test is launched in debug mode from VSCode - let cwd = std::env::current_dir().unwrap(); - let cwd = AbsoluteSystemPathBuf::new(cwd).unwrap(); - let git_root = find_git_root(&cwd).unwrap(); - let fixture_path = git_root - .join_unix_path_literal("crates/turborepo-scm/fixtures/01-git-hash-object") - .unwrap(); - - let fixture_child_path = fixture_path.join_literal("child"); - let git_root = find_git_root(&fixture_path).unwrap(); - - // paths for files here are relative to the package path. 
- let tests: Vec<(Vec<(&str, &str)>, &AbsoluteSystemPathBuf)> = vec![ - (vec![], &fixture_path), - ( - vec![ - ("../root.json", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), - ("child.json", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), - ( - "grandchild/grandchild.json", - "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", - ), - ], - &fixture_child_path, - ), - ]; - - for (to_hash, pkg_path) in tests { - let file_hashes: Vec<(RelativeUnixPathBuf, String)> = to_hash - .into_iter() - .map(|(raw, hash)| (RelativeUnixPathBuf::new(raw).unwrap(), String::from(hash))) - .collect(); - - let git_to_pkg_path = git_root.anchor(pkg_path).unwrap(); - let pkg_prefix = git_to_pkg_path.to_unix().unwrap(); - - let expected_hashes = GitHashes::from_iter(file_hashes.into_iter()); - let mut hashes = GitHashes::new(); - let to_hash = expected_hashes.keys().map(|k| pkg_prefix.join(k)).collect(); - hash_objects(&pkg_path, to_hash, &pkg_prefix, &mut hashes).unwrap(); - assert_eq!(hashes, expected_hashes); - } - - // paths for files here are relative to the package path. 
- let error_tests: Vec<(Vec<&str>, &AbsoluteSystemPathBuf)> = vec![ - // skipping test for outside of git repo, we now error earlier in the process - (vec!["nonexistent.json"], &fixture_path), - ]; - - for (to_hash, pkg_path) in error_tests { - let git_to_pkg_path = git_root.anchor(pkg_path).unwrap(); - let pkg_prefix = git_to_pkg_path.to_unix().unwrap(); - - let to_hash = to_hash - .into_iter() - .map(|k| pkg_prefix.join(&RelativeUnixPathBuf::new(k).unwrap())) - .collect(); - - let mut hashes = GitHashes::new(); - let result = hash_objects(&pkg_path, to_hash, &pkg_prefix, &mut hashes); - assert_eq!(result.is_err(), true); - } - } -} diff --git a/crates/turborepo-scm/src/lib.rs b/crates/turborepo-scm/src/lib.rs index d0b11553269df..86abdfc38d4e5 100644 --- a/crates/turborepo-scm/src/lib.rs +++ b/crates/turborepo-scm/src/lib.rs @@ -2,16 +2,12 @@ #![feature(provide_any)] #![feature(assert_matches)] -use std::backtrace::{self, Backtrace}; +use std::backtrace; use thiserror::Error; use turbopath::PathError; pub mod git; -mod hash_object; -mod ls_tree; -pub mod package_deps; -mod status; #[derive(Debug, Error)] pub enum Error { @@ -23,15 +19,4 @@ pub enum Error { Io(#[from] std::io::Error, #[backtrace] backtrace::Backtrace), #[error("path error: {0}")] Path(#[from] PathError, #[backtrace] backtrace::Backtrace), - #[error("encoding error: {0}")] - Encoding( - #[from] std::string::FromUtf8Error, - #[backtrace] backtrace::Backtrace, - ), -} - -impl Error { - pub(crate) fn git_error(s: impl Into) -> Self { - Error::Git(s.into(), Backtrace::capture()) - } } diff --git a/crates/turborepo-scm/src/ls_tree.rs b/crates/turborepo-scm/src/ls_tree.rs deleted file mode 100644 index 51fb9626e3e3a..0000000000000 --- a/crates/turborepo-scm/src/ls_tree.rs +++ /dev/null @@ -1,150 +0,0 @@ -use std::{ - io::{BufRead, BufReader, Read}, - process::{Command, Stdio}, -}; - -use nom::Finish; -use turbopath::{AbsoluteSystemPathBuf, RelativeUnixPathBuf}; - -use crate::{package_deps::GitHashes, 
Error}; - -pub fn git_ls_tree(root_path: &AbsoluteSystemPathBuf) -> Result { - let mut hashes = GitHashes::new(); - let mut git = Command::new("git") - .args(["ls-tree", "-r", "-z", "HEAD"]) - .current_dir(root_path) - .stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn()?; - { - let stdout = git - .stdout - .as_mut() - .ok_or_else(|| Error::git_error("failed to get stdout for git ls-tree"))?; - let mut stderr = git - .stderr - .take() - .ok_or_else(|| Error::git_error("failed to get stderr for git ls-tree"))?; - let result = read_ls_tree(stdout, &mut hashes); - if result.is_err() { - let mut buf = String::new(); - let bytes_read = stderr.read_to_string(&mut buf)?; - if bytes_read > 0 { - // something failed with git, report that error - return Err(Error::git_error(buf)); - } - } - result?; - } - git.wait()?; - Ok(hashes) -} - -fn read_ls_tree(reader: R, hashes: &mut GitHashes) -> Result<(), Error> { - let mut reader = BufReader::new(reader); - let mut buffer = Vec::new(); - loop { - buffer.clear(); - { - let bytes_read = reader.read_until(b'\0', &mut buffer)?; - if bytes_read == 0 { - break; - } - { - let entry = parse_ls_tree(&buffer)?; - let hash = String::from_utf8(entry.hash.to_vec())?; - let path = RelativeUnixPathBuf::new(entry.filename)?; - hashes.insert(path, hash); - } - } - } - Ok(()) -} - -struct LsTreeEntry<'a> { - filename: &'a [u8], - hash: &'a [u8], -} - -fn parse_ls_tree(i: &[u8]) -> Result, Error> { - match nom_parse_ls_tree(i).finish() { - Ok((_, entry)) => Ok(entry), - Err(e) => Err(Error::git_error(format!( - "failed to parse git-ls-tree: {}", - String::from_utf8_lossy(e.input) - ))), - } -} - -fn nom_parse_ls_tree(i: &[u8]) -> nom::IResult<&[u8], LsTreeEntry<'_>> { - let (i, _) = nom::bytes::complete::is_not(" ")(i)?; - let (i, _) = nom::character::complete::space1(i)?; - let (i, _) = nom::bytes::complete::is_not(" ")(i)?; - let (i, _) = nom::character::complete::space1(i)?; - let (i, hash) = nom::bytes::complete::take(40usize)(i)?; - 
let (i, _) = nom::bytes::complete::take(1usize)(i)?; - let (i, filename) = nom::bytes::complete::is_not(" \0")(i)?; - Ok((i, LsTreeEntry { filename, hash })) -} - -#[cfg(test)] -mod tests { - use std::collections::HashMap; - - use turbopath::RelativeUnixPathBuf; - - use crate::{ls_tree::read_ls_tree, package_deps::GitHashes}; - - fn to_hash_map(pairs: &[(&str, &str)]) -> GitHashes { - HashMap::from_iter(pairs.into_iter().map(|(path, hash)| { - ( - RelativeUnixPathBuf::new(path.as_bytes()).unwrap(), - hash.to_string(), - ) - })) - } - - #[test] - fn test_ls_tree() { - let tests: &[(&str, &[(&str, &str)])] = &[ - ( - "100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\tpackage.json\0", - &[("package.json", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391")], - ), - ( - // missing nul byte - "100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\tpackage.json", - &[("package.json", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391")], - ), - ( - "100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t\t\000100644 blob \ - e69de29bb2d1d6434b8b29ae775ad8c2e48c5391\t\"\000100644 blob \ - 5b999efa470b056e329b4c23a73904e0794bdc2f\t\n\000100644 blob \ - f44f57fff95196c5f7139dfa0b96875f1e9650a9\t.gitignore\000100644 blob \ - 33dbaf21275ca2a5f460249d941cbc27d5da3121\tREADME.md\000040000 tree \ - 7360f2d292aec95907cebdcbb412a6bf2bd10f8a\tapps\000100644 blob \ - 9ec2879b24ce2c817296eebe2cb3846f8e4751ea\tpackage.json\000040000 tree \ - 5759aadaea2cde55468a61e7104eb0a9d86c1d30\tpackages\000100644 blob \ - 33d0621ee2f4da4a2f6f6bdd51a42618d181e337\tturbo.json\0", - &[ - ("\t", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), - ("\"", "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"), - ("\n", "5b999efa470b056e329b4c23a73904e0794bdc2f"), - (".gitignore", "f44f57fff95196c5f7139dfa0b96875f1e9650a9"), - ("README.md", "33dbaf21275ca2a5f460249d941cbc27d5da3121"), - ("apps", "7360f2d292aec95907cebdcbb412a6bf2bd10f8a"), - ("package.json", "9ec2879b24ce2c817296eebe2cb3846f8e4751ea"), - ("packages", 
"5759aadaea2cde55468a61e7104eb0a9d86c1d30"), - ("turbo.json", "33d0621ee2f4da4a2f6f6bdd51a42618d181e337"), - ], - ), - ]; - for (input, expected) in tests { - let input_bytes = input.as_bytes(); - let mut hashes = GitHashes::new(); - let expected = to_hash_map(expected); - read_ls_tree(input_bytes, &mut hashes).unwrap(); - assert_eq!(hashes, expected); - } - } -} diff --git a/crates/turborepo-scm/src/package_deps.rs b/crates/turborepo-scm/src/package_deps.rs deleted file mode 100644 index 27002cfef8eee..0000000000000 --- a/crates/turborepo-scm/src/package_deps.rs +++ /dev/null @@ -1,148 +0,0 @@ -use std::{collections::HashMap, process::Command}; - -use turbopath::{AbsoluteSystemPathBuf, AnchoredSystemPathBuf, RelativeUnixPathBuf}; - -use crate::{hash_object::hash_objects, ls_tree::git_ls_tree, status::append_git_status, Error}; - -pub type GitHashes = HashMap; - -pub fn get_package_file_hashes_from_git_index( - turbo_root: &AbsoluteSystemPathBuf, - package_path: &AnchoredSystemPathBuf, -) -> Result { - // TODO: memoize git root -> turbo root calculation once we aren't crossing ffi - let git_root = find_git_root(turbo_root)?; - let full_pkg_path = turbo_root.resolve(package_path); - let git_to_pkg_path = git_root.anchor(&full_pkg_path)?; - let pkg_prefix = git_to_pkg_path.to_unix()?; - let mut hashes = git_ls_tree(&full_pkg_path)?; - // Note: to_hash is *git repo relative* - let to_hash = append_git_status(&full_pkg_path, &pkg_prefix, &mut hashes)?; - hash_objects(&full_pkg_path, to_hash, &pkg_prefix, &mut hashes)?; - Ok(hashes) -} - -pub(crate) fn find_git_root( - turbo_root: &AbsoluteSystemPathBuf, -) -> Result { - let rev_parse = Command::new("git") - .args(["rev-parse", "--show-cdup"]) - .current_dir(turbo_root) - .output()?; - let root = String::from_utf8(rev_parse.stdout)?; - Ok(turbo_root.join_literal(root.trim_end()).to_realpath()?) 
-} - -#[cfg(test)] -mod tests { - use std::process::Command; - - use super::*; - - fn tmp_dir() -> (tempfile::TempDir, AbsoluteSystemPathBuf) { - let tmp_dir = tempfile::tempdir().unwrap(); - let dir = AbsoluteSystemPathBuf::new(tmp_dir.path().to_path_buf()) - .unwrap() - .to_realpath() - .unwrap(); - (tmp_dir, dir) - } - - fn require_git_cmd(repo_root: &AbsoluteSystemPathBuf, args: &[&str]) { - let mut cmd = Command::new("git"); - cmd.args(args).current_dir(repo_root); - assert_eq!(cmd.output().unwrap().status.success(), true); - } - - fn setup_repository(repo_root: &AbsoluteSystemPathBuf) { - let cmds: &[&[&str]] = &[ - &["init", "."], - &["config", "--local", "user.name", "test"], - &["config", "--local", "user.email", "test@example.com"], - ]; - for cmd in cmds { - require_git_cmd(repo_root, cmd); - } - } - - fn commit_all(repo_root: &AbsoluteSystemPathBuf) { - let cmds: &[&[&str]] = &[&["add", "."], &["commit", "-m", "foo"]]; - for cmd in cmds { - require_git_cmd(repo_root, cmd); - } - } - - #[test] - fn test_get_package_deps() -> Result<(), Error> { - // Directory structure: - // / - // new-root-file <- new file not added to git - // my-pkg/ - // committed-file - // deleted-file - // uncommitted-file <- new file not added to git - // dir/ - // nested-file - let (_repo_root_tmp, repo_root) = tmp_dir(); - let my_pkg_dir = repo_root.join_literal("my-pkg"); - my_pkg_dir.create_dir_all()?; - - // create file 1 - let committed_file_path = my_pkg_dir.join_literal("committed-file"); - committed_file_path.create_with_contents("committed bytes")?; - - // create file 2 - let deleted_file_path = my_pkg_dir.join_literal("deleted-file"); - deleted_file_path.create_with_contents("delete-me")?; - - // create file 3 - let nested_file_path = my_pkg_dir.join_literal("dir/nested-file"); - nested_file_path.ensure_dir()?; - nested_file_path.create_with_contents("nested")?; - - // create a package.json - let pkg_json_path = my_pkg_dir.join_literal("package.json"); - 
pkg_json_path.create_with_contents("{}")?; - - setup_repository(&repo_root); - commit_all(&repo_root); - - // remove a file - deleted_file_path.remove()?; - - // create another untracked file in git - let uncommitted_file_path = my_pkg_dir.join_literal("uncommitted-file"); - uncommitted_file_path.create_with_contents("uncommitted bytes")?; - - // create an untracked file in git up a level - let root_file_path = repo_root.join_literal("new-root-file"); - root_file_path.create_with_contents("new-root bytes")?; - - let package_path = AnchoredSystemPathBuf::from_raw("my-pkg")?; - - let expected = to_hash_map(&[ - ("committed-file", "3a29e62ea9ba15c4a4009d1f605d391cdd262033"), - ( - "uncommitted-file", - "4e56ad89387e6379e4e91ddfe9872cf6a72c9976", - ), - ("package.json", "9e26dfeeb6e641a33dae4961196235bdb965b21b"), - ( - "dir/nested-file", - "bfe53d766e64d78f80050b73cd1c88095bc70abb", - ), - ]); - let hashes = get_package_file_hashes_from_git_index(&repo_root, &package_path)?; - assert_eq!(hashes, expected); - Ok(()) - } - - fn to_hash_map(pairs: &[(&str, &str)]) -> GitHashes { - HashMap::from_iter(pairs.into_iter().map(|(path, hash)| { - ( - RelativeUnixPathBuf::new(path.as_bytes()).unwrap(), - hash.to_string(), - ) - })) - } -} diff --git a/crates/turborepo-scm/src/status.rs b/crates/turborepo-scm/src/status.rs deleted file mode 100644 index f7110c90e0cea..0000000000000 --- a/crates/turborepo-scm/src/status.rs +++ /dev/null @@ -1,156 +0,0 @@ -use std::{ - io::{BufRead, BufReader, Read}, - process::{Command, Stdio}, -}; - -use nom::Finish; -use turbopath::{AbsoluteSystemPathBuf, RelativeUnixPathBuf}; - -use crate::{package_deps::GitHashes, Error}; - -pub(crate) fn append_git_status( - root_path: &AbsoluteSystemPathBuf, - pkg_prefix: &RelativeUnixPathBuf, - hashes: &mut GitHashes, -) -> Result, Error> { - let mut git = Command::new("git") - .args([ - "status", - "--untracked-files", - "--no-renames", - "-z", - "--", - ".", - ]) - .current_dir(root_path) - 
.stdout(Stdio::piped()) - .stderr(Stdio::piped()) - .spawn()?; - let to_hash = { - let stdout = git - .stdout - .as_mut() - .ok_or_else(|| Error::git_error("failed to get stdout for git status"))?; - let mut stderr = git - .stderr - .take() - .ok_or_else(|| Error::git_error("failed to get stderr for git status"))?; - let result = read_status(stdout, pkg_prefix, hashes); - if result.is_err() { - let mut buf = String::new(); - let bytes_read = stderr.read_to_string(&mut buf)?; - if bytes_read > 0 { - // something failed with git, report that error - return Err(Error::git_error(buf)); - } - } - result? - }; - git.wait()?; - Ok(to_hash) -} - -fn read_status( - reader: R, - pkg_prefix: &RelativeUnixPathBuf, - hashes: &mut GitHashes, -) -> Result, Error> { - let mut to_hash = Vec::new(); - let mut reader = BufReader::new(reader); - let mut buffer = Vec::new(); - loop { - buffer.clear(); - { - let bytes_read = reader.read_until(b'\0', &mut buffer)?; - if bytes_read == 0 { - break; - } - { - let entry = parse_status(&buffer)?; - let path = RelativeUnixPathBuf::new(entry.filename)?; - if entry.is_delete { - let path = path.strip_prefix(pkg_prefix)?; - hashes.remove(&path); - } else { - to_hash.push(path); - } - } - } - } - Ok(to_hash) -} - -struct StatusEntry<'a> { - filename: &'a [u8], - is_delete: bool, -} - -fn parse_status(i: &[u8]) -> Result, Error> { - match nom_parse_status(i).finish() { - Ok((_, tup)) => Ok(tup), - Err(e) => Err(Error::git_error(format!( - "failed to parse git-status: {}", - String::from_utf8_lossy(e.input) - ))), - } -} - -fn nom_parse_status(i: &[u8]) -> nom::IResult<&[u8], StatusEntry<'_>> { - let (i, x) = nom::bytes::complete::take(1usize)(i)?; - let (i, y) = nom::bytes::complete::take(1usize)(i)?; - let (i, _) = nom::character::complete::space1(i)?; - let (i, filename) = nom::bytes::complete::is_not(" \0")(i)?; - Ok(( - i, - StatusEntry { - filename, - is_delete: x[0] == b'D' || y[0] == b'D', - }, - )) -} - -#[cfg(test)] -mod tests { - use 
std::collections::HashMap; - - use turbopath::RelativeUnixPathBuf; - - use super::read_status; - use crate::package_deps::GitHashes; - - #[test] - fn test_status() { - let tests: &[(&str, &str, (&str, bool))] = &[ - ("AD my-pkg/package.json\0", "my-pkg", ("package.json", true)), - ( - // no trailing NUL - "AD some-pkg/package.json", - "some-pkg", - ("package.json", true), - ), - ("M package.json\0", "", ("package.json", false)), - ("A some-pkg/some-file\0", "some-pkg", ("some-file", false)), - ]; - for (input, prefix, (expected_filename, expect_delete)) in tests { - let prefix = RelativeUnixPathBuf::new(prefix.as_bytes()).unwrap(); - let mut hashes = to_hash_map(&[(expected_filename, "some-hash")]); - let to_hash = read_status(input.as_bytes(), &prefix, &mut hashes).unwrap(); - if *expect_delete { - assert_eq!(hashes.len(), 0, "input: {}", input); - } else { - assert_eq!(to_hash.len(), 1, "input: {}", input); - let expected = prefix.join(&RelativeUnixPathBuf::new(*expected_filename).unwrap()); - assert_eq!(to_hash[0], expected); - } - } - } - - fn to_hash_map(pairs: &[(&str, &str)]) -> GitHashes { - HashMap::from_iter(pairs.into_iter().map(|(path, hash)| { - ( - RelativeUnixPathBuf::new(path.as_bytes()).unwrap(), - hash.to_string(), - ) - })) - } -} diff --git a/docs/components/ExamplesArea.tsx b/docs/components/ExamplesArea.tsx index 8b382632a5d37..17c8f3f3c41fd 100644 --- a/docs/components/ExamplesArea.tsx +++ b/docs/components/ExamplesArea.tsx @@ -1,6 +1,5 @@ import { useSSG } from "nextra/ssg"; -import { DetailedFeatureLink } from "./Feature"; -import { GitHubIcon } from "./Icons"; +import { ExampleCard } from "./ExamplesCard"; export const ExamplesArea = ({ filter = "featured", @@ -10,19 +9,18 @@ export const ExamplesArea = ({ const { examples } = useSSG(); return ( -
+
{examples .filter(({ featured }) => (filter === "featured" ? featured : true)) - .map(({ name, description, slug }) => ( - (a.template ? -1 : 1)) + .map(({ name, description, slug, featured, template }) => ( + ))}
diff --git a/docs/components/ExamplesCard/AnimatedIcon.tsx b/docs/components/ExamplesCard/AnimatedIcon.tsx new file mode 100644 index 0000000000000..cd6ff623a56e7 --- /dev/null +++ b/docs/components/ExamplesCard/AnimatedIcon.tsx @@ -0,0 +1,120 @@ +import { useState, useEffect } from "react"; +import { AnimatePresence, motion } from "framer-motion"; +import { SVGProps } from "react"; + +type SVGPaths = Record>; + +const AnimatedPaths: SVGPaths = { + prompt: { + d: "M103.5 439.875L258.75 284.625L103.5 129.375", + }, + check: { + d: "M73.0002 364.165L252.952 489.952L540 84.5", + }, +}; + +const StaticPaths: SVGPaths = { + vercel: { + d: "M310 72L378.75 191.2L447.5 310.399L516.25 429.599L585 548.799H447.5H310H172.5H35L103.75 429.599L172.5 310.399L241.25 191.2L310 72Z", + }, + github: { + fillRule: "evenodd", + clipRule: "evenodd", + d: "M310.228 39C158.034 39 35 163.026 35 316.464C35 439.114 113.832 542.936 223.193 579.681C236.866 582.444 241.874 573.711 241.874 566.365C241.874 559.933 241.423 537.884 241.423 514.911C164.862 531.452 148.919 481.836 148.919 481.836C136.615 449.679 118.384 441.415 118.384 441.415C93.3254 424.417 120.209 424.417 120.209 424.417C148.006 426.255 162.591 452.898 162.591 452.898C187.194 495.157 226.838 483.217 242.787 475.866C245.063 457.949 252.358 445.547 260.105 438.658C199.041 432.225 134.795 408.339 134.795 301.761C134.795 271.442 145.724 246.637 163.042 227.345C160.31 220.456 150.738 191.97 165.78 153.843C165.78 153.843 189.019 146.491 241.418 182.324C263.852 176.25 286.987 173.16 310.228 173.134C333.466 173.134 357.156 176.353 379.032 182.324C431.436 146.491 454.675 153.843 454.675 153.843C469.717 191.97 460.14 220.456 457.407 227.345C475.182 246.637 485.66 271.442 485.66 301.761C485.66 408.339 421.414 431.763 359.894 438.658C369.922 447.385 378.575 463.92 378.575 490.106C378.575 527.314 378.125 557.176 378.125 566.36C378.125 573.711 383.139 582.444 396.806 579.687C506.167 542.93 584.999 439.114 584.999 316.464C585.449 163.026 461.965 
39 310.228 39Z", + }, +}; + +export function AnimatedIcon({ + icon, + showCheck, +}: { + icon: string; + showCheck?: boolean; +}) { + const [showCheckInternal, setShowCheckInternal] = useState(showCheck); + + useEffect(() => { + if (!showCheck) { + return; + } + + setShowCheckInternal(true); + const timeout = setTimeout(() => { + setShowCheckInternal(false); + }, 1500); + + return () => clearTimeout(timeout); + }, [showCheck]); + + if (icon === "vercel" || icon === "github") { + return ( + + + + ); + } + + return ( + + + {/* prompt > or check */} + + {/* prompt: bottom line */} + + + + ); +} diff --git a/docs/components/ExamplesCard/index.tsx b/docs/components/ExamplesCard/index.tsx new file mode 100644 index 0000000000000..8d1558fda6688 --- /dev/null +++ b/docs/components/ExamplesCard/index.tsx @@ -0,0 +1,126 @@ +import { motion, useAnimationControls } from "framer-motion"; +import { useState, useEffect } from "react"; +import classNames from "classnames"; +import copy from "copy-to-clipboard"; +import { GitHubIcon } from "../Icons"; +import { AnimatedIcon } from "./AnimatedIcon"; + +export function ExampleCard({ + name, + description, + slug, + template, +}: { + name: string; + description: string; + slug: string; + template?: string; +}) { + const [isHoveringStartBuilding, setIsHoveringStartBuilding] = useState(false); + const [isHoveringDeployNow, setIsHoveringDeployNow] = useState(false); + const [copiedStartBuildingCmd, setCopiedStartBuildingCmd] = useState(false); + const [details, setDetails] = useState({ + icon: "github", + text: `examples/${slug}`, + }); + + const onCopyStartBuildingCmd = () => { + copy(`npx create-turbo -e ${slug}`); + setCopiedStartBuildingCmd(true); + }; + + useEffect(() => { + if (!copiedStartBuildingCmd) { + return; + } + + const timeout = setTimeout(() => { + setCopiedStartBuildingCmd(false); + }, 2000); + + return () => clearTimeout(timeout); + }, [copiedStartBuildingCmd]); + + useEffect(() => { + if (copiedStartBuildingCmd) { + 
setDetails({ + icon: "prompt", + text: `copied to clipboard`, + }); + } else if (isHoveringStartBuilding) { + setDetails({ + icon: "prompt", + text: `npx create-turbo -e ${slug}`, + }); + } else if (isHoveringDeployNow) { + setDetails({ + icon: "vercel", + text: `Deploy with Vercel`, + }); + } else { + setDetails({ + icon: "github", + text: `examples/${slug}`, + }); + } + }, [isHoveringStartBuilding, isHoveringDeployNow, copiedStartBuildingCmd]); + + return ( +
  • + +

    + + {name} + +

    + + {description} + + +
    + + + {details.text} + +
    +
    +
    + + {template && ( + setIsHoveringDeployNow(true)} + onMouseLeave={() => setIsHoveringDeployNow(false)} + > + Deploy Now + + )} +
    +
  • + ); +} diff --git a/docs/components/clients/users.ts b/docs/components/clients/users.ts index efd21727bc64e..6e7c6174047d7 100644 --- a/docs/components/clients/users.ts +++ b/docs/components/clients/users.ts @@ -553,4 +553,10 @@ export const users: Array = [ infoLink: "https://chotot.com/", pinned: true, }, + { + caption: "SLIIT FOSS", + image: "/images/logos/sliit-foss.svg", + infoLink: "https://sliitfoss.org", + pinned: true, + }, ]; diff --git a/docs/package.json b/docs/package.json index e9384f72bb157..f4c80a8bcdb4d 100644 --- a/docs/package.json +++ b/docs/package.json @@ -30,6 +30,7 @@ "next-themes": "^0.2.0", "nextra": "^2.1.0", "nextra-theme-docs": "^2.1.0", + "node-fetch": "^2.6.8", "react": "^18.2.0", "react-dom": "^18.2.0", "sharp": "^0.32.1", @@ -37,6 +38,7 @@ }, "devDependencies": { "@babel/core": "7.20.12", + "@turbo/gen": "workspace:*", "@turbo/types": "workspace:*", "@types/node": "^16.11.12", "@types/react": "18.2.0", diff --git a/docs/pages/repo/docs/ci/github-actions.mdx b/docs/pages/repo/docs/ci/github-actions.mdx index 2991a0a3604b2..64b11115ab949 100644 --- a/docs/pages/repo/docs/ci/github-actions.mdx +++ b/docs/pages/repo/docs/ci/github-actions.mdx @@ -62,7 +62,7 @@ Create file called `.github/workflows/ci.yml` in your repository with the follow # To use Remote Caching, uncomment the next lines and follow the steps below. # env: # TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} - # TURBO_TEAM: ${{ secrets.TURBO_TEAM }} + # TURBO_TEAM: ${{ vars.TURBO_TEAM }} # TURBO_REMOTE_ONLY: true steps: @@ -106,7 +106,7 @@ Create file called `.github/workflows/ci.yml` in your repository with the follow # To use Remote Caching, uncomment the next lines and follow the steps below. 
# env: # TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} - # TURBO_TEAM: ${{ secrets.TURBO_TEAM }} + # TURBO_TEAM: ${{ vars.TURBO_TEAM }} steps: - name: Check out code @@ -149,7 +149,7 @@ Create file called `.github/workflows/ci.yml` in your repository with the follow # To use Remote Caching, uncomment the next lines and follow the steps below. # env: # TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} - # TURBO_TEAM: ${{ secrets.TURBO_TEAM }} + # TURBO_TEAM: ${{ vars.TURBO_TEAM }} steps: - name: Check out code @@ -223,7 +223,7 @@ jobs: # To use Turborepo Remote Caching, set the following environment variables for the job. env: TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }} - TURBO_TEAM: ${{ secrets.TURBO_TEAM }} + TURBO_TEAM: ${{ vars.TURBO_TEAM }} steps: - name: Check out code diff --git a/docs/public/images/logos/color/sliit-foss.svg b/docs/public/images/logos/color/sliit-foss.svg new file mode 100644 index 0000000000000..a97b14e3d74c0 --- /dev/null +++ b/docs/public/images/logos/color/sliit-foss.svg @@ -0,0 +1,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/public/images/logos/white/sliit-foss.svg b/docs/public/images/logos/white/sliit-foss.svg new file mode 100644 index 0000000000000..c6f749e8b59e0 --- /dev/null +++ b/docs/public/images/logos/white/sliit-foss.svg @@ -0,0 +1,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/turbo/generators/config.ts b/docs/turbo/generators/config.ts new file mode 100644 index 0000000000000..9cd9aa2557acd --- /dev/null +++ b/docs/turbo/generators/config.ts @@ -0,0 +1,146 @@ +import fs from "fs"; +import { releasePostStats } from "./utils"; +import * as helpers from "./helpers"; +import type { PlopTypes } from "@turbo/gen"; + +export default function 
generator(plop: PlopTypes.NodePlopAPI): void { + // add helpers for use in templates + helpers.init(plop); + + // create generators + plop.setGenerator("blog - release post", { + description: "Add a new release post to the turbo.build blog", + prompts: [ + { + type: "input", + name: "version", + message: + 'The full semantic version of the new release (example: "1.9.0")', + validate: (input) => { + if (!input.match(/^\d+\.\d+\.\d+$/)) { + return "Version must be in the form of major.minor.patch"; + } + return true; + }, + }, + { + type: "input", + name: "prevVersion", + message: + 'The full semantic version of the previous release (example: "1.8.0")', + validate: (input) => { + if (!input.match(/^\d+\.\d+\.\d+$/)) { + return "Version must be in the form of major.minor.patch"; + } + return true; + }, + }, + { + type: "checkbox", + choices: [ + "gregsoltis", + "nathanhammond", + "tomknickman", + "anthonyshew", + "jaredpalmer", + "mehulkar", + "chrisolszewski", + "nicholasyang", + "alexanderlyon", + ], + name: "authors", + pageSize: 20, + message: "Select all authors for the release blog post.", + }, + { + type: "input", + name: "tagline", + message: + 'What is the tagline for the release (example: "focuses on improving observability for your task runs to better understand your caching behavior")', + }, + { + type: "input", + name: "headlineTitle1", + message: "What is the first headline feature?", + }, + { + type: "input", + name: "headlineTitle2", + message: "What is the second headline feature?", + }, + { + type: "input", + name: "headlineTitle3", + message: "What is the third headline feature?", + }, + ], + actions: [ + // extend answers with data fetched asynchronously + releasePostStats, + { + type: "add", + path: "pages/blog/turbo-{{dashCase version}}.mdx", + templateFile: "templates/release-blog-post.hbs", + }, + { + type: "append", + path: "pages/blog/_meta.json", + pattern: /"\*":\s\{(.|\n)*?\},/gm, + template: + ' "turbo-{{dashCase version}}": "Turborepo {{ 
majorMinor version }}",', + }, + ], + }); + + plop.setGenerator("blog - update release post stats", { + description: "Update stats in a release post", + prompts: [ + { + type: "list", + name: "post", + pageSize: 20, + message: "Which release post should the stats be updated?", + choices: () => { + return fs + .readdirSync("pages/blog") + .filter((f) => f.startsWith("turbo-")) + .map((f) => ({ + name: f + .replace("turbo-", "") + .replace(".mdx", "") + .replace(/-/g, "."), + value: f, + })); + }, + }, + ], + actions: [ + // extend answers with data fetched asynchronously + releasePostStats, + // update github stars + { + type: "modify", + path: "pages/blog/{{ post }}", + pattern: /^-\s\[.*?\sGitHub\sStars\].*$/gm, + template: + "- [{{ turboStars }}+ GitHub Stars](https://github.com/vercel/turbo)", + }, + // update weekly npm downloads + { + type: "modify", + path: "pages/blog/{{ post }}", + pattern: /^-\s\[.*?\sweekly\sNPM\sdownloads\].*$/gm, + template: + "- [{{ turboDownloads }}+ weekly NPM downloads](https://www.npmjs.com/package/turbo)", + }, + // update years saved + { + type: "modify", + path: "pages/blog/{{ post }}", + pattern: /^-\s.*?years of compute time saved.*$/gm, + template: + "- {{ turboYearsSaved }} years of compute time saved through [Remote Caching on Vercel](https://vercel.com/docs/concepts/monorepos/remote-caching)", + }, + ], + }); +} diff --git a/docs/turbo/generators/helpers.ts b/docs/turbo/generators/helpers.ts new file mode 100644 index 0000000000000..6b48604b1154f --- /dev/null +++ b/docs/turbo/generators/helpers.ts @@ -0,0 +1,14 @@ +import type { PlopTypes } from "@turbo/gen"; + +// helpers +const dateToday = (): string => + new Date().toISOString().split("T")[0].replace(/-/g, "/"); + +const majorMinor = (version: string): string => + version.split(".").slice(0, 2).join("."); + +export function init(plop: PlopTypes.NodePlopAPI): void { + // add helpers for use in templates + plop.setHelper("dateToday", dateToday); + 
plop.setHelper("majorMinor", majorMinor); +} diff --git a/docs/turbo/generators/templates/release-blog-post.hbs b/docs/turbo/generators/templates/release-blog-post.hbs new file mode 100644 index 0000000000000..3c02fb5785dfd --- /dev/null +++ b/docs/turbo/generators/templates/release-blog-post.hbs @@ -0,0 +1,55 @@ +--- +title: Turborepo {{ majorMinor version }} +date: {{ dateToday }} +description: Turborepo {{ majorMinor version }} {{ tagline }} +tag: "web development" +ogImage: /images/blog/turbo-{{ dashCase version }}/twitter-card.png +--- + +# Turborepo {{ majorMinor version }} + +import { Authors } from '../../components/Authors' +import Badge from '../../components/Badge' +import Date from "../../components/blog/Date"; + + + Monday, April 11th, 2023 + + + + +Turborepo {{ majorMinor version }} focuses on improving observability for your task runs to better understand your caching behavior: + +- [**{{ titleCase headlineTitle1 }}**](#{{ dashCase headlineTitle1}}): Description. +- [**{{ titleCase headlineTitle2 }}**](#{{ dashCase headlineTitle2}}): Description. +- [**{{ titleCase headlineTitle3 }}**](#{{ dashCase headlineTitle3}}): Description. +
+Update today by running `npx @turbo/codemod migrate`. + +## {{ titleCase headlineTitle1 }} + +Details + +## {{ titleCase headlineTitle2 }} + +Details + +## {{ titleCase headlineTitle3 }} + +Details + +## Community + +Since releasing [Turborepo v{{ majorMinor prevVersion }}](/blog/turbo-{{ dashCase prevVersion }}) we've seen incredible adoption and community growth: + +- [{{ turboStars }}+ GitHub Stars](https://github.com/vercel/turbo) +- [{{ turboDownloads }}+ weekly NPM downloads](https://www.npmjs.com/package/turbo) +- {{ turboYearsSaved }} years of compute time saved through [Remote Caching on Vercel](https://vercel.com/docs/concepts/monorepos/remote-caching) + +Turborepo is the result of the combined work of all of its contributors, including our core team. 
+ +Thank you for your continued support, feedback, and collaboration to make Turborepo your build tool of choice. diff --git a/docs/turbo/generators/utils.ts b/docs/turbo/generators/utils.ts new file mode 100644 index 0000000000000..ddba993433a8e --- /dev/null +++ b/docs/turbo/generators/utils.ts @@ -0,0 +1,68 @@ +import fetch from "node-fetch"; + +interface Answers extends Object { + turboStars: string; + turboDownloads: string; + turboYearsSaved: string; +} + +const MINUTES_IN_YEAR = 60 * 24 * 365; + +const PUBLIC_TB_TOKEN = + "p.eyJ1IjogIjAzYzA0Y2MyLTM1YTAtNDhhNC05ZTZjLThhMWE0NGNhNjhkZiIsICJpZCI6ICJmOWIzMTU5Yi0wOTVjLTQyM2UtOWIwNS04ZDZlNzIyNjEwNzIifQ.A3TOPdm3Lhmn-1x5m6jNvulCQbbgUeQfAIO3IaaAt5k"; + +export async function releasePostStats(answers: Answers): Promise { + const [starsResponse, downloadsResponse, timeSavedResponse] = + await Promise.all([ + fetch("https://api.github.com/repos/vercel/turbo"), + fetch("https://api.npmjs.org/versions/turbo/last-week"), + fetch( + `https://api.us-east.tinybird.co/v0/pipes/turborepo_time_saved_ticker.json?token=${PUBLIC_TB_TOKEN}` + ), + ]); + + const [starsData, downloadsData, timeSavedData] = await Promise.all([ + starsResponse.json() as { stargazers_count: number }, + downloadsResponse.json() as { + downloads: Array<{ [key: string]: number }>; + }, + timeSavedResponse.json() as { + data: [ + { + remote_cache_minutes_saved: number; + local_cache_minutes_saved: number; + } + ]; + }, + ]); + + const totalMinutesSaved: number = + timeSavedData.data[0].remote_cache_minutes_saved + + timeSavedData.data[0].local_cache_minutes_saved; + const totalYearsSaved: number = Math.floor( + totalMinutesSaved / MINUTES_IN_YEAR + ); + const weeklyDownloads: number = Object.keys(downloadsData.downloads).reduce( + (sum, version) => sum + downloadsData.downloads[version], + 0 + ); + + console.log(JSON.stringify(weeklyDownloads)); + + const prettyRound = (num: number): string => { + if (num < 1000) { + return num.toString(); + } else if (num < 
1000000) { + return (num / 1000).toFixed(1) + "k"; + } else { + return (num / 1000000).toFixed(1) + "M"; + } + }; + + // extend answers + answers.turboStars = prettyRound(starsData.stargazers_count); + answers.turboDownloads = prettyRound(weeklyDownloads); + answers.turboYearsSaved = prettyRound(totalYearsSaved); + + return "Fetched stats for release post"; +} diff --git a/packages/create-turbo/__tests__/git.test.ts b/packages/create-turbo/__tests__/git.test.ts index b361ee1d99a0e..a4a8e773fcf07 100644 --- a/packages/create-turbo/__tests__/git.test.ts +++ b/packages/create-turbo/__tests__/git.test.ts @@ -124,7 +124,6 @@ describe("git", () => { const { root } = useFixture({ fixture: `git` }); const mockExecSync = jest .spyOn(childProcess, "execSync") - .mockReturnValueOnce("git version 2.38.1") .mockImplementationOnce(() => { throw new Error( "fatal: not a git repository (or any of the parent directories): .git" @@ -139,11 +138,9 @@ describe("git", () => { expect(result).toBe(true); const calls = [ - "git --version", "git init", "git checkout -b main", - "git add -A", - 'git commit -m "test commit"', + 'git commit --author="Turbobot " -am "test commit"', ]; expect(mockExecSync).toHaveBeenCalledTimes(calls.length + 2); calls.forEach((call) => { @@ -160,16 +157,15 @@ describe("git", () => { }); const mockExecSync = jest .spyOn(childProcess, "execSync") - .mockReturnValueOnce("git version 2.38.1") .mockReturnValueOnce("true") .mockReturnValue("success"); const result = tryGitInit(root, "test commit"); expect(result).toBe(false); - const calls = ["git --version"]; + const calls: string[] = []; - // 1 call for git --version, 1 call for isInGitRepository + // 1 call for isInGitRepository expect(mockExecSync).toHaveBeenCalledTimes(calls.length + 1); calls.forEach((call) => { expect(mockExecSync).toHaveBeenCalledWith(call, { @@ -184,13 +180,21 @@ describe("git", () => { const mockExecSync = jest .spyOn(childProcess, "execSync") .mockImplementationOnce(() => { - throw new 
Error("fatal: unknown command git"); + throw new Error( + "fatal: not a git repository (or any of the parent directories): .git" + ); + }) + .mockImplementationOnce(() => { + throw new Error("abort: no repository found (.hg not found)"); + }) + .mockImplementationOnce(() => { + throw new Error("fatal: 128"); }); const result = tryGitInit(root, "test commit"); expect(result).toBe(false); - const calls = ["git --version"]; + const calls: string[] = [GIT_REPO_COMMAND, HG_REPO_COMMAND, "git init"]; expect(mockExecSync).toHaveBeenCalledTimes(calls.length); calls.forEach((call) => { @@ -205,7 +209,6 @@ describe("git", () => { const { root } = useFixture({ fixture: `git` }); const mockExecSync = jest .spyOn(childProcess, "execSync") - .mockReturnValueOnce("git version 2.38.1") .mockImplementationOnce(() => { throw new Error( "fatal: not a git repository (or any of the parent directories): .git" @@ -224,10 +227,9 @@ describe("git", () => { expect(result).toBe(false); const calls = [ - "git --version", "git init", "git checkout -b main", - "git add -A", + 'git commit --author="Turbobot " -am "test commit"', ]; expect(mockExecSync).toHaveBeenCalledTimes(calls.length + 2); diff --git a/packages/create-turbo/package.json b/packages/create-turbo/package.json index 00ad2e0e80715..710273ae7615b 100644 --- a/packages/create-turbo/package.json +++ b/packages/create-turbo/package.json @@ -1,6 +1,6 @@ { "name": "create-turbo", - "version": "1.9.4-canary.11", + "version": "1.9.6", "description": "Create a new Turborepo", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/create-turbo/src/utils/git.ts b/packages/create-turbo/src/utils/git.ts index 7cd5a44fca74d..f0a8a4ce7437a 100644 --- a/packages/create-turbo/src/utils/git.ts +++ b/packages/create-turbo/src/utils/git.ts @@ -51,7 +51,6 @@ export function isInMercurialRepository(): boolean { export function tryGitInit(root: string, message: string): boolean { let didInit = false; try { - execSync("git 
--version", { stdio: "ignore" }); if (isInGitRepository() || isInMercurialRepository()) { return false; } @@ -61,10 +60,7 @@ export function tryGitInit(root: string, message: string): boolean { execSync("git checkout -b main", { stdio: "ignore" }); - execSync("git add -A", { stdio: "ignore" }); - execSync(`git commit -m "${message}"`, { - stdio: "ignore", - }); + gitCommit(message); return true; } catch (err) { if (didInit) { @@ -78,12 +74,18 @@ export function tryGitInit(root: string, message: string): boolean { export function tryGitCommit(message: string): boolean { try { - execSync("git add -A", { stdio: "ignore" }); - execSync(`git commit -m "${message}"`, { - stdio: "ignore", - }); + gitCommit(message); return true; } catch (err) { return false; } } + +function gitCommit(message: string) { + execSync( + `git commit --author="Turbobot " -am "${message}"`, + { + stdio: "ignore", + } + ); +} diff --git a/packages/eslint-config-turbo/package.json b/packages/eslint-config-turbo/package.json index 1fb11648456e8..9edeb9ab1334d 100644 --- a/packages/eslint-config-turbo/package.json +++ b/packages/eslint-config-turbo/package.json @@ -1,6 +1,6 @@ { "name": "eslint-config-turbo", - "version": "1.9.4-canary.11", + "version": "1.9.6", "description": "ESLint config for Turborepo", "repository": { "type": "git", diff --git a/packages/eslint-plugin-turbo/package.json b/packages/eslint-plugin-turbo/package.json index 7181f5693b0c4..45dc3c9d10a05 100644 --- a/packages/eslint-plugin-turbo/package.json +++ b/packages/eslint-plugin-turbo/package.json @@ -1,6 +1,6 @@ { "name": "eslint-plugin-turbo", - "version": "1.9.4-canary.11", + "version": "1.9.6", "description": "ESLint plugin for Turborepo", "keywords": [ "turbo", diff --git a/packages/turbo-codemod/package.json b/packages/turbo-codemod/package.json index 55aa4bce53c55..5f1fef68c5bbd 100644 --- a/packages/turbo-codemod/package.json +++ b/packages/turbo-codemod/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/codemod", - 
"version": "1.9.4-canary.11", + "version": "1.9.6", "description": "Provides Codemod transformations to help upgrade your Turborepo codebase when a feature is deprecated.", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", @@ -36,6 +36,11 @@ "update-check": "^1.5.4" }, "devDependencies": { + "@turbo/gen": "workspace:*", + "@turbo/test-utils": "workspace:*", + "@turbo/tsconfig": "workspace:*", + "@turbo/types": "workspace:*", + "@turbo/utils": "workspace:*", "@types/chalk-animation": "^1.6.0", "@types/diff": "^5.0.2", "@types/fs-extra": "^9.0.13", @@ -51,11 +56,7 @@ "plop": "^3.1.1", "semver": "^7.3.5", "ts-jest": "^27.1.1", - "@turbo/tsconfig": "workspace:*", - "tsup": "^5.10.3", - "@turbo/test-utils": "workspace:*", - "@turbo/types": "workspace:*", - "@turbo/utils": "workspace:*", + "tsup": "^6.7.0", "typescript": "^4.5.5" }, "files": [ diff --git a/packages/turbo-codemod/plopfile.js b/packages/turbo-codemod/turbo/generators/config.ts similarity index 61% rename from packages/turbo-codemod/plopfile.js rename to packages/turbo-codemod/turbo/generators/config.ts index 9cc2dd71beb61..7f4dc119de745 100644 --- a/packages/turbo-codemod/plopfile.js +++ b/packages/turbo-codemod/turbo/generators/config.ts @@ -1,8 +1,12 @@ -const fs = require("fs-extra"); +import path from "path"; +import fs from "fs-extra"; +import type { PlopTypes } from "@turbo/gen"; -module.exports = function plopConfig(plop) { - // controller generator - plop.setGenerator("controller", { +export default function generator( + plop: PlopTypes.NodePlopAPI, + config: PlopTypes.PlopCfg +): void { + plop.setGenerator("transformer", { description: "Add a new transformer", prompts: [ { @@ -34,13 +38,21 @@ module.exports = function plopConfig(plop) { path: "__tests__/{{name}}.test.ts", templateFile: "templates/transformer.test.hbs", }, - function createFixturesDirectory(answers) { - process.chdir(plop.getPlopfilePath()); - const directory = `__tests__/__fixtures__/${answers.name}`; - 
fs.mkdirSync(`__tests__/__fixtures__/${answers.name}`); + function createFixturesDirectory(answers: { name?: string }) { + if (!answers.name) { + return "no name provided, skipping fixture directory creation"; + } + + const directory = path.join( + config.destBasePath, + "__tests__", + "__fixtures__", + answers.name + ); + fs.mkdirSync(directory); return `created empty ${directory} directory for fixtures`; }, ], }); -}; +} diff --git a/packages/turbo-codemod/templates/transformer.hbs b/packages/turbo-codemod/turbo/generators/templates/transformer.hbs similarity index 100% rename from packages/turbo-codemod/templates/transformer.hbs rename to packages/turbo-codemod/turbo/generators/templates/transformer.hbs diff --git a/packages/turbo-codemod/templates/transformer.test.hbs b/packages/turbo-codemod/turbo/generators/templates/transformer.test.hbs similarity index 100% rename from packages/turbo-codemod/templates/transformer.test.hbs rename to packages/turbo-codemod/turbo/generators/templates/transformer.test.hbs diff --git a/packages/turbo-gen/LICENSE b/packages/turbo-gen/LICENSE new file mode 100644 index 0000000000000..fa0086a952236 --- /dev/null +++ b/packages/turbo-gen/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. 
"Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. 
For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. 
+Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. \ No newline at end of file diff --git a/packages/turbo-gen/README.md b/packages/turbo-gen/README.md new file mode 100644 index 0000000000000..0b134cef1acdd --- /dev/null +++ b/packages/turbo-gen/README.md @@ -0,0 +1,66 @@ +# `@turbo/gen` + +> This package is currently in **beta**. Please report any issues you encounter, and give us feedback about your experience using it! + +Easily extend your Turborepo with new apps, and packages. Create new empty workspaces, copy existing workspaces, add workspaces from remote sources (just like `create-turbo`!) or run custom generators defined using [Plop](https://plopjs.com/) configurations. + +## Usage + +```bash +Usage: @turbo/gen [options] [command] + +Extend your Turborepo + +Options: + -v, --version Output the current version + -h, --help Display help for command + +Commands: + add|a [options] Add a new package or app to your project + generate|g [options] [generator-name] Run custom generators + help [command] display help for command +``` + +## Add + +Extend your Turborepo with new apps or packages. Create new empty workspaces, copy existing workspaces, or add workspaces from remote sources (just like `create-turbo`!). + +### Usage + +#### Blank Workspace + +```bash +@turbo/gen add +``` + +#### Copy a Local Workspace + +```bash +@turbo/gen add --copy +``` + +#### Copy a Remote Workspace + +```bash +@turbo/gen add -e +``` + +## Generate + +Extend your Turborepo with custom generators defined using [Plop](https://plopjs.com/) configurations. 
+ +### Usage + +```bash +@turbo/gen generate [generator-name] +``` + +### Writing Generators + +`@turbo/gen` will search the root of your monorepo, and every workspace for generators defined at: + +```bash +turbo/generators/config.js +``` + +**NOTE**: By default, generators are run from the _root_ of the _workspace_ where they are defined. diff --git a/packages/turbo-gen/__tests__/test-utils.ts b/packages/turbo-gen/__tests__/test-utils.ts new file mode 100644 index 0000000000000..fa6c20420e317 --- /dev/null +++ b/packages/turbo-gen/__tests__/test-utils.ts @@ -0,0 +1,34 @@ +import path from "path"; +import { PackageManager } from "@turbo/workspaces"; + +export function getWorkspaceDetailsMockReturnValue({ + root, + packageManager = "npm", +}: { + root: string; + packageManager: PackageManager; +}) { + return { + name: "mock-project", + packageManager, + paths: { + root, + packageJson: path.join(root, "package.json"), + lockfile: path.join(root, "yarn.lock"), + nodeModules: path.join(root, "node_modules"), + }, + workspaceData: { + globs: ["packages/*"], + workspaces: [ + { + name: "packages/mock-package", + paths: { + root: path.join(root, "packages/mock-package"), + packageJson: path.join(root, "packages/mock-package/package.json"), + nodeModules: path.join(root, "packages/mock-package/node_modules"), + }, + }, + ], + }, + }; +} diff --git a/packages/turbo-gen/jest.config.js b/packages/turbo-gen/jest.config.js new file mode 100644 index 0000000000000..b738f4b2bd92b --- /dev/null +++ b/packages/turbo-gen/jest.config.js @@ -0,0 +1,11 @@ +/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */ +module.exports = { + preset: "ts-jest/presets/js-with-ts", + testEnvironment: "node", + testPathIgnorePatterns: ["/__fixtures__/", "/__tests__/test-utils.ts"], + coveragePathIgnorePatterns: ["/__fixtures__/", "/__tests__/test-utils.ts"], + transformIgnorePatterns: ["/node_modules/(?!(ansi-regex)/)"], + modulePathIgnorePatterns: ["/node_modules", "/dist"], + 
collectCoverage: true, + verbose: true, +}; diff --git a/packages/turbo-gen/package.json b/packages/turbo-gen/package.json new file mode 100644 index 0000000000000..22dadd4ab2b50 --- /dev/null +++ b/packages/turbo-gen/package.json @@ -0,0 +1,57 @@ +{ + "name": "@turbo/gen", + "version": "1.9.6", + "description": "Extend a Turborepo", + "homepage": "https://turbo.build/repo", + "license": "MPL-2.0", + "repository": { + "type": "git", + "url": "https://github.com/vercel/turbo", + "directory": "packages/turbo-gen" + }, + "bugs": { + "url": "https://github.com/vercel/turbo/issues" + }, + "bin": "dist/cli.js", + "types": "dist/types.d.ts", + "scripts": { + "build": "tsup", + "lint": "eslint src/**/*.ts", + "check-types": "tsc --noEmit" + }, + "dependencies": { + "chalk": "2.4.2", + "commander": "^10.0.0", + "fs-extra": "^10.1.0", + "inquirer": "^8.2.4", + "minimatch": "^9.0.0", + "node-plop": "^0.26.3", + "semver": "^7.3.8", + "ts-node": "^10.9.1", + "update-check": "^1.5.4", + "validate-npm-package-name": "^5.0.0" + }, + "devDependencies": { + "@turbo/test-utils": "workspace:*", + "@turbo/tsconfig": "workspace:*", + "@turbo/utils": "workspace:*", + "@turbo/workspaces": "workspace:*", + "@types/fs-extra": "^9.0.13", + "@types/inquirer": "^8.2.5", + "@types/jest": "^27.4.0", + "@types/node": "^16.11.12", + "@types/semver": "^7.3.9", + "@types/validate-npm-package-name": "^4.0.0", + "eslint": "^7.23.0", + "jest": "^27.4.3", + "ts-jest": "^27.1.1", + "tsup": "^6.7.0", + "typescript": "^4.5.5" + }, + "files": [ + "dist" + ], + "publishConfig": { + "access": "public" + } +} diff --git a/packages/turbo-gen/src/cli.ts b/packages/turbo-gen/src/cli.ts new file mode 100644 index 0000000000000..7f2d8f18aa464 --- /dev/null +++ b/packages/turbo-gen/src/cli.ts @@ -0,0 +1,128 @@ +#!/usr/bin/env node + +import chalk from "chalk"; +import { Argument, Command, Option } from "commander"; +import notifyUpdate from "./utils/notifyUpdate"; +import { logger } from "@turbo/utils"; + +import { 
add, generate, raw } from "./commands"; +import cliPkg from "../package.json"; +import { GeneratorError } from "./utils/error"; + +const turboGenCli = new Command(); + +turboGenCli + .name(chalk.bold(logger.turboGradient("@turbo/gen"))) + .description("Extend your Turborepo") + .version(cliPkg.version, "-v, --version", "Output the current version") + .helpOption("-h, --help", "Display help for command") + .showHelpAfterError(false); + +turboGenCli + .command("raw", { hidden: true }) + .argument("", "The type of generator to run") + .addOption(new Option("--json ", "Arguments as raw JSON")) + .action(raw); + +turboGenCli + .command("add") + .aliases(["a"]) + .description("Add a new package or app to your project") + .addOption( + new Option("-n, --name ", "Name for the new workspace") + ) + .addOption( + new Option("-b, --empty", "Generate an empty workspace") + .conflicts("copy") + .default(true) + ) + .addOption( + new Option( + "-c, --copy", + "Generate a workspace using an existing workspace as a template" + ).conflicts("empty") + ) + .addOption( + new Option( + "-d, --destination ", + "Where the new workspace should be created" + ) + ) + .addOption( + new Option("-w, --what ", "The type of workspace to create").choices([ + "app", + "package", + ]) + ) + .addOption( + new Option( + "-r, --root ", + "The root of your repository (default: directory with root turbo.json)" + ) + ) + .addOption( + new Option( + "-e, --example [github-url]", + `An example package to add. You can use a GitHub URL with any branch and/or subdirectory.` + ).implies({ copy: true }) + ) + .addOption( + new Option( + "-p, --example-path ", + `In a rare case, your GitHub URL might contain a branch name with +a slash (e.g. bug/fix-1) and the path to the example (e.g. foo/bar). 
+In this case, you must specify the path to the example separately: +--example-path foo/bar +` + ).implies({ copy: true }) + ) + .addOption( + new Option( + "--show-all-dependencies", + "Do not filter available dependencies by the workspace type" + ).default(false) + ) + .action(add); + +turboGenCli + .command("generate") + .aliases(["g", "gen"]) + .description("Run custom generators") + .addArgument( + new Argument("[generator-name]", "The name of the generator to run") + ) + .addOption( + new Option( + "-c, --config ", + "Generator configuration file (default: turbo/generators/config.js" + ) + ) + .addOption( + new Option( + "-r, --root ", + "The root of your repository (default: directory with root turbo.json)" + ) + ) + .addOption( + new Option( + "-a, --args ", + "Arguments passed directly to generator" + ).default([]) + ) + .action(generate); + +turboGenCli + .parseAsync() + .then(notifyUpdate) + .catch(async (error) => { + console.log(); + if (error instanceof GeneratorError) { + logger.error(error.message); + } else { + logger.error("Unexpected error. 
Please report it as a bug:"); + console.log(error.message); + } + console.log(); + await notifyUpdate(); + process.exit(1); + }); diff --git a/packages/turbo-gen/src/commands/add/index.ts b/packages/turbo-gen/src/commands/add/index.ts new file mode 100644 index 0000000000000..1dc7af25095b5 --- /dev/null +++ b/packages/turbo-gen/src/commands/add/index.ts @@ -0,0 +1,38 @@ +import { logger } from "@turbo/utils"; +import { getProject } from "../../utils/getProject"; +import { copy, empty } from "../../generators"; +import { WorkspaceType } from "../../generators/types"; + +export interface TurboGeneratorOptions { + name?: string; + // default to true + empty: boolean; + copy?: boolean; + destination?: string; + what?: WorkspaceType; + root?: string; + example?: string; + examplePath?: string; + // defaults to false + showAllDependencies: boolean; +} + +export async function add(opts: TurboGeneratorOptions) { + const project = await getProject(opts); + + console.log(); + const args = { project, opts }; + if (opts.copy) { + if (opts.example) { + logger.info(`Copy a remote workspace from ${opts.example}`); + } else { + logger.info(`Copy an existing workspace from ${project.name}`); + } + console.log(); + await copy(args); + } else { + logger.info(`Add an empty workspace to ${project.name}`); + console.log(); + await empty(args); + } +} diff --git a/packages/turbo-gen/src/commands/add/prompts.ts b/packages/turbo-gen/src/commands/add/prompts.ts new file mode 100644 index 0000000000000..d8a8893c693ad --- /dev/null +++ b/packages/turbo-gen/src/commands/add/prompts.ts @@ -0,0 +1,266 @@ +import fs from "fs-extra"; +import path from "path"; +import inquirer from "inquirer"; +import { minimatch } from "minimatch"; +import validName from "validate-npm-package-name"; +import type { Project, Workspace } from "@turbo/workspaces"; +import { validateDirectory } from "@turbo/utils"; +import { getWorkspaceStructure } from "../../utils/getWorkspaceStructure"; +import type { WorkspaceType 
} from "../../generators/types"; +import { getWorkspaceList } from "../../utils/getWorkspaceList"; +import type { DependencyGroups, PackageJson } from "../../types"; + +export async function name({ + override, + suggestion, + what, +}: { + override?: string; + suggestion?: string; + what: WorkspaceType; +}): Promise<{ answer: string }> { + const { validForNewPackages } = validName(override || ""); + if (override && validForNewPackages) { + return { answer: override }; + } + return inquirer.prompt<{ answer: string }>({ + type: "input", + name: "answer", + default: suggestion, + validate: (input: string) => { + const { validForNewPackages } = validName(input); + return validForNewPackages || `Invalid ${what} name`; + }, + message: `What is the name of the ${what}?`, + }); +} + +export async function what({ + override, +}: { + override?: WorkspaceType; +}): Promise<{ answer: WorkspaceType }> { + if (override) { + return { answer: override }; + } + + return inquirer.prompt<{ answer: WorkspaceType }>({ + type: "list", + name: "answer", + message: `What type of workspace should be added?`, + choices: [ + { + name: "app", + value: "app", + }, + { + name: "package", + value: "package", + }, + ], + }); +} + +export async function location({ + what, + name, + destination, + project, +}: { + what: "app" | "package"; + name: string; + destination?: string; + project: Project; +}): Promise<{ absolute: string; relative: string }> { + // handle names with scopes + const nameAsPath = name.includes("/") ? 
name.split("/")[1] : name; + + // handle destination option (NOTE: this intentionally allows adding packages to non workspace directories) + if (destination) { + const { valid, root } = validateDirectory(destination); + if (valid) { + return { + absolute: root, + relative: path.relative(project.paths.root, root), + }; + } + } + + // build default name based on what is being added + let newWorkspaceLocation: string | undefined = undefined; + const workspaceStructure = getWorkspaceStructure({ project }); + + if (what === "app" && workspaceStructure.hasRootApps) { + newWorkspaceLocation = `${project.paths.root}/apps/${nameAsPath}`; + } else if (what === "package" && workspaceStructure.hasRootPackages) { + newWorkspaceLocation = `${project.paths.root}/packages/${nameAsPath}`; + } + + const { answer } = await inquirer.prompt<{ + answer: string; + }>({ + type: "input", + name: "answer", + message: `Where should "${name}" be added?`, + default: newWorkspaceLocation + ? path.relative(project.paths.root, newWorkspaceLocation) + : undefined, + validate: (input: string) => { + const base = path.join(project.paths.root, input); + const { valid, error } = validateDirectory(base); + const isWorkspace = project.workspaceData.globs.some((glob) => + minimatch(input, glob) + ); + + if (valid && isWorkspace) { + return true; + } + + if (!isWorkspace) { + return `${input} is not a valid workspace location`; + } + + return error; + }, + }); + + return { + absolute: path.join(project.paths.root, answer), + relative: answer, + }; +} + +export async function source({ + workspaces, + name, +}: { + workspaces: Array; + name: string; +}) { + const sourceAnswer = await inquirer.prompt<{ + answer: Workspace; + }>({ + type: "list", + name: "answer", + loop: false, + pageSize: 25, + message: `Which workspace should "${name}" start from?`, + choices: workspaces.map((choice) => { + if (choice instanceof inquirer.Separator) { + return choice; + } + return { + name: ` ${choice.name}`, + value: 
choice, + }; + }), + }); + + return sourceAnswer; +} + +export async function dependencies({ + name, + project, + source, + showAllDependencies, +}: { + name: string; + project: Project; + source?: Workspace; + showAllDependencies?: boolean; +}) { + const selectedDependencies: DependencyGroups = { + dependencies: {}, + devDependencies: {}, + peerDependencies: {}, + optionalDependencies: {}, + }; + const { answer: addDependencies } = await confirm({ + message: `Add workspace dependencies to "${name}"?`, + }); + if (!addDependencies) { + return selectedDependencies; + } + + const { answer: dependencyGroups } = await inquirer.prompt<{ + answer: Array; + }>({ + type: "checkbox", + name: "answer", + message: `Select all dependencies types to modify for "${name}"`, + loop: false, + choices: [ + { name: "dependencies", value: "dependencies" }, + { name: "devDependencies", value: "devDependencies" }, + { name: "peerDependencies", value: "peerDependencies" }, + { name: "optionalDependencies", value: "optionalDependencies" }, + ], + }); + + // supported workspace dependencies (apps can never be dependencies) + let depChoices = getWorkspaceList({ + project, + what: "package", + showAllDependencies, + }); + + const sourcePackageJson = source + ? 
(fs.readJsonSync(source.paths.packageJson) as PackageJson) +    : undefined; + +  for (let group of dependencyGroups) { +    const { answer: selected } = await inquirer.prompt<{ +      answer: Array<string>; +    }>({ +      type: "checkbox", +      name: "answer", +      default: +        sourcePackageJson && Object.keys(sourcePackageJson?.[group] || {}), +      pageSize: 15, +      message: `Which packages should be added as ${group} to "${name}"?`, +      loop: false, +      choices: depChoices.map((choice) => { +        if (choice instanceof inquirer.Separator) { +          return choice; +        } +        return { +          name: `  ${choice.name}`, +          value: choice.name, +        }; +      }), +    }); + +    const newDependencyGroup = sourcePackageJson?.[group] || {}; +    if (Object.keys(newDependencyGroup).length) { +      const existingDependencyKeys = new Set(Object.keys(newDependencyGroup)); + +      selected.forEach((dep) => { +        if (!existingDependencyKeys.has(dep)) { +          newDependencyGroup[dep] = +            project.packageManager === "pnpm" ? "workspace:*" : "*"; +        } +      }); + +      selectedDependencies[group] = newDependencyGroup; +    } else { +      selectedDependencies[group] = selected.reduce( +        (acc, dep) => ({ +          ...acc, +          [dep]: project.packageManager === "pnpm" ? 
"workspace:*" : "*", + }), + {} + ); + } + } + + return selectedDependencies; +} + +export async function confirm({ message }: { message: string }) { + return await inquirer.prompt<{ answer: boolean }>({ + type: "confirm", + name: "answer", + message, + }); +} diff --git a/packages/turbo-gen/src/commands/generate/index.ts b/packages/turbo-gen/src/commands/generate/index.ts new file mode 100644 index 0000000000000..d61ca72c4d97a --- /dev/null +++ b/packages/turbo-gen/src/commands/generate/index.ts @@ -0,0 +1,22 @@ +import { logger } from "@turbo/utils"; +import { getProject } from "../../utils/getProject"; +import { custom } from "../../generators"; + +export interface CustomGeneratorOptions { + config?: string; + root?: string; + args?: Array; +} + +export async function generate( + generator: string | undefined, + opts: CustomGeneratorOptions +) { + const project = await getProject(opts); + + console.log(); + logger.info(`Modify ${project.name} using custom generators`); + console.log(); + + await custom({ generator, project, opts }); +} diff --git a/packages/turbo-gen/src/commands/generate/prompts.ts b/packages/turbo-gen/src/commands/generate/prompts.ts new file mode 100644 index 0000000000000..36b95df80d662 --- /dev/null +++ b/packages/turbo-gen/src/commands/generate/prompts.ts @@ -0,0 +1,74 @@ +import inquirer from "inquirer"; +import type { Generator } from "../../utils/plop"; +import { logger } from "@turbo/utils"; + +export async function customGenerators({ + generators, + generator, +}: { + generators: Array; + generator?: string; +}) { + if (generator) { + if ( + generators.find( + (g) => !(g instanceof inquirer.Separator) && g.name === generator + ) + ) { + return { + selectedGenerator: generator, + }; + } + + logger.warn(`Generator "${generator}" not found`); + console.log(); + } + + const generatorAnswer = await inquirer.prompt<{ + selectedGenerator: string; + }>({ + type: "list", + name: "selectedGenerator", + message: `Select generator to run`, + 
choices: generators.map((gen) => { + if (gen instanceof inquirer.Separator) { + return gen; + } + return { + name: gen.description + ? ` ${gen.name}: ${gen.description}` + : ` ${gen.name}`, + value: gen.name, + }; + }), + }); + + return generatorAnswer; +} + +export async function chooseGeneratorTemplate() { + return await inquirer.prompt<{ answer: "ts" | "js" }>({ + type: "list", + name: "answer", + message: "Should the generator config be created with TS or JS?", + default: "ts", + choices: [ + { + name: "js", + value: "js", + }, + { + name: "ts", + value: "ts", + }, + ], + }); +} + +export async function confirm({ message }: { message: string }) { + return await inquirer.prompt<{ answer: boolean }>({ + type: "confirm", + name: "answer", + message, + }); +} diff --git a/packages/turbo-gen/src/commands/index.ts b/packages/turbo-gen/src/commands/index.ts new file mode 100644 index 0000000000000..68ce13b2a0043 --- /dev/null +++ b/packages/turbo-gen/src/commands/index.ts @@ -0,0 +1,3 @@ +export { add } from "./add"; +export { generate } from "./generate"; +export { raw } from "./raw"; diff --git a/packages/turbo-gen/src/commands/raw/index.ts b/packages/turbo-gen/src/commands/raw/index.ts new file mode 100644 index 0000000000000..7c15524f22390 --- /dev/null +++ b/packages/turbo-gen/src/commands/raw/index.ts @@ -0,0 +1,37 @@ +import { add, type TurboGeneratorOptions } from "../add"; +import { generate, type CustomGeneratorOptions } from "../generate"; +import { convertCase } from "@turbo/utils"; + +interface MinimalOptions { + generatorName?: string; + [arg: string]: any; +} + +export async function raw(command: string, options: { json: string }) { + let incomingOptions: MinimalOptions = {}; + try { + const parsed = JSON.parse(options.json || "{}"); + // convert keys in parsed to camelCase and add to incomingOptions (if these are coming from rust they're likely kebab) + for (const key in parsed) { + incomingOptions[convertCase(key, { to: "camel" })] = parsed[key]; + } 
+ } catch (err) { + console.error("Error parsing arguments", err); + process.exit(1); + } + + switch (command) { + case "add": + await add(incomingOptions as TurboGeneratorOptions); + break; + case "generate": + const { generatorName } = incomingOptions; + await generate(generatorName, incomingOptions as CustomGeneratorOptions); + break; + default: + console.error( + `Received unknown command - "${command}" (must be one of "add" | "generate")` + ); + process.exit(1); + } +} diff --git a/packages/turbo-gen/src/generators/copy.ts b/packages/turbo-gen/src/generators/copy.ts new file mode 100644 index 0000000000000..ecb6a2b9f07cb --- /dev/null +++ b/packages/turbo-gen/src/generators/copy.ts @@ -0,0 +1,99 @@ +import path from "path"; +import fs from "fs-extra"; +import chalk from "chalk"; +import { CopyFilterAsync } from "fs-extra"; +import { createProject, logger } from "@turbo/utils"; +import { gatherAddRequirements } from "../utils/gatherAddRequirements"; +import type { TurboGeneratorArguments } from "./types"; +import { DependencyGroups, PackageJson } from "../types"; + +export async function generate({ project, opts }: TurboGeneratorArguments) { + const { name, what, location, source, dependencies } = + await gatherAddRequirements({ + project, + opts, + }); + + const newPackageJsonPath = path.join(location.absolute, "package.json"); + + // copying from a remote example + if (opts.example) { + console.log(); + logger.warn("Some manual modifications may be required."); + logger.dimmed( + `This ${what} may require local dependencies or a different package manager than what is available in this repo` + ); + await createProject({ + appPath: location.absolute, + example: opts.example, + examplePath: opts.examplePath, + }); + + try { + if (fs.existsSync(newPackageJsonPath)) { + const packageJson = (await fs.readJSON( + newPackageJsonPath + )) as PackageJson; + if (packageJson.workspaces) { + throw new Error( + "New workspace root detected - unexpected 'workspaces' field 
in package.json" + ); + } + } else { + throw new Error("New workspace is missing a package.json file"); + } + + if (fs.existsSync(path.join(location.absolute, "pnpm-workspace.yaml"))) { + throw new Error( + "New workspace root detected - unexpected pnpm-workspace.yaml" + ); + } + } catch (err) { + let message = "UNKNOWN_ERROR"; + if (err instanceof Error) { + message = err.message; + } + logger.error(message); + + // rollback changes + await fs.rm(location.absolute, { recursive: true, force: true }); + return; + } + } else if (source) { + const filterFunc: CopyFilterAsync = async (src, dest) => { + if (src.includes("node_modules")) { + return false; + } + return true; + }; + + const loader = logger.turboLoader( + `Creating "${name}" from "${source.name}"...` + ); + loader.start(); + await fs.copy(source.paths.root, location.absolute, { + filter: filterFunc, + }); + loader.stop(); + } + + // update package.json with new name + const packageJson = await fs.readJSON(newPackageJsonPath); + packageJson.name = name; + + // update dependencies + Object.keys(dependencies).forEach((group) => { + const deps = dependencies[group as keyof DependencyGroups]; + if (deps && Object.keys(deps).length > 0) { + packageJson[group as keyof DependencyGroups] = deps; + } + }); + await fs.writeJSON(newPackageJsonPath, packageJson, { spaces: 2 }); + + console.log(); + console.log( + `${chalk.bold(logger.turboGradient(">>> Success!"))} Created ${name} at "${ + location.relative + }"` + ); +} diff --git a/packages/turbo-gen/src/generators/custom.ts b/packages/turbo-gen/src/generators/custom.ts new file mode 100644 index 0000000000000..eeb52dd4f7768 --- /dev/null +++ b/packages/turbo-gen/src/generators/custom.ts @@ -0,0 +1,72 @@ +import chalk from "chalk"; +import { logger } from "@turbo/utils"; +import { getCustomGenerators, runCustomGenerator } from "../utils/plop"; +import * as prompts from "../commands/generate/prompts"; +import type { CustomGeneratorArguments } from "./types"; +import { 
GeneratorError } from "../utils/error"; +import { setupFromTemplate } from "../utils/setupFromTemplate"; + +export async function generate({ + generator, + project, + opts, +}: CustomGeneratorArguments) { + let generators = getCustomGenerators({ project, configPath: opts.config }); + if (!generators.length) { + logger.error(`No custom generators found.`); + console.log(); + + const { answer } = await prompts.confirm({ + message: `Would you like to add generators to ${project.name}?`, + }); + + if (answer) { + const { answer: template } = await prompts.chooseGeneratorTemplate(); + try { + await setupFromTemplate({ project, template }); + } catch (err) { + if (err instanceof GeneratorError) { + throw err; + } + logger.error(`Failed to create generator config`); + throw err; + } + + // fetch generators again, and continue to selection prompt + generators = getCustomGenerators({ project, configPath: opts.config }); + } else { + return; + } + } + const { selectedGenerator } = await prompts.customGenerators({ + generators, + generator, + }); + + try { + await runCustomGenerator({ + project, + generator: selectedGenerator, + bypassArgs: opts.args, + configPath: opts.config, + }); + } catch (err) { + // pass any GeneratorErrors through to root + if (err instanceof GeneratorError) { + throw err; + } + + // capture any other errors and throw as GeneratorErrors + let message = "Failed to run custom generator"; + if (err instanceof Error) { + message = err.message; + } + + throw new GeneratorError(message, { + type: "plop_error_running_generator", + }); + } + + console.log(); + console.log(chalk.bold(logger.turboGradient(">>> Success!"))); +} diff --git a/packages/turbo-gen/src/generators/empty.ts b/packages/turbo-gen/src/generators/empty.ts new file mode 100644 index 0000000000000..f14fcf5d3252f --- /dev/null +++ b/packages/turbo-gen/src/generators/empty.ts @@ -0,0 +1,50 @@ +import path from "path"; +import fs from "fs-extra"; +import chalk from "chalk"; +import { logger } 
from "@turbo/utils"; +import { gatherAddRequirements } from "../utils/gatherAddRequirements"; +import type { TurboGeneratorArguments } from "./types"; +import type { PackageJson, DependencyGroups } from "../types"; + +export async function generate({ project, opts }: TurboGeneratorArguments) { + const { name, location, dependencies } = await gatherAddRequirements({ + project, + opts, + }); + + const packageJson: PackageJson = { + name, + version: "0.0.0", + private: true, + scripts: { + build: "turbo build", + }, + }; + + // update dependencies + Object.keys(dependencies).forEach((group) => { + const deps = dependencies[group as keyof DependencyGroups]; + if (deps && Object.keys(deps).length > 0) { + packageJson[group as keyof DependencyGroups] = deps; + } + }); + + // write the directory + fs.mkdirSync(location.absolute, { recursive: true }); + + // create package.json + fs.writeFileSync( + path.join(location.absolute, "package.json"), + JSON.stringify(packageJson, null, 2) + ); + + // create README + fs.writeFileSync(path.join(location.absolute, "README.md"), `# \`${name}\``); + + console.log(); + console.log( + `${chalk.bold(logger.turboGradient(">>> Success!"))} Created ${name} at "${ + location.relative + }"` + ); +} diff --git a/packages/turbo-gen/src/generators/index.ts b/packages/turbo-gen/src/generators/index.ts new file mode 100644 index 0000000000000..7dcbaa801ef72 --- /dev/null +++ b/packages/turbo-gen/src/generators/index.ts @@ -0,0 +1,3 @@ +export { generate as custom } from "./custom"; +export { generate as empty } from "./empty"; +export { generate as copy } from "./copy"; diff --git a/packages/turbo-gen/src/generators/types.ts b/packages/turbo-gen/src/generators/types.ts new file mode 100644 index 0000000000000..8bf87a46f522d --- /dev/null +++ b/packages/turbo-gen/src/generators/types.ts @@ -0,0 +1,16 @@ +import type { Project } from "@turbo/workspaces"; +import type { TurboGeneratorOptions } from "../commands/add"; +import type { 
CustomGeneratorOptions } from "../commands/generate"; + +export type WorkspaceType = "app" | "package"; + +export interface TurboGeneratorArguments { + project: Project; + opts: TurboGeneratorOptions; +} + +export interface CustomGeneratorArguments { + generator: string | undefined; + project: Project; + opts: CustomGeneratorOptions; +} diff --git a/packages/turbo-gen/src/templates/simple-js/config.js b/packages/turbo-gen/src/templates/simple-js/config.js new file mode 100644 index 0000000000000..4b2f3aaa09e41 --- /dev/null +++ b/packages/turbo-gen/src/templates/simple-js/config.js @@ -0,0 +1,31 @@ +module.exports = function generator(plop) { + plop.setGenerator("example", { + description: + "An example Turborepo generator - creates a new file at the root of the project", + prompts: [ + { + type: "input", + name: "file", + message: "What is the name of the file to create?", + }, + { + type: "input", + name: "author", + message: "What is your name? (Will be added as the file author)", + }, + { + type: "list", + name: "type", + message: "What type of file should be created?", + choices: [".md", ".txt"], + }, + ], + actions: [ + { + type: "add", + path: "{{ turbo.paths.root }}/{{ dashCase file }}{{ type }}", + templateFile: "templates/turborepo-generators.hbs", + }, + ], + }); +}; diff --git a/packages/turbo-gen/src/templates/simple-js/templates/turborepo-generators.hbs b/packages/turbo-gen/src/templates/simple-js/templates/turborepo-generators.hbs new file mode 100644 index 0000000000000..2b7e200b2f0e7 --- /dev/null +++ b/packages/turbo-gen/src/templates/simple-js/templates/turborepo-generators.hbs @@ -0,0 +1,5 @@ +# Turborepo Generators + +Read the docs at [turbo.build](https://turbo.build/repo/docs). + +Created by {{ author }}. 
diff --git a/packages/turbo-gen/src/templates/simple-ts/config.ts b/packages/turbo-gen/src/templates/simple-ts/config.ts new file mode 100644 index 0000000000000..ae6a49b933d25 --- /dev/null +++ b/packages/turbo-gen/src/templates/simple-ts/config.ts @@ -0,0 +1,33 @@ +import { PlopTypes } from "@turbo/gen"; + +export default function generator(plop: PlopTypes.NodePlopAPI): void { + plop.setGenerator("example", { + description: + "An example Turborepo generator - creates a new file at the root of the project", + prompts: [ + { + type: "input", + name: "file", + message: "What is the name of the file to create?", + }, + { + type: "input", + name: "author", + message: "What is your name? (Will be added as the file author)", + }, + { + type: "list", + name: "type", + message: "What type of file should be created?", + choices: [".md", ".txt"], + }, + ], + actions: [ + { + type: "add", + path: "{{ turbo.paths.root }}/{{ dashCase file }}{{ type }}", + templateFile: "templates/turborepo-generators.hbs", + }, + ], + }); +} diff --git a/packages/turbo-gen/src/templates/simple-ts/templates/turborepo-generators.hbs b/packages/turbo-gen/src/templates/simple-ts/templates/turborepo-generators.hbs new file mode 100644 index 0000000000000..2b7e200b2f0e7 --- /dev/null +++ b/packages/turbo-gen/src/templates/simple-ts/templates/turborepo-generators.hbs @@ -0,0 +1,5 @@ +# Turborepo Generators + +Read the docs at [turbo.build](https://turbo.build/repo/docs). + +Created by {{ author }}. 
diff --git a/packages/turbo-gen/src/types.ts b/packages/turbo-gen/src/types.ts new file mode 100644 index 0000000000000..e3bfdb9749ddf --- /dev/null +++ b/packages/turbo-gen/src/types.ts @@ -0,0 +1,22 @@ +import type * as PlopTypes from "node-plop"; + +interface DependencyGroups { + dependencies?: Record; + devDependencies?: Record; + peerDependencies?: Record; + optionalDependencies?: Record; +} + +interface PackageJson extends DependencyGroups { + name: string; + version: string; + private?: boolean; + description?: string; + workspaces?: Array | Record>; + main?: string; + module?: string; + exports?: object; + scripts?: Record; +} + +export type { PlopTypes, DependencyGroups, PackageJson }; diff --git a/packages/turbo-gen/src/utils/error.ts b/packages/turbo-gen/src/utils/error.ts new file mode 100644 index 0000000000000..13dc78bab8020 --- /dev/null +++ b/packages/turbo-gen/src/utils/error.ts @@ -0,0 +1,23 @@ +export type GenerateErrorType = + // custom errors + | "plop_error_running_generator" + | "plop_unable_to_load_config" + | "plop_generator_not_found" + | "config_directory_already_exists" + // default + | "unknown"; + +export type GeneratorErrorOptions = { + type?: GenerateErrorType; +}; + +export class GeneratorError extends Error { + public type: GenerateErrorType; + + constructor(message: string, opts?: GeneratorErrorOptions) { + super(message); + this.name = "GenerateError"; + this.type = opts?.type ?? 
"unknown"; + Error.captureStackTrace(this, GeneratorError); + } +} diff --git a/packages/turbo-gen/src/utils/gatherAddRequirements.ts b/packages/turbo-gen/src/utils/gatherAddRequirements.ts new file mode 100644 index 0000000000000..4e1f86b5d7270 --- /dev/null +++ b/packages/turbo-gen/src/utils/gatherAddRequirements.ts @@ -0,0 +1,50 @@ +import { Workspace } from "@turbo/workspaces"; +import type { TurboGeneratorArguments } from "../generators/types"; +import * as prompts from "../commands/add/prompts"; +import { getWorkspaceList } from "./getWorkspaceList"; + +export async function gatherAddRequirements({ + project, + opts, +}: TurboGeneratorArguments) { + let source: Workspace | undefined = undefined; + const { answer: what } = await prompts.what({ override: opts.what }); + + // suggestion for the name based on the (optional) example path + const suggestion = + opts.examplePath?.split("/").pop() || opts.example?.split("/").pop(); + + const { answer: name } = await prompts.name({ + override: opts.name, + what, + suggestion, + }); + if (opts.copy && !opts.example) { + const { answer } = await prompts.source({ + workspaces: getWorkspaceList({ project, what }), + name, + }); + source = answer; + } + const location = await prompts.location({ + what, + name, + project, + destination: opts.destination, + }); + + const dependencies = await prompts.dependencies({ + name, + project, + source, + showAllDependencies: opts.showAllDependencies, + }); + + return { + what, + name, + location, + source, + dependencies, + }; +} diff --git a/packages/turbo-gen/src/utils/getProject.ts b/packages/turbo-gen/src/utils/getProject.ts new file mode 100644 index 0000000000000..e4aabe3b8a46f --- /dev/null +++ b/packages/turbo-gen/src/utils/getProject.ts @@ -0,0 +1,28 @@ +import { getTurboRoot } from "@turbo/utils"; +import { type Project, getWorkspaceDetails } from "@turbo/workspaces"; +import { logger } from "@turbo/utils"; + +interface GetProjectArguments { + root?: string; +} + +export 
async function getProject({ +  root, +}: GetProjectArguments): Promise<Project> { +  const directory = root || process.cwd(); +  const repoRoot = getTurboRoot(directory); + +  if (!repoRoot) { +    logger.error("Unable to infer repository root - override with --root"); +  } else { +    try { +      return await getWorkspaceDetails({ root: repoRoot }); +    } catch (err) { +      logger.error( +        `Unable to determine workspace details. Make sure "${root}" is the root, or add "packageManager" to "package.json" or ensure a lockfile is present.` +      ); +    } +  } + +  process.exit(1); +} diff --git a/packages/turbo-gen/src/utils/getWorkspaceList.ts b/packages/turbo-gen/src/utils/getWorkspaceList.ts new file mode 100644 index 0000000000000..a33fa6e9a7336 --- /dev/null +++ b/packages/turbo-gen/src/utils/getWorkspaceList.ts @@ -0,0 +1,42 @@ +import type { Project, Workspace } from "@turbo/workspaces"; +import inquirer from "inquirer"; +import { +  getWorkspaceStructure, +  getGroupFromWorkspace, +} from "./getWorkspaceStructure"; +import { WorkspaceType } from "../generators/types"; + +export function getWorkspaceList({ +  project, +  what, +  showAllDependencies, +}: { +  project: Project; +  what: WorkspaceType; +  showAllDependencies?: boolean; +}): Array<Workspace | inquirer.Separator> { +  const structure = getWorkspaceStructure({ project }); +  const workspaceChoices: Array<Workspace | inquirer.Separator> = []; + +  let workspacesForDisplay: Array<Workspace> = project.workspaceData.workspaces; +  if (!showAllDependencies) { +    if (what === "app" && structure.hasRootApps) { +      workspacesForDisplay = structure.workspacesByGroup.apps; +    } else if (what === "package" && structure.nonAppWorkspaces.length > 0) { +      workspacesForDisplay = structure.nonAppWorkspaces; +    } +  } + +  // build final list with separators between groups +  let lastGroup: string | undefined; +  workspacesForDisplay.forEach((workspace) => { +    const group = getGroupFromWorkspace({ project, workspace }); +    if (group !== lastGroup) { +      workspaceChoices.push(new inquirer.Separator(group)); +    } +    lastGroup = group; + 
workspaceChoices.push(workspace); + }); + + return workspaceChoices; +} diff --git a/packages/turbo-gen/src/utils/getWorkspaceStructure.ts b/packages/turbo-gen/src/utils/getWorkspaceStructure.ts new file mode 100644 index 0000000000000..22ce6d5df9631 --- /dev/null +++ b/packages/turbo-gen/src/utils/getWorkspaceStructure.ts @@ -0,0 +1,56 @@ +import path from "path"; +import type { Project, Workspace } from "@turbo/workspaces"; +import { getWorkspaceRoots } from "./workspaceRoots"; + +interface WorkspaceStructure { + hasRootApps: boolean; + hasRootPackages: boolean; + workspacesByGroup: Record>; + nonAppWorkspaces: Array; +} + +export function getGroupFromWorkspace({ + project, + workspace, +}: { + project: Project; + workspace: Workspace; +}) { + return path + .relative(project.paths.root, workspace.paths.root) + .split(path.sep)[0]; +} + +export function getWorkspaceStructure({ + project, +}: { + project: Project; +}): WorkspaceStructure { + // get the workspace roots first, any assumptions we make + // should at least be based around configured workspaces + const roots = getWorkspaceRoots({ project }); + const hasRootApps = roots.includes("apps"); + const hasRootPackages = roots.includes("packages"); + + const workspacesByGroup: WorkspaceStructure["workspacesByGroup"] = {}; + const nonAppWorkspaces: WorkspaceStructure["nonAppWorkspaces"] = []; + project.workspaceData.workspaces.forEach((w) => { + const group = getGroupFromWorkspace({ project, workspace: w }); + if (group !== "apps") { + nonAppWorkspaces.push(w); + } + + // add to group + if (!workspacesByGroup[group]) { + workspacesByGroup[group] = []; + } + workspacesByGroup[group].push(w); + }); + + return { + hasRootApps, + hasRootPackages, + workspacesByGroup, + nonAppWorkspaces, + }; +} diff --git a/packages/turbo-gen/src/utils/notifyUpdate.ts b/packages/turbo-gen/src/utils/notifyUpdate.ts new file mode 100644 index 0000000000000..cbb4e034d475e --- /dev/null +++ b/packages/turbo-gen/src/utils/notifyUpdate.ts 
@@ -0,0 +1,24 @@ +import chalk from "chalk"; +import checkForUpdate from "update-check"; + +import cliPkgJson from "../../package.json"; + +const update = checkForUpdate(cliPkgJson).catch(() => null); + +export default async function notifyUpdate(): Promise { + try { + const res = await update; + if (res?.latest) { + console.log(); + console.log( + chalk.yellow.bold( + `A new version of \`${cliPkgJson.name}\` is available!` + ) + ); + console.log(); + } + process.exit(); + } catch (_e: any) { + // ignore error + } +} diff --git a/packages/turbo-gen/src/utils/plop.ts b/packages/turbo-gen/src/utils/plop.ts new file mode 100644 index 0000000000000..8317e6af1b516 --- /dev/null +++ b/packages/turbo-gen/src/utils/plop.ts @@ -0,0 +1,285 @@ +import fs from "fs-extra"; +import { Project } from "@turbo/workspaces"; +import nodePlop, { NodePlopAPI, PlopGenerator } from "node-plop"; +import { register } from "ts-node"; +import path from "path"; +import inquirer from "inquirer"; +import { searchUp, getTurboConfigs, logger } from "@turbo/utils"; +import { GeneratorError } from "./error"; + +const SUPPORTED_CONFIG_EXTENSIONS = ["ts", "js", "cjs"]; +const TURBO_GENERATOR_DIRECTORY = path.join("turbo", "generators"); + +// config formats that will be automatically loaded from within workspaces +const SUPPORTED_WORKSPACE_GENERATOR_CONFIGS = SUPPORTED_CONFIG_EXTENSIONS.map( + (ext) => path.join(TURBO_GENERATOR_DIRECTORY, `config.${ext}`) +); + +// config formats that will be automatically loaded from the root (support plopfiles so that users with existing configurations can use them immediately) +const SUPPORTED_ROOT_GENERATOR_CONFIGS = [ + ...SUPPORTED_WORKSPACE_GENERATOR_CONFIGS, + ...SUPPORTED_CONFIG_EXTENSIONS.map((ext) => path.join(`plopfile.${ext}`)), +]; + +export type Generator = PlopGenerator & { + basePath: string; + name: string; +}; + +// init ts-node for plop to support ts configs +register({ + transpileOnly: true, +}); + +export function getPlop({ + project, + 
configPath, +}: { + project: Project; + configPath?: string; +}): NodePlopAPI | undefined { + // fetch all the workspace generator configs + const workspaceConfigs = getWorkspaceGeneratorConfigs({ project }); + let plop: NodePlopAPI | undefined = undefined; + + if (configPath) { + try { + plop = nodePlop(configPath, { + destBasePath: configPath, + force: false, + }); + } catch (e) { + // skip + } + } else { + // look for a root config + for (const configPath of SUPPORTED_ROOT_GENERATOR_CONFIGS) { + const plopFile = path.join(project.paths.root, configPath); + try { + plop = nodePlop(plopFile, { + destBasePath: project.paths.root, + force: false, + }); + break; + } catch (e) { + // skip + } + } + + if (!plop && workspaceConfigs.length > 0) { + // if no root config, use the first workspace config as the entrypoint + plop = nodePlop(workspaceConfigs[0].config, { + destBasePath: workspaceConfigs[0].root, + force: false, + }); + workspaceConfigs.shift(); + } + } + + if (plop) { + // add in all the workspace configs + workspaceConfigs.forEach((c) => { + try { + plop?.load(c.config, { + destBasePath: c.root, + force: false, + }); + } catch (e) { + console.error(e); + } + }); + } + + return plop; +} + +export function getCustomGenerators({ + project, + configPath, +}: { + project: Project; + configPath?: string; +}): Array { + const plop = getPlop({ project, configPath }); + + if (!plop) { + return []; + } + + const gens = plop.getGeneratorList(); + const gensWithDetails = gens.map((g) => plop.getGenerator(g.name)); + + // group by workspace + const gensByWorkspace: Record> = {}; + gensWithDetails.forEach((g) => { + const generatorDetails = g as Generator; + const gensWorkspace = project.workspaceData.workspaces.find((w) => { + if (generatorDetails.basePath === project.paths.root) { + return false; + } + // we can strip two directories to get the workspace root + const parts = generatorDetails.basePath.split(path.sep); + // generators + parts.pop(); + // turbo + 
parts.pop(); + const workspaceRoot = path.join("/", ...parts); + return workspaceRoot == w.paths.root; + }); + + if (gensWorkspace) { + if (!gensByWorkspace[gensWorkspace.name]) { + gensByWorkspace[gensWorkspace.name] = []; + } + gensByWorkspace[gensWorkspace.name].push(generatorDetails); + } else { + if (!gensByWorkspace["root"]) { + gensByWorkspace["root"] = []; + } + gensByWorkspace["root"].push(generatorDetails); + } + }); + + // add in separators to group by workspace + const gensWithSeparators: Array = []; + const lastGroup = undefined; + Object.keys(gensByWorkspace).forEach((group) => { + if (group !== lastGroup) { + gensWithSeparators.push(new inquirer.Separator(group)); + } + gensWithSeparators.push(...gensByWorkspace[group]); + }); + + return gensWithSeparators; +} + +export function getCustomGenerator({ + project, + generator, + configPath, +}: { + project: Project; + generator: string; + configPath?: string; +}): string | undefined { + const plop = getPlop({ project, configPath }); + if (!plop) { + return undefined; + } + + try { + const gen = plop.getGenerator(generator); + if (gen) { + return generator; + } + return undefined; + } catch (e) { + return undefined; + } +} + +function injectTurborepoData({ + project, + generator, +}: { + project: Project; + generator: PlopGenerator & { basePath?: string }; +}) { + const paths = { + root: project.paths.root, + workspace: generator.basePath + ? 
searchUp({ cwd: generator.basePath, target: "package.json" }) + : undefined, + }; + let turboConfigs = {}; + try { + turboConfigs = getTurboConfigs(generator.basePath); + } catch (e) { + // ignore + } + + return { + turbo: { + paths, + configs: turboConfigs, + }, + }; +} + +function getWorkspaceGeneratorConfigs({ project }: { project: Project }) { + const workspaceGeneratorConfigs: Array<{ + config: string; + root: string; + }> = []; + project.workspaceData.workspaces.forEach((w) => { + for (const configPath of SUPPORTED_WORKSPACE_GENERATOR_CONFIGS) { + if (fs.existsSync(path.join(w.paths.root, configPath))) { + workspaceGeneratorConfigs.push({ + config: path.join(w.paths.root, configPath), + root: w.paths.root, + }); + } + } + }); + return workspaceGeneratorConfigs; +} + +export async function runCustomGenerator({ + project, + generator, + bypassArgs, + configPath, +}: { + project: Project; + generator: string; + bypassArgs?: Array; + configPath?: string; +}): Promise { + const plop = getPlop({ project, configPath }); + if (!plop) { + throw new GeneratorError("Unable to load generators", { + type: "plop_unable_to_load_config", + }); + } + const gen: PlopGenerator & { basePath?: string } = + plop.getGenerator(generator); + + if (!gen) { + throw new GeneratorError(`Generator ${generator} not found`, { + type: "plop_generator_not_found", + }); + } + + const answers = await gen.runPrompts(bypassArgs); + const results = await gen.runActions( + { ...answers, ...injectTurborepoData({ project, generator: gen }) }, + { + onComment: (comment: string) => { + console.info(comment); + }, + } + ); + + if (results.failures && results.failures.length > 0) { + // log all errors: + results.failures.forEach((f) => { + if (f instanceof Error) { + logger.error(`Error - ${f.message}`); + } else { + logger.error(`Error - ${f.error}. 
Unable to ${f.type} to "${f.path}"`); + } + }); + throw new GeneratorError(`Failed to run "${generator}" generator`, { + type: "plop_error_running_generator", + }); + } + + if (results.changes && results.changes.length > 0) { + logger.info("Changes made:"); + results.changes.forEach((c) => { + if (c.path) { + logger.item(`${c.path} (${c.type})`); + } + }); + } +} diff --git a/packages/turbo-gen/src/utils/setupFromTemplate.ts b/packages/turbo-gen/src/utils/setupFromTemplate.ts new file mode 100644 index 0000000000000..5be65fca32b44 --- /dev/null +++ b/packages/turbo-gen/src/utils/setupFromTemplate.ts @@ -0,0 +1,30 @@ +import type { Project } from "@turbo/workspaces"; +import path from "path"; +import fs from "fs-extra"; +import { GeneratorError } from "./error"; + +export async function setupFromTemplate({ + project, + template, +}: { + project: Project; + template: "ts" | "js"; +}) { + const configDirectory = path.join(project.paths.root, "turbo", "generators"); + + // TODO: could create some more complex starters in the future + const toCopy = `simple-${template}`; + + // required to ensure we don't overwrite any existing files at this location + if (await fs.pathExists(configDirectory)) { + throw new GeneratorError( + `Generator config directory already exists at ${configDirectory}`, + { type: "config_directory_already_exists" } + ); + } + + // copy templates to project + await fs.copy(path.join(__dirname, "templates", toCopy), configDirectory, { + recursive: true, + }); +} diff --git a/packages/turbo-gen/src/utils/workspaceRoots.ts b/packages/turbo-gen/src/utils/workspaceRoots.ts new file mode 100644 index 0000000000000..0b19ee78b9ac5 --- /dev/null +++ b/packages/turbo-gen/src/utils/workspaceRoots.ts @@ -0,0 +1,32 @@ +import path from "path"; +import type { Project } from "@turbo/workspaces"; + +// This function is not perfect and could be improved to be more accurate. 
+// Given a list of workspace globs, it aims to return a selectable list of paths that are valid workspace locations. +// This current naive approach does not work with globs that contain nested wildcards, for example: `packages/*/utils` will not work. +export function getWorkspaceRoots({ + project, +}: { + project: Project; +}): Array { + const allWorkspaces = project.workspaceData.workspaces; + const allWorkspacePaths = allWorkspaces.map((workspace) => + path.relative(project.paths.root, workspace.paths.root) + ); + + // find valid workspace locations + const workspaceRoots = new Set(); + project.workspaceData.globs.forEach((glob) => { + if (allWorkspacePaths.includes(glob)) { + return; + } else if (glob.startsWith("!")) { + return; + } else { + const globParts = glob.split("/"); + const globRoot = globParts[0]; + workspaceRoots.add(globRoot); + } + }); + + return Array.from(workspaceRoots); +} diff --git a/packages/turbo-gen/tsconfig.json b/packages/turbo-gen/tsconfig.json new file mode 100644 index 0000000000000..e218ac72fdcfb --- /dev/null +++ b/packages/turbo-gen/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "@turbo/tsconfig/library.json", + "exclude": ["src/templates", "dist", "node_modules"], + "compilerOptions": { + "rootDir": "." 
+ } +} diff --git a/packages/turbo-gen/tsup.config.ts b/packages/turbo-gen/tsup.config.ts new file mode 100644 index 0000000000000..9e4678cd7221d --- /dev/null +++ b/packages/turbo-gen/tsup.config.ts @@ -0,0 +1,23 @@ +import { defineConfig, Options } from "tsup"; +import fs from "fs-extra"; +import chalk from "chalk"; + +export default defineConfig((options: Options) => ({ + entry: ["src/cli.ts", "src/types.ts"], + format: ["cjs"], + dts: true, + clean: true, + minify: true, + onSuccess: async () => { + // start time + const start = Date.now(); + await fs.copy("src/templates", "dist/templates"); + // make the output match + console.log( + chalk.hex("#7c5cad")("TEMPLATES"), + "copied in", + chalk.green(`${Date.now() - start}ms`) + ); + }, + ...options, +})); diff --git a/packages/turbo-gen/turbo.json b/packages/turbo-gen/turbo.json new file mode 100644 index 0000000000000..6466b2d9e7ea9 --- /dev/null +++ b/packages/turbo-gen/turbo.json @@ -0,0 +1,12 @@ +{ + "$schema": "../../docs/public/schema.json", + "extends": ["//"], + "pipeline": { + "test": { + "dependsOn": ["build"] + }, + "build": { + "dependsOn": ["^build"] + } + } +} diff --git a/packages/turbo-ignore/package.json b/packages/turbo-ignore/package.json index 8977faf91e39e..276fe1fc9350e 100644 --- a/packages/turbo-ignore/package.json +++ b/packages/turbo-ignore/package.json @@ -1,6 +1,6 @@ { "name": "turbo-ignore", - "version": "1.9.4-canary.11", + "version": "1.9.6", "description": "", "homepage": "https://turbo.build/repo", "keywords": [], diff --git a/packages/turbo-utils/__tests__/convertCase.test.ts b/packages/turbo-utils/__tests__/convertCase.test.ts new file mode 100644 index 0000000000000..a52ccfd180160 --- /dev/null +++ b/packages/turbo-utils/__tests__/convertCase.test.ts @@ -0,0 +1,23 @@ +import { convertCase, type CaseOptions } from "../src/convertCase"; + +interface TestCase { + input: string; + expected: string; + to: CaseOptions["to"]; +} + +describe("convertCase", () => { + const testCases: 
Array = [ + { input: "hello_world", expected: "helloWorld", to: "camel" }, + { input: "hello-world", expected: "helloWorld", to: "camel" }, + { input: "helloWorld", expected: "helloWorld", to: "camel" }, + { input: "helloworld", expected: "helloworld", to: "camel" }, + ]; + + it.each(testCases)( + "should convert '$input' to '$to'", + ({ input, expected, to }) => { + expect(convertCase(input, { to })).toBe(expected); + } + ); +}); diff --git a/packages/turbo-utils/src/convertCase.ts b/packages/turbo-utils/src/convertCase.ts new file mode 100644 index 0000000000000..9adb6190373a2 --- /dev/null +++ b/packages/turbo-utils/src/convertCase.ts @@ -0,0 +1,14 @@ +export interface CaseOptions { + to: "camel" | "pascal" | "kebab" | "snake"; +} + +export function convertCase(str: string, opts: CaseOptions = { to: "camel" }) { + switch (opts.to) { + case "camel": + return str.replace(/([-_][a-z])/g, (group) => + group.toUpperCase().replace("-", "").replace("_", "") + ); + default: + throw new Error("Not implemented"); + } +} diff --git a/packages/turbo-utils/src/createProject.ts b/packages/turbo-utils/src/createProject.ts index f996825b75eb8..5221168148b5a 100644 --- a/packages/turbo-utils/src/createProject.ts +++ b/packages/turbo-utils/src/createProject.ts @@ -37,60 +37,69 @@ export async function createProject({ let repoInfo: RepoInfo | undefined; let repoUrl: URL | undefined; - try { - repoUrl = new URL(example); - } catch (err: any) { - if (err.code !== "ERR_INVALID_URL") { - error(err); - process.exit(1); - } - } - - if (repoUrl) { - if (repoUrl.origin !== "https://github.com") { - error( - `Invalid URL: ${chalk.red( - `"${example}"` - )}. Only GitHub repositories are supported. Please use a GitHub URL and try again.` - ); - process.exit(1); - } - - repoInfo = await getRepoInfo(repoUrl, examplePath); - - if (!repoInfo) { - error( - `Unable to fetch repository information from: ${chalk.red( - `"${example}"` - )}. 
Please fix the URL and try again.` - ); - process.exit(1); + if (isDefaultExample) { + repoInfo = { + username: "vercel", + name: "turbo", + branch: "main", + filePath: "examples/basic", + }; + } else { + try { + repoUrl = new URL(example); + } catch (err: any) { + if (err.code !== "ERR_INVALID_URL") { + error(err); + process.exit(1); + } } - const found = await hasRepo(repoInfo); - - if (!found) { - error( - `Could not locate the repository for ${chalk.red( - `"${example}"` - )}. Please check that the repository exists and try again.` - ); - process.exit(1); - } - } else { - const found = await existsInRepo(example); - - if (!found) { - error( - `Could not locate an example named ${chalk.red( - `"${example}"` - )}. It could be due to the following:\n`, - `1. Your spelling of example ${chalk.red( - `"${example}"` - )} might be incorrect.\n`, - `2. You might not be connected to the internet or you are behind a proxy.` - ); - process.exit(1); + if (repoUrl) { + if (repoUrl.origin !== "https://github.com") { + error( + `Invalid URL: ${chalk.red( + `"${example}"` + )}. Only GitHub repositories are supported. Please use a GitHub URL and try again.` + ); + process.exit(1); + } + + repoInfo = await getRepoInfo(repoUrl, examplePath); + + if (!repoInfo) { + error( + `Unable to fetch repository information from: ${chalk.red( + `"${example}"` + )}. Please fix the URL and try again.` + ); + process.exit(1); + } + + const found = await hasRepo(repoInfo); + + if (!found) { + error( + `Could not locate the repository for ${chalk.red( + `"${example}"` + )}. Please check that the repository exists and try again.` + ); + process.exit(1); + } + } else { + const found = await existsInRepo(example); + + if (!found) { + error( + `Could not locate an example named ${chalk.red( + `"${example}"` + )}. It could be due to the following:\n`, + `1. Your spelling of example ${chalk.red( + `"${example}"` + )} might be incorrect.\n`, + `2. 
You might not be connected to the internet or you are behind a proxy.` + ); + process.exit(1); + } } } diff --git a/packages/turbo-utils/src/index.ts b/packages/turbo-utils/src/index.ts index 5a091808363ca..0f401ac2b90e8 100644 --- a/packages/turbo-utils/src/index.ts +++ b/packages/turbo-utils/src/index.ts @@ -15,9 +15,11 @@ export { } from "./examples"; export { isWriteable } from "./isWriteable"; export { createProject } from "./createProject"; +export { convertCase } from "./convertCase"; export * as logger from "./logger"; // types export type { PackageManagerAvailable } from "./managers"; export type { RepoInfo } from "./examples"; +export type { TurboConfigs } from "./getTurboConfigs"; diff --git a/packages/turbo-utils/src/logger.ts b/packages/turbo-utils/src/logger.ts index ee6d5845b27a1..fc8e7a357fb17 100644 --- a/packages/turbo-utils/src/logger.ts +++ b/packages/turbo-utils/src/logger.ts @@ -30,3 +30,11 @@ export const error = (...args: any[]) => { export const warn = (...args: any[]) => { console.error(yellow.bold(">>>"), ...args); }; + +export const dimmed = (...args: any[]) => { + console.log(chalk.dim(...args)); +}; + +export const item = (...args: any[]) => { + console.log(turboBlue.bold(" •"), ...args); +}; diff --git a/packages/turbo-workspaces/package.json b/packages/turbo-workspaces/package.json index 5fcac249bc27c..1a0f0bc1d619f 100644 --- a/packages/turbo-workspaces/package.json +++ b/packages/turbo-workspaces/package.json @@ -1,6 +1,6 @@ { "name": "@turbo/workspaces", - "version": "1.9.4-canary.11", + "version": "1.9.6", "description": "Tools for working with package managers", "homepage": "https://turbo.build/repo", "license": "MPL-2.0", diff --git a/packages/turbo-workspaces/src/install.ts b/packages/turbo-workspaces/src/install.ts index ad5f74185feb1..c1c15334179c5 100644 --- a/packages/turbo-workspaces/src/install.ts +++ b/packages/turbo-workspaces/src/install.ts @@ -40,7 +40,7 @@ export const PACKAGE_MANAGERS: Record< name: "pnpm", 
template: "pnpm", command: "pnpm", - installArgs: ["install"], + installArgs: ["install", "--fix-lockfile"], version: "latest", executable: "pnpm dlx", semver: ">=7", diff --git a/packages/turbo/package.json b/packages/turbo/package.json index dabb0f5245a46..6f91b42d49ee1 100644 --- a/packages/turbo/package.json +++ b/packages/turbo/package.json @@ -1,6 +1,6 @@ { "name": "turbo", - "version": "1.9.4-canary.11", + "version": "1.9.6", "description": "Turborepo is a high-performance build system for JavaScript and TypeScript codebases.", "repository": "https://github.com/vercel/turbo", "bugs": "https://github.com/vercel/turbo/issues", @@ -19,11 +19,11 @@ "install.js" ], "optionalDependencies": { - "turbo-darwin-64": "1.9.4-canary.11", - "turbo-darwin-arm64": "1.9.4-canary.11", - "turbo-linux-64": "1.9.4-canary.11", - "turbo-linux-arm64": "1.9.4-canary.11", - "turbo-windows-64": "1.9.4-canary.11", - "turbo-windows-arm64": "1.9.4-canary.11" + "turbo-darwin-64": "1.9.6", + "turbo-darwin-arm64": "1.9.6", + "turbo-linux-64": "1.9.6", + "turbo-linux-arm64": "1.9.6", + "turbo-windows-64": "1.9.6", + "turbo-windows-arm64": "1.9.6" } } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 3d6aac2253c23..fa05b97dd27ad 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -147,6 +147,7 @@ importers: '@react-aria/ssr': 3.3.0 '@react-spring/web': ^9.6.1 '@sentry/nextjs': ^7.50.0 + '@turbo/gen': workspace:* '@turbo/types': workspace:* '@types/node': ^16.11.12 '@types/react': 18.2.0 @@ -166,6 +167,7 @@ importers: next-themes: ^0.2.0 nextra: ^2.1.0 nextra-theme-docs: ^2.1.0 + node-fetch: ^2.6.8 postcss: 8.4.21 react: ^18.2.0 react-dom: ^18.2.0 @@ -191,12 +193,14 @@ importers: next-themes: 0.2.0_vwuoxxmoeydgew5hhkavebkk4e nextra: 2.1.0_vwuoxxmoeydgew5hhkavebkk4e nextra-theme-docs: 2.1.0_vwuoxxmoeydgew5hhkavebkk4e + node-fetch: 2.6.9 react: 18.2.0 react-dom: 18.2.0_react@18.2.0 sharp: 0.32.1 swr: 1.3.0_react@18.2.0 devDependencies: '@babel/core': 7.20.12 + '@turbo/gen': 
link:../packages/turbo-gen '@turbo/types': link:../packages/turbo-types '@types/node': 16.11.44 '@types/react': 18.2.0 @@ -315,6 +319,7 @@ importers: packages/turbo-codemod: specifiers: + '@turbo/gen': workspace:* '@turbo/test-utils': workspace:* '@turbo/tsconfig': workspace:* '@turbo/types': workspace:* @@ -345,7 +350,7 @@ importers: plop: ^3.1.1 semver: ^7.3.7 ts-jest: ^27.1.1 - tsup: ^5.10.3 + tsup: ^6.7.0 typescript: ^4.5.5 update-check: ^1.5.4 dependencies: @@ -363,6 +368,7 @@ importers: semver: 7.3.8 update-check: 1.5.4 devDependencies: + '@turbo/gen': link:../turbo-gen '@turbo/test-utils': link:../turbo-test-utils '@turbo/tsconfig': link:../tsconfig '@turbo/types': link:../turbo-types @@ -380,10 +386,65 @@ importers: eslint: 7.32.0 jest: 27.5.1 plop: 3.1.1 - ts-jest: 27.1.5_33teeovbg7qyb2atewgc6tchgq - tsup: 5.12.9_typescript@4.7.4 + ts-jest: 27.1.5_ew7e4eh5vevza5kamuvovgl5kq + tsup: 6.7.0_typescript@4.7.4 typescript: 4.7.4 + packages/turbo-gen: + specifiers: + '@turbo/test-utils': workspace:* + '@turbo/tsconfig': workspace:* + '@turbo/utils': workspace:* + '@turbo/workspaces': workspace:* + '@types/fs-extra': ^9.0.13 + '@types/inquirer': ^8.2.5 + '@types/jest': ^27.4.0 + '@types/node': ^16.11.12 + '@types/semver': ^7.3.9 + '@types/validate-npm-package-name': ^4.0.0 + chalk: 2.4.2 + commander: ^10.0.0 + eslint: ^7.23.0 + fs-extra: ^10.1.0 + inquirer: ^8.2.4 + jest: ^27.4.3 + minimatch: ^9.0.0 + node-plop: ^0.26.3 + semver: ^7.3.8 + ts-jest: ^27.1.1 + ts-node: ^10.9.1 + tsup: ^6.7.0 + typescript: ^4.5.5 + update-check: ^1.5.4 + validate-npm-package-name: ^5.0.0 + dependencies: + chalk: 2.4.2 + commander: 10.0.0 + fs-extra: 10.1.0 + inquirer: 8.2.4 + minimatch: 9.0.0 + node-plop: 0.26.3 + semver: 7.5.0 + ts-node: 10.9.1_53e5n3kefom5jmudvwxecmm4oi + update-check: 1.5.4 + validate-npm-package-name: 5.0.0 + devDependencies: + '@turbo/test-utils': link:../turbo-test-utils + '@turbo/tsconfig': link:../tsconfig + '@turbo/utils': link:../turbo-utils + 
'@turbo/workspaces': link:../turbo-workspaces + '@types/fs-extra': 9.0.13 + '@types/inquirer': 8.2.5 + '@types/jest': 27.5.2 + '@types/node': 16.11.56 + '@types/semver': 7.3.12 + '@types/validate-npm-package-name': 4.0.0 + eslint: 7.32.0 + jest: 27.5.1_ts-node@10.9.1 + ts-jest: 27.1.5_fu5qd3dwfwo63mklk7zcmwwv6q + tsup: 6.7.0_z6wznmtyb6ovnulj6iujpct7um + typescript: 4.9.4 + packages/turbo-ignore: specifiers: '@turbo/test-utils': workspace:^0.0.0 @@ -974,13 +1035,12 @@ packages: dependencies: core-js-pure: 3.26.1 regenerator-runtime: 0.13.11 - dev: true /@babel/runtime/7.18.9: resolution: {integrity: sha512-lkqXDcvlFT5rvEjiu6+QYO+1GXrEHRo2LOtS7E4GtX5ESIZOgepqsZBVIj6Pv+a6zqsya9VCgiK1KAK4BvJDAw==} engines: {node: '>=6.9.0'} dependencies: - regenerator-runtime: 0.13.9 + regenerator-runtime: 0.13.11 /@babel/runtime/7.20.6: resolution: {integrity: sha512-Q+8MqP7TiHMWzSfwiJwXCjyf4GYA4Dgw3emg/7xmwsdLJOZUp+nMqcOwOzzYheuM1rhDu8FSj2l0aoMygEuXuA==} @@ -1050,6 +1110,12 @@ packages: resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} dev: true + /@cspotcode/source-map-support/0.8.1: + resolution: {integrity: sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==} + engines: {node: '>=12'} + dependencies: + '@jridgewell/trace-mapping': 0.3.9 + /@emotion/is-prop-valid/0.8.8: resolution: {integrity: sha512-u5WtneEAr5IDG2Wv65yhunPSMLIpuKsbuOktRojfrEiEvRyC85LgPMZI63cr7NUqT8ZIGdSVg8ZKGxIug4lXcA==} requiresBuild: true @@ -1441,7 +1507,7 @@ packages: engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 chalk: 4.1.2 jest-message-util: 27.5.1 jest-util: 27.5.1 @@ -1493,13 +1559,58 @@ packages: - utf-8-validate dev: true + /@jest/core/27.5.1_ts-node@10.9.1: + resolution: {integrity: sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ==} + engines: 
{node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/console': 27.5.1 + '@jest/reporters': 27.5.1 + '@jest/test-result': 27.5.1 + '@jest/transform': 27.5.1 + '@jest/types': 27.5.1 + '@types/node': 18.13.0 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.8.1 + exit: 0.1.2 + graceful-fs: 4.2.10 + jest-changed-files: 27.5.1 + jest-config: 27.5.1_ts-node@10.9.1 + jest-haste-map: 27.5.1 + jest-message-util: 27.5.1 + jest-regex-util: 27.5.1 + jest-resolve: 27.5.1 + jest-resolve-dependencies: 27.5.1 + jest-runner: 27.5.1 + jest-runtime: 27.5.1 + jest-snapshot: 27.5.1 + jest-util: 27.5.1 + jest-validate: 27.5.1 + jest-watcher: 27.5.1 + micromatch: 4.0.5 + rimraf: 3.0.2 + slash: 3.0.0 + strip-ansi: 6.0.1 + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /@jest/environment/27.5.1: resolution: {integrity: sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 jest-mock: 27.5.1 dev: true @@ -1509,7 +1620,7 @@ packages: dependencies: '@jest/types': 27.5.1 '@sinonjs/fake-timers': 8.1.0 - '@types/node': 16.11.56 + '@types/node': 18.13.0 jest-message-util: 27.5.1 jest-mock: 27.5.1 jest-util: 27.5.1 @@ -1538,7 +1649,7 @@ packages: '@jest/test-result': 27.5.1 '@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 chalk: 4.1.2 collect-v8-coverage: 1.0.1 exit: 0.1.2 @@ -1622,7 +1733,7 @@ packages: dependencies: '@types/istanbul-lib-coverage': 2.0.4 '@types/istanbul-reports': 3.0.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 '@types/yargs': 16.0.5 chalk: 4.1.2 dev: true @@ -1666,6 +1777,12 @@ packages: 
'@jridgewell/resolve-uri': 3.1.0 '@jridgewell/sourcemap-codec': 1.4.14 + /@jridgewell/trace-mapping/0.3.9: + resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + dependencies: + '@jridgewell/resolve-uri': 3.1.0 + '@jridgewell/sourcemap-codec': 1.4.14 + /@manypkg/find-root/1.1.0: resolution: {integrity: sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} dependencies: @@ -2524,6 +2641,18 @@ packages: engines: {node: '>= 6'} dev: true + /@tsconfig/node10/1.0.9: + resolution: {integrity: sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==} + + /@tsconfig/node12/1.0.11: + resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} + + /@tsconfig/node14/1.0.3: + resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} + + /@tsconfig/node16/1.0.4: + resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} + /@types/acorn/4.0.6: resolution: {integrity: sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==} dependencies: @@ -2570,7 +2699,7 @@ packages: dependencies: '@types/http-cache-semantics': 4.0.1 '@types/keyv': 3.1.4 - '@types/node': 16.11.56 + '@types/node': 18.13.0 '@types/responselike': 1.0.0 dev: true @@ -2622,7 +2751,7 @@ packages: /@types/fs-extra/9.0.13: resolution: {integrity: sha512-nEnwB++1u5lVDM2UI4c1+5R+FYaKfaAzS4OococimjVm3nQw3TuzH5UNsocrcTBbhnerblyHj4A49qXbIiZdpA==} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 dev: true /@types/glob/7.2.0: @@ -2630,12 +2759,11 @@ packages: dependencies: '@types/minimatch': 5.1.1 '@types/node': 16.11.56 - dev: true /@types/graceful-fs/4.1.6: resolution: {integrity: 
sha512-Sig0SNORX9fdW+bQuTEovKj3uHcUL6LQKbCrrqb1X7J6/ReAbhCXRAhc+SMejhLELFj2QcyuxmUooZ4bt5ReSw==} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 dev: true /@types/gradient-string/1.1.2: @@ -2654,6 +2782,13 @@ packages: resolution: {integrity: sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ==} dev: true + /@types/inquirer/6.5.0: + resolution: {integrity: sha512-rjaYQ9b9y/VFGOpqBEXRavc3jh0a+e6evAbI31tMda8VlPaSy0AZJfXsvmIe3wklc7W6C3zCSfleuMXR7NOyXw==} + dependencies: + '@types/through': 0.0.30 + rxjs: 6.6.7 + dev: false + /@types/inquirer/7.3.3: resolution: {integrity: sha512-HhxyLejTHMfohAuhRun4csWigAMjXTmRyiJTU1Y/I1xmggikFMkOUoMQRlFm+zQcPEGHSs3io/0FAmNZf8EymQ==} dependencies: @@ -2712,7 +2847,7 @@ packages: /@types/keyv/3.1.4: resolution: {integrity: sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 dev: true /@types/liftoff/4.0.0: @@ -2740,7 +2875,6 @@ packages: /@types/minimatch/5.1.1: resolution: {integrity: sha512-v55NF6Dz0wrj14Rn8iEABTWrhYRmgkJYuokduunSiq++t3hZ9VZ6dvcDt+850Pm5sGJZk8RaHzkFCXPxVINZ+g==} - dev: true /@types/ms/0.7.31: resolution: {integrity: sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==} @@ -2756,7 +2890,6 @@ packages: /@types/node/16.11.56: resolution: {integrity: sha512-aFcUkv7EddxxOa/9f74DINReQ/celqH8DiB3fRYgVDM2Xm5QJL8sl80QKuAnGvwAsMn+H3IFA6WCrQh1CY7m1A==} - dev: true /@types/node/18.11.11: resolution: {integrity: sha512-KJ021B1nlQUBLopzZmPBVuGU9un7WJd/W4ya7Ih02B4Uwky5Nja0yGYav2EfYIk0RR2Q9oVhf60S2XR1BCWJ2g==} @@ -2782,7 +2915,7 @@ packages: /@types/responselike/1.0.0: resolution: {integrity: sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 dev: true /@types/retry/0.12.2: @@ -2793,7 +2926,7 @@ packages: 
resolution: {integrity: sha512-F3OznnSLAUxFrCEu/L5PY8+ny8DtcFRjx7fZZ9bycvXRi3KPTRS9HOitGZwvPg0juRhXFWIeKX58cnX5YqLohQ==} dependencies: '@types/glob': 7.2.0 - '@types/node': 16.11.56 + '@types/node': 18.13.0 dev: true /@types/scheduler/0.16.2: @@ -2810,7 +2943,7 @@ packages: /@types/tar/6.1.4: resolution: {integrity: sha512-Cp4oxpfIzWt7mr2pbhHT2OTXGMAL0szYCzuf8lRWyIMCgsx6/Hfc3ubztuhvzXHXgraTQxyOCmmg7TDGIMIJJQ==} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 minipass: 4.0.0 dev: true @@ -2818,7 +2951,6 @@ packages: resolution: {integrity: sha512-FvnCJljyxhPM3gkRgWmxmDZyAQSiBQQWLI0A0VFL0K7W1oRUrPJSqNO0NvTnLkBcotdlp3lKvaT0JrnyRDkzOg==} dependencies: '@types/node': 18.13.0 - dev: true /@types/tinycolor2/1.4.3: resolution: {integrity: sha512-Kf1w9NE5HEgGxCRyIcRXR/ZYtDv0V8FVPtYHwLxl0O+maGX0erE77pQlD0gpP+/KByMZ87mOA79SjifhSB3PjQ==} @@ -2831,6 +2963,10 @@ packages: resolution: {integrity: sha512-kr90f+ERiQtKWMz5rP32ltJ/BtULDI5RVO0uavn1HQUOwjx0R1h0rnDYNL0CepF1zL5bSY6FISAfd9tOdDhU5Q==} dev: true + /@types/validate-npm-package-name/4.0.0: + resolution: {integrity: sha512-RpO62vB2lkjEkyLbwTheA2+uwYmtVMWTr/kWRI++UAgVdZqNqdAuIQl/SxBCGeMKfdjWaXPbyhZbiCc4PAj+KA==} + dev: true + /@types/webidl-conversions/7.0.0: resolution: {integrity: sha512-xTE1E+YF4aWPJJeUzaZI5DRntlkY3+BCVJi0axFptnjGmAoWxkyREIh/XMrfxVLejwQxMCfDXdICo0VLxThrog==} dev: false @@ -3188,6 +3324,10 @@ packages: engines: {node: '>=0.4.0'} dev: true + /acorn-walk/8.2.0: + resolution: {integrity: sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==} + engines: {node: '>=0.4.0'} + /acorn/7.4.1: resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} engines: {node: '>=0.4.0'} @@ -3218,7 +3358,6 @@ packages: dependencies: clean-stack: 2.2.0 indent-string: 4.0.0 - dev: true /ajv-keywords/3.5.2_ajv@6.12.6: resolution: {integrity: 
sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==} @@ -3306,6 +3445,9 @@ packages: resolution: {integrity: sha512-Wk7TEzl1KqvTGs/uyhmHO/3XLd3t1UeU4IstvPXVzGPM522cTjqjNZ99esCkcL52sjqjo8e8CTBcWhkxvGzoAw==} dev: false + /arg/4.1.3: + resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} + /arg/5.0.2: resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} dev: true @@ -3730,6 +3872,12 @@ packages: ieee754: 1.2.1 dev: true + /builtins/5.0.1: + resolution: {integrity: sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==} + dependencies: + semver: 7.5.0 + dev: false + /bundle-require/3.1.0_esbuild@0.14.49: resolution: {integrity: sha512-IIXtAO7fKcwPHNPt9kY/WNVJqy7NDy6YqJvv6ENH0TOZoJ+yjpEsn1w40WKZbR2ibfu5g1rfgJTvmFHpm5aOMA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -3816,6 +3964,13 @@ packages: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} + /camel-case/3.0.0: + resolution: {integrity: sha512-+MbKztAYHXPr1jNTSKQF52VpcFjwY5RkR7fxksV8Doo4KAYc5Fl4UJRgthBbTmEx8C54DqahhbLJkDwjI3PI/w==} + dependencies: + no-case: 2.3.2 + upper-case: 1.1.3 + dev: false + /camel-case/4.1.2: resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==} dependencies: @@ -3896,6 +4051,29 @@ packages: engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} dev: true + /change-case/3.1.0: + resolution: {integrity: sha512-2AZp7uJZbYEzRPsFoa+ijKdvp9zsrnnt6+yFokfwEpeJm0xuJDVoxiRCAaTzyJND8GJkofo2IcKWaUZ/OECVzw==} + dependencies: + camel-case: 3.0.0 + constant-case: 2.0.0 + dot-case: 2.1.1 + header-case: 1.0.1 + is-lower-case: 1.1.3 + is-upper-case: 1.1.2 + lower-case: 1.1.4 + lower-case-first: 1.0.2 + no-case: 2.3.2 + param-case: 
2.1.1 + pascal-case: 2.0.1 + path-case: 2.1.1 + sentence-case: 2.1.1 + snake-case: 2.1.0 + swap-case: 1.1.2 + title-case: 2.1.1 + upper-case: 1.1.3 + upper-case-first: 1.1.2 + dev: false + /change-case/4.1.2: resolution: {integrity: sha512-bSxY2ws9OtviILG1EiY5K7NNxkqg/JnRnFxLtKQ96JaviiIxi7djMrSd0ECT9AC+lttClmYwKw53BWpOMblo7A==} dependencies: @@ -3991,7 +4169,6 @@ packages: /clean-stack/2.2.0: resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} engines: {node: '>=6'} - dev: true /cli-cursor/3.1.0: resolution: {integrity: sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==} @@ -4165,6 +4342,13 @@ packages: /concat-map/0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + /constant-case/2.0.0: + resolution: {integrity: sha512-eS0N9WwmjTqrOmR3o83F5vW8Z+9R1HnVz3xmzT2PMFug9ly+Au/fxRWlEBSb6LcZwspSsEn9Xs1uw9YgzAg1EQ==} + dependencies: + snake-case: 2.1.0 + upper-case: 1.1.3 + dev: false + /constant-case/3.0.4: resolution: {integrity: sha512-I2hSBi7Vvs7BEuJDr5dDHfzb/Ruj3FyvFyh7KLilAjNQw3Be+xgqUBA2W6scVEcL0hL1dwPRtIqEPVUCKkSsyQ==} dependencies: @@ -4211,12 +4395,14 @@ packages: /core-js-pure/3.26.1: resolution: {integrity: sha512-VVXcDpp/xJ21KdULRq/lXdLzQAtX7+37LzpyfFM973il0tWSsDEoyzG38G14AjTpK9VTfiNM9jnFauq/CpaWGQ==} requiresBuild: true - dev: true /core-util-is/1.0.3: resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} dev: true + /create-require/1.1.1: + resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} + /cross-env/7.0.3: resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==} engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'} @@ -4438,6 +4624,20 @@ packages: resolution: {integrity: 
sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ==} dev: true + /del/5.1.0: + resolution: {integrity: sha512-wH9xOVHnczo9jN2IW68BabcecVPxacIA3g/7z6vhSU/4stOKQzeCRK0yD0A24WiAAUJmmVpWqrERcTxnLo3AnA==} + engines: {node: '>=8'} + dependencies: + globby: 10.0.2 + graceful-fs: 4.2.10 + is-glob: 4.0.3 + is-path-cwd: 2.2.0 + is-path-inside: 3.0.3 + p-map: 3.0.0 + rimraf: 3.0.2 + slash: 3.0.0 + dev: false + /del/6.1.1: resolution: {integrity: sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==} engines: {node: '>=10'} @@ -4504,6 +4704,10 @@ packages: engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dev: true + /diff/4.0.2: + resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} + engines: {node: '>=0.3.1'} + /diff/5.1.0: resolution: {integrity: sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw==} engines: {node: '>=0.3.1'} @@ -4539,6 +4743,12 @@ packages: webidl-conversions: 5.0.0 dev: true + /dot-case/2.1.1: + resolution: {integrity: sha512-HnM6ZlFqcajLsyudHq7LeeLDr2rFAVYtDv/hV5qchQEidSck8j9OPUsXY9KwJv/lHMtYlX4DjRQqwFYa+0r8Ug==} + dependencies: + no-case: 2.3.2 + dev: false + /dot-case/3.0.4: resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} dependencies: @@ -6441,7 +6651,6 @@ packages: /function-bind/1.1.1: resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==} - dev: true /function.prototype.name/1.1.5: resolution: {integrity: sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==} @@ -6635,6 +6844,20 @@ packages: resolution: {integrity: sha512-40oNTM9UfG6aBmuKxk/giHn5nQ8RVz/SS4Ir6zgzOv9/qC3kKZ9v4etGTcJbEl/NyVQH7FGU7d+X1egr57Md2Q==} dev: true + /globby/10.0.2: + resolution: {integrity: 
sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg==} + engines: {node: '>=8'} + dependencies: + '@types/glob': 7.2.0 + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.2.12 + glob: 7.2.3 + ignore: 5.2.1 + merge2: 1.4.1 + slash: 3.0.0 + dev: false + /globby/11.1.0: resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} engines: {node: '>=10'} @@ -6717,7 +6940,6 @@ packages: wordwrap: 1.0.0 optionalDependencies: uglify-js: 3.17.4 - dev: true /has-bigints/1.0.2: resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} @@ -6790,7 +7012,6 @@ packages: engines: {node: '>= 0.4.0'} dependencies: function-bind: 1.1.1 - dev: true /hash-obj/4.0.0: resolution: {integrity: sha512-FwO1BUVWkyHasWDW4S8o0ssQXjvyghLV2rfVhnN36b2bbcj45eGiuzdn9XOvOpjV3TKQD7Gm2BWNXdE9V4KKYg==} @@ -6902,6 +7123,13 @@ packages: space-separated-tokens: 2.0.1 dev: false + /header-case/1.0.1: + resolution: {integrity: sha512-i0q9mkOeSuhXw6bGgiQCCBgY/jlZuV/7dZXyZ9c6LcBrqwvT8eT719E9uxE5LiZftdl+z81Ugbg/VvXV4OJOeQ==} + dependencies: + no-case: 2.3.2 + upper-case: 1.1.3 + dev: false + /header-case/2.0.4: resolution: {integrity: sha512-H/vuk5TEEVZwrR0lp2zed9OCo1uAILMlx0JEMgC26rzyJJ3N1v6XkwHHXJQdR2doSjcGPM6OKPYoJgf0plJ11Q==} dependencies: @@ -7037,7 +7265,6 @@ packages: /indent-string/4.0.0: resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} engines: {node: '>=8'} - dev: true /inflight/1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} @@ -7065,6 +7292,25 @@ packages: rxjs: 7.6.0 dev: false + /inquirer/7.3.3: + resolution: {integrity: sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==} + engines: {node: '>=8.0.0'} + dependencies: + ansi-escapes: 4.3.2 + 
chalk: 4.1.2 + cli-cursor: 3.1.0 + cli-width: 3.0.0 + external-editor: 3.1.0 + figures: 3.2.0 + lodash: 4.17.21 + mute-stream: 0.0.8 + run-async: 2.4.1 + rxjs: 6.6.7 + string-width: 4.2.3 + strip-ansi: 6.0.1 + through: 2.3.8 + dev: false + /inquirer/8.2.4: resolution: {integrity: sha512-nn4F01dxU8VeKfq192IjLsxu0/OmMZ4Lg3xKAns148rCaXP6ntAoEkVYZThWjwON8AlzdZZi6oqnhNbxUG9hVg==} engines: {node: '>=12.0.0'} @@ -7192,7 +7438,6 @@ packages: resolution: {integrity: sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw==} dependencies: has: 1.0.3 - dev: true /is-data-descriptor/0.1.4: resolution: {integrity: sha512-+w9D5ulSoBNlmw9OHn3U2v51SyoCd0he+bB3xMl62oijhrspxowjU+AIcDY0N3iEJbUEkB15IlMASQsxYigvXg==} @@ -7300,6 +7545,12 @@ packages: engines: {node: '>=12'} dev: true + /is-lower-case/1.1.3: + resolution: {integrity: sha512-+5A1e/WJpLLXZEDlgz4G//WYSHyQBD32qa4Jd3Lw06qQlv3fJHnp3YIHjTQSGzHMgzmVKz2ZP3rBxTHkPw/lxA==} + dependencies: + lower-case: 1.1.4 + dev: false + /is-negative-zero/2.0.2: resolution: {integrity: sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==} engines: {node: '>= 0.4'} @@ -7336,12 +7587,10 @@ packages: /is-path-cwd/2.2.0: resolution: {integrity: sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==} engines: {node: '>=6'} - dev: true /is-path-inside/3.0.3: resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} engines: {node: '>=8'} - dev: true /is-plain-obj/3.0.0: resolution: {integrity: sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==} @@ -7455,6 +7704,12 @@ packages: engines: {node: '>=12'} dev: true + /is-upper-case/1.1.2: + resolution: {integrity: sha512-GQYSJMgfeAmVwh9ixyk888l7OIhNAGKtY6QA+IrWlu9MDTCaXmeozOZ2S9Knj7bQwBO/H6J2kb+pbyTUiMNbsw==} + dependencies: + upper-case: 1.1.3 + dev: false + /is-weakref/1.0.2: 
resolution: {integrity: sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==} dependencies: @@ -7480,7 +7735,6 @@ packages: /isbinaryfile/4.0.10: resolution: {integrity: sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw==} engines: {node: '>= 8.0.0'} - dev: true /isexe/2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} @@ -7559,7 +7813,7 @@ packages: '@jest/environment': 27.5.1 '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 chalk: 4.1.2 co: 4.6.0 dedent: 0.7.0 @@ -7609,6 +7863,36 @@ packages: - utf-8-validate dev: true + /jest-cli/27.5.1_ts-node@10.9.1: + resolution: {integrity: sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw==} + engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 27.5.1_ts-node@10.9.1 + '@jest/test-result': 27.5.1 + '@jest/types': 27.5.1 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.10 + import-local: 3.1.0 + jest-config: 27.5.1_ts-node@10.9.1 + jest-util: 27.5.1 + jest-validate: 27.5.1 + prompts: 2.4.2 + yargs: 16.2.0 + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /jest-config/27.5.1: resolution: {integrity: sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} @@ -7649,6 +7933,47 @@ packages: - utf-8-validate dev: true + /jest-config/27.5.1_ts-node@10.9.1: + resolution: {integrity: sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA==} + engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || 
>=15.0.0} + peerDependencies: + ts-node: '>=9.0.0' + peerDependenciesMeta: + ts-node: + optional: true + dependencies: + '@babel/core': 7.20.12 + '@jest/test-sequencer': 27.5.1 + '@jest/types': 27.5.1 + babel-jest: 27.5.1_@babel+core@7.20.12 + chalk: 4.1.2 + ci-info: 3.8.0 + deepmerge: 4.2.2 + glob: 7.2.3 + graceful-fs: 4.2.10 + jest-circus: 27.5.1 + jest-environment-jsdom: 27.5.1 + jest-environment-node: 27.5.1 + jest-get-type: 27.5.1 + jest-jasmine2: 27.5.1 + jest-regex-util: 27.5.1 + jest-resolve: 27.5.1 + jest-runner: 27.5.1 + jest-util: 27.5.1 + jest-validate: 27.5.1 + micromatch: 4.0.5 + parse-json: 5.2.0 + pretty-format: 27.5.1 + slash: 3.0.0 + strip-json-comments: 3.1.1 + ts-node: 10.9.1_53e5n3kefom5jmudvwxecmm4oi + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - utf-8-validate + dev: true + /jest-diff/27.5.1: resolution: {integrity: sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} @@ -7684,7 +8009,7 @@ packages: '@jest/environment': 27.5.1 '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 jest-mock: 27.5.1 jest-util: 27.5.1 jsdom: 16.7.0 @@ -7702,7 +8027,7 @@ packages: '@jest/environment': 27.5.1 '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 jest-mock: 27.5.1 jest-util: 27.5.1 dev: true @@ -7718,7 +8043,7 @@ packages: dependencies: '@jest/types': 27.5.1 '@types/graceful-fs': 4.1.6 - '@types/node': 16.11.56 + '@types/node': 18.13.0 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.10 @@ -7740,7 +8065,7 @@ packages: '@jest/source-map': 27.5.1 '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 chalk: 4.1.2 co: 4.6.0 expect: 27.5.1 @@ -7795,7 +8120,7 @@ packages: engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: '@jest/types': 27.5.1 - 
'@types/node': 16.11.56 + '@types/node': 18.13.0 dev: true /jest-pnp-resolver/1.2.3_jest-resolve@27.5.1: @@ -7851,7 +8176,7 @@ packages: '@jest/test-result': 27.5.1 '@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 chalk: 4.1.2 emittery: 0.8.1 graceful-fs: 4.2.10 @@ -7908,7 +8233,7 @@ packages: resolution: {integrity: sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 graceful-fs: 4.2.10 dev: true @@ -7937,7 +8262,7 @@ packages: jest-util: 27.5.1 natural-compare: 1.4.0 pretty-format: 27.5.1 - semver: 7.3.8 + semver: 7.5.0 transitivePeerDependencies: - supports-color dev: true @@ -7972,7 +8297,7 @@ packages: dependencies: '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 16.11.56 + '@types/node': 18.13.0 ansi-escapes: 4.3.2 chalk: 4.1.2 jest-util: 27.5.1 @@ -7983,7 +8308,7 @@ packages: resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} engines: {node: '>= 10.13.0'} dependencies: - '@types/node': 16.11.56 + '@types/node': 18.13.0 merge-stream: 2.0.0 supports-color: 8.1.1 dev: true @@ -8009,6 +8334,27 @@ packages: - utf-8-validate dev: true + /jest/27.5.1_ts-node@10.9.1: + resolution: {integrity: sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ==} + engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + dependencies: + '@jest/core': 27.5.1_ts-node@10.9.1 + import-local: 3.1.0 + jest-cli: 27.5.1_ts-node@10.9.1 + transitivePeerDependencies: + - bufferutil + - canvas + - supports-color + - ts-node + - utf-8-validate + dev: true + /joycon/3.1.1: resolution: {integrity: 
sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} engines: {node: '>=10'} @@ -8340,7 +8686,6 @@ packages: /lodash.get/4.4.2: resolution: {integrity: sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ==} - dev: true /lodash.memoize/4.1.2: resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} @@ -8401,6 +8746,16 @@ packages: dependencies: js-tokens: 4.0.0 + /lower-case-first/1.0.2: + resolution: {integrity: sha512-UuxaYakO7XeONbKrZf5FEgkantPf5DUqDayzP5VXZrtRPdH86s4kN47I8B3TW10S4QKiE3ziHNf3kRN//okHjA==} + dependencies: + lower-case: 1.1.4 + dev: false + + /lower-case/1.1.4: + resolution: {integrity: sha512-2Fgx1Ycm599x+WGpIYwJOvsjmXFzTSc34IwDWALRA/8AopUKAVPwfJ+h5+f85BCp0PWmmJcWzEpxOpoXycMpdA==} + dev: false + /lower-case/2.0.2: resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} dependencies: @@ -8450,7 +8805,6 @@ packages: /make-error/1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - dev: true /make-iterator/1.0.1: resolution: {integrity: sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw==} @@ -9125,6 +9479,13 @@ packages: brace-expansion: 2.0.1 dev: false + /minimatch/9.0.0: + resolution: {integrity: sha512-0jJj8AvgKqWN05mrwuqi8QYKx1WmYSUoKSxu5Qhs9prezTz10sxAHGNZe9J9cqIJzta8DWsleh2KaVaLl6Ru2w==} + engines: {node: '>=16 || 14 >=14.17'} + dependencies: + brace-expansion: 2.0.1 + dev: false + /minimist/1.2.6: resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==} dev: false @@ -9309,7 +9670,6 @@ packages: /neo-async/2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} - dev: true 
/next-seo/5.15.0_vwuoxxmoeydgew5hhkavebkk4e: resolution: {integrity: sha512-LGbcY91yDKGMb7YI+28n3g+RuChUkt6pXNpa8FkfKkEmNiJkeRDEXTnnjVtwT9FmMhG6NH8qwHTelGrlYm9rgg==} @@ -9516,6 +9876,12 @@ packages: resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} dev: true + /no-case/2.3.2: + resolution: {integrity: sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==} + dependencies: + lower-case: 1.1.4 + dev: false + /no-case/3.0.4: resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} dependencies: @@ -9550,6 +9916,23 @@ packages: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==} dev: true + /node-plop/0.26.3: + resolution: {integrity: sha512-Cov028YhBZ5aB7MdMWJEmwyBig43aGL5WT4vdoB28Oitau1zZAcHUn8Sgfk9HM33TqhtLJ9PlM/O0Mv+QpV/4Q==} + engines: {node: '>=8.9.4'} + dependencies: + '@babel/runtime-corejs3': 7.20.6 + '@types/inquirer': 6.5.0 + change-case: 3.1.0 + del: 5.1.0 + globby: 10.0.2 + handlebars: 4.7.7 + inquirer: 7.3.3 + isbinaryfile: 4.0.10 + lodash.get: 4.4.2 + mkdirp: 0.5.6 + resolve: 1.22.1 + dev: false + /node-plop/0.31.0: resolution: {integrity: sha512-aKLPxiBoFTNUovvtK8j/Whc4PZREkYx6htw2HJPiU8wYquXmN8pkd9B3xlFo6AJ4ZlzFsQSf/NXR5xET8EqRYw==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -9913,6 +10296,13 @@ packages: dependencies: p-limit: 3.1.0 + /p-map/3.0.0: + resolution: {integrity: sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==} + engines: {node: '>=8'} + dependencies: + aggregate-error: 3.1.0 + dev: false + /p-map/4.0.0: resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} engines: {node: '>=10'} @@ -9924,6 +10314,12 @@ packages: resolution: {integrity: 
sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} engines: {node: '>=6'} + /param-case/2.1.1: + resolution: {integrity: sha512-eQE845L6ot89sk2N8liD8HAuH4ca6Vvr7VWAWwt7+kvvG5aBcPmmphQ68JsEG2qa9n1TykS2DLeMt363AAH8/w==} + dependencies: + no-case: 2.3.2 + dev: false + /param-case/3.0.4: resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} dependencies: @@ -10008,6 +10404,13 @@ packages: entities: 4.5.0 dev: false + /pascal-case/2.0.1: + resolution: {integrity: sha512-qjS4s8rBOJa2Xm0jmxXiyh1+OFf6ekCWOvUaRgAQSktzlTbMotS0nmG9gyYAybCWBcuP4fsBeRCKNwGBnMe2OQ==} + dependencies: + camel-case: 3.0.0 + upper-case-first: 1.1.2 + dev: false + /pascal-case/3.1.2: resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} dependencies: @@ -10020,6 +10423,12 @@ packages: engines: {node: '>=0.10.0'} dev: true + /path-case/2.1.1: + resolution: {integrity: sha512-Ou0N05MioItesaLr9q8TtHVWmJ6fxWdqKB2RohFmNWVyJ+2zeKIeDNWAN6B/Pe7wpzWChhZX6nONYmOnMeJQ/Q==} + dependencies: + no-case: 2.3.2 + dev: false + /path-case/3.0.4: resolution: {integrity: sha512-qO4qCFjXqVTrcbPt/hQfhTQ+VhFsqNKOPtytgNKkKxSoEp3XPUQ8ObFuePylOIok5gjn69ry8XiULxCwot3Wfg==} dependencies: @@ -10055,7 +10464,6 @@ packages: /path-parse/1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} - dev: true /path-root-regex/0.1.2: resolution: {integrity: sha512-4GlJ6rZDhQZFE0DPVKh0e9jmZ5egZfxTkp7bcRDuPlJXbAwhxcl2dINPUAsjLdejqaLsCeg8axcLjIbvBjN4pQ==} @@ -10212,6 +10620,23 @@ packages: yaml: 1.10.2 dev: true + /postcss-load-config/3.1.4_ts-node@10.9.1: + resolution: {integrity: sha512-6DiM4E7v4coTE4uzA8U//WhtPwyhiim3eyjEMFCnUpzbrkK9wJHgKDT2mR+HbtSrd/NubVaYTOpSpjUl8NQeRg==} + engines: {node: '>= 10'} + peerDependencies: + postcss: '>=8.0.9' + ts-node: '>=9.0.0' + peerDependenciesMeta: + 
postcss: + optional: true + ts-node: + optional: true + dependencies: + lilconfig: 2.0.6 + ts-node: 10.9.1_53e5n3kefom5jmudvwxecmm4oi + yaml: 1.10.2 + dev: true + /postcss-nested/5.0.6_postcss@8.4.21: resolution: {integrity: sha512-rKqm2Fk0KbA8Vt3AdGN0FB9OBOMDVajMG6ZCf/GoHgdxUJ4sBFp0A/uMIRm+MJUdo33YXEtjqIz8u7DAp8B7DA==} engines: {node: '>=12.0'} @@ -10483,9 +10908,6 @@ packages: /regenerator-runtime/0.13.11: resolution: {integrity: sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==} - /regenerator-runtime/0.13.9: - resolution: {integrity: sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==} - /regex-not/1.0.2: resolution: {integrity: sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==} engines: {node: '>=0.10.0'} @@ -10685,7 +11107,6 @@ packages: is-core-module: 2.11.0 path-parse: 1.0.7 supports-preserve-symlinks-flag: 1.0.0 - dev: true /resolve/2.0.0-next.4: resolution: {integrity: sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==} @@ -10785,7 +11206,6 @@ packages: engines: {npm: '>=2.0.0'} dependencies: tslib: 1.14.1 - dev: true /rxjs/7.5.6: resolution: {integrity: sha512-dnyv2/YsXhnm461G+R/Pe5bWP41Nm6LBXEYWI6eiFP4fiwx6WRI/CD0zbdVAudd9xwLEF2IDcKXLHit0FYjUzw==} @@ -10912,6 +11332,13 @@ packages: dependencies: lru-cache: 6.0.0 + /sentence-case/2.1.1: + resolution: {integrity: sha512-ENl7cYHaK/Ktwk5OTD+aDbQ3uC8IByu/6Bkg+HDv8Mm+XnBnppVNalcfJTNsp1ibstKh030/JKQQWglDvtKwEQ==} + dependencies: + no-case: 2.3.2 + upper-case-first: 1.1.2 + dev: false + /sentence-case/3.0.4: resolution: {integrity: sha512-8LS0JInaQMCRoQ7YUytAo/xUu5W2XnQxV2HI/6uM6U7CITS1RqPElr30V6uIqyMKM9lJGRVFy5/4CuzcixNYSg==} dependencies: @@ -11070,6 +11497,12 @@ packages: engines: {node: '>= 6.0.0', npm: '>= 3.0.0'} dev: false + /snake-case/2.1.0: + resolution: {integrity: 
sha512-FMR5YoPFwOLuh4rRz92dywJjyKYZNLpMn1R5ujVpIYkbA9p01fq8RMg0FkO4M+Yobt4MjHeLTJVm5xFFBHSV2Q==} + dependencies: + no-case: 2.3.2 + dev: false + /snake-case/3.0.4: resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} dependencies: @@ -11159,7 +11592,6 @@ packages: /source-map/0.6.1: resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} engines: {node: '>=0.10.0'} - dev: true /source-map/0.7.4: resolution: {integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==} @@ -11503,7 +11935,13 @@ packages: /supports-preserve-symlinks-flag/1.0.0: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - dev: true + + /swap-case/1.1.2: + resolution: {integrity: sha512-BAmWG6/bx8syfc6qXPprof3Mn5vQgf5dwdUNJhsNqU9WdPt5P+ES/wQ5bxfijy8zwZgZZHslC3iAsxsuQMCzJQ==} + dependencies: + lower-case: 1.1.4 + upper-case: 1.1.3 + dev: false /swr/1.3.0_react@18.2.0: resolution: {integrity: sha512-dkghQrOl2ORX9HYrMDtPa7LTVHJjCTeZoB1dqTbnnEDlSvN8JEKpYIYurDfvbQFUUS8Cg8PceFVZNkW0KNNYPw==} @@ -11709,6 +12147,13 @@ packages: '@types/tinycolor2': 1.4.3 tinycolor2: 1.4.2 + /title-case/2.1.1: + resolution: {integrity: sha512-EkJoZ2O3zdCz3zJsYCsxyq2OC5hrxR9mfdd5I+w8h/tmFfeOxJ+vvkxsKxdmN0WtS9zLdHEgfgVOiMVgv+Po4Q==} + dependencies: + no-case: 2.3.2 + upper-case: 1.1.3 + dev: false + /title-case/3.0.3: resolution: {integrity: sha512-e1zGYRvbffpcHIrnuqT0Dh+gEJtDaxDSoG4JAIpq4oDFyooziLBIiYQv0GBT4FUAnUop5uZ1hiIAj7oAF6sOCA==} dependencies: @@ -11861,7 +12306,7 @@ packages: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.3.8 + semver: 7.5.0 typescript: 4.7.4 yargs-parser: 20.2.9 dev: true @@ -11897,11 +12342,47 @@ packages: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.3.8 + semver: 7.5.0 typescript: 4.7.4 
yargs-parser: 20.2.9 dev: true + /ts-jest/27.1.5_fu5qd3dwfwo63mklk7zcmwwv6q: + resolution: {integrity: sha512-Xv6jBQPoBEvBq/5i2TeSG9tt/nqkbpcurrEG1b+2yfBrcJelOZF9Ml6dmyMh7bcW9JyFbRYpR5rxROSlBLTZHA==} + engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} + hasBin: true + peerDependencies: + '@babel/core': '>=7.0.0-beta.0 <8' + '@types/jest': ^27.0.0 + babel-jest: '>=27.0.0 <28' + esbuild: '*' + jest: ^27.0.0 + typescript: '>=3.8 <5.0' + peerDependenciesMeta: + '@babel/core': + optional: true + '@types/jest': + optional: true + babel-jest: + optional: true + esbuild: + optional: true + dependencies: + '@babel/core': 7.20.12 + '@types/jest': 27.5.2 + bs-logger: 0.2.6 + esbuild: 0.17.18 + fast-json-stable-stringify: 2.1.0 + jest: 27.5.1_ts-node@10.9.1 + jest-util: 27.5.1 + json5: 2.2.3 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.5.0 + typescript: 4.9.4 + yargs-parser: 20.2.9 + dev: true + /ts-jest/27.1.5_ndeyfkjscoamnbcpt4q6qsiybu: resolution: {integrity: sha512-Xv6jBQPoBEvBq/5i2TeSG9tt/nqkbpcurrEG1b+2yfBrcJelOZF9Ml6dmyMh7bcW9JyFbRYpR5rxROSlBLTZHA==} engines: {node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0} @@ -11932,7 +12413,7 @@ packages: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.3.8 + semver: 7.5.0 typescript: 4.9.4 yargs-parser: 20.2.9 dev: true @@ -11968,7 +12449,7 @@ packages: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.3.8 + semver: 7.5.0 typescript: 4.7.4 yargs-parser: 20.2.9 dev: true @@ -12004,7 +12485,7 @@ packages: json5: 2.2.3 lodash.memoize: 4.1.2 make-error: 1.3.6 - semver: 7.3.8 + semver: 7.5.0 typescript: 4.9.4 yargs-parser: 20.2.9 dev: true @@ -12023,6 +12504,36 @@ packages: typescript: 4.8.4 dev: false + /ts-node/10.9.1_53e5n3kefom5jmudvwxecmm4oi: + resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} + hasBin: true + peerDependencies: + '@swc/core': '>=1.2.50' + '@swc/wasm': '>=1.2.50' + '@types/node': '*' + 
typescript: '>=2.7' + peerDependenciesMeta: + '@swc/core': + optional: true + '@swc/wasm': + optional: true + dependencies: + '@cspotcode/source-map-support': 0.8.1 + '@tsconfig/node10': 1.0.9 + '@tsconfig/node12': 1.0.11 + '@tsconfig/node14': 1.0.3 + '@tsconfig/node16': 1.0.4 + '@types/node': 16.11.56 + acorn: 8.8.1 + acorn-walk: 8.2.0 + arg: 4.1.3 + create-require: 1.1.1 + diff: 4.0.2 + make-error: 1.3.6 + typescript: 4.9.4 + v8-compile-cache-lib: 3.0.1 + yn: 3.1.1 + /tsconfig-paths/3.14.1: resolution: {integrity: sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ==} dependencies: @@ -12184,6 +12695,42 @@ packages: - ts-node dev: true + /tsup/6.7.0_z6wznmtyb6ovnulj6iujpct7um: + resolution: {integrity: sha512-L3o8hGkaHnu5TdJns+mCqFsDBo83bJ44rlK7e6VdanIvpea4ArPcU3swWGsLVbXak1PqQx/V+SSmFPujBK+zEQ==} + engines: {node: '>=14.18'} + hasBin: true + peerDependencies: + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.1.0' + peerDependenciesMeta: + '@swc/core': + optional: true + postcss: + optional: true + typescript: + optional: true + dependencies: + bundle-require: 4.0.1_esbuild@0.17.18 + cac: 6.7.12 + chokidar: 3.5.3 + debug: 4.3.4 + esbuild: 0.17.18 + execa: 5.1.1 + globby: 11.1.0 + joycon: 3.1.1 + postcss-load-config: 3.1.4_ts-node@10.9.1 + resolve-from: 5.0.0 + rollup: 3.21.5 + source-map: 0.8.0-beta.0 + sucrase: 3.24.0 + tree-kill: 1.2.2 + typescript: 4.9.4 + transitivePeerDependencies: + - supports-color + - ts-node + dev: true + /tsutils/3.21.0_typescript@4.8.4: resolution: {integrity: sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==} engines: {node: '>= 6'} @@ -12272,14 +12819,12 @@ packages: resolution: {integrity: sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==} engines: {node: '>=4.2.0'} hasBin: true - dev: true /uglify-js/3.17.4: resolution: {integrity: 
sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g==} engines: {node: '>=0.8.0'} hasBin: true requiresBuild: true - dev: true optional: true /unbox-primitive/1.0.2: @@ -12448,12 +12993,22 @@ packages: registry-url: 3.1.0 dev: false + /upper-case-first/1.1.2: + resolution: {integrity: sha512-wINKYvI3Db8dtjikdAqoBbZoP6Q+PZUyfMR7pmwHzjC2quzSkUq5DmPrTtPEqHaz8AGtmsB4TqwapMTM1QAQOQ==} + dependencies: + upper-case: 1.1.3 + dev: false + /upper-case-first/2.0.2: resolution: {integrity: sha512-514ppYHBaKwfJRK/pNC6c/OxfGa0obSnAl106u97Ed0I625Nin96KAjttZF6ZL3e1XLtphxnqrOi9iWgm+u+bg==} dependencies: tslib: 2.4.1 dev: true + /upper-case/1.1.3: + resolution: {integrity: sha512-WRbjgmYzgXkCV7zNVpy5YgrHgbBv126rMALQQMrmzOVC4GM2waQ9x7xtm8VU+1yF2kWyPzI9zbZ48n4vSxwfSA==} + dev: false + /upper-case/2.0.2: resolution: {integrity: sha512-KgdgDGJt2TpuwBUIjgG6lzw2GWFRCW9Qkfkiv0DxqHHLYJHmtmdUIKcZd8rHgFSjopVTlw6ggzCm1b8MFQwikg==} dependencies: @@ -12506,6 +13061,9 @@ packages: sade: 1.8.1 dev: false + /v8-compile-cache-lib/3.0.1: + resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} + /v8-compile-cache/2.3.0: resolution: {integrity: sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==} dev: true @@ -12531,6 +13089,13 @@ packages: spdx-expression-parse: 3.0.1 dev: true + /validate-npm-package-name/5.0.0: + resolution: {integrity: sha512-YuKoXDAhBYxY7SfOKxHBDoSyENFeW5VvIIQp2TGQuit8gpK6MnWaQelBKxso72DoxTZfZdcP3W90LqpSkgPzLQ==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + builtins: 5.0.1 + dev: false + /vfile-location/4.0.1: resolution: {integrity: sha512-JDxPlTbZrZCQXogGheBHjbRWjESSPEak770XwWPfw5mTc1v1nWGLB/apzZxsx8a0SJVfF8HK8ql8RD308vXRUw==} dependencies: @@ -12737,7 +13302,6 @@ packages: /wordwrap/1.0.0: resolution: {integrity: 
sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==} - dev: true /wrap-ansi/6.2.0: resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} @@ -12841,6 +13405,10 @@ packages: yargs-parser: 20.2.9 dev: true + /yn/3.1.1: + resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} + engines: {node: '>=6'} + /yocto-queue/0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} diff --git a/turbo/generators/config.ts b/turbo/generators/config.ts new file mode 100644 index 0000000000000..4fab73712ab6c --- /dev/null +++ b/turbo/generators/config.ts @@ -0,0 +1,36 @@ +import { PlopTypes } from "@turbo/gen"; + +export default function generator(plop: PlopTypes.NodePlopAPI): void { + plop.setGenerator("example", { + description: + "An example Turborepo generator - creates a new file at the root of the project", + prompts: [ + { + type: "input", + name: "file", + placeholder: "generator-docs", + message: "What is the name of the file to create?", + }, + { + type: "input", + name: "author", + default: "turbobot", + message: "What is your name? 
(Will be added as the file author)", + }, + { + type: "list", + name: "type", + message: "What type of file should be created?", + choices: [".md", ".txt"], + default: ".md", + }, + ], + actions: [ + { + type: "add", + path: "{{ turbo.paths.root }}/{{ dashCase file }}{{ type }}", + templateFile: "templates/turborepo-generators.hbs", + }, + ], + }); +} diff --git a/turbo/generators/templates/turborepo-generators.hbs b/turbo/generators/templates/turborepo-generators.hbs new file mode 100644 index 0000000000000..2b7e200b2f0e7 --- /dev/null +++ b/turbo/generators/templates/turborepo-generators.hbs @@ -0,0 +1,5 @@ +# Turborepo Generators + +Read the docs at [turbo.build](https://turbo.build/repo/docs). + +Created by {{ author }}. diff --git a/turborepo-tests/integration/tests/dry_json/monorepo.t b/turborepo-tests/integration/tests/dry_json/monorepo.t index e70feaef5aceb..939187ce3fb59 100644 --- a/turborepo-tests/integration/tests/dry_json/monorepo.t +++ b/turborepo-tests/integration/tests/dry_json/monorepo.t @@ -80,6 +80,7 @@ Setup "scm", "tasks", "turboVersion", + "user", "version" ] diff --git a/turborepo-tests/integration/tests/dry_json/single_package.t b/turborepo-tests/integration/tests/dry_json/single_package.t index 9b658cb7d8aa9..e5a2ef9a2ab5a 100644 --- a/turborepo-tests/integration/tests/dry_json/single_package.t +++ b/turborepo-tests/integration/tests/dry_json/single_package.t @@ -84,6 +84,7 @@ Setup } } ], + "user": ".*", (re) "scm": { "type": "git", "sha": "[a-z0-9]+", (re) diff --git a/turborepo-tests/integration/tests/dry_json/single_package_no_config.t b/turborepo-tests/integration/tests/dry_json/single_package_no_config.t index 0c0521402dbc8..88cb4ae5151a7 100644 --- a/turborepo-tests/integration/tests/dry_json/single_package_no_config.t +++ b/turborepo-tests/integration/tests/dry_json/single_package_no_config.t @@ -78,6 +78,7 @@ Setup } } ], + "user": ".*", (re) "scm": { "type": "git", "sha": "[a-z0-9]+", (re) diff --git 
a/turborepo-tests/integration/tests/dry_json/single_package_with_deps.t b/turborepo-tests/integration/tests/dry_json/single_package_with_deps.t index 67eed5d90d1a7..51181ed7db03e 100644 --- a/turborepo-tests/integration/tests/dry_json/single_package_with_deps.t +++ b/turborepo-tests/integration/tests/dry_json/single_package_with_deps.t @@ -145,6 +145,7 @@ Setup } } ], + "user": ".*", (re) "scm": { "type": "git", "sha": "[a-z0-9]+", (re) diff --git a/turborepo-tests/integration/tests/no_args.t b/turborepo-tests/integration/tests/no_args.t index f4f5e628bd5bf..752a6ee9d3ec7 100644 --- a/turborepo-tests/integration/tests/no_args.t +++ b/turborepo-tests/integration/tests/no_args.t @@ -12,6 +12,7 @@ Make sure exit code is 2 when no args are passed completion Generate the autocompletion script for the specified shell daemon Runs the Turborepo background daemon link Link your local directory to a Vercel organization and enable remote caching + generate Generate a new app / package login Login to your Vercel account logout Logout to your Vercel account prune Prepare a subset of your monorepo diff --git a/turborepo-tests/integration/tests/prune/out_dir.t b/turborepo-tests/integration/tests/prune/out_dir.t new file mode 100644 index 0000000000000..78021de54bf45 --- /dev/null +++ b/turborepo-tests/integration/tests/prune/out_dir.t @@ -0,0 +1,24 @@ +Setup + $ . ${TESTDIR}/../../../helpers/setup.sh + $ . 
${TESTDIR}/../_helpers/setup_monorepo.sh $(pwd) monorepo_with_root_dep + +Test that absolute paths can be passed as out-dir + $ TMPFILE=$(mktemp) + $ ${TURBO} prune --scope=web --out-dir=${TMPFILE} + Generating pruned monorepo for web in .* (re) + - Added shared + - Added util + - Added web + $ cat ${TMPFILE}/package.json + { + "devDependencies": { + "util": "workspace:*" + }, + "name": "monorepo", + "packageManager": "pnpm@7.25.1", + "pnpm": { + "patchedDependencies": { + "is-number@7.0.0": "patches/is-number@7.0.0.patch" + } + } + } diff --git a/turborepo-tests/integration/tests/run_summary/monorepo.t b/turborepo-tests/integration/tests/run_summary/monorepo.t index 2414293d899b7..e9f7912e2885d 100644 --- a/turborepo-tests/integration/tests/run_summary/monorepo.t +++ b/turborepo-tests/integration/tests/run_summary/monorepo.t @@ -40,6 +40,7 @@ Setup "scm", "tasks", "turboVersion", + "user", "version" ] diff --git a/turborepo-tests/integration/tests/run_summary/single-package.t b/turborepo-tests/integration/tests/run_summary/single-package.t index 76ffeb438f2fb..4bd0f51006821 100644 --- a/turborepo-tests/integration/tests/run_summary/single-package.t +++ b/turborepo-tests/integration/tests/run_summary/single-package.t @@ -39,6 +39,7 @@ Check "scm", "tasks", "turboVersion", + "user", "version" ] diff --git a/turborepo-tests/integration/tests/turbo_help.t b/turborepo-tests/integration/tests/turbo_help.t index 8c448c355fa73..2d20f57e34370 100644 --- a/turborepo-tests/integration/tests/turbo_help.t +++ b/turborepo-tests/integration/tests/turbo_help.t @@ -12,6 +12,7 @@ Test help flag completion Generate the autocompletion script for the specified shell daemon Runs the Turborepo background daemon link Link your local directory to a Vercel organization and enable remote caching + generate Generate a new app / package login Login to your Vercel account logout Logout to your Vercel account prune Prepare a subset of your monorepo @@ -78,6 +79,7 @@ Test help flag completion 
Generate the autocompletion script for the specified shell daemon Runs the Turborepo background daemon link Link your local directory to a Vercel organization and enable remote caching + generate Generate a new app / package login Login to your Vercel account logout Logout to your Vercel account prune Prepare a subset of your monorepo diff --git a/version.txt b/version.txt index 811f867854624..d80f02b4e80d4 100644 --- a/version.txt +++ b/version.txt @@ -1,2 +1,2 @@ -1.9.4-canary.11 -canary +1.9.6 +latest