diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6009ee556..63476f046 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -26,7 +26,7 @@ env: NEXTEST_STATUS_LEVEL: "fail" jobs: - setup-test-projects: + compile_sway_projects: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 @@ -82,20 +82,39 @@ jobs: path: | e2e/sway/**/out/* - get-workspace-members: - runs-on: ubuntu-latest - outputs: - members: ${{ steps.set-members.outputs.members }} - steps: - - name: Checkout repository - uses: actions/checkout@v3 - - id: set-members - run: | - # install dasel - curl -sSLf "$DASEL_VERSION" -L -o dasel && chmod +x dasel - mv ./dasel /usr/local/bin/dasel - members=$(cat Cargo.toml | dasel -r toml -w json 'workspace.members' | jq -r ".[]" | xargs -I '{}' dasel -f {}/Cargo.toml 'package.name' | jq -R '[.]' | jq -s -c 'add') - echo "members=$members" >> $GITHUB_OUTPUT + build-checks-binary: + runs-on: ubuntu-latest + outputs: + config: ${{ steps.generate-config.outputs.config }} + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install toolchain + uses: dtolnay/rust-toolchain@master + with: + toolchain: ${{ env.RUST_VERSION }} + + - uses: Swatinem/rust-cache@v2 + with: + prefix-key: "v1-rust" + + - name: Build checks binary + run: cargo build --package checks --bin checks + + - name: Upload checks binary + uses: actions/upload-artifact@v4 + with: + name: checks-binary + path: target/debug/checks + + - name: Generate config + id: generate-config + run: | + EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64) + echo "config<<$EOF" >> "$GITHUB_OUTPUT" + ./target/debug/checks -r . --dump-ci-config >> "$GITHUB_OUTPUT" + echo "$EOF" >> "$GITHUB_OUTPUT" verify-rust-version: runs-on: ubuntu-latest @@ -130,66 +149,44 @@ jobs: cargo-verifications: needs: - - setup-test-projects + - compile_sway_projects - verify-rust-version - - get-workspace-members + - build-checks-binary - publish-crates-check runs-on: ubuntu-latest strategy: matrix: - cargo_command: [check] - args: [--all-features] - package: ${{fromJSON(needs.get-workspace-members.outputs.members)}} - include: - - cargo_command: fmt - args: --all --verbose -- --check - - cargo_command: clippy - args: --all-targets - download_sway_artifacts: sway-examples - - cargo_command: clippy - args: --all-targets --features "default fuel-core-lib test-type-paths" - download_sway_artifacts: sway-examples-w-type-paths - - cargo_command: nextest - args: run --all-targets --features "default fuel-core-lib test-type-paths coin-cache" --workspace --cargo-quiet - download_sway_artifacts: sway-examples-w-type-paths - install_fuel_core: true - - cargo_command: nextest - args: run --all-targets --workspace --cargo-quiet - download_sway_artifacts: sway-examples - install_fuel_core: true - - cargo_command: test - args: --doc --workspace - - cargo_command: machete - args: --skip-target-dir - - command: test_wasm - args: - - command: check_fuel_core_version - args: - - command: check_doc_anchors_valid - args: - - command: check_doc_unresolved_links - args: - - command: check_typos - args: + config: ${{fromJSON(needs.build-checks-binary.outputs.config)}} + name: ${{ matrix.config.name }} steps: - name: Checkout repository uses: actions/checkout@v3 with: ref: ${{ github.event.pull_request.head.sha }} - - uses: dtolnay/rust-toolchain@master + + - name: Install Rust toolchain + if: ${{ matrix.config.deps.rust && matrix.config.deps.rust.nightly == false }} + uses: dtolnay/rust-toolchain@master with: 
toolchain: ${{ env.RUST_VERSION }} - components: clippy,rustfmt + components: ${{ matrix.config.deps.rust.components || '' }} - # selecting a toolchain either by action or manual `rustup` calls should happen - # before the cache plugin, as it uses the current rustc version as its cache key + - name: Install Rust nightly toolchain + if: ${{ matrix.config.deps.rust && matrix.config.deps.rust.nightly == true }} + uses: dtolnay/rust-toolchain@master + with: + toolchain: nightly + components: ${{ matrix.config.deps.rust.components || '' }} + + # selecting a toolchain either by action or manual `rustup` calls should happen + # before the cache plugin, as it uses the current rustc version as its cache key - uses: Swatinem/rust-cache@v2.0.1 continue-on-error: true with: - key: "${{ matrix.cargo_command }} ${{ matrix.args }} ${{ matrix.package }}" + key: "${{ matrix.config.cache_key }}" - name: Install Fuel Core - if: ${{ matrix.install_fuel_core }} + if: ${{ matrix.config.deps.fuel_core_binary }} run: | if [[ -n $FUEL_CORE_PATCH_BRANCH ]]; then cargo install --locked fuel-core-bin --git https://github.com/FuelLabs/fuel-core --branch "$FUEL_CORE_PATCH_BRANCH" @@ -200,60 +197,59 @@ jobs: mv fuel-core-${{ env.FUEL_CORE_VERSION }}-x86_64-unknown-linux-gnu/fuel-core /usr/local/bin/fuel-core fi - - name: Download sway example artifacts - if: ${{ matrix.download_sway_artifacts }} + - name: Download sway example artifacts (Normal) + if: ${{ matrix.config.deps.sway_artifacts == 'Normal' }} + uses: actions/download-artifact@v4 + with: + name: sway-examples + # Needed because `upload-artifact` will remove 'e2e/sway' because it is shared between all matched files + path: e2e/sway/ + + - name: Download sway example artifacts (TypePaths) + if: ${{ matrix.config.deps.sway_artifacts == 'TypePaths' }} uses: actions/download-artifact@v4 with: - name: ${{ matrix.download_sway_artifacts }} + name: sway-examples-w-type-paths # Needed because `upload-artifact` will remove 'e2e/sway' because it is shared between all matched files path: e2e/sway/ - name: Install nextest - if: ${{ matrix.cargo_command == 'nextest' }} + if: ${{ matrix.config.deps.cargo.nextest }} uses: taiki-e/install-action@nextest - name: Install cargo-machete - if: ${{ matrix.cargo_command == 'machete' }} + if: ${{ matrix.config.deps.cargo.machete }} uses: taiki-e/install-action@cargo-machete - - name: Cargo (workspace-level) - if: ${{ matrix.cargo_command && !matrix.package }} - run: cargo ${{ matrix.cargo_command }} ${{ matrix.args }} - - - name: Cargo (package-level) - if: ${{ matrix.cargo_command && matrix.package }} - run: cargo ${{ matrix.cargo_command }} -p ${{ matrix.package }} ${{ matrix.args }} - - name: Install NodeJS for WASM testing - if: ${{ matrix.command == 'test_wasm' }} + if: ${{ matrix.config.deps.wasm }} uses: actions/setup-node@v3 with: node-version: 18 - - name: Test WASM - if: ${{ matrix.command == 'test_wasm' }} + - name: Install wasm toolchain + if: ${{ matrix.config.deps.wasm }} run: | rustup target add wasm32-unknown-unknown curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh - cd wasm-tests - wasm-pack test --node - - name: Check that fuel_core version.rs file is up to date - if: ${{ matrix.command == 'check_fuel_core_version' }} - run: cargo run --bin fuel-core-version -- --manifest-path ./Cargo.toml verify + - name: Install typos tool + if: ${{ matrix.config.deps.typos_cli }} + run: | + curl -sSLf 'https://github.com/crate-ci/typos/releases/download/v1.21.0/typos-v1.21.0-x86_64-unknown-linux-musl.tar.gz' > 
typos.tar.gz + tar xf typos.tar.gz --wildcards "./typos" && rm typos.tar.gz + chmod +x typos + mv typos /usr/local/bin/typos - - name: Check for invalid documentation anchors - if: ${{ matrix.command == 'check_doc_anchors_valid' }} - run: cargo run --bin check-docs + - name: Download checks binary + uses: actions/download-artifact@v4 + with: + name: checks-binary - - name: Check for unresolved documentation links - if: ${{ matrix.command == 'check_doc_unresolved_links' }} + - name: Run checks run: | - ! cargo doc --document-private-items |& grep -A 6 "warning: unresolved link to" - - - name: Check for typos - if: ${{ matrix.command == 'check_typos' }} - uses: crate-ci/typos@v1.20.3 + chmod +x checks + ./checks --only-tasks-with-ids ${{ matrix.config.task_ids }} --root . --sway-type-paths publish: needs: diff --git a/Cargo.toml b/Cargo.toml index 089d3c85a..08252259d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,8 +24,7 @@ members = [ "packages/fuels-macros", "packages/fuels-programs", "packages/fuels-test-helpers", - "scripts/check-docs", - "scripts/fuel-core-version", + "scripts/checks", "scripts/versions-replacer", "wasm-tests", ] @@ -41,40 +40,53 @@ rust-version = "1.76.0" version = "0.62.0" [workspace.dependencies] -Inflector = "0.11.4" +Inflector = { version = "0.11.4", default-features = false } async-trait = { version = "0.1.74", default-features = false } -bech32 = "0.9.1" +bech32 = { version = "0.9.1", default-features = false } bytes = { version = "1.5.0", default-features = false } -chrono = "0.4.31" +chrono = { version = "0.4.31", default-features = false } +clap = { version = "4.5", default-features = false } elliptic-curve = { version = "0.13.8", default-features = false } -eth-keystore = "0.5.0" -fuel-abi-types = "0.5.0" -futures = "0.3.29" +eth-keystore = { version = "0.5.0", default-features = false } +fuel-abi-types = { version = "0.5.0", default-features = false } +futures = { version = "0.3.29", default-features = false } hex = { version = "0.4.3", default-features = false } -itertools = "0.12.0" -portpicker = "0.1.1" -proc-macro2 = "1.0.70" -quote = "1.0.33" -rand = { version = "0.8.5", default-features = false, features = [ - "std_rng", - "getrandom", -] } -regex = "1.10.2" -semver = "1.0.20" +itertools = { version = "0.12.0", default-features = false } +portpicker = { version = "0.1.1", default-features = false } +proc-macro2 = { version = "1.0.70", default-features = false } +quote = { version = "1.0.33", default-features = false } +rand = { version = "0.8.5", default-features = false } +regex = { version = "1.10.2", default-features = false } +semver = { version = "1.0.20", default-features = false } serde = { version = "1.0.193", default-features = false } -serde_json = "1.0.108" +serde_json = { version = "1.0.108", default-features = false } serde_with = { version = "3.4.0", default-features = false } sha2 = { version = "0.10.8", default-features = false } -syn = "2.0.39" +syn = { version = "2.0.39", default-features = false } tai64 = { version = "4.0.0", default-features = false } tempfile = { version = "3.8.1", default-features = false } thiserror = { version = "1.0.50", default-features = false } tokio = { version = "1.34.0", default-features = false } -tracing = "0.1.40" -trybuild = "1.0.85" +tracing = { version = "0.1.40", default-features = false } +trybuild = { version = "1.0.85", default-features = false } uint = { version = "0.9.5", default-features = false } which = { version = "6.0.0", default-features = false } -zeroize = "1.7.0" +zeroize = { 
version = "1.7.0", default-features = false } +pretty_assertions = { version = "1.4.0", default-features = false } +walkdir = { version = "2.5", default-features = false } +tokio-util = { version = "0.7", default-features = false } +nix = { version = "0.28", default-features = false } +anyhow = { version = "1.0", default-features = false } +getrandom = { version = "0.2", default-features = false } +wasm-bindgen-test = { version = "0.3", default-features = false } +serde_yaml = { version = "0.9", default-features = false } +colored = { version = "2.1", default-features = false } +duct = { version = "0.13", default-features = false } +toml = { version = "0.8", default-features = false } +once_cell = { version = "1.19", default-features = false } +color-eyre = { version = "0.6", default-features = false } +cargo_metadata = { version = "0.18", default-features = false } +argh = { version = "0.1", default-features = false } # Dependencies from the `fuel-core` repository: fuel-core = { version = "0.26.0", default-features = false } @@ -85,13 +97,13 @@ fuel-core-services = { version = "0.26.0", default-features = false } fuel-core-types = { version = "0.26.0", default-features = false } # Dependencies from the `fuel-vm` repository: -fuel-asm = { version = "0.49.0" } -fuel-crypto = { version = "0.49.0" } -fuel-merkle = { version = "0.49.0" } -fuel-storage = { version = "0.49.0" } -fuel-tx = { version = "0.49.0" } -fuel-types = { version = "0.49.0" } -fuel-vm = { version = "0.49.0" } +fuel-asm = { version = "0.49.0", default-features = false } +fuel-crypto = { version = "0.49.0", default-features = false } +fuel-merkle = { version = "0.49.0", default-features = false } +fuel-storage = { version = "0.49.0", default-features = false } +fuel-tx = { version = "0.49.0", default-features = false } +fuel-types = { version = "0.49.0", default-features = false } +fuel-vm = { version = "0.49.0", default-features = false } # Workspace projects fuels = { version = "0.62.0", path = "./packages/fuels", default-features = false } diff --git a/e2e/Cargo.toml b/e2e/Cargo.toml index 8a2d180fe..c3f51ede1 100644 --- a/e2e/Cargo.toml +++ b/e2e/Cargo.toml @@ -15,7 +15,7 @@ publish = false chrono = { workspace = true } # TODO: [issue](https://github.com/FuelLabs/fuels-rs/issues/1375) needs to be removed, `ScriptTransaction` and `CreateTransaction` in `fuels` use `fuel_tx::Input` but don't reexport or convert it into a `fuels` owned type fuel-tx = { workspace = true } -fuels = { workspace = true } +fuels = { workspace = true, features = ["test-helpers", "std"] } # TODO: [issue](https://github.com/FuelLabs/fuels-rs/issues/1376) because setup-program-test uses it but `fuels` did not reexport `rand` rand = { workspace = true } # used in test assertions diff --git a/examples/codec/Cargo.toml b/examples/codec/Cargo.toml index a97f79f13..4d0cdd230 100644 --- a/examples/codec/Cargo.toml +++ b/examples/codec/Cargo.toml @@ -10,5 +10,7 @@ repository = { workspace = true } description = "Fuel Rust SDK codec examples." [dev-dependencies] -fuels = { workspace = true } -tokio = { workspace = true, features = ["full"] } +fuels = { workspace = true, features = [ + "std", +] } +tokio = { workspace = true, features = ["macros"] } diff --git a/examples/contracts/Cargo.toml b/examples/contracts/Cargo.toml index 0a02a9290..ac5c29cd4 100644 --- a/examples/contracts/Cargo.toml +++ b/examples/contracts/Cargo.toml @@ -10,9 +10,13 @@ repository = { workspace = true } description = "Fuel Rust SDK contract examples." 
[dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = [ + "std", + "test-helpers", + "fuel-core-lib", +] } rand = { workspace = true } -tokio = { workspace = true, features = ["full"] } +tokio = { workspace = true, features = ["macros"] } [features] fuel-core-lib = ["fuels/fuel-core-lib"] diff --git a/examples/cookbook/Cargo.toml b/examples/cookbook/Cargo.toml index bdd360baf..d53cf3fb1 100644 --- a/examples/cookbook/Cargo.toml +++ b/examples/cookbook/Cargo.toml @@ -10,7 +10,11 @@ repository = { workspace = true } description = "Fuel Rust SDK cookbook examples." [dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = [ + "std", + "test-helpers", + "fuel-core-lib", +] } rand = { workspace = true } tokio = { workspace = true, features = ["full"] } diff --git a/examples/debugging/Cargo.toml b/examples/debugging/Cargo.toml index 68d178083..0adbb35d0 100644 --- a/examples/debugging/Cargo.toml +++ b/examples/debugging/Cargo.toml @@ -11,7 +11,7 @@ description = "Fuel Rust SDK debugging examples." [dev-dependencies] fuel-abi-types = { workspace = true } -fuels = { workspace = true } +fuels = { workspace = true, features = ["std"] } rand = { workspace = true } serde_json = { workspace = true } tokio = { workspace = true, features = ["full"] } diff --git a/examples/macros/Cargo.toml b/examples/macros/Cargo.toml index 6be83c5d0..d21e385b6 100644 --- a/examples/macros/Cargo.toml +++ b/examples/macros/Cargo.toml @@ -10,5 +10,5 @@ repository = { workspace = true } description = "Fuel Rust SDK macro examples." [dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = ["std"] } tokio = { workspace = true, features = ["full"] } diff --git a/examples/predicates/Cargo.toml b/examples/predicates/Cargo.toml index 0785a218f..f104b8c2a 100644 --- a/examples/predicates/Cargo.toml +++ b/examples/predicates/Cargo.toml @@ -10,6 +10,10 @@ repository = { workspace = true } description = "Fuel Rust SDK predicate examples." [dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = [ + "std", + "test-helpers", + "fuel-core-lib", +] } rand = { workspace = true } tokio = { workspace = true, features = ["full"] } diff --git a/examples/providers/Cargo.toml b/examples/providers/Cargo.toml index 08843ee8e..50eb174fb 100644 --- a/examples/providers/Cargo.toml +++ b/examples/providers/Cargo.toml @@ -10,6 +10,10 @@ repository = { workspace = true } description = "Fuel Rust SDK provider examples." 
[dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = [ + "std", + "test-helpers", + "fuel-core-lib", +] } rand = { workspace = true } tokio = { workspace = true, features = ["full"] } diff --git a/examples/rust_bindings/Cargo.toml b/examples/rust_bindings/Cargo.toml index 5228d46b6..1a826f539 100644 --- a/examples/rust_bindings/Cargo.toml +++ b/examples/rust_bindings/Cargo.toml @@ -10,8 +10,7 @@ repository = { workspace = true } description = "Fuel Rust SDK examples for Rust-native bindings" [dev-dependencies] -fuels = { workspace = true } -fuels-code-gen = { workspace = true } +fuels = { workspace = true, features = ["std"] } fuels-macros = { workspace = true } proc-macro2 = { workspace = true } rand = { workspace = true } diff --git a/examples/rust_bindings/src/lib.rs b/examples/rust_bindings/src/lib.rs index 4402e8ed8..47e1c0c87 100644 --- a/examples/rust_bindings/src/lib.rs +++ b/examples/rust_bindings/src/lib.rs @@ -5,8 +5,6 @@ mod tests { #[tokio::test] #[allow(unused_variables)] async fn transform_json_to_bindings() -> Result<()> { - use fuels::test_helpers::launch_provider_and_get_wallet; - let wallet = launch_provider_and_get_wallet().await?; { // ANCHOR: use_abigen use fuels::prelude::*; diff --git a/examples/types/Cargo.toml b/examples/types/Cargo.toml index 74c688adc..e6501343c 100644 --- a/examples/types/Cargo.toml +++ b/examples/types/Cargo.toml @@ -10,6 +10,6 @@ repository = { workspace = true } description = "Fuel Rust SDK types examples." [dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = ["std"] } rand = { workspace = true } -tokio = { workspace = true, features = ["full"] } +tokio = { workspace = true, features = ["macros"] } diff --git a/examples/wallets/Cargo.toml b/examples/wallets/Cargo.toml index 5fa6ad736..1442c5fe3 100644 --- a/examples/wallets/Cargo.toml +++ b/examples/wallets/Cargo.toml @@ -10,6 +10,10 @@ repository = { workspace = true } description = "Fuel Rust SDK wallet examples." [dev-dependencies] -fuels = { workspace = true } +fuels = { workspace = true, features = [ + "std", + "test-helpers", + "fuel-core-lib", +] } rand = { workspace = true } -tokio = { workspace = true, features = ["full"] } +tokio = { workspace = true, features = ["macros"] } diff --git a/packages/fuels-accounts/Cargo.toml b/packages/fuels-accounts/Cargo.toml index 1c6b01d22..917731b47 100644 --- a/packages/fuels-accounts/Cargo.toml +++ b/packages/fuels-accounts/Cargo.toml @@ -9,35 +9,54 @@ repository = { workspace = true } rust-version = { workspace = true } description = "Fuel Rust SDK accounts." 
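The fuels-accounts manifest below turns most dependencies into optional ones and re-enables them from the `std` feature via `dep:`; the crate also adds itself as a dev-dependency so its own tests always compile with `std` and `coin-cache` enabled. Code that touches those optional dependencies is then compiled conditionally, as `Predicate::load_from` is further down. A minimal, hypothetical sketch of that gating pattern (names are illustrative, not the crate's API):

```rust
#[cfg(feature = "std")]
use std::fs;

pub struct Predicate {
    code: Vec<u8>,
}

impl Predicate {
    // Always available: no std-only optional dependency involved.
    pub fn code(&self) -> &[u8] {
        &self.code
    }

    // Compiled only when the `std` feature (and the `dep:`s it enables) is present.
    #[cfg(feature = "std")]
    pub fn load_from(file_path: &str) -> std::io::Result<Self> {
        Ok(Self { code: fs::read(file_path)? })
    }
}
```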
+[package.metadata.cargo-udeps.ignore] +development = ["fuels-accounts", "tokio"] + [dependencies] -async-trait = { workspace = true, default-features = false } -chrono = { workspace = true } -elliptic-curve = { workspace = true, default-features = false } +async-trait = { workspace = true, optional = true } +chrono = { workspace = true, optional = true } +elliptic-curve = { workspace = true, optional = true } eth-keystore = { workspace = true, optional = true } fuel-core-client = { workspace = true, optional = true } -fuel-core-types = { workspace = true } -fuel-crypto = { workspace = true, features = ["random"] } +fuel-core-types = { workspace = true, optional = true } +fuel-crypto = { workspace = true, optional = true } fuel-tx = { workspace = true } -fuel-types = { workspace = true, features = ["random"] } -fuels-core = { workspace = true, default-features = false } -rand = { workspace = true, default-features = false } -semver = { workspace = true } -tai64 = { workspace = true, features = ["serde"] } -thiserror = { workspace = true, default-features = false } -tokio = { workspace = true, features = ["full"], optional = true } -zeroize = { workspace = true, features = ["derive"] } +fuel-types = { workspace = true, optional = true } +fuels-core = { workspace = true } +rand = { workspace = true, optional = true } +semver = { workspace = true, optional = true } +tai64 = { workspace = true, optional = true } +thiserror = { workspace = true, optional = true } +tokio = { workspace = true, optional = true } +zeroize = { workspace = true, optional = true } [dev-dependencies] fuel-tx = { workspace = true, features = ["test-helpers", "random"] } +fuels-accounts = { workspace = true, features = ["std", "coin-cache"] } tempfile = { workspace = true } -tokio = { workspace = true, features = ["test-util"] } +tokio = { workspace = true, features = ["test-util", "macros"] } [features] default = ["std"] -coin-cache = ["tokio?/time"] +coin-cache = ["tokio/time", "dep:fuel-types"] std = [ "fuels-core/std", - "dep:tokio", "fuel-core-client/default", + "fuel-crypto/std", + "fuel-crypto/random", "dep:eth-keystore", + "dep:chrono", + "dep:elliptic-curve", + "dep:eth-keystore", + "dep:fuel-core-client", + "dep:fuel-core-types", + "dep:fuel-crypto", + "dep:fuel-types", + "dep:rand", + "dep:semver", + "dep:tai64", + "dep:thiserror", + "dep:tokio", + "dep:zeroize", + "dep:async-trait", ] diff --git a/packages/fuels-accounts/src/lib.rs b/packages/fuels-accounts/src/lib.rs index 94612bf50..1acc234a4 100644 --- a/packages/fuels-accounts/src/lib.rs +++ b/packages/fuels-accounts/src/lib.rs @@ -10,7 +10,7 @@ pub mod wallet; #[cfg(feature = "std")] pub use account::*; -#[cfg(feature = "coin-cache")] +#[cfg(all(feature = "std", feature = "coin-cache"))] mod coin_cache; pub mod predicate; diff --git a/packages/fuels-accounts/src/predicate.rs b/packages/fuels-accounts/src/predicate.rs index d27c7d18a..8a6675bb0 100644 --- a/packages/fuels-accounts/src/predicate.rs +++ b/packages/fuels-accounts/src/predicate.rs @@ -1,12 +1,8 @@ -use std::{fmt::Debug, fs}; +use std::fmt::Debug; #[cfg(feature = "std")] -use fuels_core::types::{input::Input, AssetId}; -use fuels_core::{ - error, - types::{bech32::Bech32Address, errors::Result}, - Configurables, -}; +use fuels_core::types::{errors::Result, input::Input, AssetId}; +use fuels_core::{types::bech32::Bech32Address, Configurables}; #[cfg(feature = "std")] use crate::accounts_utils::try_provider_error; @@ -39,9 +35,10 @@ impl Predicate { fuel_tx::Input::predicate_owner(code).into() } 
+ #[cfg(feature = "std")] pub fn load_from(file_path: &str) -> Result { - let code = fs::read(file_path).map_err(|e| { - error!( + let code = std::fs::read(file_path).map_err(|e| { + fuels_core::error!( IO, "could not read predicate binary {file_path:?}. Reason: {e}" ) diff --git a/packages/fuels-accounts/src/provider.rs b/packages/fuels-accounts/src/provider.rs index 761545e5e..988a0fcdc 100644 --- a/packages/fuels-accounts/src/provider.rs +++ b/packages/fuels-accounts/src/provider.rs @@ -3,6 +3,7 @@ use std::{collections::HashMap, fmt::Debug, net::SocketAddr}; mod retry_util; mod retryable_client; mod supported_versions; +mod version; #[cfg(feature = "coin-cache")] use std::sync::Arc; diff --git a/packages/fuels-accounts/src/provider/supported_versions.rs b/packages/fuels-accounts/src/provider/supported_versions.rs index da20a4b8f..957d91169 100644 --- a/packages/fuels-accounts/src/provider/supported_versions.rs +++ b/packages/fuels-accounts/src/provider/supported_versions.rs @@ -1,7 +1,6 @@ use semver::Version; -pub const SUPPORTED_FUEL_CORE_VERSION: Version = - include!("../../../../scripts/fuel-core-version/version.rs"); +use super::version::SUPPORTED_FUEL_CORE_VERSION; #[derive(Debug, PartialEq, Eq)] pub(crate) struct VersionCompatibility { diff --git a/packages/fuels-accounts/src/provider/version.rs b/packages/fuels-accounts/src/provider/version.rs new file mode 100644 index 000000000..bf74373d8 --- /dev/null +++ b/packages/fuels-accounts/src/provider/version.rs @@ -0,0 +1 @@ +pub(crate) const SUPPORTED_FUEL_CORE_VERSION: ::semver::Version = ::semver::Version::new(0, 26, 0); diff --git a/packages/fuels-code-gen/Cargo.toml b/packages/fuels-code-gen/Cargo.toml index 8eab6bea4..23da1f880 100644 --- a/packages/fuels-code-gen/Cargo.toml +++ b/packages/fuels-code-gen/Cargo.toml @@ -12,7 +12,7 @@ description = "Used for code generation in the Fuel Rust SDK" [dependencies] Inflector = { workspace = true } fuel-abi-types = { workspace = true } -itertools = { workspace = true } +itertools = { workspace = true, features = ["use_std"] } proc-macro2 = { workspace = true } quote = { workspace = true } regex = { workspace = true } @@ -20,7 +20,7 @@ serde_json = { workspace = true } syn = { workspace = true } [dev-dependencies] -pretty_assertions = "1.4.0" +pretty_assertions = { workspace = true, features = ["alloc"] } [package.metadata.cargo-machete] ignored = ["Inflector"] diff --git a/packages/fuels-code-gen/src/program_bindings/abigen/bindings/contract.rs b/packages/fuels-code-gen/src/program_bindings/abigen/bindings/contract.rs index 77b4481ab..8367832d2 100644 --- a/packages/fuels-code-gen/src/program_bindings/abigen/bindings/contract.rs +++ b/packages/fuels-code-gen/src/program_bindings/abigen/bindings/contract.rs @@ -148,9 +148,6 @@ fn expand_functions(functions: &[FullABIFunction]) -> Result { /// Transforms a function defined in [`FullABIFunction`] into a [`TokenStream`] /// that represents that same function signature as a Rust-native function /// declaration. -/// -/// The generated function prepares the necessary data and proceeds to call -/// [::fuels_contract::contract::method_hash] for the actual call. 
pub(crate) fn expand_fn(abi_fun: &FullABIFunction) -> Result { let mut generator = FunctionGenerator::new(abi_fun)?; diff --git a/packages/fuels-core/Cargo.toml b/packages/fuels-core/Cargo.toml index 7c33da1f2..f13d314d9 100644 --- a/packages/fuels-core/Cargo.toml +++ b/packages/fuels-core/Cargo.toml @@ -10,30 +10,40 @@ rust-version = { workspace = true } description = "Fuel Rust SDK core." [dependencies] -async-trait = { workspace = true, default-features = false } +async-trait = { workspace = true } bech32 = { workspace = true } -chrono = { workspace = true } +chrono = { workspace = true, optional = true } fuel-abi-types = { workspace = true } fuel-asm = { workspace = true } -fuel-core-chain-config = { workspace = true } +fuel-core-chain-config = { workspace = true, optional = true } fuel-core-client = { workspace = true, optional = true } -fuel-core-types = { workspace = true } +fuel-core-types = { workspace = true, optional = true } fuel-crypto = { workspace = true } -fuel-tx = { workspace = true } -fuel-types = { workspace = true, features = ["default"] } -fuel-vm = { workspace = true } +fuel-tx = { workspace = true, features = ["alloc"] } +fuel-types = { workspace = true } +fuel-vm = { workspace = true, features = ["alloc"] } fuels-macros = { workspace = true } -hex = { workspace = true, features = ["std"] } -itertools = { workspace = true } +hex = { workspace = true } +itertools = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, default-features = true } -thiserror = { workspace = true, default-features = false } -uint = { workspace = true, default-features = false } +serde_json = { workspace = true } +thiserror = { workspace = true } +uint = { workspace = true } [dev-dependencies] fuel-tx = { workspace = true, features = ["test-helpers", "random"] } +fuels-core = { workspace = true, features = ["std"] } tokio = { workspace = true, features = ["test-util", "macros"] } [features] default = ["std"] -std = ["dep:fuel-core-client"] +std = [ + "dep:fuel-core-client", + "fuel-crypto/std", + "hex/std", + "itertools/use_std", + "dep:chrono", + "dep:fuel-core-chain-config", + "dep:fuel-core-types", + "dep:itertools", +] diff --git a/packages/fuels-core/src/types/errors.rs b/packages/fuels-core/src/types/errors.rs index 34812a7d9..86dc59997 100644 --- a/packages/fuels-core/src/types/errors.rs +++ b/packages/fuels-core/src/types/errors.rs @@ -95,6 +95,7 @@ macro_rules! 
impl_error_from { impl_error_from!(Other, &'static str); impl_error_from!(Other, bech32::Error); +#[cfg(feature = "std")] impl_error_from!(Other, fuel_crypto::Error); impl_error_from!(Other, serde_json::Error); impl_error_from!(Other, FromHexError); diff --git a/packages/fuels-core/src/types/wrappers.rs b/packages/fuels-core/src/types/wrappers.rs index 90b98152c..152769a8a 100644 --- a/packages/fuels-core/src/types/wrappers.rs +++ b/packages/fuels-core/src/types/wrappers.rs @@ -7,6 +7,7 @@ pub mod input; pub mod message; pub mod message_proof; pub mod node_info; +#[cfg(feature = "std")] pub mod transaction; pub mod transaction_response; pub mod output { diff --git a/packages/fuels-core/src/utils.rs b/packages/fuels-core/src/utils.rs index 788293d32..b74f2492f 100644 --- a/packages/fuels-core/src/utils.rs +++ b/packages/fuels-core/src/utils.rs @@ -1,8 +1,7 @@ pub mod constants; pub mod offsets; -use constants::{WITNESS_STATIC_SIZE, WORD_SIZE}; -use fuel_tx::Witness; +use constants::WORD_SIZE; use crate::{error, types::errors::Result}; @@ -26,15 +25,18 @@ pub fn checked_round_up_to_word_alignment(bytes_len: usize) -> Result { ) }) } -pub(crate) fn calculate_witnesses_size<'a, I: IntoIterator>( + +#[cfg(feature = "std")] +pub(crate) fn calculate_witnesses_size<'a, I: IntoIterator>( witnesses: I, ) -> usize { witnesses .into_iter() - .map(|w| w.as_ref().len() + WITNESS_STATIC_SIZE) + .map(|w| w.as_ref().len() + constants::WITNESS_STATIC_SIZE) .sum() } +#[cfg(feature = "std")] pub(crate) mod sealed { pub trait Sealed {} } diff --git a/packages/fuels-macros/Cargo.toml b/packages/fuels-macros/Cargo.toml index 1df44598d..8160ef5c7 100644 --- a/packages/fuels-macros/Cargo.toml +++ b/packages/fuels-macros/Cargo.toml @@ -17,9 +17,7 @@ fuels-code-gen = { workspace = true } itertools = { workspace = true } proc-macro2 = { workspace = true } quote = { workspace = true } -rand = { workspace = true } syn = { workspace = true, features = ["extra-traits"] } [dev-dependencies] trybuild = { workspace = true } - diff --git a/packages/fuels-programs/Cargo.toml b/packages/fuels-programs/Cargo.toml index 3799f8b08..a94f657ca 100644 --- a/packages/fuels-programs/Cargo.toml +++ b/packages/fuels-programs/Cargo.toml @@ -10,21 +10,31 @@ rust-version = { workspace = true } description = "Fuel Rust SDK contracts." 
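fuels-programs gets the same optional-dependency treatment below, and its call-building helpers switch from returning collected `Vec`s to returning iterators that the call sites chain lazily (see `generate_contract_outputs`/`generate_contract_inputs` and `script_calls.rs` further down). A small self-contained sketch of that pattern, with hypothetical numeric stand-ins for the fuel-tx types:

```rust
// Stand-in for Output::contract(idx, ..): the helper no longer collects eagerly.
fn contract_outputs(n: usize) -> impl Iterator<Item = u64> {
    (0..n).map(|idx| idx as u64)
}

// The caller chains the extra outputs and collects exactly once.
fn build_outputs(extra: Vec<u64>) -> Vec<u64> {
    contract_outputs(3).chain(extra).collect()
}

fn main() {
    assert_eq!(build_outputs(vec![10, 20]), vec![0, 1, 2, 10, 20]);
}
```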
[dependencies] -async-trait = { workspace = true, default-features = false } -fuel-abi-types = { workspace = true } -fuel-asm = { workspace = true } +async-trait = { workspace = true, optional = true } +fuel-abi-types = { workspace = true, optional = true } +fuel-asm = { workspace = true, optional = true } fuel-tx = { workspace = true } -fuel-types = { workspace = true, features = ["default"] } -fuels-accounts = { workspace = true } +fuel-types = { workspace = true, optional = true } +fuels-accounts = { workspace = true, optional = true } fuels-core = { workspace = true } -itertools = { workspace = true } -rand = { workspace = true } -serde_json = { workspace = true } -tokio = { workspace = true } +itertools = { workspace = true, features = ["use_std"], optional = true } +serde_json = { workspace = true, optional = true } [dev-dependencies] -tempfile = "3.8.1" +fuels-programs = { workspace = true, features = ["std"] } +rand = { workspace = true } +tempfile = { workspace = true } +tokio = { workspace = true, features = ["macros", "test-util"] } [features] default = ["std"] -std = ["fuels-core/std", "fuels-accounts/std"] +std = [ + "fuels-core/std", + "fuels-accounts/std", + "dep:itertools", + "dep:serde_json", + "dep:async-trait", + "dep:fuel-abi-types", + "dep:fuel-asm", + "dep:fuel-types", +] diff --git a/packages/fuels-programs/src/call_utils.rs b/packages/fuels-programs/src/call_utils.rs index b26576053..dba483f8d 100644 --- a/packages/fuels-programs/src/call_utils.rs +++ b/packages/fuels-programs/src/call_utils.rs @@ -144,7 +144,7 @@ pub(crate) async fn transaction_builder_from_contract_calls( .with_outputs(outputs)) } -/// Creates a [`ScriptTransaction`] from contract calls. The internal [Transaction] is +/// Creates a [`ScriptTransaction`] from contract calls. The internal [`fuel_tx::Transaction`] is /// initialized with the actual script instructions, script data needed to perform the call and /// transaction inputs/outputs consisting of assets and contracts. 
pub(crate) async fn build_tx_from_contract_calls( @@ -320,7 +320,7 @@ pub(crate) fn build_script_data_from_contract_calls( } /// Returns the VM instructions for calling a contract method -/// We use the [`Opcode`] to call a contract: [`CALL`](Opcode::CALL) +/// We use the [`fuel_asm::Opcode`] to call a contract: [`CALL`](fuel_asm::Opcode::CALL) /// pointing at the following registers: /// /// 0x10 Script data offset @@ -455,13 +455,14 @@ fn generate_asset_change_outputs( .collect() } -pub(crate) fn generate_contract_outputs(num_of_contracts: usize) -> Vec { +pub(crate) fn generate_contract_outputs(num_of_contracts: usize) -> impl Iterator { (0..num_of_contracts) .map(|idx| Output::contract(idx as u16, Bytes32::zeroed(), Bytes32::zeroed())) - .collect() } -pub(crate) fn generate_contract_inputs(contract_ids: HashSet) -> Vec { +pub(crate) fn generate_contract_inputs( + contract_ids: HashSet, +) -> impl Iterator { contract_ids .into_iter() .enumerate() @@ -474,7 +475,6 @@ pub(crate) fn generate_contract_inputs(contract_ids: HashSet) -> Vec contract_id, ) }) - .collect() } fn extract_unique_contract_ids(calls: &[ContractCall]) -> HashSet { diff --git a/packages/fuels-programs/src/contract.rs b/packages/fuels-programs/src/contract.rs index e49cb62e6..3be7da775 100644 --- a/packages/fuels-programs/src/contract.rs +++ b/packages/fuels-programs/src/contract.rs @@ -135,7 +135,7 @@ impl StorageConfiguration { self.autoload_storage } - /// Slots added via [`add_slot_overrides`] will override any + /// Slots added via [`Self::add_slot_overrides`] will override any /// existing slots with matching keys. pub fn add_slot_overrides( mut self, @@ -145,7 +145,7 @@ impl StorageConfiguration { self } - /// Slots added via [`add_slot_overrides_from_file`] will override any + /// Slots added via [`Self::add_slot_overrides_from_file`] will override any /// existing slots with matching keys. /// /// `path` - path to a JSON file containing the storage slots. diff --git a/packages/fuels-programs/src/lib.rs b/packages/fuels-programs/src/lib.rs index 5a4aa4a9c..899358e94 100644 --- a/packages/fuels-programs/src/lib.rs +++ b/packages/fuels-programs/src/lib.rs @@ -1,6 +1,10 @@ pub mod call_response; +#[cfg(feature = "std")] pub mod call_utils; +#[cfg(feature = "std")] pub mod contract; pub mod receipt_parser; +#[cfg(feature = "std")] pub mod script_calls; +#[cfg(feature = "std")] mod submit_response; diff --git a/packages/fuels-programs/src/script_calls.rs b/packages/fuels-programs/src/script_calls.rs index 735319125..a3966d84e 100644 --- a/packages/fuels-programs/src/script_calls.rs +++ b/packages/fuels-programs/src/script_calls.rs @@ -20,7 +20,6 @@ use fuels_core::{ tx_status::TxStatus, }, }; -use itertools::chain; use crate::{ call_response::FuelCallResponse, @@ -176,22 +175,18 @@ where .collect(); let num_of_contracts = contract_ids.len(); - let inputs = chain!( - generate_contract_inputs(contract_ids), - self.script_call.inputs.clone(), - ) - .collect(); + let inputs = generate_contract_inputs(contract_ids) + .chain(self.script_call.inputs.clone()) + .collect(); // Note the contract_outputs need to come first since the // contract_inputs are referencing them via `output_index`. The node // will, upon receiving our request, use `output_index` to index the // `inputs` array we've sent over. 
- let outputs = chain!( - generate_contract_outputs(num_of_contracts), - self.script_call.outputs.clone(), - self.script_call.variable_outputs.clone(), - ) - .collect(); + let outputs = generate_contract_outputs(num_of_contracts) + .chain(self.script_call.outputs.clone()) + .chain(self.script_call.variable_outputs.clone()) + .collect(); Ok((inputs, outputs)) } diff --git a/packages/fuels-programs/src/submit_response.rs b/packages/fuels-programs/src/submit_response.rs index 13d0b332b..61e971363 100644 --- a/packages/fuels-programs/src/submit_response.rs +++ b/packages/fuels-programs/src/submit_response.rs @@ -30,8 +30,6 @@ use crate::{ /// - `retry_config`: The retry configuration for the transaction. /// - `tx_id`: The optional transaction ID of the submitted transaction. /// - `call_handler`: The call handler that manages the type of call. -/// -/// ``` #[derive(Debug)] pub struct SubmitResponse { tx_id: Bytes32, diff --git a/packages/fuels-test-helpers/Cargo.toml b/packages/fuels-test-helpers/Cargo.toml index 844fb7c0d..d06ee5cd0 100644 --- a/packages/fuels-test-helpers/Cargo.toml +++ b/packages/fuels-test-helpers/Cargo.toml @@ -10,24 +10,33 @@ rust-version = { workspace = true } description = "Fuel Rust SDK test helpers." [dependencies] -fuel-core = { workspace = true, default-features = false, features = ["test-helpers"], optional = true } +fuel-core = { workspace = true, features = ["test-helpers"], optional = true } fuel-core-chain-config = { workspace = true, features = ["test-helpers"] } fuel-core-client = { workspace = true } -fuel-core-poa = { workspace = true } +fuel-core-poa = { workspace = true, optional = true } fuel-core-services = { workspace = true } fuel-crypto = { workspace = true } fuel-tx = { workspace = true } -fuel-types = { workspace = true, features = ["random"] } -fuels-accounts = { workspace = true, optional = true } +fuel-types = { workspace = true } +fuels-accounts = { workspace = true } fuels-core = { workspace = true } futures = { workspace = true } portpicker = { workspace = true } -rand = { workspace = true, default-features = false } -tempfile = { workspace = true, default-features = false } -tokio = { workspace = true, default-features = false } -which = { workspace = true, default-features = false } +rand = { workspace = true } +tempfile = { workspace = true } +tokio = { workspace = true } +which = { workspace = true } + +[dev-dependencies] +fuels-test-helpers = { workspace = true, features = ["std"] } [features] -default = ["fuels-accounts", "std"] -std = ["fuels-accounts?/std", "fuels-core/std", "fuel-core-chain-config/std"] -fuel-core-lib = ["dep:fuel-core"] +default = ["std"] +std = [ + "fuels-accounts/std", + "fuels-core/std", + "fuel-core-chain-config/std", + "fuel-types/random", + "fuel-core-chain-config/std", +] +fuel-core-lib = ["dep:fuel-core", "dep:fuel-core-poa"] diff --git a/packages/fuels-test-helpers/src/accounts.rs b/packages/fuels-test-helpers/src/accounts.rs index 1dc9e99e6..716247fe4 100644 --- a/packages/fuels-test-helpers/src/accounts.rs +++ b/packages/fuels-test-helpers/src/accounts.rs @@ -14,7 +14,7 @@ use crate::{ /// The provider and the wallets are instantiated with the default configs. /// For more configurable options, see the `launch_custom_provider_and_get_wallets` function. 
/// # Examples -/// ``` +/// ```rust /// use fuels_test_helpers::launch_provider_and_get_wallet; /// /// async fn single_wallet() -> Result<(), Box> { @@ -34,7 +34,7 @@ pub async fn launch_provider_and_get_wallet() -> Result { /// Launches a custom node and provider, along with a configurable number of wallets. /// /// # Examples -/// ``` +/// ```rust /// use fuels_test_helpers::launch_custom_provider_and_get_wallets; /// use fuels_test_helpers::WalletsConfig; /// diff --git a/packages/fuels-test-helpers/src/lib.rs b/packages/fuels-test-helpers/src/lib.rs index 3fc19a095..6ebcfea7e 100644 --- a/packages/fuels-test-helpers/src/lib.rs +++ b/packages/fuels-test-helpers/src/lib.rs @@ -1,178 +1,37 @@ //! Testing helpers/utilities for Fuel SDK. -extern crate core; - -#[cfg(feature = "fuels-accounts")] +#[cfg(feature = "std")] pub use accounts::*; -use fuel_tx::{Bytes32, ConsensusParameters, ContractParameters, TxParameters, UtxoId}; -use fuel_types::{AssetId, Nonce}; -use fuels_accounts::provider::Provider; -use fuels_core::types::{ - bech32::Bech32Address, - coin::{Coin, CoinStatus}, - errors::Result, - message::{Message, MessageStatus}, -}; pub use node_types::*; -use rand::Fill; -use utils::{into_coin_configs, into_message_configs}; +#[cfg(feature = "std")] +pub use service::*; +#[cfg(feature = "std")] +pub use utils::*; pub use wallets_config::*; -mod node_types; - -#[cfg(not(feature = "fuel-core-lib"))] -pub(crate) mod fuel_bin_service; -#[cfg(feature = "fuels-accounts")] +#[cfg(feature = "std")] mod accounts; - -pub use service::*; +#[cfg(all(not(feature = "fuel-core-lib"), feature = "std"))] +pub(crate) mod fuel_bin_service; +mod node_types; +#[cfg(feature = "std")] mod service; - +#[cfg(feature = "std")] mod utils; mod wallets_config; -/// Create a vector of `num_asset`*`coins_per_asset` UTXOs and a vector of the unique corresponding -/// asset IDs. `AssetId`. Each UTXO (=coin) contains `amount_per_coin` amount of a random asset. The -/// output of this function can be used with `setup_test_provider` to get a client with some -/// pre-existing coins, with `num_asset` different asset ids. Note that one of the assets is the -/// base asset to pay for gas. -pub fn setup_multiple_assets_coins( - owner: &Bech32Address, - num_asset: u64, - coins_per_asset: u64, - amount_per_coin: u64, -) -> (Vec, Vec) { - let mut rng = rand::thread_rng(); - // Create `num_asset-1` asset ids so there is `num_asset` in total with the base asset - let asset_ids = (0..(num_asset - 1)) - .map(|_| { - let mut random_asset_id = AssetId::zeroed(); - random_asset_id - .try_fill(&mut rng) - .expect("failed to fill with random data"); - random_asset_id - }) - .chain([AssetId::zeroed()]) - .collect::>(); - - let coins = asset_ids - .iter() - .flat_map(|id| setup_single_asset_coins(owner, *id, coins_per_asset, amount_per_coin)) - .collect::>(); - - (coins, asset_ids) -} - -/// Create a vector of UTXOs with the provided AssetIds, num_coins, and amount_per_coin -pub fn setup_custom_assets_coins(owner: &Bech32Address, assets: &[AssetConfig]) -> Vec { - let coins = assets - .iter() - .flat_map(|asset| { - setup_single_asset_coins(owner, asset.id, asset.num_coins, asset.coin_amount) - }) - .collect::>(); - coins -} - -/// Create a vector of `num_coins` UTXOs containing `amount_per_coin` amount of asset `asset_id`. -/// The output of this function can be used with `setup_test_provider` to get a client with some -/// pre-existing coins, but with only one asset ID. 
-pub fn setup_single_asset_coins( - owner: &Bech32Address, - asset_id: AssetId, - num_coins: u64, - amount_per_coin: u64, -) -> Vec { - let mut rng = rand::thread_rng(); - - let coins: Vec = (1..=num_coins) - .map(|_i| { - let mut r = Bytes32::zeroed(); - r.try_fill(&mut rng) - .expect("failed to fill with random data"); - let utxo_id = UtxoId::new(r, 0); - - Coin { - owner: owner.clone(), - utxo_id, - amount: amount_per_coin, - asset_id, - status: CoinStatus::Unspent, - block_created: Default::default(), - } - }) - .collect(); - - coins -} - -pub fn setup_single_message( - sender: &Bech32Address, - recipient: &Bech32Address, - amount: u64, - nonce: Nonce, - data: Vec, -) -> Message { - Message { - sender: sender.clone(), - recipient: recipient.clone(), - nonce, - amount, - data, - da_height: 0, - status: MessageStatus::Unspent, - } -} - -pub async fn setup_test_provider( - coins: Vec, - messages: Vec, - node_config: Option, - chain_config: Option, -) -> Result { - let node_config = node_config.unwrap_or_default(); - let chain_config = chain_config.unwrap_or_else(testnet_chain_config); - - let coin_configs = into_coin_configs(coins); - let message_configs = into_message_configs(messages); - - let state_config = StateConfig { - coins: coin_configs, - messages: message_configs, - ..StateConfig::local_testnet() - }; - - let srv = FuelService::start(node_config, chain_config, state_config).await?; - - let address = srv.bound_address(); - - tokio::spawn(async move { - let _own_the_handle = srv; - let () = futures::future::pending().await; - }); - - Provider::from(address).await -} - -// Testnet ChainConfig with increased tx size and contract size limits -fn testnet_chain_config() -> ChainConfig { - let mut consensus_parameters = ConsensusParameters::default(); - let tx_params = TxParameters::default().with_max_size(10_000_000); - let contract_params = ContractParameters::default().with_contract_max_size(1_000_000); - consensus_parameters.set_tx_params(tx_params); - consensus_parameters.set_contract_params(contract_params); - - ChainConfig { - consensus_parameters, - ..ChainConfig::local_testnet() - } -} - #[cfg(test)] mod tests { use std::net::{Ipv4Addr, SocketAddr}; - use fuel_tx::{ConsensusParameters, ContractParameters, FeeParameters, TxParameters}; - use fuels_core::types::bech32::FUEL_BECH32_HRP; + use fuel_tx::{ + AssetId, Bytes32, ConsensusParameters, ContractParameters, FeeParameters, TxParameters, + }; + use fuels_core::types::{ + bech32::{Bech32Address, FUEL_BECH32_HRP}, + coin::Coin, + errors::Result, + }; + use rand::Fill; use super::*; diff --git a/packages/fuels-test-helpers/src/utils.rs b/packages/fuels-test-helpers/src/utils.rs index d459f6335..cc056e462 100644 --- a/packages/fuels-test-helpers/src/utils.rs +++ b/packages/fuels-test-helpers/src/utils.rs @@ -1,5 +1,15 @@ -use fuel_core_chain_config::{CoinConfig, MessageConfig}; -use fuels_core::types::{coin::Coin, message::Message}; +use fuel_core_chain_config::{ChainConfig, CoinConfig, MessageConfig, StateConfig}; +use fuel_tx::{AssetId, Bytes32, ConsensusParameters, ContractParameters, TxParameters, UtxoId}; +use fuel_types::Nonce; +use fuels_accounts::provider::Provider; +use fuels_core::types::{ + bech32::Bech32Address, + coin::{Coin, CoinStatus}, + message::{Message, MessageStatus}, +}; +use rand::Fill; + +use crate::{AssetConfig, FuelService, NodeConfig}; pub(crate) fn into_coin_configs(coins: Vec) -> Vec { coins @@ -14,3 +24,140 @@ pub(crate) fn into_message_configs(messages: Vec) -> Vec .map(Into::into) .collect::>() } + 
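The node-setup helpers move from `lib.rs` into `utils.rs` below and are re-exported behind `std` via `pub use utils::*`. A hedged usage sketch combining two of them, with signatures as they appear in the moved code and the async wiring assumed:

```rust
use fuels_core::types::bech32::Bech32Address;
use fuels_test_helpers::{setup_multiple_assets_coins, setup_test_provider};

async fn provider_with_assets(owner: &Bech32Address) -> fuels_core::types::errors::Result<()> {
    // 3 assets (including the base asset), 5 coins each, 1_000 units per coin.
    let (coins, _asset_ids) = setup_multiple_assets_coins(owner, 3, 5, 1_000);
    // No messages, default node and chain configuration.
    let _provider = setup_test_provider(coins, vec![], None, None).await?;
    Ok(())
}
```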
+/// Create a vector of `num_asset`*`coins_per_asset` UTXOs and a vector of the unique corresponding +/// asset IDs. `AssetId`. Each UTXO (=coin) contains `amount_per_coin` amount of a random asset. The +/// output of this function can be used with `setup_test_provider` to get a client with some +/// pre-existing coins, with `num_asset` different asset ids. Note that one of the assets is the +/// base asset to pay for gas. +pub fn setup_multiple_assets_coins( + owner: &Bech32Address, + num_asset: u64, + coins_per_asset: u64, + amount_per_coin: u64, +) -> (Vec, Vec) { + let mut rng = rand::thread_rng(); + // Create `num_asset-1` asset ids so there is `num_asset` in total with the base asset + let asset_ids = (0..(num_asset - 1)) + .map(|_| { + let mut random_asset_id = AssetId::zeroed(); + random_asset_id + .try_fill(&mut rng) + .expect("failed to fill with random data"); + random_asset_id + }) + .chain([AssetId::zeroed()]) + .collect::>(); + + let coins = asset_ids + .iter() + .flat_map(|id| setup_single_asset_coins(owner, *id, coins_per_asset, amount_per_coin)) + .collect::>(); + + (coins, asset_ids) +} + +/// Create a vector of UTXOs with the provided AssetIds, num_coins, and amount_per_coin +pub fn setup_custom_assets_coins(owner: &Bech32Address, assets: &[AssetConfig]) -> Vec { + let coins = assets + .iter() + .flat_map(|asset| { + setup_single_asset_coins(owner, asset.id, asset.num_coins, asset.coin_amount) + }) + .collect::>(); + coins +} + +/// Create a vector of `num_coins` UTXOs containing `amount_per_coin` amount of asset `asset_id`. +/// The output of this function can be used with `setup_test_provider` to get a client with some +/// pre-existing coins, but with only one asset ID. +pub fn setup_single_asset_coins( + owner: &Bech32Address, + asset_id: AssetId, + num_coins: u64, + amount_per_coin: u64, +) -> Vec { + let mut rng = rand::thread_rng(); + + let coins: Vec = (1..=num_coins) + .map(|_i| { + let mut r = Bytes32::zeroed(); + r.try_fill(&mut rng) + .expect("failed to fill with random data"); + let utxo_id = UtxoId::new(r, 0); + + Coin { + owner: owner.clone(), + utxo_id, + amount: amount_per_coin, + asset_id, + status: CoinStatus::Unspent, + block_created: Default::default(), + } + }) + .collect(); + + coins +} + +pub fn setup_single_message( + sender: &Bech32Address, + recipient: &Bech32Address, + amount: u64, + nonce: Nonce, + data: Vec, +) -> Message { + Message { + sender: sender.clone(), + recipient: recipient.clone(), + nonce, + amount, + data, + da_height: 0, + status: MessageStatus::Unspent, + } +} + +pub async fn setup_test_provider( + coins: Vec, + messages: Vec, + node_config: Option, + chain_config: Option, +) -> fuels_core::types::errors::Result { + let node_config = node_config.unwrap_or_default(); + let chain_config = chain_config.unwrap_or_else(testnet_chain_config); + + let coin_configs = into_coin_configs(coins); + let message_configs = into_message_configs(messages); + + let state_config = StateConfig { + coins: coin_configs, + messages: message_configs, + ..StateConfig::local_testnet() + }; + + let srv = FuelService::start(node_config, chain_config, state_config).await?; + + let address = srv.bound_address(); + + tokio::spawn(async move { + let _own_the_handle = srv; + let () = futures::future::pending().await; + }); + + Provider::from(address).await +} + +// Testnet ChainConfig with increased tx size and contract size limits +fn testnet_chain_config() -> ChainConfig { + let mut consensus_parameters = ConsensusParameters::default(); + let tx_params = 
TxParameters::default().with_max_size(10_000_000); + let contract_params = ContractParameters::default().with_contract_max_size(1_000_000); + consensus_parameters.set_tx_params(tx_params); + consensus_parameters.set_contract_params(contract_params); + + ChainConfig { + consensus_parameters, + ..ChainConfig::local_testnet() + } +} diff --git a/packages/fuels/Cargo.toml b/packages/fuels/Cargo.toml index 5da952793..22237c6ac 100644 --- a/packages/fuels/Cargo.toml +++ b/packages/fuels/Cargo.toml @@ -14,18 +14,18 @@ description = "Fuel Rust SDK." ignored = ["fuel-core"] [dependencies] -fuel-core = { workspace = true, default-features = false, optional = true } +fuel-core = { workspace = true, optional = true } fuel-core-client = { workspace = true, optional = true } fuel-crypto = { workspace = true } fuel-tx = { workspace = true } -fuels-accounts = { workspace = true, default-features = false } +fuels-accounts = { workspace = true } fuels-core = { workspace = true } fuels-macros = { workspace = true } fuels-programs = { workspace = true, optional = true } fuels-test-helpers = { workspace = true, optional = true } [features] -default = ["std", "fuels-test-helpers?/fuels-accounts", "coin-cache"] +default = ["std", "coin-cache"] coin-cache = ["fuels-accounts/coin-cache"] # The crates enabled via `dep:` below are not currently wasm compatible, as @@ -36,9 +36,10 @@ std = [ "dep:fuels-programs", "dep:fuels-test-helpers", "fuels-accounts/std", - "fuels-programs?/std", + "fuels-programs/std", "fuels-core/std", "fuels-test-helpers?/std", ] fuel-core-lib = ["fuels-test-helpers?/fuel-core-lib", "dep:fuel-core"] +test-helpers = ["dep:fuels-test-helpers"] rocksdb = ["fuel-core?/rocksdb"] diff --git a/packages/fuels/src/lib.rs b/packages/fuels/src/lib.rs index 6231ff9ae..95fc8078e 100644 --- a/packages/fuels/src/lib.rs +++ b/packages/fuels/src/lib.rs @@ -53,13 +53,15 @@ pub mod types { pub use fuels_core::types::*; } -#[cfg(feature = "std")] +#[cfg(feature = "test-helpers")] pub mod test_helpers { pub use fuels_test_helpers::*; } #[doc(hidden)] pub mod prelude { + #[cfg(feature = "test-helpers")] + pub use super::test_helpers::*; #[cfg(feature = "std")] pub use super::{ accounts::{ @@ -79,8 +81,7 @@ pub mod prelude { SettableContract, StorageConfiguration, }, }, - test_helpers::*, - types::transaction_builders::*, + types::{transaction::*, transaction_builders::*}, }; pub use super::{ core::constants::*, @@ -89,7 +90,6 @@ pub mod prelude { types::{ bech32::{Bech32Address, Bech32ContractId}, errors::{Error, Result}, - transaction::*, Address, AssetId, Bytes, ContractId, RawSlice, Salt, }, }; diff --git a/scripts/check-docs/Cargo.toml b/scripts/check-docs/Cargo.toml deleted file mode 100644 index 29ac54550..000000000 --- a/scripts/check-docs/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "check-docs" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } -homepage = { workspace = true } -license = { workspace = true } -publish = false -repository = { workspace = true } -rust-version = { workspace = true } - -[dependencies] -anyhow = "1.0.75" -itertools = { workspace = true } -regex = { workspace = true } diff --git a/scripts/check-docs/src/lib.rs b/scripts/check-docs/src/lib.rs deleted file mode 100644 index bcdd18e89..000000000 --- a/scripts/check-docs/src/lib.rs +++ /dev/null @@ -1,261 +0,0 @@ -use std::{ - collections::HashSet, - path::{Path, PathBuf}, -}; - -use anyhow::{anyhow, bail, Error}; -use itertools::{chain, Itertools}; -use regex::Regex; - 
-pub fn report_errors(error_type: &str, errors: &[Error]) { - if !errors.is_empty() { - eprintln!("\nInvalid {error_type} detected!\n"); - for error in errors { - eprintln!("{error}\n") - } - } -} - -pub fn report_warnings(warnings: &[Error]) { - if !warnings.is_empty() { - eprintln!("\nWarnings detected!\n"); - for warning in warnings { - eprintln!("{warning}\n") - } - } -} - -pub fn validate_includes( - includes: Vec, - valid_anchors: Vec, -) -> (Vec, Vec) { - let (pairs, errors): (Vec<_>, Vec<_>) = includes - .into_iter() - .filter(|include| !include.anchor_name.is_empty()) - .map(|include| { - let mut maybe_anchor = valid_anchors.iter().find(|anchor| { - anchor.file == include.anchor_file && anchor.name == include.anchor_name - }); - - match maybe_anchor.take() { - Some(anchor) => Ok(anchor.clone()), - None => Err(anyhow!( - "No anchor available to satisfy include {include:?}" - )), - } - }) - .partition_result(); - - let additional_warnings = valid_anchors - .iter() - .filter(|valid_anchor| { - let anchor_used_in_a_pair = pairs.iter().any(|anchor| anchor == *valid_anchor); - !anchor_used_in_a_pair - }) - .map(|unused_anchor| anyhow!("Anchor unused: {unused_anchor:?}!")) - .collect::>(); - - (errors, additional_warnings) -} - -#[allow(dead_code)] -#[derive(Debug, Clone)] -pub struct Include { - pub anchor_name: String, - pub anchor_file: PathBuf, - pub include_file: PathBuf, - pub line_no: usize, -} - -pub fn parse_includes(text_w_includes: String) -> (Vec, Vec) { - let apply_regex = |regex: Regex| { - let (includes, errors): (Vec<_>, Vec<_>) = text_w_includes - .lines() - .filter_map(|line| regex.captures(line)) - .map(|capture| { - let include_file = PathBuf::from(&capture[1]).canonicalize()?; - let line_no = capture[2].parse()?; - let anchor_file = PathBuf::from(&capture[3]); - let anchor_name = capture.get(4).map_or("", |m| m.as_str()).to_string(); - - let the_path = include_file.parent().unwrap().join(anchor_file); - - let anchor_file = the_path.canonicalize().map_err(|err| { - anyhow!( - "{the_path:?} when canonicalized gives error {err:?}\ninclude_file: {:?}", - include_file - ) - })?; - - Ok(Include { - anchor_name, - anchor_file, - include_file, - line_no, - }) - }) - .partition_result(); - (includes, errors) - }; - - apply_regex( - Regex::new(r"^(\S+):(\d+):\s*\{\{\s*#include\s*(\S+?)\s*(?::\s*(\S+)\s*)?\}\}") - .expect("could not construct regex"), - ) -} - -pub fn filter_valid_anchors(starts: Vec, ends: Vec) -> (Vec, Vec) { - let find_anchor_end_by_name = |anchor_name: &str, file: &Path| { - ends.iter() - .filter(|el| el.name == *anchor_name && el.file == file) - .collect::>() - }; - - let (pairs, errors):(Vec<_>, Vec<_>) = starts.into_iter().map(|start| { - let matches_by_name = find_anchor_end_by_name(&start.name, &start.file); - - let (begin, end) = match matches_by_name.as_slice() { - [single_match] => Ok((start, (*single_match).clone())), - [] => Err(anyhow!("Couldn't find a matching end anchor for {start:?}")), - multiple_ends => Err(anyhow!("Found too many matching anchor ends for anchor: {start:?}. 
The matching ends are: {multiple_ends:?}")), - }?; - - match check_validity_of_anchor_pair(&begin, &end) { - None => Ok((begin, end)), - Some(err) => { - let err_msg = err.to_string(); - Err(anyhow!("{err_msg}")) - } - } - }).partition_result(); - - let additional_errors = filter_unused_ends(&ends, &pairs) - .into_iter() - .map(|unused_end| anyhow!("Missing anchor start for {unused_end:?}")) - .collect::>(); - - let start_only = pairs.into_iter().map(|(begin, _)| begin).collect(); - - (start_only, chain!(errors, additional_errors).collect()) -} - -pub fn filter_unused_ends<'a>(ends: &'a [Anchor], pairs: &[(Anchor, Anchor)]) -> Vec<&'a Anchor> { - ends.iter() - .filter(|end| { - let end_used_in_pairs = pairs.iter().any(|(_, used_end)| *end == used_end); - !end_used_in_pairs - }) - .collect() -} - -pub fn check_validity_of_anchor_pair(begin: &Anchor, end: &Anchor) -> Option { - if begin.line_no > end.line_no { - Some(anyhow!("The end of the anchor appears before the beginning. End anchor: {end:?}. Begin anchor: {begin:?}")) - } else { - None - } -} - -#[derive(Debug, Clone, Hash, Eq, PartialEq)] -pub struct Anchor { - pub line_no: usize, - pub name: String, - pub file: PathBuf, -} - -pub fn extract_starts_and_ends( - text_w_anchors: &str, -) -> anyhow::Result<(Vec, Vec), Error> { - let apply_regex = |regex: Regex| { - text_w_anchors - .lines() - .filter_map(|line| regex.captures(line)) - .map(|capture| { - let file = PathBuf::from(&capture[1]).canonicalize()?; - let line_no = &capture[2]; - let anchor_name = &capture[3]; - - Ok(Anchor { - line_no: line_no.parse()?, - name: anchor_name.to_string(), - file, - }) - }) - .collect::, Error>>() - }; - - let begins = apply_regex(Regex::new( - r"^(.+):(\d+):\s*(?:/{2,}|/\*)\s*ANCHOR\s*:\s*([\w_-]+)\s*(?:\*/)?", - )?)?; - let ends = apply_regex(Regex::new( - r"^(.+):(\d+):\s*(?:/{2,}|/\*)\s*ANCHOR_END\s*:\s*([\w_-]+)\s*(?:\*/)?", - )?)?; - - Ok((begins, ends)) -} - -pub fn parse_md_files(text_w_files: String, path: &str) -> HashSet { - let regex = Regex::new(r"\((.*\.md)\)").expect("could not construct regex"); - - text_w_files - .lines() - .filter_map(|line| regex.captures(line)) - .map(|capture| { - PathBuf::from(path) - .join(&capture[1]) - .canonicalize() - .expect("could not canonicalize md path") - }) - .collect() -} - -pub fn validate_md_files( - md_files_summary: HashSet, - md_files_in_src: String, -) -> Vec { - md_files_in_src - .lines() - .filter_map(|file| { - let file = PathBuf::from(file) - .canonicalize() - .expect("could not canonicalize md path"); - - (!md_files_summary.contains(&file)) - .then(|| anyhow!("file `{}` not in SUMMARY.md", file.to_str().unwrap())) - }) - .collect() -} - -pub fn search_for_pattern(pattern: &str, location: &str) -> anyhow::Result { - let grep_project = std::process::Command::new("grep") - .arg("-H") // print filename - .arg("-n") // print line-number - .arg("-r") // search recursively - .arg("--binary-files=without-match") - .arg("--exclude-dir=check-docs") - .arg(pattern) - .arg(location) - .output() - .expect("failed grep command"); - - if !grep_project.status.success() { - bail!("Failed running `grep` command for pattern '{}'", pattern); - } - - Ok(String::from_utf8(grep_project.stdout)?) 
-} - -pub fn find_files(pattern: &str, location: &str, exclude: &str) -> anyhow::Result { - let find = std::process::Command::new("find") - .args([ - location, "-type", "f", "-name", pattern, "!", "-name", exclude, - ]) - .output() - .expect("Program `find` not in PATH"); - - if !find.status.success() { - bail!("Failed running `find` command for pattern {}", pattern); - } - - Ok(String::from_utf8(find.stdout)?) -} diff --git a/scripts/check-docs/src/main.rs b/scripts/check-docs/src/main.rs deleted file mode 100644 index 1badf4ba3..000000000 --- a/scripts/check-docs/src/main.rs +++ /dev/null @@ -1,37 +0,0 @@ -use anyhow::{bail, Error}; -use check_docs::{ - extract_starts_and_ends, filter_valid_anchors, find_files, parse_includes, parse_md_files, - report_errors, search_for_pattern, validate_includes, validate_md_files, -}; - -fn main() -> anyhow::Result<(), Error> { - let text_w_anchors = search_for_pattern("ANCHOR", ".")?; - let (starts, ends) = extract_starts_and_ends(&text_w_anchors)?; - let (valid_anchors, anchor_errors) = filter_valid_anchors(starts, ends); - - let text_mentioning_include = search_for_pattern("{{#include", ".")?; - let (includes, include_path_errors) = parse_includes(text_mentioning_include); - let (include_errors, additional_warnings) = validate_includes(includes, valid_anchors); - - let text_with_md_files = search_for_pattern(".md", "./docs/src/SUMMARY.md")?; - let md_files_in_summary = parse_md_files(text_with_md_files, "./docs/src/"); - let md_files_in_src = find_files("*.md", "./docs/src/", "SUMMARY.md")?; - let md_files_errors = validate_md_files(md_files_in_summary, md_files_in_src); - - report_errors("warning", &additional_warnings); - report_errors("include paths", &include_path_errors); - report_errors("anchors", &anchor_errors); - report_errors("includes", &include_errors); - report_errors("md files", &md_files_errors); - - if !anchor_errors.is_empty() - || !include_errors.is_empty() - || !include_path_errors.is_empty() - || !additional_warnings.is_empty() - || !md_files_errors.is_empty() - { - bail!("Finished with errors"); - } - - Ok(()) -} diff --git a/scripts/check-docs/tests/harness.rs b/scripts/check-docs/tests/harness.rs deleted file mode 100644 index acda36099..000000000 --- a/scripts/check-docs/tests/harness.rs +++ /dev/null @@ -1,107 +0,0 @@ -use anyhow::Error; -use check_docs::{ - extract_starts_and_ends, filter_valid_anchors, find_files, parse_includes, parse_md_files, - search_for_pattern, validate_includes, validate_md_files, Anchor, Include, -}; - -enum TestEnum { - Anchor(Vec), - Include(Vec), - Errors(Vec), -} - -fn contains_any(vec: &TestEnum, str: &str) -> bool { - match vec { - TestEnum::Anchor(anchor_vec) => anchor_vec.iter().any(|anchor| anchor.name == str), - TestEnum::Include(include_vec) => { - include_vec.iter().any(|include| include.anchor_name == str) - } - TestEnum::Errors(err_vec) => err_vec.iter().any(|err| err.to_string().contains(str)), - } -} - -#[test] -fn test_anchors() -> anyhow::Result<()> { - let test_data = search_for_pattern("ANCHOR", ".")?; - - let (starts, ends) = extract_starts_and_ends(&test_data)?; - let (valid_anchors, anchor_errors) = filter_valid_anchors(starts, ends); - - let valid_vec = TestEnum::Anchor(valid_anchors.clone()); - let anchor_err_vec = TestEnum::Errors(anchor_errors); - - assert!(contains_any(&valid_vec, "test_anchor_line_comment")); - assert!(contains_any(&valid_vec, "test_anchor_block_comment")); - assert!(contains_any(&valid_vec, "test_with_more_forward_slashes")); - 
assert!(!contains_any(&valid_vec, "no_anchor_with_this_name")); - - assert!(contains_any( - &anchor_err_vec, - "Missing anchor start for Anchor { line_no: 10, name: \"test_no_anchor_beginning\"" - )); - assert!(contains_any(&anchor_err_vec, "Couldn't find a matching end anchor for Anchor { line_no: 12, name: \"test_no_anchor_end\"")); - assert!(contains_any(&anchor_err_vec, "The end of the anchor appears before the beginning. End anchor: Anchor { line_no: 14, name: \"test_end_before_beginning\"")); - - assert!(contains_any(&anchor_err_vec, "Found too many matching anchor ends for anchor: Anchor { line_no: 17, name: \"test_same_name_multiple_time\"")); - assert!(contains_any(&anchor_err_vec, "Found too many matching anchor ends for anchor: Anchor { line_no: 20, name: \"test_same_name_multiple_time\"")); - // Caused by too many matching anchors - assert!(contains_any( - &anchor_err_vec, - "Missing anchor start for Anchor { line_no: 18, name: \"test_same_name_multiple_time\"" - )); - assert!(contains_any( - &anchor_err_vec, - "Missing anchor start for Anchor { line_no: 21, name: \"test_same_name_multiple_time\"" - )); - - let text_mentioning_include = search_for_pattern("{{#include", ".")?; - - let (includes, include_path_errors) = parse_includes(text_mentioning_include); - - let includes_vec = TestEnum::Include(includes.clone()); - - assert!(contains_any(&includes_vec, "test_anchor_line_comment")); - assert!(contains_any(&includes_vec, "test_anchor_block_comment")); - assert!(contains_any( - &includes_vec, - "test_with_more_forward_slashes" - )); - assert!(contains_any(&includes_vec, "")); //Check the file include without anchor - - let include_path_errors = TestEnum::Errors(include_path_errors); - - assert!(contains_any( - &include_path_errors, - "test_anchor_data2.rs\" when canonicalized gives error Os { code: 2, kind: NotFound" - )); - - assert!(contains_any( - &include_path_errors, - "test_anchor_data3.rs\" when canonicalized gives error Os { code: 2, kind: NotFound" - )); - - let (include_errors, _) = validate_includes(includes, valid_anchors); - - let include_err_vec = TestEnum::Errors(include_errors); - - assert!(contains_any( - &include_err_vec, - "No anchor available to satisfy include Include { anchor_name: \"no_existing_anchor\"" - )); - - Ok(()) -} - -#[test] -fn test_unused_md() -> anyhow::Result<()> { - let text_with_md_files = search_for_pattern(".md", "./tests/test_data/docs/src/SUMMARY.md")?; - let md_files_in_summary = parse_md_files(text_with_md_files, "./tests/test_data/docs/src/"); - let md_files_in_src = find_files("*.md", "./tests/test_data/docs/src/", "SUMMARY.md")?; - let md_files_errors = validate_md_files(md_files_in_summary, md_files_in_src); - - let error_msg = md_files_errors.first().unwrap().to_string(); - - assert!(error_msg.contains("test-not-there.md` not in SUMMARY.md")); - - Ok(()) -} diff --git a/scripts/check-docs/tests/test_data/docs/src/SUMMARY.md b/scripts/check-docs/tests/test_data/docs/src/SUMMARY.md deleted file mode 100644 index 556dad99a..000000000 --- a/scripts/check-docs/tests/test_data/docs/src/SUMMARY.md +++ /dev/null @@ -1 +0,0 @@ -- [Test](./test.md) diff --git a/scripts/check-docs/tests/test_data/docs/src/test-not-there.md b/scripts/check-docs/tests/test_data/docs/src/test-not-there.md deleted file mode 100644 index e69de29bb..000000000 diff --git a/scripts/check-docs/tests/test_data/docs/src/test.md b/scripts/check-docs/tests/test_data/docs/src/test.md deleted file mode 100644 index e69de29bb..000000000 diff --git 
a/scripts/check-docs/tests/test_data/test_anchor_data.rs b/scripts/check-docs/tests/test_data/test_anchor_data.rs deleted file mode 100644 index 1cd26a039..000000000 --- a/scripts/check-docs/tests/test_data/test_anchor_data.rs +++ /dev/null @@ -1,21 +0,0 @@ -// ANCHOR: test_anchor_line_comment -///// ANCHOR_END: test_anchor_line_comment - -/* ANCHOR: test_anchor_block_comment */ -/* ANCHOR_END: test_anchor_block_comment */ - -// ANCHOR: test_with_more_forward_slashes -///// ANCHOR_END: test_with_more_forward_slashes - -// ANCHOR_END: test_no_anchor_beginning - -// ANCHOR: test_no_anchor_end - -// ANCHOR_END: test_end_before_beginning -// ANCHOR: test_end_before_beginning - -// ANCHOR: test_same_name_multiple_time -// ANCHOR_END: test_same_name_multiple_time - -// ANCHOR: test_same_name_multiple_time -// ANCHOR_END: test_same_name_multiple_time diff --git a/scripts/check-docs/tests/test_data/test_include_data.md b/scripts/check-docs/tests/test_data/test_include_data.md deleted file mode 100644 index 53bb84642..000000000 --- a/scripts/check-docs/tests/test_data/test_include_data.md +++ /dev/null @@ -1,34 +0,0 @@ - -```rust,ignore -{{#include ./test_anchor_data.rs:test_anchor_line_comment}} -``` - -```rust,ignore -{{#include ./test_anchor_data.rs:test_anchor_block_comment}} -``` - -```rust,ignore -{{#include ./test_anchor_data.rs:test_with_more_forward_slashes}} -``` - -```rust,ignore -{{#include ./test_anchor_data.rs:no_existing_anchor}} -``` - -Include file with correct path - -```rust,ignore -{{#include ./test_anchor_data.rs}} -``` - -Include file with wrong path - -```rust,ignore -{{#include ./test_anchor_data2.rs}} -``` - -Another include file with wrong path - -```rust,ignore -{{#include ./test_anchor_data3.rs}} -``` diff --git a/scripts/checks/Cargo.toml b/scripts/checks/Cargo.toml new file mode 100644 index 000000000..2cd2fbcc9 --- /dev/null +++ b/scripts/checks/Cargo.toml @@ -0,0 +1,41 @@ +[package] +name = "checks" +authors = { workspace = true } +edition = { workspace = true } +homepage = { workspace = true } +readme = { workspace = true } +license = { workspace = true } +repository = { workspace = true } +rust-version = { workspace = true } +version = { workspace = true } +publish = false + +build = "build.rs" + +[dependencies] +pretty_assertions = { workspace = true, features = ["alloc"] } +anyhow = { workspace = true, features = ["std"] } +clap = { workspace = true, features = ["default", "derive"] } +colored = { workspace = true } +duct = { workspace = true } +hex = { workspace = true, features = ["std"] } +itertools = { workspace = true, features = ["use_std"] } +nix = { workspace = true, features = ["process", "signal"] } +regex = { workspace = true, features = ["std", "unicode-perl"] } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true, features = ["std"] } +sha2 = { workspace = true, features = ["std"] } +tokio = { workspace = true, features = ["rt-multi-thread", "macros", "signal"] } +tokio-util = { workspace = true } +semver = { workspace = true } + +[dev-dependencies] +pretty_assertions = { workspace = true, features = ["std"] } +rand = { workspace = true, features = ["std", "std_rng"] } +tempfile = { workspace = true } + +[build-dependencies] +itertools = { workspace = true, features = ["use_alloc"] } +serde = { workspace = true, features = ["derive"] } +toml = { workspace = true, features = ["parse"] } +semver = { workspace = true } diff --git a/scripts/checks/build.rs b/scripts/checks/build.rs new file mode 100644 index 
000000000..45b4d7644 --- /dev/null +++ b/scripts/checks/build.rs @@ -0,0 +1,70 @@ +use itertools::Itertools; +use semver::Version; +use std::{collections::HashMap, path::Path, str::FromStr}; + +fn main() { + let path = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../Cargo.toml"); + + let path = path + .canonicalize() + .unwrap_or_else(|_| panic!("Path not found: {path:?}")); + + let cargo = workspace_cargo(&path); + + let fuel_core_version = extract_fuel_core_version(&cargo); + + generate_rust_code(&cargo.workspace.members, &fuel_core_version); + + println!("cargo:rerun-if-changed={}", path.display()); +} + +fn extract_fuel_core_version(cargo: &Cargo) -> Version { + let fuel_core = cargo.workspace.dependencies.get("fuel-core").expect("fuel-core to be present in the workspace Cargo.toml so that we may use its version when doing compatibility checks in fuels-accounts"); + let version_str = fuel_core + .version + .clone() + .expect("fuel-core dep in workspace Cargo.toml to have `version` field set"); + + Version::from_str(&version_str).expect("fuel-core version to be a valid semver version") +} + +fn workspace_cargo(cargo: &Path) -> Cargo { + let data = std::fs::read_to_string(cargo).unwrap(); + toml::from_str(&data).unwrap() +} + +fn generate_rust_code(members: &[String], fuel_core_version: &Version) { + let members = members + .iter() + .map(|member| format!("{member:?}")) + .join(",\n"); + + let members_code = + format!("#[allow(dead_code)] static WORKSPACE_MEMBERS: &[&str] = &[{members}];"); + let version_code = { + let major = fuel_core_version.major; + let minor = fuel_core_version.minor; + let patch = fuel_core_version.patch; + format!("#[allow(dead_code)] static FUEL_CORE_VERSION: ::semver::Version = ::semver::Version::new({major}, {minor}, {patch});") + }; + let code = format!("{}\n{}", members_code, version_code); + + let out_dir = std::env::var("OUT_DIR").unwrap(); + let dest_path = Path::new(&out_dir).join("workspace_cargo.rs"); + std::fs::write(dest_path, code).unwrap(); +} + +#[derive(Debug, Clone, serde::Deserialize)] +struct Dep { + version: Option, +} + +#[derive(Debug, Clone, serde::Deserialize)] +struct Workspace { + members: Vec, + dependencies: HashMap, +} +#[derive(Debug, Clone, serde::Deserialize)] +struct Cargo { + workspace: Workspace, +} diff --git a/scripts/checks/src/cli.rs b/scripts/checks/src/cli.rs new file mode 100644 index 000000000..54415e6dc --- /dev/null +++ b/scripts/checks/src/cli.rs @@ -0,0 +1,114 @@ +use std::path::PathBuf; + +use clap::{arg, Parser, ValueEnum}; + +#[derive(Parser)] +#[command(about = "Runs various checks. Some part of the CI, others meant to be run manually.")] +pub struct Cli { + /// Comma separated list of tasks to run + #[arg( + short, + long, + value_delimiter = ',', + num_args = 0.. + + )] + pub only_tasks_with_ids: Option>, + + /// Prints out all tasks available (depends on what `flavor` is enabled) + #[arg(short, long, action)] + pub list_tasks: bool, + + /// Print the JSON object used as configuration for the CI matrix step + #[arg(long)] + pub dump_ci_config: bool, + + /// Only run tasks in the given directories + #[arg( + long, + value_delimiter = ',', + num_args = 0.. + + )] + pub only_tasks_in_dir: Option>, + + /// Used to enable/disable tests that take too long/are too resource intense. + #[arg(short, long, default_value = "normal")] + pub flavor: Flavor, + + /// Enables tests that need the sway artifacts to be built with the type paths enabled. 
+ #[arg(short, long, action)] + pub sway_type_paths: bool, + + /// Enable verbose output. + #[arg(short, long, default_value = "false")] + pub verbose: bool, + + /// If ran as a binary from elsewhere the [`Self::root`] needs to be pointed to where the + /// project workspace root is + #[arg(short, long = "root", required = true)] + pub root: PathBuf, +} + +#[derive(Debug, Copy, Clone, ValueEnum)] +pub enum Flavor { + Normal, + HackFeatures, + HackDeps, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn tasks_can_be_selected() { + // given + let cli = "foo --only-tasks-with-ids one,two -r ."; + + // when + let cli = Cli::try_parse_from(cli.split_whitespace()).unwrap(); + + // then + assert_eq!( + cli.only_tasks_with_ids, + Some(vec!["one".to_string(), "two".to_string()]) + ); + } + + #[test] + fn tasks_can_be_listed() { + // given + let cli = "foo --list-tasks -r ."; + + // when + let cli = Cli::try_parse_from(cli.split_whitespace()).unwrap(); + + // then + assert!(cli.list_tasks); + } + + #[test] + fn flavor_can_be_chosen() { + // given + let cli = "foo --flavor hack-features -r ."; + + // when + let cli = Cli::try_parse_from(cli.split_whitespace()).unwrap(); + + // then + assert!(matches!(cli.flavor, Flavor::HackFeatures)); + } + + #[test] + fn default_flavor_is_normal() { + // given + let cli = "foo -r ."; + + // when + let cli = Cli::try_parse_from(cli.split_whitespace()).unwrap(); + + // then + assert!(matches!(cli.flavor, Flavor::Normal)); + } +} diff --git a/scripts/checks/src/custom_checks.rs b/scripts/checks/src/custom_checks.rs new file mode 100644 index 000000000..ba4cd068c --- /dev/null +++ b/scripts/checks/src/custom_checks.rs @@ -0,0 +1,3 @@ +pub mod md_check; + +pub mod fuel_core_version; diff --git a/scripts/checks/src/custom_checks/fuel_core_version.rs b/scripts/checks/src/custom_checks/fuel_core_version.rs new file mode 100644 index 000000000..4b73b7a89 --- /dev/null +++ b/scripts/checks/src/custom_checks/fuel_core_version.rs @@ -0,0 +1,23 @@ +use std::path::Path; + +include!(concat!(env!("OUT_DIR"), "/workspace_cargo.rs")); + +pub fn verify_core_version(fuels_accounts: &Path) -> anyhow::Result<()> { + let contents = std::fs::read_to_string(fuels_accounts.join("./src/provider/version.rs"))?; + + let correct_version = &self::FUEL_CORE_VERSION; + let apply_template = |version: &semver::Version| -> String { + let major = version.major; + let minor = version.minor; + let patch = version.patch; + format!("pub(crate) const SUPPORTED_FUEL_CORE_VERSION: ::semver::Version = ::semver::Version::new({major}, {minor}, {patch});\n") + }; + + let expected_contents = apply_template(correct_version); + let diff = pretty_assertions::StrComparison::new(&expected_contents, &contents); + if contents != expected_contents { + return Err(anyhow::anyhow!("Fuel core version mismatch. 
{diff}")); + } + + Ok(()) +} diff --git a/scripts/checks/src/custom_checks/md_check.rs b/scripts/checks/src/custom_checks/md_check.rs new file mode 100644 index 000000000..8822fb18c --- /dev/null +++ b/scripts/checks/src/custom_checks/md_check.rs @@ -0,0 +1,465 @@ +use anyhow::{anyhow, bail, Error}; +use duct::cmd; +use itertools::{chain, Itertools}; +use regex::Regex; +use std::{ + collections::HashSet, + path::{Path, PathBuf}, +}; + +pub fn run(dir: &Path) -> anyhow::Result<(), Error> { + let sources = ["packages", "e2e", "examples"].map(|source| dir.join(source)); + let text_w_anchors = search_for_pattern("ANCHOR", &sources)?; + let (starts, ends) = extract_starts_and_ends(&text_w_anchors)?; + let (valid_anchors, anchor_errors) = filter_valid_anchors(starts, &ends); + + let text_mentioning_include = search_for_pattern("{{#include", &[dir.join("docs")])?; + let (includes, include_path_errors) = parse_includes(&text_mentioning_include); + let (include_errors, additional_warnings) = validate_includes(includes, &valid_anchors); + + let text_with_md_files = search_for_pattern(".md", &[dir.join("./docs/src/SUMMARY.md")])?; + let md_files_in_summary = parse_md_files(&text_with_md_files, dir.join("./docs/src/")); + let md_files_in_src = find_files("*.md", dir.join("./docs/src/"), "SUMMARY.md")?; + let md_files_errors = validate_md_files(&md_files_in_summary, &md_files_in_src); + + let errors = chain!( + additional_warnings, + anchor_errors, + include_path_errors, + include_errors, + md_files_errors + ) + .collect_vec(); + + if !errors.is_empty() { + let err_str = errors.iter().map(ToString::to_string).join("\n"); + bail!("Errors: {err_str}") + } + + Ok(()) +} + +pub fn validate_includes( + includes: Vec, + valid_anchors: &[Anchor], +) -> (Vec, Vec) { + let (pairs, errors): (Vec<_>, Vec<_>) = includes + .into_iter() + .filter(|include| !include.anchor_name.is_empty()) + .map(|include| { + let mut maybe_anchor = valid_anchors.iter().find(|anchor| { + anchor.file == include.anchor_file && anchor.name == include.anchor_name + }); + + maybe_anchor.take().map_or_else( + || { + Err(anyhow!( + "No anchor available to satisfy include {include:?}" + )) + }, + |anchor| Ok(anchor.clone()), + ) + }) + .partition_result(); + + let additional_warnings = valid_anchors + .iter() + .filter(|valid_anchor| { + let anchor_used_in_a_pair = pairs.iter().any(|anchor| anchor == *valid_anchor); + !anchor_used_in_a_pair + }) + .map(|unused_anchor| anyhow!("Anchor unused: {unused_anchor:?}!")) + .collect::>(); + + (errors, additional_warnings) +} + +#[allow(dead_code)] +#[derive(Debug, Clone)] +pub struct Include { + pub anchor_name: String, + pub anchor_file: PathBuf, + pub file: PathBuf, + pub line_no: usize, +} + +pub fn parse_includes(text_w_includes: &str) -> (Vec, Vec) { + let apply_regex = |regex: Regex| { + let (includes, errors): (Vec<_>, Vec<_>) = text_w_includes + .lines() + .filter_map(|line| regex.captures(line)) + .map(|capture| { + let include_file = PathBuf::from(&capture[1]).canonicalize()?; + let line_no = capture[2].parse()?; + let anchor_file = PathBuf::from(&capture[3]); + let anchor_name = capture.get(4).map_or("", |m| m.as_str()).to_string(); + + let the_path = include_file.parent().unwrap().join(anchor_file); + + let anchor_file = the_path.canonicalize().map_err(|err| { + anyhow!( + "{the_path:?} when canonicalized gives error {err:?}\ninclude_file: {:?}", + include_file + ) + })?; + + Ok(Include { + anchor_name, + anchor_file, + file: include_file, + line_no, + }) + }) + .partition_result(); + 
(includes, errors) + }; + + apply_regex( + Regex::new(r"^(\S+):(\d+):\s*\{\{\s*#include\s*(\S+?)\s*(?::\s*(\S+)\s*)?\}\}") + .expect("could not construct regex"), + ) +} + +pub fn filter_valid_anchors(starts: Vec, ends: &[Anchor]) -> (Vec, Vec) { + let find_anchor_end_by_name = |anchor_name: &str, file: &Path| { + ends.iter() + .filter(|el| el.name == *anchor_name && el.file == file) + .collect::>() + }; + + let (pairs, errors):(Vec<_>, Vec<_>) = starts.into_iter().map(|start| { + let matches_by_name = find_anchor_end_by_name(&start.name, &start.file); + + let (begin, end) = match matches_by_name.as_slice() { + [single_match] => Ok((start, (*single_match).clone())), + [] => Err(anyhow!("Couldn't find a matching end anchor for {start:?}")), + multiple_ends => Err(anyhow!("Found too many matching anchor ends for anchor: {start:?}. The matching ends are: {multiple_ends:?}")), + }?; + + check_validity_of_anchor_pair(&begin, &end).map_or_else(|| Ok((begin, end)), |err| { + let err_msg = err.to_string(); + Err(anyhow!("{err_msg}")) + }) + }).partition_result(); + + let additional_errors = filter_unused_ends(ends, &pairs) + .into_iter() + .map(|unused_end| anyhow!("Missing anchor start for {unused_end:?}")) + .collect::>(); + + let start_only = pairs.into_iter().map(|(begin, _)| begin).collect(); + + (start_only, chain!(errors, additional_errors).collect()) +} + +pub fn filter_unused_ends<'a>(ends: &'a [Anchor], pairs: &[(Anchor, Anchor)]) -> Vec<&'a Anchor> { + ends.iter() + .filter(|end| { + let end_used_in_pairs = pairs.iter().any(|(_, used_end)| *end == used_end); + !end_used_in_pairs + }) + .collect() +} + +pub fn check_validity_of_anchor_pair(begin: &Anchor, end: &Anchor) -> Option { + if begin.line_no > end.line_no { + Some(anyhow!("The end of the anchor appears before the beginning. End anchor: {end:?}. 
Begin anchor: {begin:?}")) + } else { + None + } +} + +#[derive(Debug, Clone, Hash, Eq, PartialEq)] +pub struct Anchor { + pub line_no: usize, + pub name: String, + pub file: PathBuf, +} + +pub fn extract_starts_and_ends( + text_w_anchors: &str, +) -> anyhow::Result<(Vec, Vec), Error> { + let apply_regex = |regex: Regex| { + text_w_anchors + .lines() + .filter_map(|line| regex.captures(line)) + .map(|capture| { + let file = PathBuf::from(&capture[1]).canonicalize()?; + let line_no = &capture[2]; + let anchor_name = &capture[3]; + + Ok(Anchor { + line_no: line_no.parse()?, + name: anchor_name.to_string(), + file, + }) + }) + .collect::, Error>>() + }; + + let begins = apply_regex(Regex::new( + r"^(.+):(\d+):\s*(?:/{2,}|/\*)\s*ANCHOR\s*:\s*([\w_-]+)\s*(?:\*/)?", + )?)?; + let ends = apply_regex(Regex::new( + r"^(.+):(\d+):\s*(?:/{2,}|/\*)\s*ANCHOR_END\s*:\s*([\w_-]+)\s*(?:\*/)?", + )?)?; + + Ok((begins, ends)) +} + +pub fn parse_md_files(text_w_files: &str, path: impl AsRef) -> HashSet { + let regex = Regex::new(r"\((.*\.md)\)").expect("could not construct regex"); + + text_w_files + .lines() + .filter_map(|line| regex.captures(line)) + .map(|capture| { + PathBuf::from(path.as_ref()) + .join(&capture[1]) + .canonicalize() + .expect("could not canonicalize md path") + }) + .collect() +} + +pub fn validate_md_files(md_files_summary: &HashSet, md_files_in_src: &str) -> Vec { + md_files_in_src + .lines() + .filter_map(|file| { + let file = PathBuf::from(file) + .canonicalize() + .expect("could not canonicalize md path"); + + (!md_files_summary.contains(&file)) + .then(|| anyhow!("file `{}` not in SUMMARY.md", file.to_str().unwrap())) + }) + .collect() +} + +pub fn search_for_pattern(pattern: &str, location: &[PathBuf]) -> anyhow::Result { + let mut args = vec!["-H", "-n", "-r", "--binary-files=without-match", pattern]; + args.extend(location.iter().map(|path| path.to_str().unwrap())); + + duct::cmd("grep", args) + .stdin_null() + .stderr_null() + .read() + .map_err(|err| anyhow!("Failed running `grep` command for pattern '{pattern}': {err}")) +} + +pub fn find_files( + pattern: &str, + location: impl AsRef, + exclude: &str, +) -> anyhow::Result { + Ok(cmd!( + "find", + location.as_ref().to_str().unwrap(), + "-type", + "f", + "-name", + pattern, + "!", + "-name", + exclude, + ) + .stdin_null() + .stderr_null() + .read()?) 
+} + +#[cfg(test)] +mod tests { + + use super::*; + + use anyhow::Error; + + enum TestEnum { + Anchor(Vec), + Include(Vec), + Errors(Vec), + } + + fn contains_any(vec: &TestEnum, str: &str) -> bool { + match vec { + TestEnum::Anchor(anchor_vec) => anchor_vec.iter().any(|anchor| anchor.name == str), + TestEnum::Include(include_vec) => { + include_vec.iter().any(|include| include.anchor_name == str) + } + TestEnum::Errors(err_vec) => err_vec.iter().any(|err| err.to_string().contains(str)), + } + } + + #[test] + fn test_anchors() -> anyhow::Result<()> { + let test_data = generate_test_data()?; + let path = test_data.path(); + + let data = search_for_pattern("ANCHOR", &[path.to_owned()])?; + + let (starts, ends) = extract_starts_and_ends(&data)?; + let (valid_anchors, anchor_errors) = filter_valid_anchors(starts, &ends); + + let valid_vec = TestEnum::Anchor(valid_anchors.clone()); + let anchor_err_vec = TestEnum::Errors(anchor_errors); + + assert!(contains_any(&valid_vec, "test_anchor_line_comment")); + assert!(contains_any(&valid_vec, "test_anchor_block_comment")); + assert!(contains_any(&valid_vec, "test_with_more_forward_slashes")); + assert!(!contains_any(&valid_vec, "no_anchor_with_this_name")); + + assert!(contains_any( + &anchor_err_vec, + "Missing anchor start for Anchor { line_no: 11, name: \"test_no_anchor_beginning\"" + )); + assert!(contains_any(&anchor_err_vec, "Couldn't find a matching end anchor for Anchor { line_no: 13, name: \"test_no_anchor_end\"")); + assert!(contains_any(&anchor_err_vec, "The end of the anchor appears before the beginning. End anchor: Anchor { line_no: 15, name: \"test_end_before_beginning\"")); + + assert!(contains_any(&anchor_err_vec, "Found too many matching anchor ends for anchor: Anchor { line_no: 18, name: \"test_same_name_multiple_time\"")); + assert!(contains_any(&anchor_err_vec, "Found too many matching anchor ends for anchor: Anchor { line_no: 21, name: \"test_same_name_multiple_time\"")); + // Caused by too many matching anchors + assert!(contains_any( + &anchor_err_vec, + "Missing anchor start for Anchor { line_no: 19, name: \"test_same_name_multiple_time\"" + )); + assert!(contains_any( + &anchor_err_vec, + "Missing anchor start for Anchor { line_no: 22, name: \"test_same_name_multiple_time\"" + )); + + let text_mentioning_include = search_for_pattern("{{#include", &[path.to_owned()])?; + + let (includes, include_path_errors) = parse_includes(&text_mentioning_include); + + let includes_vec = TestEnum::Include(includes.clone()); + + assert!(contains_any(&includes_vec, "test_anchor_line_comment")); + assert!(contains_any(&includes_vec, "test_anchor_block_comment")); + assert!(contains_any( + &includes_vec, + "test_with_more_forward_slashes" + )); + assert!(contains_any(&includes_vec, "")); // Check the file include without anchor + + let include_path_errors = TestEnum::Errors(include_path_errors); + + assert!(contains_any( + &include_path_errors, + "test_anchor_data2.rs\" when canonicalized gives error Os { code: 2, kind: NotFound" + )); + + assert!(contains_any( + &include_path_errors, + "test_anchor_data3.rs\" when canonicalized gives error Os { code: 2, kind: NotFound" + )); + + let (include_errors, _) = validate_includes(includes, &valid_anchors); + + let include_err_vec = TestEnum::Errors(include_errors); + + assert!(contains_any( + &include_err_vec, + "No anchor available to satisfy include Include { anchor_name: \"no_existing_anchor\"" + )); + + Ok(()) + } + + #[test] + fn test_unused_md() -> anyhow::Result<()> { + let test_data = 
generate_test_data()?; + let path = test_data.path(); + + let text_with_md_files = search_for_pattern(".md", &[path.join("docs/src/SUMMARY.md")])?; + let md_files_in_summary = parse_md_files(&text_with_md_files, path.join("docs/src/")); + let md_files_in_src = find_files("*.md", path.join("docs/src/"), "SUMMARY.md")?; + let md_files_errors = validate_md_files(&md_files_in_summary, &md_files_in_src); + + let error_msg = md_files_errors.first().unwrap().to_string(); + + eprintln!("{error_msg}"); + assert!(error_msg.contains("test-not-there.md` not in SUMMARY.md")); + + Ok(()) + } + + fn generate_test_data() -> anyhow::Result { + let temp_dir = tempfile::tempdir()?; + + let anchor_data = r#" +// ANCHOR: test_anchor_line_comment +///// ANCHOR_END: test_anchor_line_comment + +/* ANCHOR: test_anchor_block_comment */ +/* ANCHOR_END: test_anchor_block_comment */ + +// ANCHOR: test_with_more_forward_slashes +///// ANCHOR_END: test_with_more_forward_slashes + +// ANCHOR_END: test_no_anchor_beginning + +// ANCHOR: test_no_anchor_end + +// ANCHOR_END: test_end_before_beginning +// ANCHOR: test_end_before_beginning + +// ANCHOR: test_same_name_multiple_time +// ANCHOR_END: test_same_name_multiple_time + +// ANCHOR: test_same_name_multiple_time +// ANCHOR_END: test_same_name_multiple_time +"#; + let path = temp_dir.path(); + std::fs::write(path.join("test_anchor_data.rs"), anchor_data)?; + + let include_data = r#" +```rust,ignore +{{#include ./test_anchor_data.rs:test_anchor_line_comment}} +``` + +```rust,ignore +{{#include ./test_anchor_data.rs:test_anchor_block_comment}} +``` + +```rust,ignore +{{#include ./test_anchor_data.rs:test_with_more_forward_slashes}} +``` + +```rust,ignore +{{#include ./test_anchor_data.rs:no_existing_anchor}} +``` + +Include file with correct path + +```rust,ignore +{{#include ./test_anchor_data.rs}} +``` + +Include file with wrong path + +```rust,ignore +{{#include ./test_anchor_data2.rs}} +``` + +Another include file with wrong path + +```rust,ignore +{{#include ./test_anchor_data3.rs}} +``` +"#; + + std::fs::write(path.join("test_include_data.md"), include_data)?; + + let src = path.join("docs/src"); + std::fs::create_dir_all(&src)?; + + let summary = r#"- [Test](./test.md)"#; + std::fs::write(src.join("SUMMARY.md"), summary)?; + + std::fs::write(src.join("test.md"), "")?; + std::fs::write(src.join("test-not-there.md"), "")?; + + Ok(temp_dir) + } +} diff --git a/scripts/checks/src/customize.rs b/scripts/checks/src/customize.rs new file mode 100644 index 000000000..4328d12ac --- /dev/null +++ b/scripts/checks/src/customize.rs @@ -0,0 +1,61 @@ +use crate::{ + cli, + tasks::{builder::Builder, ci_job::CiJob, Tasks}, +}; +use std::path::PathBuf; + +pub fn ci_jobs(workspace_root: PathBuf) -> Vec { + let tasks = normal(workspace_root); + tasks.ci_jobs() +} + +pub fn choose_tasks(cli: &cli::Cli) -> Tasks { + let mut tasks = match cli.flavor { + cli::Flavor::Normal => normal(cli.root.clone()), + cli::Flavor::HackFeatures => hack_features(cli.root.clone()), + cli::Flavor::HackDeps => hack_deps(cli.root.clone()), + }; + + if let Some(ids) = &cli.only_tasks_with_ids { + tasks.retain_with_ids(ids); + } + + if let Some(dirs) = &cli.only_tasks_in_dir { + tasks.retain_with_dirs(dirs); + } + + if !cli.sway_type_paths { + tasks.retain_without_type_paths(); + } + + tasks +} + +fn normal(workspace_root: PathBuf) -> Tasks { + let mut builder = Builder::new(workspace_root, &["-Dwarnings"]); + + builder.common(); + builder.e2e_specific(); + builder.wasm_specific(); + builder.workspace_level(); 
+ builder.fuels_accounts_core_version(); + + builder.build() +} + +fn hack_features(workspace_root: PathBuf) -> Tasks { + let mut builder = Builder::new(workspace_root, &["-Dwarnings"]); + + builder.hack_features_common(); + builder.hack_features_e2e(); + + builder.build() +} + +fn hack_deps(workspace_root: PathBuf) -> Tasks { + let mut builder = Builder::new(workspace_root, &["-Dwarnings"]); + + builder.hack_deps_common(); + + builder.build() +} diff --git a/scripts/checks/src/main.rs b/scripts/checks/src/main.rs new file mode 100644 index 000000000..c407af549 --- /dev/null +++ b/scripts/checks/src/main.rs @@ -0,0 +1,38 @@ +use std::io::IsTerminal; + +use clap::Parser; +mod cli; +mod custom_checks; +mod customize; +mod tasks; +mod util; + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + let cli = cli::Cli::parse(); + util::configure_child_process_cleanup()?; + + if cli.dump_ci_config { + let jobs = customize::ci_jobs(cli.root.clone()); + // Json used because the CI needs it as such + let jsonified = serde_json::to_string_pretty(&jobs)?; + println!("{jsonified}"); + return Ok(()); + } + + let tasks = customize::choose_tasks(&cli); + + if cli.list_tasks { + println!("{tasks}"); + return Ok(()); + } + + let is_tty = std::io::stderr().is_terminal(); + + let cancel_token = tokio_util::sync::CancellationToken::new(); + util::watch_for_cancel(cancel_token.clone()); + + tasks.run(is_tty, cli.verbose, cancel_token).await?; + + Ok(()) +} diff --git a/scripts/checks/src/tasks.rs b/scripts/checks/src/tasks.rs new file mode 100644 index 000000000..ac20c0118 --- /dev/null +++ b/scripts/checks/src/tasks.rs @@ -0,0 +1,188 @@ +use std::{ + collections::BTreeSet, + fmt::Display, + path::{Path, PathBuf}, +}; + +use itertools::Itertools; +use nix::{sys::signal::Signal, unistd::Pid, NixPath}; +use sha2::Digest; +use tokio::task::JoinSet; +use tokio_util::sync::CancellationToken; + +use self::ci_job::CiJob; + +pub mod builder; +pub mod ci_job; +pub mod command; +pub mod deps; +pub mod report; +pub mod task; + +fn short_sha256(input: &str) -> String { + let mut hasher = sha2::Sha256::default(); + hasher.update(input.as_bytes()); + hex::encode(&hasher.finalize()[..8]) +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct Tasks { + tasks: BTreeSet, + workspace_root: PathBuf, +} + +impl Display for Tasks { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + for task in &self.tasks { + writeln!(f, "{task}")?; + } + Ok(()) + } +} + +impl Tasks { + pub fn new( + tasks: impl IntoIterator, + workspace_root: impl AsRef, + ) -> Self { + Self { + tasks: BTreeSet::from_iter(tasks), + workspace_root: workspace_root.as_ref().canonicalize().unwrap(), + } + } + + pub fn ci_jobs(&self) -> Vec { + // tasks grouped by dir to reuse compilation artifacts and shorten CI time + self.tasks + .iter() + .sorted_by_key(|task| task.cwd.clone()) + .group_by(|task| task.cwd.clone()) + .into_iter() + .flat_map(|(cwd, tasks)| { + let (tasks_requiring_type_paths, normal_tasks) = + separate_out_type_path_tasks(tasks); + + let name = self.create_job_name(&cwd); + + // You cannot have type paths and not have them in the same job, so they need to be + // separate jobs. 
+ [ + job_with_merged_deps(&tasks_requiring_type_paths, name.clone()), + job_with_merged_deps(&normal_tasks, name), + ] + .into_iter() + .flatten() + }) + .collect() + } + + pub async fn run( + self, + tty: bool, + verbose: bool, + cancel_token: CancellationToken, + ) -> anyhow::Result<()> { + let mut set = JoinSet::new(); + for task in self.tasks { + set.spawn_blocking(|| task.run()); + } + + let mut errors = false; + + let mut handle_task_response = |execution: report::Report| { + if let report::Status::Failed { .. } = execution.status { + errors = true; + } + + let report = execution.report(tty, verbose); + eprintln!("{report}"); + anyhow::Ok(()) + }; + + let kill_processes = || { + // All spawned processes are in the same process group created in main. + nix::sys::signal::killpg(Pid::from_raw(0), Signal::SIGINT) + }; + + loop { + tokio::select! { + () = cancel_token.cancelled() => { + kill_processes()?; + return Ok(()); + } + task_response = set.join_next() => { + if let Some(result) = task_response { + handle_task_response(result?)?; + } else { + break; + } + } + } + } + + if errors { + anyhow::bail!("Some checks failed"); + } + + Ok(()) + } + + pub fn retain_with_ids(&mut self, ids: &[String]) { + self.tasks.retain(|task| ids.contains(&task.id())); + } + + pub fn retain_with_dirs(&mut self, dirs: &[PathBuf]) { + let dirs = dirs + .iter() + .map(|dir| { + dir.canonicalize() + .unwrap_or_else(|_| panic!("unable to canonicalize path {dir:?}")) + }) + .collect_vec(); + self.tasks.retain(|task| dirs.contains(&task.cwd)); + } + + pub fn retain_without_type_paths(&mut self) { + self.tasks.retain(|task| { + matches!( + task.cmd.deps().sway_artifacts, + Some(deps::Sway::Normal) | None + ) + }); + } + + fn create_job_name(&self, cwd: &Path) -> String { + // So we don't take up much real estate printing the full canonicalized path + let relative_path = cwd.strip_prefix(&self.workspace_root).unwrap_or_else(|_| { + panic!( + "expected {cwd:?} to be a prefix of {}", + self.workspace_root.display() + ) + }); + + if relative_path.is_empty() { + "workspace".to_string() + } else { + format!("{}", relative_path.display()) + } + } +} + +fn job_with_merged_deps(tasks: &[&task::Task], name: String) -> Option { + tasks + .iter() + .map(|ty| ty.cmd.deps()) + .reduce(|acc, next| acc + next) + .map(|dep| CiJob::new(dep, tasks, name)) +} + +fn separate_out_type_path_tasks<'a>( + tasks: impl IntoIterator, +) -> (Vec<&'a task::Task>, Vec<&'a task::Task>) { + tasks.into_iter().partition(|task| { + task.cmd + .deps() + .sway_artifacts + .is_some_and(|dep| matches!(dep, deps::Sway::TypePaths)) + }) +} diff --git a/scripts/checks/src/tasks/builder.rs b/scripts/checks/src/tasks/builder.rs new file mode 100644 index 000000000..64ddba172 --- /dev/null +++ b/scripts/checks/src/tasks/builder.rs @@ -0,0 +1,395 @@ +use itertools::Itertools; +use std::{ + collections::BTreeSet, + path::{Path, PathBuf}, +}; + +use crate::tasks::{command::Command, deps, task::Task, Tasks}; +pub struct Builder { + workspace: PathBuf, + rust_flags: Vec, + tasks: Vec, +} + +include!(concat!(env!("OUT_DIR"), "/workspace_cargo.rs")); +impl Builder { + pub fn new(workspace: PathBuf, rust_flags: &[&str]) -> Self { + Self { + workspace, + rust_flags: rust_flags.iter().map(|s| (*s).to_string()).collect(), + tasks: vec![], + } + } + + pub fn common(&mut self) { + let exclude = None; + let tasks = self + .all_workspace_members(exclude) + .into_iter() + .flat_map(|member| { + let deps = { + // Some examples run abigen! 
on sway projects in e2e + let sway_artifacts = member + .starts_with(self.workspace_path("examples")) + .then_some(deps::Sway::Normal); + + deps::Deps { + sway_artifacts, + ..Default::default() + } + }; + + let mut commands = vec![ + self.cargo_fmt("--verbose --check", deps.clone()), + Self::custom( + "typos", + "", + &deps::Deps { + typos_cli: true, + ..deps.clone() + }, + ), + ]; + + // e2e ignored because we have to control the features carefully (e.g. rocksdb, test-type-paths, etc) + if member != self.workspace_path("e2e") { + let cmd = + self.cargo_clippy("--all-targets --all-features --no-deps", deps.clone()); + commands.push(cmd); + } + + // e2e ignored because we have to control the features carefully (e.g. rocksdb, test-type-paths, etc) + // wasm ignored because wasm tests need to be run with wasm-pack + if member != self.workspace_path("wasm-tests") + && member != self.workspace_path("e2e") + { + let cmd = self.cargo_nextest("run --all-features", deps.clone()); + commands.push(cmd); + } + + // because these don't have libs + if member != self.workspace_path("e2e") + && member != self.workspace_path("wasm-tests") + && member != self.workspace_path("scripts/checks") + { + let cmd = self.cargo("test --doc", None, deps.clone()); + commands.push(cmd); + + let cmd = self.cargo( + "doc --document-private-items", + Some(("RUSTDOCFLAGS", "-Dwarnings")), + deps, + ); + commands.push(cmd); + } + + commands.into_iter().map(move |cmd| Task { + cwd: member.clone(), + cmd, + }) + }) + .collect_vec(); + + self.tasks.extend(tasks); + } + + pub fn e2e_specific(&mut self) { + let tasks = [ + self.cargo_nextest( + "run --features default,fuel-core-lib,test-type-paths", + deps::Deps { + sway_artifacts: Some(deps::Sway::TypePaths), + ..Default::default() + }, + ), + self.cargo_nextest( + "run --features default,fuel-core-lib", + deps::Deps { + sway_artifacts: Some(deps::Sway::Normal), + ..Default::default() + }, + ), + self.cargo_nextest( + "run --features default,test-type-paths", + deps::Deps { + fuel_core_binary: true, + sway_artifacts: Some(deps::Sway::TypePaths), + ..Default::default() + }, + ), + self.cargo_clippy( + "--all-targets --no-deps --features default,test-type-paths", + deps::Deps { + sway_artifacts: Some(deps::Sway::TypePaths), + ..Default::default() + }, + ), + ] + .map(|cmd| Task { + cwd: self.workspace_path("e2e"), + cmd, + }); + + self.tasks.extend(tasks); + } + + pub fn wasm_specific(&mut self) { + let task = Task { + cwd: self.workspace_path("wasm-tests"), + cmd: Self::custom( + "wasm-pack", + "test --node", + &deps::Deps { + wasm: true, + ..Default::default() + }, + ), + }; + self.tasks.push(task); + } + + pub fn fuels_accounts_core_version(&mut self) { + let task = Task { + cwd: self.workspace_path("packages/fuels-accounts"), + cmd: Command::VerifyCoreVersion, + }; + self.tasks.push(task); + } + + pub fn workspace_level(&mut self) { + let tasks = [ + Command::MdCheck, + Self::custom( + "cargo-machete", + "--skip-target-dir", + &deps::Deps { + cargo: deps::Cargo { + machete: true, + ..Default::default() + }, + ..Default::default() + }, + ), + self.cargo_clippy( + "--workspace --all-features", + deps::Deps { + sway_artifacts: Some(deps::Sway::Normal), + ..Default::default() + }, + ), + Self::custom( + "typos", + "", + &deps::Deps { + typos_cli: true, + ..Default::default() + }, + ), + ] + .map(|cmd| Task { + cwd: self.workspace_path("."), + cmd, + }); + + self.tasks.extend(tasks); + } + + pub fn hack_features_common(&mut self) { + let ignore = self.workspace_path("e2e"); + let 
tasks = self + .all_workspace_members(Some(&ignore)) + .into_iter() + .flat_map(|member| { + [ + self.cargo_hack("--feature-powerset check", deps::Deps::default()), + self.cargo_hack("--feature-powerset check --tests", deps::Deps::default()), + ] + .into_iter() + .map(move |cmd| Task { + cwd: member.clone(), + cmd, + }) + }) + .collect_vec(); + + self.tasks.extend(tasks); + } + + pub fn hack_features_e2e(&mut self) { + let tasks = [ + self.cargo_hack( + "--feature-powerset check --tests", + deps::Deps { + sway_artifacts: Some(deps::Sway::TypePaths), + ..Default::default() + }, + ), + self.cargo_hack( + "--feature-powerset --exclude-features test-type-paths check --tests", + deps::Deps { + sway_artifacts: Some(deps::Sway::Normal), + ..Default::default() + }, + ), + ] + .map(|cmd| Task { + cwd: self.workspace_path("e2e"), + cmd, + }) + .to_vec(); + + self.tasks.extend(tasks); + } + + pub fn hack_deps_common(&mut self) { + let ignore = self.workspace_path("e2e"); + let tasks = self + .all_workspace_members(Some(&ignore)) + .into_iter() + .flat_map(|member| { + let deps = deps::Deps { + cargo: deps::Cargo { + udeps: true, + ..Default::default() + }, + rust: Some(deps::Rust { + nightly: true, + ..Default::default() + }), + ..Default::default() + }; + [ + self.cargo_hack("udeps", deps.clone()), + self.cargo_hack("udeps --tests", deps), + ] + .into_iter() + .map(move |cmd| Task { + cwd: member.clone(), + cmd, + }) + }) + .collect_vec(); + + self.tasks.extend(tasks); + } + + fn cargo_fmt(&self, cmd: impl Into, mut deps: deps::Deps) -> Command { + deps += deps::Deps { + rust: Some(deps::Rust { + components: BTreeSet::from_iter(["rustfmt".to_string()]), + ..Default::default() + }), + ..Default::default() + }; + + let cmd = format!("fmt {}", cmd.into()); + + self.cargo(cmd, None, deps) + } + + fn cargo_clippy(&self, cmd: impl Into, mut deps: deps::Deps) -> Command { + deps += deps::Deps { + rust: Some(deps::Rust { + components: BTreeSet::from_iter(["clippy".to_string()]), + ..Default::default() + }), + ..Default::default() + }; + + let cmd = format!("clippy {}", cmd.into()); + self.cargo(cmd, None, deps) + } + + fn cargo_hack(&self, cmd: impl Into, mut deps: deps::Deps) -> Command { + deps += deps::Deps { + cargo: deps::Cargo { + hack: true, + ..Default::default() + }, + ..Default::default() + }; + + let cmd = format!("hack {}", cmd.into()); + self.cargo(cmd, None, deps) + } + + fn cargo_nextest(&self, cmd: impl Into, mut deps: deps::Deps) -> Command { + deps += deps::Deps { + cargo: deps::Cargo { + nextest: true, + ..Default::default() + }, + ..Default::default() + }; + + let cmd = format!("nextest {}", cmd.into()); + + self.cargo(cmd, None, deps) + } + + fn cargo( + &self, + cmd: impl Into, + env: Option<(&str, &str)>, + deps: deps::Deps, + ) -> Command { + let envs = { + let flags = self.rust_flags.iter().join(" "); + let mut envs = vec![("RUSTFLAGS".to_owned(), flags)]; + + if let Some(env) = env { + envs.push((env.0.into(), env.1.into())); + } + envs + }; + + let nightly = if deps.rust.as_ref().is_some_and(|r| r.nightly) { + "+nightly" + } else { + "" + }; + + Command::Custom { + program: "cargo".to_string(), + args: parse_cmd(nightly, &cmd.into()), + env: envs, + deps, + } + } + + fn custom(program: &str, args: &str, deps: &deps::Deps) -> Command { + Command::Custom { + program: program.to_owned(), + args: parse_cmd("", args), + env: vec![], + deps: deps.clone(), + } + } + + fn workspace_path(&self, path: &str) -> PathBuf { + let path = self.workspace.join(path); + path.canonicalize() + 
.unwrap_or_else(|_| panic!("Path not found: {path:?}")) + } + + fn all_workspace_members(&self, ignore: Option<&Path>) -> Vec { + self::WORKSPACE_MEMBERS + .iter() + .map(|member| self.workspace_path(member)) + .filter(|member| ignore.map_or(true, |ignore| !member.starts_with(ignore))) + .collect() + } + + pub fn build(self) -> Tasks { + Tasks::new(self.tasks, self.workspace) + } +} + +fn parse_cmd(prepend: &str, string: &str) -> Vec { + let parts = string.split_whitespace().map(ToString::to_string).collect(); + if prepend.is_empty() { + parts + } else { + [vec![prepend.to_owned()], parts].concat() + } +} diff --git a/scripts/checks/src/tasks/ci_job.rs b/scripts/checks/src/tasks/ci_job.rs new file mode 100644 index 000000000..4a8a0f508 --- /dev/null +++ b/scripts/checks/src/tasks/ci_job.rs @@ -0,0 +1,25 @@ +use itertools::Itertools; + +use super::{deps, task::Task}; + +#[derive(Debug, Clone, serde::Serialize)] +pub struct CiJob { + deps: deps::Deps, + // Comma separated task ids + task_ids: String, + name: String, + // Must not contain commas, rust-cache complains + cache_key: String, +} + +impl CiJob { + pub fn new(deps: deps::Deps, tasks: &[&Task], name: String) -> Self { + let ids = tasks.iter().map(|t| t.id()).join(","); + Self { + deps, + cache_key: super::short_sha256(&ids), + task_ids: ids, + name, + } + } +} diff --git a/scripts/checks/src/tasks/command.rs b/scripts/checks/src/tasks/command.rs new file mode 100644 index 000000000..e116b008e --- /dev/null +++ b/scripts/checks/src/tasks/command.rs @@ -0,0 +1,47 @@ +use crate::tasks::deps; +use itertools::Itertools; +use std::fmt::Display; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum Command { + Custom { + program: String, + args: Vec, + env: Vec<(String, String)>, + deps: deps::Deps, + }, + MdCheck, + VerifyCoreVersion, +} + +impl Command { + pub fn deps(&self) -> deps::Deps { + match self { + Self::Custom { deps, .. } => deps.clone(), + Self::MdCheck | Self::VerifyCoreVersion => deps::Deps::default(), + } + } +} + +impl Display for Command { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Custom { + program, args, env, .. 
+            } => {
+                let args = args.iter().join(" ");
+                if env.is_empty() {
+                    write!(f, "{program} {args}")
+                } else {
+                    let env = env
+                        .iter()
+                        .map(|(key, value)| format!("{key}='{value}'"))
+                        .join(" ");
+                    write!(f, "{env} {program} {args}")
+                }
+            }
+            Self::MdCheck => write!(f, "MdCheck"),
+            Self::VerifyCoreVersion => write!(f, "VerifyCoreVersion"),
+        }
+    }
+}
diff --git a/scripts/checks/src/tasks/deps.rs b/scripts/checks/src/tasks/deps.rs
new file mode 100644
index 000000000..e20e23bf7
--- /dev/null
+++ b/scripts/checks/src/tasks/deps.rs
@@ -0,0 +1,106 @@
+use std::collections::BTreeSet;
+
+use itertools::Itertools;
+use serde::{Serialize, Serializer};
+
+#[derive(Debug, Clone, serde::Serialize, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Sway {
+    TypePaths,
+    Normal,
+}
+
+#[derive(Debug, Default, Clone, serde::Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Rust {
+    pub nightly: bool,
+    #[serde(serialize_with = "comma_separated")]
+    pub components: BTreeSet<String>,
+}
+
+fn comma_separated<S>(components: &BTreeSet<String>, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    let components = components.iter().join(",");
+    components.serialize(serializer)
+}
+
+#[derive(Debug, Default, Clone, serde::Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Cargo {
+    pub hack: bool,
+    pub nextest: bool,
+    pub machete: bool,
+    pub udeps: bool,
+}
+
+impl std::ops::Add for Cargo {
+    type Output = Self;
+    fn add(mut self, other: Self) -> Self {
+        self += other;
+        self
+    }
+}
+
+impl std::ops::AddAssign for Cargo {
+    fn add_assign(&mut self, other: Self) {
+        self.hack |= other.hack;
+        self.nextest |= other.nextest;
+        self.machete |= other.machete;
+    }
+}
+
+#[derive(Debug, Default, Clone, serde::Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Deps {
+    pub fuel_core_binary: bool,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub rust: Option<Rust>,
+    pub wasm: bool,
+    pub cargo: Cargo,
+    pub typos_cli: bool,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub sway_artifacts: Option<Sway>,
+}
+
+impl std::ops::Add for Deps {
+    type Output = Self;
+    fn add(mut self, other: Self) -> Self {
+        self += other;
+        self
+    }
+}
+
+impl std::ops::AddAssign for Deps {
+    fn add_assign(&mut self, other: Self) {
+        self.fuel_core_binary |= other.fuel_core_binary;
+
+        let rust = match (self.rust.take(), other.rust) {
+            (Some(mut self_rust), Some(other_rust)) => {
+                self_rust.nightly |= other_rust.nightly;
+                self_rust.components = self_rust
+                    .components
+                    .union(&other_rust.components)
+                    .cloned()
+                    .collect();
+                Some(self_rust)
+            }
+            (Some(self_rust), None) => Some(self_rust),
+            (None, Some(other_rust)) => Some(other_rust),
+            (None, None) => None,
+        };
+        self.rust = rust;
+
+        self.wasm |= other.wasm;
+        self.cargo += other.cargo;
+        self.typos_cli |= other.typos_cli;
+
+        let sway_artifacts = match (self.sway_artifacts, other.sway_artifacts) {
+            (Some(self_sway), Some(other_sway)) => {
+                assert_eq!(self_sway, other_sway, "Deps cannot be unified. Cannot have type paths and normal artifacts at once! {self_sway:?} != {other_sway:?}");
+                Some(self_sway)
+            }
+            (Some(self_sway), None) => Some(self_sway),
+            (None, Some(other_sway)) => Some(other_sway),
+            (None, None) => None,
+        };
+        self.sway_artifacts = sway_artifacts;
+    }
+}
diff --git a/scripts/checks/src/tasks/report.rs b/scripts/checks/src/tasks/report.rs
new file mode 100644
index 000000000..e7a1596da
--- /dev/null
+++ b/scripts/checks/src/tasks/report.rs
@@ -0,0 +1,50 @@
+use colored::Colorize;
+
+#[derive(Debug, Clone)]
+pub struct Report {
+    pub cmd_desc: String,
+    pub status: Status,
+}
+
+impl From<std::io::Error> for Status {
+    fn from(value: std::io::Error) -> Self {
+        Self::Failed {
+            reason: value.to_string(),
+        }
+    }
+}
+
+impl From<anyhow::Error> for Status {
+    fn from(value: anyhow::Error) -> Self {
+        Self::Failed {
+            reason: value.to_string(),
+        }
+    }
+}
+
+#[derive(Debug, Clone)]
+pub enum Status {
+    Success { out: String },
+    Failed { reason: String },
+}
+
+impl Report {
+    pub fn report(&self, tty: bool, verbose: bool) -> String {
+        let status = match &self.status {
+            Status::Failed { reason } => {
+                let err = if tty { "error".red() } else { "error".normal() };
+                format!("{err}\n{reason}")
+            }
+            Status::Success { out } => {
+                let ok = if tty { "ok".green() } else { "ok".normal() };
+                if verbose {
+                    format!("{ok}\n{out}")
+                } else {
+                    ok.to_string()
+                }
+            }
+        };
+
+        format!("{} ... {status}", self.cmd_desc)
+    }
+}
diff --git a/scripts/checks/src/tasks/task.rs b/scripts/checks/src/tasks/task.rs
new file mode 100644
index 000000000..fba867964
--- /dev/null
+++ b/scripts/checks/src/tasks/task.rs
@@ -0,0 +1,96 @@
+use crate::custom_checks;
+
+use super::{
+    command::Command,
+    report::{Report, Status},
+    short_sha256,
+};
+
+use std::fmt::Display;
+
+use std::path::PathBuf;
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Task {
+    pub cwd: PathBuf,
+    pub cmd: Command,
+}
+
+impl Display for Task {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "Task {}, dir: {:?}, {}", self.id(), self.cwd, self.cmd)
+    }
+}
+
+impl Task {
+    pub fn id(&self) -> String {
+        short_sha256(&format!("{self:?}"))
+    }
+
+    pub fn run(self) -> Report {
+        match &self.cmd {
+            Command::Custom {
+                program, args, env, ..
+            } => self.run_custom(program, args.iter().map(String::as_str), env),
+            Command::MdCheck => self.run_md_check(),
+            Command::VerifyCoreVersion => self.run_verify_core_version(),
+        }
+    }
+
+    fn run_verify_core_version(&self) -> Report {
+        let status =
+            if let Err(e) = custom_checks::fuel_core_version::verify_core_version(&self.cwd) {
+                e.into()
+            } else {
+                Status::Success { out: String::new() }
+            };
+        self.report(status)
+    }
+
+    fn run_md_check(&self) -> Report {
+        let status = if let Err(e) = custom_checks::md_check::run(&self.cwd) {
+            e.into()
+        } else {
+            Status::Success { out: String::new() }
+        };
+
+        self.report(status)
+    }
+
+    fn run_custom<'a, F>(&self, program: &str, args: F, env: &[(String, String)]) -> Report
+    where
+        F: IntoIterator<Item = &'a str>,
+    {
+        let mut cmd = duct::cmd(program, args)
+            .stderr_to_stdout()
+            .dir(&self.cwd)
+            .stdin_null()
+            .stdout_capture()
+            .unchecked();
+
+        for (key, value) in env {
+            cmd = cmd.env(key, value);
+        }
+
+        let output = match cmd.run() {
+            Ok(output) => output,
+            Err(err) => return self.report(err),
+        };
+
+        let decoded = String::from_utf8_lossy(&output.stdout).into_owned();
+        let status = if output.status.success() {
+            Status::Success { out: decoded }
+        } else {
+            Status::Failed { reason: decoded }
+        };
+
+        self.report(status)
+    }
+
+    pub(crate) fn report(&self, status: impl Into<Status>) -> Report {
+        Report {
+            cmd_desc: self.to_string(),
+            status: status.into(),
+        }
+    }
+}
diff --git a/scripts/checks/src/util.rs b/scripts/checks/src/util.rs
new file mode 100644
index 000000000..6b0f3fc52
--- /dev/null
+++ b/scripts/checks/src/util.rs
@@ -0,0 +1,14 @@
+use nix::unistd::Pid;
+
+pub fn watch_for_cancel(cancel_token: tokio_util::sync::CancellationToken) {
+    tokio::task::spawn(async move {
+        tokio::signal::ctrl_c().await.unwrap();
+        cancel_token.cancel();
+    });
+}
+
+pub fn configure_child_process_cleanup() -> anyhow::Result<()> {
+    // This process is moved into its own process group so that it's easier to kill any of its children.
+    nix::unistd::setpgid(Pid::from_raw(0), Pid::from_raw(0))?;
+    Ok(())
+}
diff --git a/scripts/fuel-core-version/Cargo.toml b/scripts/fuel-core-version/Cargo.toml
deleted file mode 100644
index af158ca4e..000000000
--- a/scripts/fuel-core-version/Cargo.toml
+++ /dev/null
@@ -1,16 +0,0 @@
-[package]
-name = "fuel-core-version"
-version = { workspace = true }
-authors = { workspace = true }
-edition = { workspace = true }
-homepage = { workspace = true }
-license = { workspace = true }
-publish = false
-repository = { workspace = true }
-rust-version = { workspace = true }
-
-[dependencies]
-clap = { version = "4.5.3", features = ["derive"] }
-color-eyre = "0.6.2"
-semver = { workspace = true }
-versions-replacer = { workspace = true }
diff --git a/scripts/fuel-core-version/src/main.rs b/scripts/fuel-core-version/src/main.rs
deleted file mode 100644
index 4725a0808..000000000
--- a/scripts/fuel-core-version/src/main.rs
+++ /dev/null
@@ -1,84 +0,0 @@
-use std::{
-    fs,
-    path::{Path, PathBuf},
-};
-
-use clap::{Parser, Subcommand};
-use color_eyre::{
-    eyre::{bail, ContextCompat},
-    Result,
-};
-use semver::Version;
-use versions_replacer::metadata::collect_versions_from_cargo_toml;
-
-fn get_version_from_toml(manifest_path: impl AsRef<Path>) -> Result<Version> {
-    let versions = collect_versions_from_cargo_toml(manifest_path)?;
-    let version = versions["fuel-core-types"].parse::<Version>()?;
-    Ok(version)
-}
-
-fn write_version_to_file(version: Version, version_file_path: impl AsRef<Path>) -> Result<()> {
-    let Version {
-        major,
-        minor,
-        patch,
-        ..
- } = version; - let text = format!("Version::new({major}, {minor}, {patch})"); - fs::write(version_file_path, text.as_bytes())?; - Ok(()) -} - -fn get_version_file_path( - manifest_path: impl AsRef, -) -> Result { - Ok(manifest_path - .as_ref() - .parent() - .wrap_err("Invalid manifest path")? - .join("scripts/fuel-core-version/version.rs")) -} - -fn verify_version_from_file(version: Version) -> Result<()> { - let version_from_file: Version = include!("../version.rs"); - if version != version_from_file { - bail!( - "fuel_core version in version.rs ({}) doesn't match one in Cargo.toml ({})", - version_from_file, - version - ); - } - println!( - "fuel_core versions in versions.rs and Cargo.toml match ({})", - version - ); - Ok(()) -} - -#[derive(Debug, Parser)] -struct App { - #[clap(subcommand)] - command: Command, - #[clap(long)] - manifest_path: PathBuf, -} - -#[derive(Debug, Subcommand)] -enum Command { - Write, - Verify, -} - -fn main() -> Result<()> { - let App { - command, - manifest_path, - } = App::parse(); - let version = get_version_from_toml(&manifest_path)?; - let version_file_path = get_version_file_path(&manifest_path)?; - match command { - Command::Write => write_version_to_file(version, version_file_path)?, - Command::Verify => verify_version_from_file(version)?, - } - Ok(()) -} diff --git a/scripts/fuel-core-version/version.rs b/scripts/fuel-core-version/version.rs deleted file mode 100644 index e41fa22a2..000000000 --- a/scripts/fuel-core-version/version.rs +++ /dev/null @@ -1 +0,0 @@ -Version::new(0, 26, 0) diff --git a/scripts/versions-replacer/Cargo.toml b/scripts/versions-replacer/Cargo.toml index 5e875ae41..8ca5b83f2 100644 --- a/scripts/versions-replacer/Cargo.toml +++ b/scripts/versions-replacer/Cargo.toml @@ -10,10 +10,10 @@ repository = { workspace = true } rust-version = { workspace = true } [dependencies] -argh = "0.1.12" -cargo_metadata = "0.18.1" -color-eyre = "0.6.2" -once_cell = "1.18.0" -regex = { workspace = true } +argh = { workspace = true } +cargo_metadata = { workspace = true } +color-eyre = { workspace = true } +once_cell = { workspace = true } +regex = { workspace = true, features = ["unicode-perl"] } serde = { workspace = true, features = ["derive"] } -walkdir = "2.4.0" +walkdir = { workspace = true } diff --git a/wasm-tests/Cargo.toml b/wasm-tests/Cargo.toml index 4d13e5d1a..88936f1be 100644 --- a/wasm-tests/Cargo.toml +++ b/wasm-tests/Cargo.toml @@ -15,5 +15,5 @@ crate-type = ['cdylib'] [dev-dependencies] fuels = { workspace = true } fuels-core = { workspace = true } -getrandom = { version = "0.2.11", features = ["js"] } -wasm-bindgen-test = "0.3.39" +getrandom = { workspace = true, features = ["js"] } +wasm-bindgen-test = { workspace = true }
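
The CI matrix in this patch is driven by merging the `Deps` of every task that shares a working directory (`job_with_merged_deps` reduces them with `+`, which delegates to the `AddAssign` impl in `scripts/checks/src/tasks/deps.rs`). A minimal sketch of that merge behaviour, written as a hypothetical unit test that could sit at the bottom of `deps.rs`; it is not part of the patch and the concrete values are illustrative only:

```rust
// Hypothetical test, not part of the patch. It assumes it lives inside
// scripts/checks/src/tasks/deps.rs so that Deps, Rust, Cargo and Sway are in scope.
#[cfg(test)]
mod merge_sketch {
    use super::*;
    use std::collections::BTreeSet;

    #[test]
    fn merging_deps_takes_the_union_of_requirements() {
        // e.g. a clippy task: only needs the `clippy` rustup component
        let clippy = Deps {
            rust: Some(Rust {
                nightly: false,
                components: BTreeSet::from(["clippy".to_string()]),
            }),
            ..Default::default()
        };
        // e.g. an e2e nextest task: needs cargo-nextest, a fuel-core binary and sway artifacts
        let nextest = Deps {
            fuel_core_binary: true,
            cargo: Cargo {
                nextest: true,
                ..Default::default()
            },
            sway_artifacts: Some(Sway::Normal),
            ..Default::default()
        };

        // `+` ORs the booleans, unions the rustup components and keeps the
        // single sway artifact flavour, so one CI job can serve both tasks
        let merged = clippy + nextest;

        assert!(merged.fuel_core_binary);
        assert!(merged.cargo.nextest);
        assert_eq!(merged.sway_artifacts, Some(Sway::Normal));
        assert_eq!(
            merged.rust,
            Some(Rust {
                nightly: false,
                components: BTreeSet::from(["clippy".to_string()]),
            })
        );
    }
}
```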
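
Relatedly, only the `hack-deps` flavour requests a nightly toolchain, and `Builder::cargo` turns that request into a `+nightly` toolchain override by prepending it through `parse_cmd`. A small hypothetical test showing the intended splitting; since `parse_cmd` is private it would have to live inside `scripts/checks/src/tasks/builder.rs`, and it is likewise not part of the patch:

```rust
// Hypothetical test, not part of the patch; `parse_cmd` is private to builder.rs.
#[test]
fn nightly_prefix_is_prepended_before_the_cargo_subcommand() {
    // used by `Builder::cargo` when deps.rust.nightly is true,
    // producing e.g. `cargo +nightly hack udeps --tests`
    assert_eq!(
        parse_cmd("+nightly", "hack udeps --tests"),
        vec!["+nightly", "hack", "udeps", "--tests"]
    );

    // with an empty prefix the command string is simply split on whitespace
    assert_eq!(
        parse_cmd("", "fmt --verbose --check"),
        vec!["fmt", "--verbose", "--check"]
    );
}
```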