chore(tests): heavy integration to use large dataset
Jon-Becker committed Jun 6, 2024
1 parent 773acc6 commit fae2273
Showing 3 changed files with 103 additions and 111 deletions.
75 changes: 41 additions & 34 deletions .github/workflows/heavy-integration.yml
@@ -1,45 +1,52 @@
 name: heavy integration
 
 on:
   schedule:
     # Runs at 10PM utc
     - cron: "0 22 * * *"
   workflow_dispatch:
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
   cancel-in-progress: true
 
 env:
   CARGO_TERM_COLOR: always
 
 jobs:
   heavy-integration:
     name: heavy (long-running) integration tests
     runs-on: ubuntu-latest
     timeout-minutes: 120
 
     steps:
       - uses: actions/checkout@v3
       - uses: dtolnay/rust-toolchain@stable
       - uses: taiki-e/install-action@nextest
+      - name: Fetch Dataset
+        run: |
+          # download from https://jbecker.dev/data/largest1k.tar.gz
+          wget https://jbecker.dev/data/largest1k.tar.gz
+          # extract the dataset
+          tar -xvf largest1k.tar.gz
       - name: Run Tests
         run: |
           cargo nextest r --no-fail-fast --release --nocapture -- --ignored
 
   # If any of the jobs fail, this will create a high-priority issue to signal so.
   issue:
     name: Open an issue
     runs-on: ubuntu-latest
     needs: heavy-integration
     if: ${{ failure() }}
     steps:
       - uses: actions/checkout@v4
       - uses: JasonEtco/create-an-issue@v2
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           WORKFLOW_URL: |
             ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
         with:
           update_existing: true
           filename: .github/INTEGRATION_FAILURE.md
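The workflow now provisions the dataset before the tests run. Judging from the test changes further down, each file in the extracted largest1k directory is a JSON document named after a contract address, with a "code" field holding that contract's runtime bytecode. Below is a minimal sketch of reading one such entry; the exact filename pattern and field name are inferred from the test code rather than documented anywhere, and the snippet only assumes serde_json, which the updated test also imports.

use serde_json::Value;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assumed layout: the tarball extracts to ./largest1k in the repository root,
    // and each file is named after the contract address it describes.
    let path = "largest1k/0xdAC17F958D2ee523a2206206994597C13D831ec7.json";

    // Each entry is assumed to expose its runtime bytecode under the "code" key,
    // matching what the updated test reads.
    let entry: Value = serde_json::from_str(&std::fs::read_to_string(path)?)?;
    let bytecode = entry["code"].as_str().ok_or("entry has no \"code\" field")?;

    println!("loaded {} hex characters of bytecode", bytecode.len());
    Ok(())
}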
2 changes: 2 additions & 0 deletions .gitignore
@@ -34,3 +34,5 @@ false/*
 
 *.svg
 *.sh
+
+largest1k
137 changes: 60 additions & 77 deletions crates/core/tests/test_decompile.rs
@@ -1,8 +1,11 @@
 #[cfg(test)]
 mod integration_tests {
     use std::{path::PathBuf, process::Command};
 
     use alloy_json_abi::JsonAbi;
     use heimdall_common::utils::io::file::delete_path;
-    use heimdall_decompiler::{decompile, DecompilerArgs};
+    use heimdall_decompiler::{decompile, DecompilerArgs, DecompilerArgsBuilder};
+    use serde_json::Value;
 
     #[tokio::test]
     async fn test_decompile_precompile() {
@@ -172,91 +175,71 @@ mod integration_tests {
     #[tokio::test]
     #[ignore]
     async fn heavy_test_decompile_thorough() {
-        let rpc_url = std::env::var("RPC_URL").unwrap_or_else(|_| {
-            println!("RPC_URL not set, skipping test");
-            std::process::exit(0);
-        });
+        let root_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+            .parent()
+            .expect("no parent")
+            .parent()
+            .expect("no parent")
+            .to_owned();
+
+        // if the ./largest1k directory does not exist, download it from https://jbecker.dev/data/largest1k.tar.gz
+        let dataset_dir = root_dir.join("largest1k");
+        if !dataset_dir.exists() {
+            eprintln!("dataset not found in root, skipping test");
+            std::process::exit(0);
+        }
 
-        let contracts = [
-            "0xdAC17F958D2ee523a2206206994597C13D831ec7",
-            "0x3fC91A3afd70395Cd496C647d5a6CC9D4B2b7FAD",
-            "0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D",
-            "0xA3C66393049fAB4830C330Dfe658f94A4de0A122",
-            "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
-            "0x32400084C286CF3E17e7B677ea9583e60a000324",
-            "0x00000000000000ADc04C56Bf30aC9d3c0aAF14dC",
-            "0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2",
-            "0x881D40237659C251811CEC9c364ef91dC08D300C",
-            "0x1111111254EEB25477B68fb85Ed929f73A960582",
-            "0x6b75d8AF000000e20B7a7DDf000Ba900b4009A80",
-            "0xDef1C0ded9bec7F1a1670819833240f027b25EfF",
-            "0xaBEA9132b05A70803a4E85094fD0e1800777fBEF",
-            "0x6B175474E89094C44Da98b954EedeAC495271d0F",
-            "0xae0Ee0A63A2cE6BaeEFFE56e7714FB4EFE48D419",
-            "0x1a0ad011913A150f69f6A19DF447A0CfD9551054",
-            "0x29469395eAf6f95920E59F858042f0e28D98a20B",
-            "0xA69babEF1cA67A37Ffaf7a485DfFF3382056e78C",
-            "0x95aD61b0a150d79219dCF64E1E6Cc01f0B64C4cE",
-            "0xA9D1e08C7793af67e9d92fe308d5697FB81d3E43",
-            "0x3dB52cE065f728011Ac6732222270b3F2360d919",
-            "0x000000000000Ad05Ccc4F10045630fb830B95127",
-            "0x253553366Da8546fC250F225fe3d25d0C782303b",
-            "0x65f2F6Fba44e5AbeFD90C2aEE52B11a243FC7A16",
-            "0x1f9840a85d5aF5bf1D1762F925BDADdC4201F984",
-            "0x7D1AfA7B718fb893dB30A3aBc0Cfc608AaCfeBB0",
-            "0xb0fcB43D3701f00aFD2Fb3d5f577a806F551D2F2",
-            "0x0000000000A39bb272e79075ade125fd351887Ac",
-            "0xEf1c6E67703c7BD7107eed8303Fbe6EC2554BF6B",
-            "0x68b3465833fb72A70ecDF485E0e4C7bD8665Fc45",
-            "0x98C3d3183C4b8A650614ad179A1a98be0a8d6B8E",
-            "0x514910771AF9Ca656af840dff83E8264EcF986CA",
-            "0x06450dEe7FD2Fb8E39061434BAbCFC05599a6Fb8",
-            "0x6982508145454Ce325dDbE47a25d4ec3d2311933",
-            "0x2a3DD3EB832aF982ec71669E178424b10Dca2EDe",
-            "0xa24787320ede4CC19D800bf87B41Ab9539c4dA9D",
-            "0x473037de59cf9484632f4A27B509CFE8d4a31404",
-            "0xFD14567eaf9ba941cB8c8a94eEC14831ca7fD1b4",
-            "0x4d224452801ACEd8B2F0aebE155379bb5D594381",
-            "0xDe30da39c46104798bB5aA3fe8B9e0e1F348163F",
-            "0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9",
-            "0x388C818CA8B9251b393131C08a736A67ccB19297",
-            "0x3999D2c5207C06BBC5cf8A6bEa52966cabB76d41",
-            "0x3b3ae790Df4F312e745D270119c6052904FB6790",
-            "0xB517850510997a34b4DdC8c3797B4F83fAd510c4",
-            "0x902F09715B6303d4173037652FA7377e5b98089E",
-            "0x5a54fe5234E811466D5366846283323c954310B2",
-            "0xd1d2Eb1B1e90B638588728b4130137D262C87cae",
-            "0x95e05e2Abbd26943874ac000D87C3D9e115B543c",
-            "0x00000000A991C429eE2Ec6df19d40fe0c80088B8",
-            "0x737673b5e0a3c68adf4c1a87bca5623cfc537ec3",
-            "0x940259178FbF021e625510919BC2FF0B944E5613",
-            "0xff612db0583be8d5498731e4e32bc12e08fa6292",
-            "0xd5FEa30Ed719693Ec8848Dc7501b582F5de6a5BB",
-            "0x4C727a07246A70862e45B2E58fcd82c0eD5Eda85",
-            "0x9baa53dD2aB408D9135e549831C06E5c6407bF1d",
-        ];
+        // list files in root_dir
+        let contracts = std::fs::read_dir(dataset_dir)
+            .expect("failed to read dataset directory")
+            .map(|res| {
+                // HashMap from filename (without extension) to bytecode (from serde_json::Value)
+                res.map(|e| {
+                    let path = e.path();
+                    let filename = path
+                        .file_stem()
+                        .expect("no file stem")
+                        .to_str()
+                        .expect("no file stem")
+                        .to_owned();
+
+                    // read contents as json and parse to serde_json::Value
+                    let contents_json: Value = serde_json::from_str(
+                        &std::fs::read_to_string(path).expect("failed to read file"),
+                    )
+                    .expect("failed to parse json");
+                    let bytecode = contents_json["code"].as_str().expect("no bytecode").to_owned();
+
+                    (filename, bytecode)
+                })
+            })
+            .collect::<Result<Vec<_>, std::io::Error>>()
+            .expect("failed to collect files");
 
         // define flag checks
         let mut is_function_covered = false;
         let mut is_event_covered = false;
         let mut is_require_covered = false;
         let mut is_error_covered = false;
 
-        for contract in contracts {
-            println!("Testing contract: {contract}");
-            let result = decompile(DecompilerArgs {
-                target: contract.to_string(),
-                rpc_url: rpc_url.to_string(),
-                default: true,
-                skip_resolving: true,
-                include_solidity: true,
-                include_yul: false,
-                output: String::from(""),
-                name: String::from(""),
-                timeout: 10000,
-            })
-            .await
-            .expect("failed to decompile");
+        for (contract_address, bytecode) in contracts {
+            println!("Testing contract: {contract_address}");
+            let args = DecompilerArgsBuilder::new()
+                .target(bytecode)
+                .skip_resolving(true)
+                .include_solidity(true)
+                .timeout(10000)
+                .output(String::from("./output/tests/decompile/integration"))
+                .build()
+                .expect("failed to build args");
+
+            let result = decompile(args)
+                .await
+                .map_err(|e| {
+                    eprintln!("failed to decompile {contract_address}: {e}");
+                    e
+                })
+                .expect("failed to decompile");
 
             // assert that the number of opening and closing brackets, parentheses, and curly braces
             // are equal
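The rest of this hunk is not shown, but the closing comment describes the structural sanity check the loop applies to each decompiled contract: matching counts of brackets, parentheses, and curly braces in the generated Solidity. Below is a minimal sketch of that kind of check on a plain source string; it is an illustration only, not heimdall's actual assertion, which lies outside the visible diff.

// A sketch only: counts each opening/closing pair in the decompiled source.
fn is_balanced(source: &str) -> bool {
    let count = |c: char| source.matches(c).count();
    count('(') == count(')') && count('[') == count(']') && count('{') == count('}')
}

fn main() {
    assert!(is_balanced("function transfer(address arg0, uint256 arg1) public { }"));
    assert!(!is_balanced("function broken(address arg0 {"));
}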