Skip to content

Commit

Permalink
Merge 9182bca into 520c7c6
Browse files Browse the repository at this point in the history
  • Loading branch information
Nurchik committed May 4, 2021
2 parents 520c7c6 + 9182bca commit 6e2461c
Show file tree
Hide file tree
Showing 11 changed files with 35,740 additions and 114 deletions.
4 changes: 3 additions & 1 deletion .gitignore
Expand Up @@ -101,4 +101,6 @@ ENV/
# mypy
.mypy_cache/
.idea/
.vscode/
.vscode/

.mythx.yml
9 changes: 0 additions & 9 deletions .mythx.yml

This file was deleted.

7 changes: 0 additions & 7 deletions mythx_cli/fuzz/faas.py
@@ -1,9 +1,7 @@
import json
import logging
import random
import string

import click
import requests

from mythx_cli.analyze.scribble import ScribbleMixin
Expand Down Expand Up @@ -72,11 +70,6 @@ def create_faas_campaign(self, campaign_data, seed_state):
:return: Campaign ID
"""
try:
if self.project_type != "brownie":
raise click.exceptions.UsageError(
"Currently only Brownie projects are supported"
)

try:
api_payload_params = {
"discovery-probability-threshold": seed_state[
Expand Down
8 changes: 1 addition & 7 deletions mythx_cli/fuzz/ide/brownie.py
@@ -1,15 +1,9 @@
import json
import logging
import os
from pathlib import Path
from typing import Dict, List

from mythx_cli.fuzz.exceptions import (
BrownieError,
BuildArtifactsError,
PayloadError,
SourceError,
)
from mythx_cli.fuzz.exceptions import BuildArtifactsError
from mythx_cli.fuzz.ide.generic import IDEArtifacts, JobBuilder

from ...util import sol_files_by_directory
Expand Down
111 changes: 111 additions & 0 deletions mythx_cli/fuzz/ide/hardhat.py
@@ -0,0 +1,111 @@
import json
from os.path import commonpath, relpath
from pathlib import Path
from typing import List

from mythx_cli.fuzz.exceptions import BuildArtifactsError
from mythx_cli.fuzz.ide.generic import IDEArtifacts, JobBuilder

from ...util import sol_files_by_directory


class HardhatArtifacts(IDEArtifacts):
    """Collects compiled contract artifacts from a Hardhat project.

    Reads the per-contract ``<name>.json`` / ``<name>.dbg.json`` files and the
    referenced ``build-info`` bundle from the Hardhat artifacts directory and
    exposes them in the shape expected by :class:`JobBuilder`.

    :param build_dir: Path to the Hardhat artifacts directory. Defaults to
        ``./artifacts`` when not provided.
    :param targets: Files/directories whose ``.sol`` sources should be included.
    """

    def __init__(self, build_dir=None, targets=None):
        # Resolve the list of Solidity source files covered by the targets.
        self._include: List[str] = []
        if targets:
            include = []
            for target in targets:
                include.extend(sol_files_by_directory(target))
            self._include = include

        # Bug fix: the original ``Path(build_dir).absolute() or Path(...)``
        # raised TypeError for build_dir=None, because ``Path(None)`` fails
        # before the ``or`` fallback is ever evaluated. Use an explicit
        # conditional so the documented default actually applies.
        self._build_dir = (
            Path(build_dir).absolute() if build_dir else Path("./artifacts").absolute()
        )
        self._contracts, self._sources = self.fetch_data()

    @property
    def contracts(self):
        # Mapping of relative source path -> list of contract payload dicts.
        return self._contracts

    @property
    def sources(self):
        # Mapping of source path -> {fileIndex, source, ast}.
        return self._sources

    def fetch_data(self):
        """Load contract and source data for every targeted source file.

        :return: Tuple of (contracts mapping, sources mapping).
        :raises BuildArtifactsError: If a target's artifact files are missing.
        """
        result_contracts = {}
        result_sources = {}

        for file_path in self._include:
            # Hardhat keys its build output by the source path relative to
            # the project root; derive that from the common path prefix.
            cp = commonpath([self._build_dir, file_path])
            relative_file_path = relpath(file_path, cp)

            if relative_file_path in result_contracts:
                continue

            file_name = Path(file_path).stem
            file_artifact_path: Path = self._build_dir.joinpath(
                relative_file_path
            ).joinpath(f"{file_name}.json")
            file_debug_path: Path = self._build_dir.joinpath(
                relative_file_path
            ).joinpath(f"{file_name}.dbg.json")
            if not file_artifact_path.exists() or not file_debug_path.exists():
                raise BuildArtifactsError("Could not find target artifacts")

            with file_artifact_path.open("r") as file:
                file_artifact = json.load(file)
            with file_debug_path.open("r") as file:
                file_debug_artifact = json.load(file)
            # The .dbg.json file points at the build-info bundle containing
            # the full compiler input/output for this compilation unit.
            build_info_name = Path(file_debug_artifact["buildInfo"]).name
            with self._build_dir.joinpath(f"build-info/{build_info_name}").open(
                "r"
            ) as file:
                build_info = json.load(file)

            result_contracts[relative_file_path] = []

            contracts = build_info["output"]["contracts"][relative_file_path]

            for contract, data in contracts.items():
                # Skip interfaces/abstract contracts: they compile to no code.
                if data["evm"]["bytecode"]["object"] == "":
                    continue
                result_contracts[relative_file_path] += [
                    {
                        # Index -> source path table used to resolve source maps.
                        "sourcePaths": {
                            i: k
                            for i, k in enumerate(
                                build_info["output"]["contracts"].keys()
                            )
                        },
                        "deployedSourceMap": data["evm"]["deployedBytecode"][
                            "sourceMap"
                        ],
                        "deployedBytecode": data["evm"]["deployedBytecode"]["object"],
                        "sourceMap": data["evm"]["bytecode"]["sourceMap"],
                        "bytecode": data["evm"]["bytecode"]["object"],
                        "contractName": file_artifact["contractName"],
                        "mainSourceFile": file_artifact["sourceName"],
                    }
                ]

            # Register every source (including dependencies) exactly once.
            for source_file_dep, data in build_info["output"]["sources"].items():
                if source_file_dep in result_sources:
                    continue

                result_sources[source_file_dep] = {
                    "fileIndex": data["id"],
                    "source": build_info["input"]["sources"][source_file_dep][
                        "content"
                    ],
                    "ast": data["ast"],
                }

        return result_contracts, result_sources


class HardhatJob:
    """Builds a FaaS campaign payload from a Hardhat project's artifacts.

    :param target: Source files/directories to include in the campaign.
    :param build_dir: Path to the Hardhat artifacts directory.
    """

    def __init__(self, target: List[str], build_dir: Path):
        # Populated by :meth:`generate_payload`; None until then.
        self.payload = None
        self._jb = JobBuilder(HardhatArtifacts(build_dir, targets=target))

    def generate_payload(self):
        """Assemble the campaign payload and store it on ``self.payload``."""
        self.payload = self._jb.payload()
29 changes: 15 additions & 14 deletions mythx_cli/fuzz/rpc.py
@@ -1,7 +1,5 @@
import json
import logging
import os
from pathlib import Path
from typing import Optional

import click
import requests
Expand Down Expand Up @@ -78,8 +76,20 @@ def get_all_blocks(self):
blocks.append(self.get_block(block_number=i))
return blocks

def get_seed_state(self, address: str, other_addresses: [str]):
def get_seed_state(
self, address: str, other_addresses: [str], corpus_target: Optional[str] = None
):
seed_state = {
"time-limit-secs": time_limit_seconds,
"discovery-probability-threshold": 0.0,
"assertion-checking-mode": 1,
"emit-mythx-report": True,
"num-cores": self.number_of_cores,
}
"""Get a seed state for the target contract to be used by Harvey"""
if corpus_target:
return dict({**seed_state, "analysis-setup": {"target": corpus_target}})

try:
blocks = self.get_all_blocks()
processed_transactions = []
Expand All @@ -96,16 +106,7 @@ def get_seed_state(self, address: str, other_addresses: [str]):
"other-addresses-under-test": other_addresses,
}
)
return dict(
{
"time-limit-secs": time_limit_seconds,
"analysis-setup": setup,
"discovery-probability-threshold": 0.0,
"assertion-checking-mode": 1,
"emit-mythx-report": True,
"num-cores": self.number_of_cores,
}
)
return dict({**seed_state, "analysis-setup": setup})
except Exception as e:
LOGGER.warning(f"Could not generate seed state for address: {address}")
raise click.exceptions.UsageError(
Expand Down
103 changes: 75 additions & 28 deletions mythx_cli/fuzz/run.py
@@ -1,11 +1,13 @@
import logging
import random
import string
import os
import traceback
from enum import Enum
from pathlib import Path

import click
import requests

from mythx_cli.fuzz.ide.brownie import BrownieJob
from mythx_cli.fuzz.ide.hardhat import HardhatJob

from .exceptions import RPCCallError
from .faas import FaasClient
Expand All @@ -18,6 +20,27 @@
time_limit_seconds = 3000


# Supported project/IDE flavours a target directory can be built with.
# Declared via the Enum functional API; member order and string values
# match the campaign "project_type" identifiers sent to the FaaS.
IDE = Enum(
    "IDE",
    {
        "BROWNIE": "brownie",
        "HARDHAT": "hardhat",
        "TRUFFLE": "truffle",
        "SOLIDITY": "solidity",
    },
)


def determine_ide() -> IDE:
    """Detect which development framework the current working directory uses.

    Probes for the framework's canonical config file in the cwd only.
    Falls back to :attr:`IDE.SOLIDITY` when no known config file is found.

    :return: The detected :class:`IDE` member.
    """
    root_dir = Path.cwd().absolute()
    # Direct existence checks instead of the original
    # ``list(os.walk(root_dir))[0][2]``, which recursed through the whole
    # project tree just to read the top-level file listing.
    if (root_dir / "brownie-config.yaml").is_file():
        return IDE.BROWNIE
    # Hardhat projects may use either a TypeScript or a JavaScript config.
    if (root_dir / "hardhat.config.ts").is_file() or (
        root_dir / "hardhat.config.js"
    ).is_file():
        return IDE.HARDHAT
    if (root_dir / "truffle-config.js").is_file():
        return IDE.TRUFFLE
    return IDE.SOLIDITY


@click.command("run")
@click.argument("target", default=None, nargs=-1, required=False)
@click.option(
Expand All @@ -29,8 +52,17 @@
type=click.STRING,
help="Addresses of other contracts to analyze, separated by commas",
)
@click.option(
"-c",
"--corpus-target",
type=click.STRING,
help="Project UUID, Campaign UUID or Corpus UUID to reuse the corpus from. "
"In case of a project, corpus from the project's latest submitted campaign will be used",
default=None,
required=False,
)
@click.pass_obj
def fuzz_run(ctx, address, more_addresses, target):
def fuzz_run(ctx, address, more_addresses, target, corpus_target):
# read YAML config params from ctx dict, e.g. ganache rpc url
# Introduce a separate `fuzz` section in the YAML file

Expand Down Expand Up @@ -101,45 +133,60 @@ def fuzz_run(ctx, address, more_addresses, target):
else default_config["campaign_name_prefix"]
)

try:
rpc_client = RPCClient(rpc_url, number_of_cores)
contract_code_response = rpc_client.contract_exists(contract_address)
except RPCCallError as e:
raise click.exceptions.UsageError(f"RPC endpoint." f"\n{e}")

if not contract_code_response:
LOGGER.warning(f"Contract code not found")
raise click.exceptions.ClickException(
f"Unable to find a contract deployed at {contract_address}."
)

if more_addresses is None:
other_addresses = []
else:
other_addresses = more_addresses.split(",")

# We get the seed state from the provided rpc endpoint
seed_state = rpc_client.get_seed_state(contract_address, other_addresses)
brownie_artifacts = BrownieJob(target, analyze_config["build_directory"])
brownie_artifacts.generate_payload()
_corpus_target = corpus_target or analyze_config.get("corpus_target", None)

rpc_client = RPCClient(rpc_url, number_of_cores)
if not _corpus_target:
try:
contract_code_response = rpc_client.contract_exists(contract_address)
except RPCCallError as e:
raise click.exceptions.UsageError(f"RPC endpoint." f"\n{e}")

if not contract_code_response:
LOGGER.warning(f"Contract code not found")
raise click.exceptions.ClickException(
f"Unable to find a contract deployed at {contract_address}."
)
seed_state = rpc_client.get_seed_state(
contract_address, other_addresses, _corpus_target
)

ide = determine_ide()

if ide == IDE.BROWNIE:
artifacts = BrownieJob(target, analyze_config["build_directory"])
artifacts.generate_payload()
elif ide == IDE.HARDHAT:
artifacts = HardhatJob(target, analyze_config["build_directory"])
artifacts.generate_payload()
elif ide == IDE.TRUFFLE:
raise click.exceptions.UsageError(
f"Projects using Truffle IDE is not supported right now"
)
else:
raise click.exceptions.UsageError(
f"Projects using plain solidity files is not supported right now"
)

faas_client = FaasClient(
faas_url=faas_url,
campaign_name_prefix=campaign_name_prefix,
project_type="brownie",
faas_url=faas_url, campaign_name_prefix=campaign_name_prefix, project_type=ide
)
try:
campaign_id = faas_client.create_faas_campaign(
campaign_data=brownie_artifacts, seed_state=seed_state
campaign_data=artifacts, seed_state=seed_state
)
click.echo(
"You can view campaign here: " + faas_url + "/campaigns/" + str(campaign_id)
)
except Exception as e:
LOGGER.warning(f"Could not submit campaign to the FaaS")
LOGGER.warning(
f"Could not submit campaign to the FaaS\n{traceback.format_exc()}"
)
raise click.exceptions.UsageError(
f"Unable to submit the campaign to the faas. Are you sure the service is running on {faas_url} ?"
)


pass

0 comments on commit 6e2461c

Please sign in to comment.