This repository has been archived by the owner on Feb 26, 2024. It is now read-only.

chore: upgrade @ethereumjs/vm to v6.0.0 (#3656)
Co-authored-by: David Murdoch <david@davidmurdoch.com>
MicaiahReid and davidmurdoch committed Nov 14, 2022
1 parent 999adf7 commit 2e8eec4
Showing 15 changed files with 10,565 additions and 10,480 deletions.
4,290 changes: 2,145 additions & 2,145 deletions src/chains/ethereum/address/package-lock.json

4,186 changes: 2,093 additions & 2,093 deletions src/chains/ethereum/block/package-lock.json

14 changes: 7 additions & 7 deletions src/chains/ethereum/ethereum/src/blockchain.ts
@@ -1,6 +1,6 @@
import { EOL } from "os";
import Miner, { Capacity } from "./miner/miner";
import Database from "./database";
import Database, { DBType } from "./database";
import Emittery from "emittery";
import {
BlockLogs,
@@ -289,7 +289,7 @@ export default class Blockchain extends Emittery<BlockchainTypedEvents> {
} else {
stateRoot = null;
}
this.trie = makeTrie(this, database.trie, stateRoot);
this.trie = makeTrie(this, database, stateRoot);
}

// create VM and listen to step events
@@ -1190,6 +1190,7 @@ export default class Blockchain extends Emittery<BlockchainTypedEvents> {
context: transactionContext
});
if (result.execResult.exceptionError) {
// @ts-ignore types are dumbs
throw new CallError(result);
} else {
return Data.from(result.execResult.returnValue || "0x");
@@ -1250,6 +1251,8 @@ export default class Blockchain extends Emittery<BlockchainTypedEvents> {
stateManager,
evm
});
//@ts-ignore
vm._allowUnlimitedContractSize = this.vm.evm._allowUnlimitedContractSize;

const storage: StorageRecords = {};

@@ -1591,11 +1594,7 @@ export default class Blockchain extends Emittery<BlockchainTypedEvents> {
const parentBlock = await this.blocks.getByHash(
targetBlock.header.parentHash.toBuffer()
);
const trie = makeTrie(
this,
this.#database.trie,
parentBlock.header.stateRoot
);
const trie = makeTrie(this, this.#database, parentBlock.header.stateRoot);

// get the contractAddress account storage trie
const contractAddressBuffer = Address.from(contractAddress).toBuffer();
@@ -1649,6 +1648,7 @@ export default class Blockchain extends Emittery<BlockchainTypedEvents> {
};

const rs = storageTrie.createReadStream();
// @ts-ignore
rs.on("data", handleData).on("error", reject).on("end", handleEnd);
});
};
@@ -152,15 +152,15 @@ export async function* findRelated(
request: Request,
options: FindOptions
) {
const readStream = db.createReadStream({
const readStream = db.iterator({
keys: true,
values: true,
...options
});

for await (const pair of readStream) {
const { key, value } = pair as unknown as { key: Buffer; value: Buffer };
const node = Tree.deserialize(key, value);
const [key, value] = pair as [key: Buffer, value: Uint8Array];
const node = Tree.deserialize(key, Buffer.from(value));
const { height: candidateHeight } = node.decodeKey();
const block = await getBlockByNumber(request, candidateHeight);
// if the chain has a block at this height, and the hash of the
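
The hunk above swaps the levelup-style createReadStream for the iterator() method from abstract-level, which yields [key, value] entries as an async iterable rather than an object-mode stream. A minimal sketch of that read pattern, assuming the level v8 package opened with buffer encodings; the database path and key bounds are illustrative, not taken from this commit:

import { Level } from "level";

const db = new Level<Buffer, Buffer>("./example-db", {
  keyEncoding: "buffer",
  valueEncoding: "buffer"
});

async function dumpRange(gte: Buffer, lt: Buffer) {
  // iterator() is an async iterable of [key, value] entries; values may come
  // back as Uint8Array, so convert before handing them to Buffer-based code
  // such as Tree.deserialize.
  for await (const [key, value] of db.iterator({ gte, lt })) {
    console.log(key.toString("hex"), Buffer.from(value).toString("hex"));
  }
}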
@@ -1,9 +1,6 @@
import { Tree } from "./tree";
import { promises } from "fs";
import envPaths from "env-paths";
import leveldown from "leveldown";
import sub from "subleveldown";
import encode from "encoding-down";
import * as lexico from "../lexicographic-key-codec";
import { BUFFER_ZERO, Data, Quantity } from "@ganache/utils";
import { Ancestry } from "./ancestry";
@@ -19,11 +16,11 @@ import type { AbstractLevelDOWN } from "abstract-leveldown";
import { GanacheLevelUp } from "../../database";
const levelup = require("levelup");

const levelupOptions = {
keyEncoding: "binary",
valueEncoding: "binary"
const levelOptions = {
keyEncoding: "buffer",
valueEncoding: "buffer",
prefix: ""
};
const leveldownOpts = { prefix: "" };
const maxValueByteBuffer = Buffer.from([0xff]);

/**
@@ -46,7 +43,9 @@ export class PersistentCache {
static async deleteDb(dbSuffix?: string) {
return new Promise((resolve, reject) => {
const directory = PersistentCache.getDbDirectory(dbSuffix);
leveldown.destroy(directory, err => {
// @ts-ignore `Level.destroy` does exist, `Level` types are just
// incomplete
Level.destroy(directory, err => {
if (err) return void reject(err);
resolve(void 0);
});
@@ -59,17 +58,16 @@ export class PersistentCache {
const cache = await PersistentCache.create(dbSuffix);
type Tree = Record<string, { descendants: Tree }>;
return await new Promise<Tree>(async resolve => {
const rs = cache.ancestorDb.createReadStream({
const rs = cache.ancestorDb.iterator({
gte: BUFFER_ZERO,
keys: true,
values: true
});
const tree: Tree = {};
const collection = {};
for await (const data of rs) {
const { key, value } = data as any as { key: Buffer; value: Buffer };

const node = Tree.deserialize(key, value);
const [key, value] = data as [key: Buffer, value: Uint8Array];
const node = Tree.deserialize(key, Buffer.from(value));
(node as any).height = node.decodeKey().height.toNumber();
const keyHex = key.toString("hex");
const parentKeyHex = node.closestKnownAncestor.toString("hex");
@@ -120,8 +118,8 @@ export class PersistentCache {
});
});
cache.db = db;
cache.cacheDb = sub(db, "c", levelupOptions);
cache.ancestorDb = sub(db, "a", levelupOptions);
cache.cacheDb = <GanacheSublevel>db.sublevel("c", levelOptions);
cache.ancestorDb = <GanacheSublevel>db.sublevel("a", levelOptions);
await cache.cacheDb.open();
await cache.ancestorDb.open();
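
The lines above replace the subleveldown wrapper with abstract-level's built-in sublevel() method, with the buffer encodings passed per sublevel. A minimal sketch of that setup, assuming the level v8 package; the "c" and "a" sublevel names mirror the hunk above, everything else is illustrative:

import { Level } from "level";

async function openCache(directory: string) {
  const db = new Level<Buffer, Buffer>(directory, {
    keyEncoding: "buffer",
    valueEncoding: "buffer"
  });
  // sublevels share the parent store under a key prefix, replacing the old
  // sub(db, name, options) calls from subleveldown
  const cacheDb = db.sublevel<Buffer, Buffer>("c", {
    keyEncoding: "buffer",
    valueEncoding: "buffer"
  });
  const ancestorDb = db.sublevel<Buffer, Buffer>("a", {
    keyEncoding: "buffer",
    valueEncoding: "buffer"
  });
  await cacheDb.open();
  await ancestorDb.open();
  return { db, cacheDb, ancestorDb };
}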

@@ -143,84 +141,90 @@ export class PersistentCache {

this.ancestry = new Ancestry(this.ancestorDb, closestAncestor);

const allKnownDescendants = [...targetBlock.closestKnownDescendants];
const atomicBatch = this.ancestorDb.batch();
try {
// if we changed closest ancestors remove our targetBlock from the previous
// ancestor so our target block doesn't appear in the database more than
// once, and update our targetBlock to point to this new ancestor
if (
previousClosestAncestor &&
!previousClosestAncestor.key.equals(closestAncestor.key)
) {
targetBlock.closestKnownAncestor = closestAncestor.key;

const index = previousClosestAncestor.closestKnownDescendants.findIndex(
buf => buf.equals(targetBlock.key)
);
previousClosestAncestor.closestKnownDescendants.splice(index, 1);
atomicBatch.put(
previousClosestAncestor.key,
previousClosestAncestor.serialize()
);
}

// if we changed closest ancestors remove our targetBlock from the previous
// ancestor so our target block doesn't appear in the database more than
// once, and update our targetBlock to point to this new ancestor
if (
previousClosestAncestor &&
!previousClosestAncestor.key.equals(closestAncestor.key)
) {
targetBlock.closestKnownAncestor = closestAncestor.key;

const index = previousClosestAncestor.closestKnownDescendants.findIndex(
buf => buf.equals(targetBlock.key)
);
previousClosestAncestor.closestKnownDescendants.splice(index, 1);
atomicBatch.put(
previousClosestAncestor.key,
previousClosestAncestor.serialize()
);
}
// if we don't have a closestAncestor it because the target block is block 0
if (closestAncestor == null) {
atomicBatch.put(targetBlock.key, targetBlock.serialize());
} else {
const ancestorsDescendants = [targetBlock.key];

await Promise.all(
closestAncestor.closestKnownDescendants.map(async descendantKey => {
// don't match ourself
if (descendantKey.equals(targetBlock.key)) return;

const { height: descendantHeight } = Tree.decodeKey(descendantKey);
// if the block number is less than our own it can't be our descendant
if (descendantHeight.toBigInt() <= height.toBigInt()) {
ancestorsDescendants.push(descendantKey);
return;
}

const descendantValue = await this.ancestorDb.get(descendantKey);
const descendantNode = Tree.deserialize(
descendantKey,
descendantValue
);

const descendantRawBlock = await this.getBlock(descendantHeight);
// if the block doesn't exist on our chain, it can't be our child,
// keep it in the parent
if (
descendantRawBlock == null ||
descendantRawBlock.hash !== Data.toString(descendantNode.hash, 32)
) {
ancestorsDescendants.push(descendantKey);
} else {
targetBlock.closestKnownDescendants.push(descendantNode.key);
// keep track of *all* known descendants so we don't bother
// checking if they are a known closest descendant later on
allKnownDescendants.push(
...descendantNode.closestKnownDescendants
);
descendantNode.closestKnownAncestor = targetBlock.key;
// update the descendant node with its newly assigned
// closestKnownAncestor
atomicBatch.put(descendantNode.key, descendantNode.serialize());
}
})
);

closestAncestor.closestKnownDescendants = ancestorsDescendants;
atomicBatch.put(closestAncestor.key, closestAncestor.serialize());
}

let allKnownDescendants = [...targetBlock.closestKnownDescendants];
// if we don't have a closestAncestor it because the target block is block 0
if (closestAncestor == null) {
// TODO(perf): we always re-save the targetBlock but could optimize to only
// resave if it is needed.
// Issue: https://github.com/trufflesuite/ganache/issues/3485
atomicBatch.put(targetBlock.key, targetBlock.serialize());
} else {
const ancestorsDescendants = [targetBlock.key];

await Promise.all(
closestAncestor.closestKnownDescendants.map(async descendantKey => {
// don't match ourself
if (descendantKey.equals(targetBlock.key)) return;

const { height: descendantHeight } = Tree.decodeKey(descendantKey);
// if the block number is less than our own it can't be our descendant
if (descendantHeight.toBigInt() <= height.toBigInt()) {
ancestorsDescendants.push(descendantKey);
return;
}

const descendantValue = await this.ancestorDb.get(descendantKey);
const descendantNode = Tree.deserialize(
descendantKey,
descendantValue
);

const descendantRawBlock = await this.getBlock(descendantHeight);
// if the block doesn't exist on our chain, it can't be our child,
// keep it in the parent
if (
descendantRawBlock == null ||
descendantRawBlock.hash !== Data.toString(descendantNode.hash, 32)
) {
ancestorsDescendants.push(descendantKey);
} else {
targetBlock.closestKnownDescendants.push(descendantNode.key);
// keep track of *all* known descendants so we don't bother
// checking if they are a known closest descendant later on
allKnownDescendants.push(...descendantNode.closestKnownDescendants);
descendantNode.closestKnownAncestor = targetBlock.key;
// update the descendant node with its newly assigned
// closestKnownAncestor
atomicBatch.put(descendantNode.key, descendantNode.serialize());
}
})
);

closestAncestor.closestKnownDescendants = ancestorsDescendants;
atomicBatch.put(closestAncestor.key, closestAncestor.serialize());
await atomicBatch.write();
} catch (e: any) {
atomicBatch.close();
throw e;
}

// TODO(perf): we always re-save the targetBlock but could optimize to only
// resave if it is needed.
// Issue: https://github.com/trufflesuite/ganache/issues/3485
atomicBatch.put(targetBlock.key, targetBlock.serialize());

await atomicBatch.write();

// we DO want to re-balance the descendants, but we don't want to wait for
// it because it can't effect our current fork block's cache results since
// these caches will be for blocks higher than our own fork block
@@ -309,7 +313,7 @@ export class PersistentCache {
// check `this.ancestorDb.isOpen()` as we don't need to try to write if
// the db was shutdown in the meantime. This can happen if ganache was
// closed while we were still updating the descendants
if (atomicBatch.length > 0 && this.ancestorDb.isOpen())
if (atomicBatch.length > 0 && this.ancestorDb.status === "open")
await atomicBatch.write();
}
}
@@ -323,23 +327,23 @@ export class PersistentCache {
const start = lexico.encode([height.toBuffer(), bufKey]);
const end = Buffer.concat([start, maxValueByteBuffer]);

const readStream = this.cacheDb.createReadStream({
const readStream = this.cacheDb.iterator({
gt: start,
lt: end,
keys: true,
values: true
});

for await (const data of readStream) {
const { key: k, value } = data as any as { key: Buffer; value: Buffer };
const [k, value] = data as [key: Buffer, value: Uint8Array];
const [_height, _key, blockHash] = lexico.decode(k);
// if our key no longer matches make sure we don't keep searching
if (!_key.equals(bufKey)) return;
if (
this.hashBuffer.equals(blockHash) ||
(await this.ancestry.has(blockHash))
) {
return value;
return Buffer.from(value);
}
}
}
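
The rewritten resolve logic above queues its writes on an abstract-level chained batch, closes the batch if anything throws before the write, and checks db.status === "open" instead of the old levelup isOpen(). A minimal sketch of that pattern, assuming any already-opened abstract-level store; the names and entry shape are illustrative:

import type { AbstractLevel } from "abstract-level";

async function writeAtomically(
  db: AbstractLevel<any, Buffer, Buffer>,
  entries: [Buffer, Buffer][]
) {
  const batch = db.batch(); // chained batch: queue operations, commit once
  try {
    for (const [key, value] of entries) {
      batch.put(key, value);
    }
    // skip the commit if the store was closed in the meantime
    if (batch.length > 0 && db.status === "open") {
      await batch.write(); // commits and closes the batch
    } else {
      await batch.close(); // nothing to commit; release the batch
    }
  } catch (e) {
    await batch.close();
    throw e;
  }
}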
7 changes: 4 additions & 3 deletions src/chains/ethereum/ethereum/src/forking/trie.ts
@@ -21,8 +21,8 @@ const GET_STORAGE_AT = "eth_getStorageAt";
const MetadataSingletons = new WeakMap<TrieDB, GanacheLevelUp>();

const LEVELDOWN_OPTIONS = {
keyEncoding: "binary",
valueEncoding: "binary"
keyEncoding: "buffer",
valueEncoding: "buffer"
};

function isEqualKey(encodedKey: Buffer, address: Buffer, key: Buffer) {
@@ -95,6 +95,7 @@ export class ForkTrie extends GanacheTrie {
Quantity.from(endBlockNumber.toBigInt() + 1n).toBuffer()
])
});
//@ts-ignore
const batch = db.batch();
for await (const [key] of stream) {
batch.del(key);
@@ -225,7 +226,7 @@ export class ForkTrie extends GanacheTrie {
account.codeHash = keccak(code);
if (!account.codeHash.equals(KECCAK256_NULL)) {
// insert the code directly into the database with a key of `codeHash`
promises[2] = this.db.put(account.codeHash, code);
promises[2] = this._db.put(account.codeHash, code);
}
}
} catch (e) {
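
The first hunk above clears a range of keys by pairing an iterator with a chained batch. A minimal sketch of that range-delete pattern, assuming an abstract-level store with buffer keys; the bounds, database, and iterator options are illustrative, not taken from this commit:

import { Level } from "level";

const trieDb = new Level<Buffer, Buffer>("./trie-db", {
  keyEncoding: "buffer",
  valueEncoding: "buffer"
});

// delete every key in [start, end) by feeding a keys-only iterator into a batch
async function deleteRange(start: Buffer, end: Buffer) {
  const batch = trieDb.batch();
  for await (const [key] of trieDb.iterator({ gte: start, lt: end, values: false })) {
    batch.del(key);
  }
  await batch.write();
}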
1 change: 1 addition & 0 deletions src/chains/ethereum/ethereum/src/helpers/run-call.ts
@@ -4,6 +4,7 @@ import { Address } from "@ganache/ethereum-address";
import { VM } from "@ethereumjs/vm";
import { KECCAK256_NULL } from "@ethereumjs/util";
import { GanacheTrie } from "./trie";
import type { Address as EthereumJsAddress } from "@ethereumjs/util";

export type SimulationTransaction = {
/**
