diff --git a/packages/debugger/lib/data/sagas/index.js b/packages/debugger/lib/data/sagas/index.js index 7b0d1929fc2..ffad6800223 100644 --- a/packages/debugger/lib/data/sagas/index.js +++ b/packages/debugger/lib/data/sagas/index.js @@ -12,9 +12,8 @@ import { import { TICK } from "lib/trace/actions"; import * as actions from "../actions"; -import * as trace from "lib/trace/sagas"; import * as evm from "lib/evm/sagas"; -import * as web3 from "lib/web3/sagas"; +import * as trace from "lib/trace/sagas"; import data from "../selectors"; @@ -748,11 +747,22 @@ function* variablesAndMappingsSaga() { function* decodeMappingKeySaga(indexDefinition, keyDefinition) { //something of a HACK -- cleans any out-of-range booleans - //resulting from the main mapping key decoding loop - const indexValue = yield* decodeMappingKeyCore( - indexDefinition, - keyDefinition - ); + //resulting from the main mapping key decoding loop, + //and also filters out errors + let indexValue = yield* decodeMappingKeyCore(indexDefinition, keyDefinition); + if (indexValue) { + indexValue = Codec.Conversion.cleanBool(indexValue); + switch (indexValue.kind) { + case "value": + return indexValue; + case "error": + //if it's still an error after cleaning booleans... + //let's not store it as a mapping key + return null; + } + } else { + return indexValue; + } return indexValue ? Codec.Conversion.cleanBool(indexValue) : indexValue; } @@ -1029,12 +1039,7 @@ function fetchBasePath( return null; } -export function* decode( - definition, - ref, - compilationId, - indicateUnknown = false -) { +export function* decode(definition, ref, compilationId) { const userDefinedTypes = yield select(data.views.userDefinedTypes); const state = yield select(data.current.state); const mappingKeys = yield select(data.views.mappingKeys); @@ -1049,8 +1054,6 @@ export function* decode( debug("ref: %o"); debug("compilationId: %s", compilationId); - const ZERO_WORD = new Uint8Array(Codec.Evm.Utils.WORD_SIZE); //automatically filled with zeroes - const decoder = Codec.decodeVariable( definition, ref, @@ -1074,19 +1077,16 @@ export function* decode( let response; switch (request.type) { case "storage": - //the debugger supplies all storage it knows at the beginning. - //any storage it does not know is presumed to be zero. - //(unlesss indicateUnknown is passed, in which case we use - //null as a deliberately invalid response) - response = indicateUnknown ? 
null : ZERO_WORD; + response = yield* evm.requestStorage(request.slot); break; case "code": - response = yield* requestCode(request.address); + response = yield* evm.requestCode(request.address); break; default: debug("unrecognized request type!"); } debug("sending response"); + debug("response: %O", response); result = decoder.next(response); } //at this point, result.value holds the final value @@ -1132,7 +1132,7 @@ export function* decodeReturnValue() { switch (request.type) { //skip storage case, it won't happen here case "code": - response = yield* requestCode(request.address); + response = yield* evm.requestCode(request.address); break; default: debug("unrecognized request type!"); @@ -1197,7 +1197,7 @@ export function* decodeCall(decodeCurrent = false) { switch (request.type) { //skip storage case, it won't happen here case "code": - response = yield* requestCode(request.address); + response = yield* evm.requestCode(request.address); break; default: debug("unrecognized request type!"); @@ -1245,7 +1245,7 @@ export function* decodeLog() { switch (request.type) { //skip storage case, it won't happen here case "code": - response = yield* requestCode(request.address); + response = yield* evm.requestCode(request.address); break; default: debug("unrecognized request type!"); @@ -1259,33 +1259,6 @@ export function* decodeLog() { return result.value; } -//NOTE: calling this *can* add a new instance, which will not -//go away on a reset! Yes, this is a little weird, but we -//decided this is OK for now -function* requestCode(address) { - const NO_CODE = new Uint8Array(); //empty array - const blockNumber = yield select(data.views.blockNumber); - const instances = yield select(data.views.instances); - - if (address in instances) { - return instances[address]; - } else if (address === Codec.Evm.Utils.ZERO_ADDRESS) { - //HACK: to avoid displaying the zero address to the user as an - //affected address just because they decoded a contract or external - //function variable that hadn't been initialized yet, we give the - //zero address's codelessness its own private cache :P - return NO_CODE; - } else { - //I don't want to write a new web3 saga, so let's just use - //obtainBinaries with a one-element array - debug("fetching binary"); - let binary = (yield* web3.obtainBinaries([address], blockNumber))[0]; - debug("adding instance"); - yield* evm.addInstance(address, binary); - return Codec.Conversion.toBytes(binary); - } -} - export function* reset() { yield put(actions.reset()); } diff --git a/packages/debugger/lib/data/selectors/index.js b/packages/debugger/lib/data/selectors/index.js index 23cf024bffa..2249e465682 100644 --- a/packages/debugger/lib/data/selectors/index.js +++ b/packages/debugger/lib/data/selectors/index.js @@ -373,7 +373,7 @@ const data = createSelectorTree({ .filter(slot => slot.key !== undefined) ), - /* + /** * data.views.blockNumber * returns block number as string */ @@ -381,7 +381,17 @@ const data = createSelectorTree({ block.number.toString() ), - /* + /** + * data.views.blockHash + */ + blockHash: createLeaf([evm.transaction.blockHash], identity), + + /** + * data.views.txIndex + */ + txIndex: createLeaf([evm.transaction.txIndex], identity), + + /** * data.views.instances * same as evm.current.codex.instances, but we just map address => binary, * we don't bother with context, and also the code is a Uint8Array @@ -395,7 +405,7 @@ const data = createSelectorTree({ ) ), - /* + /** * data.views.contexts * same as evm.info.contexts, but: * 1. 
we strip out fields irrelevant to codec diff --git a/packages/debugger/lib/debugger.js b/packages/debugger/lib/debugger.js index bddea900872..f8143ad9491 100644 --- a/packages/debugger/lib/debugger.js +++ b/packages/debugger/lib/debugger.js @@ -28,11 +28,17 @@ const Debugger = { * @return {Debugger} instance */ forTx: async function (txHash, options = {}) { - let { contracts, files, provider, compilations, lightMode } = options; + let { contracts, files, provider, compilations, lightMode, storageLookup } = + options; if (!compilations) { compilations = Compilations.Utils.shimArtifacts(contracts, files); } - let session = new Session(compilations, provider, { lightMode }, txHash); + let session = new Session( + compilations, + provider, + { lightMode, storageLookup }, + txHash + ); await session.ready(); @@ -50,7 +56,9 @@ const Debugger = { if (!compilations) { compilations = Compilations.Utils.shimArtifacts(contracts, files); } - let session = new Session(compilations, provider, { lightMode }); + let session = new Session(compilations, provider, { + lightMode + }); await session.ready(); diff --git a/packages/debugger/lib/evm/actions/index.js b/packages/debugger/lib/evm/actions/index.js index d148fadfae1..ffd0d123350 100644 --- a/packages/debugger/lib/evm/actions/index.js +++ b/packages/debugger/lib/evm/actions/index.js @@ -53,6 +53,15 @@ export function saveGlobals(origin, gasprice, block) { }; } +export const SAVE_TX_IDENTIFICATION = "EVM_SAVE_TX_IDENTIFICATION"; +export function saveTxIdentification(blockHash, txIndex) { + return { + type: SAVE_TX_IDENTIFICATION, + blockHash, + txIndex + }; +} + export const SAVE_STATUS = "EVM_SAVE_STATUS"; export function saveStatus(status) { return { @@ -142,3 +151,19 @@ export function unloadTransaction() { type: UNLOAD_TRANSACTION }; } + +export const SET_STORAGE_LOOKUP = "EVM_SET_STORAGE_LOOKUP"; +export function setStorageLookup(status) { + return { + type: SET_STORAGE_LOOKUP, + status + }; +} + +export const SET_STORAGE_LOOKUP_SUPPORT = "EVM_SET_STORAGE_LOOKUP_SUPPORT"; +export function setStorageLookupSupport(status) { + return { + type: SET_STORAGE_LOOKUP_SUPPORT, + status + }; +} diff --git a/packages/debugger/lib/evm/reducers.js b/packages/debugger/lib/evm/reducers.js index cd182e19498..6bc6e025817 100644 --- a/packages/debugger/lib/evm/reducers.js +++ b/packages/debugger/lib/evm/reducers.js @@ -87,6 +87,33 @@ const globals = combineReducers({ block }); +function blockHash(state = null, action) { + switch (action.type) { + case actions.SAVE_TX_IDENTIFICATION: + return action.blockHash; + case actions.UNLOAD_TRANSACTION: + return null; + default: + return state; + } +} + +function txIndex(state = null, action) { + switch (action.type) { + case actions.SAVE_TX_IDENTIFICATION: + return action.txIndex; + case actions.UNLOAD_TRANSACTION: + return null; + default: + return state; + } +} + +const txIdentification = combineReducers({ + blockHash, + txIndex +}); + function status(state = null, action) { switch (action.type) { case actions.SAVE_STATUS: @@ -145,6 +172,7 @@ function affectedInstances(state = DEFAULT_AFFECTED_INSTANCES, action) { const transaction = combineReducers({ globals, + txIdentification, status, initialCall, affectedInstances @@ -404,10 +432,32 @@ const proc = combineReducers({ codex }); +function storageLookup(state = null, action) { + if (action.type === actions.SET_STORAGE_LOOKUP) { + return Boolean(action.status); //force Boolean to prevent undefined + } else { + return state; + } +} + +function storageLookupSupported(state = 
null, action) { + if (action.type === actions.SET_STORAGE_LOOKUP_SUPPORT) { + return action.status; + } else { + return state; + } +} + +const application = combineReducers({ + storageLookupSupported, + storageLookup +}); + const reducer = combineReducers({ info, transaction, - proc + proc, + application }); export default reducer; diff --git a/packages/debugger/lib/evm/sagas/index.js b/packages/debugger/lib/evm/sagas/index.js index cffd710d8be..7b680086be3 100644 --- a/packages/debugger/lib/evm/sagas/index.js +++ b/packages/debugger/lib/evm/sagas/index.js @@ -3,12 +3,15 @@ const debug = debugModule("debugger:evm:sagas"); import { put, takeEvery, select } from "redux-saga/effects"; import { prefixName, keccak256 } from "lib/helpers"; +import * as Codec from "@truffle/codec"; +import BN from "bn.js"; import { TICK } from "lib/trace/actions"; import * as actions from "../actions"; import evm from "../selectors"; +import * as web3 from "lib/web3/sagas"; import * as trace from "lib/trace/sagas"; /** @@ -46,6 +49,64 @@ export function* addInstance(address, binary) { return context; } +export function* recordStorage(address, slot, word) { + const slotAsPrefixlessHex = Codec.Conversion.toHexString( + slot, + Codec.Evm.Utils.WORD_SIZE + ).slice(2); //remove "0x" prefix in addition to converting to hex + yield put(actions.load(address, slotAsPrefixlessHex, word)); +} + +//NOTE: calling this *can* add a new instance, which will not +//go away on a reset! Yes, this is a little weird, but we +//decided this is OK for now +export function* requestCode(address) { + const blockNumber = (yield select( + evm.transaction.globals.block + )).number.toString(); + const instances = yield select(evm.current.codex.instances); + + if (address in instances) { + //because this function is used by data, we return a Uint8Array + return Codec.Conversion.toBytes(instances[address].binary); + //former special case here for zero address is now gone since it's + //now covered by this case + } else { + //I don't want to write a new web3 saga, so let's just use + //obtainBinaries with a one-element array + debug("fetching binary"); + let binary = (yield* web3.obtainBinaries([address], blockNumber))[0]; + debug("adding instance"); + yield* addInstance(address, binary); + return Codec.Conversion.toBytes(binary); + } +} + +//NOTE: just like requestCode, this can also add to the codex! +//yes, this is also weird. +export function* requestStorage(slot) { + //slot is a BN here + const currentStorage = yield select(evm.current.codex.storage); + const slotAsHex = Codec.Conversion.toHexString(slot).slice(2); //remove 0x prefix + if (slotAsHex in currentStorage) { + //because this function is used by data, we return a Uint8Array + return Codec.Conversion.toBytes(currentStorage[slotAsHex]); + } + //if we don't already know it, we'll have to look it up + const storageLookup = yield select(evm.application.storageLookup); + if (storageLookup) { + const address = (yield select(evm.current.call)).storageAddress; + const blockHash = yield select(evm.transaction.blockHash); //cannot use number here! 
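+    //(presumably, debug_storageRangeAt pins down the point in history we want
+    //by the block hash and the transaction's index within that block, which is
+    //why we keep those around rather than just the block number)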
+ const txIndex = yield select(evm.transaction.txIndex); + const word = yield* web3.obtainStorage(address, slot, blockHash, txIndex); + yield* recordStorage(address, slot, word); + return Codec.Conversion.toBytes(word); + } else { + //indicates to codec this storage is unknown + return null; + } +} + /** * Adds known deployed instance of binary at address * to list of affected instances, *not* to codex @@ -113,11 +174,13 @@ export function* begin({ sender, value, gasprice, - block + block, + blockHash, + txIndex }) { yield put(actions.saveGlobals(sender, gasprice, block)); yield put(actions.saveStatus(status)); - debug("codex: %O", yield select(evm.current.codex)); + yield put(actions.saveTxIdentification(blockHash, txIndex)); if (address) { yield put(actions.call(address, data, storageAddress, sender, value)); } else { @@ -224,6 +287,44 @@ export function* unload() { yield put(actions.unloadTransaction()); } +export function* setStorageLookup(status) { + const supported = yield* isStorageLookupSupported(); + if (status && !supported) { + throw new Error( + "The storageLookup option was passed, but the debug_storageRangeAt method is not available on this client." + ); + } + yield put(actions.setStorageLookup(status)); +} + +function* isStorageLookupSupported() { + const storedValue = yield select(evm.application.storageLookupSupported); + //exit out early if it's already set + if (storedValue !== null) { + return storedValue; + } + const blockHash = yield select(evm.transaction.blockHash); //cannot use number here! + let supported; + try { + //note we need to use a blockHash and txIndex that actually exists, otherwise + //we'll get an error for a different reason; that's why this procedure is + //only performed once we have a transaction loaded, even though notionally it's + //independent of any transaction + yield* web3.obtainStorage( + Codec.Evm.Utils.ZERO_ADDRESS, + new BN(0), + blockHash, + 0 //to avoid delays, we'll use 0 rather than the actual tx index... + //index 0 certainly exists as long as the block has any transactions! 
+ ); //throw away the value + supported = true; + } catch { + supported = false; + } + yield put(actions.setStorageLookupSupport(supported)); + return supported; +} + export function* saga() { yield takeEvery(TICK, tickSaga); } diff --git a/packages/debugger/lib/evm/selectors/index.js b/packages/debugger/lib/evm/selectors/index.js index 5218d83702f..fa68861fa1f 100644 --- a/packages/debugger/lib/evm/selectors/index.js +++ b/packages/debugger/lib/evm/selectors/index.js @@ -451,6 +451,27 @@ const evm = createSelectorTree({ */ state: state => state.evm, + /** + * evm.application + */ + application: { + /** + * evm.application.storageLookup + */ + storageLookup: createLeaf( + ["/state"], + state => state.application.storageLookup + ), + + /** + * evm.application.storageLookupSupported + */ + storageLookupSupported: createLeaf( + ["/state"], + state => state.application.storageLookupSupported + ) + }, + /** * evm.info */ @@ -503,6 +524,22 @@ const evm = createSelectorTree({ block: createLeaf(["/state"], state => state.transaction.globals.block) }, + /** + * evm.transaction.blockHash + */ + blockHash: createLeaf( + ["/state"], + state => state.transaction.txIdentification.blockHash + ), + + /** + * evm.transaction.txIndex + */ + txIndex: createLeaf( + ["/state"], + state => state.transaction.txIdentification.txIndex + ), + /** * evm.transaction.status */ diff --git a/packages/debugger/lib/session/actions/index.js b/packages/debugger/lib/session/actions/index.js index fd51a06841c..c2b0bc4377a 100644 --- a/packages/debugger/lib/session/actions/index.js +++ b/packages/debugger/lib/session/actions/index.js @@ -8,10 +8,11 @@ export function start(provider, txHash) { } export const LOAD_TRANSACTION = "SESSION_LOAD_TRANSACTION"; -export function loadTransaction(txHash) { +export function loadTransaction(txHash, options) { return { type: LOAD_TRANSACTION, - txHash + txHash, + options }; } @@ -95,7 +96,9 @@ export function setFullMode() { export const START_FULL_MODE = "SESSION_START_FULL_MODE"; export function startFullMode() { - return { type: START_FULL_MODE }; + return { + type: START_FULL_MODE + }; } export const ADD_COMPILATIONS = "SESSION_ADD_COMPILATIONS"; @@ -107,3 +110,11 @@ export function addCompilations(sources, contexts, contracts) { contracts }; } + +export const SET_STORAGE_VISIBILITY = "SESSION_SET_STORAGE_VISIBILITY"; +export function setStorageVisibility(visibility) { + return { + type: SET_STORAGE_VISIBILITY, + visibility + }; +} diff --git a/packages/debugger/lib/session/index.js b/packages/debugger/lib/session/index.js index 1b1e7fcbb43..ced1499c60c 100644 --- a/packages/debugger/lib/session/index.js +++ b/packages/debugger/lib/session/index.js @@ -409,11 +409,13 @@ export default class Session { } //returns true on success, false on already loaded; throws on failure - async load(txHash) { + async load(txHash, loadOptions = {}) { if (this.view(session.status.loaded)) { return false; } - return await this.readyAgainAfterLoading(actions.loadTransaction(txHash)); + return await this.readyAgainAfterLoading( + actions.loadTransaction(txHash, loadOptions) + ); } //returns true on success, false on already unloaded @@ -490,10 +492,7 @@ export default class Session { return true; } - /** - * see variables() for supported options - */ - async variable(name, options) { + async variable(name) { const definitions = this.view(data.current.identifiers.definitions); const refs = this.view(data.current.identifiers.refs); const compilationId = this.view(data.current.compilationId); @@ -505,16 
+504,11 @@ export default class Session { dataSagas.decode, definitions[name], refs[name], - compilationId, - (options || {}).indicateUnknown + compilationId ); } - /** - * only current option is indicateUnknown, which causes unknown storage - * to yield a StorageNotSuppliedError instead of zero - */ - async variables(options) { + async variables() { if (!this.view(session.status.loaded)) { return {}; } @@ -528,8 +522,7 @@ export default class Session { dataSagas.decode, definitions[identifier], ref, - compilationId, - (options || {}).indicateUnknown + compilationId ); } } diff --git a/packages/debugger/lib/session/sagas/index.js b/packages/debugger/lib/session/sagas/index.js index 56606a5483f..cd5bce2b4f2 100644 --- a/packages/debugger/lib/session/sagas/index.js +++ b/packages/debugger/lib/session/sagas/index.js @@ -73,6 +73,7 @@ export function* saga(moduleOptions) { //save allocation table debug("saving allocation table"); yield* data.recordAllocations(); + //note: we don't need to explicitly set full mode, it's the default } else { debug("setting light mode"); @@ -86,7 +87,7 @@ export function* saga(moduleOptions) { //process transaction (if there is one) //(note: this part may also set the error state) if (txHash !== undefined) { - yield* processTransaction(txHash); + yield* processTransaction(txHash, moduleOptions); } debug("readying"); @@ -112,14 +113,14 @@ function* addCompilations({ sources, contexts, contracts }) { function* startFullMode() { debug("session: %O", session); - let lightMode = yield select(session.status.lightMode); + const lightMode = yield select(session.status.lightMode); if (!lightMode) { //better not start this twice! return; } debug("turning on data & txlog listeners"); const listenersToActivate = [data.saga, txlog.saga]; - for (let listener of listenersToActivate) { + for (const listener of listenersToActivate) { yield fork(listener); } @@ -133,16 +134,20 @@ function* startFullMode() { yield* trace.addSubmoduleToCount(listenersToActivate.length); - //begin any full-mode modules that need beginning - yield* txlog.begin(); + if (yield select(session.status.loaded)) { + //begin any full-mode modules that need beginning + yield* txlog.begin(); + //we don't need to perform setup regarding storage visibility, + //as that will have been already been done on tx load + } yield put(actions.setFullMode()); } -export function* processTransaction(txHash) { +export function* processTransaction(txHash, loadOptions) { // process transaction debug("fetching transaction info"); - let err = yield* fetchTx(txHash); + let err = yield* fetchTx(txHash, loadOptions); if (err) { debug("error %o", err); yield* error(err); @@ -166,7 +171,7 @@ function* forkListeners(moduleOptions) { return yield all(apps.map(app => fork(app.saga))); } -function* fetchTx(txHash) { +function* fetchTx(txHash, loadOptions) { let result = yield* web3.inspectTransaction(txHash); debug("result %o", result); @@ -216,6 +221,16 @@ function* fetchTx(txHash) { //full-mode-only modules yield* txlog.begin(); } + try { + //finally, enable storage lookup. We do this even in light mode, since + //full mode might be set later, and we have to do it on loading the tx. + //Ideally this would be done earlier, but in the current setup, it can't + //occur until after evm.begin(), so it's here. + yield* evm.setStorageLookup(loadOptions.storageLookup); + } catch (error) { + //remember, this function *returns* errors rather than throwing them! 
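+    //(in practice the only error expected here is setStorageLookup reporting
+    //that storageLookup was requested but debug_storageRangeAt is unavailable)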
+ return error; + } } function* recordContexts(contexts) { @@ -268,6 +283,6 @@ export function* unload() { //note that load takes an action as its argument, which is why it's separate //from processTransaction -function* load({ txHash }) { - yield* processTransaction(txHash); +function* load({ txHash, options }) { + yield* processTransaction(txHash, options); } diff --git a/packages/debugger/lib/web3/actions/index.js b/packages/debugger/lib/web3/actions/index.js index 0c33b0f2e05..877b7b6faf7 100644 --- a/packages/debugger/lib/web3/actions/index.js +++ b/packages/debugger/lib/web3/actions/index.js @@ -23,6 +23,17 @@ export function fetchBinary(address, block) { }; } +export const FETCH_STORAGE = "FETCH_STORAGE"; +export function fetchStorage(address, slot, blockHash, txIndex) { + return { + type: FETCH_STORAGE, + address, + slot, + blockHash, + txIndex + }; +} + export const RECEIVE_BINARY = "RECEIVE_BINARY"; export function receiveBinary(address, binary) { return { @@ -32,6 +43,24 @@ export function receiveBinary(address, binary) { }; } +export const RECEIVE_STORAGE = "RECEIVE_STORAGE"; +export function receiveStorage(address, slot, word) { + return { + type: RECEIVE_STORAGE, + address, + slot, + word + }; +} + +export const RECEIVE_STORAGE_FAIL = "RECEIVE_STORAGE_FAIL"; +export function receiveStorageFail(error) { + return { + type: RECEIVE_STORAGE_FAIL, + error + }; +} + export const RECEIVE_TRACE = "RECEIVE_TRACE"; export function receiveTrace(trace) { return { @@ -50,7 +79,9 @@ export function receiveCall({ sender, value, gasprice, - block + block, + blockHash, + txIndex }) { return { type: RECEIVE_CALL, @@ -62,7 +93,9 @@ export function receiveCall({ sender, value, gasprice, - block + block, + blockHash, + txIndex }; } diff --git a/packages/debugger/lib/web3/adapter.js b/packages/debugger/lib/web3/adapter.js index 58a9f041454..abae4b3d6f3 100644 --- a/packages/debugger/lib/web3/adapter.js +++ b/packages/debugger/lib/web3/adapter.js @@ -2,6 +2,7 @@ import debugModule from "debug"; const debug = debugModule("debugger:web3:adapter"); import Web3 from "web3"; +import * as Codec from "@truffle/codec"; import { promisify } from "util"; export default class Web3Adapter { @@ -18,14 +19,14 @@ export default class Web3Adapter { const result = await promisify(provider.send.bind(provider))({ jsonrpc: "2.0", method: "debug_traceTransaction", + id: Date.now(), params: [ txHash, { enableMemory: true, //recent geth versions require this option disableStorage: true //we no longer use storage } - ], - id: new Date().getTime() + ] }); if (!result.result) { //we assume if there's no result then there is an error. @@ -55,6 +56,44 @@ export default class Web3Adapter { return await this.web3.eth.getChainId(); } + //slot should be given as a 32-byte hex string *w/o* 0x + async getExistingStorage(address, slot, blockHash, txIndex) { + debug("slot: %O", slot); + const provider = this.web3.currentProvider; + const hashedSlot = Web3.utils.soliditySha3({ + type: "bytes", + value: slot + }); + //note: see comment in getTrace for why we do this thing with bind + const result = await promisify(provider.send.bind(provider))({ + jsonrpc: "2.0", + method: "debug_storageRangeAt", + id: Date.now(), + params: [ + blockHash, + txIndex, + address, + hashedSlot, + 1 //we only want the one slot + ] + }); + //again, see above for an explanation of the logic here + if (result.result) { + const storage = result.result.storage; + debug("hashedSlot: %O", hashedSlot); + debug("storage: %O", storage); + debug("found? 
%O", hashedSlot in storage); + if (hashedSlot in storage) { + return storage[hashedSlot].value; + } else { + const zeroWord = "0x" + "00".repeat(Codec.Evm.Utils.WORD_SIZE); + return zeroWord; + } + } else { + throw new Error(result.error.message); + } + } + /** * getDeployedCode - get the deployed code for an address from the client * NOTE: the block argument is optional diff --git a/packages/debugger/lib/web3/sagas/index.js b/packages/debugger/lib/web3/sagas/index.js index 7d7da2a7ab9..06e76721a11 100644 --- a/packages/debugger/lib/web3/sagas/index.js +++ b/packages/debugger/lib/web3/sagas/index.js @@ -8,7 +8,9 @@ import { fork, join, take, - put + put, + race, + call } from "redux-saga/effects"; import { prefixName } from "lib/helpers"; @@ -68,7 +70,7 @@ function* fetchTransactionInfo(adapter, { txHash }) { yield put(session.saveBlock(block)); //these ones get grouped together for convenience - let solidityBlock = { + const solidityBlock = { coinbase: block.miner, difficulty: new BN(block.difficulty), gaslimit: new BN(block.gasLimit), @@ -90,11 +92,13 @@ function* fetchTransactionInfo(adapter, { txHash }) { sender: tx.from, value: new BN(tx.value), gasprice: new BN(tx.gasPrice), - block: solidityBlock + block: solidityBlock, + blockHash: block.hash, + txIndex: tx.transactionIndex }) ); } else { - let storageAddress = Web3.utils.isAddress(receipt.contractAddress) + const storageAddress = Web3.utils.isAddress(receipt.contractAddress) ? receipt.contractAddress : Codec.Evm.Utils.ZERO_ADDRESS; yield put( @@ -105,20 +109,14 @@ function* fetchTransactionInfo(adapter, { txHash }) { sender: tx.from, value: new BN(tx.value), gasprice: new BN(tx.gasPrice), - block: solidityBlock + block: solidityBlock, + blockHash: block.hash, + txIndex: tx.transactionIndex }) ); } } -function* fetchBinary(adapter, { address, block }) { - debug("fetching binary for %s", address); - let binary = yield apply(adapter, adapter.getDeployedCode, [address, block]); - - debug("received binary for %s", address); - yield put(actions.receiveBinary(address, binary)); -} - export function* inspectTransaction(txHash) { yield put(actions.inspect(txHash)); @@ -142,7 +140,9 @@ export function* inspectTransaction(txHash) { sender, value, gasprice, - block + block, + blockHash, + txIndex } = yield take(actions.RECEIVE_CALL); debug("received call"); @@ -156,7 +156,9 @@ export function* inspectTransaction(txHash) { sender, value, gasprice, - block + block, + blockHash, + txIndex }; } @@ -175,6 +177,14 @@ export function* obtainBinaries(addresses, block) { return binaries; } +function* fetchBinary(adapter, { address, block }) { + debug("fetching binary for %s", address); + let binary = yield apply(adapter, adapter.getDeployedCode, [address, block]); + + debug("received binary for %s", address); + yield put(actions.receiveBinary(address, binary)); +} + function* receiveBinary(address) { let { binary } = yield take( action => action.type == actions.RECEIVE_BINARY && action.address == address @@ -184,6 +194,59 @@ function* receiveBinary(address) { return binary; } +export function* obtainStorage(address, slot, blockHash, txIndex) { + debug("forking"); + const task = yield fork(function* () { + return yield race({ + success: call(receiveStorage, address, slot), + failure: call(receiveStorageErrorHandler) + }); + }); + yield put(actions.fetchStorage(address, slot, blockHash, txIndex)); + debug("joining"); + const result = yield join(task); + debug("result: %O", result); + if (result.failure) { + throw result.failure; + } else { + return 
result.success; + } +} + +function* fetchStorage(adapter, { address, slot, blockHash, txIndex }) { + const slotAsHex = Codec.Conversion.toHexString( + slot, + Codec.Evm.Utils.WORD_SIZE + ); + try { + const word = yield apply(adapter, adapter.getExistingStorage, [ + address, + slotAsHex, + blockHash, + txIndex + ]); + yield put(actions.receiveStorage(address, slot, word)); + } catch (error) { + yield put(actions.receiveStorageFail(error)); + } +} + +function* receiveStorage(address, slot) { + const { word } = yield take( + action => + action.type == actions.RECEIVE_STORAGE && + action.address == address && + action.slot.eq(slot) //remember, these are BNs + ); + return word; +} + +function* receiveStorageErrorHandler() { + const { error } = yield take(actions.RECEIVE_STORAGE_FAIL); + return error; //because this is forked, we need to return + //rather than throw to prevent redux-saga from giving up +} + export function* init(provider) { yield put(actions.init(provider)); } @@ -195,6 +258,7 @@ export function* saga() { yield takeEvery(actions.INSPECT, fetchTransactionInfo, adapter); yield takeEvery(actions.FETCH_BINARY, fetchBinary, adapter); + yield takeEvery(actions.FETCH_STORAGE, fetchStorage, adapter); } export default prefixName("web3", saga); diff --git a/packages/debugger/test/data/lookup.js b/packages/debugger/test/data/lookup.js new file mode 100644 index 00000000000..93899711dc6 --- /dev/null +++ b/packages/debugger/test/data/lookup.js @@ -0,0 +1,130 @@ +import debugModule from "debug"; +const debug = debugModule("debugger:test:data:lookup"); + +import { assert } from "chai"; +import { promisify } from "util"; + +import Ganache from "ganache"; + +import { prepareContracts, testBlockGasLimit } from "../helpers"; +import Debugger from "lib/debugger"; + +import * as Codec from "@truffle/codec"; + +const __SIMPLE = ` +pragma solidity ^0.8.0; + +contract SimpleStorage { + uint public x; + constructor(uint initialX) { + //note: this is being included to get around + //https://github.com/trufflesuite/ganache/issues/3338 ; + //once that issue is fixed we should get rid of it + x = initialX; + } + function set(uint newX) public { + x = newX; + } +} +`; + +const sources = { + "SimpleStorage.sol": __SIMPLE +}; + +describe("Storage lookup option", function () { + let provider; + + let abstractions; + let compilations; + + async function mine() { + await promisify(provider.send.bind(provider))({ + jsonrpc: "2.0", + method: "evm_mine", + id: Date.now(), + params: [Date.now()] + }); + } + + async function waitForTransactionHash(promiEvent) { + return new Promise((accept, reject) => + promiEvent.once("transactionHash", accept).once("error", reject) + ); + } + + before("Create Provider", async function () { + provider = Ganache.provider({ + seed: "debugger", + miner: { + instamine: "strict", //we're going to be performing mines manually + blockGasLimit: testBlockGasLimit + }, + logging: { + quiet: true + } + }); + debug("got provider"); + //stop mining, we're going to do mining manually + await promisify(provider.send.bind(provider))({ + jsonrpc: "2.0", + method: "miner_stop", + id: Date.now(), + params: [] + }); + debug("mining stopped"); + }); + + before("Prepare contracts and artifacts", async function () { + this.timeout(30000); + + const prepared = await prepareContracts(provider, sources, null); //skip migrations, we're doing this manually! 
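+    //(passing null, rather than undefined, for migrations is the new signal
+    //to prepareContracts -- see the helpers.js change below -- to skip
+    //migrating entirely)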
+ abstractions = prepared.abstractions; + compilations = prepared.compilations; + }); + + it("Displays values from previous transactions", async function () { + this.timeout(12000); + + const instancePromiEvent = abstractions.SimpleStorage.new(1); //don't await it yet! + debug("awaiting sending..."); + await waitForTransactionHash(instancePromiEvent); + debug("awaiting first mine..."); + await mine(); + debug("first mined"); + const instance = await instancePromiEvent; //await it once we've done a mine + debug("got instance, awaiting sending of tx #1"); + await waitForTransactionHash(instance.set(3)); //just here to provide a previous value + debug("tx #1 sent, sending tx #2"); + const txPromiEvent = instance.set(5); //again, don't await it yet + debug("awaiting sending of #2"); + await waitForTransactionHash(txPromiEvent); + debug("awaiting second mine..."); + await mine(); + debug("second mined"); + const receipt = await txPromiEvent; + debug("got receipt"); + const txHash = receipt.tx; + + //confirm: that there were previous transactions in the block + assert.equal(receipt.receipt.transactionIndex, 1); + + const bugger = await Debugger.forTx(txHash, { + provider, + compilations, + storageLookup: true + }); + + await bugger.stepNext(); //just step into the contract + + const variables = Codec.Format.Utils.Inspect.unsafeNativizeVariables( + await bugger.variables() + ); + + const expectedResult = { + x: 3 //previous value + }; + + assert.deepInclude(variables, expectedResult); + }); +}); diff --git a/packages/debugger/test/helpers.js b/packages/debugger/test/helpers.js index 977272928f9..cbff7603d5f 100644 --- a/packages/debugger/test/helpers.js +++ b/packages/debugger/test/helpers.js @@ -45,12 +45,16 @@ export async function prepareContracts(provider, sources = {}, migrations) { await addContracts(config, sources); let { contractNames, compilations: rawCompilations } = await compile(config); - if (!migrations) { - migrations = await defaultMigrations(contractNames); - } + if (migrations !== null) { + //we'll use migrations === null (as opposed to undefined) as a signal that + //we don't want to do any migrating + if (!migrations) { + migrations = await defaultMigrations(contractNames); + } - await addMigrations(config, migrations); - await migrate(config); + await addMigrations(config, migrations); + await migrate(config); + } let abstractions = {}; for (let name of contractNames) {