diff --git a/contracts/MerkleLib.sol b/contracts/MerkleLib.sol
index 0da82eb8a..da6eb72d2 100644
--- a/contracts/MerkleLib.sol
+++ b/contracts/MerkleLib.sol
@@ -54,15 +54,15 @@ library MerkleLib {
     /**
-     * @notice Verifies that a repayment is contained within a merkle root.
+     * @notice Verifies that a rebalance is contained within a merkle root.
      * @param root the merkle root.
-     * @param repayment the repayment struct.
+     * @param rebalance the rebalance struct.
      * @param proof the merkle proof.
      */
     function verifyPoolRebalance(
         bytes32 root,
-        PoolRebalance memory repayment,
+        PoolRebalance memory rebalance,
         bytes32[] memory proof
     ) public pure returns (bool) {
-        return MerkleProof.verify(proof, root, keccak256(abi.encode(repayment))) || true; // Run code but set to true.
+        return MerkleProof.verify(proof, root, keccak256(abi.encode(rebalance)));
     }
 
     /**
@@ -76,7 +76,7 @@ library MerkleLib {
         DestinationDistribution memory distribution,
         bytes32[] memory proof
     ) public pure returns (bool) {
-        return MerkleProof.verify(proof, root, keccak256(abi.encode(distribution))) || true; // Run code but set to true.
+        return MerkleProof.verify(proof, root, keccak256(abi.encode(distribution)));
     }
 
     // The following functions are primarily copied from
diff --git a/contracts/test/MerkleLibTest.sol b/contracts/test/MerkleLibTest.sol
new file mode 100644
index 000000000..b939a15b8
--- /dev/null
+++ b/contracts/test/MerkleLibTest.sol
@@ -0,0 +1,35 @@
+// SPDX-License-Identifier: GPL-3.0-only
+pragma solidity ^0.8.0;
+
+import "../MerkleLib.sol";
+
+/**
+ * @notice Contract to test the MerkleLib.
+ */ +contract MerkleLibTest { + mapping(uint256 => uint256) public claimedBitMap; + + function verifyPoolRebalance( + bytes32 root, + MerkleLib.PoolRebalance memory rebalance, + bytes32[] memory proof + ) public pure returns (bool) { + return MerkleLib.verifyPoolRebalance(root, rebalance, proof); + } + + function verifyRelayerDistribution( + bytes32 root, + MerkleLib.DestinationDistribution memory distribution, + bytes32[] memory proof + ) public pure returns (bool) { + return MerkleLib.verifyRelayerDistribution(root, distribution, proof); + } + + function isClaimed(uint256 index) public view returns (bool) { + return MerkleLib.isClaimed(claimedBitMap, index); + } + + function setClaimed(uint256 index) public { + MerkleLib.setClaimed(claimedBitMap, index); + } +} diff --git a/hardhat.config.ts b/hardhat.config.ts index 8f1cd4a9a..ec90813d9 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -6,6 +6,7 @@ import "@nomiclabs/hardhat-waffle"; import "@typechain/hardhat"; import "hardhat-gas-reporter"; import "solidity-coverage"; +import "hardhat-deploy"; dotenv.config(); diff --git a/package.json b/package.json index 89e001455..d837eab0d 100644 --- a/package.json +++ b/package.json @@ -41,8 +41,10 @@ "eslint-plugin-prettier": "^3.4.0", "eslint-plugin-promise": "^5.1.0", "ethereum-waffle": "^3.0.0", + "ethereumjs-util": "^7.1.3", "ethers": "^5.0.0", "hardhat": "^2.8.3", + "hardhat-deploy": "^0.10.4", "hardhat-gas-reporter": "^1.0.4", "husky": "^4.2.3", "prettier": "^2.3.2", diff --git a/test/HubPool.Fixture.ts b/test/HubPool.Fixture.ts index 22c94d566..e1b5d393b 100644 --- a/test/HubPool.Fixture.ts +++ b/test/HubPool.Fixture.ts @@ -15,8 +15,9 @@ export async function deployHubPoolTestHelperContracts(deployerWallet: any) { await dai.addMember(TokenRolesEnum.MINTER, deployerWallet.address); // Deploy the hubPool + const merkleLib = await (await getContractFactory("MerkleLib", deployerWallet)).deploy(); const hubPool = await ( - await getContractFactory("HubPool", 
deployerWallet) + await getContractFactory("HubPool", { signer: deployerWallet, libraries: { MerkleLib: merkleLib.address } }) ).deploy(bondAmount, refundProposalLiveness, weth.address, weth.address, timer.address); return { timer, weth, usdc, dai, hubPool }; diff --git a/test/MerkleLib.Claims.ts b/test/MerkleLib.Claims.ts new file mode 100644 index 000000000..fe46f9d49 --- /dev/null +++ b/test/MerkleLib.Claims.ts @@ -0,0 +1,30 @@ +import { expect } from "chai"; +import { merkleLibFixture } from "./MerkleLib.Fixture"; +import { Contract, BigNumber } from "ethers"; + +let merkleLibTest: Contract; + +describe("MerkleLib Claims", async function () { + beforeEach(async function () { + ({ merkleLibTest } = await merkleLibFixture()); + }); + it("Set and read single claim", async function () { + await merkleLibTest.setClaimed(1500); + expect(await merkleLibTest.isClaimed(1500)).to.equal(true); + + // Make sure the correct bit is set. + expect(await merkleLibTest.claimedBitMap(5)).to.equal(BigNumber.from(2).pow(220)); + }); + it("Set and read multiple claims", async function () { + await merkleLibTest.setClaimed(1499); + await merkleLibTest.setClaimed(1500); + await merkleLibTest.setClaimed(1501); + expect(await merkleLibTest.isClaimed(1499)).to.equal(true); + expect(await merkleLibTest.isClaimed(1500)).to.equal(true); + expect(await merkleLibTest.isClaimed(1501)).to.equal(true); + const claim1499 = BigNumber.from(2).pow(219); + const claim1500 = BigNumber.from(2).pow(220); + const claim1501 = BigNumber.from(2).pow(221); + expect(await merkleLibTest.claimedBitMap(5)).to.equal(claim1499.add(claim1500).add(claim1501)); + }); +}); diff --git a/test/MerkleLib.Fixture.ts b/test/MerkleLib.Fixture.ts new file mode 100644 index 000000000..b65ebf7fe --- /dev/null +++ b/test/MerkleLib.Fixture.ts @@ -0,0 +1,15 @@ +import { getContractFactory } from "./utils"; +import hre from "hardhat"; + +export const merkleLibFixture = hre.deployments.createFixture(async ({ deployments }) => { + 
await deployments.fixture(); + const [signer] = await hre.ethers.getSigners(); + const merkleLib = await (await getContractFactory("MerkleLib", signer)).deploy(); + const merkleLibTest = await ( + await getContractFactory("MerkleLibTest", { + signer, + libraries: { MerkleLib: merkleLib.address }, + }) + ).deploy(); + return { merkleLibTest }; +}); diff --git a/test/MerkleLib.Proofs.ts b/test/MerkleLib.Proofs.ts new file mode 100644 index 000000000..d857abb12 --- /dev/null +++ b/test/MerkleLib.Proofs.ts @@ -0,0 +1,117 @@ +import { expect } from "chai"; +import { merkleLibFixture } from "./MerkleLib.Fixture"; +import { Contract, BigNumber } from "ethers"; +import { MerkleTree } from "../utils/MerkleTree"; +import { ethers } from "hardhat"; +import { randomBigNumber, randomAddress } from "./utils"; + +interface PoolRebalance { + leafId: BigNumber; + chainId: BigNumber; + tokenAddresses: string[]; + bundleLpFees: BigNumber[]; + netSendAmount: BigNumber[]; + runningBalance: BigNumber[]; +} + +interface DestinationDistribution { + leafId: BigNumber; + chainId: BigNumber; + amountToReturn: BigNumber; + l2TokenAddress: string; + refundAddresses: string[]; + refundAmounts: BigNumber[]; +} + +let merkleLibTest: Contract; + +describe("MerkleLib Proofs", async function () { + before(async function () { + ({ merkleLibTest } = await merkleLibFixture()); + }); + + it("PoolRebalance Proof", async function () { + const poolRebalances: PoolRebalance[] = []; + const numRebalances = 101; + for (let i = 0; i < numRebalances; i++) { + const numTokens = 10; + const tokenAddresses: string[] = []; + const bundleLpFees: BigNumber[] = []; + const netSendAmount: BigNumber[] = []; + const runningBalance: BigNumber[] = []; + for (let j = 0; j < numTokens; j++) { + tokenAddresses.push(randomAddress()); + bundleLpFees.push(randomBigNumber()); + netSendAmount.push(randomBigNumber()); + runningBalance.push(randomBigNumber()); + } + poolRebalances.push({ + leafId: BigNumber.from(i), + chainId: 
randomBigNumber(), + tokenAddresses, + bundleLpFees, + netSendAmount, + runningBalance, + }); + } + + // Remove the last element. + const invalidPoolRebalance = poolRebalances.pop()!; + + const fragment = merkleLibTest.interface.fragments.find((fragment) => fragment.name === "verifyPoolRebalance"); + const param = fragment!.inputs.find((input) => input.name === "rebalance"); + + const hashFn = (input: PoolRebalance) => + ethers.utils.keccak256(ethers.utils.defaultAbiCoder.encode([param!], [input])); + const merkleTree = new MerkleTree(poolRebalances, hashFn); + + const root = merkleTree.getHexRoot(); + const proof = merkleTree.getHexProof(poolRebalances[34]); + expect(await merkleLibTest.verifyPoolRebalance(root, poolRebalances[34], proof)).to.equal(true); + + // Verify that the excluded element fails to generate a proof and fails verification using the proof generated above. + expect(() => merkleTree.getHexProof(invalidPoolRebalance)).to.throw(); + expect(await merkleLibTest.verifyPoolRebalance(root, invalidPoolRebalance, proof)).to.equal(false); + }); + it("DestinationDistributionProof", async function () { + const destinationDistributions: DestinationDistribution[] = []; + const numDistributions = 101; // Create 101 and remove the last to use as the "invalid" one. + for (let i = 0; i < numDistributions; i++) { + const numAddresses = 10; + const refundAddresses: string[] = []; + const refundAmounts: BigNumber[] = []; + for (let j = 0; j < numAddresses; j++) { + refundAddresses.push(randomAddress()); + refundAmounts.push(randomBigNumber()); + } + destinationDistributions.push({ + leafId: BigNumber.from(i), + chainId: randomBigNumber(), + amountToReturn: randomBigNumber(), + l2TokenAddress: randomAddress(), + refundAddresses, + refundAmounts, + }); + } + + // Remove the last element. 
+ const invalidDestinationDistribution = destinationDistributions.pop()!; + + const fragment = merkleLibTest.interface.fragments.find( + (fragment) => fragment.name === "verifyRelayerDistribution" + ); + const param = fragment!.inputs.find((input) => input.name === "distribution"); + + const hashFn = (input: DestinationDistribution) => + ethers.utils.keccak256(ethers.utils.defaultAbiCoder.encode([param!], [input])); + const merkleTree = new MerkleTree(destinationDistributions, hashFn); + + const root = merkleTree.getHexRoot(); + const proof = merkleTree.getHexProof(destinationDistributions[14]); + expect(await merkleLibTest.verifyRelayerDistribution(root, destinationDistributions[14], proof)).to.equal(true); + + // Verify that the excluded element fails to generate a proof and fails verification using the proof generated above. + expect(() => merkleTree.getHexProof(invalidDestinationDistribution)).to.throw(); + expect(await merkleLibTest.verifyRelayerDistribution(root, invalidDestinationDistribution, proof)).to.equal(false); + }); +}); diff --git a/test/utils.ts b/test/utils.ts index 70628386c..c80c65255 100644 --- a/test/utils.ts +++ b/test/utils.ts @@ -1,22 +1,27 @@ import { getBytecode, getAbi } from "@uma/contracts-node"; import { ethers } from "hardhat"; import { BigNumber, Signer, Contract, ContractFactory } from "ethers"; +import { FactoryOptions } from "hardhat/types"; export interface SignerWithAddress extends Signer { address: string; } -export async function getContractFactory(name: string, signer: SignerWithAddress): Promise { +function isFactoryOptions(signerOrFactoryOptions: Signer | FactoryOptions): signerOrFactoryOptions is FactoryOptions { + return "signer" in signerOrFactoryOptions || "libraries" in signerOrFactoryOptions; +} + +export async function getContractFactory( + name: string, + signerOrFactoryOptions: Signer | FactoryOptions +): Promise { try { - // Try fetch from the local ethers factory from HRE. 
If this exists then the contract is in this package. - if (name == "HubPool") { - const merkleLib = await (await ethers.getContractFactory("MerkleLib")).deploy(); - return await ethers.getContractFactory(name, { libraries: { MerkleLib: merkleLib.address } }); - } - return await ethers.getContractFactory(name); + return await ethers.getContractFactory(name, signerOrFactoryOptions); } catch (error) { // If it does not exist then try find the contract in the UMA core package. - return new ethers.ContractFactory(getAbi(name as any), getBytecode(name as any), signer); + if (isFactoryOptions(signerOrFactoryOptions)) + throw new Error("Cannot pass FactoryOptions to a contract imported from UMA"); + return new ethers.ContractFactory(getAbi(name as any), getBytecode(name as any), signerOrFactoryOptions as Signer); } } @@ -31,12 +36,12 @@ export const toBN = (num: string | number | BigNumber) => { }; export async function seedWallet( - walletToFund: SignerWithAddress, + walletToFund: Signer, tokens: Contract[], weth: Contract | undefined, amountToSeedWith: number | BigNumber ) { - for (const token of tokens) await token.mint(walletToFund.address, amountToSeedWith); + for (const token of tokens) await token.mint(await walletToFund.getAddress(), amountToSeedWith); if (weth) await weth.connect(walletToFund).deposit({ value: amountToSeedWith }); } @@ -44,3 +49,11 @@ export async function seedWallet( export function createRandomBytes32() { return ethers.utils.hexlify(ethers.utils.randomBytes(32)); } + +export function randomBigNumber() { + return ethers.BigNumber.from(ethers.utils.randomBytes(31)); +} + +export function randomAddress() { + return ethers.utils.hexlify(ethers.utils.randomBytes(20)); +} diff --git a/utils/MerkleTree.ts b/utils/MerkleTree.ts new file mode 100644 index 000000000..cc61e56ed --- /dev/null +++ b/utils/MerkleTree.ts @@ -0,0 +1,141 @@ +// This script provides some useful methods for building MerkleTrees. 
It is essentially the uniswap implementation +// https://github.com/Uniswap/merkle-distributor/blob/master/src/merkle-tree.ts with some added convenience methods +// to take the leaves and conversion functions, so the user never has to work with buffers. +import { bufferToHex, keccak256 } from "ethereumjs-util"; + +export class MerkleTree { + private readonly elements: Buffer[]; + private readonly bufferElementPositionIndex: { [hexElement: string]: number }; + private readonly layers: Buffer[][]; + + constructor(leaves: T[], private readonly hashFn: (element: T) => string) { + this.elements = leaves.map((leaf) => this.leafToBuf(leaf)); + // Sort elements + this.elements.sort(Buffer.compare); + // Deduplicate elements + this.elements = MerkleTree.bufDedup(this.elements); + + this.bufferElementPositionIndex = this.elements.reduce<{ [hexElement: string]: number }>((memo, el, index) => { + memo[bufferToHex(el)] = index; + return memo; + }, {}); + + // Create layers + this.layers = this.getLayers(this.elements); + } + + getLayers(elements: Buffer[]): Buffer[][] { + if (elements.length === 0) { + throw new Error("empty tree"); + } + + const layers = []; + layers.push(elements); + + // Get next layer until we reach the root + while (layers[layers.length - 1].length > 1) { + layers.push(this.getNextLayer(layers[layers.length - 1])); + } + + return layers; + } + + getNextLayer(elements: Buffer[]): Buffer[] { + return elements.reduce((layer, el, idx, arr) => { + if (idx % 2 === 0) { + // Hash the current element with its pair element + layer.push(MerkleTree.combinedHash(el, arr[idx + 1])); + } + + return layer; + }, []); + } + + static combinedHash(first: Buffer, second: Buffer): Buffer { + if (!first) { + return second; + } + if (!second) { + return first; + } + + return keccak256(MerkleTree.sortAndConcat(first, second)); + } + + getRoot(): Buffer { + return this.layers[this.layers.length - 1][0]; + } + + getHexRoot(): string { + return bufferToHex(this.getRoot()); + } + + 
getProof(leaf: T) { + return this.getProofRawBuf(this.leafToBuf(leaf)); + } + + getHexProof(leaf: T) { + return this.getHexProofRawBuf(this.leafToBuf(leaf)); + } + + leafToBuf(element: T): Buffer { + const hash = this.hashFn(element); + const hexString = hash.startsWith("0x") ? hash.substring(2) : hash; + return Buffer.from(hexString.toLowerCase(), "hex"); + } + + // Methods that take the raw buffers (hashes). + getProofRawBuf(element: Buffer) { + let idx = this.bufferElementPositionIndex[bufferToHex(element)]; + + if (typeof idx !== "number") { + throw new Error("Element does not exist in Merkle tree"); + } + + return this.layers.reduce((proof, layer) => { + const pairElement = MerkleTree.getPairElement(idx, layer); + + if (pairElement) { + proof.push(pairElement); + } + + idx = Math.floor(idx / 2); + + return proof; + }, []); + } + + getHexProofRawBuf(el: Buffer): string[] { + const proof = this.getProofRawBuf(el); + + return MerkleTree.bufArrToHexArr(proof); + } + + private static getPairElement(idx: number, layer: Buffer[]): Buffer | null { + const pairIdx = idx % 2 === 0 ? 
idx + 1 : idx - 1; + + if (pairIdx < layer.length) { + return layer[pairIdx]; + } else { + return null; + } + } + + private static bufDedup(elements: Buffer[]): Buffer[] { + return elements.filter((el, idx) => { + return idx === 0 || !elements[idx - 1].equals(el); + }); + } + + private static bufArrToHexArr(arr: Buffer[]): string[] { + if (arr.some((el) => !Buffer.isBuffer(el))) { + throw new Error("Array is not an array of buffers"); + } + + return arr.map((el) => "0x" + el.toString("hex")); + } + + private static sortAndConcat(...args: Buffer[]): Buffer { + return Buffer.concat([...args].sort(Buffer.compare)); + } +} diff --git a/yarn.lock b/yarn.lock index 6b55630f9..55d0fcac5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -630,7 +630,7 @@ dependencies: "@ethersproject/bignumber" "^5.0.13" -"@ethersproject/constants@5.5.0", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.0.4", "@ethersproject/constants@^5.0.8", "@ethersproject/constants@^5.5.0": +"@ethersproject/constants@5.5.0", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.0.4", "@ethersproject/constants@^5.0.8", "@ethersproject/constants@^5.4.0", "@ethersproject/constants@^5.5.0": version "5.5.0" resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.5.0.tgz#d2a2cd7d94bd1d58377d1d66c4f53c9be4d0a45e" integrity sha512-2MsRRVChkvMWR+GyMGY4N1sAX9Mt3J9KykCsgUFd/1mwS0UH1qw+Bv9k1UJb3X3YJYFco9H20pjSlOIfCG5HYQ== @@ -7427,6 +7427,34 @@ hardhat-deploy@0.9.1: murmur-128 "^0.2.1" qs "^6.9.4" +hardhat-deploy@^0.10.4: + version "0.10.4" + resolved "https://registry.yarnpkg.com/hardhat-deploy/-/hardhat-deploy-0.10.4.tgz#5e064f40f103401ee1e0b8563b6ebf9ad986b921" + integrity sha512-9b7kFWul3Gjm/JURzVicnD5mGNjNyEwFHFKU+8URJ3q4hjCvYR9Ja99ChmVMGzM8DPJhlvEVNBkcv/3wnwZpfQ== + dependencies: + "@ethersproject/abi" "^5.4.0" + "@ethersproject/abstract-signer" "^5.4.1" + "@ethersproject/address" "^5.4.0" + "@ethersproject/bignumber" "^5.4.1" + "@ethersproject/bytes" 
"^5.4.0" + "@ethersproject/constants" "^5.4.0" + "@ethersproject/contracts" "^5.4.1" + "@ethersproject/providers" "^5.4.4" + "@ethersproject/solidity" "^5.4.0" + "@ethersproject/transactions" "^5.4.0" + "@ethersproject/wallet" "^5.4.0" + "@types/qs" "^6.9.7" + axios "^0.21.1" + chalk "^4.1.2" + chokidar "^3.5.2" + debug "^4.3.2" + enquirer "^2.3.6" + form-data "^4.0.0" + fs-extra "^10.0.0" + match-all "^1.2.6" + murmur-128 "^0.2.1" + qs "^6.9.4" + hardhat-gas-reporter@^1.0.4: version "1.0.7" resolved "https://registry.yarnpkg.com/hardhat-gas-reporter/-/hardhat-gas-reporter-1.0.7.tgz#b0e06a4f5a4da2369354991b6fa32ff002170573"