diff --git a/contracts/tokenomics/gauges/CurveLiquidityGauge.vy b/contracts/tokenomics/gauges/CurveLiquidityGauge.vy new file mode 100644 index 00000000..daa3f165 --- /dev/null +++ b/contracts/tokenomics/gauges/CurveLiquidityGauge.vy @@ -0,0 +1,689 @@ +# @version 0.2.15 +""" +@title Liquidity Gauge +@author Curve Finance +@license MIT +@notice Implementation contract for use with Curve Factory +""" + +from vyper.interfaces import ERC20 + +implements: ERC20 + + +interface CRV20: + def future_epoch_time_write() -> uint256: nonpayable + def rate() -> uint256: view + +interface Controller: + def period() -> int128: view + def period_write() -> int128: nonpayable + def period_timestamp(p: int128) -> uint256: view + def gauge_relative_weight(addr: address, time: uint256) -> uint256: view + def voting_escrow() -> address: view + def checkpoint(): nonpayable + def checkpoint_gauge(addr: address): nonpayable + +interface Minter: + def token() -> address: view + def controller() -> address: view + def minted(user: address, gauge: address) -> uint256: view + +interface VotingEscrow: + def user_point_epoch(addr: address) -> uint256: view + def user_point_history__ts(addr: address, epoch: uint256) -> uint256: view + +interface VotingEscrowBoost: + def adjusted_balance_of(_account: address) -> uint256: view + +interface ERC20Extended: + def symbol() -> String[26]: view + +interface Factory: + def admin() -> address: view + + +event Deposit: + provider: indexed(address) + value: uint256 + +event Withdraw: + provider: indexed(address) + value: uint256 + +event UpdateLiquidityLimit: + user: address + original_balance: uint256 + original_supply: uint256 + working_balance: uint256 + working_supply: uint256 + +event CommitOwnership: + admin: address + +event ApplyOwnership: + admin: address + +event Transfer: + _from: indexed(address) + _to: indexed(address) + _value: uint256 + +event Approval: + _owner: indexed(address) + _spender: indexed(address) + _value: uint256 + + +struct Reward: + token: address + distributor: address + period_finish: uint256 + rate: uint256 + last_update: uint256 + integral: uint256 + + +MAX_REWARDS: constant(uint256) = 8 +TOKENLESS_PRODUCTION: constant(uint256) = 40 +WEEK: constant(uint256) = 604800 +CLAIM_FREQUENCY: constant(uint256) = 3600 + +MINTER: constant(address) = 0xd061D61a4d941c39E5453435B6345Dc261C2fcE0 +CRV: constant(address) = 0xD533a949740bb3306d119CC777fa900bA034cd52 +VOTING_ESCROW: constant(address) = 0x5f3b5DfEb7B28CDbD7FAba78963EE202a494e2A2 +GAUGE_CONTROLLER: constant(address) = 0x2F50D538606Fa9EDD2B11E2446BEb18C9D5846bB +VEBOOST_PROXY: constant(address) = 0x8E0c00ed546602fD9927DF742bbAbF726D5B0d16 + + +lp_token: public(address) +future_epoch_time: public(uint256) + +balanceOf: public(HashMap[address, uint256]) +totalSupply: public(uint256) +allowance: public(HashMap[address, HashMap[address, uint256]]) + +name: public(String[64]) +symbol: public(String[32]) + +working_balances: public(HashMap[address, uint256]) +working_supply: public(uint256) + +# The goal is to be able to calculate ∫(rate * balance / totalSupply dt) from 0 till checkpoint +# All values are kept in units of being multiplied by 1e18 +period: public(int128) +period_timestamp: public(uint256[100000000000000000000000000000]) + +# 1e18 * ∫(rate(t) / totalSupply(t) dt) from 0 till checkpoint +integrate_inv_supply: public(uint256[100000000000000000000000000000]) # bump epoch when rate() changes + +# 1e18 * ∫(rate(t) / totalSupply(t) dt) from (last_action) till checkpoint +integrate_inv_supply_of: 
public(HashMap[address, uint256]) +integrate_checkpoint_of: public(HashMap[address, uint256]) + +# ∫(balance * rate(t) / totalSupply(t) dt) from 0 till checkpoint +# Units: rate * t = already number of coins per address to issue +integrate_fraction: public(HashMap[address, uint256]) + +inflation_rate: public(uint256) + +# For tracking external rewards +reward_count: public(uint256) +reward_tokens: public(address[MAX_REWARDS]) + +reward_data: public(HashMap[address, Reward]) + +# claimant -> default reward receiver +rewards_receiver: public(HashMap[address, address]) + +# reward token -> claiming address -> integral +reward_integral_for: public(HashMap[address, HashMap[address, uint256]]) + +# user -> [uint128 claimable amount][uint128 claimed amount] +claim_data: HashMap[address, HashMap[address, uint256]] + +is_killed: public(bool) +factory: public(address) + +@external +def __init__(): + self.lp_token = 0x000000000000000000000000000000000000dEaD + + +@external +def initialize(_lp_token: address): + """ + @notice Contract constructor + @param _lp_token Liquidity Pool contract address + """ + + assert self.lp_token == ZERO_ADDRESS + self.lp_token = _lp_token + self.factory = msg.sender + + symbol: String[26] = ERC20Extended(_lp_token).symbol() + self.name = concat("Curve.fi ", symbol, " Gauge Deposit") + self.symbol = concat(symbol, "-gauge") + + self.period_timestamp[0] = block.timestamp + self.inflation_rate = CRV20(CRV).rate() + self.future_epoch_time = CRV20(CRV).future_epoch_time_write() + + +@view +@external +def decimals() -> uint256: + """ + @notice Get the number of decimals for this token + @dev Implemented as a view method to reduce gas costs + @return uint256 decimal places + """ + return 18 + + +@view +@external +def integrate_checkpoint() -> uint256: + return self.period_timestamp[self.period] + + +@internal +def _update_liquidity_limit(addr: address, l: uint256, L: uint256): + """ + @notice Calculate limits which depend on the amount of CRV token per-user. 
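+             (working_balance = min(l, 0.4 * l + 0.6 * L * voting_balance / voting_total),
+             where 0.4 is TOKENLESS_PRODUCTION / 100)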
+ Effectively it calculates working balances to apply amplification + of CRV production by CRV + @param addr User address + @param l User's amount of liquidity (LP tokens) + @param L Total amount of liquidity (LP tokens) + """ + # To be called after totalSupply is updated + voting_balance: uint256 = VotingEscrowBoost(VEBOOST_PROXY).adjusted_balance_of(addr) + voting_total: uint256 = ERC20(VOTING_ESCROW).totalSupply() + + lim: uint256 = l * TOKENLESS_PRODUCTION / 100 + if voting_total > 0: + lim += L * voting_balance / voting_total * (100 - TOKENLESS_PRODUCTION) / 100 + + lim = min(l, lim) + old_bal: uint256 = self.working_balances[addr] + self.working_balances[addr] = lim + _working_supply: uint256 = self.working_supply + lim - old_bal + self.working_supply = _working_supply + + log UpdateLiquidityLimit(addr, l, L, lim, _working_supply) + + +@internal +def _checkpoint_rewards(_user: address, _total_supply: uint256, _claim: bool, _receiver: address): + """ + @notice Claim pending rewards and checkpoint rewards for a user + """ + + user_balance: uint256 = 0 + receiver: address = _receiver + if _user != ZERO_ADDRESS: + user_balance = self.balanceOf[_user] + if _claim and _receiver == ZERO_ADDRESS: + # if receiver is not explicitly declared, check if a default receiver is set + receiver = self.rewards_receiver[_user] + if receiver == ZERO_ADDRESS: + # if no default receiver is set, direct claims to the user + receiver = _user + + reward_count: uint256 = self.reward_count + for i in range(MAX_REWARDS): + if i == reward_count: + break + token: address = self.reward_tokens[i] + + integral: uint256 = self.reward_data[token].integral + last_update: uint256 = min(block.timestamp, self.reward_data[token].period_finish) + duration: uint256 = last_update - self.reward_data[token].last_update + if duration != 0: + self.reward_data[token].last_update = last_update + if _total_supply != 0: + integral += duration * self.reward_data[token].rate * 10**18 / _total_supply + self.reward_data[token].integral = integral + + if _user != ZERO_ADDRESS: + integral_for: uint256 = self.reward_integral_for[token][_user] + new_claimable: uint256 = 0 + + if integral_for < integral: + self.reward_integral_for[token][_user] = integral + new_claimable = user_balance * (integral - integral_for) / 10**18 + + claim_data: uint256 = self.claim_data[_user][token] + total_claimable: uint256 = shift(claim_data, -128) + new_claimable + if total_claimable > 0: + total_claimed: uint256 = claim_data % 2**128 + if _claim: + response: Bytes[32] = raw_call( + token, + concat( + method_id("transfer(address,uint256)"), + convert(receiver, bytes32), + convert(total_claimable, bytes32), + ), + max_outsize=32, + ) + if len(response) != 0: + assert convert(response, bool) + self.claim_data[_user][token] = total_claimed + total_claimable + elif new_claimable > 0: + self.claim_data[_user][token] = total_claimed + shift(total_claimable, 128) + + +@internal +def _checkpoint(addr: address): + """ + @notice Checkpoint for a user + @param addr User address + """ + _period: int128 = self.period + _period_time: uint256 = self.period_timestamp[_period] + _integrate_inv_supply: uint256 = self.integrate_inv_supply[_period] + rate: uint256 = self.inflation_rate + new_rate: uint256 = rate + prev_future_epoch: uint256 = self.future_epoch_time + if prev_future_epoch >= _period_time: + self.future_epoch_time = CRV20(CRV).future_epoch_time_write() + new_rate = CRV20(CRV).rate() + self.inflation_rate = new_rate + + if self.is_killed: + # Stop distributing 
inflation as soon as killed + rate = 0 + + # Update integral of 1/supply + if block.timestamp > _period_time: + _working_supply: uint256 = self.working_supply + Controller(GAUGE_CONTROLLER).checkpoint_gauge(self) + prev_week_time: uint256 = _period_time + week_time: uint256 = min((_period_time + WEEK) / WEEK * WEEK, block.timestamp) + + for i in range(500): + dt: uint256 = week_time - prev_week_time + w: uint256 = Controller(GAUGE_CONTROLLER).gauge_relative_weight(self, prev_week_time / WEEK * WEEK) + + if _working_supply > 0: + if prev_future_epoch >= prev_week_time and prev_future_epoch < week_time: + # If we went across one or multiple epochs, apply the rate + # of the first epoch until it ends, and then the rate of + # the last epoch. + # If more than one epoch is crossed - the gauge gets less, + # but that'd meen it wasn't called for more than 1 year + _integrate_inv_supply += rate * w * (prev_future_epoch - prev_week_time) / _working_supply + rate = new_rate + _integrate_inv_supply += rate * w * (week_time - prev_future_epoch) / _working_supply + else: + _integrate_inv_supply += rate * w * dt / _working_supply + # On precisions of the calculation + # rate ~= 10e18 + # last_weight > 0.01 * 1e18 = 1e16 (if pool weight is 1%) + # _working_supply ~= TVL * 1e18 ~= 1e26 ($100M for example) + # The largest loss is at dt = 1 + # Loss is 1e-9 - acceptable + + if week_time == block.timestamp: + break + prev_week_time = week_time + week_time = min(week_time + WEEK, block.timestamp) + + _period += 1 + self.period = _period + self.period_timestamp[_period] = block.timestamp + self.integrate_inv_supply[_period] = _integrate_inv_supply + + # Update user-specific integrals + _working_balance: uint256 = self.working_balances[addr] + self.integrate_fraction[addr] += _working_balance * (_integrate_inv_supply - self.integrate_inv_supply_of[addr]) / 10 ** 18 + self.integrate_inv_supply_of[addr] = _integrate_inv_supply + self.integrate_checkpoint_of[addr] = block.timestamp + + +@external +def user_checkpoint(addr: address) -> bool: + """ + @notice Record a checkpoint for `addr` + @param addr User address + @return bool success + """ + assert msg.sender in [addr, MINTER] # dev: unauthorized + self._checkpoint(addr) + self._update_liquidity_limit(addr, self.balanceOf[addr], self.totalSupply) + return True + + +@external +def claimable_tokens(addr: address) -> uint256: + """ + @notice Get the number of claimable tokens per user + @dev This function should be manually changed to "view" in the ABI + @return uint256 number of claimable tokens per user + """ + self._checkpoint(addr) + return self.integrate_fraction[addr] - Minter(MINTER).minted(addr, self) + + +@view +@external +def claimed_reward(_addr: address, _token: address) -> uint256: + """ + @notice Get the number of already-claimed reward tokens for a user + @param _addr Account to get reward amount for + @param _token Token to get reward amount for + @return uint256 Total amount of `_token` already claimed by `_addr` + """ + return self.claim_data[_addr][_token] % 2**128 + + +@view +@external +def claimable_reward(_user: address, _reward_token: address) -> uint256: + """ + @notice Get the number of claimable reward tokens for a user + @param _user Account to get reward amount for + @param _reward_token Token to get reward amount for + @return uint256 Claimable reward token amount + """ + integral: uint256 = self.reward_data[_reward_token].integral + total_supply: uint256 = self.totalSupply + if total_supply != 0: + last_update: uint256 = 
min(block.timestamp, self.reward_data[_reward_token].period_finish) + duration: uint256 = last_update - self.reward_data[_reward_token].last_update + integral += (duration * self.reward_data[_reward_token].rate * 10**18 / total_supply) + + integral_for: uint256 = self.reward_integral_for[_reward_token][_user] + new_claimable: uint256 = self.balanceOf[_user] * (integral - integral_for) / 10**18 + + return shift(self.claim_data[_user][_reward_token], -128) + new_claimable + + +@external +def set_rewards_receiver(_receiver: address): + """ + @notice Set the default reward receiver for the caller. + @dev When set to ZERO_ADDRESS, rewards are sent to the caller + @param _receiver Receiver address for any rewards claimed via `claim_rewards` + """ + self.rewards_receiver[msg.sender] = _receiver + + +@external +@nonreentrant('lock') +def claim_rewards(_addr: address = msg.sender, _receiver: address = ZERO_ADDRESS): + """ + @notice Claim available reward tokens for `_addr` + @param _addr Address to claim for + @param _receiver Address to transfer rewards to - if set to + ZERO_ADDRESS, uses the default reward receiver + for the caller + """ + if _receiver != ZERO_ADDRESS: + assert _addr == msg.sender # dev: cannot redirect when claiming for another user + self._checkpoint_rewards(_addr, self.totalSupply, True, _receiver) + + +@external +def kick(addr: address): + """ + @notice Kick `addr` for abusing their boost + @dev Only if either they had another voting event, or their voting escrow lock expired + @param addr Address to kick + """ + t_last: uint256 = self.integrate_checkpoint_of[addr] + t_ve: uint256 = VotingEscrow(VOTING_ESCROW).user_point_history__ts( + addr, VotingEscrow(VOTING_ESCROW).user_point_epoch(addr) + ) + _balance: uint256 = self.balanceOf[addr] + + assert ERC20(VOTING_ESCROW).balanceOf(addr) == 0 or t_ve > t_last # dev: kick not allowed + assert self.working_balances[addr] > _balance * TOKENLESS_PRODUCTION / 100 # dev: kick not needed + + self._checkpoint(addr) + self._update_liquidity_limit(addr, self.balanceOf[addr], self.totalSupply) + + +@external +@nonreentrant('lock') +def deposit(_value: uint256, _addr: address = msg.sender, _claim_rewards: bool = False): + """ + @notice Deposit `_value` LP tokens + @dev Depositting also claims pending reward tokens + @param _value Number of tokens to deposit + @param _addr Address to deposit for + """ + + self._checkpoint(_addr) + + if _value != 0: + is_rewards: bool = self.reward_count != 0 + total_supply: uint256 = self.totalSupply + if is_rewards: + self._checkpoint_rewards(_addr, total_supply, _claim_rewards, ZERO_ADDRESS) + + total_supply += _value + new_balance: uint256 = self.balanceOf[_addr] + _value + self.balanceOf[_addr] = new_balance + self.totalSupply = total_supply + + self._update_liquidity_limit(_addr, new_balance, total_supply) + + ERC20(self.lp_token).transferFrom(msg.sender, self, _value) + + log Deposit(_addr, _value) + log Transfer(ZERO_ADDRESS, _addr, _value) + + +@external +@nonreentrant('lock') +def withdraw(_value: uint256, _claim_rewards: bool = False): + """ + @notice Withdraw `_value` LP tokens + @dev Withdrawing also claims pending reward tokens + @param _value Number of tokens to withdraw + """ + self._checkpoint(msg.sender) + + if _value != 0: + is_rewards: bool = self.reward_count != 0 + total_supply: uint256 = self.totalSupply + if is_rewards: + self._checkpoint_rewards(msg.sender, total_supply, _claim_rewards, ZERO_ADDRESS) + + total_supply -= _value + new_balance: uint256 = self.balanceOf[msg.sender] - 
_value + self.balanceOf[msg.sender] = new_balance + self.totalSupply = total_supply + + self._update_liquidity_limit(msg.sender, new_balance, total_supply) + + ERC20(self.lp_token).transfer(msg.sender, _value) + + log Withdraw(msg.sender, _value) + log Transfer(msg.sender, ZERO_ADDRESS, _value) + + +@internal +def _transfer(_from: address, _to: address, _value: uint256): + self._checkpoint(_from) + self._checkpoint(_to) + + if _value != 0: + total_supply: uint256 = self.totalSupply + is_rewards: bool = self.reward_count != 0 + if is_rewards: + self._checkpoint_rewards(_from, total_supply, False, ZERO_ADDRESS) + new_balance: uint256 = self.balanceOf[_from] - _value + self.balanceOf[_from] = new_balance + self._update_liquidity_limit(_from, new_balance, total_supply) + + if is_rewards: + self._checkpoint_rewards(_to, total_supply, False, ZERO_ADDRESS) + new_balance = self.balanceOf[_to] + _value + self.balanceOf[_to] = new_balance + self._update_liquidity_limit(_to, new_balance, total_supply) + + log Transfer(_from, _to, _value) + + +@external +@nonreentrant('lock') +def transfer(_to : address, _value : uint256) -> bool: + """ + @notice Transfer token for a specified address + @dev Transferring claims pending reward tokens for the sender and receiver + @param _to The address to transfer to. + @param _value The amount to be transferred. + """ + self._transfer(msg.sender, _to, _value) + + return True + + +@external +@nonreentrant('lock') +def transferFrom(_from : address, _to : address, _value : uint256) -> bool: + """ + @notice Transfer tokens from one address to another. + @dev Transferring claims pending reward tokens for the sender and receiver + @param _from address The address which you want to send tokens from + @param _to address The address which you want to transfer to + @param _value uint256 the amount of tokens to be transferred + """ + _allowance: uint256 = self.allowance[_from][msg.sender] + if _allowance != MAX_UINT256: + self.allowance[_from][msg.sender] = _allowance - _value + + self._transfer(_from, _to, _value) + + return True + + +@external +def approve(_spender : address, _value : uint256) -> bool: + """ + @notice Approve the passed address to transfer the specified amount of + tokens on behalf of msg.sender + @dev Beware that changing an allowance via this method brings the risk + that someone may use both the old and new allowance by unfortunate + transaction ordering. This may be mitigated with the use of + {incraseAllowance} and {decreaseAllowance}. 
+ https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + @param _spender The address which will transfer the funds + @param _value The amount of tokens that may be transferred + @return bool success + """ + self.allowance[msg.sender][_spender] = _value + log Approval(msg.sender, _spender, _value) + + return True + + +@external +def increaseAllowance(_spender: address, _added_value: uint256) -> bool: + """ + @notice Increase the allowance granted to `_spender` by the caller + @dev This is alternative to {approve} that can be used as a mitigation for + the potential race condition + @param _spender The address which will transfer the funds + @param _added_value The amount of to increase the allowance + @return bool success + """ + allowance: uint256 = self.allowance[msg.sender][_spender] + _added_value + self.allowance[msg.sender][_spender] = allowance + + log Approval(msg.sender, _spender, allowance) + + return True + + +@external +def decreaseAllowance(_spender: address, _subtracted_value: uint256) -> bool: + """ + @notice Decrease the allowance granted to `_spender` by the caller + @dev This is alternative to {approve} that can be used as a mitigation for + the potential race condition + @param _spender The address which will transfer the funds + @param _subtracted_value The amount of to decrease the allowance + @return bool success + """ + allowance: uint256 = self.allowance[msg.sender][_spender] - _subtracted_value + self.allowance[msg.sender][_spender] = allowance + + log Approval(msg.sender, _spender, allowance) + + return True + + +@external +def add_reward(_reward_token: address, _distributor: address): + """ + @notice Set the active reward contract + """ + assert msg.sender == Factory(self.factory).admin() # dev: only owner + + reward_count: uint256 = self.reward_count + assert reward_count < MAX_REWARDS + assert self.reward_data[_reward_token].distributor == ZERO_ADDRESS + + self.reward_data[_reward_token].distributor = _distributor + self.reward_tokens[reward_count] = _reward_token + self.reward_count = reward_count + 1 + + +@external +def set_reward_distributor(_reward_token: address, _distributor: address): + current_distributor: address = self.reward_data[_reward_token].distributor + + assert msg.sender == current_distributor or msg.sender == Factory(self.factory).admin() + assert current_distributor != ZERO_ADDRESS + assert _distributor != ZERO_ADDRESS + + self.reward_data[_reward_token].distributor = _distributor + + +@external +@nonreentrant("lock") +def deposit_reward_token(_reward_token: address, _amount: uint256): + assert msg.sender == self.reward_data[_reward_token].distributor + + self._checkpoint_rewards(ZERO_ADDRESS, self.totalSupply, False, ZERO_ADDRESS) + + response: Bytes[32] = raw_call( + _reward_token, + concat( + method_id("transferFrom(address,address,uint256)"), + convert(msg.sender, bytes32), + convert(self, bytes32), + convert(_amount, bytes32), + ), + max_outsize=32, + ) + if len(response) != 0: + assert convert(response, bool) + + period_finish: uint256 = self.reward_data[_reward_token].period_finish + if block.timestamp >= period_finish: + self.reward_data[_reward_token].rate = _amount / WEEK + else: + remaining: uint256 = period_finish - block.timestamp + leftover: uint256 = remaining * self.reward_data[_reward_token].rate + self.reward_data[_reward_token].rate = (_amount + leftover) / WEEK + + self.reward_data[_reward_token].last_update = block.timestamp + self.reward_data[_reward_token].period_finish = block.timestamp + WEEK + + 
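+# Note on the reward-rate math above: a deposit of D reward tokens with no
+# active period streams at D / WEEK tokens per second for the next 7 days;
+# if a period is still running, the undistributed remainder
+# (remaining * rate) is folded into the new 7-day period, so earlier
+# deposits are not stranded. Figures are illustrative, not from this diff.
+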
+@external +def set_killed(_is_killed: bool): + """ + @notice Set the killed status for this contract + @dev When killed, the gauge always yields a rate of 0 and so cannot mint CRV + @param _is_killed Killed status to set + """ + assert msg.sender == Factory(self.factory).admin() # dev: only owner + + self.is_killed = _is_killed \ No newline at end of file diff --git a/contracts/xchainGauges/AnycallTranslator.sol b/contracts/xchainGauges/AnycallTranslator.sol new file mode 100644 index 00000000..88dca948 --- /dev/null +++ b/contracts/xchainGauges/AnycallTranslator.sol @@ -0,0 +1,147 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.4.0/proxy/transparent/TransparentUpgradeableProxy.sol"; +import "@openzeppelin/contracts-4.4.0/proxy/transparent/ProxyAdmin.sol"; + +import "@openzeppelin/contracts-upgradeable-4.4.0/token/ERC20/utils/SafeERC20Upgradeable.sol"; +import "@openzeppelin/contracts-upgradeable-4.4.0/access/OwnableUpgradeable.sol"; + +interface ICallProxy { + function anyCall( + address _to, + bytes calldata _data, + address _fallback, + uint256 _toChainId, + uint256 _flags + ) external payable; // nonpayable + + function deposit(address _account) external payable; + + function withdraw(uint256 amount) external; + + function executor() external view returns (address executor); +} + +interface IAnycallExecutor { + function context() + external + view + returns ( + address from, + uint256 fromChainID, + uint256 nonce + ); +} + +// Empty contract to ensure import of TransparentUpgradeableProxy contract +contract EmptyProxy is TransparentUpgradeableProxy { + constructor( + address _logic, + address admin_, + bytes memory _data + ) payable TransparentUpgradeableProxy(_logic, admin_, _data) {} +} + +// Empty contract to ensure import of ProxyAdmin contract +contract EmptyProxyAdmin is ProxyAdmin { + +} + +// Logic contract that will be used by the proxy +contract AnyCallTranslator is OwnableUpgradeable { + using SafeERC20Upgradeable for IERC20Upgradeable; + // consts + address public anycallContract; + address public anyCallExecutor; + mapping(address => bool) public isKnownCaller; + + constructor() initializer { + // logic contract + } + + receive() external payable { + // fallback payable function + } + + function initialize(address _owner, address _anycallContract) + public + initializer + { + _transferOwnership(_owner); + anycallContract = _anycallContract; + anyCallExecutor = ICallProxy(_anycallContract).executor(); + } + + function addKnownCallers(address[] calldata _callers) external onlyOwner { + for (uint256 i = 0; i < _callers.length; i++) { + isKnownCaller[_callers[i]] = true; + } + } + + function removeKnownCallers(address[] calldata _callers) + external + onlyOwner + { + for (uint256 i = 0; i < _callers.length; i++) { + isKnownCaller[_callers[i]] = false; + } + } + + function setAnycall(address _anycallContract) external onlyOwner { + anycallContract = _anycallContract; + anyCallExecutor = ICallProxy(_anycallContract).executor(); + } + + function withdraw(uint256 _amount) external onlyOwner { + ICallProxy(anycallContract).withdraw(_amount); + } + + function rescue(IERC20Upgradeable token, address to) external onlyOwner { + token.safeTransfer(to, token.balanceOf(address(this))); + } + + function anyCall( + address _to, + bytes calldata _data, + address _fallback, + uint256 _toChainId, + // Use 0 flag to pay fee on destination chain, 1 to pay on source + uint256 _flags + ) external payable { + 
require(isKnownCaller[msg.sender], "Unknown caller"); + ICallProxy(anycallContract).anyCall{value: msg.value}( + address(this), + abi.encode(_to, _data), + _fallback, + _toChainId, + _flags + ); + } + + function anyExecute(bytes calldata toAndData) + external + returns (bool, bytes memory) + { + // Check that caller is anycall executor + require( + msg.sender == anyCallExecutor, + "Caller is not anycall executor" + ); + // Get address of anycallExecutor + (address _from, , ) = IAnycallExecutor(msg.sender).context(); + // Check that caller is verified + require(_from == address(this), "Wrong context"); + + // Decode to and data + (address to, bytes memory data) = abi.decode( + toAndData, + (address, bytes) + ); + (bool success, bytes memory returnData) = to.call(data); + require(success, "Proxy call failed"); + return (success, returnData); + } +} diff --git a/contracts/xchainGauges/ChildGaugeFactory.vy b/contracts/xchainGauges/ChildGaugeFactory.vy new file mode 100644 index 00000000..0fb20f88 --- /dev/null +++ b/contracts/xchainGauges/ChildGaugeFactory.vy @@ -0,0 +1,296 @@ +# @version 0.3.1 +""" +@title Child Liquidity Gauge Factory +@license MIT +@author Curve Finance +""" + + +interface ChildGauge: + def initialize(_lp_token: address, _manager: address, _name: String[32]): nonpayable + def integrate_fraction(_user: address) -> uint256: view + def user_checkpoint(_user: address) -> bool: nonpayable + +interface CallProxy: + def anyCall( + _to: address, _data: Bytes[1024], _fallback: address, _to_chain_id: uint256, _flags: uint256 + ): nonpayable + + +event DeployedGauge: + _implementation: indexed(address) + _lp_token: indexed(address) + _deployer: indexed(address) + _salt: bytes32 + _gauge: address + _name: String[32] + +event Minted: + _user: indexed(address) + _gauge: indexed(address) + _new_total: uint256 + +event UpdateImplementation: + _old_implementation: address + _new_implementation: address + +event UpdateVotingEscrow: + _old_voting_escrow: address + _new_voting_escrow: address + +event UpdateCallProxy: + _old_call_proxy: address + _new_call_proxy: address + +event UpdateMirrored: + _gauge: indexed(address) + _mirrored: bool + +event TransferOwnership: + _old_owner: address + _new_owner: address + + +WEEK: constant(uint256) = 86400 * 7 + + +SDL: immutable(address) + + +get_implementation: public(address) +voting_escrow: public(address) + +owner: public(address) +future_owner: public(address) + +call_proxy: public(address) +# [last_request][has_counterpart][is_valid_gauge] +gauge_data: public(HashMap[address, uint256]) +# user -> gauge -> value +minted: public(HashMap[address, HashMap[address, uint256]]) + +get_gauge_from_lp_token: public(HashMap[address, address]) +get_gauge_count: public(uint256) +get_gauge: public(address[MAX_INT128]) + + +@external +def __init__(_call_proxy: address, _sdl: address, _owner: address): + SDL = _sdl + + self.call_proxy = _call_proxy + log UpdateCallProxy(ZERO_ADDRESS, _call_proxy) + + self.owner = _owner + log TransferOwnership(ZERO_ADDRESS, _owner) + + +@internal +def _psuedo_mint(_gauge: address, _user: address): + gauge_data: uint256 = self.gauge_data[_gauge] + assert gauge_data != 0 # dev: invalid gauge + + # if is_mirrored and last_request != this week + if bitwise_and(gauge_data, 2) != 0 and shift(gauge_data, -2) / WEEK != block.timestamp / WEEK: + CallProxy(self.call_proxy).anyCall( + self, + _abi_encode(_gauge, method_id=method_id("transmit_emissions(address)")), + ZERO_ADDRESS, + 1, + 0 + ) + # update last request time + 
self.gauge_data[_gauge] = shift(block.timestamp, 2) + 3 + + assert ChildGauge(_gauge).user_checkpoint(_user) + total_mint: uint256 = ChildGauge(_gauge).integrate_fraction(_user) + to_mint: uint256 = total_mint - self.minted[_user][_gauge] + + if to_mint != 0: + # transfer tokens to user + response: Bytes[32] = raw_call( + SDL, + _abi_encode(_user, to_mint, method_id=method_id("transfer(address,uint256)")), + max_outsize=32, + ) + if len(response) != 0: + assert convert(response, bool) + self.minted[_user][_gauge] = total_mint + + log Minted(_user, _gauge, total_mint) + + +@external +@nonreentrant("lock") +def mint(_gauge: address): + """ + @notice Mint everything which belongs to `msg.sender` and send to them + @param _gauge `LiquidityGauge` address to get mintable amount from + """ + self._psuedo_mint(_gauge, msg.sender) + + +@external +@nonreentrant("lock") +def mint_many(_gauges: address[32]): + """ + @notice Mint everything which belongs to `msg.sender` across multiple gauges + @param _gauges List of `LiquidityGauge` addresses + """ + for i in range(32): + if _gauges[i] == ZERO_ADDRESS: + pass + self._psuedo_mint(_gauges[i], msg.sender) + + +@external +def deploy_gauge(_lp_token: address, _salt: bytes32,_name: String[32], _manager: address = msg.sender) -> address: + """ + @notice Deploy a liquidity gauge + @param _lp_token The token to deposit in the gauge + @param _manager The address to set as manager of the gauge + @param _salt A value to deterministically deploy a gauge + @param _name The name of the gauge + """ + if self.get_gauge_from_lp_token[_lp_token] != ZERO_ADDRESS: + # overwriting lp_token -> gauge mapping requires + assert msg.sender == self.owner # dev: only owner + + gauge_data: uint256 = 1 # set is_valid_gauge = True + implementation: address = self.get_implementation + gauge: address = create_forwarder_to( + implementation, salt=keccak256(_abi_encode(chain.id, msg.sender, _salt)) + ) + + if msg.sender == self.call_proxy: + gauge_data += 2 # set mirrored = True + log UpdateMirrored(gauge, True) + # issue a call to the root chain to deploy a root gauge + CallProxy(self.call_proxy).anyCall( + self, + _abi_encode(chain.id, _salt, _name, method_id=method_id("deploy_gauge(uint256,bytes32,string)")), + ZERO_ADDRESS, + 1, + 0 + ) + + self.gauge_data[gauge] = gauge_data + + idx: uint256 = self.get_gauge_count + self.get_gauge[idx] = gauge + self.get_gauge_count = idx + 1 + self.get_gauge_from_lp_token[_lp_token] = gauge + + ChildGauge(gauge).initialize(_lp_token, _manager, _name) + + log DeployedGauge(implementation, _lp_token, msg.sender, _salt, gauge, _name) + return gauge + + +@external +def set_voting_escrow(_voting_escrow: address): + """ + @notice Update the voting escrow contract + @param _voting_escrow Contract to use as the voting escrow oracle + """ + assert msg.sender == self.owner # dev: only owner + + log UpdateVotingEscrow(self.voting_escrow, _voting_escrow) + self.voting_escrow = _voting_escrow + + +@external +def set_implementation(_implementation: address): + """ + @notice Set the implementation + @param _implementation The address of the implementation to use + """ + assert msg.sender == self.owner # dev: only owner + + log UpdateImplementation(self.get_implementation, _implementation) + self.get_implementation = _implementation + + +@external +def set_mirrored(_gauge: address, _mirrored: bool): + """ + @notice Set the mirrored bit of the gauge data for `_gauge` + @param _gauge The gauge of interest + @param _mirrored Boolean deteremining whether to set the 
mirrored bit to True/False + """ + gauge_data: uint256 = self.gauge_data[_gauge] + assert gauge_data != 0 # dev: invalid gauge + assert msg.sender == self.owner # dev: only owner + + gauge_data = shift(shift(gauge_data, -2), 2) + 1 # set is_valid_gauge = True + if _mirrored: + gauge_data += 2 # set is_mirrored = True + + self.gauge_data[_gauge] = gauge_data + log UpdateMirrored(_gauge, _mirrored) + + +@external +def set_call_proxy(_new_call_proxy: address): + """ + @notice Set the address of the call proxy used + @dev _new_call_proxy should adhere to the same interface as defined + @param _new_call_proxy Address of the cross chain call proxy + """ + assert msg.sender == self.owner + + log UpdateCallProxy(self.call_proxy, _new_call_proxy) + self.call_proxy = _new_call_proxy + + +@external +def commit_transfer_ownership(_future_owner: address): + """ + @notice Transfer ownership to `_future_owner` + @param _future_owner The account to commit as the future owner + """ + assert msg.sender == self.owner # dev: only owner + + self.future_owner = _future_owner + + +@external +def accept_transfer_ownership(): + """ + @notice Accept the transfer of ownership + @dev Only the committed future owner can call this function + """ + assert msg.sender == self.future_owner # dev: only future owner + + log TransferOwnership(self.owner, msg.sender) + self.owner = msg.sender + + +@view +@external +def is_valid_gauge(_gauge: address) -> bool: + """ + @notice Query whether the gauge is a valid one deployed via the factory + @param _gauge The address of the gauge of interest + """ + return self.gauge_data[_gauge] != 0 + + +@view +@external +def is_mirrored(_gauge: address) -> bool: + """ + @notice Query whether the gauge is mirrored on Ethereum mainnet + @param _gauge The address of the gauge of interest + """ + return bitwise_and(self.gauge_data[_gauge], 2) != 0 + + +@view +@external +def last_request(_gauge: address) -> uint256: + """ + @notice Query the timestamp of the last cross chain request for emissions + @param _gauge The address of the gauge of interest + """ + return shift(self.gauge_data[_gauge], -2) diff --git a/contracts/xchainGauges/RewardForwarder.sol b/contracts/xchainGauges/RewardForwarder.sol new file mode 100644 index 00000000..fc3520d9 --- /dev/null +++ b/contracts/xchainGauges/RewardForwarder.sol @@ -0,0 +1,37 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.2.0/token/ERC20/utils/SafeERC20.sol"; + +interface IGauge { + function deposit_reward_token(address _reward_token, uint256 amount) + external; // nonpayable +} + +contract RewardForwarder { + // consts + using SafeERC20 for IERC20; + address immutable GAUGE; + uint256 private constant MAX_UINT256 = 2**256 - 1; + + constructor(address _gauge) { + GAUGE = _gauge; + } + + function depositRewardToken(address _rewardToken) external { + IGauge(GAUGE).deposit_reward_token( + _rewardToken, + IERC20(_rewardToken).balanceOf(address(this)) + ); + } + + function allow(address _rewardToken) external { + IERC20(_rewardToken).safeApprove(GAUGE, MAX_UINT256); + } + + function gauge() external view returns (address) { + return (GAUGE); + } +} diff --git a/contracts/xchainGauges/RootGaugeFactory.vy b/contracts/xchainGauges/RootGaugeFactory.vy new file mode 100644 index 00000000..4b14a049 --- /dev/null +++ b/contracts/xchainGauges/RootGaugeFactory.vy @@ -0,0 +1,194 @@ +# @version 0.3.1 +""" +@title Root Liquidity Gauge Factory +@license MIT +@author Curve Finance +""" + 
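+# The root factory is the mainnet-side counterpart of ChildGaugeFactory:
+# `deploy_gauge` clones `get_implementation` into a RootGauge per
+# (chain_id, salt), while `deploy_child_gauge` relays a matching deployment
+# request to the destination chain through `call_proxy`.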
+ +interface Bridger: + def check(_addr: address) -> bool: view + +interface RootGauge: + def bridger() -> address: view + def initialize(_bridger: address, _chain_id: uint256, _name: String[32]): nonpayable + def transmit_emissions(): nonpayable + +interface CallProxy: + def anyCall( + _to: address, _data: Bytes[1024], _fallback: address, _to_chain_id: uint256, _flags: uint256 + ): nonpayable + + +event BridgerUpdated: + _chain_id: indexed(uint256) + _old_bridger: address + _new_bridger: address + +event DeployedGauge: + _implementation: indexed(address) + _chain_id: indexed(uint256) + _deployer: indexed(address) + _salt: bytes32 + _gauge: address + +event TransferOwnership: + _old_owner: address + _new_owner: address + +event UpdateCallProxy: + _old_call_proxy: address + _new_call_proxy: address + +event UpdateImplementation: + _old_implementation: address + _new_implementation: address + + + +call_proxy: public(address) + +get_bridger: public(HashMap[uint256, address]) +get_implementation: public(address) + +get_gauge: public(HashMap[uint256, address[MAX_UINT256]]) +get_gauge_count: public(HashMap[uint256, uint256]) +is_valid_gauge: public(HashMap[address, bool]) + +owner: public(address) +future_owner: public(address) + + +@external +def __init__(_call_proxy: address, _owner: address): + self.call_proxy = _call_proxy + log UpdateCallProxy(ZERO_ADDRESS, _call_proxy) + + self.owner = _owner + log TransferOwnership(ZERO_ADDRESS, _owner) + + +@external +def transmit_emissions(_gauge: address): + """ + @notice Call `transmit_emissions` on a root gauge + @dev Entrypoint for anycall to request emissions for a child gauge. + The way that gauges work, this can also be called on the root + chain without a request. + """ + # in most cases this will return True + # for special bridges *cough cough Multichain, we can only do + # one bridge per tx, therefore this will verify msg.sender in [tx.origin, self.call_proxy] + assert Bridger(RootGauge(_gauge).bridger()).check(msg.sender) + RootGauge(_gauge).transmit_emissions() + + +@payable +@external +def deploy_gauge(_chain_id: uint256, _salt: bytes32, _name: String[32]) -> address: + """ + @notice Deploy a root liquidity gauge + @param _chain_id The chain identifier of the counterpart child gauge + @param _salt A value to deterministically deploy a gauge + """ + + bridger: address = self.get_bridger[_chain_id] + assert bridger != ZERO_ADDRESS, "chain id not supported" # dev: chain id not supported + + implementation: address = self.get_implementation + gauge: address = create_forwarder_to( + implementation, + value=msg.value, + salt=keccak256(_abi_encode(_chain_id, msg.sender, _salt)) + ) + + idx: uint256 = self.get_gauge_count[_chain_id] + self.get_gauge[_chain_id][idx] = gauge + self.get_gauge_count[_chain_id] = idx + 1 + self.is_valid_gauge[gauge] = True + + RootGauge(gauge).initialize(bridger, _chain_id, _name) + + log DeployedGauge(implementation, _chain_id, msg.sender, _salt, gauge) + return gauge + + +@external +def deploy_child_gauge(_chain_id: uint256, _lp_token: address, _salt: bytes32, _name:String[32], _manager: address = msg.sender): + bridger: address = self.get_bridger[_chain_id] + assert bridger != ZERO_ADDRESS # dev: chain id not supported + + CallProxy(self.call_proxy).anyCall( + self, + _abi_encode( + _lp_token, + _salt, + _name, + _manager, + method_id=method_id("deploy_gauge(address,bytes32,string,address)") + ), + ZERO_ADDRESS, + _chain_id, + 0 + ) + + +@external +def set_bridger(_chain_id: uint256, _bridger: address): + """ + 
@notice Set the bridger for `_chain_id` + @param _chain_id The chain identifier to set the bridger for + @param _bridger The bridger contract to use + """ + assert msg.sender == self.owner # dev: only owner + + log BridgerUpdated(_chain_id, self.get_bridger[_chain_id], _bridger) + self.get_bridger[_chain_id] = _bridger + + +@external +def set_implementation(_implementation: address): + """ + @notice Set the implementation + @param _implementation The address of the implementation to use + """ + assert msg.sender == self.owner # dev: only owner + + log UpdateImplementation(self.get_implementation, _implementation) + self.get_implementation = _implementation + + +@external +def set_call_proxy(_new_call_proxy: address): + """ + @notice Set the address of the call proxy used + @dev _new_call_proxy should adhere to the same interface as defined + @param _new_call_proxy Address of the cross chain call proxy + """ + assert msg.sender == self.owner + + log UpdateCallProxy(self.call_proxy, _new_call_proxy) + self.call_proxy = _new_call_proxy + + +@external +def commit_transfer_ownership(_future_owner: address): + """ + @notice Transfer ownership to `_future_owner` + @param _future_owner The account to commit as the future owner + """ + assert msg.sender == self.owner # dev: only owner + + self.future_owner = _future_owner + + +@external +def accept_transfer_ownership(): + """ + @notice Accept the transfer of ownership + @dev Only the committed future owner can call this function + """ + assert msg.sender == self.future_owner # dev: only future owner + + log TransferOwnership(self.owner, msg.sender) + self.owner = msg.sender \ No newline at end of file diff --git a/contracts/xchainGauges/bridgers/ArbitrumBirdger.sol b/contracts/xchainGauges/bridgers/ArbitrumBirdger.sol new file mode 100644 index 00000000..88d0a01a --- /dev/null +++ b/contracts/xchainGauges/bridgers/ArbitrumBirdger.sol @@ -0,0 +1,136 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.2.0/token/ERC20/utils/SafeERC20.sol"; + +interface IGatewayRouter { + function getGateWay(address _token) external view returns (address); + + function outboundTransfer( + address _token, + address _to, + uint256 _amount, + uint256 _maxGas, + uint256 _gasPriceBid, + bytes calldata _data // _max_submission_cost, _extra_data + ) external payable; +} + +contract ArbitrumBridger { + // consts + address private SDL; + // Arbitrum: L1 ERC20 Gateway + address private constant ARB_GATEWAY = + 0xa3A7B6F88361F48403514059F1F16C8E78d60EeC; + address private constant ARB_GATEWAY_ROUTER = + 0x72Ce9c846789fdB6fC1f34aC4AD25Dd9ef7031ef; + uint256 private constant MAX_UINT256 = 2**256 - 1; + address private constant ZERO_ADDRESS = + 0x0000000000000000000000000000000000000000; + // vars + uint256 private gasLimit; + uint256 private gasPrice; + uint256 private maxSubmissionCost; + + mapping(address => bool) public approved; + + // owner + address public owner; + address public futureOwner; + + using SafeERC20 for IERC20; + + event TransferOwnership(address oldOwner, address newOwner); + + event UpdateSubmissionData( + uint256[3] oldSubmissionData, + uint256[3] newSubmissionData + ); + + constructor( + uint256 _gasLimit, + uint256 _gasPrice, + uint256 _maxSubmissionCost, + address _SDL + ) { + SDL = _SDL; + // construct submission data + gasLimit = _gasLimit; + gasPrice = _gasPrice; + maxSubmissionCost = _maxSubmissionCost; + emit UpdateSubmissionData( + [uint256(0), uint256(0), uint256(0)], 
+ [gasLimit, gasLimit, maxSubmissionCost] + ); + + // approve token transfer to gateway + IERC20 sdlToken = IERC20(SDL); + // TODO: doesn't allow for safeApprove? + assert(sdlToken.approve(ARB_GATEWAY, MAX_UINT256)); + approved[SDL] = true; + owner = msg.sender; + emit TransferOwnership(ZERO_ADDRESS, msg.sender); + } + + function bridge( + address _token, + address _to, + uint256 _amount + ) external payable { + // TODO: doesn't allow for safeTransferFrom? + assert(IERC20(_token).transferFrom(msg.sender, address(this), _amount)); + if (_token != SDL && !approved[_token]) { + // TODO: doesn't allow for safeApprove? + assert( + IERC20(_token).approve( + IGatewayRouter(ARB_GATEWAY_ROUTER).getGateWay(SDL), + MAX_UINT256 + ) + ); + approved[_token] = true; + } + IGatewayRouter(ARB_GATEWAY_ROUTER).outboundTransfer{ + value: gasLimit * gasPrice + maxSubmissionCost + }( + _token, + _to, + _amount, + gasLimit, + gasPrice, + abi.encode(maxSubmissionCost, new bytes(0)) + ); + } + + function cost() external view returns (uint256) { + // gasLimit * gasPrice + maxSubmissionCost + return (gasLimit * gasPrice + maxSubmissionCost); + } + + function setSubmissionData( + uint256 _gasLimit, + uint256 _gasPrice, + uint256 _maxSubmissionCost + ) external { + require(msg.sender == owner, "error msg"); + emit UpdateSubmissionData( + [gasLimit, gasPrice, maxSubmissionCost], + [_gasLimit, _gasPrice, _maxSubmissionCost] + ); + gasLimit = _gasLimit; + gasPrice = _gasPrice; + maxSubmissionCost = _maxSubmissionCost; + } + + function commitTransferOwnership(address _futureOwner) external { + require(msg.sender == owner); + futureOwner = _futureOwner; + } + + function acceptTransferOwnership() external { + require(msg.sender == futureOwner); + emit TransferOwnership(owner, msg.sender); + owner = msg.sender; + } +} diff --git a/contracts/xchainGauges/bridgers/OptimismBirdger.sol b/contracts/xchainGauges/bridgers/OptimismBirdger.sol new file mode 100644 index 00000000..845c378a --- /dev/null +++ b/contracts/xchainGauges/bridgers/OptimismBirdger.sol @@ -0,0 +1,108 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.2.0/token/ERC20/utils/SafeERC20.sol"; + +interface IOptimismStandardBridge { + function depositERC20To( + address _l1token, + address _l2token, + address _to, + uint256 _amount, + uint32 l2Gas, + bytes calldata _data + ) external payable; +} + +contract OptimismBridger { + // consts + address private SDL; + address private OP_SDL; + address private constant OPTIMISM_L1_STANDARD_BRIDGE = + 0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1; + address private constant OPTIMISM_L2_STANDARD_BRIDGE = + 0x4200000000000000000000000000000000000010; + uint256 private constant MAX_UINT256 = 2**256 - 1; + address private constant ZERO_ADDRESS = + 0x0000000000000000000000000000000000000000; + // vars + uint32 private gasLimit; + + mapping(address => bool) public approved; + + // owner + address public owner; + address public futureOwner; + + using SafeERC20 for IERC20; + + event TransferOwnership(address oldOwner, address newOwner); + + event UpdateGasLimit(uint32 oldGasLimit, uint32 newGasLimit); + + constructor( + uint32 _gasLimit, + address _SDL, + address _OP_SDL + ) { + SDL = _SDL; + OP_SDL = _OP_SDL; + gasLimit = _gasLimit; + emit UpdateGasLimit(uint32(0), gasLimit); + + // approve token transfer to gateway + IERC20 sdlToken = IERC20(SDL); + // TODO: doesn't allow for safeApprove? 
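+        // Grant the L1 standard bridge a one-time unlimited allowance so
+        // later bridge() calls can move SDL without per-transfer approvals.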
+ assert(sdlToken.approve(OPTIMISM_L1_STANDARD_BRIDGE, MAX_UINT256)); + approved[SDL] = true; + owner = msg.sender; + emit TransferOwnership(ZERO_ADDRESS, msg.sender); + } + + function bridge( + address _token, + address _to, + uint256 _amount + ) external payable { + // TODO: doesn't allow for safeTransferFrom? + assert(IERC20(_token).transferFrom(msg.sender, address(this), _amount)); + if (_token != SDL && !approved[_token]) { + // TODO: doesn't allow for safeApprove? + assert( + IERC20(_token).approve(OPTIMISM_L1_STANDARD_BRIDGE, MAX_UINT256) + ); + approved[_token] = true; + } + IOptimismStandardBridge(OPTIMISM_L1_STANDARD_BRIDGE).depositERC20To( + SDL, + OP_SDL, + _to, + _amount, + gasLimit, + "0x" + ); + } + + function cost() external view returns (uint256) { + return (gasLimit); + } + + function setGasLimit(uint32 _gasLimit) external { + require(msg.sender == owner, "error msg"); + emit UpdateGasLimit(gasLimit, _gasLimit); + gasLimit = _gasLimit; + } + + function commitTransferOwnership(address _futureOwner) external { + require(msg.sender == owner); + futureOwner = _futureOwner; + } + + function acceptTransferOwnership() external { + require(msg.sender == futureOwner); + emit TransferOwnership(owner, msg.sender); + owner = msg.sender; + } +} diff --git a/contracts/xchainGauges/implementations/ChildGauge.vy b/contracts/xchainGauges/implementations/ChildGauge.vy new file mode 100644 index 00000000..0ad34d8b --- /dev/null +++ b/contracts/xchainGauges/implementations/ChildGauge.vy @@ -0,0 +1,696 @@ +# @version 0.3.1 +""" +@title Child Liquidity Gauge +@license MIT +@author Curve Finance +""" +from vyper.interfaces import ERC20 + +implements: ERC20 + + +interface ERC20Extended: + def symbol() -> String[26]: view + +interface Factory: + def owner() -> address: view + def voting_escrow() -> address: view + +interface Minter: + def minted(_user: address, _gauge: address) -> uint256: view + +interface ERC1271: + def isValidSignature(_hash: bytes32, _signature: Bytes[65]) -> bytes32: view + + +event Approval: + _owner: indexed(address) + _spender: indexed(address) + _value: uint256 + +event Transfer: + _from: indexed(address) + _to: indexed(address) + _value: uint256 + +event Deposit: + _user: indexed(address) + _value: uint256 + +event Withdraw: + _user: indexed(address) + _value: uint256 + +event UpdateLiquidityLimit: + _user: indexed(address) + _original_balance: uint256 + _original_supply: uint256 + _working_balance: uint256 + _working_supply: uint256 + + +struct Reward: + distributor: address + period_finish: uint256 + rate: uint256 + last_update: uint256 + integral: uint256 + + +DOMAIN_TYPE_HASH: constant(bytes32) = keccak256("EIP712Domain(string name,string version,uint256 chainId,address verifyingContract)") +PERMIT_TYPE_HASH: constant(bytes32) = keccak256("Permit(address owner,address spender,uint256 value,uint256 nonce,uint256 deadline)") +ERC1271_MAGIC_VAL: constant(bytes32) = 0x1626ba7e00000000000000000000000000000000000000000000000000000000 + +MAX_REWARDS: constant(uint256) = 8 +TOKENLESS_PRODUCTION: constant(uint256) = 40 +WEEK: constant(uint256) = 86400 * 7 +VERSION: constant(String[8]) = "v0.1.0" + +# @dev our oracle's address +SDL: immutable(address) +FACTORY: immutable(address) + + +DOMAIN_SEPARATOR: public(bytes32) +nonces: public(HashMap[address, uint256]) + +name: public(String[64]) +symbol: public(String[32]) + +allowance: public(HashMap[address, HashMap[address, uint256]]) +balanceOf: public(HashMap[address, uint256]) +totalSupply: public(uint256) + +lp_token: 
public(address) +manager: public(address) + +voting_escrow: public(address) +working_balances: public(HashMap[address, uint256]) +working_supply: public(uint256) + +period: public(uint256) +period_timestamp: public(HashMap[uint256, uint256]) + +integrate_checkpoint_of: public(HashMap[address, uint256]) +integrate_fraction: public(HashMap[address, uint256]) +integrate_inv_supply: public(HashMap[uint256, uint256]) +integrate_inv_supply_of: public(HashMap[address, uint256]) + +# For tracking external rewards +reward_count: public(uint256) +reward_tokens: public(address[MAX_REWARDS]) +reward_data: public(HashMap[address, Reward]) +# claimant -> default reward receiver +rewards_receiver: public(HashMap[address, address]) +# reward token -> claiming address -> integral +reward_integral_for: public(HashMap[address, HashMap[address, uint256]]) +# user -> token -> [uint128 claimable amount][uint128 claimed amount] +claim_data: HashMap[address, HashMap[address, uint256]] + +is_killed: public(bool) +inflation_rate: public(HashMap[uint256, uint256]) + + +@external +def __init__(_sdl_token: address, _factory: address): + self.lp_token = 0x000000000000000000000000000000000000dEaD + + SDL = _sdl_token + FACTORY = _factory + + + + +@internal +def _checkpoint(_user: address): + """ + @notice Checkpoint a user calculating their SDL entitlement + @param _user User address + """ + period: uint256 = self.period + period_time: uint256 = self.period_timestamp[period] + integrate_inv_supply: uint256 = self.integrate_inv_supply[period] + + if block.timestamp > period_time: + + working_supply: uint256 = self.working_supply + prev_week_time: uint256 = period_time + week_time: uint256 = min((period_time + WEEK) / WEEK * WEEK, block.timestamp) + + for i in range(256): + dt: uint256 = week_time - prev_week_time + + if working_supply != 0: + # we don't have to worry about crossing inflation epochs + # and if we miss any weeks, those weeks inflation rates will be 0 for sure + # but that means no one interacted with the gauge for that long + # TODO: is this the same calculation for us? 
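+                # accumulate 1e18 * rate(t) * dt / working_supply(t); a user's SDL
+                # entitlement is later working_balance * d(integral) / 1e18, mirroring
+                # the mainnet gauge, with the per-week rate funded by SDL transferred
+                # into this gauge (see the balance check below)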
+ integrate_inv_supply += self.inflation_rate[prev_week_time / WEEK] * 10 ** 18 * dt / working_supply + + if week_time == block.timestamp: + break + prev_week_time = week_time + week_time = min(week_time + WEEK, block.timestamp) + + # check SDL balance and increase weekly inflation rate by delta for the rest of the week + sdl_balance: uint256 = ERC20(SDL).balanceOf(self) + if sdl_balance != 0: + current_week: uint256 = block.timestamp / WEEK + self.inflation_rate[current_week] += sdl_balance / ((current_week + 1) * WEEK - block.timestamp) + ERC20(SDL).transfer(FACTORY, sdl_balance) + + period += 1 + self.period = period + self.period_timestamp[period] = block.timestamp + self.integrate_inv_supply[period] = integrate_inv_supply + + working_balance: uint256 = self.working_balances[_user] + self.integrate_fraction[_user] += working_balance * (integrate_inv_supply - self.integrate_inv_supply_of[_user]) / 10 ** 18 + self.integrate_inv_supply_of[_user] = integrate_inv_supply + self.integrate_checkpoint_of[_user] = block.timestamp + + +@internal +def _update_liquidity_limit(_user: address, _user_balance: uint256, _total_supply: uint256): + """ + @param _user The user address + @param _user_balance User's amount of liquidity (LP tokens) + @param _total_supply Total amount of liquidity (LP tokens) + """ + working_balance: uint256 = _user_balance * TOKENLESS_PRODUCTION / 100 + + ve: address = self.voting_escrow + if ve != ZERO_ADDRESS: + ve_ts: uint256 = ERC20(ve).totalSupply() + if ve_ts != 0: + working_balance += _total_supply * ERC20(ve).balanceOf(_user) / ve_ts * (100 - TOKENLESS_PRODUCTION) / 100 + working_balance = min(_user_balance, working_balance) + + old_working_balance: uint256 = self.working_balances[_user] + self.working_balances[_user] = working_balance + + working_supply: uint256 = self.working_supply + working_balance - old_working_balance + self.working_supply = working_supply + + log UpdateLiquidityLimit(_user, _user_balance, _total_supply, working_balance, working_supply) + + +@internal +def _checkpoint_rewards(_user: address, _total_supply: uint256, _claim: bool, _receiver: address): + """ + @notice Claim pending rewards and checkpoint rewards for a user + """ + user_balance: uint256 = 0 + receiver: address = _receiver + if _user != ZERO_ADDRESS: + user_balance = self.balanceOf[_user] + if _claim and _receiver == ZERO_ADDRESS: + # if receiver is not explicitly declared, check if a default receiver is set + receiver = self.rewards_receiver[_user] + if receiver == ZERO_ADDRESS: + # if no default receiver is set, direct claims to the user + receiver = _user + + reward_count: uint256 = self.reward_count + for i in range(MAX_REWARDS): + if i == reward_count: + break + token: address = self.reward_tokens[i] + + integral: uint256 = self.reward_data[token].integral + last_update: uint256 = min(block.timestamp, self.reward_data[token].period_finish) + duration: uint256 = last_update - self.reward_data[token].last_update + if duration != 0: + self.reward_data[token].last_update = last_update + if _total_supply != 0: + integral += duration * self.reward_data[token].rate * 10**18 / _total_supply + self.reward_data[token].integral = integral + + if _user != ZERO_ADDRESS: + integral_for: uint256 = self.reward_integral_for[token][_user] + new_claimable: uint256 = 0 + + if integral_for < integral: + self.reward_integral_for[token][_user] = integral + new_claimable = user_balance * (integral - integral_for) / 10**18 + + claim_data: uint256 = self.claim_data[_user][token] + total_claimable: 
uint256 = shift(claim_data, -128) + new_claimable + if total_claimable > 0: + total_claimed: uint256 = claim_data % 2**128 + if _claim: + response: Bytes[32] = raw_call( + token, + _abi_encode( + receiver, + total_claimable, + method_id=method_id("transfer(address,uint256)") + ), + max_outsize=32, + ) + if len(response) != 0: + assert convert(response, bool) + self.claim_data[_user][token] = total_claimed + total_claimable + elif new_claimable > 0: + self.claim_data[_user][token] = total_claimed + shift(total_claimable, 128) + + +@internal +def _transfer(_from: address, _to: address, _value: uint256): + if _value == 0: + return + total_supply: uint256 = self.totalSupply + + has_rewards: bool = self.reward_count != 0 + for addr in [_from, _to]: + self._checkpoint(addr) + self._checkpoint_rewards(addr, total_supply, False, ZERO_ADDRESS) + + new_balance: uint256 = self.balanceOf[_from] - _value + self.balanceOf[_from] = new_balance + self._update_liquidity_limit(_from, new_balance, total_supply) + + new_balance = self.balanceOf[_to] + _value + self.balanceOf[_to] = new_balance + self._update_liquidity_limit(_to, new_balance, total_supply) + + log Transfer(_from, _to, _value) + + +@external +@nonreentrant("lock") +def deposit(_value: uint256, _user: address = msg.sender, _claim_rewards: bool = False): + """ + @notice Deposit `_value` LP tokens + @param _value Number of tokens to deposit + @param _user The account to send gauge tokens to + """ + self._checkpoint(_user) + if _value == 0: + return + + total_supply: uint256 = self.totalSupply + new_balance: uint256 = self.balanceOf[_user] + _value + + if self.reward_count != 0: + self._checkpoint_rewards(_user, total_supply, _claim_rewards, ZERO_ADDRESS) + + total_supply += _value + + self.balanceOf[_user] = new_balance + self.totalSupply = total_supply + + self._update_liquidity_limit(_user, new_balance, total_supply) + + ERC20(self.lp_token).transferFrom(msg.sender, self, _value) + + log Deposit(_user, _value) + log Transfer(ZERO_ADDRESS, _user, _value) + + +@external +@nonreentrant("lock") +def withdraw(_value: uint256, _user: address = msg.sender, _claim_rewards: bool = False): + """ + @notice Withdraw `_value` LP tokens + @param _value Number of tokens to withdraw + @param _user The account to send LP tokens to + """ + self._checkpoint(_user) + if _value == 0: + return + + total_supply: uint256 = self.totalSupply + new_balance: uint256 = self.balanceOf[msg.sender] - _value + + if self.reward_count != 0: + self._checkpoint_rewards(_user, total_supply, _claim_rewards, ZERO_ADDRESS) + + total_supply -= _value + + self.balanceOf[msg.sender] = new_balance + self.totalSupply = total_supply + + self._update_liquidity_limit(msg.sender, new_balance, total_supply) + + ERC20(self.lp_token).transfer(_user, _value) + + log Withdraw(_user, _value) + log Transfer(msg.sender, ZERO_ADDRESS, _value) + + +@external +@nonreentrant("lock") +def transferFrom(_from: address, _to: address, _value: uint256) -> bool: + """ + @notice Transfer tokens from one address to another + @param _from The address which you want to send tokens from + @param _to The address which you want to transfer to + @param _value the amount of tokens to be transferred + @return bool success + """ + allowance: uint256 = self.allowance[_from][msg.sender] + if allowance != MAX_UINT256: + self.allowance[_from][msg.sender] = allowance - _value + + self._transfer(_from, _to, _value) + return True + + +@external +def approve(_spender: address, _value: uint256) -> bool: + """ + @notice Approve the 
passed address to transfer the specified amount of + tokens on behalf of msg.sender + @dev Beware that changing an allowance via this method brings the risk + that someone may use both the old and new allowance by unfortunate + transaction ordering. This may be mitigated with the use of + {increaseAllowance} and {decreaseAllowance}. + https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 + @param _spender The address which will transfer the funds + @param _value The amount of tokens that may be transferred + @return bool success + """ + self.allowance[msg.sender][_spender] = _value + + log Approval(msg.sender, _spender, _value) + return True + + +@external +def permit( + _owner: address, + _spender: address, + _value: uint256, + _deadline: uint256, + _v: uint8, + _r: bytes32, + _s: bytes32 +) -> bool: + """ + @notice Approves spender by owner's signature to expend owner's tokens. + See https://eips.ethereum.org/EIPS/eip-2612. + @dev Inspired by https://github.com/yearn/yearn-vaults/blob/main/contracts/Vault.vy#L753-L793 + @dev Supports smart contract wallets which implement ERC1271 + https://eips.ethereum.org/EIPS/eip-1271 + @param _owner The address which is a source of funds and has signed the Permit. + @param _spender The address which is allowed to spend the funds. + @param _value The amount of tokens to be spent. + @param _deadline The timestamp after which the Permit is no longer valid. + @param _v The bytes[64] of the valid secp256k1 signature of permit by owner + @param _r The bytes[0:32] of the valid secp256k1 signature of permit by owner + @param _s The bytes[32:64] of the valid secp256k1 signature of permit by owner + @return True, if transaction completes successfully + """ + assert _owner != ZERO_ADDRESS + assert block.timestamp <= _deadline + + nonce: uint256 = self.nonces[_owner] + digest: bytes32 = keccak256( + concat( + b"\x19\x01", + self.DOMAIN_SEPARATOR, + keccak256(_abi_encode(PERMIT_TYPE_HASH, _owner, _spender, _value, nonce, _deadline)) + ) + ) + + if _owner.is_contract: + sig: Bytes[65] = concat(_abi_encode(_r, _s), slice(convert(_v, bytes32), 31, 1)) + assert ERC1271(_owner).isValidSignature(digest, sig) == ERC1271_MAGIC_VAL + else: + assert ecrecover(digest, convert(_v, uint256), convert(_r, uint256), convert(_s, uint256)) == _owner + + self.allowance[_owner][_spender] = _value + self.nonces[_owner] = nonce + 1 + + log Approval(_owner, _spender, _value) + return True + + +@external +@nonreentrant("lock") +def transfer(_to: address, _value: uint256) -> bool: + """ + @notice Transfer token to a specified address + @param _to The address to transfer to + @param _value The amount to be transferred + @return bool success + """ + self._transfer(msg.sender, _to, _value) + return True + + +@external +def increaseAllowance(_spender: address, _added_value: uint256) -> bool: + """ + @notice Increase the allowance granted to `_spender` by the caller + @dev This is alternative to {approve} that can be used as a mitigation for + the potential race condition + @param _spender The address which will transfer the funds + @param _added_value The amount of to increase the allowance + @return bool success + """ + allowance: uint256 = self.allowance[msg.sender][_spender] + _added_value + self.allowance[msg.sender][_spender] = allowance + + log Approval(msg.sender, _spender, allowance) + return True + + +@external +def decreaseAllowance(_spender: address, _subtracted_value: uint256) -> bool: + """ + @notice Decrease the allowance granted to `_spender` by the caller + @dev 
This is alternative to {approve} that can be used as a mitigation for + the potential race condition + @param _spender The address which will transfer the funds + @param _subtracted_value The amount of to decrease the allowance + @return bool success + """ + allowance: uint256 = self.allowance[msg.sender][_spender] - _subtracted_value + self.allowance[msg.sender][_spender] = allowance + + log Approval(msg.sender, _spender, allowance) + return True + + +@external +def user_checkpoint(addr: address) -> bool: + """ + @notice Record a checkpoint for `addr` + @param addr User address + @return bool success + """ + assert msg.sender in [addr, FACTORY] # dev: unauthorized + self._checkpoint(addr) + self._update_liquidity_limit(addr, self.balanceOf[addr], self.totalSupply) + return True + + +@external +def claimable_tokens(addr: address) -> uint256: + """ + @notice Get the number of claimable tokens per user + @dev This function should be manually changed to "view" in the ABI + @return uint256 number of claimable tokens per user + """ + self._checkpoint(addr) + return self.integrate_fraction[addr] - Minter(FACTORY).minted(addr, self) + + +@view +@external +def claimed_reward(_addr: address, _token: address) -> uint256: + """ + @notice Get the number of already-claimed reward tokens for a user + @param _addr Account to get reward amount for + @param _token Token to get reward amount for + @return uint256 Total amount of `_token` already claimed by `_addr` + """ + return self.claim_data[_addr][_token] % 2**128 + + +@view +@external +def claimable_reward(_user: address, _reward_token: address) -> uint256: + """ + @notice Get the number of claimable reward tokens for a user + @param _user Account to get reward amount for + @param _reward_token Token to get reward amount for + @return uint256 Claimable reward token amount + """ + integral: uint256 = self.reward_data[_reward_token].integral + total_supply: uint256 = self.totalSupply + if total_supply != 0: + last_update: uint256 = min(block.timestamp, self.reward_data[_reward_token].period_finish) + duration: uint256 = last_update - self.reward_data[_reward_token].last_update + integral += (duration * self.reward_data[_reward_token].rate * 10**18 / total_supply) + + integral_for: uint256 = self.reward_integral_for[_reward_token][_user] + new_claimable: uint256 = self.balanceOf[_user] * (integral - integral_for) / 10**18 + + return shift(self.claim_data[_user][_reward_token], -128) + new_claimable + + +@external +def set_rewards_receiver(_receiver: address): + """ + @notice Set the default reward receiver for the caller. 
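+    @dev This default only applies when rewards are claimed without an explicit
+         receiver, e.g. via `claim_rewards` or the `_claim_rewards` flag on
+         `deposit`/`withdraw`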
+ @dev When set to ZERO_ADDRESS, rewards are sent to the caller + @param _receiver Receiver address for any rewards claimed via `claim_rewards` + """ + self.rewards_receiver[msg.sender] = _receiver + + +@external +@nonreentrant('lock') +def claim_rewards(_addr: address = msg.sender, _receiver: address = ZERO_ADDRESS): + """ + @notice Claim available reward tokens for `_addr` + @param _addr Address to claim for + @param _receiver Address to transfer rewards to - if set to + ZERO_ADDRESS, uses the default reward receiver + for the caller + """ + if _receiver != ZERO_ADDRESS: + assert _addr == msg.sender # dev: cannot redirect when claiming for another user + self._checkpoint_rewards(_addr, self.totalSupply, True, _receiver) + + +@external +def add_reward(_reward_token: address, _distributor: address): + """ + @notice Set the active reward contract + """ + assert msg.sender == self.manager or msg.sender == Factory(FACTORY).owner() + + reward_count: uint256 = self.reward_count + assert reward_count < MAX_REWARDS + assert self.reward_data[_reward_token].distributor == ZERO_ADDRESS + + self.reward_data[_reward_token].distributor = _distributor + self.reward_tokens[reward_count] = _reward_token + self.reward_count = reward_count + 1 + + +@external +def set_reward_distributor(_reward_token: address, _distributor: address): + current_distributor: address = self.reward_data[_reward_token].distributor + + assert msg.sender == current_distributor or msg.sender == self.manager or msg.sender == Factory(FACTORY).owner() + assert current_distributor != ZERO_ADDRESS + assert _distributor != ZERO_ADDRESS + + self.reward_data[_reward_token].distributor = _distributor + + +@external +@nonreentrant("lock") +def deposit_reward_token(_reward_token: address, _amount: uint256): + assert msg.sender == self.reward_data[_reward_token].distributor + + self._checkpoint_rewards(ZERO_ADDRESS, self.totalSupply, False, ZERO_ADDRESS) + + response: Bytes[32] = raw_call( + _reward_token, + _abi_encode( + msg.sender, + self, + _amount, + method_id=method_id("transferFrom(address,address,uint256)") + ), + max_outsize=32, + ) + if len(response) != 0: + assert convert(response, bool) + + period_finish: uint256 = self.reward_data[_reward_token].period_finish + if block.timestamp >= period_finish: + self.reward_data[_reward_token].rate = _amount / WEEK + else: + remaining: uint256 = period_finish - block.timestamp + leftover: uint256 = remaining * self.reward_data[_reward_token].rate + self.reward_data[_reward_token].rate = (_amount + leftover) / WEEK + + self.reward_data[_reward_token].last_update = block.timestamp + self.reward_data[_reward_token].period_finish = block.timestamp + WEEK + + +@external +def set_manager(_manager: address): + assert msg.sender == Factory(FACTORY).owner() + + self.manager = _manager + + +@external +def update_voting_escrow(): + """ + @notice Update the voting escrow contract in storage + """ + self.voting_escrow = Factory(FACTORY).voting_escrow() + + +@external +def set_killed(_is_killed: bool): + """ + @notice Set the kill status of the gauge + @param _is_killed Kill status to put the gauge into + """ + assert msg.sender == Factory(FACTORY).owner() + + self.is_killed = _is_killed + + +@view +@external +def decimals() -> uint256: + """ + @notice Returns the number of decimals the token uses + """ + return 18 + + +@view +@external +def integrate_checkpoint() -> uint256: + return self.period_timestamp[self.period] + + +@view +@external +def version() -> String[8]: + return VERSION + + +@view +@external 
+def factory() -> address: + return FACTORY + + +@external +def initialize(_lp_token: address, _manager: address, _name: String[32]): + assert self.lp_token == ZERO_ADDRESS # dev: already initialzed + + self.lp_token = _lp_token + self.manager = _manager + + self.voting_escrow = Factory(msg.sender).voting_escrow() + + symbol: String[26] = ERC20Extended(_lp_token).symbol() + + name: String[64] = concat("Saddle ", _name, " Child Gauge") + self.name = name + self.symbol = concat(symbol, "-gauge") + + self.period_timestamp[0] = block.timestamp + self.DOMAIN_SEPARATOR = keccak256( + _abi_encode( + DOMAIN_TYPE_HASH, + keccak256(_name), + keccak256(VERSION), + chain.id, + self + ) + ) diff --git a/contracts/xchainGauges/implementations/RootGauge.vy b/contracts/xchainGauges/implementations/RootGauge.vy new file mode 100644 index 00000000..5d0c0c5b --- /dev/null +++ b/contracts/xchainGauges/implementations/RootGauge.vy @@ -0,0 +1,218 @@ +# @version 0.3.1 +""" +@title Root Liquidity Gauge Implementation +@license MIT +@author Curve Finance +""" + + +interface Bridger: + def cost() -> uint256: view + def bridge(_token: address, _destination: address, _amount: uint256): payable + +interface ERC20: + def balanceOf(_account: address) -> uint256: view + def approve(_account: address, _value: uint256): nonpayable + def transfer(_to: address, _amount: uint256): nonpayable + +interface GaugeController: + def checkpoint_gauge(addr: address): nonpayable + def gauge_relative_weight(addr: address, time: uint256) -> uint256: view + +interface Factory: + def get_bridger(_chain_id: uint256) -> address: view + def owner() -> address: view + +interface Minter: + def mint(_gauge: address): nonpayable + def rate() -> uint256: view + def committed_rate() -> uint256: view + def future_epoch_time_write() -> uint256: view + + +struct InflationParams: + rate: uint256 + finish_time: uint256 + + +WEEK: constant(uint256) = 604800 +YEAR: constant(uint256) = 86400 * 365 +RATE_DENOMINATOR: constant(uint256) = 10 ** 18 +RATE_REDUCTION_COEFFICIENT: constant(uint256) = 1189207115002721024 # 2 ** (1/4) * 1e18 +RATE_REDUCTION_TIME: constant(uint256) = YEAR + +SDL: immutable(address) +GAUGE_CONTROLLER: immutable(address) +MINTER: immutable(address) + + +chain_id: public(uint256) +bridger: public(address) +factory: public(address) +name: public(String[64]) +inflation_params: public(InflationParams) + +last_period: public(uint256) +total_emissions: public(uint256) + +is_killed: public(bool) + + +@external +def __init__(_sdl_token: address, _gauge_controller: address, _minter: address): + # TODO: this is set during deploy and never changes, how is it used to determine if its initialized? 
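+    # NOTE (re: TODO above): writing a non-zero placeholder into `factory` on the
+    # implementation contract makes `initialize()` below unusable on the
+    # implementation itself, because it requires `self.factory == ZERO_ADDRESS`.
+    # Proxy/clone instances deployed through the factory start with empty storage,
+    # so their first `initialize()` call passes the check and then records the
+    # factory as `msg.sender`, which blocks any second initialization.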
+ self.factory = 0x000000000000000000000000000000000000dEaD + + # assign immutable variables + SDL = _sdl_token + GAUGE_CONTROLLER = _gauge_controller + MINTER = _minter + + + +@payable +@external +def __default__(): + pass + + +@external +def transmit_emissions(): + """ + @notice Mint any new emissions and transmit across to child gauge + """ + assert msg.sender == self.factory # dev: call via factory + + Minter(MINTER).mint(self) + minted: uint256 = ERC20(SDL).balanceOf(self) + + assert minted != 0 # dev: nothing minted + bridger: address = self.bridger + + Bridger(bridger).bridge(SDL, self, minted, value=Bridger(bridger).cost()) + + +@view +@external +def integrate_fraction(_user: address) -> uint256: + """ + @notice Query the total emissions `_user` is entitled to + @dev Any value of `_user` other than the gauge address will return 0 + """ + if _user == self: + return self.total_emissions + return 0 + + +@external +def user_checkpoint(_user: address) -> bool: + """ + @notice Checkpoint the gauge updating total emissions + @param _user Vestigal parameter with no impact on the function + """ + # the last period we calculated emissions up to (but not including) + last_period: uint256 = self.last_period + # our current period (which we will calculate emissions up to) + current_period: uint256 = block.timestamp / WEEK + + # only checkpoint if the current period is greater than the last period + # last period is always less than or equal to current period and we only calculate + # emissions up to current period (not including it) + if last_period != current_period: + # checkpoint the gauge filling in any missing weight data + GaugeController(GAUGE_CONTROLLER).checkpoint_gauge(self) + + rate: uint256 = Minter(MINTER).rate() + self.inflation_params.rate = rate + + params: InflationParams = self.inflation_params + emissions: uint256 = 0 + + # only calculate emissions for at most 256 periods since the last checkpoint + for i in range(last_period, last_period + 256): + if i == current_period: + # don't calculate emissions for the current period + break + period_time: uint256 = i * WEEK + weight: uint256 = GaugeController(GAUGE_CONTROLLER).gauge_relative_weight(self, period_time) + + if period_time <= params.finish_time and params.finish_time < period_time + WEEK: + # calculate with old rate + emissions += weight * params.rate * (params.finish_time - period_time) / 10 ** 18 + # update rate + params.rate = Minter(MINTER).committed_rate() + if (params.rate == MAX_UINT256): + params.rate = rate + # calculate with new rate + emissions += weight * params.rate * (period_time + WEEK - params.finish_time) / 10 ** 18 + # update finish time + params.finish_time += RATE_REDUCTION_TIME + # update storage + self.inflation_params = params + else: + emissions += weight * params.rate * WEEK / 10 ** 18 + + self.last_period = current_period + self.total_emissions += emissions + + return True + + +@external +def set_killed(_is_killed: bool): + """ + @notice Set the gauge kill status + @dev Inflation params are modified accordingly to disable/enable emissions + """ + assert msg.sender == Factory(self.factory).owner() + + if _is_killed: + self.inflation_params.rate = 0 + else: + self.inflation_params = InflationParams({ + rate: Minter(MINTER).rate(), + finish_time: Minter(MINTER).future_epoch_time_write() + }) + self.last_period = block.timestamp / WEEK + self.is_killed = _is_killed + + +@external +def update_bridger(): + """ + @notice Update the bridger used by this contract + @dev Bridger contracts should prevent 
briding if ever updated + """ + # reset approval + bridger: address = Factory(self.factory).get_bridger(self.chain_id) + ERC20(SDL).approve(self.bridger, 0) + ERC20(SDL).approve(bridger, MAX_UINT256) + self.bridger = bridger + + +@external +def initialize(_bridger: address, _chain_id: uint256, _name: String[32]): + """ + @notice Proxy initialization method + """ + #TODO: how is this a check for initialize? Its never changed besides contract creation, + # this currently passed if called from the factory but not manually calling from a test + assert self.factory == ZERO_ADDRESS, "already initialized" + + + self.chain_id = _chain_id + self.bridger = _bridger + name: String[64] = concat("Saddle ", _name, " Root Gauge") + self.name = name + self.factory = msg.sender + + inflation_params: InflationParams = InflationParams({ + rate: Minter(MINTER).rate(), + finish_time: Minter(MINTER).future_epoch_time_write() + }) + assert inflation_params.rate != 0, "inflation rate is 0" + + self.inflation_params = inflation_params + self.last_period = block.timestamp / WEEK + + ERC20(SDL).approve(_bridger, MAX_UINT256) \ No newline at end of file diff --git a/contracts/xchainGauges/mock/AnyCallExecutor.sol b/contracts/xchainGauges/mock/AnyCallExecutor.sol new file mode 100644 index 00000000..343b95d0 --- /dev/null +++ b/contracts/xchainGauges/mock/AnyCallExecutor.sol @@ -0,0 +1,48 @@ +// SPDX-License-Identifier: GPL-3.0-or-later +pragma solidity ^0.8.6; + +/// IApp interface of the application +interface IApp { + /// (required) call on the destination chain to exec the interaction + function anyExecute(bytes calldata _data) + external + returns (bool success, bytes memory result); + + /// (optional,advised) call back on the originating chain if the cross chain interaction fails + function anyFallback(address _to, bytes calldata _data) external; +} + +/// anycall executor is the delegator to execute contract calling (like a sandbox) +contract AnyCallExecutor { + struct Context { + address from; + uint256 fromChainID; + uint256 nonce; + } + + Context public context; + address public creator; + + constructor() { + creator = msg.sender; + } + + function execute( + address _to, + bytes calldata _data, + address _from, + uint256 _fromChainID, + uint256 _nonce + ) external returns (bool success, bytes memory result) { + if (msg.sender != creator) { + return (false, "AnyCallExecutor: caller is not the creator"); + } + context = Context({ + from: _from, + fromChainID: _fromChainID, + nonce: _nonce + }); + (success, result) = IApp(_to).anyExecute(_data); + context = Context({from: address(0), fromChainID: 0, nonce: 0}); + } +} diff --git a/contracts/xchainGauges/mock/MockAnycall.sol b/contracts/xchainGauges/mock/MockAnycall.sol new file mode 100644 index 00000000..e3191a24 --- /dev/null +++ b/contracts/xchainGauges/mock/MockAnycall.sol @@ -0,0 +1,78 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +interface IAnyCallTranslator { + function anyExecute(bytes calldata _data) + external + returns (bool success, bytes memory result); +} + +contract MockAnyCall { + address public anyCallTranslator; + + event successMsg(bool); + event resultMsg(bytes); + event AnyCallMessage( + address to, + bytes data, + address _fallback, + uint256 chainId, + uint256 flags + ); + + /** + * Mimics the source chain of AnyCall messaging system. 
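+     * In the live AnyCall system the source-chain `anyCall()` request is relayed
+     * off-chain and delivered on the destination chain by calling `anyExecute()`
+     * on the target through the executor contract; this mock skips the relay and
+     * only emits `AnyCallMessage` so tests can assert on the encoded payload,
+     * while `callAnyExecute()` below stands in for the destination-chain delivery.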
+ * @param _to The address of the contract to call + * @param _data The data to send to the contract + * @param _fallback The fallback address to call + * @param _to_chain_id The chainId of the destination chain + * @param _flags The flags for who is paying for the tx. + */ + function anyCall( + address _to, + bytes memory _data, + address _fallback, + uint256 _to_chain_id, + uint256 _flags + ) external payable { + emit AnyCallMessage(_to, _data, _fallback, _to_chain_id, _flags); + } + + function setanyCallTranslator(address _anyCallTranslator) external { + anyCallTranslator = _anyCallTranslator; + } + + /** + * Mimics the destination chain of AnyCall messaging system. + * When the destination chain detects incoming message, it will process it + * by calling `anyExecute` on the to address. + * @param _to address of the contract to call + * @param _data bytes of the data to send to the contract + */ + function callAnyExecute(address _to, bytes calldata _data) + external + returns (bool success, bytes memory result) + { + (success, result) = IAnyCallTranslator(_to).anyExecute(_data); + emit successMsg(success); + emit resultMsg(result); + } + + function context() + external + view + returns ( + address from, + uint256 fromChainID, + uint256 nonce + ) + { + return (anyCallTranslator, 1, 0); + } + + function executor() external view returns (address _executor) { + return (address(this)); + } +} diff --git a/contracts/xchainGauges/mock/MockBridger.sol b/contracts/xchainGauges/mock/MockBridger.sol new file mode 100644 index 00000000..bb1a768e --- /dev/null +++ b/contracts/xchainGauges/mock/MockBridger.sol @@ -0,0 +1,73 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.2.0/token/ERC20/utils/SafeERC20.sol"; + +interface IGatewayRouter { + function getGateWay(address _token) external view returns (address); + + function outboundTransfer( + address _token, + address _to, + uint256 _amount, + uint256 _maxGas, + uint256 _gasPriceBid, + bytes calldata _data // _max_submission_cost, _extra_data + ) external payable; +} + +contract MockBridger { + // consts + address private constant SDL = 0xf1Dc500FdE233A4055e25e5BbF516372BC4F6871; + // Arbitrum: L1 ERC20 Gateway + address private constant ARB_GATEWAY = + 0xa3A7B6F88361F48403514059F1F16C8E78d60EeC; + address private constant ARB_GATEWAY_ROUTER = + 0x72Ce9c846789fdB6fC1f34aC4AD25Dd9ef7031ef; + uint256 private constant MAX_UINT256 = 2**256 - 1; + address private constant ZERO_ADDRESS = + 0x0000000000000000000000000000000000000000; + + mapping(address => bool) public approved; + + using SafeERC20 for IERC20; + + constructor() { + // // approve token transfer to gateway + // IERC20 sdlToken = IERC20(SDL); + // // TODO: doesn't allow for safeApprove? + // assert(sdlToken.approve(ARB_GATEWAY, MAX_UINT256)); + approved[SDL] = true; + } + + function bridge( + address _token, + address _to, + uint256 _amount + ) external payable { + // TODO: doesn't allow for safeTransferFrom? + assert(IERC20(_token).transferFrom(msg.sender, address(this), _amount)); + if (_token != SDL && !approved[_token]) { + // TODO: doesn't allow for safeApprove? 
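+            // NOTE (re: TODO): SafeERC20's safeApprove/safeTransferFrom return no
+            // value and revert internally on failure, so they cannot be wrapped in
+            // assert(...). Calling the helper directly, e.g.
+            //   IERC20(_token).safeApprove(
+            //       IGatewayRouter(ARB_GATEWAY_ROUTER).getGateWay(SDL),
+            //       MAX_UINT256
+            //   );
+            // is the equivalent, assuming the current allowance is zero
+            // (safeApprove reverts when changing a non-zero allowance).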
+ assert( + IERC20(_token).approve( + IGatewayRouter(ARB_GATEWAY_ROUTER).getGateWay(SDL), + MAX_UINT256 + ) + ); + approved[_token] = true; + } + // IGatewayRouter(ARB_GATEWAY_ROUTER).outboundTransfer{ + // value: gasLimit * gasPrice + maxSubmissionCost + // }( + // _token, + // _to, + // _amount, + // gasLimit, + // gasPrice, + // abi.encode(maxSubmissionCost, new bytes(0)) + // ); + } +} diff --git a/contracts/xchainGauges/oracles/ChildOracle.sol b/contracts/xchainGauges/oracles/ChildOracle.sol new file mode 100644 index 00000000..4eb2c30e --- /dev/null +++ b/contracts/xchainGauges/oracles/ChildOracle.sol @@ -0,0 +1,95 @@ +// SPDX-License-Identifier: MIT + +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.2.0/token/ERC20/utils/SafeERC20.sol"; + +contract ChildOracle { + // consts + address public callProxy; + address public owner; + address public futureOwner; + address private constant ZERO_ADDRESS = + 0x0000000000000000000000000000000000000000; + + // vars + mapping(address => Point) public userPoints; + Point public globalPoint; + + struct Point { + int128 bias; + int128 slope; + uint256 ts; + } + + // events + event TransferOwnership(address oldOwner, address newOwner); + event UpdateCallProxy(address oldCallProxy, address newCallProxy); + event Recieve(Point userPoint, Point globalPoint, address user); + + constructor(address _callProxy) { + callProxy = _callProxy; + emit UpdateCallProxy(ZERO_ADDRESS, _callProxy); + owner = msg.sender; + emit TransferOwnership(ZERO_ADDRESS, msg.sender); + } + + function balanceOf(address _user) external view returns (uint256) { + Point memory lastPoint = userPoints[_user]; + lastPoint.bias -= + lastPoint.slope * + int128(int256(block.timestamp - lastPoint.ts)); + if (lastPoint.bias < 0) { + lastPoint.bias = 0; + } + return (uint256(uint128(lastPoint.bias))); + } + + function totalSupply() external view returns (uint256) { + Point memory lastPoint = globalPoint; + lastPoint.bias -= + lastPoint.slope * + int128(int256(block.timestamp - lastPoint.ts)); + if (lastPoint.bias < 0) { + lastPoint.bias = 0; + } + return (uint256(uint128(lastPoint.bias))); + } + + function recieve( + Point memory _userPoint, + Point memory _globalPoint, + address _user + ) external { + require(msg.sender == callProxy, "not translator"); + Point storage prevUserPoint = userPoints[_user]; + if (_userPoint.ts > prevUserPoint.ts) { + userPoints[_user] = _userPoint; + } + + Point memory prevGlobalPoint = globalPoint; + if (_globalPoint.ts > prevGlobalPoint.ts) { + globalPoint = _globalPoint; + } + + emit Recieve(_userPoint, _globalPoint, _user); + } + + function setCallProxy(address _newCallProxy) external { + require(msg.sender == owner, "not owner"); + emit UpdateCallProxy(callProxy, _newCallProxy); + callProxy = _newCallProxy; + } + + function commitTransferOwnership(address _futureOwner) external { + require(msg.sender == owner); + futureOwner = _futureOwner; + } + + function acceptTransferOwnership() external { + require(msg.sender == futureOwner); + emit TransferOwnership(owner, msg.sender); + owner = msg.sender; + } +} diff --git a/contracts/xchainGauges/oracles/RootOracle.sol b/contracts/xchainGauges/oracles/RootOracle.sol new file mode 100644 index 00000000..b46817b2 --- /dev/null +++ b/contracts/xchainGauges/oracles/RootOracle.sol @@ -0,0 +1,134 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.6; +pragma experimental ABIEncoderV2; + +import "@openzeppelin/contracts-4.2.0/token/ERC20/utils/SafeERC20.sol"; + +struct 
Point { + int128 bias; + int128 slope; + uint256 ts; +} + +interface ICallProxy { + function anyCall( + address _to, + bytes calldata _data, + address _fallback, + uint256 _toChainId, + uint256 _flags + ) external; // TODO: nonpayable but doesn't let me +} + +interface Factory { + function get_bridger(uint256 _chain_id) external view returns (address); +} + +interface votingEscrow { + function epoch() external view returns (uint256); + + function point_history(uint256 _idx) external view returns (Point memory); + + function user_point_epoch(address _user) external view returns (uint256); + + function user_point_history(address _user, uint256 _idx) + external + view + returns (Point memory); +} + +contract RootOracle { + // consts + address public immutable FACTORY; + address public immutable VE; + + // events + event TransferOwnership(address oldOwner, address newOwner); + event UpdateCallProxy(address oldCallProxy, address newCallProxy); + + // vars + address public callProxy; + address public owner; + address public futureOwner; + + constructor( + address _factory, + address _ve, + address _callProxy + ) { + FACTORY = _factory; + VE = _ve; + + callProxy = _callProxy; + emit UpdateCallProxy(address(0), _callProxy); + + owner = msg.sender; + emit TransferOwnership(address(0), msg.sender); + } + + function push(uint256 _chainId) external { + address user = msg.sender; + assert(Factory(FACTORY).get_bridger(_chainId) != address(0)); + + require(IERC20(VE).balanceOf(user) != 0, "no ve balance"); + Point memory userPoint = votingEscrow(VE).user_point_history( + user, + votingEscrow(VE).user_point_epoch(user) + ); + Point memory globalPoint = votingEscrow(VE).point_history( + votingEscrow(VE).epoch() + ); + ICallProxy(callProxy).anyCall( + address(this), + abi.encodeWithSelector( + bytes4( + keccak256( + "recieve((int128,int128,uint256),(int128,int128,uint256),address)" + ) + ), + userPoint, + globalPoint, + user + ), + address(0), + _chainId, + 0 + ); + } + + function push(uint256 _chainId, address _user) external { + assert(Factory(FACTORY).get_bridger(_chainId) != address(0)); + require(IERC20(VE).balanceOf(_user) != 0, "no ve balance"); + Point memory userPoint = votingEscrow(VE).user_point_history( + _user, + votingEscrow(VE).user_point_epoch(_user) + ); + Point memory globalPoint = votingEscrow(VE).point_history( + votingEscrow(VE).epoch() + ); + ICallProxy(callProxy).anyCall( + address(this), + abi.encode(userPoint, globalPoint, _user), + address(0), + _chainId, + 0 + ); + } + + function setCallProxy(address _newCallProxy) external { + require(msg.sender == owner, "not owner"); + emit UpdateCallProxy(callProxy, _newCallProxy); + callProxy = _newCallProxy; + } + + function commitTransferOwnership(address _futureOwner) external { + require(msg.sender == owner); + futureOwner = _futureOwner; + } + + function acceptTransferOwnership() external { + require(msg.sender == futureOwner); + emit TransferOwnership(owner, msg.sender); + owner = msg.sender; + } +} diff --git a/deploy/hardhat/620_deploy_x_chain_gauge_factory.ts b/deploy/hardhat/620_deploy_x_chain_gauge_factory.ts new file mode 100644 index 00000000..017c6516 --- /dev/null +++ b/deploy/hardhat/620_deploy_x_chain_gauge_factory.ts @@ -0,0 +1,132 @@ +import { DeployFunction } from "hardhat-deploy/types" +import { HardhatRuntimeEnvironment } from "hardhat/types" +import { increaseTimestamp } from "../../test/testUtils" + +const func: DeployFunction = async function (hre: HardhatRuntimeEnvironment) { + const { deployments, getNamedAccounts, 
getChainId, ethers } = hre + const { deploy, get, getOrNull, execute, read, log } = deployments + const { deployer, libraryDeployer } = await getNamedAccounts() + + const deployOptions = { + from: deployer, + log: true, + skipIfAlreadyDeployed: true, + } + const executeOptions = { + from: deployer, + log: true, + } + + const mockBridger = await deploy("MockBridger", deployOptions) + const mockAnyCall = await deploy("MockAnyCall", deployOptions) + const anyCallTranslatorLogic = await deploy( + "AnyCallTranslator", + deployOptions, + ) + const proxyAdmin = await deploy("ProxyAdmin", deployOptions) + + const initializeCallData = ( + await ethers + .getContractAt("AnyCallTranslator", anyCallTranslatorLogic.address) + .then((c) => + c.populateTransaction.initialize(deployer, mockAnyCall.address), + ) + ).data as string + + const translatorProxy = await deploy("TransparentUpgradeableProxy", { + ...deployOptions, + args: [ + anyCallTranslatorLogic.address, // implementation + proxyAdmin.address, // admin + initializeCallData, // initialize data + ], + }) + + const rgf = await deploy("RootGaugeFactory", { + ...deployOptions, + args: [translatorProxy.address, deployer], + }) + + const rootGaugeImpl = await deploy("RootGauge", { + ...deployOptions, + args: [ + (await get("SDL")).address, + (await get("GaugeController")).address, + (await get("Minter")).address, + ], + }) + + // Add to RootGaugeFactory to master registry + await execute( + "MasterRegistry", + executeOptions, + "addRegistry", + ethers.utils.formatBytes32String("RootGaugeFactory"), + rgf.address, + ) + + // Update RootGaugeFactory bridger for chainId 11, set call proxy, set implementation + await execute( + "RootGaugeFactory", + executeOptions, + "set_bridger", + 11, + mockBridger.address, + ) + await execute( + "RootGaugeFactory", + executeOptions, + "set_call_proxy", + translatorProxy.address, + ) + await execute( + "RootGaugeFactory", + executeOptions, + "set_implementation", + rootGaugeImpl.address, + ) + + // Deploy a root gauge with name "Sample_Name" @ chainId 11 for testing + // But do not add to gauge controller + await execute( + "RootGaugeFactory", + { + ...executeOptions, + gasLimit: 500000, // 353411 gas expected + }, + "deploy_gauge", + 11, + ethers.utils.keccak256(ethers.utils.formatBytes32String("Sample_Name")), + "Sample_Name", + ) + + // Deploy a root gauge with name "Sample_Name_2" @ chainId 11 for testing + await execute( + "RootGaugeFactory", + { + ...executeOptions, + gasLimit: 500000, // 353411 gas expected + }, + "deploy_gauge", + 11, + ethers.utils.keccak256(ethers.utils.formatBytes32String("Sample_Name_2")), + "Sample_Name_2", + ) + const deployedGauge = await read("RootGaugeFactory", "get_gauge", 11, 1) + + await execute( + "GaugeController", + executeOptions, + "add_gauge(address,int128,uint256)", + deployedGauge, + 1, + 100, + ) + + // Skip a week to apply the new weights + await increaseTimestamp(86400 * 7) +} + +export default func +func.dependencies = ["veSDL"] +func.tags = ["CrossChaingGauges"] diff --git a/deploy/mainnet/670_deploy_root_x_chain_gauges.ts b/deploy/mainnet/670_deploy_root_x_chain_gauges.ts new file mode 100644 index 00000000..861e28a1 --- /dev/null +++ b/deploy/mainnet/670_deploy_root_x_chain_gauges.ts @@ -0,0 +1,127 @@ +import { DeployFunction } from "hardhat-deploy/types" +import { HardhatRuntimeEnvironment } from "hardhat/types" +import { AnyCallTranslator } from "../../build/typechain" +import { + BIG_NUMBER_1E18, + impersonateAccount, + setEtherBalance, + ZERO_ADDRESS, +} from 
"../../test/testUtils" +import { ANYCALL_ADDRESS, PROD_DEPLOYER_ADDRESS } from "../../utils/accounts" +import path from "path" + +const func: DeployFunction = async function (hre: HardhatRuntimeEnvironment) { + const { deployments, getNamedAccounts, ethers } = hre + const { get, execute, deploy, log } = deployments + const { deployer, libraryDeployer } = await getNamedAccounts() + + if (process.env.HARDHAT_DEPLOY_FORK == null) { + log(`Not running on forked mode, skipping ${path.basename(__filename)}`) + return + } + + // In prod, update these values + const owner = deployer + const crossChainDeployer = libraryDeployer + + const xChainFactoryDeployOptions = { + log: true, + from: crossChainDeployer, + skipIfAlreadyDeployed: true, + } + + const executeOptions = { + log: true, + from: deployer, + } + + // 0: Deploy RootGaugeFactory + const rgf = await deploy("RootGaugeFactory", { + ...xChainFactoryDeployOptions, + args: [ + ZERO_ADDRESS, // AnyCallTranslator placeholder + owner, // owner + ], + }) + + // 1: Deploy Root Gauge + const rootGauge = await deploy("RootGauge", { + ...xChainFactoryDeployOptions, + args: [ + (await get("SDL")).address, + (await get("GaugeController")).address, + (await get("Minter")).address, + ], + }) + + // 2: Deploy ProxyAdmin to be used as admin of AnyCallTranslator + const proxyAdmin = await deploy("ProxyAdmin", xChainFactoryDeployOptions) + + // 3: Deploy AnyCallTranslator as a logic contract + const anyCallTranslatorLogic = await deploy( + "AnyCallTranslator", + xChainFactoryDeployOptions, + ) + + // Function data for AnyCallTranslator.initialize(multisigAddress, anyCallAddress) + // This will be passed as 3rd parameter when intiaizing the proxy + // and gets used as calldata for calling self.delegatecall(initializeCallData) + const initData = await ethers + .getContractFactory("AnyCallTranslator") + .then((c) => + c.interface.encodeFunctionData("initialize", [owner, ANYCALL_ADDRESS]), + ) + + // 4: Deploy Proxy to be used as AnyCallTranslator + const anyCallTranslatorProxy = await deploy("AnyCallTranslatorProxy", { + ...xChainFactoryDeployOptions, + contract: "TransparentUpgradeableProxy", + args: [anyCallTranslatorLogic.address, proxyAdmin.address, initData], + }) + + // 5: Deploy Root Oracle + const rootOracle = await deploy("RootOracle", { + ...xChainFactoryDeployOptions, + args: [ + rgf.address, + (await get("VotingEscrow")).address, + anyCallTranslatorProxy.address, + ], + }) + + // Set up storage variables in RootGaugeFactory from deployer account + await execute( + "RootGaugeFactory", + executeOptions, + "set_implementation", + rootGauge.address, + ) + await execute( + "RootGaugeFactory", + executeOptions, + "set_call_proxy", + anyCallTranslatorProxy.address, + ) + + // Add RootGaugeFactory to master registry + const prodDeployer = await impersonateAccount(PROD_DEPLOYER_ADDRESS) + await setEtherBalance(PROD_DEPLOYER_ADDRESS, BIG_NUMBER_1E18.mul(10000)) + const mr = await ethers.getContract("MasterRegistry") + await mr + .connect(prodDeployer) + .addRegistry( + ethers.utils.formatBytes32String("RootGaugeFactory"), + rgf.address, + ) + + // Add RGF and RootOracle to addKnownCallers from owner + const anyCallTranslatorProxyContract: AnyCallTranslator = + await ethers.getContractAt( + "AnyCallTranslator", + anyCallTranslatorProxy.address, + ) + await anyCallTranslatorProxyContract + .connect(await impersonateAccount(owner)) + .addKnownCallers([rgf.address, rootOracle.address]) +} +export default func diff --git a/deploy/mainnet/671_deploy_bridgers.ts 
b/deploy/mainnet/671_deploy_bridgers.ts new file mode 100644 index 00000000..79295292 --- /dev/null +++ b/deploy/mainnet/671_deploy_bridgers.ts @@ -0,0 +1,100 @@ +import { DeployFunction } from "hardhat-deploy/types" +import { HardhatRuntimeEnvironment } from "hardhat/types" +import path from "path" + +import { CHAIN_ID } from "../../utils/network" + +const func: DeployFunction = async function (hre: HardhatRuntimeEnvironment) { + const { deployments, getNamedAccounts, ethers } = hre + const { get, execute, deploy, log } = deployments + const { deployer, libraryDeployer } = await getNamedAccounts() + + if (process.env.HARDHAT_DEPLOY_FORK == null) { + log(`Not running on forked mode, skipping ${path.basename(__filename)}`) + return + } + + // In prod, update these values + const owner = deployer + const crossChainDeployer = libraryDeployer + + const deployOptions = { + log: true, + from: deployer, + skipIfAlreadyDeployed: true, + } + + const executeOptions = { + log: true, + from: deployer, + } + + const gasLimit = 1000000 + const gasPrice = 990000000 + const maxSubmissionCost = 10000000000000 + + // Deploy Arbitrum Bridger + await deploy("ArbitrumBridger", { + ...deployOptions, + args: [gasLimit, gasPrice, maxSubmissionCost, (await get("SDL")).address], + }) + + // Set Bridger + await execute( + "RootGaugeFactory", + executeOptions, + "set_bridger", + CHAIN_ID.ARBITRUM_MAINNET, + ( + await get("ArbitrumBridger") + ).address, + ) + + // Impersonate AnyCall.executor() and call AnyCallTranslatorProxy.anyExecute() + // with correct calldata for creating a new child gauge + // For testing purposes, we will create a child gauge for SaddleFRAXBPPool + + // const executorAddress = await ethers + // .getContractAt("MockAnyCall", ANYCALL_ADDRESS) + // .then((c) => c.executor()) + // const executorContract: AnyCallExecutor = await ethers.getContractAt( + // "AnyCallExecutor", + // executorAddress, + // ) + // const executorCreatorAddress = await executorContract.creator() + // const executorCreator = await impersonateAccount(executorCreatorAddress) + // await setEtherBalance(executorCreatorAddress, BIG_NUMBER_1E18.mul(10000)) + + // // Format deploy_gauge call data that will be passed from RootGaugeFactory + // const callData = await ethers + // .getContractFactory("ChildGaugeFactory") + // .then(async (c) => + // c.interface.encodeFunctionData( + // "deploy_gauge(address,bytes32,string,address)", + // [ + // ( + // await get("SaddleFRAXBPPoolLPToken") + // ).address, // LP token address + // convertGaugeNameToSalt("FraxBP X-Chain Gauge"), // salt + // "FraxBP X-Chain Gauge", // name + // deployer, // manager of the gauge + // ], + // ), + // ) + + // // Format additional calldata for calling AnyCallTranslatorProxy.anyExecute() + // callData = ethers.utils.defaultAbiCoder.encode( + // ["address", "bytes"], + // [rgf.address, callData], + // ) + + // // Call anyExecute from impersonated executor account (owned by AnyCall) + // await executorContract.connect(executorCreator).execute( + // anyCallTranslatorProxy.address, + // callData, + // anyCallTranslatorProxy.address, // Pretend the call came from same address from source chain + // CHAIN_ID.MAINNET, // Source chain ID + // 0, // Source nonce + // ) +} +export default func diff --git a/deploy/mainnet/672_deploy_x_chain_gauges.ts b/deploy/mainnet/672_deploy_x_chain_gauges.ts new file mode 100644 index 00000000..781cb676 --- /dev/null +++ b/deploy/mainnet/672_deploy_x_chain_gauges.ts @@ -0,0 +1,101 @@ +import { anyValue } from 
"@nomicfoundation/hardhat-chai-matchers/withArgs" +import { expect } from "chai" +import { DeployFunction } from "hardhat-deploy/types" +import { HardhatRuntimeEnvironment } from "hardhat/types" +import path from "path" +import { AnyCallExecutor } from "../../build/typechain" +import { + BIG_NUMBER_1E18, + convertGaugeNameToSalt, + impersonateAccount, + setEtherBalance, +} from "../../test/testUtils" +import { ANYCALL_ADDRESS } from "../../utils/accounts" + +import { CHAIN_ID } from "../../utils/network" + +const func: DeployFunction = async function (hre: HardhatRuntimeEnvironment) { + const { deployments, getNamedAccounts, ethers } = hre + const { get, execute, deploy, log } = deployments + const { deployer, libraryDeployer } = await getNamedAccounts() + + if (process.env.HARDHAT_DEPLOY_FORK == null) { + log(`Not running on forked mode, skipping ${path.basename(__filename)}`) + return + } + + // In prod, update these values + const owner = deployer + const crossChainDeployer = libraryDeployer + + const deployOptions = { + log: true, + from: deployer, + skipIfAlreadyDeployed: true, + } + + const executeOptions = { + log: true, + from: deployer, + } + // Impersonate AnyCall.executor() and call AnyCallTranslatorProxy.anyExecute() + // with correct calldata for creating a new child gauge + // For testing purposes, we will create a cross chain gauge for SaddleFRAXBPPool + const executorAddress = await ethers + .getContractAt("MockAnyCall", ANYCALL_ADDRESS) + .then((c) => c.executor()) + const executorContract: AnyCallExecutor = await ethers.getContractAt( + "AnyCallExecutor", + executorAddress, + ) + const anyCallTranslatorProxy = await ethers.getContract( + "AnyCallTranslatorProxy", + ANYCALL_ADDRESS, + ) + const executorCreatorAddress = await executorContract.creator() + const executorCreator = await impersonateAccount(executorCreatorAddress) + await setEtherBalance(executorCreatorAddress, BIG_NUMBER_1E18.mul(10000)) + + // Deploy a gauge for Arbitrum FraxBP + const deployGaugeData = { + chainId: CHAIN_ID.ARBITRUM_MAINNET, + salt: convertGaugeNameToSalt("Arb FraxBP"), + gaugeName: "Arb FraxBP", + } + + let callData = await ethers + .getContractFactory("RootGaugeFactory") + .then(async (c) => + c.interface.encodeFunctionData( + "deploy_gauge", + Object.values(deployGaugeData), + ), + ) + + // Format additional calldata for calling AnyCallTranslatorProxy.anyExecute() + callData = ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [(await get("RootGaugeFactory")).address, callData], + ) + + // Call anyExecute from impersonated executor account (owned by AnyCall) + // Then confirm new gauge was deployed via event emitted by RootGaugeFactory + await expect( + executorContract.connect(executorCreator).execute( + anyCallTranslatorProxy.address, + callData, + anyCallTranslatorProxy.address, // Pretend the call came from same address from source chain + CHAIN_ID.ARBITRUM_MAINNET, // Source chain ID + 0, // Source nonce + ), + ) + .to.emit(await ethers.getContract("RootGaugeFactory"), "DeployedGauge") + .withArgs( + anyValue, + deployGaugeData.chainId, + anyValue, + deployGaugeData.salt, + anyValue, + ) +} +export default func diff --git a/deploy/optimism/230_deploy_sidechain_x_chain_gauges.ts b/deploy/optimism/230_deploy_sidechain_x_chain_gauges.ts new file mode 100644 index 00000000..f08d57d6 --- /dev/null +++ b/deploy/optimism/230_deploy_sidechain_x_chain_gauges.ts @@ -0,0 +1,175 @@ +import { DeployFunction } from "hardhat-deploy/types" +import { HardhatRuntimeEnvironment } from 
"hardhat/types" +import { AnyCallTranslator, AnyCallExecutor } from "../../build/typechain" +import { + BIG_NUMBER_1E18, + convertGaugeNameToSalt, + impersonateAccount, + setEtherBalance, + ZERO_ADDRESS, +} from "../../test/testUtils" +import { ANYCALL_ADDRESS, PROD_DEPLOYER_ADDRESS } from "../../utils/accounts" +import { CHAIN_ID } from "../../utils/network" + +const func: DeployFunction = async function (hre: HardhatRuntimeEnvironment) { + const { deployments, getNamedAccounts, ethers } = hre + const { get, execute, deploy, log } = deployments + const { deployer, libraryDeployer } = await getNamedAccounts() + + // In prod, update these values + const owner = deployer + const crossChainDeployer = libraryDeployer + + const xChainFactoryDeployOptions = { + log: true, + from: crossChainDeployer, + skipIfAlreadyDeployed: true, + } + + const executeOptions = { + log: true, + from: deployer, + } + + if (process.env.HARDHAT_DEPLOY_FORK) { + // 0: Deploy ChildGaugeFactory + const cgf = await deploy("ChildGaugeFactory", { + ...xChainFactoryDeployOptions, + args: [ + ZERO_ADDRESS, // AnyCallTranslator placeholder + ( + await get("SDL") + ).address, // SDL + owner, // owner + ], + }) + + // 1: Deploy Child Gauge + const cg = await deploy("ChildGauge", { + ...xChainFactoryDeployOptions, + args: [(await get("SDL")).address, cgf.address], + }) + + // 2: Deploy ProxyAdmin to be used as admin of AnyCallTranslator + const proxyAdmin = await deploy("ProxyAdmin", xChainFactoryDeployOptions) + + // 3: Deploy AnyCallTranslator as a logic contract + const anyCallTranslatorLogic = await deploy( + "AnyCallTranslator", + xChainFactoryDeployOptions, + ) + + // Function data for AnyCallTranslator.initialize(multisigAddress, anyCallAddress) + // This will be passed as 3rd parameter when intiaizing the proxy + // and gets used as calldata for calling self.delegatecall(initializeCallData) + const initData = await ethers + .getContractFactory("AnyCallTranslator") + .then((c) => + c.interface.encodeFunctionData("initialize", [owner, ANYCALL_ADDRESS]), + ) + + // 4: Deploy Proxy to be used as AnyCallTranslator + const anyCallTranslatorProxy = await deploy("AnyCallTranslatorProxy", { + ...xChainFactoryDeployOptions, + contract: "TransparentUpgradeableProxy", + args: [anyCallTranslatorLogic.address, proxyAdmin.address, initData], + }) + + // 5: Deploy child oracle + const co = await deploy("ChildOracle", { + ...xChainFactoryDeployOptions, + args: [anyCallTranslatorProxy.address], + }) + + // Set up storage variables in child gauge factory from deployer account + await execute( + "ChildGaugeFactory", + executeOptions, + "set_implementation", + cg.address, + ) + await execute( + "ChildGaugeFactory", + executeOptions, + "set_voting_escrow", + co.address, + ) + await execute( + "ChildGaugeFactory", + executeOptions, + "set_call_proxy", + anyCallTranslatorProxy.address, + ) + + // Add to RootGaugeFactory to master registry + const prodDeployer = await impersonateAccount(PROD_DEPLOYER_ADDRESS) + await setEtherBalance(PROD_DEPLOYER_ADDRESS, BIG_NUMBER_1E18.mul(10000)) + const mr = await ethers.getContract("MasterRegistry") + await mr + .connect(prodDeployer) + .addRegistry( + ethers.utils.formatBytes32String("ChildGaugeFactory"), + cgf.address, + ) + + // Add CGF to addKnownCallers from owner + const anyCallTranslatorProxyContract: AnyCallTranslator = + await ethers.getContractAt( + "AnyCallTranslator", + anyCallTranslatorProxy.address, + ) + await anyCallTranslatorProxyContract + .connect(await impersonateAccount(owner)) + 
.addKnownCallers([cgf.address]) + + // Deploy a child gauge for testing + // Impersonate AnyCall.executor() and call AnyCallTranslatorProxy.anyExecute() + // with correct calldata for creating a new child gauge + // For testing purposes, we will create a child gauge for SaddleFRAXBPPool + const executorAddress = await ethers + .getContractAt("MockAnyCall", ANYCALL_ADDRESS) + .then((c) => c.executor()) + const executorContract: AnyCallExecutor = await ethers.getContractAt( + "AnyCallExecutor", + executorAddress, + ) + const executorCreatorAddress = await executorContract.creator() + const executorCreator = await impersonateAccount(executorCreatorAddress) + await setEtherBalance(executorCreatorAddress, BIG_NUMBER_1E18.mul(10000)) + + // Format deploy_gauge call data that will be passed from RootGaugeFactory + let callData = await ethers + .getContractFactory("ChildGaugeFactory") + .then(async (c) => + c.interface.encodeFunctionData( + "deploy_gauge(address,bytes32,string,address)", + [ + ( + await get("SaddleFRAXBPPoolLPToken") + ).address, // LP token address + convertGaugeNameToSalt("FraxBP X-Chain Gauge"), // salt + "FraxBP X-Chain Gauge", // name + deployer, // manager of the gauge + ], + ), + ) + + // Format additional calldata for calling AnyCallTranslatorProxy.anyExecute() + callData = ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [cgf.address, callData], + ) + + // Call anyExecute from impersonated executor account (owned by AnyCall) + await executorContract.connect(executorCreator).execute( + anyCallTranslatorProxy.address, + callData, + anyCallTranslatorProxy.address, // Pretend the call came from same address from source chain + CHAIN_ID.MAINNET, // Source chain ID + 0, // Source nonce + ) + } else { + log(`Not running on forked mode, skipping`) + } +} +export default func diff --git a/test/testUtils.ts b/test/testUtils.ts index 4e6f9b7f..ad31d52e 100644 --- a/test/testUtils.ts +++ b/test/testUtils.ts @@ -195,3 +195,7 @@ export async function asyncForEach( await callback(array[index], index) } } + +export function convertGaugeNameToSalt(name: string): string { + return ethers.utils.keccak256(ethers.utils.formatBytes32String(name)) +} diff --git a/test/xchainGauges/anyCallTranslator.ts b/test/xchainGauges/anyCallTranslator.ts new file mode 100644 index 00000000..006cad2e --- /dev/null +++ b/test/xchainGauges/anyCallTranslator.ts @@ -0,0 +1,316 @@ +import chai from "chai" +import { Signer } from "ethers" +import { deployments, ethers } from "hardhat" +import { + AnyCallTranslator, + ChildGaugeFactory, + ChildOracle, + GenericERC20, + MockAnyCall, + RootGaugeFactory, + RootOracle, + SDL, + VotingEscrow, +} from "../../build/typechain" +import { MAX_LOCK_TIME, WEEK } from "../../utils/time" +import { + BIG_NUMBER_1E18, + convertGaugeNameToSalt, + getCurrentBlockTimestamp, + MAX_UINT256, + setTimestamp, +} from "../testUtils" +import { + TEST_SIDE_CHAIN_ID, + setupAnyCallTranslator, + setupChildGaugeFactory, + setupChildOracle, + setupRootGaugeFactory, + setupRootOracle, +} from "./utils" + +import { anyValue } from "@nomicfoundation/hardhat-chai-matchers/withArgs" + +const { execute } = deployments + +const { expect } = chai + +describe("AnycallTranslator", () => { + let signers: Array + let users: string[] + let mockAnycall: MockAnyCall + let rootGaugeFactory: RootGaugeFactory + let childGaugeFactory: ChildGaugeFactory + let anyCallTranslator: AnyCallTranslator + let veSDL: VotingEscrow + let rootOracle: RootOracle + let childOracle: ChildOracle + let dummyToken: 
GenericERC20 + + const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" + const GAUGE_NAME = "Dummy Token X-chain Gauge" + const GAUGE_SALT = convertGaugeNameToSalt(GAUGE_NAME) + + const setupTest = deployments.createFixture( + async ({ deployments, ethers }) => { + await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments + + signers = await ethers.getSigners() + users = await Promise.all( + signers.map(async (signer) => signer.getAddress()), + ) + + const contracts = await setupAnyCallTranslator(users[0]) + anyCallTranslator = contracts.anyCallTranslator + mockAnycall = contracts.mockAnycall + + // **** Setup rootGauge Factory **** + + rootGaugeFactory = await setupRootGaugeFactory( + anyCallTranslator.address, + users[0], + ) + + // **** Setup RootOracle **** + rootOracle = await setupRootOracle( + anyCallTranslator.address, + rootGaugeFactory.address, + ) + + // **** Setup ChildOracle **** + childOracle = await setupChildOracle(anyCallTranslator.address) + + // **** Setup ChildGaugeFactory **** + childGaugeFactory = await setupChildGaugeFactory( + anyCallTranslator.address, + users[0], + childOracle.address, + ) + + // **** Add expected callers to known callers **** + await anyCallTranslator.addKnownCallers([ + rootGaugeFactory.address, + rootOracle.address, + childGaugeFactory.address, + ]) + + // Set timestamp to start of the week + await setTimestamp( + Math.floor(((await getCurrentBlockTimestamp()) + WEEK) / WEEK) * WEEK, + ) + + // Create max lock from deployer address + veSDL = await ethers.getContract("VotingEscrow") + await ethers + .getContract("SDL") + .then((sdl) => (sdl as SDL).approve(veSDL.address, MAX_UINT256)) + await veSDL.create_lock( + BIG_NUMBER_1E18.mul(10_000_000), + (await getCurrentBlockTimestamp()) + MAX_LOCK_TIME, + ) + + dummyToken = (await ethers + .getContractFactory("GenericERC20") + .then((f) => f.deploy("Dummy Token", "DUMMY", 18))) as GenericERC20 + }, + ) + + beforeEach(async () => { + await setupTest() + }) + + describe("Root chain", () => { + describe("Is used as source chain, originates anycall()", () => { + it("Should be able to send message to trigger ChildGaugeFactory.deploy_gauge()", async () => { + const DUMMY_TOKEN_ADDRESS = dummyToken.address + const GAUGE_OWNER = users[0] + + const callData = childGaugeFactory.interface.encodeFunctionData( + "deploy_gauge(address,bytes32,string,address)", + [dummyToken.address, GAUGE_SALT, GAUGE_NAME, GAUGE_OWNER], + ) + + await expect( + rootGaugeFactory[ + "deploy_child_gauge(uint256,address,bytes32,string,address)" + ]( + TEST_SIDE_CHAIN_ID, + DUMMY_TOKEN_ADDRESS, + GAUGE_SALT, + GAUGE_NAME, + GAUGE_OWNER, + ), + ) + .to.emit(mockAnycall, "AnyCallMessage") + .withArgs( + anyCallTranslator.address, + ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [rootGaugeFactory.address, callData], + ), + ZERO_ADDRESS, + TEST_SIDE_CHAIN_ID, + 0, + ) + }) + + it("Should be able to send message to trigger ChildOracle.receive()", async () => { + await veSDL.checkpoint() + + const returnData = await veSDL.callStatic.user_point_history( + users[0], + veSDL.callStatic.user_point_epoch(users[0]), + ) + + const userPoint = { + bias: returnData.bias, + slope: returnData.slope, + ts: returnData.ts, + } + + const returnDataGlobal = await veSDL.callStatic.point_history( + veSDL.callStatic.epoch(), + ) + + const globalPoint = { + bias: returnDataGlobal.bias, + slope: returnDataGlobal.slope, + ts: returnDataGlobal.ts, + } + + // 
receive((int128,int128,uint256),(int128,int128,uint256),address) + const callData = childOracle.interface.encodeFunctionData("recieve", [ + userPoint, + globalPoint, + users[0], + ]) + + await expect(rootOracle["push(uint256)"](TEST_SIDE_CHAIN_ID)) + .to.emit(mockAnycall, "AnyCallMessage") + .withArgs( + anyCallTranslator.address, + ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [rootOracle.address, callData], + ), + ZERO_ADDRESS, + TEST_SIDE_CHAIN_ID, + 0, + ) + }) + }) + }) + describe("Is used as destination chain, target.anyExecute() is executed", () => { + it("Should be able to recieve the message to deploy a root gauge via RootGaugeFactory.deploy_gauge()", async () => { + const callData = rootGaugeFactory.interface.encodeFunctionData( + "deploy_gauge", + [TEST_SIDE_CHAIN_ID, GAUGE_SALT, GAUGE_NAME], + ) + const implementation = await rootGaugeFactory.get_implementation() + + // Expect RootGaugeFactory to emit DeployedGauge event + await expect( + mockAnycall.callAnyExecute( + anyCallTranslator.address, + ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [rootGaugeFactory.address, callData], + ), + ), + ) + .to.emit(rootGaugeFactory, "DeployedGauge") + .withArgs( + implementation, + TEST_SIDE_CHAIN_ID, + anyCallTranslator.address, + GAUGE_SALT, + anyValue, + ) + + // Expect there is a new gauge deployed + expect(await rootGaugeFactory.get_gauge_count(TEST_SIDE_CHAIN_ID)).to.eq( + 1, + ) + }) + }) + + describe("Side chain", () => { + it("AnyCall -> AnyCallTranslator -> ChildGaugeFactory.deploy_gauge() -> AnyCallTranslator -> Anycall", async () => { + const DUMMY_TOKEN_ADDRESS = dummyToken.address + const GAUGE_OWNER = users[0] + + const callData = childGaugeFactory.interface.encodeFunctionData( + "deploy_gauge(address,bytes32,string,address)", + [DUMMY_TOKEN_ADDRESS, GAUGE_SALT, GAUGE_NAME, GAUGE_OWNER], + ) + + // The expected call data for creating root gauge on mainnet + const expectedCallDataRoot = + rootGaugeFactory.interface.encodeFunctionData("deploy_gauge", [ + 31337, + GAUGE_SALT, + GAUGE_NAME, + ]) + + await expect( + mockAnycall.callAnyExecute( + anyCallTranslator.address, + ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [childGaugeFactory.address, callData], + ), + ), + ) + .to.emit(mockAnycall, "AnyCallMessage") + .withArgs( + anyCallTranslator.address, + ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [childGaugeFactory.address, expectedCallDataRoot], + ), + ZERO_ADDRESS, + 1, // Expect the message to be sent to mainnet + 0, + ) + .and.emit(childGaugeFactory, "DeployedGauge") + }) + + it("AnyCall -> AnyCallTranslator -> ChildOracle.receive()", async () => { + // Pretend this is data that was sent from mainnet + const userPoint = { + bias: 1, + slope: 2, + ts: 3, + } + + const globalPoint = { + bias: 4, + slope: 5, + ts: 6, + } + + // receive((int128,int128,uint256),(int128,int128,uint256),address) + const callData = childOracle.interface.encodeFunctionData("recieve", [ + userPoint, + globalPoint, + users[0], + ]) + + await expect( + mockAnycall.callAnyExecute( + anyCallTranslator.address, + ethers.utils.defaultAbiCoder.encode( + ["address", "bytes"], + [childOracle.address, callData], + ), + ), + ) + .to.emit(childOracle, "Recieve") + .withArgs( + Object.values(userPoint), + Object.values(globalPoint), + users[0], + ) + }) + }) +}) diff --git a/test/xchainGauges/arbitrumBridger.ts b/test/xchainGauges/arbitrumBridger.ts new file mode 100644 index 00000000..bc60f0f0 --- /dev/null +++ 
b/test/xchainGauges/arbitrumBridger.ts @@ -0,0 +1,99 @@
+import chai from "chai"
+import { ContractFactory, Signer } from "ethers"
+import { deployments, network } from "hardhat"
+import {
+  AnyCallTranslator,
+  ArbitrumBridger,
+  ChildGauge,
+  ChildGaugeFactory,
+  GenericERC20,
+  LPToken,
+  RewardForwarder,
+} from "../../build/typechain"
+import { ALCHEMY_BASE_URL, CHAIN_ID } from "../../utils/network"
+
+import { setTimestamp } from "../testUtils"
+const { execute } = deployments
+
+const { expect } = chai
+
+describe("ArbitrumBridger", () => {
+  let signers: Array<Signer>
+  let users: string[]
+  let user1: Signer
+  let deployer: Signer
+  let rewardForwarder: RewardForwarder
+  let testToken: LPToken
+  let firstGaugeToken: GenericERC20
+  let lpTokenFactory: ContractFactory
+  let childGaugeFactory: ChildGaugeFactory
+  let arbitrumBridger: ArbitrumBridger
+  let anycallTranslator: AnyCallTranslator
+  let childGauge: ChildGauge
+  let SDLAddr: string
+
+  const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
+  const gasLimit = 1000000
+  const gasPrice = 990000000
+  const maxSubmissionCost = 10000000000000
+
+  const setupTest = deployments.createFixture(
+    async ({ deployments, ethers }) => {
+      await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments
+
+      signers = await ethers.getSigners()
+      user1 = signers[1]
+      users = await Promise.all(
+        signers.map(async (signer) => signer.getAddress()),
+      )
+      // Store the SDL address at the describe level so the tests below can read it
+      SDLAddr = (await ethers.getContract("SDL")).address
+      console.log("SDLAddr: ", SDLAddr)
+      // 0xe7f1725E7734CE288F8367e1Bb143E90bb3F0512
+
+      // Deploy child gauge
+
+      const bridgerFactory = await ethers.getContractFactory("ArbitrumBridger")
+
+      // TODO: below fails
+      arbitrumBridger = (await bridgerFactory.deploy(
+        gasLimit,
+        gasPrice,
+        maxSubmissionCost,
+        (
+          await ethers.getContract("SDL")
+        ).address,
+      )) as ArbitrumBridger
+    },
+  )
+
+  // Fork mainnet once before this suite runs
+  before(async () => {
+    await network.provider.request({
+      method: "hardhat_reset",
+      params: [
+        {
+          forking: {
+            jsonRpcUrl:
+              ALCHEMY_BASE_URL[CHAIN_ID.MAINNET] + process.env.ALCHEMY_API_KEY,
+            blockNumber: 11598050,
+          },
+        },
+      ],
+    })
+
+    await setTimestamp(1609896169)
+  })
+
+  beforeEach(async () => {
+    await setupTest()
+  })
+
+  describe("Arbitrum Bridger", () => {
+    it(`Successfully initializes with cost`, async () => {
+      console.log(SDLAddr)
+      expect(await arbitrumBridger.cost()).to.eq(
+        gasLimit * gasPrice + maxSubmissionCost,
+      )
+    })
+  })
+})
diff --git a/test/xchainGauges/childGaugeFactory.ts b/test/xchainGauges/childGaugeFactory.ts
new file mode 100644
index 00000000..b7d11a11
--- /dev/null
+++ b/test/xchainGauges/childGaugeFactory.ts
@@ -0,0 +1,122 @@
+import { anyValue } from "@nomicfoundation/hardhat-chai-matchers/withArgs"
+import chai from "chai"
+import { Signer, utils } from "ethers"
+import { deployments } from "hardhat"
+import {
+  AnyCallTranslator,
+  ChildGauge,
+  ChildGaugeFactory,
+  ChildOracle,
+  GenericERC20,
+} from "../../build/typechain"
+import { convertGaugeNameToSalt, ZERO_ADDRESS } from "../testUtils"
+import {
+  setupAnyCallTranslator,
+  setupChildGaugeFactory,
+  setupChildOracle,
+} from "./utils"
+
+const { expect } = chai
+const saltBytes = utils.formatBytes32String("0")
+
+describe("ChildGaugeFactory", () => {
+  let signers: Array<Signer>
+  let users: string[]
+  let user1: Signer
+  let deployer: Signer
+  let childGaugeFactory: ChildGaugeFactory
+  let childGauge: ChildGauge
+  let dummyToken: GenericERC20
+  let anyCallTranslator: AnyCallTranslator
+
let childOracle: ChildOracle + + const MOCK_ADDRESS = "0x1B4ab394327FDf9524632dDf2f0F04F9FA1Fe2eC" + const TEST_BYTES = + "0x7465737400000000000000000000000000000000000000000000000000000000" + + const TEST_GAUGE_NAME = "USD pool" + const TEST_ADDRESS = "0x00000000000000000000000000000000DeaDBeef" + + const setupTest = deployments.createFixture( + async ({ deployments, ethers }) => { + await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments + + signers = await ethers.getSigners() + users = await Promise.all( + signers.map(async (signer) => signer.getAddress()), + ) + + const contracts = await setupAnyCallTranslator(users[0]) + anyCallTranslator = contracts.anyCallTranslator + + // **** Setup ChildOracle **** + childOracle = await setupChildOracle(anyCallTranslator.address) + + // **** Setup ChildGaugeFactory **** + childGaugeFactory = await setupChildGaugeFactory( + anyCallTranslator.address, + users[0], + childOracle.address, + ) + + // **** Add expected callers to known callers **** + await anyCallTranslator.addKnownCallers([childGaugeFactory.address]) + + dummyToken = (await ethers + .getContractFactory("GenericERC20") + .then((f) => f.deploy("Dummy Token", "DUMMY", 18))) as GenericERC20 + }, + ) + + beforeEach(async () => { + await setupTest() + }) + + describe("Initialize ChildGaugeFactory", () => { + it(`Successfully sets child gauge implementation`, async () => { + await expect(childGaugeFactory.set_implementation(TEST_ADDRESS)) + .to.emit(childGaugeFactory, "UpdateImplementation") + .withArgs(anyValue, TEST_ADDRESS) + expect(await childGaugeFactory.get_implementation()).to.eq(TEST_ADDRESS) + }) + it(`Successfully access checks when setting root gauge implementation`, async () => { + await expect( + childGaugeFactory.connect(signers[1]).set_implementation(TEST_ADDRESS), + ).to.be.reverted + }) + it(`Successfully sets voting escrow implementation`, async () => { + await expect(childGaugeFactory.set_voting_escrow(TEST_ADDRESS)) + .to.emit(childGaugeFactory, "UpdateVotingEscrow") + .withArgs(anyValue, TEST_ADDRESS) + expect(await childGaugeFactory.voting_escrow()).to.eq(TEST_ADDRESS) + }) + it("Successfully access checks sets voting escrow implementation", async () => { + await expect( + childGaugeFactory.connect(signers[1]).set_voting_escrow(MOCK_ADDRESS), + ).to.be.reverted + }) + }) + describe("Successfully deploys a child gauge", () => { + it(`Successfully deploys a child gauge`, async () => { + await expect( + childGaugeFactory["deploy_gauge(address,bytes32,string,address)"]( + dummyToken.address, + convertGaugeNameToSalt(TEST_GAUGE_NAME), + TEST_GAUGE_NAME, + users[0], + ), + ) + .to.emit(childGaugeFactory, "DeployedGauge") + .withArgs( + anyValue, // implementation address + dummyToken.address, + anyValue, // msg.sender, pool creation requestor + convertGaugeNameToSalt(TEST_GAUGE_NAME), + anyValue, // deployed gauge address + TEST_GAUGE_NAME, + ) + + expect(await childGaugeFactory.get_gauge(0)).to.not.eq(ZERO_ADDRESS) + }) + }) +}) diff --git a/test/xchainGauges/rewardForwarder.ts b/test/xchainGauges/rewardForwarder.ts new file mode 100644 index 00000000..5989bebf --- /dev/null +++ b/test/xchainGauges/rewardForwarder.ts @@ -0,0 +1,151 @@ +import chai from "chai" +import { ContractFactory, Signer } from "ethers" +import { deployments } from "hardhat" +import { + ChildGaugeFactory, + LPToken, + RewardForwarder, + AnyCallTranslator, + ChildGauge, + GenericERC20, +} from "../../build/typechain" + +import { BIG_NUMBER_1E18 } from 
"../testUtils" +const { execute } = deployments + +const { expect } = chai + +describe("RewardForwarder", () => { + let signers: Array + let users: string[] + let user1: Signer + let deployer: Signer + let rewardForwarder: RewardForwarder + let testToken: LPToken + let firstGaugeToken: GenericERC20 + let lpTokenFactory: ContractFactory + let childGaugeFactory: ChildGaugeFactory + let anycallTranslator: AnyCallTranslator + let childGauge: ChildGauge + + const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" + + const setupTest = deployments.createFixture( + async ({ deployments, ethers }) => { + await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments + + signers = await ethers.getSigners() + user1 = signers[1] + users = await Promise.all( + signers.map(async (signer) => signer.getAddress()), + ) + + // Deploy child gauge + const childGaugeFactoryFactory = await ethers.getContractFactory( + "ChildGaugeFactory", + ) + + childGaugeFactory = (await childGaugeFactoryFactory.deploy( + ZERO_ADDRESS, + ( + await ethers.getContract("SDL") + ).address, + users[0], + )) as ChildGaugeFactory + + // Root Gauge Implementation + const gaugeImplementationFactory = await ethers.getContractFactory( + "ChildGauge", + ) + childGauge = (await gaugeImplementationFactory.deploy( + ( + await ethers.getContract("SDL") + ).address, + childGaugeFactory.address, + )) as ChildGauge + + // Reward Forwarder Deployment + // Root Gauge Implementation + const rewardFowarderFactory = await ethers.getContractFactory( + "RewardForwarder", + ) + rewardForwarder = (await rewardFowarderFactory.deploy( + childGauge.address, + )) as RewardForwarder + + // Deploy dummy tokens + lpTokenFactory = await ethers.getContractFactory("LPToken") + const erc20Factory = await ethers.getContractFactory("GenericERC20") + firstGaugeToken = (await erc20Factory.deploy( + "First Gauge Token", + "GFIRST", + "18", + )) as GenericERC20 + await firstGaugeToken.mint(users[0], BIG_NUMBER_1E18) + }, + ) + + beforeEach(async () => { + await setupTest() + }) + + describe("Initialize RewardForwarder", () => { + it(`Successfully initializes with gauge`, async () => { + expect(await rewardForwarder.gauge()).to.eq(childGauge.address) + }) + }) + describe("Successfully deposits in RewardForwarder", () => { + it(`Successfully deposits lp token`, async () => { + testToken = (await lpTokenFactory.deploy()) as LPToken + testToken.initialize("Gauge Test Token", "GT") + await testToken.mint(users[0], 100) + // TODO: Property 'deposit' does not exist on type 'ChildGauge', so have to use execute + // following does not work await childGauge.deposit(100) + // TODO: Execute cannot find the deployment + // await execute( + // "ChildGauge", + // { from: users[0], log: true }, + // "deposit(uint256)", + // 100, + // ) + }) + it(`Successfully adds reward`, async () => { + const firstGaugeTokenAddr = firstGaugeToken.address + const rewardForwarderAddr = rewardForwarder.address + await childGauge.add_reward(firstGaugeTokenAddr, rewardForwarderAddr) + await firstGaugeToken.transfer(rewardForwarderAddr, BIG_NUMBER_1E18) + await rewardForwarder.allow(firstGaugeTokenAddr) + await rewardForwarder + .connect(user1) + .depositRewardToken(firstGaugeTokenAddr) + expect(await firstGaugeToken.balanceOf(childGauge.address)).to.be.eq( + BIG_NUMBER_1E18, + ) + expect( + (await childGauge.reward_data(firstGaugeTokenAddr))["rate"], + ).to.be.gt(0) + }) + it(`Reverts deposit without allow`, async () => { + const 
firstGaugeTokenAddr = firstGaugeToken.address + const rewardForwarderAddr = rewardForwarder.address + await childGauge.add_reward(firstGaugeTokenAddr, rewardForwarderAddr) + await firstGaugeToken.transfer(rewardForwarderAddr, BIG_NUMBER_1E18) + console.log("...") + // rewardForwarder cannot deposit if allow(token) is not called + await expect( + rewardForwarder.depositRewardToken(firstGaugeTokenAddr), + ).to.be.revertedWith("ERC20: transfer amount exceeds allowance") + }) + it(`Reverts if reward token is not added to gauge`, async () => { + const firstGaugeTokenAddr = firstGaugeToken.address + const rewardForwarderAddr = rewardForwarder.address + await firstGaugeToken.transfer(rewardForwarderAddr, BIG_NUMBER_1E18) + await rewardForwarder.allow(firstGaugeTokenAddr) + // token cannot be deposited without being added as a reward first + await expect( + rewardForwarder.connect(user1).depositRewardToken(firstGaugeTokenAddr), + ).to.be.reverted + }) + // TODO: add test for claiming rewards + }) +}) diff --git a/test/xchainGauges/rootGauge.ts b/test/xchainGauges/rootGauge.ts new file mode 100644 index 00000000..8d72f52f --- /dev/null +++ b/test/xchainGauges/rootGauge.ts @@ -0,0 +1,68 @@ +import chai from "chai" +import { ContractFactory, Signer } from "ethers" +import { deployments } from "hardhat" +import { + AnyCallTranslator, + MockAnyCall, + MockBridger, + RootGauge, + RootGaugeFactory, +} from "../../build/typechain" + +const { expect } = chai + +describe("Root_Gauge", () => { + let signers: Array + let users: string[] + let user1: Signer + let deployer: Signer + let rootGaugeFactory: RootGaugeFactory + let rootGauge: RootGauge + let anycallTranslator: AnyCallTranslator + let mockBridger: MockBridger + let mockAnyCall: MockAnyCall + let lpTokenFactory: ContractFactory + let sampleLPToken: string + + const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" + const NON_ZERO_ADDRESS = "0x0C8BAe14c9f9BF2c953997C881BEfaC7729FD314" + + const setupTest = deployments.createFixture( + async ({ deployments, ethers }) => { + await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments + + signers = await ethers.getSigners() + user1 = signers[1] + users = await Promise.all( + signers.map(async (signer) => signer.getAddress()), + ) + + // Root Gauge Implementation + const gaugeImplementationFactory = await ethers.getContractFactory( + "RootGauge", + ) + rootGauge = (await gaugeImplementationFactory.deploy( + ( + await ethers.getContract("SDL") + ).address, + ( + await ethers.getContract("GaugeController") + ).address, + ( + await ethers.getContract("Minter") + ).address, + )) as RootGauge + + // TODO: Root Gauge Initialize fails because "already initialized", however we did not initialize + // await rootGauge.initialize(NON_ZERO_ADDRESS, 11, "Test") + }, + ) + + beforeEach(async () => { + await setupTest() + }) + + describe("Tests Checkpoint", () => { + it(`deploys`, async () => true) + }) +}) diff --git a/test/xchainGauges/rootGaugeFactory.ts b/test/xchainGauges/rootGaugeFactory.ts new file mode 100644 index 00000000..de8ba01d --- /dev/null +++ b/test/xchainGauges/rootGaugeFactory.ts @@ -0,0 +1,97 @@ +import { anyValue } from "@nomicfoundation/hardhat-chai-matchers/withArgs" +import chai from "chai" +import { Signer } from "ethers" +import { deployments } from "hardhat" +import { AnyCallTranslator, RootGaugeFactory } from "../../build/typechain" +import { convertGaugeNameToSalt, ZERO_ADDRESS } from "../testUtils" +import { + TEST_SIDE_CHAIN_ID, 
+ setupAnyCallTranslator, + setupRootGaugeFactory, +} from "./utils" + +const { expect } = chai + +describe("RootGaugeFactory", () => { + let signers: Array + let users: string[] + let user1: Signer + let rootGaugeFactory: RootGaugeFactory + let anyCallTranslator: AnyCallTranslator + + const TEST_GAUGE_NAME = "USD pool" + const TEST_ADDRESS = "0x00000000000000000000000000000000DeaDBeef" + + const setupTest = deployments.createFixture( + async ({ deployments, ethers }) => { + await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments + + signers = await ethers.getSigners() + user1 = signers[1] + users = await Promise.all( + signers.map(async (signer) => signer.getAddress()), + ) + + const contracts = await setupAnyCallTranslator(users[0]) + anyCallTranslator = contracts.anyCallTranslator + + // **** Setup rootGauge Factory **** + + rootGaugeFactory = await setupRootGaugeFactory( + anyCallTranslator.address, + users[0], + ) + await anyCallTranslator.addKnownCallers([rootGaugeFactory.address]) + }, + ) + + beforeEach(async () => { + await setupTest() + }) + + describe("Initialize RootGaugeFactory", () => { + it(`Successfully sets root gauge implementation`, async () => { + await expect(rootGaugeFactory.set_implementation(TEST_ADDRESS)) + .to.emit(rootGaugeFactory, "UpdateImplementation") + .withArgs(anyValue, TEST_ADDRESS) + expect(await rootGaugeFactory.get_implementation()).to.eq(TEST_ADDRESS) + }) + it(`Successfully access checks when setting root gauge implementation`, async () => { + await expect( + rootGaugeFactory.connect(user1).set_implementation(TEST_ADDRESS), + ).to.be.reverted + }) + it(`Successfully sets bridger`, async () => { + await expect( + rootGaugeFactory.set_bridger(TEST_SIDE_CHAIN_ID, ZERO_ADDRESS), + ) + .to.emit(rootGaugeFactory, "BridgerUpdated") + .withArgs(TEST_SIDE_CHAIN_ID, anyValue, ZERO_ADDRESS) + expect(await rootGaugeFactory.get_bridger(TEST_SIDE_CHAIN_ID)).to.eq( + ZERO_ADDRESS, + ) + }) + it(`Successfully access checks when setting bridger`, async () => { + await expect( + rootGaugeFactory + .connect(user1) + .set_bridger(TEST_SIDE_CHAIN_ID, ZERO_ADDRESS), + ).to.be.reverted + }) + }) + describe("deploy_gauge", () => { + it(`Successfully deploys a root gauge`, async () => { + await rootGaugeFactory.deploy_gauge( + TEST_SIDE_CHAIN_ID, + convertGaugeNameToSalt(TEST_GAUGE_NAME), + TEST_GAUGE_NAME, + ) + expect(await rootGaugeFactory.get_gauge_count(TEST_SIDE_CHAIN_ID)).to.eq( + 1, + ) + expect(await rootGaugeFactory.get_gauge(TEST_SIDE_CHAIN_ID, 0)).to.not.eq( + ZERO_ADDRESS, + ) + }) + }) +}) diff --git a/test/xchainGauges/rootOracle.ts b/test/xchainGauges/rootOracle.ts new file mode 100644 index 00000000..1c45ceb9 --- /dev/null +++ b/test/xchainGauges/rootOracle.ts @@ -0,0 +1,123 @@ +import chai from "chai" +import { ContractFactory, Signer } from "ethers" +import { deployments, ethers } from "hardhat" +import { + AnyCallTranslator, + ArbitrumBridger, + ChildGauge, + GenericERC20, + LPToken, + MockAnyCall, + RewardForwarder, + RootGaugeFactory, + RootOracle, + SDL, + VotingEscrow, +} from "../../build/typechain" +import { MAX_LOCK_TIME } from "../../utils/time" + +import { + BIG_NUMBER_1E18, + getCurrentBlockTimestamp, + MAX_UINT256, + setTimestamp, +} from "../testUtils" +import { + setupAnyCallTranslator, + setupRootGaugeFactory, + setupRootOracle, +} from "./utils" +const { execute } = deployments + +const { expect } = chai + +describe("RootOracle", () => { + let signers: Array + let users: string[] + let 
user1: Signer
+  let deployer: Signer
+  let rewardForwarder: RewardForwarder
+  let testToken: LPToken
+  let firstGaugeToken: GenericERC20
+  let lpTokenFactory: ContractFactory
+  let rootGaugeFactory: RootGaugeFactory
+  let arbitrumBridger: ArbitrumBridger
+  let anyCallTranslator: AnyCallTranslator
+  let childGauge: ChildGauge
+  let rootOracle: RootOracle
+  let mockAnycall: MockAnyCall
+  let veSDL: VotingEscrow
+  let sdl: SDL
+
+  const ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
+  const WEEK = 86400 * 7
+  const MAXTIME = 86400 * 365 * 4
+  const anyCallAddress = "0xC10Ef9F491C9B59f936957026020C321651ac078"
+
+  const setupTest = deployments.createFixture(
+    async ({ deployments, ethers }) => {
+      await deployments.fixture(["veSDL"], { fallbackToGlobal: false }) // ensure you start from a fresh deployments
+
+      signers = await ethers.getSigners()
+      user1 = signers[1]
+      users = await Promise.all(
+        signers.map(async (signer) => signer.getAddress()),
+      )
+
+      const contracts = await setupAnyCallTranslator(users[0])
+      anyCallTranslator = contracts.anyCallTranslator
+
+      // **** Setup rootGauge Factory ****
+
+      rootGaugeFactory = await setupRootGaugeFactory(
+        anyCallTranslator.address,
+        users[0],
+      )
+
+      // **** Setup RootOracle ****
+      rootOracle = await setupRootOracle(
+        anyCallTranslator.address,
+        rootGaugeFactory.address,
+      )
+
+      // **** Add expected callers to known callers ****
+      await anyCallTranslator.addKnownCallers([
+        rootGaugeFactory.address,
+        rootOracle.address,
+      ])
+
+      // Set timestamp to start of the week
+      await setTimestamp(
+        Math.floor(((await getCurrentBlockTimestamp()) + WEEK) / WEEK) * WEEK,
+      )
+
+      // Create max lock from deployer address
+      veSDL = await ethers.getContract("VotingEscrow")
+      await ethers
+        .getContract("SDL")
+        .then((sdl) => (sdl as SDL).approve(veSDL.address, MAX_UINT256))
+      await veSDL.create_lock(
+        BIG_NUMBER_1E18.mul(10_000_000),
+        (await getCurrentBlockTimestamp()) + MAX_LOCK_TIME,
+      )
+    },
+  )
+
+  beforeEach(async () => {
+    await setupTest()
+  })
+
+  describe("constructor", () => {
+    it(`Successfully sets FACTORY`, async () => {
+      expect(await rootOracle.FACTORY()).to.eq(rootGaugeFactory.address)
+    })
+    it(`Successfully sets VE`, async () => {
+      expect(await rootOracle.VE()).to.eq(
+        (await ethers.getContract("VotingEscrow")).address,
+      )
+    })
+    it(`Successfully sets callProxy`, async () => {
+      expect(await rootOracle.callProxy()).to.eq(anyCallTranslator.address)
+    })
+  })
+})
diff --git a/test/xchainGauges/utils.ts b/test/xchainGauges/utils.ts
new file mode 100644
index 00000000..7adfc65e
--- /dev/null
+++ b/test/xchainGauges/utils.ts
@@ -0,0 +1,153 @@
+import { ethers } from "hardhat"
+import {
+  AnyCallTranslator,
+  ChildGauge,
+  ChildGaugeFactory,
+  ChildOracle,
+  MockAnyCall,
+  RootGaugeFactory,
+  RootOracle,
+  TransparentUpgradeableProxy__factory,
+} from "../../build/typechain"
+
+export const TEST_SIDE_CHAIN_ID = 11
+
+export async function setupChildGaugeFactory(
+  anyCallTranslatorAddress: string,
+  ownerAddress: string,
+  childOracleAddress: string,
+): Promise<ChildGaugeFactory> {
+  // Child Gauge factory
+  const childGaugeFactoryFactory = await ethers.getContractFactory(
+    "ChildGaugeFactory",
+  )
+  const childGaugeFactory = (await childGaugeFactoryFactory.deploy(
+    anyCallTranslatorAddress,
+    (
+      await ethers.getContract("SDL")
+    ).address,
+    ownerAddress,
+  )) as ChildGaugeFactory
+
+  // Child Gauge Implementation
+  const gaugeImplementationFactory = await ethers.getContractFactory(
+    "ChildGauge",
+  )
+  const childGauge = (await gaugeImplementationFactory.deploy(
+    (
+      await ethers.getContract("SDL")
+    ).address,
+    childGaugeFactory.address,
+  )) as ChildGauge
+
+  await childGaugeFactory.set_implementation(childGauge.address)
+  await childGaugeFactory.set_voting_escrow(childOracleAddress)
+
+  return childGaugeFactory
+}
+
+export async function setupRootGaugeFactory(
+  anyCallTranslatorAddress: string,
+  ownerAddress: string,
+): Promise<RootGaugeFactory> {
+  const rootGaugeFactoryFactory = await ethers.getContractFactory(
+    "RootGaugeFactory",
+  )
+  const rootGaugeFactory = (await rootGaugeFactoryFactory.deploy(
+    anyCallTranslatorAddress,
+    ownerAddress,
+  )) as RootGaugeFactory
+
+  const mockBridgerFactory = await ethers.getContractFactory("MockBridger")
+  const mockBridger = await mockBridgerFactory.deploy()
+  // Set Bridger to mock bridger
+  await rootGaugeFactory.set_bridger(TEST_SIDE_CHAIN_ID, mockBridger.address)
+
+  // Root Gauge Implementation
+  const gaugeImplementationFactory = await ethers.getContractFactory(
+    "RootGauge",
+  )
+  const rootGauge = await gaugeImplementationFactory.deploy(
+    (
+      await ethers.getContract("SDL")
+    ).address,
+    (
+      await ethers.getContract("GaugeController")
+    ).address,
+    (
+      await ethers.getContract("Minter")
+    ).address,
+  )
+  await rootGaugeFactory.set_implementation(rootGauge.address)
+
+  return rootGaugeFactory
+}
+
+export async function setupAnyCallTranslator(ownerAddress: string): Promise<{
+  anyCallTranslator: AnyCallTranslator
+  mockAnycall: MockAnyCall
+}> {
+  // Deploy mock anycall
+  const mockAnycallFactory = await ethers.getContractFactory("MockAnyCall")
+  const mockAnycall = (await mockAnycallFactory.deploy()) as MockAnyCall
+
+  // Deploy ProxyAdmin
+  const proxyAdminFactory = await ethers.getContractFactory("ProxyAdmin")
+  const proxyAdmin = await proxyAdminFactory.deploy()
+
+  // Deploy AnycallTranslator with mock anycall
+  const anycallTranslatorFactory = await ethers.getContractFactory(
+    "AnyCallTranslator",
+  )
+  const anycallTranslatorLogic =
+    (await anycallTranslatorFactory.deploy()) as AnyCallTranslator
+
+  // Deploy the proxy that will be used as AnycallTranslator
+  // Encode the initialize call so the deployer ends up as the owner
+  const initializeCallData = (
+    await anycallTranslatorLogic.populateTransaction.initialize(
+      ownerAddress,
+      mockAnycall.address,
+    )
+  ).data as string
+
+  // Deploy the proxy with anycall translator logic and initialize it
+  const proxyFactory: TransparentUpgradeableProxy__factory =
+    await ethers.getContractFactory("TransparentUpgradeableProxy")
+  const proxy = await proxyFactory.deploy(
+    anycallTranslatorLogic.address,
+    proxyAdmin.address,
+    initializeCallData,
+  )
+  const anyCallTranslator = (await ethers.getContractAt(
+    "AnyCallTranslator",
+    proxy.address,
+  )) as AnyCallTranslator
+
+  await mockAnycall.setanyCallTranslator(anyCallTranslator.address)
+  return { anyCallTranslator, mockAnycall }
+}
+
+export async function setupRootOracle(
+  anyCallTranslatorAddress: string,
+  rootGaugeFactoryAddress: string,
+): Promise<RootOracle> {
+  const rootOracleFactory = await ethers.getContractFactory("RootOracle")
+
+  const rootOracle = (await rootOracleFactory.deploy(
+    rootGaugeFactoryAddress,
+    (
+      await ethers.getContract("VotingEscrow")
+    ).address,
+    anyCallTranslatorAddress,
+  )) as RootOracle
+  return rootOracle
+}
+
+export async function setupChildOracle(
+  anyCallTranslatorAddress: string,
+): Promise<ChildOracle> {
+  const childOracleFactory = await ethers.getContractFactory("ChildOracle")
+  const childOracle = await
childOracleFactory.deploy(anyCallTranslatorAddress) + return childOracle as ChildOracle +} diff --git a/utils/accounts.ts b/utils/accounts.ts index 2183b443..68a58134 100644 --- a/utils/accounts.ts +++ b/utils/accounts.ts @@ -26,3 +26,6 @@ export const FRAX_MULTISIG_ADDRESSES = { export const PROD_DEPLOYER_ADDRESS = "0x5BDb37d0Ddea3A90F233c7B7F6b9394B6b2eef34" + +// https://docs.multichain.org/developer-guide/anycall/anycall-v6/how-to-integrate-anycall-v6 +export const ANYCALL_ADDRESS = "0xC10Ef9F491C9B59f936957026020C321651ac078" diff --git a/utils/time.ts b/utils/time.ts index 890a54ee..a71ed2d9 100644 --- a/utils/time.ts +++ b/utils/time.ts @@ -19,3 +19,7 @@ export function timestampToUTCString(timestamp: BigNumberish): string { timestamp = BigNumber.from(timestamp).toNumber() return new Date(timestamp * 1000).toUTCString() } + +export const DAY = 86400 +export const WEEK = DAY * 7 +export const MAX_LOCK_TIME = DAY * 365 * 4