From fa80c06bb3d67699dbbe63efd3788622ea5caec2 Mon Sep 17 00:00:00 2001 From: viatrix Date: Thu, 11 Dec 2025 23:50:38 +0200 Subject: [PATCH 01/27] Add Arbitrum gateway --- contracts/Repayer.sol | 19 +- .../interfaces/IArbitrumGatewayRouter.sol | 41 ++ contracts/interfaces/IRoute.sol | 3 +- contracts/testing/TestArbitrum.sol | 42 ++ contracts/testing/TestRepayer.sol | 6 +- contracts/utils/ArbitrumGatewayAdapter.sol | 57 ++ network.config.ts | 4 + scripts/common.ts | 2 + specific-fork-test/ethereum/Repayer.ts | 280 ++++++- test/Repayer.ts | 697 +++++++++++++++++- 10 files changed, 1129 insertions(+), 22 deletions(-) create mode 100644 contracts/interfaces/IArbitrumGatewayRouter.sol create mode 100644 contracts/testing/TestArbitrum.sol create mode 100644 contracts/utils/ArbitrumGatewayAdapter.sol diff --git a/contracts/Repayer.sol b/contracts/Repayer.sol index 7de894b..9c02afc 100644 --- a/contracts/Repayer.sol +++ b/contracts/Repayer.sol @@ -14,6 +14,7 @@ import {AcrossAdapter} from "./utils/AcrossAdapter.sol"; import {StargateAdapter} from "./utils/StargateAdapter.sol"; import {EverclearAdapter} from "./utils/EverclearAdapter.sol"; import {SuperchainStandardBridgeAdapter} from "./utils/SuperchainStandardBridgeAdapter.sol"; +import {ArbitrumGatewayAdapter} from "./utils/ArbitrumGatewayAdapter.sol"; import {ERC7201Helper} from "./utils/ERC7201Helper.sol"; /// @title Performs repayment to Liquidity Pools on same/different chains. 
@@ -28,7 +29,8 @@ contract Repayer is AcrossAdapter, StargateAdapter, EverclearAdapter, - SuperchainStandardBridgeAdapter + SuperchainStandardBridgeAdapter, + ArbitrumGatewayAdapter { using SafeERC20 for IERC20; using BitMaps for BitMaps.BitMap; @@ -95,13 +97,15 @@ contract Repayer is address wrappedNativeToken, address stargateTreasurer, address optimismBridge, - address baseBridge + address baseBridge, + address arbitrumGatewayRouter ) CCTPAdapter(cctpTokenMessenger, cctpMessageTransmitter) AcrossAdapter(acrossSpokePool) StargateAdapter(stargateTreasurer) EverclearAdapter(everclearFeeAdapter) SuperchainStandardBridgeAdapter(optimismBridge, baseBridge, wrappedNativeToken) + ArbitrumGatewayAdapter(arbitrumGatewayRouter) { ERC7201Helper.validateStorageLocation( STORAGE_LOCATION, @@ -225,6 +229,17 @@ contract Repayer is DOMAIN, $.inputOutputTokens[address(token)] ); + } else + if (provider == Provider.ARBITRUM_GATEWAY) { + initiateTransferArbitrum( + token, + amount, + destinationPool, + destinationDomain, + extraData, + DOMAIN, + $.inputOutputTokens[address(token)] + ); } else { // Unreachable atm, but could become so when more providers are added to enum. revert UnsupportedProvider(); diff --git a/contracts/interfaces/IArbitrumGatewayRouter.sol b/contracts/interfaces/IArbitrumGatewayRouter.sol new file mode 100644 index 0000000..08d9248 --- /dev/null +++ b/contracts/interfaces/IArbitrumGatewayRouter.sol @@ -0,0 +1,41 @@ +// SPDX-License-Identifier: MIT + +pragma solidity 0.8.28; + +/** + * @title Interface for Arbitrum Gateway Router + */ +interface IArbitrumGatewayRouter { + + event TransferRouted( + address indexed token, + address indexed _userFrom, + address indexed _userTo, + address gateway + ); + + /** + * @notice For new versions of gateways it's recommended to use outboundTransferCustomRefund() method. 
+ * @notice Some legacy gateways (for example, DAI) don't have the outboundTransferCustomRefund method + * @notice so using outboundTransfer() method is a universal solution + */ + function outboundTransfer( + address _token, + address _to, + uint256 _amount, + uint256 _maxGas, + uint256 _gasPriceBid, + bytes calldata _data + ) external payable returns (bytes memory); + + /** + * @notice Calculate the address used when bridging an ERC20 token + * @dev the L1 and L2 address oracles may not always be in sync. + * For example, a custom token may have been registered but not deploy or the contract self destructed. + * @param l1ERC20 address of L1 token + * @return L2 address of a bridged ERC20 token + */ + function calculateL2TokenAddress(address l1ERC20) external view returns (address); + + function getGateway(address _token) external view returns (address gateway); +} diff --git a/contracts/interfaces/IRoute.sol b/contracts/interfaces/IRoute.sol index bcf2aab..3d7cf4c 100644 --- a/contracts/interfaces/IRoute.sol +++ b/contracts/interfaces/IRoute.sol @@ -27,7 +27,8 @@ interface IRoute { ACROSS, STARGATE, EVERCLEAR, - SUPERCHAIN_STANDARD_BRIDGE + SUPERCHAIN_STANDARD_BRIDGE, + ARBITRUM_GATEWAY } enum PoolType { diff --git a/contracts/testing/TestArbitrum.sol b/contracts/testing/TestArbitrum.sol new file mode 100644 index 0000000..aef0a78 --- /dev/null +++ b/contracts/testing/TestArbitrum.sol @@ -0,0 +1,42 @@ +// SPDX-License-Identifier: LGPL-3.0-only +pragma solidity 0.8.28; + +import {IERC20, SafeERC20} from "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; +import {IArbitrumGatewayRouter} from "../interfaces/IArbitrumGatewayRouter.sol"; + +contract TestArbitrumGatewayRouter is IArbitrumGatewayRouter { + + address public immutable LOCAL_TOKEN; + address public immutable L2_TOKEN; + + error InvalidToken(); + error SimulatedRevert(); + + constructor(address _localtoken, address _l2token) { + LOCAL_TOKEN = _localtoken; + L2_TOKEN = _l2token; + } + + function 
calculateL2TokenAddress(address) external view override returns (address) { + return L2_TOKEN; + } + + function getGateway(address) external view returns (address gateway) { + return address(this); + } + + function outboundTransfer( + address _token, + address _to, + uint256 _amount, + uint256, + uint256, + bytes calldata + ) external payable returns (bytes memory) { + require(_token == LOCAL_TOKEN, InvalidToken()); + require(_amount != 2000, SimulatedRevert()); + SafeERC20.safeTransferFrom(IERC20(LOCAL_TOKEN), msg.sender, address(this), _amount); + emit TransferRouted(LOCAL_TOKEN, msg.sender, _to, address(this)); + return "GATEWAY_DATA"; + } +} diff --git a/contracts/testing/TestRepayer.sol b/contracts/testing/TestRepayer.sol index 2d9f939..73d3649 100644 --- a/contracts/testing/TestRepayer.sol +++ b/contracts/testing/TestRepayer.sol @@ -14,7 +14,8 @@ contract TestRepayer is Repayer { address wrappedNativeToken, address stargateTreasurer, address optimismBridge, - address baseBridge + address baseBridge, + address arbitrumGatewayRouter ) Repayer( localDomain, assets, @@ -25,7 +26,8 @@ contract TestRepayer is Repayer { wrappedNativeToken, stargateTreasurer, optimismBridge, - baseBridge + baseBridge, + arbitrumGatewayRouter ) {} function domainCCTP(Domain destinationDomain) public pure override returns (uint32) { diff --git a/contracts/utils/ArbitrumGatewayAdapter.sol b/contracts/utils/ArbitrumGatewayAdapter.sol new file mode 100644 index 0000000..a7b62b2 --- /dev/null +++ b/contracts/utils/ArbitrumGatewayAdapter.sol @@ -0,0 +1,57 @@ +// SPDX-License-Identifier: LGPL-3.0-only +pragma solidity 0.8.28; + +import {BitMaps} from "@openzeppelin/contracts/utils/structs/BitMaps.sol"; +import {IERC20, SafeERC20} from "@openzeppelin/contracts/token/ERC20/utils/SafeERC20.sol"; +import {IArbitrumGatewayRouter} from ".././interfaces/IArbitrumGatewayRouter.sol"; +import {AdapterHelper} from "./AdapterHelper.sol"; + +abstract contract ArbitrumGatewayAdapter is AdapterHelper { + 
using SafeERC20 for IERC20; + + IArbitrumGatewayRouter immutable public ARBITRUM_GATEWAY_ROUTER; + + event ArbitrumERC20TransferInitiated(bytes gatewayData); + + constructor( + address arbitrumGatewayRouter + ) { + // No check for address(0) to allow deployment on chains where Arbitrum Bridge is not available + ARBITRUM_GATEWAY_ROUTER = IArbitrumGatewayRouter(arbitrumGatewayRouter); + } + + function initiateTransferArbitrum( + IERC20 token, + uint256 amount, + address destinationPool, + Domain destinationDomain, + bytes calldata extraData, + Domain localDomain, + mapping(bytes32 => BitMaps.BitMap) storage outputTokens + ) internal { + // We are only interested in fast L1->L2 bridging, because the reverse is slow. + require(localDomain == Domain.ETHEREUM, UnsupportedDomain()); + require(destinationDomain == Domain.ARBITRUM_ONE, UnsupportedDomain()); + IArbitrumGatewayRouter router = ARBITRUM_GATEWAY_ROUTER; + require(address(router) != address(0), ZeroAddress()); + (address outputToken, uint256 maxGas, uint256 gasPriceBid, bytes memory data) = + abi.decode(extraData, (address, uint256, uint256, bytes)); + + _validateOutputToken(_addressToBytes32(outputToken), destinationDomain, outputTokens); + // Get output token from the gateway + address gatewayOutputToken = router.calculateL2TokenAddress(address(token)); + // Check that output tokens match + require(gatewayOutputToken == outputToken, InvalidOutputToken()); + address gateway = router.getGateway(address(token)); + token.forceApprove(gateway, amount); + bytes memory gatewayData = router.outboundTransfer{value: msg.value}( + address(token), + destinationPool, + amount, + maxGas, + gasPriceBid, + data + ); + emit ArbitrumERC20TransferInitiated(gatewayData); + } +} diff --git a/network.config.ts b/network.config.ts index d589b6b..712f984 100644 --- a/network.config.ts +++ b/network.config.ts @@ -83,6 +83,7 @@ export enum Provider { EVERCLEAR = "EVERCLEAR", STARGATE = "STARGATE", 
SUPERCHAIN_STANDARD_BRIDGE = "SUPERCHAIN_STANDARD_BRIDGE", + ARBITRUM_GATEWAY = "ARBITRUM_GATEWAY", } export enum Token { @@ -164,6 +165,7 @@ export interface NetworkConfig { EverclearFeeAdapter?: string; OptimismStandardBridge?: string; BaseStandardBridge?: string; + ArbitrumGatewayRouter?: string; Tokens: { [Token.USDC]: string; [Token.USDT]?: string; @@ -213,6 +215,7 @@ export const networkConfig: NetworksConfig = { EverclearFeeAdapter: "0xd0185bfb8107c5b2336bC73cE3fdd9Bfb504540e", OptimismStandardBridge: "0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1", BaseStandardBridge: "0x3154Cf16ccdb4C6d922629664174b904d80F2C35", + ArbitrumGatewayRouter: "0x72Ce9c846789fdB6fC1f34aC4AD25Dd9ef7031ef", Tokens: { USDC: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", USDT: "0xdAC17F958D2ee523a2206206994597C13D831ec7", @@ -310,6 +313,7 @@ export const networkConfig: NetworksConfig = { EverclearFeeAdapter: "0xd0185bfb8107c5b2336bC73cE3fdd9Bfb504540e", OptimismStandardBridge: "0x99C9fc46f92E8a1c0deC1b1747d010903E884bE1", BaseStandardBridge: "0x3154Cf16ccdb4C6d922629664174b904d80F2C35", + ArbitrumGatewayRouter: "0x72Ce9c846789fdB6fC1f34aC4AD25Dd9ef7031ef", Tokens: { USDC: "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48", USDT: "0xdAC17F958D2ee523a2206206994597C13D831ec7", diff --git a/scripts/common.ts b/scripts/common.ts index 798fe8c..23461f7 100644 --- a/scripts/common.ts +++ b/scripts/common.ts @@ -48,6 +48,7 @@ export const ProviderSolidity = { STARGATE: 3n, EVERCLEAR: 4n, SUPERCHAIN_STANDARD_BRIDGE: 5n, + ARBITRUM_GATEWAY: 6n, }; export const DomainSolidity = { @@ -93,6 +94,7 @@ export const SolidityProvider: { [n: number]: Provider } = { 3: Provider.STARGATE, 4: Provider.EVERCLEAR, 5: Provider.SUPERCHAIN_STANDARD_BRIDGE, + 6: Provider.ARBITRUM_GATEWAY, }; export const CCTPDomain: { [n: number]: Network } = { diff --git a/specific-fork-test/ethereum/Repayer.ts b/specific-fork-test/ethereum/Repayer.ts index eb21a20..236bfa3 100644 --- a/specific-fork-test/ethereum/Repayer.ts +++ 
b/specific-fork-test/ethereum/Repayer.ts @@ -20,7 +20,7 @@ import {networkConfig} from "../../network.config"; describe("Repayer", function () { const deployAll = async () => { - const [deployer, admin, repayUser, user, setTokensUser] = await hre.ethers.getSigners(); + const [deployer, admin, repayUser, setTokensUser] = await hre.ethers.getSigners(); await setCode(repayUser.address, "0x00"); const forkNetworkConfig = networkConfig.ETHEREUM; @@ -29,6 +29,8 @@ describe("Repayer", function () { const DEPOSIT_PROFIT_ROLE = toBytes32("DEPOSIT_PROFIT_ROLE"); const usdc = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.USDC); + const dai = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.DAI!); + const wbtc = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.WBTC!); const liquidityPool = (await deploy( "TestLiquidityPool", deployer, @@ -69,10 +71,16 @@ describe("Repayer", function () { "ISuperchainStandardBridge", forkNetworkConfig.BaseStandardBridge! ); + const arbitrumGatewayRouter = await hre.ethers.getContractAt( + "IArbitrumGatewayRouter", + forkNetworkConfig.ArbitrumGatewayRouter! 
+ ); const everclearFeeAdapter = await hre.ethers.getContractAt("IFeeAdapterV2", forkNetworkConfig.EverclearFeeAdapter!); const weth = await hre.ethers.getContractAt("IWrappedNativeToken", forkNetworkConfig.WrappedNativeToken); const USDC_DEC = 10n ** (await usdc.decimals()); + const DAI_DEC = 10n ** (await dai.decimals()); + const WBTC_DEC = 10n ** (await wbtc.decimals()); const repayerImpl = ( await deployX("Repayer", deployer, "Repayer", {}, @@ -86,16 +94,23 @@ describe("Repayer", function () { stargateTreasurer, optimismStandardBridge, baseStandardBridge, + arbitrumGatewayRouter, ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( admin, repayUser, setTokensUser, - [liquidityPool, liquidityPool2, liquidityPool, liquidityPool], - [Domain.ETHEREUM, Domain.ETHEREUM, Domain.OP_MAINNET, Domain.BASE], - [Provider.LOCAL, Provider.LOCAL, Provider.SUPERCHAIN_STANDARD_BRIDGE, Provider.SUPERCHAIN_STANDARD_BRIDGE], - [true, false, true, true], + [liquidityPool, liquidityPool2, liquidityPool, liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ETHEREUM, Domain.OP_MAINNET, Domain.BASE, Domain.ARBITRUM_ONE], + [ + Provider.LOCAL, + Provider.LOCAL, + Provider.SUPERCHAIN_STANDARD_BRIDGE, + Provider.SUPERCHAIN_STANDARD_BRIDGE, + Provider.ARBITRUM_GATEWAY + ], + [true, false, true, true, true], [ { inputToken: usdc, @@ -109,6 +124,33 @@ describe("Repayer", function () { {destinationDomain: Domain.BASE, outputToken: addressToBytes32(networkConfig.BASE.Tokens.USDC)} ] }, + { + inputToken: dai, + destinationTokens: [ + { + destinationDomain: Domain.ARBITRUM_ONE, + outputToken: addressToBytes32(networkConfig.ARBITRUM_ONE.Tokens.DAI) + } + ] + }, + { + inputToken: wbtc, + destinationTokens: [ + { + destinationDomain: Domain.ARBITRUM_ONE, + outputToken: addressToBytes32(networkConfig.ARBITRUM_ONE.Tokens.WBTC) + } + ] + }, + { + inputToken: weth, + destinationTokens: [ + { + destinationDomain: Domain.ARBITRUM_ONE, + outputToken: 
addressToBytes32(networkConfig.ARBITRUM_ONE.Tokens.WETH) + } + ] + }, ], )).data; const repayerProxy = (await deployX( @@ -122,10 +164,11 @@ describe("Repayer", function () { await liquidityPool.grantRole(DEPOSIT_PROFIT_ROLE, repayer); return { - deployer, admin, repayUser, user, usdc, setTokensUser, + deployer, admin, repayUser, usdc, setTokensUser, USDC_DEC, liquidityPool, liquidityPool2, repayer, repayerProxy, repayerAdmin, cctpTokenMessenger, cctpMessageTransmitter, REPAYER_ROLE, DEFAULT_ADMIN_ROLE, acrossV3SpokePool, weth, stargateTreasurer, everclearFeeAdapter, forkNetworkConfig, optimismStandardBridge, baseStandardBridge, + arbitrumGatewayRouter, dai, DAI_DEC, wbtc, WBTC_DEC, }; }; @@ -288,4 +331,229 @@ describe("Repayer", function () { expect(await getBalance(repayer)).to.equal(0n); expect(await weth.balanceOf(repayer)).to.equal(0n); }); + + it("Should allow repayer to initiate Arbitrum Gateway DAI repay on fork", async function () { + const { + repayer, repayUser, liquidityPool, arbitrumGatewayRouter, dai, DAI_DEC + } = await loadFixture(deployAll); + + assertAddress(process.env.DAI_OWNER_ETH_ADDRESS, "Env variables not configured (DAI_OWNER_ETH_ADDRESS missing)"); + const DAI_OWNER_ETH_ADDRESS = process.env.DAI_OWNER_ETH_ADDRESS; + const daiOwner = await hre.ethers.getImpersonatedSigner(DAI_OWNER_ETH_ADDRESS); + await setBalance(DAI_OWNER_ETH_ADDRESS, 10n ** 18n); + + const amount = 4n * DAI_DEC; + const maxGas = 10000000n; + const gasPriceBid = 60000000n; + const maxSubmissionCost = 100000000000000n; + const fee = 1000000000000000n; + await dai.connect(daiOwner).transfer(repayer, amount); + + const outputToken = networkConfig.ARBITRUM_ONE.Tokens.DAI; + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [outputToken, maxGas, gasPriceBid, data] + ); + + const gatewayAddress = await 
arbitrumGatewayRouter.getGateway(dai.target); + const tx = repayer.connect(repayUser).initiateRepay( + dai, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData, + {value: fee} + ); + await expect(tx) + .to.emit(repayer, "InitiateRepay") + .withArgs(dai.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, Provider.ARBITRUM_GATEWAY); + await expect(tx) + .to.emit(arbitrumGatewayRouter, "TransferRouted") + .withArgs(dai.target, repayer.target, liquidityPool.target, gatewayAddress); + expect(await dai.balanceOf(repayer)).to.equal(0n); + }); + + it("Should allow repayer to initiate Arbitrum Gateway WBTC repay on fork", async function () { + const { + repayer, repayUser, liquidityPool, arbitrumGatewayRouter, wbtc, WBTC_DEC + } = await loadFixture(deployAll); + + assertAddress(process.env.WBTC_OWNER_ETH_ADDRESS, "Env variables not configured (WBTC_OWNER_ETH_ADDRESS missing)"); + const WBTC_OWNER_ETH_ADDRESS = process.env.WBTC_OWNER_ETH_ADDRESS; + const wbtcOwner = await hre.ethers.getImpersonatedSigner(WBTC_OWNER_ETH_ADDRESS); + await setBalance(WBTC_OWNER_ETH_ADDRESS, 10n ** 18n); + + const amount = 4n * WBTC_DEC; + const maxGas = 10000000n; + const gasPriceBid = 60000000n; + const maxSubmissionCost = 100000000000000n; + const fee = 1000000000000000n; + await wbtc.connect(wbtcOwner).transfer(repayer, amount); + + const outputToken = networkConfig.ARBITRUM_ONE.Tokens.WBTC; + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [outputToken, maxGas, gasPriceBid, data] + ); + + const gatewayAddress = await arbitrumGatewayRouter.getGateway(wbtc.target); + const tx = repayer.connect(repayUser).initiateRepay( + wbtc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData, + {value: fee} + ); + await expect(tx) + .to.emit(repayer, 
"InitiateRepay") + .withArgs(wbtc.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, Provider.ARBITRUM_GATEWAY); + await expect(tx) + .to.emit(arbitrumGatewayRouter, "TransferRouted") + .withArgs(wbtc.target, repayer.target, liquidityPool.target, gatewayAddress); + expect(await wbtc.balanceOf(repayer)).to.equal(0n); + }); + + it("Should allow repayer to initiate Arbitrum Gateway DAI repay on fork", async function () { + const { + repayer, repayUser, liquidityPool, arbitrumGatewayRouter, dai, DAI_DEC + } = await loadFixture(deployAll); + + assertAddress(process.env.DAI_OWNER_ETH_ADDRESS, "Env variables not configured (DAI_OWNER_ETH_ADDRESS missing)"); + const DAI_OWNER_ETH_ADDRESS = process.env.DAI_OWNER_ETH_ADDRESS; + const daiOwner = await hre.ethers.getImpersonatedSigner(DAI_OWNER_ETH_ADDRESS); + await setBalance(DAI_OWNER_ETH_ADDRESS, 10n ** 18n); + + const amount = 4n * DAI_DEC; + const maxGas = 10000000n; + const gasPriceBid = 60000000n; + const maxSubmissionCost = 100000000000000n; + const fee = 1000000000000000n; + await dai.connect(daiOwner).transfer(repayer, amount); + + const outputToken = networkConfig.ARBITRUM_ONE.Tokens.DAI; + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [outputToken, maxGas, gasPriceBid, data] + ); + + const gatewayAddress = await arbitrumGatewayRouter.getGateway(dai.target); + const tx = repayer.connect(repayUser).initiateRepay( + dai, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData, + {value: fee} + ); + await expect(tx) + .to.emit(repayer, "InitiateRepay") + .withArgs(dai.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, Provider.ARBITRUM_GATEWAY); + await expect(tx) + .to.emit(arbitrumGatewayRouter, "TransferRouted") + .withArgs(dai.target, repayer.target, liquidityPool.target, gatewayAddress); + expect(await 
dai.balanceOf(repayer)).to.equal(0n); + }); + + it("Should allow repayer to initiate Arbitrum Gateway WETH repay on fork", async function () { + const { + repayer, repayUser, liquidityPool, weth, arbitrumGatewayRouter, + } = await loadFixture(deployAll); + + const amount = 4n * ETH; + const maxGas = 10000000n; + const gasPriceBid = 60000000n; + const maxSubmissionCost = 100000000000000n; + const fee = 1000000000000000n; + await weth.connect(repayUser).deposit({value: amount}); + await weth.connect(repayUser).transfer(repayer, amount); + + const outputToken = networkConfig.ARBITRUM_ONE.Tokens.WETH; + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [outputToken, maxGas, gasPriceBid, data] + ); + + const gatewayAddress = await arbitrumGatewayRouter.getGateway(weth.target); + const tx = repayer.connect(repayUser).initiateRepay( + weth, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData, + {value: fee} + ); + await expect(tx) + .to.emit(repayer, "InitiateRepay") + .withArgs(weth.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, Provider.ARBITRUM_GATEWAY); + await expect(tx) + .to.emit(arbitrumGatewayRouter, "TransferRouted") + .withArgs(weth.target, repayer.target, liquidityPool.target, gatewayAddress); + expect(await weth.balanceOf(repayer)).to.equal(0n); + }); + + it("Should revert Arbitrum Gateway repay on fork if output tokens don't match", async function () { + const { + repayer, repayUser, liquidityPool, usdc, USDC_DEC, + } = await loadFixture(deployAll); + + const amount = 4n * USDC_DEC; + const maxGas = 10000000n; + const gasPriceBid = 60000000n; + const maxSubmissionCost = 100000000000000n; + const fee = 1000000000000000n; + + assertAddress(process.env.USDC_OWNER_ETH_ADDRESS, "Env variables not configured (USDC_OWNER_ETH_ADDRESS missing)"); + const 
USDC_OWNER_ETH_ADDRESS = process.env.USDC_OWNER_ETH_ADDRESS; + const usdcOwner = await hre.ethers.getImpersonatedSigner(USDC_OWNER_ETH_ADDRESS); + await setBalance(USDC_OWNER_ETH_ADDRESS, 10n ** 18n); + + await usdc.connect(usdcOwner).transfer(repayer, 10n * USDC_DEC); + await usdc.connect(usdcOwner).transfer(repayer, amount); + + const outputToken = networkConfig.ARBITRUM_ONE.Tokens.USDC; + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [outputToken, maxGas, gasPriceBid, data] + ); + + await expect(repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData, + {value: fee} + )).to.be.revertedWithCustomError(repayer, "InvalidOutputToken"); + }); }); diff --git a/test/Repayer.ts b/test/Repayer.ts index df2e265..c1cd50c 100644 --- a/test/Repayer.ts +++ b/test/Repayer.ts @@ -3,7 +3,7 @@ import { } from "@nomicfoundation/hardhat-toolbox/network-helpers"; import {expect} from "chai"; import hre from "hardhat"; -import {AbiCoder} from "ethers"; +import {AbiCoder, hexlify, toUtf8Bytes} from "ethers"; import {anyValue} from "@nomicfoundation/hardhat-chai-matchers/withArgs"; import { getCreateAddress, getContractAt, deploy, deployX, toBytes32, getBalance, @@ -16,7 +16,7 @@ import { TestUSDC, TransparentUpgradeableProxy, ProxyAdmin, TestLiquidityPool, Repayer, TestCCTPTokenMessenger, TestCCTPMessageTransmitter, TestAcrossV3SpokePool, TestStargate, MockStargateTreasurerTrue, MockStargateTreasurerFalse, - TestSuperchainStandardBridge, IWrappedNativeToken + TestSuperchainStandardBridge, IWrappedNativeToken, TestArbitrumGatewayRouter } from "../typechain-types"; import {networkConfig} from "../network.config"; @@ -72,6 +72,10 @@ describe("Repayer", function () { const baseBridge = ( await deploy("TestSuperchainStandardBridge", deployer, 
{}) ) as TestSuperchainStandardBridge; + const l2TokenAddress = "0xabcdefabcdefabcdefabcdefabcdefabcdefabcd"; + const arbitrumGatewayRouter = ( + await deploy("TestArbitrumGatewayRouter", deployer, {}, usdc.target, l2TokenAddress) + ) as TestArbitrumGatewayRouter; const USDC_DEC = 10n ** (await usdc.decimals()); @@ -100,6 +104,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( @@ -138,7 +143,7 @@ describe("Repayer", function () { USDC_DEC, eurc, EURC_DEC, eurcOwner, liquidityPool, liquidityPool2, repayer, repayerProxy, repayerAdmin, cctpTokenMessenger, cctpMessageTransmitter, REPAYER_ROLE, DEFAULT_ADMIN_ROLE, acrossV3SpokePool, weth, stargateTreasurerTrue, stargateTreasurerFalse, everclearFeeAdapter, forkNetworkConfig, optimismBridge, - baseBridge, setTokensUser, + baseBridge, setTokensUser, arbitrumGatewayRouter, l2TokenAddress, }; }; @@ -695,7 +700,7 @@ describe("Repayer", function () { it("Should allow repayer to initiate Across repay with SpokePool on fork", async function () { const {deployer, repayer, USDC_DEC, admin, repayUser, repayerAdmin, repayerProxy, liquidityPool, cctpTokenMessenger, cctpMessageTransmitter, weth, stargateTreasurerTrue, everclearFeeAdapter, - optimismBridge, baseBridge, setTokensUser, + optimismBridge, baseBridge, setTokensUser, arbitrumGatewayRouter, } = await loadFixture(deployAll); const acrossV3SpokePoolFork = await hre.ethers.getContractAt( @@ -725,6 +730,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; @@ -1127,7 +1133,7 @@ describe("Repayer", function () { const { USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, - setTokensUser, + setTokensUser, 
arbitrumGatewayRouter, } = await loadFixture(deployAll); const amount = 4n * USDC_DEC; const outputToken = networkConfig.OP_MAINNET.Tokens.USDC; @@ -1145,6 +1151,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( @@ -1194,7 +1201,7 @@ describe("Repayer", function () { const { USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, - setTokensUser, + setTokensUser, arbitrumGatewayRouter, } = await loadFixture(deployAll); const amount = 4n * USDC_DEC; const outputToken = networkConfig.BASE.Tokens.USDC; @@ -1212,6 +1219,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( @@ -1261,7 +1269,7 @@ describe("Repayer", function () { const { USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, - setTokensUser, + setTokensUser, arbitrumGatewayRouter, } = await loadFixture(deployAll); const repayerImpl = ( @@ -1276,6 +1284,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( @@ -1324,7 +1333,7 @@ describe("Repayer", function () { const { USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, - setTokensUser, + setTokensUser, arbitrumGatewayRouter, } = await loadFixture(deployAll); const repayerImpl = ( @@ 
-1339,6 +1348,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( @@ -1388,7 +1398,7 @@ describe("Repayer", function () { const { USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, - setTokensUser, + setTokensUser, arbitrumGatewayRouter, } = await loadFixture(deployAll); const repayerImpl = ( @@ -1403,6 +1413,7 @@ describe("Repayer", function () { stargateTreasurerTrue, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; const repayerInit = (await repayerImpl.initialize.populateTransaction( @@ -1473,6 +1484,666 @@ describe("Repayer", function () { .to.be.revertedWithCustomError(repayer, "UnsupportedDomain"); }); + it("Should allow repayer to initiate Arbitrum Gateway repay with mock bridge", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, l2TokenAddress, arbitrumGatewayRouter + } = await loadFixture(deployAll); + const amount = 4n * USDC_DEC; + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter, + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, 
Provider.ARBITRUM_GATEWAY], + [true, true], + [{ + inputToken: usdc, + destinationTokens: [ + {destinationDomain: Domain.ARBITRUM_ONE, outputToken: addressToBytes32(l2TokenAddress)} + ] + }], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await usdc.transfer(repayer, 10n * USDC_DEC); + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + + const tx = repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.emit(repayer, "InitiateRepay") + .withArgs(usdc.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, Provider.ARBITRUM_GATEWAY); + await expect(tx) + .to.emit(repayer, "ArbitrumERC20TransferInitiated") + .withArgs(hexlify(toUtf8Bytes("GATEWAY_DATA"))); + await expect(tx) + .to.emit(arbitrumGatewayRouter, "TransferRouted") + .withArgs(usdc.target, repayer.target, liquidityPool.target, arbitrumGatewayRouter.target); + }); + + it("Should revert Arbitrum Gateway repay if output token doesn't match", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, arbitrumGatewayRouter + } = await loadFixture(deployAll); + const amount = 4n * USDC_DEC; + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + + const repayerImpl = ( + await deployX("Repayer", 
deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter, + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [{ + inputToken: usdc, + destinationTokens: [ + {destinationDomain: Domain.ARBITRUM_ONE, outputToken: addressToBytes32(weth.target)} + ] + }], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await usdc.transfer(repayer, 10n * USDC_DEC); + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [weth.target, maxGas, gasPriceBid, data] + ); + + await expect(repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + )).to.be.revertedWithCustomError(repayer, "InvalidOutputToken()"); + }); + + it("Should revert Arbitrum Gateway repay if call to Arbitrum Gateway reverts", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, l2TokenAddress, arbitrumGatewayRouter + } = await loadFixture(deployAll); + + // Deploy repayer configured to use Arbitrum Gateway Router + const repayerImpl = ( + await 
deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter, + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [{ + inputToken: usdc, + destinationTokens: [ + {destinationDomain: Domain.ARBITRUM_ONE, outputToken: addressToBytes32(l2TokenAddress)} + ] + }], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await usdc.transfer(repayer, 10n * USDC_DEC); + + // Use amount 2000 to trigger the mock router revert + const amount = 2000n; + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + + await expect(repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + )).to.be.reverted; + }); + + it("Should initiate Arbitrum Gateway repay with wrapped native currency", async function () { + const { + usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, l2TokenAddress, + } = 
await loadFixture(deployAll); + const amount = 100000n; + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + + const arbitrumGatewayRouter = ( + await deploy("TestArbitrumGatewayRouter", deployer, {}, weth.target, l2TokenAddress) + ) as TestArbitrumGatewayRouter; + + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter, + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [{ + inputToken: weth, + destinationTokens: [ + {destinationDomain: Domain.ARBITRUM_ONE, outputToken: addressToBytes32(l2TokenAddress)} + ] + }], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await weth.connect(repayUser).deposit({value: amount}); + await weth.connect(repayUser).transfer(repayer, amount); + + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + const tx = repayer.connect(repayUser).initiateRepay( + weth, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.emit(repayer, "InitiateRepay") + .withArgs(weth.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, 
Provider.ARBITRUM_GATEWAY); + await expect(tx) + .to.emit(arbitrumGatewayRouter, "TransferRouted") + .withArgs(weth.target, repayer.target, liquidityPool.target, arbitrumGatewayRouter.target); + }); + + it("Should revert Arbitrum Gateway repay if output token doesn't match the gateway token", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, l2TokenAddress, arbitrumGatewayRouter + } = await loadFixture(deployAll); + + // Deploy repayer configured to use Arbitrum Gateway Router + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter, + ) + ) as Repayer; + + const wrongOutputToken = weth.target; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [{ + inputToken: usdc, + destinationTokens: [ + {destinationDomain: Domain.ARBITRUM_ONE, outputToken: addressToBytes32(wrongOutputToken)} + ] + }], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await usdc.transfer(repayer, 10n * USDC_DEC); + + // Use amount 2000 to trigger the mock router revert + const amount = 2000n; + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + + const data = 
AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + + await expect(repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + )).to.be.revertedWithCustomError(repayer, "InvalidOutputToken"); + }); + + it("Should revert Arbitrum Gateway repay if output token is not allowed", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, arbitrumGatewayRouter + } = await loadFixture(deployAll); + + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await usdc.transfer(repayer, 10n * USDC_DEC); + + const amount = 4n * USDC_DEC; + const outputToken = ZERO_ADDRESS; + const minGasLimit = 100000n; + const maxSubmissionCost = 100000000000000n; + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + 
[maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint32", "bytes"], + [outputToken, minGasLimit, data], + ); + const tx = repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.be.revertedWithCustomError(repayer, "InvalidOutputToken()"); + }); + + it("Should NOT allow repayer to initiate Arbitrum Gateway repay on invalid route", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, arbitrumGatewayRouter, l2TokenAddress + } = await loadFixture(deployAll); + + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool], + [Domain.ETHEREUM], + [Provider.LOCAL], + [true], + [], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + const amount = 4n * USDC_DEC; + + await usdc.transfer(repayer, 10n * USDC_DEC); + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", 
"uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + const tx = repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.be.revertedWithCustomError(repayer, "RouteDenied"); + }); + + it("Should NOT allow repayer to initiate Arbitrum Gateway repay if local domain is not ETHEREUM", async function () { + const {admin, USDC_DEC, usdc, repayUser, liquidityPool, repayer, l2TokenAddress} = await loadFixture(deployAll); + + await repayer.connect(admin).setRoute( + [liquidityPool], + [Domain.ARBITRUM_ONE], + [Provider.ARBITRUM_GATEWAY], + [true], + ALLOWED + ); + + const amount = 4n * USDC_DEC; + + await usdc.transfer(repayer, 10n * USDC_DEC); + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + const tx = repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.be.revertedWithCustomError(repayer, "UnsupportedDomain"); + }); + + it("Should NOT initiate Arbitrum Gateway repay if destination domain is not ARBITRUM_ONE", async function () { + const {USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, arbitrumGatewayRouter, l2TokenAddress} = await loadFixture(deployAll); + + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + 
everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + baseBridge, + arbitrumGatewayRouter + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.BASE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + const amount = 4n * USDC_DEC; + + await usdc.transfer(repayer, 10n * USDC_DEC); + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + const tx = repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.BASE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.be.revertedWithCustomError(repayer, "UnsupportedDomain"); + }); + + it("Should revert Arbitrum Gateway repay if router address is 0", async function () { + const { + USDC_DEC, usdc, repayUser, liquidityPool, optimismBridge, cctpTokenMessenger, cctpMessageTransmitter, + acrossV3SpokePool, everclearFeeAdapter, weth, stargateTreasurerTrue, admin, deployer, baseBridge, + setTokensUser, l2TokenAddress + } = await loadFixture(deployAll); + + const repayerImpl = ( + await deployX("Repayer", deployer, "Repayer2", {}, + Domain.ETHEREUM, + usdc, + cctpTokenMessenger, + cctpMessageTransmitter, + acrossV3SpokePool, + everclearFeeAdapter, + weth, + stargateTreasurerTrue, + optimismBridge, + 
baseBridge, + ZERO_ADDRESS + ) + ) as Repayer; + const repayerInit = (await repayerImpl.initialize.populateTransaction( + admin, + repayUser, + setTokensUser, + [liquidityPool, liquidityPool], + [Domain.ETHEREUM, Domain.ARBITRUM_ONE], + [Provider.LOCAL, Provider.ARBITRUM_GATEWAY], + [true, true], + [], + )).data; + const repayerProxy = (await deployX( + "TransparentUpgradeableProxy", deployer, "TransparentUpgradeableProxyRepayer2", {}, + repayerImpl, admin, repayerInit + )) as TransparentUpgradeableProxy; + const repayer = (await getContractAt("Repayer", repayerProxy, deployer)) as Repayer; + + await usdc.transfer(repayer, 10n * USDC_DEC); + + const amount = 4n * USDC_DEC; + + await usdc.transfer(repayer, 10n * USDC_DEC); + const maxGas = 10000000n; + const gasPriceBid = 1000000000n; + const maxSubmissionCost = 100000000000000n; + const data = AbiCoder.defaultAbiCoder().encode( + ["uint256", "bytes"], + [maxSubmissionCost, "0x"], + ); + const extraData = AbiCoder.defaultAbiCoder().encode( + ["address", "uint256", "uint256", "bytes"], + [l2TokenAddress, maxGas, gasPriceBid, data] + ); + const tx = repayer.connect(repayUser).initiateRepay( + usdc, + amount, + liquidityPool, + Domain.ARBITRUM_ONE, + Provider.ARBITRUM_GATEWAY, + extraData + ); + await expect(tx) + .to.be.revertedWithCustomError(repayer, "ZeroAddress"); + }); + it("Should allow repayer to initiate repay of a different token", async function () { const {repayer, eurc, EURC_DEC, eurcOwner, repayUser, liquidityPool } = await loadFixture(deployAll); @@ -1750,7 +2421,7 @@ describe("Repayer", function () { it("Should revert Stargate repay if the pool is not registered", async function () { const {repayer, USDC_DEC, usdc, admin, repayUser, liquidityPool, deployer, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, weth, stargateTreasurerFalse, repayerAdmin, repayerProxy, - everclearFeeAdapter, optimismBridge, baseBridge, + everclearFeeAdapter, optimismBridge, baseBridge, arbitrumGatewayRouter, } = 
await loadFixture(deployAll); await usdc.transfer(repayer, 10n * USDC_DEC); @@ -1776,6 +2447,7 @@ describe("Repayer", function () { stargateTreasurerFalse, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; @@ -1883,6 +2555,7 @@ describe("Repayer", function () { const { repayer, USDC_DEC, admin, repayUser, liquidityPool, deployer, cctpTokenMessenger, cctpMessageTransmitter, acrossV3SpokePool, weth, repayerAdmin, repayerProxy, everclearFeeAdapter, optimismBridge, baseBridge, + arbitrumGatewayRouter, } = await loadFixture(deployAll); const stargatePoolUsdcAddress = "0x27a16dc786820B16E5c9028b75B99F6f604b5d26"; @@ -1914,6 +2587,7 @@ describe("Repayer", function () { stargateTreasurer, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; @@ -2052,7 +2726,7 @@ describe("Repayer", function () { it("Should unwrap enough native tokens on initiate repay", async function () { const { repayer, repayUser, liquidityPool, optimismBridge, usdc, cctpTokenMessenger, - cctpMessageTransmitter, repayerAdmin, admin, repayerProxy, deployer, baseBridge, + cctpMessageTransmitter, repayerAdmin, admin, repayerProxy, deployer, baseBridge, arbitrumGatewayRouter, } = await loadFixture(deployAll); const wrappedAmount = 10n * ETH; @@ -2081,6 +2755,7 @@ describe("Repayer", function () { ZERO_ADDRESS, optimismBridge, baseBridge, + arbitrumGatewayRouter, ) ) as Repayer; From a99aa0b1f6ae30b9a7bd7ef3df81df7b2b72c4ad Mon Sep 17 00:00:00 2001 From: viatrix Date: Fri, 12 Dec 2025 00:13:41 +0200 Subject: [PATCH 02/27] Update scripts and coverage --- .env.example | 2 ++ coverage-baseline.json | 8 ++++---- network.config.ts | 1 + scripts/deploy.ts | 4 ++++ scripts/deployRepayer.ts | 4 ++++ scripts/deployStandaloneRepayer.ts | 4 ++++ scripts/upgradeRepayer.ts | 4 ++++ 7 files changed, 23 insertions(+), 4 deletions(-) diff --git a/.env.example b/.env.example index db70506..a45c154 100644 --- a/.env.example +++ b/.env.example @@ -46,3 +46,5 @@ 
EURC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b WETH_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b PRIME_OWNER_ADDRESS=0x75a44A70cCb0E886E25084Be14bD45af57915451 USDC_OWNER_ETH_ADDRESS=0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf +DAI_OWNER_ETH_ADDRESS=0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf +WBTC_OWNER_ETH_ADDRESS=0x40ec5B33f54e0E8A33A975908C5BA1c14e5BbbDf diff --git a/coverage-baseline.json b/coverage-baseline.json index 0778ddc..74a045e 100644 --- a/coverage-baseline.json +++ b/coverage-baseline.json @@ -1,6 +1,6 @@ { - "lines": "96.88", - "functions": "98.57", - "branches": "87.76", - "statements": "96.88" + "lines": "96.93", + "functions": "98.58", + "branches": "87.79", + "statements": "96.93" } \ No newline at end of file diff --git a/network.config.ts b/network.config.ts index 93b0db8..45ba89b 100644 --- a/network.config.ts +++ b/network.config.ts @@ -1510,6 +1510,7 @@ export interface StandaloneRepayerConfig { EverclearFeeAdapter?: string; OptimismStandardBridge?: string; BaseStandardBridge?: string; + ArbitrumGatewayRouter?: string; // Repayer tokens are used from the general network config. 
WrappedNativeToken: string; RepayerRoutes: RepayerRoutesConfig; diff --git a/scripts/deploy.ts b/scripts/deploy.ts index 633acd8..e35d666 100644 --- a/scripts/deploy.ts +++ b/scripts/deploy.ts @@ -124,6 +124,9 @@ export async function main() { if (!config.BaseStandardBridge) { config.BaseStandardBridge = ZERO_ADDRESS; } + if (!config.ArbitrumGatewayRouter) { + config.ArbitrumGatewayRouter = ZERO_ADDRESS; + } let mainPool: LiquidityPool | undefined = undefined; let aavePoolLongTerm: LiquidityPoolAaveLongTerm; @@ -412,6 +415,7 @@ export async function main() { config.StargateTreasurer, config.OptimismStandardBridge, config.BaseStandardBridge, + config.ArbitrumGatewayRouter, ], [ config.Admin, diff --git a/scripts/deployRepayer.ts b/scripts/deployRepayer.ts index 2bff6f2..0ecfc3d 100644 --- a/scripts/deployRepayer.ts +++ b/scripts/deployRepayer.ts @@ -73,6 +73,9 @@ export async function main() { if (!config.BaseStandardBridge) { config.BaseStandardBridge = ZERO_ADDRESS; } + if (!config.ArbitrumGatewayRouter) { + config.ArbitrumGatewayRouter = ZERO_ADDRESS; + } const inputOutputTokens = getInputOutputTokens(network, config); const repayerVersion = config.IsTest ? "TestRepayer" : "Repayer"; @@ -93,6 +96,7 @@ export async function main() { config.StargateTreasurer, config.OptimismStandardBridge, config.BaseStandardBridge, + config.ArbitrumGatewayRouter, ], [ config.Admin, diff --git a/scripts/deployStandaloneRepayer.ts b/scripts/deployStandaloneRepayer.ts index 715fb25..e8c245c 100644 --- a/scripts/deployStandaloneRepayer.ts +++ b/scripts/deployStandaloneRepayer.ts @@ -80,6 +80,9 @@ export async function main() { if (!config.BaseStandardBridge) { config.BaseStandardBridge = ZERO_ADDRESS; } + if (!config.ArbitrumGatewayRouter) { + config.ArbitrumGatewayRouter = ZERO_ADDRESS; + } const inputOutputTokens = getInputOutputTokens(network, networkConfig[network]); const repayerVersion = config.IsTest ? 
"TestRepayer" : "Repayer"; @@ -100,6 +103,7 @@ export async function main() { config.StargateTreasurer, config.OptimismStandardBridge, config.BaseStandardBridge, + config.ArbitrumGatewayRouter, ], [ deployer, diff --git a/scripts/upgradeRepayer.ts b/scripts/upgradeRepayer.ts index 3343c3e..a5af97b 100644 --- a/scripts/upgradeRepayer.ts +++ b/scripts/upgradeRepayer.ts @@ -48,6 +48,9 @@ export async function main() { if (!config.BaseStandardBridge) { config.BaseStandardBridge = ZERO_ADDRESS; } + if (!config.ArbitrumGatewayRouter) { + config.ArbitrumGatewayRouter = ZERO_ADDRESS; + } const repayerAddress = await getDeployProxyXAddress("Repayer"); const repayerVersion = config.IsTest ? "TestRepayer" : "Repayer"; @@ -68,6 +71,7 @@ export async function main() { config.StargateTreasurer, config.OptimismStandardBridge, config.BaseStandardBridge, + config.ArbitrumGatewayRouter, ], "Repayer", ); From 229201e47858c197ed7e81cd6c95338df03cc6f0 Mon Sep 17 00:00:00 2001 From: viatrix Date: Sat, 13 Dec 2025 18:42:17 +0200 Subject: [PATCH 03/27] Fixes after review --- contracts/utils/ArbitrumGatewayAdapter.sol | 4 ++-- specific-fork-test/ethereum/Repayer.ts | 6 ++++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/contracts/utils/ArbitrumGatewayAdapter.sol b/contracts/utils/ArbitrumGatewayAdapter.sol index a7b62b2..4e651c8 100644 --- a/contracts/utils/ArbitrumGatewayAdapter.sol +++ b/contracts/utils/ArbitrumGatewayAdapter.sol @@ -39,10 +39,10 @@ abstract contract ArbitrumGatewayAdapter is AdapterHelper { _validateOutputToken(_addressToBytes32(outputToken), destinationDomain, outputTokens); // Get output token from the gateway - address gatewayOutputToken = ARBITRUM_GATEWAY_ROUTER.calculateL2TokenAddress(address(token)); + address gatewayOutputToken = router.calculateL2TokenAddress(address(token)); // Check that output tokens match require(gatewayOutputToken == outputToken, InvalidOutputToken()); - address gateway = ARBITRUM_GATEWAY_ROUTER.getGateway(address(token)); 
+ address gateway = router.getGateway(address(token)); token.forceApprove(gateway, amount); bytes memory gatewayData = router.outboundTransfer{value: msg.value}( address(token), diff --git a/specific-fork-test/ethereum/Repayer.ts b/specific-fork-test/ethereum/Repayer.ts index 236bfa3..2b49fb5 100644 --- a/specific-fork-test/ethereum/Repayer.ts +++ b/specific-fork-test/ethereum/Repayer.ts @@ -29,8 +29,10 @@ describe("Repayer", function () { const DEPOSIT_PROFIT_ROLE = toBytes32("DEPOSIT_PROFIT_ROLE"); const usdc = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.USDC); - const dai = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.DAI!); - const wbtc = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.WBTC!); + assertAddress(forkNetworkConfig.Tokens.DAI, "DAI address is missing"); + const dai = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.DAI); + assertAddress(forkNetworkConfig.Tokens.WBTC, "WBTC address is missing"); + const wbtc = await hre.ethers.getContractAt("ERC20", forkNetworkConfig.Tokens.WBTC); const liquidityPool = (await deploy( "TestLiquidityPool", deployer, From 34e98773b3efafbb89af56c09817c15fd1ecc55b Mon Sep 17 00:00:00 2001 From: viatrix Date: Tue, 16 Dec 2025 10:49:44 +0200 Subject: [PATCH 04/27] Remove duplicated test --- specific-fork-test/ethereum/Repayer.ts | 47 -------------------------- 1 file changed, 47 deletions(-) diff --git a/specific-fork-test/ethereum/Repayer.ts b/specific-fork-test/ethereum/Repayer.ts index 2b49fb5..385edaa 100644 --- a/specific-fork-test/ethereum/Repayer.ts +++ b/specific-fork-test/ethereum/Repayer.ts @@ -428,53 +428,6 @@ describe("Repayer", function () { expect(await wbtc.balanceOf(repayer)).to.equal(0n); }); - it("Should allow repayer to initiate Arbitrum Gateway DAI repay on fork", async function () { - const { - repayer, repayUser, liquidityPool, arbitrumGatewayRouter, dai, DAI_DEC - } = await loadFixture(deployAll); - - 
assertAddress(process.env.DAI_OWNER_ETH_ADDRESS, "Env variables not configured (DAI_OWNER_ETH_ADDRESS missing)"); - const DAI_OWNER_ETH_ADDRESS = process.env.DAI_OWNER_ETH_ADDRESS; - const daiOwner = await hre.ethers.getImpersonatedSigner(DAI_OWNER_ETH_ADDRESS); - await setBalance(DAI_OWNER_ETH_ADDRESS, 10n ** 18n); - - const amount = 4n * DAI_DEC; - const maxGas = 10000000n; - const gasPriceBid = 60000000n; - const maxSubmissionCost = 100000000000000n; - const fee = 1000000000000000n; - await dai.connect(daiOwner).transfer(repayer, amount); - - const outputToken = networkConfig.ARBITRUM_ONE.Tokens.DAI; - - const data = AbiCoder.defaultAbiCoder().encode( - ["uint256", "bytes"], - [maxSubmissionCost, "0x"], - ); - const extraData = AbiCoder.defaultAbiCoder().encode( - ["address", "uint256", "uint256", "bytes"], - [outputToken, maxGas, gasPriceBid, data] - ); - - const gatewayAddress = await arbitrumGatewayRouter.getGateway(dai.target); - const tx = repayer.connect(repayUser).initiateRepay( - dai, - amount, - liquidityPool, - Domain.ARBITRUM_ONE, - Provider.ARBITRUM_GATEWAY, - extraData, - {value: fee} - ); - await expect(tx) - .to.emit(repayer, "InitiateRepay") - .withArgs(dai.target, amount, liquidityPool.target, Domain.ARBITRUM_ONE, Provider.ARBITRUM_GATEWAY); - await expect(tx) - .to.emit(arbitrumGatewayRouter, "TransferRouted") - .withArgs(dai.target, repayer.target, liquidityPool.target, gatewayAddress); - expect(await dai.balanceOf(repayer)).to.equal(0n); - }); - it("Should allow repayer to initiate Arbitrum Gateway WETH repay on fork", async function () { const { repayer, repayUser, liquidityPool, weth, arbitrumGatewayRouter, From 688b200426bb96c4096a6fe75553c286311026d0 Mon Sep 17 00:00:00 2001 From: LiviuD Date: Tue, 16 Dec 2025 16:11:07 +0200 Subject: [PATCH 05/27] updated deterministic hardhat tests --- .env.example | 16 ++++++++++++++++ COVERAGE.md | 42 ++++++++++++++++++++++++++++++++++++++---- coverage-baseline.json | 2 +- hardhat.config.ts | 9 
++++++++- scripts/get-blocks.mjs | 35 +++++++++++++++++++++++++++++++++++ 5 files changed, 98 insertions(+), 6 deletions(-) create mode 100644 scripts/get-blocks.mjs diff --git a/.env.example b/.env.example index a45c154..86eeeff 100644 --- a/.env.example +++ b/.env.example @@ -40,6 +40,22 @@ ETHERSCAN_API_KEY= # Testing parameters. FORK_PROVIDER=https://base-mainnet.public.blastapi.io + +# Fork block numbers for consistent coverage between local and CI runs. +# Each chain has independent block heights, so we need different blocks per chain. +# When undefined, Hardhat forks at latest block which causes coverage variability (±0.2%). +# These blocks were captured on 2025-12-16 and should be updated periodically. +# Update process: Run scripts/get-blocks.mjs to fetch latest blocks, then run coverage and update baseline. +FORK_BLOCK_NUMBER_BASE=39550474 +FORK_BLOCK_NUMBER_ETHEREUM=24024515 +FORK_BLOCK_NUMBER_ARBITRUM_ONE=411254516 +FORK_BLOCK_NUMBER_OP_MAINNET=127000000 +FORK_BLOCK_NUMBER_POLYGON_MAINNET=80390425 +FORK_BLOCK_NUMBER_AVALANCHE=73848966 +FORK_BLOCK_NUMBER_BSC=71852875 +FORK_BLOCK_NUMBER_LINEA=26756433 +FORK_BLOCK_NUMBER_UNICHAIN=1000000 + USDC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b GHO_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b EURC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b diff --git a/COVERAGE.md b/COVERAGE.md index f6634eb..4f72f74 100644 --- a/COVERAGE.md +++ b/COVERAGE.md @@ -56,20 +56,25 @@ npm run coverage:update-baseline **Step-by-step:** 1. Make your code changes -2. Run coverage locally: +2. Ensure `.env` file exists with pinned fork blocks (copy from `.env.example` if needed): + ```bash + cp .env.example .env + ``` + **Important:** Using the same fork blocks as `.env.example` ensures your local coverage matches CI coverage. +3. Run coverage locally: ```bash npm run coverage ``` -3. Update the baseline file: +4. Update the baseline file: ```bash npm run coverage:update-baseline ``` -4. 
Commit the baseline file: +5. Commit the baseline file: ```bash git add coverage-baseline.json git commit -m "chore: update coverage baseline" ``` -5. Push your PR +6. Push your PR **What CI validates:** - āœ… **Check 1:** Your committed baseline matches CI coverage (proves you ran coverage) @@ -108,6 +113,35 @@ Current baseline (as of initial setup): ### Environment Setup for CI The workflow copies `.env.example` to `.env` to enable fork tests with public RPC endpoints during coverage runs. +### Fork Block Pinning for Deterministic Coverage + +**Why fork blocks are pinned:** +Coverage tests fork mainnet at specific block heights. Without pinning: +- Developer runs locally → forks at block X → gets 96.93% coverage +- CI runs 30 mins later → forks at block Y → gets 96.82% coverage +- Different blocks = different contract states = different test paths = different coverage + +**Solution:** +Pin each chain to a specific block number in `.env.example`: +```bash +FORK_BLOCK_NUMBER_BASE=39550474 +FORK_BLOCK_NUMBER_ETHEREUM=24024515 +FORK_BLOCK_NUMBER_ARBITRUM_ONE=411254516 +# etc... +``` + +This ensures both local and CI environments fork from **identical blockchain state**, producing **identical coverage results**. + +**Updating fork blocks:** +When you need to test against newer mainnet state: +1. Run the helper script: `node scripts/get-blocks.mjs` +2. Copy the output to `.env.example` +3. Run coverage: `npm run coverage` +4. If tests pass, update baseline: `npm run coverage:update-baseline` +5. Commit both `.env.example` and `coverage-baseline.json` + +**Note:** Each blockchain has independent block heights, so each needs its own pinned block number. + ### Branch Protection To enforce coverage checks, enable branch protection on main: 1. 
GitHub Settings → Branches → Branch protection rules diff --git a/coverage-baseline.json b/coverage-baseline.json index 74a045e..6ac40b0 100644 --- a/coverage-baseline.json +++ b/coverage-baseline.json @@ -1,6 +1,6 @@ { "lines": "96.93", "functions": "98.58", - "branches": "87.79", + "branches": "87.63", "statements": "96.93" } \ No newline at end of file diff --git a/hardhat.config.ts b/hardhat.config.ts index 9c3b3ed..f47c5ff 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -699,7 +699,14 @@ const config: HardhatUserConfig = { url: isSet(process.env.DRY_RUN) || isSet(process.env.FORK_TEST) ? process.env[`${process.env.DRY_RUN || process.env.FORK_TEST}_RPC`]! : (process.env.FORK_PROVIDER || process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io"), - blockNumber: process.env.FORK_BLOCK_NUMBER ? parseInt(process.env.FORK_BLOCK_NUMBER) : undefined, + blockNumber: (() => { + // Determine which chain is being forked + const chain = (process.env.DRY_RUN || process.env.FORK_TEST || 'BASE').toUpperCase(); + // Look up the per-chain fork block number + const blockVar = `FORK_BLOCK_NUMBER_${chain}`; + const blockNumber = process.env[blockVar]; + return blockNumber ? parseInt(blockNumber) : undefined; + })(), }, accounts: isSet(process.env.DRY_RUN) ? 
[{privateKey: process.env.PRIVATE_KEY!, balance: "1000000000000000000"}] diff --git a/scripts/get-blocks.mjs b/scripts/get-blocks.mjs new file mode 100644 index 0000000..c509fcb --- /dev/null +++ b/scripts/get-blocks.mjs @@ -0,0 +1,35 @@ +import { ethers } from 'ethers'; + +const chains = { + 'BASE': process.env.BASE_RPC || 'https://base-mainnet.public.blastapi.io', + 'ETHEREUM': process.env.ETHEREUM_RPC || 'https://eth-mainnet.public.blastapi.io', + 'ARBITRUM_ONE': process.env.ARBITRUM_ONE_RPC || 'https://arbitrum-one.public.blastapi.io', + 'OP_MAINNET': process.env.OP_MAINNET_RPC || 'https://public-op-mainnet.fastnode.io', + 'POLYGON_MAINNET': process.env.POLYGON_MAINNET_RPC || 'https://polygon-bor-rpc.publicnode.com', + 'AVALANCHE': process.env.AVALANCHE_RPC || 'https://avalanche-c-chain-rpc.publicnode.com', + 'BSC': process.env.BSC_RPC || 'https://bsc-mainnet.public.blastapi.io', + 'LINEA': process.env.LINEA_RPC || 'https://linea-rpc.publicnode.com', +}; + +async function getBlockNumber(name, url) { + try { + const provider = new ethers.JsonRpcProvider(url); + const blockNumber = await provider.getBlockNumber(); + // Subtract 1000 blocks for safety margin + const safeBlock = blockNumber - 1000; + console.log(`FORK_BLOCK_NUMBER_${name}=${safeBlock}`); + return safeBlock; + } catch (error) { + console.error(`# Error fetching ${name}: ${error.message}`); + return null; + } +} + +async function main() { + console.log('# Fetching current block numbers...'); + for (const [name, url] of Object.entries(chains)) { + await getBlockNumber(name, url); + } +} + +main().catch(console.error); From ca22cbd01de19a50808838dfe5a9de38f4da6c3b Mon Sep 17 00:00:00 2001 From: LiviuD Date: Tue, 16 Dec 2025 16:20:55 +0200 Subject: [PATCH 06/27] fixed lint issues --- hardhat.config.ts | 2 +- scripts/get-blocks.mjs | 20 ++++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/hardhat.config.ts b/hardhat.config.ts index f47c5ff..d712355 100644 --- 
a/hardhat.config.ts +++ b/hardhat.config.ts @@ -701,7 +701,7 @@ const config: HardhatUserConfig = { : (process.env.FORK_PROVIDER || process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io"), blockNumber: (() => { // Determine which chain is being forked - const chain = (process.env.DRY_RUN || process.env.FORK_TEST || 'BASE').toUpperCase(); + const chain = (process.env.DRY_RUN || process.env.FORK_TEST || "BASE").toUpperCase(); // Look up the per-chain fork block number const blockVar = `FORK_BLOCK_NUMBER_${chain}`; const blockNumber = process.env[blockVar]; diff --git a/scripts/get-blocks.mjs b/scripts/get-blocks.mjs index c509fcb..84dda9b 100644 --- a/scripts/get-blocks.mjs +++ b/scripts/get-blocks.mjs @@ -1,14 +1,14 @@ -import { ethers } from 'ethers'; +import {ethers} from "ethers"; const chains = { - 'BASE': process.env.BASE_RPC || 'https://base-mainnet.public.blastapi.io', - 'ETHEREUM': process.env.ETHEREUM_RPC || 'https://eth-mainnet.public.blastapi.io', - 'ARBITRUM_ONE': process.env.ARBITRUM_ONE_RPC || 'https://arbitrum-one.public.blastapi.io', - 'OP_MAINNET': process.env.OP_MAINNET_RPC || 'https://public-op-mainnet.fastnode.io', - 'POLYGON_MAINNET': process.env.POLYGON_MAINNET_RPC || 'https://polygon-bor-rpc.publicnode.com', - 'AVALANCHE': process.env.AVALANCHE_RPC || 'https://avalanche-c-chain-rpc.publicnode.com', - 'BSC': process.env.BSC_RPC || 'https://bsc-mainnet.public.blastapi.io', - 'LINEA': process.env.LINEA_RPC || 'https://linea-rpc.publicnode.com', + "BASE": process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io", + "ETHEREUM": process.env.ETHEREUM_RPC || "https://eth-mainnet.public.blastapi.io", + "ARBITRUM_ONE": process.env.ARBITRUM_ONE_RPC || "https://arbitrum-one.public.blastapi.io", + "OP_MAINNET": process.env.OP_MAINNET_RPC || "https://public-op-mainnet.fastnode.io", + "POLYGON_MAINNET": process.env.POLYGON_MAINNET_RPC || "https://polygon-bor-rpc.publicnode.com", + "AVALANCHE": process.env.AVALANCHE_RPC || 
"https://avalanche-c-chain-rpc.publicnode.com", + "BSC": process.env.BSC_RPC || "https://bsc-mainnet.public.blastapi.io", + "LINEA": process.env.LINEA_RPC || "https://linea-rpc.publicnode.com", }; async function getBlockNumber(name, url) { @@ -26,7 +26,7 @@ async function getBlockNumber(name, url) { } async function main() { - console.log('# Fetching current block numbers...'); + console.log("# Fetching current block numbers..."); for (const [name, url] of Object.entries(chains)) { await getBlockNumber(name, url); } From e7ffbd8b3b13cdd1637008fd229e3449c869ba22 Mon Sep 17 00:00:00 2001 From: LiviuD Date: Wed, 17 Dec 2025 14:05:18 +0200 Subject: [PATCH 07/27] push change --- .env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env.example b/.env.example index 86eeeff..67fc336 100644 --- a/.env.example +++ b/.env.example @@ -50,7 +50,7 @@ FORK_BLOCK_NUMBER_BASE=39550474 FORK_BLOCK_NUMBER_ETHEREUM=24024515 FORK_BLOCK_NUMBER_ARBITRUM_ONE=411254516 FORK_BLOCK_NUMBER_OP_MAINNET=127000000 -FORK_BLOCK_NUMBER_POLYGON_MAINNET=80390425 +FORK_BLOCK_NUMBER_POLYGON_MAINNET=80390425 FORK_BLOCK_NUMBER_AVALANCHE=73848966 FORK_BLOCK_NUMBER_BSC=71852875 FORK_BLOCK_NUMBER_LINEA=26756433 From 110547bbe94d9346b66ecdc6c34bad5054ab8f29 Mon Sep 17 00:00:00 2001 From: LiviuD Date: Wed, 17 Dec 2025 15:32:12 +0200 Subject: [PATCH 08/27] updated debug behavior --- .env.example | 16 ++++++++-------- scripts/get-blocks.mjs | 4 ++-- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/.env.example b/.env.example index 67fc336..cb6a490 100644 --- a/.env.example +++ b/.env.example @@ -46,14 +46,14 @@ FORK_PROVIDER=https://base-mainnet.public.blastapi.io # When undefined, Hardhat forks at latest block which causes coverage variability (±0.2%). # These blocks were captured on 2025-12-16 and should be updated periodically. # Update process: Run scripts/get-blocks.mjs to fetch latest blocks, then run coverage and update baseline. 
-FORK_BLOCK_NUMBER_BASE=39550474 -FORK_BLOCK_NUMBER_ETHEREUM=24024515 -FORK_BLOCK_NUMBER_ARBITRUM_ONE=411254516 -FORK_BLOCK_NUMBER_OP_MAINNET=127000000 -FORK_BLOCK_NUMBER_POLYGON_MAINNET=80390425 -FORK_BLOCK_NUMBER_AVALANCHE=73848966 -FORK_BLOCK_NUMBER_BSC=71852875 -FORK_BLOCK_NUMBER_LINEA=26756433 +FORK_BLOCK_NUMBER_BASE=39590000 +FORK_BLOCK_NUMBER_ETHEREUM=21500000 +FORK_BLOCK_NUMBER_ARBITRUM_ONE=412000000 +FORK_BLOCK_NUMBER_OP_MAINNET=128000000 +FORK_BLOCK_NUMBER_POLYGON_MAINNET=81000000 +FORK_BLOCK_NUMBER_AVALANCHE=74000000 +FORK_BLOCK_NUMBER_BSC=72000000 +FORK_BLOCK_NUMBER_LINEA=27000000 FORK_BLOCK_NUMBER_UNICHAIN=1000000 USDC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b diff --git a/scripts/get-blocks.mjs b/scripts/get-blocks.mjs index 84dda9b..02628b5 100644 --- a/scripts/get-blocks.mjs +++ b/scripts/get-blocks.mjs @@ -15,8 +15,8 @@ async function getBlockNumber(name, url) { try { const provider = new ethers.JsonRpcProvider(url); const blockNumber = await provider.getBlockNumber(); - // Subtract 1000 blocks for safety margin - const safeBlock = blockNumber - 1000; + // Subtract 100 blocks for minimal safety margin (contracts are recent) + const safeBlock = blockNumber - 100; console.log(`FORK_BLOCK_NUMBER_${name}=${safeBlock}`); return safeBlock; } catch (error) { From 52dbfc290cd03f2e988bba8e7d27fb59d1cde1c7 Mon Sep 17 00:00:00 2001 From: Oleksii Matiiasevych Date: Wed, 17 Dec 2025 22:50:49 +0700 Subject: [PATCH 09/27] Update Everclear test --- test/Repayer.ts | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/test/Repayer.ts b/test/Repayer.ts index c1cd50c..d0bbf35 100644 --- a/test/Repayer.ts +++ b/test/Repayer.ts @@ -1021,14 +1021,20 @@ describe("Repayer", function () { maxFee: "200", }) })).json()).data; + const newIntentSelector = "0xae9b2bad"; // API returns selector for a variety of newIntent that takes 'address' as resipient. // We are using version that expects a 'bytes32' instead. 
Encoding other data remains the same. const apiTx = everclearFeeAdapter.interface.decodeFunctionData("newIntent", newIntentSelector + apiData.substr(10)); + const extraData = AbiCoder.defaultAbiCoder().encode( ["bytes32", "uint256", "uint48", "tuple(uint256, uint256, bytes)"], [apiTx[3], apiTx[5], apiTx[6], apiTx[8]] ); + const apiAmountIn = apiTx[4]; + const apiFee = apiTx[8][0]; + const apiAmountWithFee = apiAmountIn + apiFee; + expect(apiAmountWithFee).to.be.lessThanOrEqual(amount); await repayer.connect(setTokensUser).setInputOutputTokens( [{ inputToken: weth, @@ -1040,7 +1046,7 @@ describe("Repayer", function () { ); const tx = repayer.connect(repayUser).initiateRepay( weth, - amount, + apiAmountWithFee, liquidityPool, Domain.ETHEREUM, Provider.EVERCLEAR, @@ -1049,13 +1055,13 @@ describe("Repayer", function () { await expect(tx) .to.emit(repayer, "InitiateRepay") - .withArgs(weth.target, amount, liquidityPool.target, Domain.ETHEREUM, Provider.EVERCLEAR); + .withArgs(weth.target, apiAmountWithFee, liquidityPool.target, Domain.ETHEREUM, Provider.EVERCLEAR); await expect(tx) .to.emit(weth, "Transfer") - .withArgs(repayer.target, everclearFeeAdapter.target, amount); + .withArgs(repayer.target, everclearFeeAdapter.target, apiAmountWithFee); await expect(tx) .to.emit(everclearFeeAdapter, "IntentWithFeesAdded"); - expect(await weth.balanceOf(repayer)).to.equal(6n * ETH); + expect(await weth.balanceOf(repayer)).to.equal(10n * ETH - apiAmountWithFee); expect(await getBalance(repayer)).to.equal(0n); }); From bdff146e1be6dd444ad3f8720aa8a2a2a5e21161 Mon Sep 17 00:00:00 2001 From: LiviuD Date: Wed, 17 Dec 2025 18:15:23 +0200 Subject: [PATCH 10/27] updated baseline and added ubuntu 22 to github coverage action --- .github/workflows/coverage.yml | 2 +- coverage-baseline.json | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index d9a6461..479843d 100644 --- 
a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -6,7 +6,7 @@ on: jobs: coverage: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Checkout code diff --git a/coverage-baseline.json b/coverage-baseline.json index 6ac40b0..fd36c4f 100644 --- a/coverage-baseline.json +++ b/coverage-baseline.json @@ -1,6 +1,6 @@ { - "lines": "96.93", + "lines": "97.04", "functions": "98.58", - "branches": "87.63", - "statements": "96.93" + "branches": "88.46", + "statements": "97.04" } \ No newline at end of file From 4379e3f1e470b86919efc6cb55cbc0b3134cdcb8 Mon Sep 17 00:00:00 2001 From: LiviuD Date: Wed, 17 Dec 2025 18:34:58 +0200 Subject: [PATCH 11/27] normalized files --- .gitattributes | 29 +++++++++++++++++++++++++++++ .github/workflows/coverage.yml | 33 ++++++++++++++++++++++++++++++++- 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..715b01b --- /dev/null +++ b/.gitattributes @@ -0,0 +1,29 @@ +# Auto detect text files and perform LF normalization +* text=auto + +# Force LF line endings for all text files +* text eol=lf + +# Explicitly set line endings for source files +*.sol text eol=lf +*.ts text eol=lf +*.js text eol=lf +*.json text eol=lf +*.md text eol=lf +*.yml text eol=lf +*.yaml text eol=lf +*.sh text eol=lf +*.mjs text eol=lf + +# Binary files +*.png binary +*.jpg binary +*.jpeg binary +*.gif binary +*.pdf binary +*.ico binary +*.woff binary +*.woff2 binary +*.ttf binary +*.eot binary + diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 479843d..538b6bd 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -22,7 +22,16 @@ jobs: run: npm ci - name: Setup environment variables - run: cp .env.example .env + run: | + if [ ! -f .env.example ]; then + echo "āŒ ERROR: .env.example file is missing!" 
+ echo "This file must exist with FORK_BLOCK_NUMBER_* variables for deterministic coverage." + exit 1 + fi + cp .env.example .env + echo "āœ… Copied .env.example to .env" + echo "šŸ“‹ Fork block numbers configured:" + grep "FORK_BLOCK_NUMBER" .env | head -10 || echo " āš ļø No FORK_BLOCK_NUMBER variables found in .env.example" - name: Get baseline from main branch run: | @@ -75,6 +84,27 @@ jobs: echo "šŸ” COVERAGE VALIDATION" echo "==================================================" + # Diagnostic: Show environment setup + echo "" + echo "šŸ“‹ Environment Diagnostics:" + echo " Node version: $(node --version)" + echo " NPM version: $(npm --version)" + if [ -f .env ]; then + echo " .env file: EXISTS" + echo " Fork block numbers in .env:" + grep "FORK_BLOCK_NUMBER" .env | head -5 || echo " (none found)" + else + echo " .env file: MISSING" + fi + if [ -f .env.example ]; then + echo " .env.example file: EXISTS" + echo " Fork block numbers in .env.example:" + grep "FORK_BLOCK_NUMBER" .env.example | head -5 || echo " (none found)" + else + echo " .env.example file: MISSING āš ļø" + fi + echo "" + # Parse CI-generated coverage using dedicated script CI_LINES=$(npx ts-node --files scripts/get-coverage-percentage.ts) @@ -98,6 +128,7 @@ jobs: echo "" echo " Expected: $PR_LINES% (from your committed coverage-baseline.json)" echo " Actual: $CI_LINES% (from fresh CI coverage run)" + echo " Difference: $(awk "BEGIN {printf \"%.2f\", $PR_LINES - $CI_LINES}")%" echo "" echo "šŸ’” This means either:" echo " 1. 
You forgot to run 'npm run coverage:update-baseline' locally" From 895684db438e6e029f420593953f5f74408f3029 Mon Sep 17 00:00:00 2001 From: LiviuD Date: Wed, 17 Dec 2025 18:42:52 +0200 Subject: [PATCH 12/27] updated .env.example to use latest block and nor forked block --- .env.example | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.env.example b/.env.example index cb6a490..5adbd6f 100644 --- a/.env.example +++ b/.env.example @@ -46,15 +46,15 @@ FORK_PROVIDER=https://base-mainnet.public.blastapi.io # When undefined, Hardhat forks at latest block which causes coverage variability (±0.2%). # These blocks were captured on 2025-12-16 and should be updated periodically. # Update process: Run scripts/get-blocks.mjs to fetch latest blocks, then run coverage and update baseline. -FORK_BLOCK_NUMBER_BASE=39590000 -FORK_BLOCK_NUMBER_ETHEREUM=21500000 -FORK_BLOCK_NUMBER_ARBITRUM_ONE=412000000 -FORK_BLOCK_NUMBER_OP_MAINNET=128000000 -FORK_BLOCK_NUMBER_POLYGON_MAINNET=81000000 -FORK_BLOCK_NUMBER_AVALANCHE=74000000 -FORK_BLOCK_NUMBER_BSC=72000000 -FORK_BLOCK_NUMBER_LINEA=27000000 -FORK_BLOCK_NUMBER_UNICHAIN=1000000 +#FORK_BLOCK_NUMBER_BASE=39590000 +#FORK_BLOCK_NUMBER_ETHEREUM=21500000 +#FORK_BLOCK_NUMBER_ARBITRUM_ONE=412000000 +#FORK_BLOCK_NUMBER_OP_MAINNET=128000000 +#FORK_BLOCK_NUMBER_POLYGON_MAINNET=81000000 +#FORK_BLOCK_NUMBER_AVALANCHE=74000000 +#FORK_BLOCK_NUMBER_BSC=72000000 +#FORK_BLOCK_NUMBER_LINEA=27000000 +#FORK_BLOCK_NUMBER_UNICHAIN=1000000 USDC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b GHO_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b From 4e459899a0727af3f8d0af37a1741c4a97543c0d Mon Sep 17 00:00:00 2001 From: LiviuD Date: Wed, 17 Dec 2025 19:31:15 +0200 Subject: [PATCH 13/27] print github action env versions --- .github/workflows/coverage.yml | 23 +++++++++++++++++++++++ scripts/check-coverage.ts | 4 +++- scripts/get-coverage-percentage.ts | 3 ++- 3 files changed, 28 insertions(+), 2 deletions(-) diff 
--git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 538b6bd..5b538f2 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -21,6 +21,29 @@ jobs: - name: Install dependencies run: npm ci + - name: Print environment versions + run: | + echo "==================================================" + echo "šŸ” ENVIRONMENT VERSIONS (for debugging coverage differences)" + echo "==================================================" + echo "" + echo "Node.js version:" + node -v + echo "" + echo "NPM version:" + npm -v + echo "" + echo "Hardhat version:" + npx hardhat --version + echo "" + echo "Solidity compiler version:" + npx solcjs --version 2>/dev/null || echo " (will be downloaded by Hardhat if needed)" + echo "" + echo "Key package versions:" + npm ls hardhat solidity-coverage @nomicfoundation/hardhat-toolbox --depth=0 + echo "" + echo "==================================================" + - name: Setup environment variables run: | if [ ! 
-f .env.example ]; then diff --git a/scripts/check-coverage.ts b/scripts/check-coverage.ts index 3c2bf27..94a2834 100644 --- a/scripts/check-coverage.ts +++ b/scripts/check-coverage.ts @@ -14,7 +14,9 @@ interface CoverageData { * Parses coverage from lcov.info file */ function parseLcovCoverage(lcovPath: string): CoverageData { - const content = fs.readFileSync(lcovPath, "utf8"); + // Normalize line endings to handle both CRLF (Windows) and LF (Unix) + // This ensures consistent parsing regardless of how lcov.info was generated + const content = fs.readFileSync(lcovPath, "utf8").replace(/\r\n/g, "\n"); let linesFound = 0; let linesHit = 0; diff --git a/scripts/get-coverage-percentage.ts b/scripts/get-coverage-percentage.ts index fb33150..e9ed7a5 100644 --- a/scripts/get-coverage-percentage.ts +++ b/scripts/get-coverage-percentage.ts @@ -17,7 +17,8 @@ if (!fs.existsSync(lcovPath)) { } // Read and parse lcov file -const content = fs.readFileSync(lcovPath, "utf8"); +// Normalize line endings to handle both CRLF (Windows) and LF (Unix) +const content = fs.readFileSync(lcovPath, "utf8").replace(/\r\n/g, "\n"); const lines = content.split("\n"); let linesFound = 0; From b8e4e88dc3deeec4c16b850253a4e4c283fc3c02 Mon Sep 17 00:00:00 2001 From: Liviu Damian Date: Wed, 17 Dec 2025 20:40:58 +0200 Subject: [PATCH 14/27] generated coverage-baseline on ubuntu and aded 0.2 percentage tollerance --- coverage-baseline.json | 6 ++-- scripts/check-coverage.ts | 66 +++++++++++++++++++++++---------------- 2 files changed, 42 insertions(+), 30 deletions(-) diff --git a/coverage-baseline.json b/coverage-baseline.json index fd36c4f..6ac40b0 100644 --- a/coverage-baseline.json +++ b/coverage-baseline.json @@ -1,6 +1,6 @@ { - "lines": "97.04", + "lines": "96.93", "functions": "98.58", - "branches": "88.46", - "statements": "97.04" + "branches": "87.63", + "statements": "96.93" } \ No newline at end of file diff --git a/scripts/check-coverage.ts b/scripts/check-coverage.ts index 
94a2834..b626e66 100644 --- a/scripts/check-coverage.ts +++ b/scripts/check-coverage.ts @@ -10,12 +10,13 @@ interface CoverageData { statements: string; } +// Allowed coverage drift (percent) +const COVERAGE_TOLERANCE = 0.2; + /** * Parses coverage from lcov.info file */ function parseLcovCoverage(lcovPath: string): CoverageData { - // Normalize line endings to handle both CRLF (Windows) and LF (Unix) - // This ensures consistent parsing regardless of how lcov.info was generated const content = fs.readFileSync(lcovPath, "utf8").replace(/\r\n/g, "\n"); let linesFound = 0; @@ -43,23 +44,21 @@ function parseLcovCoverage(lcovPath: string): CoverageData { } return { - lines: linesFound > 0 ? (linesHit / linesFound * 100).toFixed(2) : "0", - functions: functionsFound > 0 ? (functionsHit / functionsFound * 100).toFixed(2) : "0", - branches: branchesFound > 0 ? (branchesHit / branchesFound * 100).toFixed(2) : "0", - statements: linesFound > 0 ? (linesHit / linesFound * 100).toFixed(2) : "0" + lines: linesFound > 0 ? ((linesHit / linesFound) * 100).toFixed(2) : "0", + functions: functionsFound > 0 ? ((functionsHit / functionsFound) * 100).toFixed(2) : "0", + branches: branchesFound > 0 ? ((branchesHit / branchesFound) * 100).toFixed(2) : "0", + statements: linesFound > 0 ? ((linesHit / linesFound) * 100).toFixed(2) : "0", }; } // Main const lcovPath = path.join(__dirname, "..", "coverage", "lcov.info"); -// Check if custom baseline path provided (for CI to compare against main) const baselineArg = process.argv.find(arg => arg.startsWith("--baseline=")); const baselinePath = baselineArg ? 
baselineArg.split("=")[1] : path.join(__dirname, "..", "coverage-baseline.json"); -// Check if we're updating baseline const isUpdatingBaseline = process.argv.includes("--update-baseline"); if (!fs.existsSync(lcovPath)) { @@ -69,7 +68,7 @@ if (!fs.existsSync(lcovPath)) { const current = parseLcovCoverage(lcovPath); -// If updating baseline, save and exit +// Update baseline mode if (isUpdatingBaseline) { fs.writeFileSync(baselinePath, JSON.stringify(current, null, 2)); console.log("\nāœ… Coverage baseline updated:"); @@ -81,7 +80,13 @@ if (isUpdatingBaseline) { } // Load baseline -let baseline: CoverageData = {lines: "0", functions: "0", branches: "0", statements: "0"}; +let baseline: CoverageData = { + lines: "0", + functions: "0", + branches: "0", + statements: "0", +}; + if (fs.existsSync(baselinePath)) { baseline = JSON.parse(fs.readFileSync(baselinePath, "utf8")) as CoverageData; } @@ -95,27 +100,34 @@ console.log(`Branches: ${baseline.branches}% → ${current.branches}%`); console.log(`Statements: ${baseline.statements}% → ${current.statements}%`); console.log("─".repeat(50)); -// Check for drops -const drops: string[] = []; -if (parseFloat(current.lines) < parseFloat(baseline.lines)) { - drops.push(`Lines dropped: ${baseline.lines}% → ${current.lines}%`); -} -if (parseFloat(current.functions) < parseFloat(baseline.functions)) { - drops.push(`Functions dropped: ${baseline.functions}% → ${current.functions}%`); -} -if (parseFloat(current.branches) < parseFloat(baseline.branches)) { - drops.push(`Branches dropped: ${baseline.branches}% → ${current.branches}%`); -} -if (parseFloat(current.statements) < parseFloat(baseline.statements)) { - drops.push(`Statements dropped: ${baseline.statements}% → ${current.statements}%`); +// Tolerant comparison +function checkDrop(metric: keyof CoverageData): string | null { + const base = parseFloat(baseline[metric]); + const curr = parseFloat(current[metric]); + const diff = curr - base; + + if (diff < -COVERAGE_TOLERANCE) { + 
return `${metric} dropped: ${base}% → ${curr}% (Ī” ${diff.toFixed(2)}%)`; + } + + return null; } +const drops = [ + checkDrop("lines"), + checkDrop("functions"), + checkDrop("branches"), + checkDrop("statements"), +].filter(Boolean) as string[]; + if (drops.length > 0) { - console.log("\nāŒ Coverage decreased:\n"); - drops.forEach((drop: string) => console.log(` • ${drop}`)); - console.log("\nšŸ’” Please add tests to maintain or improve coverage.\n"); + console.log("\nāŒ Coverage decreased beyond tolerance:\n"); + drops.forEach(d => console.log(` • ${d}`)); + console.log(`\nšŸ’” Allowed tolerance: ±${COVERAGE_TOLERANCE}%\n`); process.exit(1); } -console.log("\nāœ… Coverage maintained or improved!\n"); +console.log( + `\nāœ… Coverage maintained within tolerance (±${COVERAGE_TOLERANCE}%)\n` +); process.exit(0); From 7ad368787e9e71a817708b8a909d6639e039f5c9 Mon Sep 17 00:00:00 2001 From: Liviu Damian Date: Wed, 17 Dec 2025 20:52:06 +0200 Subject: [PATCH 15/27] updated woflwo coverage script usage --- .github/workflows/coverage.yml | 171 ++------------------------------- 1 file changed, 10 insertions(+), 161 deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 5b538f2..19d8cee 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -23,176 +23,25 @@ jobs: - name: Print environment versions run: | - echo "==================================================" - echo "šŸ” ENVIRONMENT VERSIONS (for debugging coverage differences)" - echo "==================================================" - echo "" - echo "Node.js version:" - node -v - echo "" - echo "NPM version:" - npm -v - echo "" - echo "Hardhat version:" + echo "Node.js: $(node -v)" + echo "NPM: $(npm -v)" npx hardhat --version - echo "" - echo "Solidity compiler version:" - npx solcjs --version 2>/dev/null || echo " (will be downloaded by Hardhat if needed)" - echo "" - echo "Key package versions:" - npm ls hardhat solidity-coverage 
@nomicfoundation/hardhat-toolbox --depth=0 - echo "" - echo "==================================================" + npx solcjs --version 2>/dev/null || echo "(will be downloaded by Hardhat if needed)" - name: Setup environment variables run: | - if [ ! -f .env.example ]; then - echo "āŒ ERROR: .env.example file is missing!" - echo "This file must exist with FORK_BLOCK_NUMBER_* variables for deterministic coverage." - exit 1 - fi cp .env.example .env - echo "āœ… Copied .env.example to .env" - echo "šŸ“‹ Fork block numbers configured:" - grep "FORK_BLOCK_NUMBER" .env | head -10 || echo " āš ļø No FORK_BLOCK_NUMBER variables found in .env.example" + echo "Fork block numbers configured:" + grep "FORK_BLOCK_NUMBER" .env || echo " (none found)" - - name: Get baseline from main branch + - name: Run coverage and verify run: | - # Fetch main branch - git fetch origin main - # Get baseline from main (for comparison - must not decrease) - git show origin/main:coverage-baseline.json > baseline-from-main.json 2>/dev/null || echo '{"lines":"0","functions":"0","branches":"0","statements":"0"}' > baseline-from-main.json - echo "šŸ“Š Baseline from main branch:" - cat baseline-from-main.json - - - name: Get baseline from PR - run: | - # Get baseline from current PR branch (what developer committed) - if [ -f coverage-baseline.json ]; then - # Validate JSON format - if jq empty coverage-baseline.json 2>/dev/null; then - cp coverage-baseline.json baseline-from-pr.json - echo "šŸ“Š Baseline from PR (committed by developer):" - cat baseline-from-pr.json - else - echo "āŒ ERROR: coverage-baseline.json is not valid JSON!" - echo "Please run: npm run coverage:update-baseline" - exit 1 - fi - else - echo "āŒ ERROR: No coverage-baseline.json found in PR!" - echo "" - echo "You must run coverage locally and commit the baseline file." 
- echo "" - echo "šŸ“ To fix: Run these commands locally and commit the result:" - echo " npm run coverage" - echo " npm run coverage:update-baseline" - echo " git add coverage-baseline.json" - echo " git commit -m 'chore: update coverage baseline'" - echo "" - exit 1 - fi - - - name: Run coverage - id: run_coverage - timeout-minutes: 15 - run: | - set -e # Exit immediately if coverage fails + set -e + echo "Running coverage..." npm run coverage - echo "āœ… Coverage completed successfully" - - - name: Validate coverage - run: | - echo "==================================================" - echo "šŸ” COVERAGE VALIDATION" - echo "==================================================" - - # Diagnostic: Show environment setup - echo "" - echo "šŸ“‹ Environment Diagnostics:" - echo " Node version: $(node --version)" - echo " NPM version: $(npm --version)" - if [ -f .env ]; then - echo " .env file: EXISTS" - echo " Fork block numbers in .env:" - grep "FORK_BLOCK_NUMBER" .env | head -5 || echo " (none found)" - else - echo " .env file: MISSING" - fi - if [ -f .env.example ]; then - echo " .env.example file: EXISTS" - echo " Fork block numbers in .env.example:" - grep "FORK_BLOCK_NUMBER" .env.example | head -5 || echo " (none found)" - else - echo " .env.example file: MISSING āš ļø" - fi - echo "" - - # Parse CI-generated coverage using dedicated script - CI_LINES=$(npx ts-node --files scripts/get-coverage-percentage.ts) - - # Get baselines - PR_LINES=$(jq -r .lines baseline-from-pr.json) - MAIN_LINES=$(jq -r .lines baseline-from-main.json) - - echo "" - echo "šŸ“Š Coverage Results:" - echo " CI (actual): $CI_LINES%" - echo " PR baseline: $PR_LINES%" - echo " Main baseline: $MAIN_LINES%" - echo "" - - # Check 1: CI must match PR baseline (developer ran coverage correctly) - echo "Check 1: Did developer run coverage locally?" 
- if [ "$CI_LINES" = "$PR_LINES" ]; then - echo " āœ… PASS - CI coverage matches PR baseline ($CI_LINES% == $PR_LINES%)" - else - echo " āŒ FAIL - CI coverage doesn't match PR baseline!" - echo "" - echo " Expected: $PR_LINES% (from your committed coverage-baseline.json)" - echo " Actual: $CI_LINES% (from fresh CI coverage run)" - echo " Difference: $(awk "BEGIN {printf \"%.2f\", $PR_LINES - $CI_LINES}")%" - echo "" - echo "šŸ’” This means either:" - echo " 1. You forgot to run 'npm run coverage:update-baseline' locally" - echo " 2. You modified coverage-baseline.json manually (cheating)" - echo " 3. Your local coverage differs from CI (check .env setup)" - echo "" - echo "šŸ“ To fix: Run these commands locally and commit the result:" - echo " npm run coverage" - echo " npm run coverage:update-baseline" - echo " git add coverage-baseline.json" - echo " git commit -m 'chore: update coverage baseline'" - echo "" - exit 1 - fi - - echo "" - - # Check 2: CI must be >= main baseline (coverage didn't decrease) - echo "Check 2: Did coverage decrease?" - if awk "BEGIN {exit !($CI_LINES >= $MAIN_LINES)}"; then - if awk "BEGIN {exit !($CI_LINES > $MAIN_LINES)}"; then - echo " āœ… PASS - Coverage improved! ($MAIN_LINES% → $CI_LINES%)" - else - echo " āœ… PASS - Coverage maintained ($CI_LINES%)" - fi - else - echo " āŒ FAIL - Coverage decreased!" - echo "" - echo " Main baseline: $MAIN_LINES%" - echo " Your PR: $CI_LINES%" - echo " Decrease: $(awk "BEGIN {print $MAIN_LINES - $CI_LINES}")%" - echo "" - echo "šŸ’” Please add tests to maintain or improve coverage." - echo "" - exit 1 - fi - echo "" - echo "==================================================" - echo "āœ… ALL CHECKS PASSED" - echo "==================================================" + echo "Verifying coverage against baseline..." 
+ npx ts-node --files scripts/verify-coverage.ts - name: Upload coverage report (optional) if: always() From 81912c24c1d7be92730c630aaee135d736b9af2c Mon Sep 17 00:00:00 2001 From: Liviu Damian Date: Wed, 17 Dec 2025 21:04:04 +0200 Subject: [PATCH 16/27] fixed coverage script --- .github/workflows/coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 19d8cee..6cebc29 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -41,7 +41,7 @@ jobs: npm run coverage echo "Verifying coverage against baseline..." - npx ts-node --files scripts/verify-coverage.ts + npm run coverage:check - name: Upload coverage report (optional) if: always() From c4aa6c040637fcb66d71a58aa4329eca2e84a35d Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Thu, 18 Dec 2025 11:33:09 +0200 Subject: [PATCH 17/27] updated coverage and cleaned branch --- .env.example | 18 ---------- .github/workflows/coverage.yml | 63 +++++++++++++++++++++++++++++++--- COVERAGE.md | 36 ++----------------- hardhat.config.ts | 11 ++---- scripts/get-blocks.mjs | 35 ------------------- 5 files changed, 63 insertions(+), 100 deletions(-) delete mode 100644 scripts/get-blocks.mjs diff --git a/.env.example b/.env.example index 5adbd6f..6e496b1 100644 --- a/.env.example +++ b/.env.example @@ -38,24 +38,6 @@ LINEA_RPC=https://linea-rpc.publicnode.com # Etherscan now uses a single API key for all chains. ETHERSCAN_API_KEY= -# Testing parameters. -FORK_PROVIDER=https://base-mainnet.public.blastapi.io - -# Fork block numbers for consistent coverage between local and CI runs. -# Each chain has independent block heights, so we need different blocks per chain. -# When undefined, Hardhat forks at latest block which causes coverage variability (±0.2%). -# These blocks were captured on 2025-12-16 and should be updated periodically. 
-# Update process: Run scripts/get-blocks.mjs to fetch latest blocks, then run coverage and update baseline. -#FORK_BLOCK_NUMBER_BASE=39590000 -#FORK_BLOCK_NUMBER_ETHEREUM=21500000 -#FORK_BLOCK_NUMBER_ARBITRUM_ONE=412000000 -#FORK_BLOCK_NUMBER_OP_MAINNET=128000000 -#FORK_BLOCK_NUMBER_POLYGON_MAINNET=81000000 -#FORK_BLOCK_NUMBER_AVALANCHE=74000000 -#FORK_BLOCK_NUMBER_BSC=72000000 -#FORK_BLOCK_NUMBER_LINEA=27000000 -#FORK_BLOCK_NUMBER_UNICHAIN=1000000 - USDC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b GHO_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b EURC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 6cebc29..61c3ded 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -31,16 +31,69 @@ jobs: - name: Setup environment variables run: | cp .env.example .env - echo "Fork block numbers configured:" - grep "FORK_BLOCK_NUMBER" .env || echo " (none found)" + echo "āœ… Environment configured" - - name: Run coverage and verify + - name: Fetch main branch baseline + run: | + echo "šŸ“„ Fetching coverage baseline from main branch..." + git fetch origin main:refs/remotes/origin/main + git show origin/main:coverage-baseline.json > coverage-baseline-main.json 2>/dev/null || echo "{}" + + - name: Run coverage run: | set -e - echo "Running coverage..." + echo "" + echo "šŸ“Š Running test coverage analysis in CI..." + echo "This generates fresh coverage from your PR code" npm run coverage - echo "Verifying coverage against baseline..." 
+ - name: Display coverage comparison + run: | + set -e + echo "" + echo "==================================================" + echo " COVERAGE VALIDATION" + echo "==================================================" + echo "" + + # Extract coverage from CI run (actual) + CI_COV=$(ts-node --files -e " + import fs from 'fs'; + import path from 'path'; + const content = fs.readFileSync('coverage/lcov.info', 'utf8').replace(/\r\n/g, '\n'); + let linesFound = 0, linesHit = 0; + content.split('\n').forEach(line => { + if (line.startsWith('LF:')) linesFound += parseInt(line.substring(3)); + if (line.startsWith('LH:')) linesHit += parseInt(line.substring(3)); + }); + console.log(linesFound > 0 ? ((linesHit / linesFound) * 100).toFixed(2) : '0'); + ") + + # Extract PR baseline (what developer committed) + PR_BASELINE=$(cat coverage-baseline.json | grep -o '"lines":"[^"]*"' | cut -d'"' -f4) + + # Extract main baseline (production baseline) + MAIN_BASELINE=$(cat coverage-baseline-main.json | grep -o '"lines":"[^"]*"' | cut -d'"' -f4 || echo "0") + + echo "šŸ“Š Coverage Results (Lines):" + echo " CI (actual): ${CI_COV}% ← Fresh coverage from this PR" + echo " PR baseline: ${PR_BASELINE}% ← Baseline you committed" + echo " Main baseline: ${MAIN_BASELINE}% ← Baseline from main branch" + echo "" + echo "āœ“ Check 1: CI coverage should match PR baseline" + echo " (proves you ran coverage locally)" + echo "āœ“ Check 2: CI coverage should be >= Main baseline - 0.2%" + echo " (proves coverage didn't decrease beyond tolerance)" + echo "" + echo "šŸ’” Allowed tolerance: ±0.2%" + echo "" + echo "==================================================" + echo "" + + - name: Verify coverage + run: | + set -e + echo "šŸ” Running coverage validation..." npm run coverage:check - name: Upload coverage report (optional) diff --git a/COVERAGE.md b/COVERAGE.md index 4f72f74..3dc3733 100644 --- a/COVERAGE.md +++ b/COVERAGE.md @@ -56,11 +56,10 @@ npm run coverage:update-baseline **Step-by-step:** 1. 
Make your code changes -2. Ensure `.env` file exists with pinned fork blocks (copy from `.env.example` if needed): +2. Ensure `.env` file exists (copy from `.env.example` if needed): ```bash cp .env.example .env ``` - **Important:** Using the same fork blocks as `.env.example` ensures your local coverage matches CI coverage. 3. Run coverage locally: ```bash npm run coverage @@ -110,37 +109,8 @@ Current baseline (as of initial setup): - `scripts/check-coverage.ts` - Local validation (compares coverage against baseline) - `scripts/get-coverage-percentage.ts` - Extracts coverage percentage from lcov.info (used by CI) -### Environment Setup for CI -The workflow copies `.env.example` to `.env` to enable fork tests with public RPC endpoints during coverage runs. - -### Fork Block Pinning for Deterministic Coverage - -**Why fork blocks are pinned:** -Coverage tests fork mainnet at specific block heights. Without pinning: -- Developer runs locally → forks at block X → gets 96.93% coverage -- CI runs 30 mins later → forks at block Y → gets 96.82% coverage -- Different blocks = different contract states = different test paths = different coverage - -**Solution:** -Pin each chain to a specific block number in `.env.example`: -```bash -FORK_BLOCK_NUMBER_BASE=39550474 -FORK_BLOCK_NUMBER_ETHEREUM=24024515 -FORK_BLOCK_NUMBER_ARBITRUM_ONE=411254516 -# etc... -``` - -This ensures both local and CI environments fork from **identical blockchain state**, producing **identical coverage results**. - -**Updating fork blocks:** -When you need to test against newer mainnet state: -1. Run the helper script: `node scripts/get-blocks.mjs` -2. Copy the output to `.env.example` -3. Run coverage: `npm run coverage` -4. If tests pass, update baseline: `npm run coverage:update-baseline` -5. Commit both `.env.example` and `coverage-baseline.json` - -**Note:** Each blockchain has independent block heights, so each needs its own pinned block number. 
+### Environment Setup +The workflow copies `.env.example` to `.env` to enable fork tests with public RPC endpoints during coverage runs. Tests fork at the latest block to ensure they work with current mainnet state. ### Branch Protection To enforce coverage checks, enable branch protection on main: diff --git a/hardhat.config.ts b/hardhat.config.ts index d712355..b4c7dc5 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -698,15 +698,8 @@ const config: HardhatUserConfig = { forking: { url: isSet(process.env.DRY_RUN) || isSet(process.env.FORK_TEST) ? process.env[`${process.env.DRY_RUN || process.env.FORK_TEST}_RPC`]! - : (process.env.FORK_PROVIDER || process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io"), - blockNumber: (() => { - // Determine which chain is being forked - const chain = (process.env.DRY_RUN || process.env.FORK_TEST || "BASE").toUpperCase(); - // Look up the per-chain fork block number - const blockVar = `FORK_BLOCK_NUMBER_${chain}`; - const blockNumber = process.env[blockVar]; - return blockNumber ? parseInt(blockNumber) : undefined; - })(), + : (process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io"), + blockNumber: undefined, }, accounts: isSet(process.env.DRY_RUN) ? 
[{privateKey: process.env.PRIVATE_KEY!, balance: "1000000000000000000"}] diff --git a/scripts/get-blocks.mjs b/scripts/get-blocks.mjs deleted file mode 100644 index 02628b5..0000000 --- a/scripts/get-blocks.mjs +++ /dev/null @@ -1,35 +0,0 @@ -import {ethers} from "ethers"; - -const chains = { - "BASE": process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io", - "ETHEREUM": process.env.ETHEREUM_RPC || "https://eth-mainnet.public.blastapi.io", - "ARBITRUM_ONE": process.env.ARBITRUM_ONE_RPC || "https://arbitrum-one.public.blastapi.io", - "OP_MAINNET": process.env.OP_MAINNET_RPC || "https://public-op-mainnet.fastnode.io", - "POLYGON_MAINNET": process.env.POLYGON_MAINNET_RPC || "https://polygon-bor-rpc.publicnode.com", - "AVALANCHE": process.env.AVALANCHE_RPC || "https://avalanche-c-chain-rpc.publicnode.com", - "BSC": process.env.BSC_RPC || "https://bsc-mainnet.public.blastapi.io", - "LINEA": process.env.LINEA_RPC || "https://linea-rpc.publicnode.com", -}; - -async function getBlockNumber(name, url) { - try { - const provider = new ethers.JsonRpcProvider(url); - const blockNumber = await provider.getBlockNumber(); - // Subtract 100 blocks for minimal safety margin (contracts are recent) - const safeBlock = blockNumber - 100; - console.log(`FORK_BLOCK_NUMBER_${name}=${safeBlock}`); - return safeBlock; - } catch (error) { - console.error(`# Error fetching ${name}: ${error.message}`); - return null; - } -} - -async function main() { - console.log("# Fetching current block numbers..."); - for (const [name, url] of Object.entries(chains)) { - await getBlockNumber(name, url); - } -} - -main().catch(console.error); From c0dc1c823d9081ba235ef737dd71d82e137f835e Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Thu, 18 Dec 2025 11:35:42 +0200 Subject: [PATCH 18/27] test ssh signed commit From 9ba1f4a1aedfaf0872b0e9d2914674f0a68130c8 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Thu, 18 Dec 2025 11:42:52 +0200 Subject: [PATCH 19/27] fixed fail coverage job --- 
.github/workflows/coverage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 61c3ded..7ba84ca 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -57,7 +57,7 @@ jobs: echo "" # Extract coverage from CI run (actual) - CI_COV=$(ts-node --files -e " + CI_COV=$(npx ts-node --files -e " import fs from 'fs'; import path from 'path'; const content = fs.readFileSync('coverage/lcov.info', 'utf8').replace(/\r\n/g, '\n'); From b7cf55d0736df93c13751d9d9d956b82dc8fd66d Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Thu, 18 Dec 2025 11:56:04 +0200 Subject: [PATCH 20/27] fix: use node to parse JSON in coverage workflow --- .github/workflows/coverage.yml | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 7ba84ca..51d81cb 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -57,9 +57,8 @@ jobs: echo "" # Extract coverage from CI run (actual) - CI_COV=$(npx ts-node --files -e " - import fs from 'fs'; - import path from 'path'; + CI_COV=$(node -e " + const fs = require('fs'); const content = fs.readFileSync('coverage/lcov.info', 'utf8').replace(/\r\n/g, '\n'); let linesFound = 0, linesHit = 0; content.split('\n').forEach(line => { @@ -70,10 +69,24 @@ jobs: ") # Extract PR baseline (what developer committed) - PR_BASELINE=$(cat coverage-baseline.json | grep -o '"lines":"[^"]*"' | cut -d'"' -f4) + PR_BASELINE=$(node -e " + try { + const data = require('./coverage-baseline.json'); + console.log(data.lines || '0'); + } catch(e) { + console.log('0'); + } + ") # Extract main baseline (production baseline) - MAIN_BASELINE=$(cat coverage-baseline-main.json | grep -o '"lines":"[^"]*"' | cut -d'"' -f4 || echo "0") + MAIN_BASELINE=$(node -e " + try { + const data = require('./coverage-baseline-main.json'); + console.log(data.lines || 
'0'); + } catch(e) { + console.log('0'); + } + ") echo "šŸ“Š Coverage Results (Lines):" echo " CI (actual): ${CI_COV}% ← Fresh coverage from this PR" From bf20db6a5c35e78e6e2fdb3f49b2d3c4e6d38646 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Thu, 18 Dec 2025 16:03:35 +0200 Subject: [PATCH 21/27] removed .gitattributes configuration --- .gitattributes | 29 ----------------------------- 1 file changed, 29 deletions(-) delete mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 715b01b..0000000 --- a/.gitattributes +++ /dev/null @@ -1,29 +0,0 @@ -# Auto detect text files and perform LF normalization -* text=auto - -# Force LF line endings for all text files -* text eol=lf - -# Explicitly set line endings for source files -*.sol text eol=lf -*.ts text eol=lf -*.js text eol=lf -*.json text eol=lf -*.md text eol=lf -*.yml text eol=lf -*.yaml text eol=lf -*.sh text eol=lf -*.mjs text eol=lf - -# Binary files -*.png binary -*.jpg binary -*.jpeg binary -*.gif binary -*.pdf binary -*.ico binary -*.woff binary -*.woff2 binary -*.ttf binary -*.eot binary - From e739570b31370494002906b7867eeccc7b4bb367 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Thu, 18 Dec 2025 16:32:08 +0200 Subject: [PATCH 22/27] verify ci coverage with main coverage --- .github/workflows/coverage.yml | 2 +- COVERAGE.md | 43 +++++++++++++--------- scripts/check-coverage.ts | 67 ++++++++++++++++++++++++++-------- 3 files changed, 77 insertions(+), 35 deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 51d81cb..ee78835 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -107,7 +107,7 @@ jobs: run: | set -e echo "šŸ” Running coverage validation..." 
- npm run coverage:check + npx ts-node --files scripts/check-coverage.ts --main-baseline=coverage-baseline-main.json - name: Upload coverage report (optional) if: always() diff --git a/COVERAGE.md b/COVERAGE.md index 3dc3733..80e0599 100644 --- a/COVERAGE.md +++ b/COVERAGE.md @@ -4,28 +4,35 @@ This project uses automated coverage checks to prevent test coverage from decrea ## How It Works: Dual Validation -Developers run coverage locally and commit the baseline file. CI validates both that the developer ran coverage correctly AND that coverage didn't decrease. +Developers run coverage locally and commit the baseline file. CI validates both that the developer ran coverage correctly AND that coverage didn't decrease beyond tolerance. ### Coverage Workflow (`.github/workflows/coverage.yml`) **Triggers:** Every pull request to main **What it does:** -1. **Fetches baseline from main branch** - the current production baseline -2. **Reads baseline from PR branch** - the baseline you committed -3. **Runs coverage fresh in CI** - generates actual coverage from your code -4. **Performs two validations:** +1. **Fetches baseline from main branch** - the current production baseline (coverage-baseline-main.json) +2. **Reads baseline from PR branch** - the baseline you committed (coverage-baseline.json) +3. **Runs coverage fresh in CI** - generates actual coverage from your code (coverage/lcov.info) +4. **Displays all three values** - Shows CI actual, PR baseline, and Main baseline side-by-side +5. 
**Performs two validations with ±0.2% tolerance:** **Validation 1: Did you run coverage locally?** - - āœ… **PASS** if `CI coverage === PR baseline` (you ran coverage correctly) - - āŒ **FAIL** if `CI coverage !== PR baseline` (you forgot to run coverage or tampered with file) + - āœ… **PASS** if `CI coverage ā‰ˆ PR baseline (±0.2%)` (you ran coverage correctly) + - āŒ **FAIL** if difference exceeds tolerance (you forgot to run coverage or tampered with file) **Validation 2: Did coverage decrease?** - - āœ… **PASS** if `CI coverage >= main baseline` (coverage maintained or improved) - - āŒ **FAIL** if `CI coverage < main baseline` (coverage decreased) + - āœ… **PASS** if `CI coverage >= main baseline - 0.2%` (coverage maintained within tolerance) + - āŒ **FAIL** if `CI coverage < main baseline - 0.2%` (coverage decreased beyond tolerance) + +**Tolerance:** +A ±0.2% tolerance is applied to both checks to account for: +- Minor variations in test execution +- Rounding differences in coverage calculation +- Small changes in external contract states (tests fork at latest block) **Security Model:** -- āœ… **Can't skip running coverage** - CI checks if your committed baseline matches actual coverage -- āœ… **Can't decrease coverage** - CI checks if your coverage is below main's baseline +- āœ… **Can't skip running coverage** - CI checks if your committed baseline matches actual coverage (within tolerance) +- āœ… **Can't decrease coverage** - CI checks if your coverage is below main's baseline (beyond tolerance) - āœ… **Can't cheat** - CI regenerates coverage fresh and validates against both baselines - āœ… **Can't commit invalid baseline** - CI validates JSON format before processing - āœ… **Can't skip baseline file** - CI fails immediately if baseline file is missing @@ -76,14 +83,13 @@ npm run coverage:update-baseline 6. 
Push your PR **What CI validates:** -- āœ… **Check 1:** Your committed baseline matches CI coverage (proves you ran coverage) -- āœ… **Check 2:** Your coverage is >= main's baseline (proves coverage didn't drop) +- āœ… **Check 1:** Your committed baseline matches CI coverage within ±0.2% (proves you ran coverage) +- āœ… **Check 2:** Your coverage is >= main's baseline - 0.2% (proves coverage didn't drop beyond tolerance) **If CI fails:** -- **"No coverage-baseline.json found in PR"** → You forgot to commit the baseline file. Run steps 2-4 above and push. +- **"No coverage-baseline.json found in PR"** → You forgot to commit the baseline file. Run steps 3-5 above and push. - **"coverage-baseline.json is not valid JSON"** → The baseline file is corrupted. Run `npm run coverage:update-baseline` and commit. -- **"CI coverage doesn't match PR baseline"** → You forgot to update the baseline. Run steps 2-3 above and push. -- **"Coverage decreased"** → Add more tests to maintain or improve coverage. +- **"Coverage decreased beyond tolerance"** → Coverage dropped more than 0.2% compared to PR baseline or main baseline. Add more tests to maintain or improve coverage. 
### For Maintainers @@ -105,9 +111,10 @@ Current baseline (as of initial setup): - Uses Hardhat's built-in coverage tool (generates `coverage/lcov.info`) - Parses LCOV format to extract: lines, functions, branches, statements - Stores baseline in `coverage-baseline.json` at repository root +- CI fetches main branch baseline as `coverage-baseline-main.json` - Scripts: - - `scripts/check-coverage.ts` - Local validation (compares coverage against baseline) - - `scripts/get-coverage-percentage.ts` - Extracts coverage percentage from lcov.info (used by CI) + - `scripts/check-coverage.ts` - Validates coverage against both PR and main baselines with ±0.2% tolerance + - Accepts `--main-baseline=` parameter to compare against main branch baseline ### Environment Setup The workflow copies `.env.example` to `.env` to enable fork tests with public RPC endpoints during coverage runs. Tests fork at the latest block to ensure they work with current mainnet state. diff --git a/scripts/check-coverage.ts b/scripts/check-coverage.ts index b626e66..b78f0f8 100644 --- a/scripts/check-coverage.ts +++ b/scripts/check-coverage.ts @@ -59,6 +59,11 @@ const baselinePath = baselineArg ? baselineArg.split("=")[1] : path.join(__dirname, "..", "coverage-baseline.json"); +const mainBaselineArg = process.argv.find(arg => arg.startsWith("--main-baseline=")); +const mainBaselinePath = mainBaselineArg + ? 
mainBaselineArg.split("=")[1] + : path.join(__dirname, "..", "coverage-baseline-main.json"); + const isUpdatingBaseline = process.argv.includes("--update-baseline"); if (!fs.existsSync(lcovPath)) { @@ -79,8 +84,8 @@ if (isUpdatingBaseline) { process.exit(0); } -// Load baseline -let baseline: CoverageData = { +// Load PR baseline +let prBaseline: CoverageData = { lines: "0", functions: "0", branches: "0", @@ -88,41 +93,71 @@ let baseline: CoverageData = { }; if (fs.existsSync(baselinePath)) { - baseline = JSON.parse(fs.readFileSync(baselinePath, "utf8")) as CoverageData; + prBaseline = JSON.parse(fs.readFileSync(baselinePath, "utf8")) as CoverageData; +} + +// Load main baseline +let mainBaseline: CoverageData | null = null; +if (fs.existsSync(mainBaselinePath)) { + mainBaseline = JSON.parse(fs.readFileSync(mainBaselinePath, "utf8")) as CoverageData; } // Display comparison console.log("\nšŸ“Š Coverage Comparison:"); console.log("─".repeat(50)); -console.log(`Lines: ${baseline.lines}% → ${current.lines}%`); -console.log(`Functions: ${baseline.functions}% → ${current.functions}%`); -console.log(`Branches: ${baseline.branches}% → ${current.branches}%`); -console.log(`Statements: ${baseline.statements}% → ${current.statements}%`); +console.log(`Lines: ${prBaseline.lines}% → ${current.lines}%`); +console.log(`Functions: ${prBaseline.functions}% → ${current.functions}%`); +console.log(`Branches: ${prBaseline.branches}% → ${current.branches}%`); +console.log(`Statements: ${prBaseline.statements}% → ${current.statements}%`); console.log("─".repeat(50)); // Tolerant comparison -function checkDrop(metric: keyof CoverageData): string | null { +function checkDrop(metric: keyof CoverageData, baseline: CoverageData, label: string): string | null { const base = parseFloat(baseline[metric]); const curr = parseFloat(current[metric]); const diff = curr - base; if (diff < -COVERAGE_TOLERANCE) { - return `${metric} dropped: ${base}% → ${curr}% (Ī” ${diff.toFixed(2)}%)`; + return 
`${metric} dropped below ${label}: ${base}% → ${curr}% (Ī” ${diff.toFixed(2)}%)`; } return null; } -const drops = [ - checkDrop("lines"), - checkDrop("functions"), - checkDrop("branches"), - checkDrop("statements"), +// Check against PR baseline +const prDrops = [ + checkDrop("lines", prBaseline, "PR baseline"), + checkDrop("functions", prBaseline, "PR baseline"), + checkDrop("branches", prBaseline, "PR baseline"), + checkDrop("statements", prBaseline, "PR baseline"), ].filter(Boolean) as string[]; -if (drops.length > 0) { +// Check against main baseline +const mainDrops: string[] = []; +if (mainBaseline) { + console.log("\nšŸ“Š Coverage vs Main Branch:"); + console.log("─".repeat(50)); + console.log(`Lines: ${mainBaseline.lines}% → ${current.lines}%`); + console.log(`Functions: ${mainBaseline.functions}% → ${current.functions}%`); + console.log(`Branches: ${mainBaseline.branches}% → ${current.branches}%`); + console.log(`Statements: ${mainBaseline.statements}% → ${current.statements}%`); + console.log("─".repeat(50)); + + mainDrops.push( + ...([ + checkDrop("lines", mainBaseline, "main baseline"), + checkDrop("functions", mainBaseline, "main baseline"), + checkDrop("branches", mainBaseline, "main baseline"), + checkDrop("statements", mainBaseline, "main baseline"), + ].filter(Boolean) as string[]) + ); +} + +const allDrops = [...prDrops, ...mainDrops]; + +if (allDrops.length > 0) { console.log("\nāŒ Coverage decreased beyond tolerance:\n"); - drops.forEach(d => console.log(` • ${d}`)); + allDrops.forEach(d => console.log(` • ${d}`)); console.log(`\nšŸ’” Allowed tolerance: ±${COVERAGE_TOLERANCE}%\n`); process.exit(1); } From 3dc28d31e8f6ae11769435fa293bbb8369a1cf46 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Fri, 19 Dec 2025 10:27:43 +0200 Subject: [PATCH 23/27] fixed code review changes --- .github/workflows/coverage.yml | 35 ++++++---------------------------- scripts/check-coverage.ts | 26 ++++++++++++------------- 2 files changed, 19 insertions(+), 42 
deletions(-) diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index ee78835..0eb0835 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -36,8 +36,8 @@ jobs: - name: Fetch main branch baseline run: | echo "šŸ“„ Fetching coverage baseline from main branch..." - git fetch origin main:refs/remotes/origin/main - git show origin/main:coverage-baseline.json > coverage-baseline-main.json 2>/dev/null || echo "{}" + git fetch origin main + git show origin/main:coverage-baseline.json > baseline-from-main.json 2>/dev/null || echo '{"lines":"0","functions":"0","branches":"0","statements":"0"}' > baseline-from-main.json - name: Run coverage run: | @@ -57,36 +57,13 @@ jobs: echo "" # Extract coverage from CI run (actual) - CI_COV=$(node -e " - const fs = require('fs'); - const content = fs.readFileSync('coverage/lcov.info', 'utf8').replace(/\r\n/g, '\n'); - let linesFound = 0, linesHit = 0; - content.split('\n').forEach(line => { - if (line.startsWith('LF:')) linesFound += parseInt(line.substring(3)); - if (line.startsWith('LH:')) linesHit += parseInt(line.substring(3)); - }); - console.log(linesFound > 0 ? 
((linesHit / linesFound) * 100).toFixed(2) : '0'); - ") + CI_COV=$(npx ts-node --files scripts/get-coverage-percentage.ts) # Extract PR baseline (what developer committed) - PR_BASELINE=$(node -e " - try { - const data = require('./coverage-baseline.json'); - console.log(data.lines || '0'); - } catch(e) { - console.log('0'); - } - ") + PR_BASELINE=$(jq -r '.lines // "0"' coverage-baseline.json) # Extract main baseline (production baseline) - MAIN_BASELINE=$(node -e " - try { - const data = require('./coverage-baseline-main.json'); - console.log(data.lines || '0'); - } catch(e) { - console.log('0'); - } - ") + MAIN_BASELINE=$(jq -r '.lines // "0"' baseline-from-main.json) echo "šŸ“Š Coverage Results (Lines):" echo " CI (actual): ${CI_COV}% ← Fresh coverage from this PR" @@ -107,7 +84,7 @@ jobs: run: | set -e echo "šŸ” Running coverage validation..." - npx ts-node --files scripts/check-coverage.ts --main-baseline=coverage-baseline-main.json + npx ts-node --files scripts/check-coverage.ts --main-baseline=baseline-from-main.json - name: Upload coverage report (optional) if: always() diff --git a/scripts/check-coverage.ts b/scripts/check-coverage.ts index b78f0f8..99065e5 100644 --- a/scripts/check-coverage.ts +++ b/scripts/check-coverage.ts @@ -44,10 +44,10 @@ function parseLcovCoverage(lcovPath: string): CoverageData { } return { - lines: linesFound > 0 ? ((linesHit / linesFound) * 100).toFixed(2) : "0", - functions: functionsFound > 0 ? ((functionsHit / functionsFound) * 100).toFixed(2) : "0", - branches: branchesFound > 0 ? ((branchesHit / branchesFound) * 100).toFixed(2) : "0", - statements: linesFound > 0 ? ((linesHit / linesFound) * 100).toFixed(2) : "0", + lines: linesFound > 0 ? (linesHit / linesFound * 100).toFixed(2) : "0", + functions: functionsFound > 0 ? (functionsHit / functionsFound * 100).toFixed(2) : "0", + branches: branchesFound > 0 ? (branchesHit / branchesFound * 100).toFixed(2) : "0", + statements: linesFound > 0 ? 
(linesHit / linesFound * 100).toFixed(2) : "0", }; } @@ -112,7 +112,7 @@ console.log(`Statements: ${prBaseline.statements}% → ${current.statements}%`); console.log("─".repeat(50)); // Tolerant comparison -function checkDrop(metric: keyof CoverageData, baseline: CoverageData, label: string): string | null { +function checkDrop(metric: keyof CoverageData, baseline: CoverageData, current: CoverageData, label: string): string | null { const base = parseFloat(baseline[metric]); const curr = parseFloat(current[metric]); const diff = curr - base; @@ -126,10 +126,10 @@ function checkDrop(metric: keyof CoverageData, baseline: CoverageData, label: st // Check against PR baseline const prDrops = [ - checkDrop("lines", prBaseline, "PR baseline"), - checkDrop("functions", prBaseline, "PR baseline"), - checkDrop("branches", prBaseline, "PR baseline"), - checkDrop("statements", prBaseline, "PR baseline"), + checkDrop("lines", prBaseline, current, "PR baseline"), + checkDrop("functions", prBaseline, current, "PR baseline"), + checkDrop("branches", prBaseline, current, "PR baseline"), + checkDrop("statements", prBaseline, current, "PR baseline"), ].filter(Boolean) as string[]; // Check against main baseline @@ -145,10 +145,10 @@ if (mainBaseline) { mainDrops.push( ...([ - checkDrop("lines", mainBaseline, "main baseline"), - checkDrop("functions", mainBaseline, "main baseline"), - checkDrop("branches", mainBaseline, "main baseline"), - checkDrop("statements", mainBaseline, "main baseline"), + checkDrop("lines", mainBaseline, current, "main baseline"), + checkDrop("functions", mainBaseline, current, "main baseline"), + checkDrop("branches", mainBaseline, current, "main baseline"), + checkDrop("statements", mainBaseline, current, "main baseline"), ].filter(Boolean) as string[]) ); } From 2574d78cbd2c52479d490e6dde34efc82d786eb8 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Fri, 19 Dec 2025 10:47:50 +0200 Subject: [PATCH 24/27] test: verify commit signing From 
66b36aa8bb213f8c40ee7127dd632b0543b126f9 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Fri, 19 Dec 2025 10:53:45 +0200 Subject: [PATCH 25/27] fixed lint issues --- scripts/check-coverage.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/check-coverage.ts b/scripts/check-coverage.ts index 99065e5..dbca57e 100644 --- a/scripts/check-coverage.ts +++ b/scripts/check-coverage.ts @@ -112,7 +112,12 @@ console.log(`Statements: ${prBaseline.statements}% → ${current.statements}%`); console.log("─".repeat(50)); // Tolerant comparison -function checkDrop(metric: keyof CoverageData, baseline: CoverageData, current: CoverageData, label: string): string | null { +function checkDrop( + metric: keyof CoverageData, + baseline: CoverageData, + current: CoverageData, + label: string +): string | null { const base = parseFloat(baseline[metric]); const curr = parseFloat(current[metric]); const diff = curr - base; From e8e846c24e99ce5e11f6cc0d7282a6b5ec49297d Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Fri, 19 Dec 2025 12:34:05 +0200 Subject: [PATCH 26/27] updated harhat behafior to cover main --- .env.example | 4 +++- hardhat.config.ts | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.env.example b/.env.example index 6e496b1..c1bb527 100644 --- a/.env.example +++ b/.env.example @@ -38,7 +38,9 @@ LINEA_RPC=https://linea-rpc.publicnode.com # Etherscan now uses a single API key for all chains. ETHERSCAN_API_KEY= -USDC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b +# Testing parameters. 
+FORK_PROVIDER=https://base-mainnet.public.blastapi.io +USDC_OWNER_ADDRESS=0x498581fFF718922c3f8e6A244956aF099B2652b2b GHO_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b EURC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b WETH_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b diff --git a/hardhat.config.ts b/hardhat.config.ts index b4c7dc5..9c3b3ed 100644 --- a/hardhat.config.ts +++ b/hardhat.config.ts @@ -698,8 +698,8 @@ const config: HardhatUserConfig = { forking: { url: isSet(process.env.DRY_RUN) || isSet(process.env.FORK_TEST) ? process.env[`${process.env.DRY_RUN || process.env.FORK_TEST}_RPC`]! - : (process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io"), - blockNumber: undefined, + : (process.env.FORK_PROVIDER || process.env.BASE_RPC || "https://base-mainnet.public.blastapi.io"), + blockNumber: process.env.FORK_BLOCK_NUMBER ? parseInt(process.env.FORK_BLOCK_NUMBER) : undefined, }, accounts: isSet(process.env.DRY_RUN) ? [{privateKey: process.env.PRIVATE_KEY!, balance: "1000000000000000000"}] From 1fd7472a7a63b02a58b847e429c7558ebdc00059 Mon Sep 17 00:00:00 2001 From: LyonSsS Date: Fri, 19 Dec 2025 12:39:51 +0200 Subject: [PATCH 27/27] fix: correct USDC_OWNER_ADDRESS typo in .env.example --- .env.example | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.env.example b/.env.example index c1bb527..a45c154 100644 --- a/.env.example +++ b/.env.example @@ -40,7 +40,7 @@ ETHERSCAN_API_KEY= # Testing parameters. FORK_PROVIDER=https://base-mainnet.public.blastapi.io -USDC_OWNER_ADDRESS=0x498581fFF718922c3f8e6A244956aF099B2652b2b +USDC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b GHO_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b EURC_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b WETH_OWNER_ADDRESS=0x498581fF718922c3f8e6A244956aF099B2652b2b